From cf03b86ba7487306a4b0b7b58d9c14580ecc9a06 Mon Sep 17 00:00:00 2001 From: alberthli Date: Sat, 11 Nov 2023 23:32:10 -0800 Subject: [PATCH] first stab at stricter typing checks --- ambersim/utils/_internal_utils.py | 6 +- ambersim/utils/conversion_utils.py | 8 +- ambersim/utils/io_utils.py | 24 +- examples/convex_decomposition.py | 8 +- pyrightconfig.json | 6 +- tests/test_model_io.py | 6 +- typings/coacd/__init__.pyi | 47 + typings/dm_control/__init__.pyi | 7 + typings/dm_control/_render/__init__.pyi | 26 + typings/dm_control/_render/base.pyi | 59 + typings/dm_control/_render/constants.pyi | 11 + .../dm_control/_render/executor/__init__.pyi | 32 + .../_render/executor/render_executor.pyi | 102 + typings/dm_control/_render/glfw_renderer.pyi | 12 + .../dm_control/_render/pyopengl/__init__.pyi | 3 + .../dm_control/_render/pyopengl/egl_ext.pyi | 15 + .../_render/pyopengl/egl_renderer.pyi | 24 + .../_render/pyopengl/osmesa_renderer.pyi | 18 + typings/dm_control/autowrap/__init__.pyi | 3 + typings/dm_control/composer/__init__.pyi | 14 + typings/dm_control/composer/arena.pyi | 19 + typings/dm_control/composer/constants.pyi | 6 + typings/dm_control/composer/define.pyi | 25 + typings/dm_control/composer/entity.pyi | 336 +++ typings/dm_control/composer/environment.pyi | 211 ++ typings/dm_control/composer/initializer.pyi | 13 + .../composer/initializers/__init__.pyi | 8 + .../initializers/prop_initializer.pyi | 83 + .../composer/initializers/tcp_initializer.pyi | 67 + .../composer/initializers/utils.pyi | 27 + .../composer/observation/__init__.pyi | 9 + .../composer/observation/obs_buffer.pyi | 99 + .../observation/observable/__init__.pyi | 8 + .../composer/observation/observable/base.pyi | 205 ++ .../composer/observation/observable/mjcf.pyi | 165 ++ .../composer/observation/updater.pyi | 75 + typings/dm_control/composer/robot.pyi | 20 + typings/dm_control/composer/task.pyi | 231 ++ .../composer/variation/__init__.pyi | 76 + .../dm_control/composer/variation/base.pyi | 72 
+ .../composer/variation/distributions.pyi | 97 + .../composer/variation/rotations.pyi | 35 + .../composer/variation/variation_values.pyi | 22 + typings/dm_control/entities/__init__.pyi | 3 + .../entities/manipulators/__init__.pyi | 5 + .../dm_control/entities/manipulators/base.pyi | 100 + .../entities/manipulators/kinova/__init__.pyi | 8 + .../manipulators/kinova/assets_path.pyi | 7 + .../entities/manipulators/kinova/jaco_arm.pyi | 53 + .../manipulators/kinova/jaco_hand.pyi | 80 + .../dm_control/entities/props/__init__.pyi | 12 + .../entities/props/duplo/__init__.pyi | 55 + .../entities/props/position_detector.pyi | 84 + .../dm_control/entities/props/primitive.pyi | 51 + typings/dm_control/locomotion/__init__.pyi | 3 + typings/dm_control/manipulation/__init__.pyi | 44 + typings/dm_control/manipulation/bricks.pyi | 172 ++ typings/dm_control/manipulation/lift.pyi | 81 + typings/dm_control/manipulation/place.pyi | 88 + typings/dm_control/manipulation/reach.pyi | 64 + .../manipulation/shared/__init__.pyi | 3 + .../dm_control/manipulation/shared/arenas.pyi | 23 + .../manipulation/shared/cameras.pyi | 25 + .../manipulation/shared/constants.pyi | 13 + .../manipulation/shared/observations.pyi | 60 + .../manipulation/shared/registry.pyi | 14 + .../dm_control/manipulation/shared/robots.pyi | 28 + .../dm_control/manipulation/shared/tags.pyi | 8 + .../manipulation/shared/workspaces.pyi | 49 + typings/dm_control/mjcf/__init__.pyi | 23 + typings/dm_control/mjcf/attribute.pyi | 125 + typings/dm_control/mjcf/base.pyi | 236 ++ typings/dm_control/mjcf/constants.pyi | 30 + typings/dm_control/mjcf/copier.pyi | 15 + typings/dm_control/mjcf/debugging.pyi | 137 ++ typings/dm_control/mjcf/element.pyi | 393 ++++ .../dm_control/mjcf/export_with_assets.pyi | 29 + .../mjcf/export_with_assets_as_zip.pyi | 30 + typings/dm_control/mjcf/namescope.pyi | 119 + typings/dm_control/mjcf/parser.pyi | 69 + typings/dm_control/mjcf/physics.pyi | 214 ++ typings/dm_control/mjcf/schema.pyi | 52 + 
typings/dm_control/mjcf/skin.pyi | 34 + typings/dm_control/mjcf/traversal_utils.pyi | 28 + typings/dm_control/mujoco/__init__.pyi | 10 + typings/dm_control/mujoco/engine.pyi | 527 +++++ typings/dm_control/mujoco/index.pyi | 231 ++ .../dm_control/mujoco/wrapper/__init__.pyi | 23 + typings/dm_control/mujoco/wrapper/core.pyi | 387 ++++ .../mujoco/wrapper/mjbindings/__init__.pyi | 9 + .../mujoco/wrapper/mjbindings/constants.pyi | 80 + .../mujoco/wrapper/mjbindings/enums.pyi | 50 + .../mujoco/wrapper/mjbindings/functions.pyi | 19 + .../mujoco/wrapper/mjbindings/sizes.pyi | 9 + typings/dm_control/mujoco/wrapper/util.pyi | 27 + typings/dm_control/rl/__init__.pyi | 7 + typings/dm_control/rl/control.pyi | 273 +++ typings/dm_control/suite/__init__.pyi | 82 + typings/dm_control/suite/acrobot.pyi | 69 + typings/dm_control/suite/ball_in_cup.pyi | 48 + typings/dm_control/suite/base.pyi | 66 + typings/dm_control/suite/cartpole.pyi | 100 + typings/dm_control/suite/cheetah.pyi | 40 + typings/dm_control/suite/common/__init__.pyi | 16 + typings/dm_control/suite/dog.pyi | 180 ++ typings/dm_control/suite/finger.pyi | 105 + typings/dm_control/suite/fish.pyi | 89 + typings/dm_control/suite/hopper.pyi | 64 + typings/dm_control/suite/humanoid.pyi | 95 + typings/dm_control/suite/humanoid_CMU.pyi | 87 + typings/dm_control/suite/lqr.pyi | 79 + typings/dm_control/suite/manipulator.pyi | 85 + typings/dm_control/suite/pendulum.pyi | 72 + typings/dm_control/suite/point_mass.pyi | 65 + typings/dm_control/suite/quadruped.pyi | 154 ++ typings/dm_control/suite/reacher.pyi | 59 + typings/dm_control/suite/stacker.pyi | 69 + typings/dm_control/suite/swimmer.pyi | 82 + typings/dm_control/suite/utils/__init__.pyi | 5 + .../dm_control/suite/utils/randomizers.pyi | 28 + typings/dm_control/suite/walker.pyi | 82 + typings/dm_control/utils/__init__.pyi | 3 + typings/dm_control/utils/containers.pyi | 74 + .../dm_control/utils/inverse_kinematics.pyi | 96 + typings/dm_control/utils/io.pyi | 14 + 
typings/dm_control/utils/rewards.pyi | 36 + typings/dm_control/utils/xml_tools.pyi | 61 + typings/dm_control/viewer/__init__.pyi | 25 + typings/dm_control/viewer/application.pyi | 98 + typings/dm_control/viewer/gui/__init__.pyi | 14 + typings/dm_control/viewer/gui/base.pyi | 34 + .../dm_control/viewer/gui/fullscreen_quad.pyi | 34 + typings/dm_control/viewer/gui/glfw_gui.pyi | 138 ++ typings/dm_control/viewer/renderer.pyi | 355 +++ typings/dm_control/viewer/runtime.pyi | 87 + typings/dm_control/viewer/user_input.pyi | 205 ++ typings/dm_control/viewer/util.pyi | 212 ++ typings/dm_control/viewer/viewer.pyi | 233 ++ typings/dm_control/viewer/views.pyi | 118 + typings/mujoco/__init__.pyi | 44 + typings/mujoco/bindings_test.pyi | 226 ++ typings/mujoco/cgl/__init__.pyi | 25 + typings/mujoco/cgl/cgl.pyi | 81 + typings/mujoco/egl/__init__.pyi | 35 + typings/mujoco/egl/egl_ext.pyi | 15 + typings/mujoco/gl_context.pyi | 11 + typings/mujoco/glfw/__init__.pyi | 18 + typings/mujoco/mjx/__init__.pyi | 16 + typings/mujoco/mjx/_src/__init__.pyi | 7 + typings/mujoco/mjx/_src/collision_base.pyi | 51 + typings/mujoco/mjx/_src/collision_convex.pyi | 23 + typings/mujoco/mjx/_src/collision_driver.pyi | 33 + .../mujoco/mjx/_src/collision_primitive.pyi | 27 + typings/mujoco/mjx/_src/constraint.pyi | 33 + typings/mujoco/mjx/_src/dataclasses.pyi | 41 + typings/mujoco/mjx/_src/device.pyi | 53 + typings/mujoco/mjx/_src/forward.pyi | 23 + typings/mujoco/mjx/_src/io.pyi | 13 + typings/mujoco/mjx/_src/math.pyi | 194 ++ typings/mujoco/mjx/_src/mesh.pyi | 20 + typings/mujoco/mjx/_src/passive.pyi | 13 + typings/mujoco/mjx/_src/scan.pyi | 73 + typings/mujoco/mjx/_src/smooth.pyi | 50 + typings/mujoco/mjx/_src/solver.pyi | 87 + typings/mujoco/mjx/_src/support.pyi | 26 + typings/mujoco/mjx/_src/types.pyi | 608 +++++ typings/mujoco/msh2obj.pyi | 35 + typings/mujoco/msh2obj_test.pyi | 14 + typings/mujoco/osmesa/__init__.pyi | 30 + typings/mujoco/render_test.pyi | 32 + typings/mujoco/renderer.pyi | 78 + 
typings/mujoco/renderer_test.pyi | 25 + typings/mujoco/rollout.pyi | 31 + typings/mujoco/rollout_test.pyi | 73 + typings/mujoco/viewer.pyi | 103 + typings/mujoco/viewer_test.pyi | 13 + typings/trimesh/__init__.pyi | 98 + typings/trimesh/base.pyi | 2030 +++++++++++++++++ typings/trimesh/boolean.pyi | 73 + typings/trimesh/bounds.pyi | 143 ++ typings/trimesh/caching.pyi | 366 +++ typings/trimesh/collision.pyi | 401 ++++ typings/trimesh/comparison.pyi | 49 + typings/trimesh/constants.pyi | 125 + typings/trimesh/convex.pyi | 86 + typings/trimesh/creation.pyi | 431 ++++ typings/trimesh/curvature.pyi | 118 + typings/trimesh/decomposition.pyi | 38 + typings/trimesh/exceptions.pyi | 26 + typings/trimesh/exchange/__init__.pyi | 18 + typings/trimesh/exchange/binvox.pyi | 342 +++ typings/trimesh/exchange/dae.pyi | 74 + typings/trimesh/exchange/export.pyi | 87 + typings/trimesh/exchange/gltf.pyi | 184 ++ typings/trimesh/exchange/load.pyi | 180 ++ typings/trimesh/exchange/misc.pyi | 55 + typings/trimesh/exchange/obj.pyi | 103 + typings/trimesh/exchange/off.pyi | 40 + typings/trimesh/exchange/openctm.pyi | 31 + typings/trimesh/exchange/ply.pyi | 94 + typings/trimesh/exchange/stl.pyi | 95 + typings/trimesh/exchange/threedxml.pyi | 39 + typings/trimesh/exchange/threemf.pyi | 44 + typings/trimesh/exchange/urdf.pyi | 23 + typings/trimesh/exchange/xaml.pyi | 28 + typings/trimesh/exchange/xyz.pyi | 45 + typings/trimesh/geometry.pyi | 221 ++ typings/trimesh/graph.pyi | 449 ++++ typings/trimesh/grouping.pyi | 417 ++++ typings/trimesh/inertia.pyi | 145 ++ typings/trimesh/interfaces/__init__.pyi | 7 + typings/trimesh/interfaces/blender.pyi | 27 + typings/trimesh/interfaces/generic.pyi | 11 + typings/trimesh/interfaces/gmsh.pyi | 76 + typings/trimesh/intersections.pyi | 208 ++ typings/trimesh/interval.pyi | 59 + typings/trimesh/nsphere.pyi | 72 + typings/trimesh/parent.pyi | 201 ++ typings/trimesh/path/__init__.pyi | 14 + typings/trimesh/path/arc.pyi | 84 + typings/trimesh/path/creation.pyi 
| 117 + typings/trimesh/path/curve.pyi | 61 + typings/trimesh/path/entities.pyi | 519 +++++ typings/trimesh/path/exchange/__init__.pyi | 3 + typings/trimesh/path/exchange/dxf.pyi | 110 + typings/trimesh/path/exchange/export.pyi | 30 + typings/trimesh/path/exchange/load.pyi | 42 + typings/trimesh/path/exchange/misc.pyi | 106 + typings/trimesh/path/exchange/svg_io.pyi | 75 + typings/trimesh/path/packing.pyi | 301 +++ typings/trimesh/path/path.pyi | 886 +++++++ typings/trimesh/path/polygons.pyi | 405 ++++ typings/trimesh/path/raster.pyi | 36 + typings/trimesh/path/repair.pyi | 33 + typings/trimesh/path/segments.pyi | 238 ++ typings/trimesh/path/simplify.pyi | 153 ++ typings/trimesh/path/traversal.pyi | 147 ++ typings/trimesh/path/util.pyi | 19 + typings/trimesh/permutate.pyi | 81 + typings/trimesh/points.pyi | 440 ++++ typings/trimesh/poses.pyi | 60 + typings/trimesh/primitives.pyi | 557 +++++ typings/trimesh/proximity.pyi | 258 +++ typings/trimesh/ray/__init__.pyi | 8 + typings/trimesh/ray/ray_pyembree.pyi | 150 ++ typings/trimesh/ray/ray_triangle.pyi | 197 ++ typings/trimesh/ray/ray_util.pyi | 25 + typings/trimesh/registration.pyi | 281 +++ typings/trimesh/remesh.pyi | 131 ++ typings/trimesh/rendering.pyi | 185 ++ typings/trimesh/repair.pyi | 118 + typings/trimesh/resolvers.pyi | 308 +++ typings/trimesh/resources/__init__.pyi | 49 + typings/trimesh/sample.pyi | 145 ++ typings/trimesh/scene/__init__.pyi | 8 + typings/trimesh/scene/cameras.pyi | 240 ++ typings/trimesh/scene/lighting.pyi | 188 ++ typings/trimesh/scene/scene.pyi | 651 ++++++ typings/trimesh/scene/transforms.pyi | 365 +++ typings/trimesh/schemas.pyi | 30 + typings/trimesh/smoothing.pyi | 164 ++ typings/trimesh/transformations.pyi | 1300 +++++++++++ typings/trimesh/triangles.pyi | 321 +++ typings/trimesh/typed.pyi | 5 + typings/trimesh/units.pyi | 71 + typings/trimesh/util.pyi | 1317 +++++++++++ typings/trimesh/version.pyi | 12 + typings/trimesh/viewer/__init__.pyi | 16 + 
typings/trimesh/viewer/notebook.pyi | 57 + typings/trimesh/viewer/trackball.pyi | 99 + typings/trimesh/viewer/widget.pyi | 53 + typings/trimesh/viewer/windowed.pyi | 249 ++ typings/trimesh/visual/__init__.pyi | 40 + typings/trimesh/visual/base.pyi | 53 + typings/trimesh/visual/color.pyi | 474 ++++ typings/trimesh/visual/gloss.pyi | 44 + typings/trimesh/visual/material.pyi | 503 ++++ typings/trimesh/visual/objects.pyi | 49 + typings/trimesh/visual/texture.pyi | 189 ++ typings/trimesh/voxel/__init__.pyi | 7 + typings/trimesh/voxel/base.pyi | 291 +++ typings/trimesh/voxel/creation.pyi | 130 ++ typings/trimesh/voxel/encoding.pyi | 484 ++++ typings/trimesh/voxel/morphology.pyi | 117 + typings/trimesh/voxel/ops.pyi | 169 ++ typings/trimesh/voxel/runlength.pyi | 416 ++++ typings/trimesh/voxel/transforms.pyi | 127 ++ 287 files changed, 34541 insertions(+), 26 deletions(-) create mode 100644 typings/coacd/__init__.pyi create mode 100644 typings/dm_control/__init__.pyi create mode 100644 typings/dm_control/_render/__init__.pyi create mode 100644 typings/dm_control/_render/base.pyi create mode 100644 typings/dm_control/_render/constants.pyi create mode 100644 typings/dm_control/_render/executor/__init__.pyi create mode 100644 typings/dm_control/_render/executor/render_executor.pyi create mode 100644 typings/dm_control/_render/glfw_renderer.pyi create mode 100644 typings/dm_control/_render/pyopengl/__init__.pyi create mode 100644 typings/dm_control/_render/pyopengl/egl_ext.pyi create mode 100644 typings/dm_control/_render/pyopengl/egl_renderer.pyi create mode 100644 typings/dm_control/_render/pyopengl/osmesa_renderer.pyi create mode 100644 typings/dm_control/autowrap/__init__.pyi create mode 100644 typings/dm_control/composer/__init__.pyi create mode 100644 typings/dm_control/composer/arena.pyi create mode 100644 typings/dm_control/composer/constants.pyi create mode 100644 typings/dm_control/composer/define.pyi create mode 100644 typings/dm_control/composer/entity.pyi create 
mode 100644 typings/dm_control/composer/environment.pyi create mode 100644 typings/dm_control/composer/initializer.pyi create mode 100644 typings/dm_control/composer/initializers/__init__.pyi create mode 100644 typings/dm_control/composer/initializers/prop_initializer.pyi create mode 100644 typings/dm_control/composer/initializers/tcp_initializer.pyi create mode 100644 typings/dm_control/composer/initializers/utils.pyi create mode 100644 typings/dm_control/composer/observation/__init__.pyi create mode 100644 typings/dm_control/composer/observation/obs_buffer.pyi create mode 100644 typings/dm_control/composer/observation/observable/__init__.pyi create mode 100644 typings/dm_control/composer/observation/observable/base.pyi create mode 100644 typings/dm_control/composer/observation/observable/mjcf.pyi create mode 100644 typings/dm_control/composer/observation/updater.pyi create mode 100644 typings/dm_control/composer/robot.pyi create mode 100644 typings/dm_control/composer/task.pyi create mode 100644 typings/dm_control/composer/variation/__init__.pyi create mode 100644 typings/dm_control/composer/variation/base.pyi create mode 100644 typings/dm_control/composer/variation/distributions.pyi create mode 100644 typings/dm_control/composer/variation/rotations.pyi create mode 100644 typings/dm_control/composer/variation/variation_values.pyi create mode 100644 typings/dm_control/entities/__init__.pyi create mode 100644 typings/dm_control/entities/manipulators/__init__.pyi create mode 100644 typings/dm_control/entities/manipulators/base.pyi create mode 100644 typings/dm_control/entities/manipulators/kinova/__init__.pyi create mode 100644 typings/dm_control/entities/manipulators/kinova/assets_path.pyi create mode 100644 typings/dm_control/entities/manipulators/kinova/jaco_arm.pyi create mode 100644 typings/dm_control/entities/manipulators/kinova/jaco_hand.pyi create mode 100644 typings/dm_control/entities/props/__init__.pyi create mode 100644 
typings/dm_control/entities/props/duplo/__init__.pyi create mode 100644 typings/dm_control/entities/props/position_detector.pyi create mode 100644 typings/dm_control/entities/props/primitive.pyi create mode 100644 typings/dm_control/locomotion/__init__.pyi create mode 100644 typings/dm_control/manipulation/__init__.pyi create mode 100644 typings/dm_control/manipulation/bricks.pyi create mode 100644 typings/dm_control/manipulation/lift.pyi create mode 100644 typings/dm_control/manipulation/place.pyi create mode 100644 typings/dm_control/manipulation/reach.pyi create mode 100644 typings/dm_control/manipulation/shared/__init__.pyi create mode 100644 typings/dm_control/manipulation/shared/arenas.pyi create mode 100644 typings/dm_control/manipulation/shared/cameras.pyi create mode 100644 typings/dm_control/manipulation/shared/constants.pyi create mode 100644 typings/dm_control/manipulation/shared/observations.pyi create mode 100644 typings/dm_control/manipulation/shared/registry.pyi create mode 100644 typings/dm_control/manipulation/shared/robots.pyi create mode 100644 typings/dm_control/manipulation/shared/tags.pyi create mode 100644 typings/dm_control/manipulation/shared/workspaces.pyi create mode 100644 typings/dm_control/mjcf/__init__.pyi create mode 100644 typings/dm_control/mjcf/attribute.pyi create mode 100644 typings/dm_control/mjcf/base.pyi create mode 100644 typings/dm_control/mjcf/constants.pyi create mode 100644 typings/dm_control/mjcf/copier.pyi create mode 100644 typings/dm_control/mjcf/debugging.pyi create mode 100644 typings/dm_control/mjcf/element.pyi create mode 100644 typings/dm_control/mjcf/export_with_assets.pyi create mode 100644 typings/dm_control/mjcf/export_with_assets_as_zip.pyi create mode 100644 typings/dm_control/mjcf/namescope.pyi create mode 100644 typings/dm_control/mjcf/parser.pyi create mode 100644 typings/dm_control/mjcf/physics.pyi create mode 100644 typings/dm_control/mjcf/schema.pyi create mode 100644 
typings/dm_control/mjcf/skin.pyi create mode 100644 typings/dm_control/mjcf/traversal_utils.pyi create mode 100644 typings/dm_control/mujoco/__init__.pyi create mode 100644 typings/dm_control/mujoco/engine.pyi create mode 100644 typings/dm_control/mujoco/index.pyi create mode 100644 typings/dm_control/mujoco/wrapper/__init__.pyi create mode 100644 typings/dm_control/mujoco/wrapper/core.pyi create mode 100644 typings/dm_control/mujoco/wrapper/mjbindings/__init__.pyi create mode 100644 typings/dm_control/mujoco/wrapper/mjbindings/constants.pyi create mode 100644 typings/dm_control/mujoco/wrapper/mjbindings/enums.pyi create mode 100644 typings/dm_control/mujoco/wrapper/mjbindings/functions.pyi create mode 100644 typings/dm_control/mujoco/wrapper/mjbindings/sizes.pyi create mode 100644 typings/dm_control/mujoco/wrapper/util.pyi create mode 100644 typings/dm_control/rl/__init__.pyi create mode 100644 typings/dm_control/rl/control.pyi create mode 100644 typings/dm_control/suite/__init__.pyi create mode 100644 typings/dm_control/suite/acrobot.pyi create mode 100644 typings/dm_control/suite/ball_in_cup.pyi create mode 100644 typings/dm_control/suite/base.pyi create mode 100644 typings/dm_control/suite/cartpole.pyi create mode 100644 typings/dm_control/suite/cheetah.pyi create mode 100644 typings/dm_control/suite/common/__init__.pyi create mode 100644 typings/dm_control/suite/dog.pyi create mode 100644 typings/dm_control/suite/finger.pyi create mode 100644 typings/dm_control/suite/fish.pyi create mode 100644 typings/dm_control/suite/hopper.pyi create mode 100644 typings/dm_control/suite/humanoid.pyi create mode 100644 typings/dm_control/suite/humanoid_CMU.pyi create mode 100644 typings/dm_control/suite/lqr.pyi create mode 100644 typings/dm_control/suite/manipulator.pyi create mode 100644 typings/dm_control/suite/pendulum.pyi create mode 100644 typings/dm_control/suite/point_mass.pyi create mode 100644 typings/dm_control/suite/quadruped.pyi create mode 100644 
typings/dm_control/suite/reacher.pyi create mode 100644 typings/dm_control/suite/stacker.pyi create mode 100644 typings/dm_control/suite/swimmer.pyi create mode 100644 typings/dm_control/suite/utils/__init__.pyi create mode 100644 typings/dm_control/suite/utils/randomizers.pyi create mode 100644 typings/dm_control/suite/walker.pyi create mode 100644 typings/dm_control/utils/__init__.pyi create mode 100644 typings/dm_control/utils/containers.pyi create mode 100644 typings/dm_control/utils/inverse_kinematics.pyi create mode 100644 typings/dm_control/utils/io.pyi create mode 100644 typings/dm_control/utils/rewards.pyi create mode 100644 typings/dm_control/utils/xml_tools.pyi create mode 100644 typings/dm_control/viewer/__init__.pyi create mode 100644 typings/dm_control/viewer/application.pyi create mode 100644 typings/dm_control/viewer/gui/__init__.pyi create mode 100644 typings/dm_control/viewer/gui/base.pyi create mode 100644 typings/dm_control/viewer/gui/fullscreen_quad.pyi create mode 100644 typings/dm_control/viewer/gui/glfw_gui.pyi create mode 100644 typings/dm_control/viewer/renderer.pyi create mode 100644 typings/dm_control/viewer/runtime.pyi create mode 100644 typings/dm_control/viewer/user_input.pyi create mode 100644 typings/dm_control/viewer/util.pyi create mode 100644 typings/dm_control/viewer/viewer.pyi create mode 100644 typings/dm_control/viewer/views.pyi create mode 100644 typings/mujoco/__init__.pyi create mode 100644 typings/mujoco/bindings_test.pyi create mode 100644 typings/mujoco/cgl/__init__.pyi create mode 100644 typings/mujoco/cgl/cgl.pyi create mode 100644 typings/mujoco/egl/__init__.pyi create mode 100644 typings/mujoco/egl/egl_ext.pyi create mode 100644 typings/mujoco/gl_context.pyi create mode 100644 typings/mujoco/glfw/__init__.pyi create mode 100644 typings/mujoco/mjx/__init__.pyi create mode 100644 typings/mujoco/mjx/_src/__init__.pyi create mode 100644 typings/mujoco/mjx/_src/collision_base.pyi create mode 100644 
typings/mujoco/mjx/_src/collision_convex.pyi create mode 100644 typings/mujoco/mjx/_src/collision_driver.pyi create mode 100644 typings/mujoco/mjx/_src/collision_primitive.pyi create mode 100644 typings/mujoco/mjx/_src/constraint.pyi create mode 100644 typings/mujoco/mjx/_src/dataclasses.pyi create mode 100644 typings/mujoco/mjx/_src/device.pyi create mode 100644 typings/mujoco/mjx/_src/forward.pyi create mode 100644 typings/mujoco/mjx/_src/io.pyi create mode 100644 typings/mujoco/mjx/_src/math.pyi create mode 100644 typings/mujoco/mjx/_src/mesh.pyi create mode 100644 typings/mujoco/mjx/_src/passive.pyi create mode 100644 typings/mujoco/mjx/_src/scan.pyi create mode 100644 typings/mujoco/mjx/_src/smooth.pyi create mode 100644 typings/mujoco/mjx/_src/solver.pyi create mode 100644 typings/mujoco/mjx/_src/support.pyi create mode 100644 typings/mujoco/mjx/_src/types.pyi create mode 100644 typings/mujoco/msh2obj.pyi create mode 100644 typings/mujoco/msh2obj_test.pyi create mode 100644 typings/mujoco/osmesa/__init__.pyi create mode 100644 typings/mujoco/render_test.pyi create mode 100644 typings/mujoco/renderer.pyi create mode 100644 typings/mujoco/renderer_test.pyi create mode 100644 typings/mujoco/rollout.pyi create mode 100644 typings/mujoco/rollout_test.pyi create mode 100644 typings/mujoco/viewer.pyi create mode 100644 typings/mujoco/viewer_test.pyi create mode 100644 typings/trimesh/__init__.pyi create mode 100644 typings/trimesh/base.pyi create mode 100644 typings/trimesh/boolean.pyi create mode 100644 typings/trimesh/bounds.pyi create mode 100644 typings/trimesh/caching.pyi create mode 100644 typings/trimesh/collision.pyi create mode 100644 typings/trimesh/comparison.pyi create mode 100644 typings/trimesh/constants.pyi create mode 100644 typings/trimesh/convex.pyi create mode 100644 typings/trimesh/creation.pyi create mode 100644 typings/trimesh/curvature.pyi create mode 100644 typings/trimesh/decomposition.pyi create mode 100644 typings/trimesh/exceptions.pyi 
create mode 100644 typings/trimesh/exchange/__init__.pyi create mode 100644 typings/trimesh/exchange/binvox.pyi create mode 100644 typings/trimesh/exchange/dae.pyi create mode 100644 typings/trimesh/exchange/export.pyi create mode 100644 typings/trimesh/exchange/gltf.pyi create mode 100644 typings/trimesh/exchange/load.pyi create mode 100644 typings/trimesh/exchange/misc.pyi create mode 100644 typings/trimesh/exchange/obj.pyi create mode 100644 typings/trimesh/exchange/off.pyi create mode 100644 typings/trimesh/exchange/openctm.pyi create mode 100644 typings/trimesh/exchange/ply.pyi create mode 100644 typings/trimesh/exchange/stl.pyi create mode 100644 typings/trimesh/exchange/threedxml.pyi create mode 100644 typings/trimesh/exchange/threemf.pyi create mode 100644 typings/trimesh/exchange/urdf.pyi create mode 100644 typings/trimesh/exchange/xaml.pyi create mode 100644 typings/trimesh/exchange/xyz.pyi create mode 100644 typings/trimesh/geometry.pyi create mode 100644 typings/trimesh/graph.pyi create mode 100644 typings/trimesh/grouping.pyi create mode 100644 typings/trimesh/inertia.pyi create mode 100644 typings/trimesh/interfaces/__init__.pyi create mode 100644 typings/trimesh/interfaces/blender.pyi create mode 100644 typings/trimesh/interfaces/generic.pyi create mode 100644 typings/trimesh/interfaces/gmsh.pyi create mode 100644 typings/trimesh/intersections.pyi create mode 100644 typings/trimesh/interval.pyi create mode 100644 typings/trimesh/nsphere.pyi create mode 100644 typings/trimesh/parent.pyi create mode 100644 typings/trimesh/path/__init__.pyi create mode 100644 typings/trimesh/path/arc.pyi create mode 100644 typings/trimesh/path/creation.pyi create mode 100644 typings/trimesh/path/curve.pyi create mode 100644 typings/trimesh/path/entities.pyi create mode 100644 typings/trimesh/path/exchange/__init__.pyi create mode 100644 typings/trimesh/path/exchange/dxf.pyi create mode 100644 typings/trimesh/path/exchange/export.pyi create mode 100644 
typings/trimesh/path/exchange/load.pyi create mode 100644 typings/trimesh/path/exchange/misc.pyi create mode 100644 typings/trimesh/path/exchange/svg_io.pyi create mode 100644 typings/trimesh/path/packing.pyi create mode 100644 typings/trimesh/path/path.pyi create mode 100644 typings/trimesh/path/polygons.pyi create mode 100644 typings/trimesh/path/raster.pyi create mode 100644 typings/trimesh/path/repair.pyi create mode 100644 typings/trimesh/path/segments.pyi create mode 100644 typings/trimesh/path/simplify.pyi create mode 100644 typings/trimesh/path/traversal.pyi create mode 100644 typings/trimesh/path/util.pyi create mode 100644 typings/trimesh/permutate.pyi create mode 100644 typings/trimesh/points.pyi create mode 100644 typings/trimesh/poses.pyi create mode 100644 typings/trimesh/primitives.pyi create mode 100644 typings/trimesh/proximity.pyi create mode 100644 typings/trimesh/ray/__init__.pyi create mode 100644 typings/trimesh/ray/ray_pyembree.pyi create mode 100644 typings/trimesh/ray/ray_triangle.pyi create mode 100644 typings/trimesh/ray/ray_util.pyi create mode 100644 typings/trimesh/registration.pyi create mode 100644 typings/trimesh/remesh.pyi create mode 100644 typings/trimesh/rendering.pyi create mode 100644 typings/trimesh/repair.pyi create mode 100644 typings/trimesh/resolvers.pyi create mode 100644 typings/trimesh/resources/__init__.pyi create mode 100644 typings/trimesh/sample.pyi create mode 100644 typings/trimesh/scene/__init__.pyi create mode 100644 typings/trimesh/scene/cameras.pyi create mode 100644 typings/trimesh/scene/lighting.pyi create mode 100644 typings/trimesh/scene/scene.pyi create mode 100644 typings/trimesh/scene/transforms.pyi create mode 100644 typings/trimesh/schemas.pyi create mode 100644 typings/trimesh/smoothing.pyi create mode 100644 typings/trimesh/transformations.pyi create mode 100644 typings/trimesh/triangles.pyi create mode 100644 typings/trimesh/typed.pyi create mode 100644 typings/trimesh/units.pyi create mode 100644 
typings/trimesh/util.pyi create mode 100644 typings/trimesh/version.pyi create mode 100644 typings/trimesh/viewer/__init__.pyi create mode 100644 typings/trimesh/viewer/notebook.pyi create mode 100644 typings/trimesh/viewer/trackball.pyi create mode 100644 typings/trimesh/viewer/widget.pyi create mode 100644 typings/trimesh/viewer/windowed.pyi create mode 100644 typings/trimesh/visual/__init__.pyi create mode 100644 typings/trimesh/visual/base.pyi create mode 100644 typings/trimesh/visual/color.pyi create mode 100644 typings/trimesh/visual/gloss.pyi create mode 100644 typings/trimesh/visual/material.pyi create mode 100644 typings/trimesh/visual/objects.pyi create mode 100644 typings/trimesh/visual/texture.pyi create mode 100644 typings/trimesh/voxel/__init__.pyi create mode 100644 typings/trimesh/voxel/base.pyi create mode 100644 typings/trimesh/voxel/creation.pyi create mode 100644 typings/trimesh/voxel/encoding.pyi create mode 100644 typings/trimesh/voxel/morphology.pyi create mode 100644 typings/trimesh/voxel/ops.pyi create mode 100644 typings/trimesh/voxel/runlength.pyi create mode 100644 typings/trimesh/voxel/transforms.pyi diff --git a/ambersim/utils/_internal_utils.py b/ambersim/utils/_internal_utils.py index 9476332a..4201f09f 100644 --- a/ambersim/utils/_internal_utils.py +++ b/ambersim/utils/_internal_utils.py @@ -4,7 +4,7 @@ from ambersim import ROOT -def _check_filepath(filepath: Union[str, Path]) -> str: +def check_filepath(filepath: Union[str, Path]) -> str: """Checks validity of a filepath for model loading.""" assert isinstance(filepath, (str, Path)) @@ -19,7 +19,7 @@ def _check_filepath(filepath: Union[str, Path]) -> str: return filepath -def _rmtree(f: Path): +def rmtree(f: Path): """Recursively deletes a directory using pathlib. 
See: https://stackoverflow.com/a/66552066 @@ -28,5 +28,5 @@ def _rmtree(f: Path): f.unlink() else: for child in f.iterdir(): - _rmtree(child) + rmtree(child) f.rmdir() diff --git a/ambersim/utils/conversion_utils.py b/ambersim/utils/conversion_utils.py index d83b7384..7298070b 100644 --- a/ambersim/utils/conversion_utils.py +++ b/ambersim/utils/conversion_utils.py @@ -5,7 +5,7 @@ import mujoco as mj import trimesh -from ambersim.utils._internal_utils import _check_filepath +from ambersim.utils._internal_utils import check_filepath def save_model_xml(filepath: Union[str, Path], output_name: Optional[str] = None) -> None: @@ -21,7 +21,7 @@ def save_model_xml(filepath: Union[str, Path], output_name: Optional[str] = None """ try: # loading model and saving XML - filepath = _check_filepath(filepath) + filepath = check_filepath(filepath) _model = mj.MjModel.from_xml_path(filepath) if output_name is None: output_name = filepath.split("/")[-1].split(".")[0] @@ -69,7 +69,7 @@ def convex_decomposition_file( coacd.set_log_level("error") # executing the convex decomposition - meshfile = _check_filepath(meshfile) + meshfile = check_filepath(meshfile) _mesh = trimesh.load(meshfile, force="mesh") full_mesh = coacd.Mesh(_mesh.vertices, _mesh.faces) parts = coacd.run_coacd(full_mesh, **kwargs) # list of (vert, face) tuples @@ -102,7 +102,7 @@ def convex_decomposition_dir( Returns: all_decomposed_meshes: A list of lists of Trimesh objects representing the convex decompositions. 
""" - meshdir = _check_filepath(meshdir) + meshdir = check_filepath(meshdir) if recursive: glob_func = Path(meshdir).rglob else: diff --git a/ambersim/utils/io_utils.py b/ambersim/utils/io_utils.py index 38cef0cc..001e3025 100644 --- a/ambersim/utils/io_utils.py +++ b/ambersim/utils/io_utils.py @@ -6,27 +6,33 @@ import numpy as np import trimesh from dm_control import mjcf +from dm_control.mjcf.element import _ElementImpl +from dm_control.mjcf.parser import from_file +from dm_control.mjcf.physics import Physics from mujoco import mjx +from mujoco.mjx._src.device import device_put +from mujoco.mjx._src.io import make_data from ambersim import ROOT -from ambersim.utils._internal_utils import _check_filepath +from ambersim.utils._internal_utils import check_filepath from ambersim.utils.conversion_utils import save_model_xml -def _modify_robot_float_base(filepath: Union[str, Path]) -> mj.MjModel: +def _modify_robot_float_base(filepath: str) -> mj.MjModel: """Modifies a robot to have a floating base if it doesn't already.""" # loading current robot assert str(filepath).split(".")[-1] == "xml" - robot = mjcf.from_file(filepath, model_dir="/".join(filepath.split("/")[:-1])) + robot = from_file(filepath, model_dir="/".join(filepath.split("/")[:-1])) # only add free joint if the first body after worldbody has no freejoints - assert robot.worldbody is not None + assert robot.worldbody is not None # pyright typechecking + assert isinstance(robot.worldbody, _ElementImpl) # pyright typechecking if len(robot.worldbody.body[0].joint) == 0: robot.worldbody.body[0].add("freejoint", name="freejoint") assert robot.worldbody.body[0].inertial is not None # checking for non-physical parsing errors # extracts the mujoco model from the dm_mujoco Physics object - physics = mjcf.Physics.from_mjcf_model(robot) + physics = Physics.from_mjcf_model(robot) assert physics is not None # pyright typechecking model = physics.model.ptr return model @@ -60,7 +66,7 @@ def load_mj_model_from_file( elif 
isinstance(solver, mj.mjtSolver): assert solver in [mj.mjtSolver.mjSOL_CG] - filepath = _check_filepath(filepath) + filepath = check_filepath(filepath) # loading the model and data. check whether freejoint is added forcibly if force_float: @@ -69,7 +75,7 @@ def load_mj_model_from_file( output_name = "/".join(str(filepath).split("/")[:-1]) + "/_temp_xml_model.xml" save_model_xml(filepath, output_name=output_name) mj_model = _modify_robot_float_base(output_name) - Path.unlink(output_name) + Path(output_name).unlink() else: mj_model = _modify_robot_float_base(filepath) else: @@ -88,8 +94,8 @@ def load_mj_model_from_file( def mj_to_mjx_model_and_data(mj_model: mj.MjModel) -> Tuple[mjx.Model, mjx.Data]: """Converts a mujoco model to an mjx (model, data) pair.""" try: - mjx_model = mjx.device_put(mj_model) - mjx_data = mjx.make_data(mjx_model) + mjx_model = device_put(mj_model) + mjx_data = make_data(mjx_model) return mjx_model, mjx_data except NotImplementedError as e: extended_msg = """ diff --git a/examples/convex_decomposition.py b/examples/convex_decomposition.py index 8e49016a..b7e39987 100644 --- a/examples/convex_decomposition.py +++ b/examples/convex_decomposition.py @@ -1,7 +1,7 @@ from pathlib import Path from ambersim import ROOT -from ambersim.utils._internal_utils import _rmtree +from ambersim.utils._internal_utils import rmtree from ambersim.utils.conversion_utils import convex_decomposition_dir, convex_decomposition_file """This example demonstrates how to perform convex decompositions of nonconvex meshes. 
@@ -24,7 +24,7 @@ print("Example 1: paths of decomposed files:") for f in test_save_dir.glob("*.obj"): print(str(f)) -_rmtree(test_save_dir) # remove the test directory (delete this if you want to keep saved files) +rmtree(test_save_dir) # remove the test directory (delete this if you want to keep saved files) # example 2: whole directory # setting up dummy model directory with two meshes in it @@ -44,5 +44,5 @@ print("Example 2: paths of decomposed files:") for f in test_save_dir.glob("*.obj"): print(str(f)) -_rmtree(test_model_dir) -_rmtree(test_save_dir) +rmtree(test_model_dir) +rmtree(test_save_dir) diff --git a/pyrightconfig.json b/pyrightconfig.json index b7327d23..46951031 100644 --- a/pyrightconfig.json +++ b/pyrightconfig.json @@ -1,8 +1,8 @@ { "include": [ - "ambersim" + "ambersim", ], - "reportMissingImports": false, + "reportMissingImports": true, "reportMissingTypeStubs": false, - "reportGeneralTypeIssues": false, + "reportGeneralTypeIssues": true, } \ No newline at end of file diff --git a/tests/test_model_io.py b/tests/test_model_io.py index 4d084904..96acf2d3 100644 --- a/tests/test_model_io.py +++ b/tests/test_model_io.py @@ -8,7 +8,7 @@ from mujoco import mjx from ambersim import ROOT -from ambersim.utils._internal_utils import _rmtree +from ambersim.utils._internal_utils import rmtree from ambersim.utils.conversion_utils import convex_decomposition_file, save_model_xml from ambersim.utils.introspection_utils import get_joint_names from ambersim.utils.io_utils import _modify_robot_float_base, load_mjx_model_and_data_from_file @@ -37,7 +37,7 @@ def test_load_model(): assert load_mjx_model_and_data_from_file(Path(repo_path)) # remove temp local dir - _rmtree(local_dir) + rmtree(local_dir) def test_all_models(): @@ -126,4 +126,4 @@ def test_convex_decomposition(): signed_dist_t = igl.signed_distance(coords, tverts, tfaces)[0] assert np.allclose(signed_dist_d, signed_dist_t) - _rmtree(savedir) + rmtree(savedir) diff --git a/typings/coacd/__init__.pyi 
b/typings/coacd/__init__.pyi new file mode 100644 index 00000000..526ab344 --- /dev/null +++ b/typings/coacd/__init__.pyi @@ -0,0 +1,47 @@ +""" +This type stub file was generated by pyright. +""" + +import ctypes +import os +from ctypes import POINTER, Structure, c_bool, c_char_p, c_double, c_int, c_uint, c_uint64, c_void_p + +import numpy as np +import trimesh + +_lib_files = ... + +class CoACD_Mesh(ctypes.Structure): + _fields_ = ... + +class CoACD_MeshArray(ctypes.Structure): + _fields_ = ... + +class Mesh: + def __init__(self, vertices=..., indices=...) -> None: ... + +def run_coacd( + mesh: Mesh, + threshold: float = ..., + max_convex_hull: int = ..., + preprocess_mode: str = ..., + preprocess_resolution: int = ..., + resolution: int = ..., + mcts_nodes: int = ..., + mcts_iterations: int = ..., + mcts_max_depth: int = ..., + pca: int = ..., + merge: bool = ..., + seed: int = ..., +): # -> list[Unknown]: + ... + +def set_log_level(level: str): # -> None: + ... + +if __name__ == "__main__": + mesh = ... + mesh = ... + result = ... + mesh_parts = ... + scene = ... diff --git a/typings/dm_control/__init__.pyi b/typings/dm_control/__init__.pyi new file mode 100644 index 00000000..e897159a --- /dev/null +++ b/typings/dm_control/__init__.pyi @@ -0,0 +1,7 @@ +""" +This type stub file was generated by pyright. +""" + +""" +This type stub file was generated by pyright. +""" diff --git a/typings/dm_control/_render/__init__.pyi b/typings/dm_control/_render/__init__.pyi new file mode 100644 index 00000000..c74f0d54 --- /dev/null +++ b/typings/dm_control/_render/__init__.pyi @@ -0,0 +1,26 @@ +""" +This type stub file was generated by pyright. +""" + +import collections +import os + +from absl import logging +from dm_control._render import constants + +"""OpenGL context management for rendering MuJoCo scenes. + +By default, the `Renderer` class will try to load one of the following rendering +APIs, in descending order of priority: GLFW > EGL > OSMesa. 
+ +It is also possible to select a specific backend by setting the `MUJOCO_GL=` +environment variable to 'glfw', 'egl', or 'osmesa'. +""" +BACKEND = ... +_ALL_RENDERERS = ... +_NO_RENDERER = ... +if BACKEND is not None: + import_func = ... + Renderer = ... +else: ... +USING_GPU = ... diff --git a/typings/dm_control/_render/base.pyi b/typings/dm_control/_render/base.pyi new file mode 100644 index 00000000..a2e4d4f7 --- /dev/null +++ b/typings/dm_control/_render/base.pyi @@ -0,0 +1,59 @@ +""" +This type stub file was generated by pyright. +""" + +import abc +import contextlib + +"""Base class for OpenGL context handlers. + +`ContextBase` defines a common API that OpenGL rendering contexts should conform +to. In addition, it provides a `make_current` context manager that: + +1. Makes this OpenGL context current within the appropriate rendering thread. +2. Yields an object exposing a `call` method that can be used to execute OpenGL + calls within the rendering thread. + +See the docstring for `dm_control.utils.render_executor` for further details +regarding rendering threads. +""" +_CURRENT_CONTEXT_FOR_THREAD = ... +_CURRENT_THREAD_FOR_CONTEXT = ... + +class ContextBase(metaclass=abc.ABCMeta): + """Base class for managing OpenGL contexts.""" + + def __init__(self, max_width, max_height, render_executor_class=...) -> None: + """Initializes this context.""" + ... + def keep_alive(self, obj): # -> None: + ... + def dont_keep_alive(self, obj): # -> None: + ... + def increment_refcount(self): # -> None: + ... + def decrement_refcount(self): # -> None: + ... + @property + def terminated(self): # -> bool: + ... + @property + def thread(self): # -> Thread | None: + ... + def free(self): # -> None: + """Frees resources associated with this context if its refcount is zero.""" + ... + def __del__(self): # -> None: + ... 
+ @contextlib.contextmanager + def make_current(self): # -> Generator[PassthroughRenderExecutor, Any, None]: + """Context manager that makes this Renderer's OpenGL context current. + + Yields: + An object that exposes a `call` method that can be used to call a + function on the dedicated rendering thread. + + Raises: + RuntimeError: If this context is already current on another thread. + """ + ... diff --git a/typings/dm_control/_render/constants.pyi b/typings/dm_control/_render/constants.pyi new file mode 100644 index 00000000..c0108432 --- /dev/null +++ b/typings/dm_control/_render/constants.pyi @@ -0,0 +1,11 @@ +""" +This type stub file was generated by pyright. +""" + +"""String constants for the rendering module.""" +MUJOCO_GL = ... +PYOPENGL_PLATFORM = ... +OSMESA = ... +GLFW = ... +EGL = ... +NO_RENDERER = ... diff --git a/typings/dm_control/_render/executor/__init__.pyi b/typings/dm_control/_render/executor/__init__.pyi new file mode 100644 index 00000000..e5c1c614 --- /dev/null +++ b/typings/dm_control/_render/executor/__init__.pyi @@ -0,0 +1,32 @@ +""" +This type stub file was generated by pyright. +""" + +import os + +from dm_control._render.executor.native_mutex.render_executor import NativeMutexOffloadingRenderExecutor +from dm_control._render.executor.render_executor import ( + BaseRenderExecutor, + OffloadingRenderExecutor, + PassthroughRenderExecutor, +) + +"""RenderExecutor executes OpenGL rendering calls on an appropriate thread. + +OpenGL calls must be made on the same thread as where an OpenGL context is +made current on. With GPU rendering, migrating OpenGL contexts between threads +can become expensive. We provide a thread-safe executor that maintains a +thread on which an OpenGL context can be kept permanently current, and any other +threads that wish to render with that context will have their rendering calls +offloaded to the dedicated thread. 
+ +For single-threaded applications, set the `DISABLE_RENDER_THREAD_OFFLOADING` +environment variable before launching the Python interpreter. This will +eliminate the overhead of unnecessary thread-switching. +""" +_OFFLOAD = ... +_EXECUTORS = ... +if _OFFLOAD: + RenderExecutor = ... +else: + RenderExecutor = ... diff --git a/typings/dm_control/_render/executor/render_executor.pyi b/typings/dm_control/_render/executor/render_executor.pyi new file mode 100644 index 00000000..b395e7c2 --- /dev/null +++ b/typings/dm_control/_render/executor/render_executor.pyi @@ -0,0 +1,102 @@ +""" +This type stub file was generated by pyright. +""" + +import abc +import contextlib + +"""RenderExecutors execute OpenGL rendering calls on an appropriate thread. + +The purpose of these classes is to ensure that OpenGL calls are made on the +same thread as where an OpenGL context was made current. + +In a single-threaded setting, `PassthroughRenderExecutor` is essentially a no-op +that executes rendering calls on the same thread. This is provided to minimize +thread-switching overhead. + +In a multithreaded setting, `OffloadingRenderExecutor` maintains a separate +dedicated thread on which the OpenGL context is created and made current. All +subsequent rendering calls are then offloaded onto this dedicated thread. +""" +_NOT_IN_CONTEXT = ... +_ALREADY_TERMINATED = ... + +class _FakeLock: + """An object with the same API as `threading.Lock` but that does nothing.""" + + def acquire(self, blocking=...): # -> None: + ... + def release(self): # -> None: + ... + def __enter__(self): # -> None: + ... + def __exit__(self, exc_type, exc_value, traceback): # -> None: + ... + +_FAKE_LOCK = ... + +class BaseRenderExecutor(metaclass=abc.ABCMeta): + """An object that manages rendering calls for an OpenGL context. + + This class helps ensure that OpenGL calls are made on the correct thread.
The + usage pattern is as follows: + + ```python + executor = SomeRenderExecutorClass() + with executor.execution_context() as ctx: + ctx.call(an_opengl_call, arg, kwarg=foo) + result = ctx.call(another_opengl_call) + ``` + """ + + def __init__(self) -> None: ... + @contextlib.contextmanager + def execution_context(self): # -> Generator[Self@BaseRenderExecutor, Any, None]: + """A context manager that allows calls to be offloaded to this executor.""" + ... + @property + def terminated(self): # -> bool: + ... + @property + @abc.abstractmethod + def thread(self): # -> None: + ... + @abc.abstractmethod + def call(self, *args, **kwargs): # -> None: + ... + @abc.abstractmethod + def terminate(self, cleanup_callable=...): # -> None: + ... + +class PassthroughRenderExecutor(BaseRenderExecutor): + """A no-op render executor that executes on the calling thread.""" + + def __init__(self) -> None: ... + @property + def thread(self): # -> Thread | None: + ... + def call(self, func, *args, **kwargs): ... + def terminate(self, cleanup_callable=...): # -> None: + ... + +class _ThreadPoolExecutorPool: + """A pool of reusable ThreadPoolExecutors.""" + + def __init__(self) -> None: ... + def acquire(self): # -> ThreadPoolExecutor: + ... + def release(self, thread_pool_executor): # -> None: + ... + +_THREAD_POOL_EXECUTOR_POOL = ... + +class OffloadingRenderExecutor(BaseRenderExecutor): + """A render executor that executes calls on a dedicated offload thread.""" + + def __init__(self) -> None: ... + @property + def thread(self): # -> Thread | None: + ... + def call(self, func, *args, **kwargs): ... + def terminate(self, cleanup_callable=...): # -> None: + ... diff --git a/typings/dm_control/_render/glfw_renderer.pyi b/typings/dm_control/_render/glfw_renderer.pyi new file mode 100644 index 00000000..8a101b3f --- /dev/null +++ b/typings/dm_control/_render/glfw_renderer.pyi @@ -0,0 +1,12 @@ +""" +This type stub file was generated by pyright. 
+""" + +from dm_control._render import base + +"""An OpenGL renderer backed by GLFW.""" + +class GLFWContext(base.ContextBase): + """An OpenGL context backed by GLFW.""" + + def __init__(self, max_width, max_height) -> None: ... diff --git a/typings/dm_control/_render/pyopengl/__init__.pyi b/typings/dm_control/_render/pyopengl/__init__.pyi new file mode 100644 index 00000000..cea7ef96 --- /dev/null +++ b/typings/dm_control/_render/pyopengl/__init__.pyi @@ -0,0 +1,3 @@ +""" +This type stub file was generated by pyright. +""" diff --git a/typings/dm_control/_render/pyopengl/egl_ext.pyi b/typings/dm_control/_render/pyopengl/egl_ext.pyi new file mode 100644 index 00000000..3e7827a1 --- /dev/null +++ b/typings/dm_control/_render/pyopengl/egl_ext.pyi @@ -0,0 +1,15 @@ +""" +This type stub file was generated by pyright. +""" + +from OpenGL.EGL import * + +"""Extends OpenGL.EGL with definitions necessary for headless rendering.""" +PFNEGLQUERYDEVICESEXTPROC = ... +_eglQueryDevicesEXT = ... +EGL_PLATFORM_DEVICE_EXT = ... +PFNEGLGETPLATFORMDISPLAYEXTPROC = ... +eglGetPlatformDisplayEXT = ... + +def eglQueryDevicesEXT(max_devices=...): # -> list[Any]: + ... diff --git a/typings/dm_control/_render/pyopengl/egl_renderer.pyi b/typings/dm_control/_render/pyopengl/egl_renderer.pyi new file mode 100644 index 00000000..99a0274b --- /dev/null +++ b/typings/dm_control/_render/pyopengl/egl_renderer.pyi @@ -0,0 +1,24 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control._render import base +from dm_control._render.pyopengl import egl_ext as EGL + +"""An OpenGL renderer backed by EGL, provided through PyOpenGL.""" +PYOPENGL_PLATFORM = ... +if not PYOPENGL_PLATFORM: ... +else: ... + +def create_initialized_headless_egl_display(): # -> Any | None: + """Creates an initialized EGL display directly on a device.""" + ... + +EGL_DISPLAY = ... +if EGL_DISPLAY == EGL.EGL_NO_DISPLAY: ... +EGL_ATTRIBUTES = ... 
+ +class EGLContext(base.ContextBase): + """An OpenGL context backed by EGL.""" + + def __init__(self, max_width, max_height) -> None: ... diff --git a/typings/dm_control/_render/pyopengl/osmesa_renderer.pyi b/typings/dm_control/_render/pyopengl/osmesa_renderer.pyi new file mode 100644 index 00000000..0878f785 --- /dev/null +++ b/typings/dm_control/_render/pyopengl/osmesa_renderer.pyi @@ -0,0 +1,18 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control._render import base + +"""An OpenGL renderer backed by OSMesa.""" +PYOPENGL_PLATFORM = ... +if not PYOPENGL_PLATFORM: ... +else: ... +_DEPTH_BITS = ... +_STENCIL_BITS = ... +_ACCUM_BITS = ... + +class OSMesaContext(base.ContextBase): + """An OpenGL context backed by OSMesa.""" + + def __init__(self, *args, **kwargs) -> None: ... diff --git a/typings/dm_control/autowrap/__init__.pyi b/typings/dm_control/autowrap/__init__.pyi new file mode 100644 index 00000000..cea7ef96 --- /dev/null +++ b/typings/dm_control/autowrap/__init__.pyi @@ -0,0 +1,3 @@ +""" +This type stub file was generated by pyright. +""" diff --git a/typings/dm_control/composer/__init__.pyi b/typings/dm_control/composer/__init__.pyi new file mode 100644 index 00000000..39f41c21 --- /dev/null +++ b/typings/dm_control/composer/__init__.pyi @@ -0,0 +1,14 @@ +""" +This type stub file was generated by pyright. 
+""" + +from dm_control.composer.arena import Arena +from dm_control.composer.constants import * +from dm_control.composer.define import cached_property, observable +from dm_control.composer.entity import Entity, FreePropObservableMixin, ModelWrapperEntity, Observables +from dm_control.composer.environment import HOOK_NAMES, Environment, EpisodeInitializationError, ObservationPadding +from dm_control.composer.initializer import Initializer +from dm_control.composer.robot import Robot +from dm_control.composer.task import NullTask, Task + +"""Module containing abstract base classes for Composer environments.""" diff --git a/typings/dm_control/composer/arena.pyi b/typings/dm_control/composer/arena.pyi new file mode 100644 index 00000000..4bf51f44 --- /dev/null +++ b/typings/dm_control/composer/arena.pyi @@ -0,0 +1,19 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control.composer import entity as entity_module + +"""The base empty arena that defines global settings for Composer.""" +_ARENA_XML_PATH = ... + +class Arena(entity_module.Entity): + """The base empty arena that defines global settings for Composer.""" + + def __init__(self, *args, **kwargs) -> None: ... + def add_free_entity(self, entity): + """Includes an entity in the arena as a free-moving body.""" + ... + @property + def mjcf_model(self): # -> RootElement | None: + ... diff --git a/typings/dm_control/composer/constants.pyi b/typings/dm_control/composer/constants.pyi new file mode 100644 index 00000000..eb4568f9 --- /dev/null +++ b/typings/dm_control/composer/constants.pyi @@ -0,0 +1,6 @@ +""" +This type stub file was generated by pyright. +""" + +"""Module defining constant values for Composer.""" +SENSOR_SITES_GROUP = ... 
diff --git a/typings/dm_control/composer/define.pyi b/typings/dm_control/composer/define.pyi new file mode 100644 index 00000000..8fb90086 --- /dev/null +++ b/typings/dm_control/composer/define.pyi @@ -0,0 +1,25 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +"""Decorators for Entity methods returning elements and observables.""" + +class cached_property(property): + """A property that is evaluated only once per object instance.""" + + def __init__(self, func, doc=...) -> None: ... + def __get__(self, obj, cls): # -> Self@cached_property: + ... + +abstract_observable = abc.abstractproperty + +class observable(cached_property): + """A decorator for base.Observables methods returning an observable. + + The body of the decorated function is evaluated at Entity construction time + and the observable is cached. + """ + + ... diff --git a/typings/dm_control/composer/entity.pyi b/typings/dm_control/composer/entity.pyi new file mode 100644 index 00000000..7d14cf41 --- /dev/null +++ b/typings/dm_control/composer/entity.pyi @@ -0,0 +1,336 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +"""Module defining the abstract entity class.""" +_OPTION_KEYS = ... +_NO_ATTACHMENT_FRAME = ... + +class _ObservableKeys: + """Helper object that implements the `observables.dict_keys` functionality.""" + + def __init__(self, entity, observables) -> None: ... + def __getattr__(self, name): ... + def __dir__(self): # -> list[Unknown]: + ... + +class Observables: + """Base-class for Entity observables. + + Subclasses should declare getter methods annotated with @define.observable + decorator and returning an observable object. + """ + + def __init__(self, entity) -> None: ... + @property + def dict_keys(self): # -> _ObservableKeys: + ... + def as_dict(self, fully_qualified=...): # -> OrderedDict[Unknown, Unknown]: + """Returns an OrderedDict of observables belonging to this Entity. 
+ + The returned observables will include any added using the _add_observable + method, as well as any generated by a method decorated with the + @define.observable annotation. + + Args: + fully_qualified: (bool) Whether the dict keys should be prefixed with the + parent entity's full model identifier. + """ + ... + def get_observable(self, name, name_fully_qualified=...): + """Returns the observable with the given name. + + Args: + name: (str) The identifier of the observable. + name_fully_qualified: (bool) Whether the provided name is prefixed by the + model's full identifier. + """ + ... + def set_options(self, options): # -> None: + """Configure Observables with an options dict. + + Args: + options: A dict of dicts of configuration options keyed on + observable names, or a dict of configuration options, which will + propagate those options to all observables. + """ + ... + def enable_all(self): # -> None: + """Enable all observables of this entity.""" + ... + def disable_all(self): # -> None: + """Disable all observables of this entity.""" + ... + def add_observable(self, name, observable, enabled=...): # -> None: + ... + +class FreePropObservableMixin(metaclass=abc.ABCMeta): + """Enforce observables of a free-moving object.""" + + @property + @abc.abstractmethod + def position(self): # -> None: + ... + @property + @abc.abstractmethod + def orientation(self): # -> None: + ... + @property + @abc.abstractmethod + def linear_velocity(self): # -> None: + ... + @property + @abc.abstractmethod + def angular_velocity(self): # -> None: + ... + +class Entity(metaclass=abc.ABCMeta): + """The abstract base class for an entity in a Composer environment.""" + + def __init__(self, *args, **kwargs) -> None: + """Entity constructor. + + Subclasses should not override this method, instead implement a _build + method. + + Args: + *args: Arguments passed through to the _build method. + **kwargs: Keyword arguments. Passed through to the _build method, apart + from the following. 
+ `observable_options`: A dictionary of Observable + configuration options. + """ + ... + def iter_entities(self, exclude_self=...): # -> Generator[Self@Entity | Unknown, Any, None]: + """An iterator that recursively iterates through all attached entities. + + Args: + exclude_self: (optional) Whether to exclude this `Entity` itself from the + iterator. + + Yields: + If `exclude_self` is `False`, the first value yielded is this Entity + itself. The following Entities are then yielded recursively in a + depth-first fashion, following the order in which the Entities are + attached. + """ + ... + @property + def observables(self): # -> Observables: + """The observables defined by this entity.""" + ... + def initialize_episode_mjcf(self, random_state): # -> None: + """Callback executed when the MJCF model is modified between episodes.""" + ... + def after_compile(self, physics, random_state): # -> None: + """Callback executed after the Mujoco Physics is recompiled.""" + ... + def initialize_episode(self, physics, random_state): # -> None: + """Callback executed during episode initialization.""" + ... + def before_step(self, physics, random_state): # -> None: + """Callback executed before an agent control step.""" + ... + def before_substep(self, physics, random_state): # -> None: + """Callback executed before a simulation step.""" + ... + def after_substep(self, physics, random_state): # -> None: + """A callback which is executed after a simulation step.""" + ... + def after_step(self, physics, random_state): # -> None: + """Callback executed after an agent control step.""" + ... + @property + @abc.abstractmethod + def mjcf_model(self): ... + def attach(self, entity, attach_site=...): + """Attaches an `Entity` without any additional degrees of freedom. + + Args: + entity: The `Entity` to attach. + attach_site: (optional) The site to which to attach the entity's model. If + not set, defaults to self.attachment_site. + + Returns: + The frame of the attached model. 
+ """ + ... + def detach(self): # -> None: + """Detaches this entity if it has previously been attached.""" + ... + @property + def parent(self): # -> None: + """Returns the `Entity` to which this entity is attached, or `None`.""" + ... + @property + def attachment_site(self): ... + @property + def root_body(self): ... + def global_vector_to_local_frame(self, physics, vec_in_world_frame): # -> Any: + """Linearly transforms a world-frame vector into entity's local frame. + + Note that this function does not perform an affine transformation of the + vector. In other words, the input vector is assumed to be specified with + respect to the same origin as this entity's local frame. This function + can also be applied to matrices whose innermost dimensions are either 2 or + 3. In this case, a matrix with the same leading dimensions is returned + where the innermost vectors are replaced by their values computed in the + local frame. + + Args: + physics: An `mjcf.Physics` instance. + vec_in_world_frame: A NumPy array with last dimension of shape (2,) or + (3,) that represents a vector quantity in the world frame. + + Returns: + The same quantity as `vec_in_world_frame` but reexpressed in this + entity's local frame. The returned np.array has the same shape as + np.asarray(vec_in_world_frame). + + Raises: + ValueError: if `vec_in_world_frame` does not have shape ending with (2,) + or (3,). + """ + ... + def global_xmat_to_local_frame(self, physics, xmat): # -> NDArray[Any]: + """Transforms another entity's `xmat` into this entity's local frame. + + This function takes another entity's (E) xmat, which is an SO(3) matrix + from E's frame to the world frame, and turns it to a matrix that transforms + from E's frame into this entity's local frame. + + Args: + physics: An `mjcf.Physics` instance. + xmat: A NumPy array of shape (3, 3) or (9,) that represents another + entity's xmat. + + Returns: + The `xmat` reexpressed in this entity's local frame. 
The returned + np.array has the same shape as np.asarray(xmat). + + Raises: + ValueError: if `xmat` does not have shape (3, 3) or (9,). + """ + ... + def get_pose(self, physics): # -> tuple[Unknown, Unknown]: + """Get the position and orientation of this entity relative to its parent. + + Note that the semantics differ slightly depending on whether or not the + entity has a free joint: + + * If it has a free joint the position and orientation are always given in + global coordinates. + * If the entity is fixed or attached with a different joint type then the + position and orientation are given relative to the parent frame. + + For entities that are either attached directly to the worldbody, or to other + entities that are positioned at the global origin (e.g. the arena) the + global and relative poses are equivalent. + + Args: + physics: An instance of `mjcf.Physics`. + + Returns: + A 2-tuple where the first entry is a (3,) numpy array representing the + position and the second is a (4,) numpy array representing orientation as + a quaternion. + + Raises: + RuntimeError: If the entity is not attached. + """ + ... + def set_pose(self, physics, position=..., quaternion=...): # -> None: + """Sets position and/or orientation of this entity relative to its parent. + + If the entity is attached with a free joint, this method will set the + respective DoFs of the joint. If the entity is either fixed or attached with + a different joint type, this method will update the position and/or + quaternion of the attachment frame. + + Note that the semantics differ slightly between the two cases: the DoFs of a + free body are specified in global coordinates, whereas the position of a + non-free body is specified in relative coordinates with respect to the + parent frame. However, for entities that are either attached directly to the + worldbody, or to other entities that are positioned at the global origin + (e.g. the arena), there is no difference between the two cases. 
+ + Args: + physics: An instance of `mjcf.Physics`. + position: (optional) A NumPy array of size 3. + quaternion: (optional) A NumPy array of size 4. + + Raises: + RuntimeError: If the entity is not attached. + """ + ... + def shift_pose(self, physics, position=..., quaternion=..., rotate_velocity=...): # -> None: + """Shifts the position and/or orientation from its current configuration. + + This is a convenience function that performs the same operation as + `set_pose`, but where the specified `position` is added to the current + position, and the specified `quaternion` is premultiplied to the current + quaternion. + + Args: + physics: An instance of `mjcf.Physics`. + position: (optional) A NumPy array of size 3. + quaternion: (optional) A NumPy array of size 4. + rotate_velocity: (optional) A bool, whether to shift the current linear + velocity along with the pose. This will rotate the current linear + velocity, which is expressed relative to the world frame. The angular + velocity, which is expressed relative to the local frame is left + unchanged. + + Raises: + RuntimeError: If the entity is not attached. + """ + ... + def get_velocity(self, physics): # -> tuple[Unknown, Unknown]: + """Gets the linear and angular velocity of this free entity. + + Args: + physics: An instance of `mjcf.Physics`. + + Returns: + A 2-tuple where the first entry is a (3,) numpy array representing the + linear velocity and the second is a (3,) numpy array representing the + angular velocity. + + """ + ... + def set_velocity(self, physics, velocity=..., angular_velocity=...): # -> None: + """Sets the linear velocity and/or angular velocity of this free entity. + + If the entity is attached with a free joint, this method will set the + respective DoFs of the joint. Otherwise a warning is logged. + + Args: + physics: An instance of `mjcf.Physics`. + velocity: (optional) A NumPy array of size 3 specifying the + linear velocity. 
+ angular_velocity: (optional) A NumPy array of size 3 specifying the + angular velocity + """ + ... + def configure_joints(self, physics, position): # -> None: + """Configures this entity's internal joints. + + The default implementation of this method simply sets the `qpos` of all + joints in this entity to the values specified in the `position` argument. + Entity subclasses with actuated joints may override this method to achieve a + stable reconfiguration of joint positions, for example the control signal + of position actuators may be changed to match the new joint positions. + + Args: + physics: An instance of `mjcf.Physics`. + position: The desired position of this entity's joints. + """ + ... + +class ModelWrapperEntity(Entity): + """An entity class that wraps an MJCF model without any additional logic.""" + + @property + def mjcf_model(self): ... diff --git a/typings/dm_control/composer/environment.pyi b/typings/dm_control/composer/environment.pyi new file mode 100644 index 00000000..cb2536ae --- /dev/null +++ b/typings/dm_control/composer/environment.pyi @@ -0,0 +1,211 @@ +""" +This type stub file was generated by pyright. +""" + +import enum + +import dm_env + +"""RL environment classes for Composer tasks.""" +_STEPS_LOGGING_INTERVAL = ... +HOOK_NAMES = ... +_empty_function = ... +_EMPTY_CODE = ... +_EMPTY_WITH_DOCSTRING_CODE = ... + +class ObservationPadding(enum.Enum): + INITIAL_VALUE = ... + ZERO = ... + +class EpisodeInitializationError(RuntimeError): + """Raised by a `composer.Task` when it fails to initialize an episode.""" + + ... + +class _Hook: + __slots__ = ... + def __init__(self) -> None: ... + +class _EnvironmentHooks: + """Helper object that scans and memoizes various hooks in a task. + + This object exists to ensure that we do not incur a substantial overhead in + calling empty entity hooks in more complicated tasks. + """ + + __slots__ = ... + def __init__(self, task) -> None: ...
+ def refresh_entity_hooks(self): # -> None: + """Scans and memoizes all non-trivial entity hooks.""" + ... + def add_extra_hook(self, hook_name, hook_callable): # -> None: + ... + def initialize_episode_mjcf(self, random_state): # -> None: + ... + def after_compile(self, physics, random_state): # -> None: + ... + def initialize_episode(self, physics, random_state): # -> None: + ... + def before_step(self, physics, action, random_state): # -> None: + ... + def before_substep(self, physics, action, random_state): # -> None: + ... + def after_substep(self, physics, random_state): # -> None: + ... + def after_step(self, physics, random_state): # -> None: + ... + +class _CommonEnvironment: + """Common components for RL environments.""" + + def __init__( + self, + task, + time_limit=..., + random_state=..., + n_sub_steps=..., + raise_exception_on_physics_error=..., + strip_singleton_obs_buffer_dim=..., + delayed_observation_padding=..., + legacy_step: bool = ..., + ) -> None: + """Initializes an instance of `_CommonEnvironment`. + + Args: + task: Instance of `composer.base.Task`. + time_limit: (optional) A float, the time limit in seconds beyond which an + episode is forced to terminate. + random_state: Optional, either an int seed or an `np.random.RandomState` + object. If None (default), the random number generator will self-seed + from a platform-dependent source of entropy. + n_sub_steps: (DEPRECATED) An integer, number of physics steps to take per + agent control step. New code should instead override the + `control_substep` property of the task. + raise_exception_on_physics_error: (optional) A boolean, indicating whether + `PhysicsError` should be raised as an exception. If `False`, physics + errors will result in the current episode being terminated with a + warning logged, and a new episode started. 
+ strip_singleton_obs_buffer_dim: (optional) A boolean, if `True`, + the array shape of observations with `buffer_size == 1` will not have a + leading buffer dimension. + delayed_observation_padding: (optional) An `ObservationPadding` enum value + specifying the padding behavior of the initial buffers for delayed + observables. If `ZERO` then the buffer is initially filled with zeroes. + If `INITIAL_VALUE` then the buffer is initially filled with the first + observation values. + legacy_step: If True, steps the state with up-to-date position and + velocity dependent fields. See Page 6 of + https://arxiv.org/abs/2006.12983 for more information. + """ + ... + def add_extra_hook(self, hook_name, hook_callable): # -> None: + ... + @property + def physics(self): # -> Any | None: + """Returns a `weakref.ProxyType` pointing to the current `mjcf.Physics`. + + Note that the underlying `mjcf.Physics` will be destroyed whenever the MJCF + model is recompiled. It is therefore unsafe for external objects to hold a + reference to `environment.physics`. Attempting to access attributes of a + dead `Physics` instance will result in a `ReferenceError`. + """ + ... + @property + def task(self): ... + @property + def random_state(self): # -> RandomState: + ... + def control_timestep(self): + """Returns the interval between agent actions in seconds.""" + ... + +class Environment(_CommonEnvironment, dm_env.Environment): + """Reinforcement learning environment for Composer tasks.""" + + def __init__( + self, + task, + time_limit=..., + random_state=..., + n_sub_steps=..., + raise_exception_on_physics_error=..., + strip_singleton_obs_buffer_dim=..., + max_reset_attempts=..., + delayed_observation_padding=..., + legacy_step: bool = ..., + ) -> None: + """Initializes an instance of `Environment`. + + Args: + task: Instance of `composer.base.Task`. + time_limit: (optional) A float, the time limit in seconds beyond which + an episode is forced to terminate. 
+ random_state: (optional) an int seed or `np.random.RandomState` instance. + n_sub_steps: (DEPRECATED) An integer, number of physics steps to take per + agent control step. New code should instead override the + `control_substep` property of the task. + raise_exception_on_physics_error: (optional) A boolean, indicating whether + `PhysicsError` should be raised as an exception. If `False`, physics + errors will result in the current episode being terminated with a + warning logged, and a new episode started. + strip_singleton_obs_buffer_dim: (optional) A boolean, if `True`, + the array shape of observations with `buffer_size == 1` will not have a + leading buffer dimension. + max_reset_attempts: (optional) Maximum number of times to try resetting + the environment. If an `EpisodeInitializationError` is raised + during this process, an environment reset is reattempted up to this + number of times. If this count is exceeded then the most recent + exception will be allowed to propagate. Defaults to 1, i.e. no failure + is allowed. + delayed_observation_padding: (optional) An `ObservationPadding` enum value + specifying the padding behavior of the initial buffers for delayed + observables. If `ZERO` then the buffer is initially filled with zeroes. + If `INITIAL_VALUE` then the buffer is initially filled with the first + observation values. + legacy_step: If True, steps the state with up-to-date position and + velocity dependent fields. + """ + ... + def reset(self): # -> TimeStep: + ... + def step_spec(self): # -> TimeStep: + """DEPRECATED: please use `reward_spec` and `discount_spec` instead.""" + ... + def step(self, action): # -> TimeStep: + """Updates the environment using the action and returns a `TimeStep`.""" + ... + def action_spec(self): + """Returns the action specification for this environment.""" + ... + def reward_spec(self): # -> Array: + """Describes the reward returned by this environment. 
+ + This will be the output of `self.task.reward_spec()` if it is not None, + otherwise it will be the default spec returned by + `dm_env.Environment.reward_spec()`. + + Returns: + A `specs.Array` instance, or a nested dict, list or tuple of + `specs.Array`s. + """ + ... + def discount_spec(self): # -> BoundedArray: + """Describes the discount returned by this environment. + + This will be the output of `self.task.discount_spec()` if it is not None, + otherwise it will be the default spec returned by + `dm_env.Environment.discount_spec()`. + + Returns: + A `specs.Array` instance, or a nested dict, list or tuple of + `specs.Array`s. + """ + ... + def observation_spec(self): # -> Any: + """Returns the observation specification for this environment. + + Returns: + An `OrderedDict` mapping observation name to `specs.Array` containing + observation shape and dtype. + """ + ... diff --git a/typings/dm_control/composer/initializer.pyi b/typings/dm_control/composer/initializer.pyi new file mode 100644 index 00000000..4a647c8e --- /dev/null +++ b/typings/dm_control/composer/initializer.pyi @@ -0,0 +1,13 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +"""Module defining the abstract initializer.""" + +class Initializer(metaclass=abc.ABCMeta): + """The abstract base class for an initializer.""" + + @abc.abstractmethod + def __call__(self, physics, random_state): ... diff --git a/typings/dm_control/composer/initializers/__init__.pyi b/typings/dm_control/composer/initializers/__init__.pyi new file mode 100644 index 00000000..4603d022 --- /dev/null +++ b/typings/dm_control/composer/initializers/__init__.pyi @@ -0,0 +1,8 @@ +""" +This type stub file was generated by pyright. 
+""" + +from dm_control.composer.initializers.prop_initializer import PropPlacer +from dm_control.composer.initializers.tcp_initializer import ToolCenterPointInitializer + +"""Tools for initializing the states of Composer environments.""" diff --git a/typings/dm_control/composer/initializers/prop_initializer.pyi b/typings/dm_control/composer/initializers/prop_initializer.pyi new file mode 100644 index 00000000..2565ca19 --- /dev/null +++ b/typings/dm_control/composer/initializers/prop_initializer.pyi @@ -0,0 +1,83 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import composer + +"""An initializer that places props at various poses.""" +_SETTLE_QVEL_TOL = ... +_SETTLE_QACC_TOL = ... +_REJECTION_SAMPLING_FAILED = ... +_SETTLING_PHYSICS_FAILED = ... + +class PropPlacer(composer.Initializer): + """An initializer that places props at various positions and orientations.""" + + def __init__( + self, + props, + position, + quaternion=..., + ignore_collisions=..., + max_qvel_tol=..., + max_qacc_tol=..., + max_attempts_per_prop=..., + settle_physics=..., + min_settle_physics_time=..., + max_settle_physics_time=..., + max_settle_physics_attempts=..., + raise_exception_on_settle_failure=..., + ) -> None: + """Initializes this PropPlacer. + + Args: + props: A sequence of `composer.Entity` instances representing props. + position: A single fixed Cartesian position, or a `composer.Variation` + object that generates Cartesian positions. If a fixed sequence of + positions for multiple props is desired, use + `variation.deterministic.Sequence`. + quaternion: (optional) A single fixed unit quaternion, or a + `Variation` object that generates unit quaternions. If a fixed + sequence of quaternions for multiple props is desired, use + `variation.deterministic.Sequence`. + ignore_collisions: (optional) If True, ignore collisions between props, + i.e. do not run rejection sampling. + max_qvel_tol: Maximum post-initialization joint velocity for props. 
If + `settle_physics=True`, the simulation will be run until all prop joint + velocities are less than this threshold. + max_qacc_tol: Maximum post-initialization joint acceleration for props. If + `settle_physics=True`, the simulation will be run until all prop joint + velocities are less than this threshold. + max_attempts_per_prop: The maximum number of rejection sampling attempts + per prop. If a non-colliding pose cannot be found before this limit is + reached, a `RuntimeError` will be raised. + settle_physics: (optional) If True, the physics simulation will be + advanced for a few steps to allow the prop positions to settle. + min_settle_physics_time: (optional) When `settle_physics` is True, lower + bound on time (in seconds) the physics simulation is advanced. + max_settle_physics_time: (optional) When `settle_physics` is True, upper + bound on time (in seconds) the physics simulation is advanced. + max_settle_physics_attempts: (optional) When `settle_physics` is True, the + number of attempts at sampling overall scene pose and settling. + raise_exception_on_settle_failure: If True, raises an exception if + settling physics is unsuccessful. + """ + ... + def __call__(self, physics, random_state, ignore_contacts_with_entities=...): # -> None: + """Sets initial prop poses. + + Args: + physics: An `mjcf.Physics` instance. + random_state: a `np.random.RandomState` instance. + ignore_contacts_with_entities: a list of `composer.Entity` instances + to ignore when detecting collisions. This can be used to ignore props + that are not being placed by this initializer, but are known to be + colliding in the current state of the simulation (for example if they + are going to be placed by a different initializer that will be called + subsequently). + + Raises: + RuntimeError: If `ignore_collisions == False` and a non-colliding prop + pose could not be found within `max_attempts_per_prop`. + """ + ... 
diff --git a/typings/dm_control/composer/initializers/tcp_initializer.pyi b/typings/dm_control/composer/initializers/tcp_initializer.pyi new file mode 100644 index 00000000..e23190ef --- /dev/null +++ b/typings/dm_control/composer/initializers/tcp_initializer.pyi @@ -0,0 +1,67 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import composer + +"""An initializer that sets the pose of a hand's tool center point.""" +_REJECTION_SAMPLING_FAILED = ... + +class ToolCenterPointInitializer(composer.Initializer): + """An initializer that sets the position of a hand's tool center point. + + This initializer calls the RobotArm's internal method to try and set the + hand's TCP to a randomized Cartesian position within the specified bound. + By default the initializer performs rejection sampling in order to avoid + poses that result in "relevant collisions", which are defined as: + + * Collisions between links of the robot arm + * Collisions between the arm and the hand + * Collisions between either the arm or hand and an external body without a + free joint + """ + + def __init__( + self, hand, arm, position, quaternion=..., ignore_collisions=..., max_ik_attempts=..., max_rejection_samples=... + ) -> None: + """Initializes this ToolCenterPointInitializer. + + Args: + hand: Either a `base.RobotHand` instance or None, in which case + `arm.wrist_site` is used as the TCP site in place of + `hand.tool_center_point`. + arm: A `base.RobotArm` instance. + position: A single fixed Cartesian position, or a `Variation` + object that generates Cartesian positions. If a fixed sequence of + positions for multiple props is desired, use + `variation.deterministic.Sequence`. + quaternion: (optional) A single fixed unit quaternion, or a + `composer.Variation` object that generates unit quaternions. If a fixed + sequence of quaternions for ultiple props is desired, use + `variation.deterministic.Sequence`. 
+ ignore_collisions: (optional) If True all collisions are ignored, i.e. + rejection sampling is disabled. + max_ik_attempts: (optional) Maximum number of attempts for the inverse + kinematics solver to find a solution satisfying `target_pos` and + `target_quat`. These are attempts per rejection sample. If more than + one attempt is performed, the joint configuration will be randomized + before the second trial. To avoid randomizing joint positions, set this + parameter to 1. + max_rejection_samples (optional): Maximum number of TCP target poses to + sample while attempting to find a non-colliding configuration. For each + sampled pose, up to `max_ik_attempts` may be performed in order to find + an IK solution satisfying this pose. + """ + ... + def __call__(self, physics, random_state): # -> None: + """Sets initial tool center point pose via inverse kinematics. + + Args: + physics: An `mjcf.Physics` instance. + random_state: An `np.random.RandomState` instance. + + Raises: + RuntimeError: If a collision-free pose could not be found within + `max_ik_attempts`. + """ + ... diff --git a/typings/dm_control/composer/initializers/utils.pyi b/typings/dm_control/composer/initializers/utils.pyi new file mode 100644 index 00000000..c47b9081 --- /dev/null +++ b/typings/dm_control/composer/initializers/utils.pyi @@ -0,0 +1,27 @@ +""" +This type stub file was generated by pyright. +""" + +"""Utilities that are helpful for implementing initializers.""" + +class JointStaticIsolator: + """Helper class that isolates a collection of MuJoCo joints from others. + + An instance of this class is a context manager that caches the positions and + velocities of all non-isolated joints *upon construction*, and resets them to + their original state when the context exits. + """ + + def __init__(self, physics, joints) -> None: + """Initializes the joint isolator. + + Args: + physics: An instance of `mjcf.Physics`. 
+ joints: An iterable of `mjcf.Element` representing joints that may be + modified inside the context managed by this isolator. + """ + ... + def __enter__(self): # -> None: + ... + def __exit__(self, exc_type, exc_value, traceback): # -> None: + ... diff --git a/typings/dm_control/composer/observation/__init__.pyi b/typings/dm_control/composer/observation/__init__.pyi new file mode 100644 index 00000000..70721f45 --- /dev/null +++ b/typings/dm_control/composer/observation/__init__.pyi @@ -0,0 +1,9 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control.composer.observation import observable +from dm_control.composer.observation.obs_buffer import Buffer +from dm_control.composer.observation.updater import DEFAULT_BUFFER_SIZE, DEFAULT_DELAY, DEFAULT_UPDATE_INTERVAL, Updater + +"""Multi-rate observation and buffering framework for Composer environments.""" diff --git a/typings/dm_control/composer/observation/obs_buffer.pyi b/typings/dm_control/composer/observation/obs_buffer.pyi new file mode 100644 index 00000000..55847b41 --- /dev/null +++ b/typings/dm_control/composer/observation/obs_buffer.pyi @@ -0,0 +1,99 @@ +""" +This type stub file was generated by pyright. +""" + +""""An object that manages the buffering and delaying of observation.""" + +class InFlightObservation: + """Represents a delayed observation that may not have arrived yet. + + Attributes: + arrival: The time at which this observation will be delivered. + timestamp: The time at which this observation was made. + delay: The amount of delay between the time at which this observation was + made and the time at which it is delivered. + value: The value of this observation. + """ + + __slots__ = ... + def __init__(self, timestamp, delay, value) -> None: ... + def __lt__(self, other) -> bool: ... 
+ +class Buffer: + """An object that manages the buffering and delaying of observation.""" + + def __init__(self, buffer_size, shape, dtype, pad_with_initial_value=..., strip_singleton_buffer_dim=...) -> None: + """Initializes this observation buffer. + + Args: + buffer_size: The size of the buffer returned by `read`. Note + that this does *not* affect size of the internal buffer held by this + object, which always grow as large as is necessary in the presence of + large delays. + shape: The shape of a single observation held by this buffer, which can + either be a single integer or an iterable of integers. The shape of the + buffer returned by `read` will then be + `(buffer_size, shape[0], ..., shape[n])`, unless `buffer_size == 1` + and `strip_singleton_buffer_dim == True`. + dtype: The NumPy dtype of observation entries. + pad_with_initial_value: (optional) A boolean. If `True` then the buffer + returned by `read` is padded with the first observation value when there + are fewer observation entries than `buffer_size`. If `False` then the + buffer returned by `read` is padded with zeroes. + strip_singleton_buffer_dim: (optional) A boolean, if `True` and + `buffer_size == 1` then the leading dimension will not be added to the + shape of the array returned by `read`. + """ + ... + @property + def shape( + self, + ): # -> tuple[Unknown | Literal[1], ...] | tuple[Unknown | Literal[1], int] | tuple[Unknown, ...] | tuple[int]: + ... + @property + def dtype(self): # -> Unknown: + ... + def insert(self, timestamp, delay, value): # -> None: + """Inserts a new observation to the buffer. + + This function implicitly updates the internal "clock" of this buffer to + the timestamp of the new observation, and the internal buffer is trimmed + accordingly, i.e. at most `buffer_size` items whose delayed arrival time + preceeds `timestamp` are kept. + + Args: + timestamp: The time at which this observation was made. 
+ delay: The amount of delay between the time at which this observation was + made and the time at which it is delivered. + value: The value of this observation. + + Raises: + ValueError: if `delay` is negative. + """ + ... + def read(self, current_time): # -> NDArray[float64]: + """Reads the content of the buffer at the given timestamp.""" + ... + def drop_unobserved_upcoming_items(self, observation_schedule, read_interval): # -> None: + """Plans an optimal observation schedule for an upcoming control period. + + This function determines which of the proposed upcoming observations will + never in fact be delivered and removes them from the observation schedule. + + We assume that observations will only be queried at times that are integer + multiples of `read_interval`. If more observations are generated during + the upcoming control step than the `buffer_size` of this `Buffer` + then of those new observations will never be required. This function takes + into account the delayed arrival time and existing buffered items in the + planning process. + + Args: + observation_schedule: An list of `(timestamp, delay)` tuples, where + `timestamp` is the time at which the observation value will be produced, + and `delay` is the amount of time the observation will be delayed by. + This list will be modified in place. + read_interval: The time interval between successive calls to `read`. + We assume that observations will only be queried at times that are + integer multiples of `read_interval`. + """ + ... diff --git a/typings/dm_control/composer/observation/observable/__init__.pyi b/typings/dm_control/composer/observation/observable/__init__.pyi new file mode 100644 index 00000000..02c0224f --- /dev/null +++ b/typings/dm_control/composer/observation/observable/__init__.pyi @@ -0,0 +1,8 @@ +""" +This type stub file was generated by pyright. 
+""" + +from dm_control.composer.observation.observable.base import Generic, MujocoCamera, MujocoFeature, Observable +from dm_control.composer.observation.observable.mjcf import MJCFCamera, MJCFFeature + +"""Module for observables in the Composer library.""" diff --git a/typings/dm_control/composer/observation/observable/base.pyi b/typings/dm_control/composer/observation/observable/base.pyi new file mode 100644 index 00000000..80fe0df6 --- /dev/null +++ b/typings/dm_control/composer/observation/observable/base.pyi @@ -0,0 +1,205 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +"""Classes representing observables.""" +AGGREGATORS = ... + +class Observable(metaclass=abc.ABCMeta): + """Abstract base class for an observable.""" + + def __init__(self, update_interval, buffer_size, delay, aggregator, corruptor) -> None: ... + @property + def update_interval(self): # -> Unknown: + ... + @update_interval.setter + def update_interval(self, value): # -> None: + ... + @property + def buffer_size(self): # -> Unknown: + ... + @buffer_size.setter + def buffer_size(self, value): # -> None: + ... + @property + def delay(self): # -> Unknown: + ... + @delay.setter + def delay(self, value): # -> None: + ... + @property + def aggregator(self): # -> partial[Unknown] | None: + ... + @aggregator.setter + def aggregator(self, value): # -> None: + ... + @property + def corruptor(self): # -> Unknown: + ... + @corruptor.setter + def corruptor(self, value): # -> None: + ... + @property + def enabled(self): # -> bool: + ... + @enabled.setter + def enabled(self, value): # -> None: + ... + @property + def array_spec(self): # -> None: + """The `ArraySpec` which describes observation arrays from this observable. + + If this property is `None`, then the specification should be inferred by + actually retrieving an observation from this observable. + """ + ... 
+ def observation_callable(self, physics, random_state=...): # -> (() -> Unknown) | None: + """A callable which returns a (potentially corrupted) observation.""" + ... + def __call__(self, physics, random_state=...): + """Convenience function to just call an observable.""" + ... + def configure(self, **kwargs): # -> None: + """Sets multiple attributes of this observable. + + Args: + **kwargs: The keyword argument names correspond to the attributes + being modified. + Raises: + AttributeError: If kwargs contained an attribute not in the observable. + """ + ... + +class Generic(Observable): + """A generic observable defined via a callable.""" + + def __init__( + self, raw_observation_callable, update_interval=..., buffer_size=..., delay=..., aggregator=..., corruptor=... + ) -> None: + """Initializes this observable. + + Args: + raw_observation_callable: A callable which accepts a single argument of + type `control.base.Physics` and returns the observation value. + update_interval: (optional) An integer, number of simulation steps between + successive updates to the value of this observable. + buffer_size: (optional) The maximum size of the returned buffer. + This option is only relevant when used in conjunction with an + `observation.Updater`. If None, `observation.DEFAULT_BUFFER_SIZE` will + be used. + delay: (optional) Number of additional simulation steps that must be + taken before an observation is returned. This option is only relevant + when used in conjunction with an`observation.Updater`. If None, + `observation.DEFAULT_DELAY` will be used. + aggregator: (optional) Name of an item in `AGGREGATORS` or a callable that + performs a reduction operation over the first dimension of the buffered + observation before it is returned. A value of `None` means that no + aggregation will be performed and the whole buffer will be returned. + corruptor: (optional) A callable which takes a single observation as + an argument, modifies it, and returns it. 
An example use case for this + is to add random noise to the observation. When used in a + `BufferedWrapper`, the corruptor is applied to the observation before + it is added to the buffer. In particular, this means that the aggregator + operates on corrupted observations. + """ + ... + +class MujocoFeature(Observable): + """An observable corresponding to a named MuJoCo feature.""" + + def __init__( + self, kind, feature_name, update_interval=..., buffer_size=..., delay=..., aggregator=..., corruptor=... + ) -> None: + """Initializes this observable. + + Args: + kind: A string corresponding to a field name in MuJoCo's mjData struct. + feature_name: A string, or list of strings, or a callable returning + either, corresponding to the name(s) of an entity in the + MuJoCo XML model. + update_interval: (optional) An integer, number of simulation steps between + successive updates to the value of this observable. + buffer_size: (optional) The maximum size of the returned buffer. + This option is only relevant when used in conjunction with an + `observation.Updater`. If None, `observation.DEFAULT_BUFFER_SIZE` will + be used. + delay: (optional) Number of additional simulation steps that must be + taken before an observation is returned. This option is only relevant + when used in conjunction with an`observation.Updater`. If None, + `observation.DEFAULT_DELAY` will be used. + aggregator: (optional) Name of an item in `AGGREGATORS` or a callable that + performs a reduction operation over the first dimension of the buffered + observation before it is returned. A value of `None` means that no + aggregation will be performed and the whole buffer will be returned. + corruptor: (optional) A callable which takes a single observation as + an argument, modifies it, and returns it. An example use case for this + is to add random noise to the observation. When used in a + `BufferedWrapper`, the corruptor is applied to the observation before + it is added to the buffer. 
In particular, this means that the aggregator + operates on corrupted observations. + """ + ... + +class MujocoCamera(Observable): + """An observable corresponding to a MuJoCo camera.""" + + def __init__( + self, + camera_name, + height=..., + width=..., + update_interval=..., + buffer_size=..., + delay=..., + aggregator=..., + corruptor=..., + depth=..., + ) -> None: + """Initializes this observable. + + Args: + camera_name: A string corresponding to the name of a camera in the + MuJoCo XML model. + height: (optional) An integer, the height of the rendered image. + width: (optional) An integer, the width of the rendered image. + update_interval: (optional) An integer, number of simulation steps between + successive updates to the value of this observable. + buffer_size: (optional) The maximum size of the returned buffer. + This option is only relevant when used in conjunction with an + `observation.Updater`. If None, `observation.DEFAULT_BUFFER_SIZE` will + be used. + delay: (optional) Number of additional simulation steps that must be + taken before an observation is returned. This option is only relevant + when used in conjunction with an`observation.Updater`. If None, + `observation.DEFAULT_DELAY` will be used. + aggregator: (optional) Name of an item in `AGGREGATORS` or a callable that + performs a reduction operation over the first dimension of the buffered + observation before it is returned. A value of `None` means that no + aggregation will be performed and the whole buffer will be returned. + corruptor: (optional) A callable which takes a single observation as + an argument, modifies it, and returns it. An example use case for this + is to add random noise to the observation. When used in a + `BufferedWrapper`, the corruptor is applied to the observation before + it is added to the buffer. In particular, this means that the aggregator + operates on corrupted observations. + depth: (optional) A boolean. 
If `True`, renders a depth image (1-channel) + instead of RGB (3-channel). + """ + ... + @property + def height(self): # -> int: + ... + @height.setter + def height(self, value): # -> None: + ... + @property + def width(self): # -> int: + ... + @width.setter + def width(self, value): # -> None: + ... + @property + def array_spec(self): # -> Array: + ... diff --git a/typings/dm_control/composer/observation/observable/mjcf.pyi b/typings/dm_control/composer/observation/observable/mjcf.pyi new file mode 100644 index 00000000..e17f36d2 --- /dev/null +++ b/typings/dm_control/composer/observation/observable/mjcf.pyi @@ -0,0 +1,165 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control.composer.observation.observable import base + +"""Observables that are defined in terms of MJCF elements.""" +_BOTH_SEGMENTATION_AND_DEPTH_ENABLED = ... + +class MJCFFeature(base.Observable): + """An observable corresponding to an element in an MJCF model.""" + + def __init__( + self, + kind, + mjcf_element, + update_interval=..., + buffer_size=..., + delay=..., + aggregator=..., + corruptor=..., + index=..., + ) -> None: + """Initializes this observable. + + Args: + kind: The name of an attribute of a bound `mjcf.Physics` instance. See the + docstring for `mjcf.Physics.bind()` for examples showing this syntax. + mjcf_element: An `mjcf.Element`, or iterable of `mjcf.Element`. + update_interval: (optional) An integer, number of simulation steps between + successive updates to the value of this observable. + buffer_size: (optional) The maximum size of the returned buffer. + This option is only relevant when used in conjunction with an + `observation.Updater`. If None, `observation.DEFAULT_BUFFER_SIZE` will + be used. + delay: (optional) Number of additional simulation steps that must be + taken before an observation is returned. This option is only relevant + when used in conjunction with an`observation.Updater`. If None, + `observation.DEFAULT_DELAY` will be used. 
+ aggregator: (optional) Name of an item in `AGGREGATORS` or a callable that + performs a reduction operation over the first dimension of the buffered + observation before it is returned. A value of `None` means that no + aggregation will be performed and the whole buffer will be returned. + corruptor: (optional) A callable which takes a single observation as + an argument, modifies it, and returns it. An example use case for this + is to add random noise to the observation. When used in a + `BufferedWrapper`, the corruptor is applied to the observation before + it is added to the buffer. In particular, this means that the aggregator + operates on corrupted observations. + index: (optional) An index that is to be applied to an array attribute + to pick out a slice or particular items. As a syntactic sugar, + `MJCFFeature` also implements `__getitem__` that returns a copy of the + same observable with an index applied. + + Raises: + ValueError: if `mjcf_element` is not an `mjcf.Element`. + """ + ... + def __getitem__(self, key): # -> MJCFFeature: + ... + +class MJCFCamera(base.Observable): + """An observable corresponding to a camera in an MJCF model.""" + + def __init__( + self, + mjcf_element, + height=..., + width=..., + update_interval=..., + buffer_size=..., + delay=..., + aggregator=..., + corruptor=..., + depth=..., + segmentation=..., + scene_option=..., + render_flag_overrides=..., + ) -> None: + """Initializes this observable. + + Args: + mjcf_element: A `mjcf.Element`. + height: (optional) An integer, the height of the rendered image. + width: (optional) An integer, the width of the rendered image. + update_interval: (optional) An integer, number of simulation steps between + successive updates to the value of this observable. + buffer_size: (optional) The maximum size of the returned buffer. + This option is only relevant when used in conjunction with an + `observation.Updater`. If None, `observation.DEFAULT_BUFFER_SIZE` will + be used. 
+ delay: (optional) Number of additional simulation steps that must be + taken before an observation is returned. This option is only relevant + when used in conjunction with an`observation.Updater`. If None, + `observation.DEFAULT_DELAY` will be used. + aggregator: (optional) Name of an item in `AGGREGATORS` or a callable that + performs a reduction operation over the first dimension of the buffered + observation before it is returned. A value of `None` means that no + aggregation will be performed and the whole buffer will be returned. + corruptor: (optional) A callable which takes a single observation as + an argument, modifies it, and returns it. An example use case for this + is to add random noise to the observation. When used in a + `BufferedWrapper`, the corruptor is applied to the observation before + it is added to the buffer. In particular, this means that the aggregator + operates on corrupted observations. + depth: (optional) A boolean. If `True`, renders a depth image (1-channel) + instead of RGB (3-channel). + segmentation: (optional) A boolean. If `True`, renders a segmentation mask + (2-channel, int32) labeling the objects in the scene with their + (mjModel ID, mjtObj enum object type) pair. Background pixels are + set to (-1, -1). + scene_option: An optional `wrapper.MjvOption` instance that can be used to + render the scene with custom visualization options. If None then the + default options will be used. + render_flag_overrides: Optional mapping specifying rendering flags to + override. The keys can be either lowercase strings or `mjtRndFlag` enum + values, and the values are the overridden flag values, e.g. + `{'wireframe': True}` or `{mujoco.mjtRndFlag.mjRND_WIREFRAME: True}`. + See `mujoco.mjtRndFlag` for the set of valid flags. Must be None if + either `depth` or `segmentation` is True. + + Raises: + ValueError: if `mjcf_element` is not a element. + ValueError: if segmentation and depth flags are both set to True. + """ + ... 
+ @property + def height(self): # -> int: + ... + @height.setter + def height(self, value): # -> None: + ... + @property + def width(self): # -> int: + ... + @width.setter + def width(self, value): # -> None: + ... + @property + def depth(self): # -> bool: + ... + @depth.setter + def depth(self, value): # -> None: + ... + @property + def segmentation(self): # -> bool: + ... + @segmentation.setter + def segmentation(self, value): # -> None: + ... + @property + def scene_option(self): # -> None: + ... + @scene_option.setter + def scene_option(self, value): # -> None: + ... + @property + def render_flag_overrides(self): # -> None: + ... + @render_flag_overrides.setter + def render_flag_overrides(self, value): # -> None: + ... + @property + def array_spec(self): # -> BoundedArray: + ... diff --git a/typings/dm_control/composer/observation/updater.pyi b/typings/dm_control/composer/observation/updater.pyi new file mode 100644 index 00000000..ac7aac4f --- /dev/null +++ b/typings/dm_control/composer/observation/updater.pyi @@ -0,0 +1,75 @@ +""" +This type stub file was generated by pyright. +""" + +"""An object that creates and updates buffers for enabled observables.""" +DEFAULT_BUFFER_SIZE = ... +DEFAULT_UPDATE_INTERVAL = ... +DEFAULT_DELAY = ... + +class _EnabledObservable: + """Encapsulates an enabled observable, its buffer, and its update schedule.""" + + __slots__ = ... + def __init__( + self, observable, physics, random_state, strip_singleton_buffer_dim, pad_with_initial_value + ) -> None: ... + +class Updater: + """Creates and updates buffers for enabled observables.""" + + def __init__( + self, + observables, + physics_steps_per_control_step=..., + strip_singleton_buffer_dim=..., + pad_with_initial_value=..., + ) -> None: ... + def reset(self, physics, random_state): # -> None: + """Resets this updater's state.""" + ... + def observation_spec(self): # -> Any: + """The observation specification for this environment. 
+ + Returns a dict mapping the names of enabled observations to their + corresponding `Array` or `BoundedArray` specs. + + If an obs has a BoundedArray spec, but uses an aggregator that + does not preserve those bounds (such as `sum`), it will be mapped to an + (unbounded) `Array` spec. If using a bounds-preserving custom aggregator + `my_agg`, give it an attribute `my_agg.preserves_bounds = True` to indicate + to this method that it is bounds-preserving. + + The returned specification is only valid as of the previous call + to `reset`. In particular, it is an error to call this function before + the first call to `reset`. + + Returns: + A dict mapping observation name to `Array` or `BoundedArray` spec + containing the observation shape and dtype, and possibly bounds. + + Raises: + RuntimeError: If this method is called before `reset` has been called. + """ + ... + def prepare_for_next_control_step(self): # -> None: + """Simulates the next control step and optimizes the update schedule.""" + ... + def update(self): # -> None: + ... + def get_observation(self): # -> Any: + """Gets the current observation. + + The returned observation is only valid as of the previous call + to `reset`. In particular, it is an error to call this function before + the first call to `reset`. + + Returns: + A dict, or list of dicts, or tuple of dicts, of observation values. + The returned structure corresponds to the structure of the `observables` + that was given at initialization time. + + Raises: + RuntimeError: If this method is called before `reset` has been called. + """ + ... diff --git a/typings/dm_control/composer/robot.pyi b/typings/dm_control/composer/robot.pyi new file mode 100644 index 00000000..a89eb3d8 --- /dev/null +++ b/typings/dm_control/composer/robot.pyi @@ -0,0 +1,20 @@ +""" +This type stub file was generated by pyright. 
+""" + +import abc + +import numpy as np +from dm_control.composer import entity + +"""Module defining the abstract robot class.""" +DOWN_QUATERNION = np.array([0, 0.70710678118, 0.70710678118, 0]) + +class Robot(entity.Entity, metaclass=abc.ABCMeta): + """The abstract base class for robots.""" + + @property + @abc.abstractmethod + def actuators(self): + """Returns the actuator elements of the robot.""" + ... diff --git a/typings/dm_control/composer/task.pyi b/typings/dm_control/composer/task.pyi new file mode 100644 index 00000000..9154c30f --- /dev/null +++ b/typings/dm_control/composer/task.pyi @@ -0,0 +1,231 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +"""Abstract base class for a Composer task.""" + +class Task(metaclass=abc.ABCMeta): + """Abstract base class for a Composer task.""" + + @abc.abstractproperty + def root_entity(self): + """A `base.Entity` instance for this task.""" + ... + def iter_entities(self): ... + @property + def observables(self): # -> OrderedDict[Unknown, Unknown]: + """An OrderedDict of `control.Observable` instances for this task. + + Task subclasses should generally NOT override this property. + + This property is automatically computed by combining the observables dict + provided by each `Entity` present in this task, and any additional + observables returned via the `task_observables` property. + + To provide an observable to an agent, the task code should either set + `enabled` property of an `Entity`-bound observable to `True`, or override + the `task_observables` property to provide additional observables not bound + to an `Entity`. + + Returns: + An `collections.OrderedDict` mapping strings to instances of + `control.Observable`. + """ + ... + @property + def task_observables(self): # -> OrderedDict[Unknown, Unknown]: + """An OrderedDict of task-specific `control.Observable` instances. 
+ + A task should override this property if it wants to provide additional + observables to the agent that are not already provided by any `Entity` that + forms part of the task's model. For example, this may be used to provide + observations that is derived from relative poses between two entities. + + Returns: + An `collections.OrderedDict` mapping strings to instances of + `control.Observable`. + """ + ... + def after_compile(self, physics, random_state): # -> None: + """A callback which is executed after the Mujoco Physics is recompiled. + + Args: + physics: An instance of `control.Physics`. + random_state: An instance of `np.random.RandomState`. + """ + ... + @property + def control_timestep(self): # -> float: + """Returns the agent's control timestep for this task (in seconds).""" + ... + @control_timestep.setter + def control_timestep(self, new_value): # -> None: + """Changes the agent's control timestep for this task. + + Args: + new_value: the new control timestep (in seconds). + + Raises: + ValueError: if `new_value` is set and is not divisible by + `physics_timestep`. + """ + ... + @property + def physics_timestep(self): # -> float: + """Returns the physics timestep for this task (in seconds).""" + ... + @physics_timestep.setter + def physics_timestep(self, new_value): # -> None: + """Changes the physics simulation timestep for this task. + + Args: + new_value: the new simulation timestep (in seconds). + + Raises: + ValueError: if `control_timestep` is set and is not divisible by + `new_value`. + """ + ... + def set_timesteps(self, control_timestep, physics_timestep): # -> None: + """Changes the agent's control timestep and physics simulation timestep. + + This is equivalent to modifying `control_timestep` and `physics_timestep` + simultaneously. The divisibility check is performed between the two + new values. + + Args: + control_timestep: the new agent's control timestep (in seconds). 
+ physics_timestep: the new physics simulation timestep (in seconds). + + Raises: + ValueError: if `control_timestep` is not divisible by `physics_timestep`. + """ + ... + @property + def physics_steps_per_control_step(self): # -> int: + """Returns number of physics steps per agent's control step.""" + ... + def action_spec(self, physics): # -> BoundedArray: + """Returns a `BoundedArray` spec matching the `Physics` actuators. + + BoundedArray.name should contain a tab-separated list of actuator names. + When overloading this method, non-MuJoCo actuators should be added to the + top of the list when possible, as a matter of convention. + + Args: + physics: used to query actuator names in the model. + """ + ... + def get_reward_spec(self): # -> None: + """Optional method to define non-scalar rewards for a `Task`.""" + ... + def get_discount_spec(self): # -> None: + """Optional method to define non-scalar discounts for a `Task`.""" + ... + def initialize_episode_mjcf(self, random_state): # -> None: + """Modifies the MJCF model of this task before the next episode begins. + + The Environment calls this method and recompiles the physics + if necessary before calling `initialize_episode`. + + Args: + random_state: An instance of `np.random.RandomState`. + """ + ... + def initialize_episode(self, physics, random_state): # -> None: + """Modifies the physics state before the next episode begins. + + The Environment calls this method after `initialize_episode_mjcf`, and also + after the physics has been recompiled if necessary. + + Args: + physics: An instance of `control.Physics`. + random_state: An instance of `np.random.RandomState`. + """ + ... + def before_step(self, physics, action, random_state): # -> None: + """A callback which is executed before an agent control step. + + The default implementation sets the control signal for the actuators in + `physics` to be equal to `action`. 
Subclasses that override this method + should ensure that the overriding method also sets the control signal before + returning, either by calling `super().before_step`, or by setting + the control signal explicitly (e.g. in order to create a non-trivial mapping + between `action` and the control signal). + + Args: + physics: An instance of `control.Physics`. + action: A NumPy array corresponding to agent actions. + random_state: An instance of `np.random.RandomState` (unused). + """ + ... + def before_substep(self, physics, action, random_state): # -> None: + """A callback which is executed before a simulation step. + + Actuation can be set, or overridden, in this callback. + + Args: + physics: An instance of `control.Physics`. + action: A NumPy array corresponding to agent actions. + random_state: An instance of `np.random.RandomState`. + """ + ... + def after_substep(self, physics, random_state): # -> None: + """A callback which is executed after a simulation step. + + Args: + physics: An instance of `control.Physics`. + random_state: An instance of `np.random.RandomState`. + """ + ... + def after_step(self, physics, random_state): # -> None: + """A callback which is executed after an agent control step. + + Args: + physics: An instance of `control.Physics`. + random_state: An instance of `np.random.RandomState`. + """ + ... + @abc.abstractmethod + def get_reward(self, physics): + """Calculates the reward signal given the physics state. + + Args: + physics: A Physics object. + + Returns: + A float + """ + ... + def should_terminate_episode(self, physics): # -> Literal[False]: + """Determines whether the episode should terminate given the physics state. + + Args: + physics: A Physics object + + Returns: + A boolean + """ + ... + def get_discount(self, physics): # -> float: + """Calculates the reward discount factor given the physics state. + + Args: + physics: A Physics object + + Returns: + A float + """ + ... 
+ +class NullTask(Task): + """A class that wraps a single `Entity` into a `Task` with no reward.""" + + def __init__(self, root_entity) -> None: ... + @property + def root_entity(self): # -> Unknown: + ... + def get_reward(self, physics): # -> float: + ... diff --git a/typings/dm_control/composer/variation/__init__.pyi b/typings/dm_control/composer/variation/__init__.pyi new file mode 100644 index 00000000..cefcfeee --- /dev/null +++ b/typings/dm_control/composer/variation/__init__.pyi @@ -0,0 +1,76 @@ +""" +This type stub file was generated by pyright. +""" + +import collections +import copy + +from dm_control.composer.variation.base import Variation +from dm_control.composer.variation.variation_values import evaluate + +"""A module that helps manage model variation in Composer environments.""" + +class _VariationInfo: + __slots__ = ... + def __init__(self, initial_value=..., variation=...) -> None: ... + +class MJCFVariator: + """Helper object for applying variations to MJCF attributes. + + An instance of this class remembers the original value of each MJCF attribute + the first time a variation is applied. The original value is then passed as an + argument to each variation callable. + """ + + def __init__(self) -> None: ... + def bind_attributes(self, element, **kwargs): # -> None: + """Binds variations to attributes of an MJCF element. + + Args: + element: An `mjcf.Element` object. + **kwargs: Keyword arguments mapping attribute names to the corresponding + variations. A variation is either a fixed value or a callable that + optionally takes the original value of an attribute and returns a + new value. + """ + ... + def apply_variations(self, random_state): # -> None: + """Applies variations in-place to the specified MJCF element. + + Args: + random_state: A `numpy.random.RandomState` instance. + """ + ... + def clear(self): # -> None: + """Clears all bound attribute variations.""" + ... + def reset_initial_values(self): # -> None: + ... 
+ +class PhysicsVariator: + """Helper object for applying variations to MjModel and MjData. + + An instance of this class remembers the original value of each attribute + the first time a variation is applied. The original value is then passed as an + argument to each variation callable. + """ + + def __init__(self) -> None: ... + def bind_attributes(self, element, **kwargs): # -> None: + """Binds variations to attributes of an MJCF element. + + Args: + element: An `mjcf.Element` object. + **kwargs: Keyword arguments mapping attribute names to the corresponding + variations. A variation is either a fixed value or a callable that + optionally takes the original value of an attribute and returns a + new value. + """ + ... + def apply_variations(self, physics, random_state): # -> None: + ... + def clear(self): # -> None: + """Clears all bound attribute variations.""" + ... + def reset_initial_values(self): # -> None: + ... diff --git a/typings/dm_control/composer/variation/base.pyi b/typings/dm_control/composer/variation/base.pyi new file mode 100644 index 00000000..98aad3bf --- /dev/null +++ b/typings/dm_control/composer/variation/base.pyi @@ -0,0 +1,72 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +"""Base class for variations and binary operations on variations.""" + +class Variation(metaclass=abc.ABCMeta): + """Abstract base class for variations.""" + + @abc.abstractmethod + def __call__(self, initial_value, current_value, random_state): # -> None: + """Generates a value for this variation. + + Args: + initial_value: The original value of the attribute being varied. + Absolute variations may ignore this argument. + current_value: The current value of the attribute being varied. + Absolute variations may ignore this argument. + random_state: A `numpy.RandomState` used to generate the value. + Deterministic variations may ignore this argument. + + Returns: + The next value for this variation. + """ + ... 
+ def __add__(self, other): # -> _BinaryOperation: + ... + def __radd__(self, other): # -> _BinaryOperation: + ... + def __sub__(self, other): # -> _BinaryOperation: + ... + def __rsub__(self, other): # -> _BinaryOperation: + ... + def __mul__(self, other): # -> _BinaryOperation: + ... + def __rmul__(self, other): # -> _BinaryOperation: + ... + def __truediv__(self, other): # -> _BinaryOperation: + ... + def __rtruediv__(self, other): # -> _BinaryOperation: + ... + def __floordiv__(self, other): # -> _BinaryOperation: + ... + def __rfloordiv__(self, other): # -> _BinaryOperation: + ... + def __pow__(self, other): # -> _BinaryOperation: + ... + def __rpow__(self, other): # -> _BinaryOperation: + ... + def __getitem__(self, index): # -> _GetItemOperation: + ... + def __neg__(self): # -> _UnaryOperation: + ... + +class _UnaryOperation(Variation): + """Represents the result of applying a unary operator to a Variation.""" + + def __init__(self, op, variation) -> None: ... + def __call__(self, initial_value=..., current_value=..., random_state=...): ... + +class _BinaryOperation(Variation): + """Represents the result of applying a binary operator to two Variations.""" + + def __init__(self, op, first, second) -> None: ... + def __call__(self, initial_value=..., current_value=..., random_state=...): ... + +class _GetItemOperation(Variation): + def __init__(self, variation, index) -> None: ... + def __call__(self, initial_value=..., current_value=..., random_state=...): # -> ndarray[Any, dtype[Any]]: + ... diff --git a/typings/dm_control/composer/variation/distributions.pyi b/typings/dm_control/composer/variation/distributions.pyi new file mode 100644 index 00000000..1aa4e1ca --- /dev/null +++ b/typings/dm_control/composer/variation/distributions.pyi @@ -0,0 +1,97 @@ +""" +This type stub file was generated by pyright. 
+""" + +import abc + +from dm_control.composer.variation import base + +"""Standard statistical distributions that conform to the Variation API.""" + +class Distribution(base.Variation, metaclass=abc.ABCMeta): + """Base Distribution class for sampling a parametrized distribution. + + Subclasses need to implement `_callable`, which needs to return a callable + based on the random_state passed as arg. This callable then gets called using + the arguments passed to the constructor, after being evaluated. This allows + the distribution parameters themselves to be instances of `base.Variation`. + By default samples are drawn in the shape of `initial_value`, unless the + optional `single_sample` constructor arg is set to `True`, in which case only + a single sample is drawn. + """ + + __slots__ = ... + def __init__(self, *args, **kwargs) -> None: ... + def __call__(self, initial_value=..., current_value=..., random_state=...): ... + def __getattr__(self, name): ... + +class Uniform(Distribution): + __slots__ = ... + def __init__(self, low=..., high=..., single_sample=...) -> None: ... + +class UniformInteger(Distribution): + __slots__ = ... + def __init__(self, low, high=..., single_sample=...) -> None: ... + +class UniformChoice(Distribution): + __slots__ = ... + def __init__(self, choices, single_sample=...) -> None: ... + +class UniformPointOnSphere(base.Variation): + """Samples a point on the unit sphere, i.e. a 3D vector with norm 1.""" + + __slots__ = ... + def __init__(self, single_sample=...) -> None: ... + def __call__(self, initial_value=..., current_value=..., random_state=...): # -> Any: + ... + +class Normal(Distribution): + __slots__ = ... + def __init__(self, loc=..., scale=..., single_sample=...) -> None: ... + +class LogNormal(Distribution): + __slots__ = ... + def __init__(self, mean=..., sigma=..., single_sample=...) -> None: ... + +class Exponential(Distribution): + __slots__ = ... + def __init__(self, scale=..., single_sample=...) -> None: ... 
+
+class Poisson(Distribution):
+ __slots__ = ...
+ def __init__(self, lam=..., single_sample=...) -> None: ...
+
+class Bernoulli(Distribution):
+ __slots__ = ...
+ def __init__(self, prob=..., single_sample=...) -> None: ...
+
+_NEGATIVE_STDEV = ...
+_NEGATIVE_TIMESCALE = ...
+
+class BiasedRandomWalk(base.Variation):
+ """A class for generating noise from a zero-mean Ornstein-Uhlenbeck process.
+
+ Let
+ `retain = np.exp(-1. / timescale)`
+ and
+ `scale = stdev * sqrt(1 - (retain * retain))`
+ Then the discrete-time first-order filtered diffusion process
+ `x_next = retain * x + N(0, scale)`
+ has standard deviation `stdev` and characteristic timescale `timescale`.
+ """
+
+ __slots__ = ...
+ def __init__(self, stdev=..., timescale=...) -> None:
+ """Initializes a `BiasedRandomWalk`.
+
+ Args:
+ stdev: Float. Standard deviation of the output sequence.
+ timescale: Integer. Number of timesteps characteristic of the random walk.
+ After `timescale` steps the correlation is reduced by exp(-1). Larger or
+ equal to 0, where a value of 0 is an uncorrelated normal distribution.
+
+ Raises:
+ ValueError: if either `stdev` or `timescale` is negative.
+ """
+ ...
+ def __call__(self, initial_value=..., current_value=..., random_state=...): ...
diff --git a/typings/dm_control/composer/variation/rotations.pyi b/typings/dm_control/composer/variation/rotations.pyi
new file mode 100644
index 00000000..64ae3f38
--- /dev/null
+++ b/typings/dm_control/composer/variation/rotations.pyi
@@ -0,0 +1,35 @@
+"""
+This type stub file was generated by pyright.
+"""
+
+import numpy as np
+from dm_control.composer.variation import base
+
+"""Variations in 3D rotations."""
+IDENTITY_QUATERNION = np.array([1, 0, 0, 0])
+
+class UniformQuaternion(base.Variation):
+ """Uniformly distributed unit quaternions."""
+
+ def __call__(self, initial_value=..., current_value=..., random_state=...): # -> NDArray[Any]:
+ ...
+
+class QuaternionFromAxisAngle(base.Variation):
+ """Quaternion variation specified in terms of variations in axis and angle."""
+
+ def __init__(self, axis, angle) -> None: ...
+ def __call__(self, initial_value=..., current_value=..., random_state=...): # -> NDArray[Unknown]:
+ ...
+
+class QuaternionPreMultiply(base.Variation):
+ """A variation that pre-multiplies an existing quaternion value.
+
+ This variation takes a quaternion value generated by another variation and
+ pre-multiplies it to an existing value. In cumulative mode, the new quaternion
+ is pre-multiplied to the current value being varied. In non-cumulative mode,
+ the new quaternion is pre-multiplied to a fixed initial value.
+ """
+
+ def __init__(self, quat, cumulative=...) -> None: ...
+ def __call__(self, initial_value=..., current_value=..., random_state=...): # -> NDArray[Unknown]:
+ ...
diff --git a/typings/dm_control/composer/variation/variation_values.pyi b/typings/dm_control/composer/variation/variation_values.pyi
new file mode 100644
index 00000000..bc5dac1c
--- /dev/null
+++ b/typings/dm_control/composer/variation/variation_values.pyi
@@ -0,0 +1,22 @@
+"""
+This type stub file was generated by pyright.
+"""
+
+"""Utilities for handling nested structures of callables or constants."""
+
+def evaluate(
+ structure, *args, **kwargs
+): # -> defaultdict[Unknown, Unknown] | MappingProxyType[Unknown, Unknown] | Mapping[Unknown, Unknown] | dict[Unknown, Unknown] | list[Unknown] | Any | ObjectProxy:
+ """Evaluates an arbitrarily nested structure of callables or constant values.
+
+ Args:
+ structure: An arbitrarily nested structure of callables or constant values.
+ By "structures", we mean lists, tuples, namedtuples, or dicts.
+ *args: Positional arguments passed to each callable in `structure`.
+ **kwargs: Keyword arguments passed to each callable in `structure`.
+
+ Returns:
+ The same nested structure, with each callable replaced by the value returned
+ by calling it.
+ """
+ ...
diff --git a/typings/dm_control/entities/__init__.pyi b/typings/dm_control/entities/__init__.pyi new file mode 100644 index 00000000..cea7ef96 --- /dev/null +++ b/typings/dm_control/entities/__init__.pyi @@ -0,0 +1,3 @@ +""" +This type stub file was generated by pyright. +""" diff --git a/typings/dm_control/entities/manipulators/__init__.pyi b/typings/dm_control/entities/manipulators/__init__.pyi new file mode 100644 index 00000000..d0278d5c --- /dev/null +++ b/typings/dm_control/entities/manipulators/__init__.pyi @@ -0,0 +1,5 @@ +""" +This type stub file was generated by pyright. +""" + +"""Composer entities corresponding to robots.""" diff --git a/typings/dm_control/entities/manipulators/base.pyi b/typings/dm_control/entities/manipulators/base.pyi new file mode 100644 index 00000000..8f0f6156 --- /dev/null +++ b/typings/dm_control/entities/manipulators/base.pyi @@ -0,0 +1,100 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +import numpy as np +from dm_control import composer +from dm_control.composer import define +from dm_control.composer.observation import observable + +"""Abstract base classes for robot arms and hands.""" +DOWN_QUATERNION = np.array([0, 0.70710678118, 0.70710678118, 0]) +_INVALID_JOINTS_ERROR = ... + +class RobotArm(composer.Robot, metaclass=abc.ABCMeta): + """The abstract base class for robotic arms.""" + + @property + def attachment_site(self): ... + def randomize_arm_joints(self, physics, random_state): # -> None: + """Randomizes the qpos of all arm joints. + + The ranges of qpos values is determined from the MJCF model. + + Args: + physics: A `mujoco.Physics` instance. + random_state: An `np.random.RandomState` instance. + """ + ... + def set_site_to_xpos( + self, physics, random_state, site, target_pos, target_quat=..., max_ik_attempts=... + ): # -> Literal[False]: + """Moves the arm so that a site occurs at the specified location. 
+ + This function runs the inverse kinematics solver to find a configuration + arm joints for which the pinch site occurs at the specified location in + Cartesian coordinates. + + Args: + physics: A `mujoco.Physics` instance. + random_state: An `np.random.RandomState` instance. + site: Either a `mjcf.Element` or a string specifying the full name + of the site whose position is being set. + target_pos: The desired Cartesian location of the site. + target_quat: (optional) The desired orientation of the site, expressed + as a quaternion. If `None`, the default orientation is to point + vertically downwards. + max_ik_attempts: (optional) Maximum number of attempts to make at finding + a solution satisfying `target_pos` and `target_quat`. The joint + positions will be randomized after each unsuccessful attempt. + + Returns: + A boolean indicating whether the desired configuration is obtained. + + Raises: + ValueError: If site is neither a string nor an `mjcf.Element`. + """ + ... + @property + @abc.abstractmethod + def joints(self): + """Returns the joint elements of the arm.""" + ... + @property + @abc.abstractmethod + def wrist_site(self): + """Returns the wrist site element of the arm.""" + ... + +class JointsObservables(composer.Observables): + """Observables common to all robot arms.""" + + @define.observable + def joints_pos(self): # -> MJCFFeature: + ... + @define.observable + def joints_vel(self): # -> MJCFFeature: + ... + +class RobotHand(composer.Robot, metaclass=abc.ABCMeta): + """The abstract base class for robotic hands.""" + + @abc.abstractmethod + def set_grasp(self, physics, close_factors): # -> None: + """Sets the finger position to the desired positions. + + Args: + physics: An instance of `mjcf.Physics`. + close_factors: A number or list of numbers defining the desired grasp + position of each finger. A value of 0 corresponds to fully opening a + finger, while a value of 1 corresponds to fully closing it. 
If a single + number is specified, the same position is applied to all fingers. + """ + ... + @property + @abc.abstractmethod + def tool_center_point(self): # -> None: + """Returns the tool center point element of the hand.""" + ... diff --git a/typings/dm_control/entities/manipulators/kinova/__init__.pyi b/typings/dm_control/entities/manipulators/kinova/__init__.pyi new file mode 100644 index 00000000..ec6fdf56 --- /dev/null +++ b/typings/dm_control/entities/manipulators/kinova/__init__.pyi @@ -0,0 +1,8 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control.entities.manipulators.kinova.jaco_arm import JacoArm +from dm_control.entities.manipulators.kinova.jaco_hand import JacoHand + +"""Composer models of Kinova robots.""" diff --git a/typings/dm_control/entities/manipulators/kinova/assets_path.pyi b/typings/dm_control/entities/manipulators/kinova/assets_path.pyi new file mode 100644 index 00000000..43f81276 --- /dev/null +++ b/typings/dm_control/entities/manipulators/kinova/assets_path.pyi @@ -0,0 +1,7 @@ +""" +This type stub file was generated by pyright. +""" + +"""Helper module that specifies the path to Kinova assets.""" +_DM_CONTROL_ROOT = ... +KINOVA_ROOT = ... diff --git a/typings/dm_control/entities/manipulators/kinova/jaco_arm.pyi b/typings/dm_control/entities/manipulators/kinova/jaco_arm.pyi new file mode 100644 index 00000000..4f95f3fd --- /dev/null +++ b/typings/dm_control/entities/manipulators/kinova/jaco_arm.pyi @@ -0,0 +1,53 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control.composer import define +from dm_control.composer.observation import observable +from dm_control.entities.manipulators import base + +"""Module containing the Jaco robot class.""" +_JACO_ARM_XML_PATH = ... +_LARGE_JOINTS = ... +_SMALL_JOINTS = ... +_ALL_JOINTS = ... +_WRIST_SITE = ... +_LARGE_JOINT_MAX_TORQUE = ... +_SMALL_JOINT_MAX_TORQUE = ... +_LARGE_JOINT_MAX_VELOCITY = ... +_SMALL_JOINT_MAX_VELOCITY = ... 
+_VELOCITY_GAIN = ...
+
+class JacoArm(base.RobotArm):
+ """A composer entity representing a Jaco arm."""
+
+ @property
+ def joints(self): # -> list[Any | None]:
+ """List of joint elements belonging to the arm."""
+ ...
+ @property
+ def actuators(self): # -> list[Any]:
+ """List of actuator elements belonging to the arm."""
+ ...
+ @property
+ def joint_torque_sensors(self): # -> list[Any]:
+ """List of torque sensors for each joint belonging to the arm."""
+ ...
+ @property
+ def wrist_site(self): # -> Any | None:
+ """Wrist site of the arm (attachment point for the hand)."""
+ ...
+ @property
+ def mjcf_model(self): # -> RootElement:
+ """Returns the `mjcf.RootElement` object corresponding to this robot."""
+ ...
+
+class JacoArmObservables(base.JointsObservables):
+ """Jaco arm observables."""
+
+ @define.observable
+ def joints_pos(self): # -> Generic:
+ ...
+ @define.observable
+ def joints_torque(self): # -> Generic:
+ ...
diff --git a/typings/dm_control/entities/manipulators/kinova/jaco_hand.pyi b/typings/dm_control/entities/manipulators/kinova/jaco_hand.pyi
new file mode 100644
index 00000000..20a00e13
--- /dev/null
+++ b/typings/dm_control/entities/manipulators/kinova/jaco_hand.pyi
@@ -0,0 +1,80 @@
+"""
+This type stub file was generated by pyright.
+"""
+
+from dm_control import composer
+from dm_control.composer.observation import observable
+from dm_control.entities.manipulators import base
+
+"""Module containing the standard Jaco hand."""
+_JACO_HAND_XML_PATH = ...
+_HAND_BODY = ...
+_PINCH_SITE = ...
+_GRIP_SITE = ...
+
+class JacoHand(base.RobotHand):
+ """A composer entity representing a Jaco hand."""
+
+ @property
+ def tool_center_point(self): # -> Any | None:
+ """Tool center point for the Jaco hand."""
+ ...
+ @property
+ def joints(self): # -> list[Unknown]:
+ """List of joint elements."""
+ ...
+ @property
+ def actuators(self): # -> list[Unknown]:
+ """List of finger actuators."""
+ ...
+ @property + def hand_geom(self): # -> list[Any]: + """List of geoms belonging to the hand.""" + ... + @property + def finger_geoms(self): # -> list[Unknown]: + """List of geoms belonging to the fingers.""" + ... + @property + def grip_site(self): # -> Any | None: + """Grip site.""" + ... + @property + def pinch_site(self): # -> Any | None: + """Pinch site.""" + ... + @property + def pinch_site_pos_sensor(self): + """Sensor that returns the cartesian position of the pinch site.""" + ... + @property + def pinch_site_quat_sensor(self): + """Sensor that returns the orientation of the pinch site as a quaternion.""" + ... + @property + def mjcf_model(self): # -> RootElement: + """Returns the `mjcf.RootElement` object corresponding to this robot.""" + ... + def set_grasp(self, physics, close_factors): # -> None: + """Sets the finger position to the desired positions. + + Args: + physics: An instance of `mjcf.Physics`. + close_factors: A number or list of numbers defining the desired grasp + position of each finger. A value of 0 corresponds to fully opening a + finger, while a value of 1 corresponds to fully closing it. If a single + number is specified, the same position is applied to all fingers. + """ + ... + +class JacoHandObservables(base.JointsObservables): + """Observables for the Jaco hand.""" + + @composer.observable + def pinch_site_pos(self): # -> MJCFFeature: + """The position of the pinch site, in global coordinates.""" + ... + @composer.observable + def pinch_site_rmat(self): # -> MJCFFeature: + """The rotation matrix of the pinch site in global coordinates.""" + ... diff --git a/typings/dm_control/entities/props/__init__.pyi b/typings/dm_control/entities/props/__init__.pyi new file mode 100644 index 00000000..7511631e --- /dev/null +++ b/typings/dm_control/entities/props/__init__.pyi @@ -0,0 +1,12 @@ +""" +This type stub file was generated by pyright. 
+""" + +from dm_control.entities.props.duplo import Duplo +from dm_control.entities.props.position_detector import PositionDetector +from dm_control.entities.props.primitive import Primitive + +"""Composer entities corresponding to props. + +A "prop" is typically a non-actuated entity representing an object in the world. +""" diff --git a/typings/dm_control/entities/props/duplo/__init__.pyi b/typings/dm_control/entities/props/duplo/__init__.pyi new file mode 100644 index 00000000..afb2401b --- /dev/null +++ b/typings/dm_control/entities/props/duplo/__init__.pyi @@ -0,0 +1,55 @@ +""" +This type stub file was generated by pyright. +""" + +import collections +import os + +import numpy as np +from dm_control import composer, mjcf +from dm_control.composer import define +from dm_control.composer.observation import observable + +"""A 2x4 Duplo brick.""" +_DUPLO_XML_PATH = ... +_StudSize = ... +_StudParams = ... +_STUD_SIZE_PARAMS = ... +_COLOR_NOT_BETWEEN_0_AND_1 = ... + +class Duplo(composer.Entity): + """A 2x4 Duplo brick.""" + + def initialize_episode_mjcf(self, random_state): # -> None: + """Randomizes the stud radius (and therefore the separation force).""" + ... + @property + def studs(self): # -> ndarray[Unknown, Unknown]: + """A (2, 4) numpy array of `mjcf.Elements` corresponding to stud sites.""" + ... + @property + def holes(self): # -> ndarray[Unknown, Unknown]: + """A (2, 4) numpy array of `mjcf.Elements` corresponding to hole sites.""" + ... + @property + def mjcf_model(self): # -> RootElement: + ... + +class DuploObservables(composer.Observables, composer.FreePropObservableMixin): + """Observables for the `Duplo` prop.""" + + @define.observable + def position(self): # -> MJCFFeature: + ... + @define.observable + def orientation(self): # -> MJCFFeature: + ... + @define.observable + def linear_velocity(self): # -> MJCFFeature: + ... + @define.observable + def angular_velocity(self): # -> MJCFFeature: + ... 
+ @define.observable + def force(self): # -> MJCFFeature: + ... diff --git a/typings/dm_control/entities/props/position_detector.pyi b/typings/dm_control/entities/props/position_detector.pyi new file mode 100644 index 00000000..2ddb4d0c --- /dev/null +++ b/typings/dm_control/entities/props/position_detector.pyi @@ -0,0 +1,84 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import composer + +"""Detects the presence of registered entities within a cuboidal region.""" +_RENDERED_HEIGHT_IN_2D_MODE = ... + +class _Detection: + __slots__ = ... + def __init__(self, entity, detected=...) -> None: ... + +class PositionDetector(composer.Entity): + """Detects the presence of registered entities within an axis-aligned box. + + The volume of this detector is defined by a "lower" corner and an "upper" + corner, which suffice to define an axis-aligned box. + An entity is considered "detected" if the `xpos` value of any one of its geom + lies within the active region defined by this detector. Note that this is NOT + a contact-based detector. Generally speaking, a geom will not be detected + until it is already "half inside" the region. + + This detector supports both 2D and 3D modes. In 2D mode, the active region + has an effective infinite height along the z-direction. + + This detector also provides an "inverted" detection mode, where an entity is + detected when it is not inside the detector's region. + """ + + def resize(self, pos, size): # -> None: + ... + def set_colors(self, rgba, detected_rgba): # -> None: + ... + def set_color(self, rgba): # -> None: + ... + def set_detected_color(self, detected_rgba): # -> None: + ... + def set_position(self, physics, pos): # -> None: + ... + @property + def mjcf_model(self): # -> RootElement: + ... + def register_entities(self, *entities): # -> None: + ... + def deregister_entities(self): # -> None: + ... 
+ @property + def detected_entities(self): # -> list[Unknown]: + """A list of detected entities.""" + ... + def initialize_episode_mjcf(self, unused_random_state): # -> None: + ... + def initialize_episode(self, physics, unused_random_state): # -> None: + ... + def before_step(self, physics, unused_random_state): # -> None: + ... + def after_substep(self, physics, unused_random_state): # -> None: + ... + def site_pos(self, physics): ... + @property + def activated(self): # -> bool: + ... + @property + def upper(self): # -> NDArray[bool_]: + ... + @property + def lower(self): # -> NDArray[Any]: + ... + @property + def mid(self): # -> NDArray[floating[Any]]: + ... + @property + def lower_site(self): ... + @property + def mid_site(self): ... + @property + def upper_site(self): ... + @property + def lower_sensor(self): ... + @property + def mid_sensor(self): ... + @property + def upper_sensor(self): ... diff --git a/typings/dm_control/entities/props/primitive.pyi b/typings/dm_control/entities/props/primitive.pyi new file mode 100644 index 00000000..9c29e320 --- /dev/null +++ b/typings/dm_control/entities/props/primitive.pyi @@ -0,0 +1,51 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import composer +from dm_control.composer import define +from dm_control.composer.observation import observable + +"""Prop consisting of a single geom with position and velocity sensors.""" + +class Primitive(composer.Entity): + """A prop consisting of a single geom with position and velocity sensors.""" + + @property + def geom(self): + """The geom belonging to this prop.""" + ... + @property + def position(self): + """Sensor that returns the prop position.""" + ... + @property + def orientation(self): + """Sensor that returns the prop orientation (as a quaternion).""" + ... + @property + def linear_velocity(self): + """Sensor that returns the linear velocity of the prop.""" + ... 
+ @property + def angular_velocity(self): + """Sensor that returns the angular velocity of the prop.""" + ... + @property + def mjcf_model(self): ... + +class PrimitiveObservables(composer.Observables, composer.FreePropObservableMixin): + """Primitive entity's observables.""" + + @define.observable + def position(self): # -> MJCFFeature: + ... + @define.observable + def orientation(self): # -> MJCFFeature: + ... + @define.observable + def linear_velocity(self): # -> MJCFFeature: + ... + @define.observable + def angular_velocity(self): # -> MJCFFeature: + ... diff --git a/typings/dm_control/locomotion/__init__.pyi b/typings/dm_control/locomotion/__init__.pyi new file mode 100644 index 00000000..cea7ef96 --- /dev/null +++ b/typings/dm_control/locomotion/__init__.pyi @@ -0,0 +1,3 @@ +""" +This type stub file was generated by pyright. +""" diff --git a/typings/dm_control/manipulation/__init__.pyi b/typings/dm_control/manipulation/__init__.pyi new file mode 100644 index 00000000..888d9456 --- /dev/null +++ b/typings/dm_control/manipulation/__init__.pyi @@ -0,0 +1,44 @@ +""" +This type stub file was generated by pyright. +""" + +from absl import flags +from dm_control import composer as _composer +from dm_control.manipulation import bricks as _bricks +from dm_control.manipulation import lift as _lift +from dm_control.manipulation import place as _place +from dm_control.manipulation import reach as _reach +from dm_control.manipulation.shared import registry as _registry + +"""A structured set of manipulation tasks with a single entry point.""" +_TIME_LIMIT = ... +_TIMEOUT = ... +ALL = ... +TAGS = ... +FLAGS = ... + +def get_environments_by_tag(tag): # -> tuple[Unknown, ...]: + """Returns the names of all environments matching a given tag. + + Args: + tag: A string from `TAGS`. + + Returns: + A tuple of environment names. + """ + ... + +def load(environment_name, seed=...): # -> Environment: + """Loads a manipulation environment. 
+ + Args: + environment_name: String, the name of the environment to load. Must be in + `ALL`. + seed: An optional integer used to seed the task's random number generator. + If None (default), the random number generator will self-seed from a + platform-dependent source of entropy. + + Returns: + An instance of `composer.Environment`. + """ + ... diff --git a/typings/dm_control/manipulation/bricks.pyi b/typings/dm_control/manipulation/bricks.pyi new file mode 100644 index 00000000..bb9c716a --- /dev/null +++ b/typings/dm_control/manipulation/bricks.pyi @@ -0,0 +1,172 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import composer +from dm_control.manipulation.shared import registry, tags +from dm_control.mujoco.wrapper import mjbindings + +"""Tasks involving assembly and/or disassembly of bricks.""" +mjlib = mjbindings.mjlib +_BrickWorkspace = ... +_PROP_Z_OFFSET = ... +_WORKSPACE = ... +_HINT_ALPHA = ... +_CLOSE_THRESHOLD = ... +_CLICK_THRESHOLD = ... + +class _Common(composer.Task): + """Common components of brick tasks.""" + + def __init__(self, arena, arm, hand, num_bricks, obs_settings, workspace, control_timestep) -> None: ... + @property + def task_observables(self): # -> OrderedDict[Unknown, Unknown]: + ... + @property + def root_entity(self): # -> Unknown: + ... + @property + def arm(self): # -> Unknown: + ... + @property + def hand(self): # -> Unknown: + ... + +class Stack(_Common): + """Build a stack of Duplo bricks.""" + + def __init__( + self, + arena, + arm, + hand, + num_bricks, + target_height, + moveable_base, + randomize_order, + obs_settings, + workspace, + control_timestep, + ) -> None: + """Initializes a new `Stack` task. + + Args: + arena: `composer.Entity` instance. + arm: `robot_base.RobotArm` instance. + hand: `robot_base.RobotHand` instance. + num_bricks: The total number of bricks; must be between 2 and 6. + target_height: The target number of bricks in the stack in order to get + maximum reward. 
Must be between 2 and `num_bricks`. + moveable_base: Boolean specifying whether or not the bottom brick should + be moveable. + randomize_order: Boolean specifying whether to randomize the desired order + of bricks in the stack at the start of each episode. + obs_settings: `observations.ObservationSettings` instance. + workspace: A `_BrickWorkspace` instance. + control_timestep: Float specifying the control timestep in seconds. + + Raises: + ValueError: If `num_bricks` is not between 2 and 6, or if + `target_height` is not between 2 and `num_bricks - 1`. + """ + ... + def initialize_episode_mjcf(self, random_state): # -> None: + ... + def initialize_episode(self, physics, random_state): # -> None: + ... + def get_reward(self, physics): ... + +class Reassemble(_Common): + """Disassemble a stack of Duplo bricks and reassemble it in another order.""" + + def __init__( + self, + arena, + arm, + hand, + num_bricks, + randomize_initial_order, + randomize_desired_order, + obs_settings, + workspace, + control_timestep, + ) -> None: + """Initializes a new `Reassemble` task. + + Args: + arena: `composer.Entity` instance. + arm: `robot_base.RobotArm` instance. + hand: `robot_base.RobotHand` instance. + num_bricks: The total number of bricks; must be between 2 and 6. + randomize_initial_order: Boolean specifying whether to randomize the + initial order of bricks in the stack at the start of each episode. + randomize_desired_order: Boolean specifying whether to independently + randomize the desired order of bricks in the stack at the start of each + episode. By default the desired order will be the reverse of the initial + order, with the exception of the base brick which is always the same as + in the initial order since it is welded in place. + obs_settings: `observations.ObservationSettings` instance. + workspace: A `_BrickWorkspace` instance. + control_timestep: Float specifying the control timestep in seconds. 
+ + Raises: + ValueError: If `num_bricks` is not between 2 and 6. + """ + ... + def initialize_episode_mjcf(self, random_state): # -> None: + ... + def initialize_episode(self, physics, random_state): # -> None: + ... + def get_reward(self, physics): ... + +@registry.add(tags.FEATURES) +def stack_2_bricks_features(): # -> Stack: + ... + +@registry.add(tags.VISION) +def stack_2_bricks_vision(): # -> Stack: + ... + +@registry.add(tags.FEATURES) +def stack_2_bricks_moveable_base_features(): # -> Stack: + ... + +@registry.add(tags.VISION) +def stack_2_bricks_moveable_base_vision(): # -> Stack: + ... + +@registry.add(tags.FEATURES) +def stack_3_bricks_features(): # -> Stack: + ... + +@registry.add(tags.VISION) +def stack_3_bricks_vision(): # -> Stack: + ... + +@registry.add(tags.FEATURES) +def stack_3_bricks_random_order_features(): # -> Stack: + ... + +@registry.add(tags.FEATURES) +def stack_2_of_3_bricks_random_order_features(): # -> Stack: + ... + +@registry.add(tags.VISION) +def stack_2_of_3_bricks_random_order_vision(): # -> Stack: + ... + +@registry.add(tags.FEATURES) +def reassemble_3_bricks_fixed_order_features(): # -> Reassemble: + ... + +@registry.add(tags.VISION) +def reassemble_3_bricks_fixed_order_vision(): # -> Reassemble: + ... + +@registry.add(tags.FEATURES) +def reassemble_5_bricks_random_order_features(): # -> Reassemble: + ... + +@registry.add(tags.VISION) +def reassemble_5_bricks_random_order_vision(): # -> Reassemble: + ... diff --git a/typings/dm_control/manipulation/lift.pyi b/typings/dm_control/manipulation/lift.pyi new file mode 100644 index 00000000..ad263b2e --- /dev/null +++ b/typings/dm_control/manipulation/lift.pyi @@ -0,0 +1,81 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import composer +from dm_control.entities import props +from dm_control.manipulation.shared import registry, tags + +"""Tasks where the goal is to elevate a prop.""" +_LiftWorkspace = ... +_DUPLO_WORKSPACE = ... +_BOX_SIZE = ... 
+_BOX_MASS = ... +_BOX_WORKSPACE = ... +_DISTANCE_TO_LIFT = ... + +class _VertexSitesMixin: + """Mixin class that adds sites corresponding to the vertices of a box.""" + + @property + def vertices(self): # -> list[Unknown]: + ... + +class _BoxWithVertexSites(props.Primitive, _VertexSitesMixin): + """Subclass of `Box` with sites marking the vertices of the box geom.""" + + ... + +class _DuploWithVertexSites(props.Duplo, _VertexSitesMixin): + """Subclass of `Duplo` with sites marking the vertices of its sensor site.""" + + ... + +class Lift(composer.Task): + """A task where the goal is to elevate a prop.""" + + def __init__(self, arena, arm, hand, prop, obs_settings, workspace, control_timestep) -> None: + """Initializes a new `Lift` task. + + Args: + arena: `composer.Entity` instance. + arm: `robot_base.RobotArm` instance. + hand: `robot_base.RobotHand` instance. + prop: `composer.Entity` instance. + obs_settings: `observations.ObservationSettings` instance. + workspace: `_LiftWorkspace` specifying the placement of the prop and TCP. + control_timestep: Float specifying the control timestep in seconds. + """ + ... + @property + def root_entity(self): # -> Unknown: + ... + @property + def arm(self): # -> Unknown: + ... + @property + def hand(self): # -> Unknown: + ... + @property + def task_observables(self): # -> OrderedDict[Unknown, Unknown]: + ... + def get_reward(self, physics): # -> float | NDArray[Any]: + ... + def initialize_episode(self, physics, random_state): # -> None: + ... + +@registry.add(tags.FEATURES) +def lift_brick_features(): # -> Lift: + ... + +@registry.add(tags.VISION) +def lift_brick_vision(): # -> Lift: + ... + +@registry.add(tags.FEATURES) +def lift_large_box_features(): # -> Lift: + ... + +@registry.add(tags.VISION) +def lift_large_box_vision(): # -> Lift: + ... 
diff --git a/typings/dm_control/manipulation/place.pyi b/typings/dm_control/manipulation/place.pyi new file mode 100644 index 00000000..2e58c7e6 --- /dev/null +++ b/typings/dm_control/manipulation/place.pyi @@ -0,0 +1,88 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import composer +from dm_control.composer import define +from dm_control.composer.observation import observable +from dm_control.manipulation.shared import registry, tags + +"""A task where the goal is to place a movable prop on top of a fixed prop.""" +_PlaceWorkspace = ... +_TARGET_RADIUS = ... +_PEDESTAL_RADIUS = ... +_PROP_Z_OFFSET = ... +_WORKSPACE = ... + +class SphereCradle(composer.Entity): + """A concave shape for easy placement.""" + + _SPHERE_COUNT = ... + @property + def mjcf_model(self): ... + +class Pedestal(composer.Entity): + """A narrow pillar to elevate the target.""" + + _HEIGHT = ... + @property + def mjcf_model(self): ... + @property + def target_site(self): ... + +class PedestalObservables(composer.Observables): + """Observables for the `Pedestal` prop.""" + + @define.observable + def position(self): # -> MJCFFeature: + ... + +class Place(composer.Task): + """Place the prop on top of another fixed prop held up by a pedestal.""" + + def __init__(self, arena, arm, hand, prop, obs_settings, workspace, control_timestep, cradle) -> None: + """Initializes a new `Place` task. + + Args: + arena: `composer.Entity` instance. + arm: `robot_base.RobotArm` instance. + hand: `robot_base.RobotHand` instance. + prop: `composer.Entity` instance. + obs_settings: `observations.ObservationSettings` instance. + workspace: A `_PlaceWorkspace` instance. + control_timestep: Float specifying the control timestep in seconds. + cradle: `composer.Entity` onto which the `prop` must be placed. + """ + ... + @property + def root_entity(self): # -> Unknown: + ... + @property + def arm(self): # -> Unknown: + ... + @property + def hand(self): # -> Unknown: + ... 
+ @property + def task_observables(self): # -> OrderedDict[Unknown, Unknown]: + ... + def initialize_episode(self, physics, random_state): # -> None: + ... + def get_reward(self, physics): # -> float | NDArray[floating[Any]]: + ... + +@registry.add(tags.FEATURES) +def place_brick_features(): # -> Place: + ... + +@registry.add(tags.VISION) +def place_brick_vision(): # -> Place: + ... + +@registry.add(tags.FEATURES) +def place_cradle_features(): # -> Place: + ... + +@registry.add(tags.VISION) +def place_cradle_vision(): # -> Place: + ... diff --git a/typings/dm_control/manipulation/reach.pyi b/typings/dm_control/manipulation/reach.pyi new file mode 100644 index 00000000..f8a1fdca --- /dev/null +++ b/typings/dm_control/manipulation/reach.pyi @@ -0,0 +1,64 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import composer +from dm_control.manipulation.shared import registry, tags + +"""A task where the goal is to move the hand close to a target prop or site.""" +_ReachWorkspace = ... +_PROP_Z_OFFSET = ... +_DUPLO_WORKSPACE = ... +_SITE_WORKSPACE = ... +_TARGET_RADIUS = ... + +class Reach(composer.Task): + """Bring the hand close to a target prop or site.""" + + def __init__(self, arena, arm, hand, prop, obs_settings, workspace, control_timestep) -> None: + """Initializes a new `Reach` task. + + Args: + arena: `composer.Entity` instance. + arm: `robot_base.RobotArm` instance. + hand: `robot_base.RobotHand` instance. + prop: `composer.Entity` instance specifying the prop to reach to, or None + in which case the target is a fixed site whose position is specified by + the workspace. + obs_settings: `observations.ObservationSettings` instance. + workspace: `_ReachWorkspace` specifying the placement of the prop and TCP. + control_timestep: Float specifying the control timestep in seconds. + """ + ... + @property + def root_entity(self): # -> Unknown: + ... + @property + def arm(self): # -> Unknown: + ... 
+ @property + def hand(self): # -> Unknown: + ... + @property + def task_observables(self): # -> OrderedDict[Unknown, Unknown]: + ... + def get_reward(self, physics): # -> float | NDArray[Any]: + ... + def initialize_episode(self, physics, random_state): # -> None: + ... + +@registry.add(tags.FEATURES, tags.EASY) +def reach_duplo_features(): # -> Reach: + ... + +@registry.add(tags.VISION, tags.EASY) +def reach_duplo_vision(): # -> Reach: + ... + +@registry.add(tags.FEATURES, tags.EASY) +def reach_site_features(): # -> Reach: + ... + +@registry.add(tags.VISION, tags.EASY) +def reach_site_vision(): # -> Reach: + ... diff --git a/typings/dm_control/manipulation/shared/__init__.pyi b/typings/dm_control/manipulation/shared/__init__.pyi new file mode 100644 index 00000000..cea7ef96 --- /dev/null +++ b/typings/dm_control/manipulation/shared/__init__.pyi @@ -0,0 +1,3 @@ +""" +This type stub file was generated by pyright. +""" diff --git a/typings/dm_control/manipulation/shared/arenas.pyi b/typings/dm_control/manipulation/shared/arenas.pyi new file mode 100644 index 00000000..b66972f5 --- /dev/null +++ b/typings/dm_control/manipulation/shared/arenas.pyi @@ -0,0 +1,23 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import composer + +"""Suite-specific arena class.""" + +class Standard(composer.Arena): + """Suite-specific subclass of the standard Composer arena.""" + + def attach_offset(self, entity, offset, attach_site=...): + """Attaches another entity at a position offset from the attachment site. + + Args: + entity: The `Entity` to attach. + offset: A length 3 array-like object representing the XYZ offset. + attach_site: (optional) The site to which to attach the entity's model. + If not set, defaults to self.attachment_site. + Returns: + The frame of the attached model. + """ + ... 
diff --git a/typings/dm_control/manipulation/shared/cameras.pyi b/typings/dm_control/manipulation/shared/cameras.pyi new file mode 100644 index 00000000..7d4ce3a1 --- /dev/null +++ b/typings/dm_control/manipulation/shared/cameras.pyi @@ -0,0 +1,25 @@ +""" +This type stub file was generated by pyright. +""" + +"""Tools for adding custom cameras to the arena.""" +CameraSpec = ... +FRONT_CLOSE = ... +FRONT_FAR = ... +TOP_DOWN = ... +LEFT_CLOSE = ... +RIGHT_CLOSE = ... + +def add_camera_observables(entity, obs_settings, *camera_specs): # -> OrderedDict[Unknown, Unknown]: + """Adds cameras to an entity's worldbody and configures observables for them. + + Args: + entity: A `composer.Entity`. + obs_settings: An `observations.ObservationSettings` instance. + *camera_specs: Instances of `CameraSpec`. + + Returns: + A `collections.OrderedDict` keyed on camera names, containing pre-configured + `observable.MJCFCamera` instances. + """ + ... diff --git a/typings/dm_control/manipulation/shared/constants.pyi b/typings/dm_control/manipulation/shared/constants.pyi new file mode 100644 index 00000000..edb77e00 --- /dev/null +++ b/typings/dm_control/manipulation/shared/constants.pyi @@ -0,0 +1,13 @@ +""" +This type stub file was generated by pyright. +""" + +"""Global constants used in manipulation tasks.""" +CONTROL_TIMESTEP = ... +RED = ... +GREEN = ... +BLUE = ... +CYAN = ... +MAGENTA = ... +YELLOW = ... +TASK_SITE_GROUP = ... diff --git a/typings/dm_control/manipulation/shared/observations.pyi b/typings/dm_control/manipulation/shared/observations.pyi new file mode 100644 index 00000000..95b5f63d --- /dev/null +++ b/typings/dm_control/manipulation/shared/observations.pyi @@ -0,0 +1,60 @@ +""" +This type stub file was generated by pyright. 
+""" + +import collections + +"""Shared configuration options for observations.""" + +class ObservableSpec( + collections.namedtuple( + "ObservableSpec", ["enabled", "update_interval", "buffer_size", "delay", "aggregator", "corruptor"] + ) +): + """Configuration options for generic observables.""" + + __slots__ = ... + +class CameraObservableSpec( + collections.namedtuple("CameraObservableSpec", ("height", "width") + ObservableSpec._fields) +): + """Configuration options for camera observables.""" + + __slots__ = ... + +class ObservationSettings(collections.namedtuple("ObservationSettings", ["proprio", "ftt", "prop_pose", "camera"])): + """Container of `ObservableSpecs` grouped by category.""" + + __slots__ = ... + +class ObservableNames(collections.namedtuple("ObservableNames", ["proprio", "ftt", "prop_pose", "camera"])): + """Container that groups the names of observables by category.""" + + __slots__ = ... + def __new__(cls, proprio=..., ftt=..., prop_pose=..., camera=...): # -> Self@ObservableNames: + ... + +_DISABLED_FEATURE = ... +_ENABLED_FEATURE = ... +_symlog1p = ... +_DISABLED_FTT = ... +_ENABLED_FTT = ... +_DISABLED_CAMERA = ... +_ENABLED_CAMERA = ... +PERFECT_FEATURES = ... +VISION = ... +JACO_ARM_OBSERVABLES = ... +JACO_HAND_OBSERVABLES = ... +FREEPROP_OBSERVABLES = ... + +def make_options(obs_settings, obs_names): # -> dict[Unknown, Unknown]: + """Constructs a dict of configuration options for a set of named observables. + + Args: + obs_settings: An `ObservationSettings` instance. + obs_names: An `ObservableNames` instance. + + Returns: + A nested dict containing `{observable_name: {option_name: value}}`. + """ + ... diff --git a/typings/dm_control/manipulation/shared/registry.pyi b/typings/dm_control/manipulation/shared/registry.pyi new file mode 100644 index 00000000..c94a752a --- /dev/null +++ b/typings/dm_control/manipulation/shared/registry.pyi @@ -0,0 +1,14 @@ +""" +This type stub file was generated by pyright. 
+""" + +"""A global registry of constructors for manipulation environments.""" +_ALL_CONSTRUCTORS = ... +add = ... +get_constructor = ... +get_all_names = ... +get_tags = ... +get_names_by_tag = ... + +def done_importing_tasks(): # -> None: + ... diff --git a/typings/dm_control/manipulation/shared/robots.pyi b/typings/dm_control/manipulation/shared/robots.pyi new file mode 100644 index 00000000..71fa3de4 --- /dev/null +++ b/typings/dm_control/manipulation/shared/robots.pyi @@ -0,0 +1,28 @@ +""" +This type stub file was generated by pyright. +""" + +"""Custom robot constructors with manipulation-specific defaults.""" +ARM_OFFSET = ... + +def make_arm(obs_settings): # -> JacoArm: + """Constructs a robot arm with manipulation-specific defaults. + + Args: + obs_settings: `observations.ObservationSettings` instance. + + Returns: + An instance of `manipulators.base.RobotArm`. + """ + ... + +def make_hand(obs_settings): # -> JacoHand: + """Constructs a robot hand with manipulation-specific defaults. + + Args: + obs_settings: `observations.ObservationSettings` instance. + + Returns: + An instance of `manipulators.base.RobotHand`. + """ + ... diff --git a/typings/dm_control/manipulation/shared/tags.pyi b/typings/dm_control/manipulation/shared/tags.pyi new file mode 100644 index 00000000..98092ee4 --- /dev/null +++ b/typings/dm_control/manipulation/shared/tags.pyi @@ -0,0 +1,8 @@ +""" +This type stub file was generated by pyright. +""" + +"""String constants used to annotate task constructors.""" +FEATURES = ... +VISION = ... +EASY = ... diff --git a/typings/dm_control/manipulation/shared/workspaces.pyi b/typings/dm_control/manipulation/shared/workspaces.pyi new file mode 100644 index 00000000..a6644590 --- /dev/null +++ b/typings/dm_control/manipulation/shared/workspaces.pyi @@ -0,0 +1,49 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control.entities.manipulators import base + +"""Tools for defining and visualizing workspaces for manipulation tasks. 
+ +Workspaces define distributions from which the initial positions and/or +orientations of the hand and prop(s) are sampled, plus other task-specific +spatial parameters such as target sizes. +""" +_MIN_SITE_DIMENSION = ... +_VISIBLE_GROUP = ... +_INVISIBLE_GROUP = ... +DOWN_QUATERNION = base.DOWN_QUATERNION +BoundingBox = ... +uniform_z_rotation = ... + +def add_bbox_site(body, lower, upper, visible=..., **kwargs): + """Adds a site for visualizing a bounding box to an MJCF model. + + Args: + body: An `mjcf.Element`, the (world)body to which the site should be added. + lower: A sequence of lower x,y,z bounds. + upper: A sequence of upper x,y,z bounds. + visible: Whether the site should be visible by default. + **kwargs: Keyword arguments used to set other attributes of the newly + created site. + + Returns: + An `mjcf.Element` representing the newly created site. + """ + ... + +def add_target_site(body, radius, visible=..., **kwargs): + """Adds a site for visualizing a target location. + + Args: + body: An `mjcf.Element`, the (world)body to which the site should be added. + radius: The radius of the target. + visible: Whether the site should be visible by default. + **kwargs: Keyword arguments used to set other attributes of the newly + created site. + + Returns: + An `mjcf.Element` representing the newly created site. + """ + ... diff --git a/typings/dm_control/mjcf/__init__.pyi b/typings/dm_control/mjcf/__init__.pyi new file mode 100644 index 00000000..78713f9b --- /dev/null +++ b/typings/dm_control/mjcf/__init__.pyi @@ -0,0 +1,23 @@ +""" +This type stub file was generated by pyright. 
+""" + +from dm_control.mjcf.attribute import Asset +from dm_control.mjcf.base import Element +from dm_control.mjcf.constants import PREFIX_SEPARATOR +from dm_control.mjcf.element import RootElement +from dm_control.mjcf.export_with_assets import export_with_assets +from dm_control.mjcf.export_with_assets_as_zip import export_with_assets_as_zip +from dm_control.mjcf.parser import from_file, from_path, from_xml_string +from dm_control.mjcf.physics import Physics +from dm_control.mjcf.traversal_utils import ( + commit_defaults, + get_attachment_frame, + get_frame_freejoint, + get_frame_joints, + get_freejoint, +) + +""" +This type stub file was generated by pyright. +""" diff --git a/typings/dm_control/mjcf/attribute.pyi b/typings/dm_control/mjcf/attribute.pyi new file mode 100644 index 00000000..435a3761 --- /dev/null +++ b/typings/dm_control/mjcf/attribute.pyi @@ -0,0 +1,125 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +""" +This type stub file was generated by pyright. +""" +_INVALID_REFERENCE_TYPE = ... +_MESH_EXTENSIONS = ... +_INVALID_MESH_EXTENSION = ... + +class _Attribute(metaclass=abc.ABCMeta): + """Abstract base class for MJCF attribute data types.""" + + def __init__(self, name, required, parent, value, conflict_allowed, conflict_behavior) -> None: ... + @property + def last_modified_stack(self): ... + @property + def value(self): ... + @value.setter + def value(self, new_value): ... + def clear(self): ... + def to_xml_string(self, prefix_root, **kwargs): ... + @property + def conflict_allowed(self): ... + @property + def conflict_behavior(self): ... + +class String(_Attribute): + """A string MJCF attribute.""" + + ... + +class Integer(_Attribute): + """An integer MJCF attribute.""" + + ... + +class Float(_Attribute): + """An float MJCF attribute.""" + + def to_xml_string(self, prefix_root=..., *, precision=..., zero_threshold=..., **kwargs): ... 
+ +class Keyword(_Attribute): + """A keyword MJCF attribute.""" + + def __init__(self, name, required, parent, value, conflict_allowed, conflict_behavior, valid_values) -> None: ... + @property + def valid_values(self): ... + +class Array(_Attribute): + """An array MJCF attribute.""" + + def __init__(self, name, required, parent, value, conflict_allowed, conflict_behavior, length, dtype) -> None: ... + def to_xml_string(self, prefix_root=..., *, precision=..., zero_threshold=..., **kwargs): ... + +class Identifier(_Attribute): + """A string attribute that represents a unique identifier of an element.""" + + def to_xml_string(self, prefix_root=..., **kwargs): ... + +class Reference(_Attribute): + """A string attribute that represents a reference to an identifier.""" + + def __init__( + self, name, required, parent, value, conflict_allowed, conflict_behavior, reference_namespace + ) -> None: ... + @property + def value(self): ... + @value.setter + def value(self, new_value): ... + @property + def reference_namespace(self): ... + def to_xml_string(self, prefix_root, **kwargs): ... + +class BasePath(_Attribute): + """A string attribute that represents a base path for an asset type.""" + + def __init__(self, name, required, parent, value, conflict_allowed, conflict_behavior, path_namespace) -> None: ... + def to_xml_string(self, prefix_root=..., **kwargs): ... + +class BaseAsset: + """Base class for binary assets.""" + + __slots__ = ... + def __init__(self, extension, prefix=...) -> None: ... + def __eq__(self, other) -> bool: ... + def get_vfs_filename(self): + """Returns the name of the asset file as registered in MuJoCo's VFS.""" + ... + +class Asset(BaseAsset): + """Class representing a binary asset.""" + + __slots__ = ... + def __init__(self, contents, extension, prefix=...) -> None: + """Initializes a new `Asset`. + + Args: + contents: The contents of the file as a bytestring. + extension: A string specifying the file extension (e.g. '.png', '.stl'). 
+ prefix: (optional) A prefix applied to the filename given in MuJoCo's VFS. + """ + ... + +class SkinAsset(BaseAsset): + """Class representing a binary asset corresponding to a skin.""" + + __slots__ = ... + def __init__(self, contents, parent, extension, prefix=...) -> None: ... + @property + def contents(self): ... + +class File(_Attribute): + """Attribute representing an asset file.""" + + def __init__(self, name, required, parent, value, conflict_allowed, conflict_behavior, path_namespace) -> None: ... + def get_contents(self): + """Returns a bytestring representing the contents of the asset.""" + ... + def to_xml_string(self, prefix_root=..., **kwargs): + """Returns the asset filename as it will appear in the generated XML.""" + ... diff --git a/typings/dm_control/mjcf/base.pyi b/typings/dm_control/mjcf/base.pyi new file mode 100644 index 00000000..76f2895c --- /dev/null +++ b/typings/dm_control/mjcf/base.pyi @@ -0,0 +1,236 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +""" +This type stub file was generated by pyright. +""" + +class Element(metaclass=abc.ABCMeta): + """Abstract base class for an MJCF element. + + This class is provided so that `isinstance(foo, Element)` is `True` for all + Element-like objects. We do not implement the actual element here because + the actual object returned from traversing the object hierarchy is a + weakproxy-like proxy to an actual element. This is because we do not allow + orphaned non-root elements, so when a particular element is removed from the + tree, all references held automatically become invalid. + """ + + __slots__ = ... + @abc.abstractmethod + def get_init_stack(self): + """Gets the stack trace where this element was first initialized.""" + ... + @abc.abstractmethod + def get_last_modified_stacks_for_all_attributes(self): + """Gets a dict of stack traces where each attribute was last modified.""" + ... 
+ @abc.abstractmethod + def is_same_as(self, other): + """Checks whether another element is semantically equivalent to this one. + + Two elements are considered equivalent if they have the same + specification (i.e. same tag appearing in the same context), the same + attribute values, and all of their children are equivalent. The ordering + of non-repeated children is not important for this comparison, while + the ordering of repeated children are important only amongst the same + type* of children. In other words, for two bodies to be considered + equivalent, their child sites must appear in the same order, and their + child geoms must appear in the same order, but permutations between sites + and geoms are disregarded. (The only exception is in tendon definition, + where strict ordering of all children is necessary for equivalence.) + + *Note that the notion of "same type" in this function is very loose: + for example different actuator element subtypes are treated as separate + types when children ordering is considered. Therefore, two + elements might be considered equivalent even though they result in different + orderings of `mjData.ctrl` when compiled. As it stands, this function + is designed primarily as a testing aid and should not be used to guarantee + that models are actually identical. + + Args: + other: An `mjcf.Element` + + Returns: + `True` if `other` element is semantically equivalent to this one. + """ + ... + @property + @abc.abstractmethod + def tag(self): ... + @property + @abc.abstractmethod + def spec(self): ... + @property + @abc.abstractmethod + def parent(self): ... + @property + @abc.abstractmethod + def namescope(self): ... + @property + @abc.abstractmethod + def root(self): ... + @abc.abstractmethod + def prefixed_identifier(self, prefix_root): ... + @property + @abc.abstractmethod + def full_identifier(self): + """Fully-qualified identifier used for this element in the generated XML.""" + ... 
+ @abc.abstractmethod + def find(self, namespace, identifier): + """Finds an element with a particular identifier. + + This function allows the direct access to an arbitrarily deeply nested + child element by name, without the need to manually traverse through the + object tree. The `namespace` argument specifies the kind of element to + find. In most cases, this corresponds to the element's XML tag name. + However, if an element has multiple specialized tags, then the namespace + corresponds to the tag name of the most general element of that kind. + For example, `namespace='joint'` would search for `` and + ``, while `namespace='actuator'` would search for ``, + ``, ``, ``, and ``. + + Args: + namespace: A string specifying the namespace being searched. See the + docstring above for explanation. + identifier: The identifier string of the desired element. + + Returns: + An `mjcf.Element` object, or `None` if an element with the specified + identifier is not found. + + Raises: + ValueError: if either `namespace` or `identifier` is not a string, or if + `namespace` is not a valid namespace. + """ + ... + @abc.abstractmethod + def find_all(self, namespace, immediate_children_only=..., exclude_attachments=...): + """Finds all elements of a particular kind. + + The `namespace` argument specifies the kind of element to + find. In most cases, this corresponds to the element's XML tag name. + However, if an element has multiple specialized tags, then the namespace + corresponds to the tag name of the most general element of that kind. + For example, `namespace='joint'` would search for `` and + ``, while `namespace='actuator'` would search for ``, + ``, ``, ``, and ``. + + Args: + namespace: A string specifying the namespace being searched. See the + docstring above for explanation. + immediate_children_only: (optional) A boolean, if `True` then only + the immediate children of this element are returned. 
+ exclude_attachments: (optional) A boolean, if `True` then elements + belonging to attached models are excluded. + + Returns: + A list of `mjcf.Element`. + + Raises: + ValueError: if `namespace` is not a valid namespace. + """ + ... + @abc.abstractmethod + def enter_scope(self, scope_identifier): + """Finds the root element of the given scope and returns it. + + This function allows the access to a nested scope that is a child of this + element. The `scope_identifier` argument specifies the path to the child + scope element. + + Args: + scope_identifier: The path of the desired scope element. + + Returns: + An `mjcf.Element` object, or `None` if a scope element with the + specified path is not found. + """ + ... + @abc.abstractmethod + def get_attribute_xml_string(self, attribute_name, prefix_root=...): ... + @abc.abstractmethod + def get_attributes(self): ... + @abc.abstractmethod + def set_attributes(self, **kwargs): ... + @abc.abstractmethod + def get_children(self, element_name): ... + @abc.abstractmethod + def add(self, element_name, **kwargs): + """Add a new child element to this element. + + Args: + element_name: The tag of the element to add. + **kwargs: Attributes of the new element being created. + + Raises: + ValueError: If the 'element_name' is not a valid child, or if an invalid + attribute is specified in `kwargs`. + + Returns: + An `mjcf.Element` corresponding to the newly created child element. + """ + ... + @abc.abstractmethod + def remove(self, affect_attachments=...): + """Removes this element from the model.""" + ... + @property + @abc.abstractmethod + def is_removed(self): ... + @abc.abstractmethod + def all_children(self): ... + @abc.abstractmethod + def to_xml(self, prefix_root=..., debug_context=..., *, precision=..., zero_threshold=...): + """Generates an etree._Element corresponding to this MJCF element. + + Args: + prefix_root: (optional) A `NameScope` object to be treated as root + for the purpose of calculating the prefix. 
+                If `None` then no prefix is included.
+            debug_context: (optional) A `debugging.DebugContext` object to which
+                the debugging information associated with the generated XML is written.
+                This is intended for internal use within PyMJCF; users should never need
+                manually pass this argument.
+            precision: (optional) Number of digits to output for floating point
+                quantities.
+            zero_threshold: (optional) When outputting XML, floating point quantities
+                whose absolute value falls below this threshold will be treated as zero.
+
+        Returns:
+            An etree._Element object.
+        """
+        ...
+    @abc.abstractmethod
+    def to_xml_string(
+        self, prefix_root=..., self_only=..., pretty_print=..., debug_context=..., *, precision=..., zero_threshold=...
+    ):
+        """Generates an XML string corresponding to this MJCF element.
+
+        Args:
+            prefix_root: (optional) A `NameScope` object to be treated as root
+                for the purpose of calculating the prefix.
+                If `None` then no prefix is included.
+            self_only: (optional) A boolean, whether to generate an XML corresponding
+                only to this element without any children.
+            pretty_print: (optional) A boolean, whether the XML string should be
+                properly indented.
+            debug_context: (optional) A `debugging.DebugContext` object to which
+                the debugging information associated with the generated XML is written.
+                This is intended for internal use within PyMJCF; users should never need
+                manually pass this argument.
+            precision: (optional) Number of digits to output for floating point
+                quantities.
+            zero_threshold: (optional) When outputting XML, floating point quantities
+                whose absolute value falls below this threshold will be treated as zero.
+
+        Returns:
+            A string.
+        """
+        ...
+    @abc.abstractmethod
+    def resolve_references(self): ...
diff --git a/typings/dm_control/mjcf/constants.pyi b/typings/dm_control/mjcf/constants.pyi new file mode 100644 index 00000000..df4af1cf --- /dev/null +++ b/typings/dm_control/mjcf/constants.pyi @@ -0,0 +1,30 @@ +""" +This type stub file was generated by pyright. +""" + +""" +This type stub file was generated by pyright. +""" +PREFIX_SEPARATOR = ... +PREFIX_SEPARATOR_ESCAPE = ... +NAMESPACE_SEPARATOR = ... +BASEPATH = ... +CHILDCLASS = ... +CLASS = ... +DEFAULT = ... +DCLASS = ... +ACTUATOR = ... +BODY = ... +DEFAULT = ... +MESH = ... +SITE = ... +SKIN = ... +TENDON = ... +WORLDBODY = ... +MJDATA_TRIGGERS_DIRTY = ... +MJMODEL_DOESNT_TRIGGER_DIRTY = ... +MJMODEL_DISABLE_ON_WRITE = ... +MAX_VFS_FILENAME_LENGTH = ... +INDIRECT_REFERENCE_NAMESPACE_PREFIX = ... +INDIRECT_REFERENCE_ATTRIB = ... +XML_DEFAULT_PRECISION = ... diff --git a/typings/dm_control/mjcf/copier.pyi b/typings/dm_control/mjcf/copier.pyi new file mode 100644 index 00000000..13b6566b --- /dev/null +++ b/typings/dm_control/mjcf/copier.pyi @@ -0,0 +1,15 @@ +""" +This type stub file was generated by pyright. +""" + +""" +This type stub file was generated by pyright. +""" + +class Copier: + """Helper for keeping track of new elements created when copying MJCF.""" + + def __init__(self, source) -> None: ... + def copy_into(self, destination, override_attributes=...): + """Copies this copier's element into a destination MJCF element.""" + ... diff --git a/typings/dm_control/mjcf/debugging.pyi b/typings/dm_control/mjcf/debugging.pyi new file mode 100644 index 00000000..d3204a6d --- /dev/null +++ b/typings/dm_control/mjcf/debugging.pyi @@ -0,0 +1,137 @@ +""" +This type stub file was generated by pyright. +""" + +import contextlib + +""" +This type stub file was generated by pyright. +""" +FLAGS = ... +StackTraceEntry = ... +ElementDebugInfo = ... +MODULE_PATH = ... +DEBUG_METADATA_PREFIX = ... +_DEBUG_METADATA_TAG_PREFIX = ... +_DEBUG_METADATA_SEARCH_PATTERN = ... +_CURRENT_FROZEN_STACK = ... 
+_DEBUG_MODE_ENABLED = ... +_DEBUG_FULL_DUMP_DIR = ... + +def debug_mode(): + """Returns a boolean that indicates whether PyMJCF debug mode is enabled.""" + ... + +def enable_debug_mode(): + """Enables PyMJCF debug mode.""" + ... + +def disable_debug_mode(): + """Disables PyMJCF debug mode.""" + ... + +def get_full_dump_dir(): + """Gets the directory to dump full debug info files.""" + ... + +def set_full_dump_dir(dump_path): + """Sets the directory to dump full debug info files.""" + ... + +def get_current_stack_trace(): + """Returns the stack trace of the current execution frame. + + Returns: + A list of `StackTraceEntry` named tuples corresponding to the current stack + trace of the process, truncated to immediately before entry into + PyMJCF internal code. + """ + ... + +@contextlib.contextmanager +def freeze_current_stack_trace(): + """A context manager that freezes the stack trace. + + AVOID USING THIS CONTEXT MANAGER OUTSIDE OF INTERNAL PYMJCF IMPLEMENTATION, + AS IT REDUCES THE USEFULNESS OF DEBUG MODE. + + If PyMJCF debug mode is enabled, calls to `debugging.get_current_stack_trace` + within this context will always return the stack trace from when this context + was entered. + + The frozen stack is global to this debugging module. That is, if the context + is entered while another one is still active, then the stack trace of the + outermost one is returned. + + This context significantly speeds up bulk operations in debug mode, e.g. + parsing an existing XML string or creating a deeply-nested element, as it + prevents the same stack trace from being repeatedly constructed. + + Yields: + `None` + """ + ... + +class DebugContext: + """A helper object to store debug information for a generated XML string. + + This class is intended for internal use within the PyMJCF implementation. + """ + + def __init__(self) -> None: ... + def register_element_for_debugging(self, elem): + """Registers an `Element` and returns debugging metadata for the XML. 
+ + Args: + elem: An `mjcf.Element`. + + Returns: + An `lxml.etree.Comment` that represents debugging metadata in the + generated XML. + """ + ... + def commit_xml_string(self, xml_string): + """Commits the XML string associated with this debug context. + + This function also formats the XML string to make sure that the debugging + metadata appears on the same line as the corresponding XML element. + + Args: + xml_string: A pretty-printed XML string. + + Returns: + A reformatted XML string where all debugging metadata appears on the same + line as the corresponding XML element. + """ + ... + def process_and_raise_last_exception(self): + """Processes and re-raises the last ValueError caught. + + This function will insert the relevant line from the source XML to the error + message. If debug mode is enabled, additional debugging information is + appended to the error message. If debug mode is not enabled, the error + message instructs the user to enable it by rerunning the executable with an + appropriate flag. + """ + ... + @property + def default_dump_dir(self): ... + @property + def debug_mode(self): ... + def dump_full_debug_info_to_disk(self, dump_dir=...): + """Dumps full debug information to disk. + + Full debug information consists of an XML file whose elements are tagged + with a unique ID, and a stack trace file for each element ID. Each stack + trace file consists of a stack trace for when the element was created, and + when each attribute was last modified. + + Args: + dump_dir: Full path to the directory in which dump files are created. + + Raises: + ValueError: If neither `dump_dir` nor the global dump path is given. The + global dump path can be specified either via the + --pymjcf_debug_full_dump_dir flag or via `debugging.set_full_dump_dir`. + """ + ... 
diff --git a/typings/dm_control/mjcf/element.pyi b/typings/dm_control/mjcf/element.pyi new file mode 100644 index 00000000..67e3e6fa --- /dev/null +++ b/typings/dm_control/mjcf/element.pyi @@ -0,0 +1,393 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control.mjcf import base + +""" +This type stub file was generated by pyright. +""" +_raw_property = ... +_UNITS = ... +_CONFLICT_BEHAVIOR_FUNC = ... + +def property(method): + """Modifies `@property` to keep track of any `AttributeError` raised. + + Our `Element` implementations overrides the `__getattr__` method. This does + not interact well with `@property`: if a `property`'s code is buggy so as to + raise an `AttributeError`, then Python would silently discard it and redirect + to our `__getattr__` instead, leading to an uninformative stack trace. This + makes it very difficult to debug issues that involve properties. + + To remedy this, we modify `@property` within this module to store any + `AttributeError` raised within the respective `Element` object. Then, in our + `__getattr__` logic, we could re-raise it to preserve the original stack + trace. + + The reason that this is not implemented as a different decorator is that we + could accidentally use @property on a new method. This would work fine until + someone triggers a subtle bug. This is when a proper trace would be most + useful, but we would still end up with a strange undebuggable stack trace + anyway. + + Note that at the end of this module, we have a `del property` to prevent this + override from being broadcasted externally. + + Args: + method: The method that is being decorated. + + Returns: + A `property` corresponding to the decorated method. + """ + ... + +_DEFAULT_NAME_FROM_FILENAME = ... + +class _ElementImpl(base.Element): + """Actual implementation of a generic MJCF element object.""" + + __slots__ = ... + def __init__(self, spec, parent, attributes=...) -> None: ... 
+ def get_init_stack(self): + """Gets the stack trace where this element was first initialized.""" + ... + def get_last_modified_stacks_for_all_attributes(self): + """Gets a dict of stack traces where each attribute was last modified.""" + ... + def is_same_as(self, other): + """Checks whether another element is semantically equivalent to this one. + + Two elements are considered equivalent if they have the same + specification (i.e. same tag appearing in the same context), the same + attribute values, and all of their children are equivalent. The ordering + of non-repeated children is not important for this comparison, while + the ordering of repeated children are important only amongst the same + type* of children. In other words, for two bodies to be considered + equivalent, their child sites must appear in the same order, and their + child geoms must appear in the same order, but permutations between sites + and geoms are disregarded. (The only exception is in tendon definition, + where strict ordering of all children is necessary for equivalence.) + + *Note that the notion of "same type" in this function is very loose: + for example different actuator element subtypes are treated as separate + types when children ordering is considered. Therefore, two + elements might be considered equivalent even though they result in different + orderings of `mjData.ctrl` when compiled. As it stands, this function + is designed primarily as a testing aid and should not be used to guarantee + that models are actually identical. + + Args: + other: An `mjcf.Element` + + Returns: + `True` if `other` element is semantically equivalent to this one. + """ + ... + @property + def tag(self): ... + @property + def spec(self): ... + @property + def parent(self): ... + @property + def namescope(self): ... + @property + def root(self): ... + def prefixed_identifier(self, prefix_root): ... 
+ @property + def full_identifier(self): + """Fully-qualified identifier used for this element in the generated XML.""" + ... + def __dir__(self): ... + def find(self, namespace, identifier): + """Finds an element with a particular identifier. + + This function allows the direct access to an arbitrarily deeply nested + child element by name, without the need to manually traverse through the + object tree. The `namespace` argument specifies the kind of element to + find. In most cases, this corresponds to the element's XML tag name. + However, if an element has multiple specialized tags, then the namespace + corresponds to the tag name of the most general element of that kind. + For example, `namespace='joint'` would search for `` and + ``, while `namespace='actuator'` would search for ``, + ``, ``, ``, and ``. + + Args: + namespace: A string specifying the namespace being searched. See the + docstring above for explanation. + identifier: The identifier string of the desired element. + + Returns: + An `mjcf.Element` object, or `None` if an element with the specified + identifier is not found. + + Raises: + ValueError: if either `namespace` or `identifier` is not a string, or if + `namespace` is not a valid namespace. + """ + ... + def find_all(self, namespace, immediate_children_only=..., exclude_attachments=...): + """Finds all elements of a particular kind. + + The `namespace` argument specifies the kind of element to + find. In most cases, this corresponds to the element's XML tag name. + However, if an element has multiple specialized tags, then the namespace + corresponds to the tag name of the most general element of that kind. + For example, `namespace='joint'` would search for `` and + ``, while `namespace='actuator'` would search for ``, + ``, ``, ``, and ``. + + Args: + namespace: A string specifying the namespace being searched. See the + docstring above for explanation. 
+ immediate_children_only: (optional) A boolean, if `True` then only + the immediate children of this element are returned. + exclude_attachments: (optional) A boolean, if `True` then elements + belonging to attached models are excluded. + + Returns: + A list of `mjcf.Element`. + + Raises: + ValueError: if `namespace` is not a valid namespace. + """ + ... + def enter_scope(self, scope_identifier): + """Finds the root element of the given scope and returns it. + + This function allows the access to a nested scope that is a child of this + element. The `scope_identifier` argument specifies the path to the child + scope element. + + Args: + scope_identifier: The path of the desired scope element. + + Returns: + An `mjcf.Element` object, or `None` if a scope element with the + specified path is not found. + """ + ... + def get_attribute_xml_string(self, attribute_name, prefix_root=..., *, precision=..., zero_threshold=...): ... + def get_attributes(self): ... + def set_attributes(self, **kwargs): ... + def get_children(self, element_name): ... + def add(self, element_name, **kwargs): + """Add a new child element to this element. + + Args: + element_name: The tag of the element to add. + **kwargs: Attributes of the new element being created. + + Raises: + ValueError: If the 'element_name' is not a valid child, or if an invalid + attribute is specified in `kwargs`. + + Returns: + An `mjcf.Element` corresponding to the newly created child element. + """ + ... + def insert(self, element_name, position, **kwargs): + """Add a new child element to this element. + + Args: + element_name: The tag of the element to add. + position: Where to insert the new element. + **kwargs: Attributes of the new element being created. + + Raises: + ValueError: If the 'element_name' is not a valid child, or if an invalid + attribute is specified in `kwargs`. + + Returns: + An `mjcf.Element` corresponding to the newly created child element. + """ + ... + def __getattr__(self, name): ... 
+ def __setattr__(self, name, value): ... + def __delattr__(self, name): ... + def remove(self, affect_attachments=...): + """Removes this element from the model.""" + ... + @property + def is_removed(self): ... + def all_children(self): ... + def to_xml(self, prefix_root=..., debug_context=..., *, precision=..., zero_threshold=...): + """Generates an etree._Element corresponding to this MJCF element. + + Args: + prefix_root: (optional) A `NameScope` object to be treated as root + for the purpose of calculating the prefix. + If `None` then no prefix is included. + debug_context: (optional) A `debugging.DebugContext` object to which + the debugging information associated with the generated XML is written. + This is intended for internal use within PyMJCF; users should never need + manually pass this argument. + precision: (optional) Number of digits to output for floating point + quantities. + zero_threshold: (optional) When outputting XML, floating point quantities + whose absolute value falls below this threshold will be treated as zero. + + Returns: + An etree._Element object. + """ + ... + def to_xml_string( + self, prefix_root=..., self_only=..., pretty_print=..., debug_context=..., *, precision=..., zero_threshold=... + ): + """Generates an XML string corresponding to this MJCF element. + + Args: + prefix_root: (optional) A `NameScope` object to be treated as root + for the purpose of calculating the prefix. + If `None` then no prefix is included. + self_only: (optional) A boolean, whether to generate an XML corresponding + only to this element without any children. + pretty_print: (optional) A boolean, whether to the XML string should be + properly indented. + debug_context: (optional) A `debugging.DebugContext` object to which + the debugging information associated with the generated XML is written. + This is intended for internal use within PyMJCF; users should never need + manually pass this argument. 
+            precision: (optional) Number of digits to output for floating point
+                quantities.
+            zero_threshold: (optional) When outputting XML, floating point quantities
+                whose absolute value falls below this threshold will be treated as zero.
+
+        Returns:
+            A string.
+        """
+        ...
+    def __str__(self) -> str: ...
+    def __repr__(self): ...
+    def resolve_references(self): ...
+
+class _AttachableElement(_ElementImpl):
+    """Specialized object representing a site or worldbody element.
+
+    This element defines a frame to which another MJCF model can be attached.
+    """
+
+    __slots__ = ...
+    def attach(self, attachment):
+        """Attaches another MJCF model at this site.
+
+        An empty body will be created as an attachment frame. All children of
+        `attachment`'s worldbody will be treated as children of this frame.
+        Furthermore, all other elements in `attachment` are merged into the root
+        of the MJCF model to which this element belongs.
+
+        Args:
+            attachment: An MJCF `RootElement`
+
+        Returns:
+            An `mjcf.Element` corresponding to the attachment frame. A joint can be
+            added directly to this frame to give degrees of freedom to the attachment.
+
+        Raises:
+            ValueError: If `other` is not a valid attachment to this element.
+        """
+        ...
+
+class _AttachmentFrame(_ElementImpl):
+    """A specialized body element representing a frame holding an external attachment."""
+
+    __slots__ = ...
+    def __init__(self, parent, site, attachment) -> None: ...
+    def prefixed_identifier(self, prefix_root=...): ...
+    def to_xml(self, prefix_root=..., debug_context=..., *, precision=..., zero_threshold=...): ...
+    @property
+    def full_identifier(self): ...
+
+class _AttachmentFrameChild(_ElementImpl):
+    """A child element of an attachment frame.
+
+    Right now, this is always a freejoint or a joint. The name of the joint
+    is not freely specifiable, but instead just inherits from the parent frame.
+    This ensures uniqueness, as attachment frame identifiers always end in '/'.
+    """
+
+    __slots__ = ...
+ def to_xml(self, prefix_root=..., debug_context=..., *, precision=..., zero_threshold=...): ... + def prefixed_identifier(self, prefix_root=...): ... + +class _DefaultElement(_ElementImpl): + """Specialized object representing a element. + + This is necessary for the proper handling of global defaults. + """ + + __slots__ = ... + def all_children(self): ... + def to_xml(self, prefix_root=..., debug_context=..., *, precision=..., zero_threshold=...): ... + +class _ActuatorElement(_ElementImpl): + """Specialized object representing an element.""" + + __slots__ = ... + +class RootElement(_ElementImpl): + """The root `` element of an MJCF model.""" + + __slots__ = ... + def __init__(self, model=..., model_dir=..., assets=...) -> None: ... + @property + def namescope(self): ... + @property + def root(self): ... + @property + def model(self): ... + @model.setter + def model(self, new_name): ... + def attach(self, other): ... + def detach(self): ... + def include_copy(self, other, override_attributes=...): ... + @property + def parent_model(self): + """The RootElement of the MJCF model to which this one is attached.""" + ... + @property + def root_model(self): ... + def get_assets(self): + """Returns a dict containing the binary assets referenced in this model. + + This will contain `{vfs_filename: contents}` pairs. `vfs_filename` will be + the name of the asset in MuJoCo's Virtual File System, which corresponds to + the filename given in the XML returned by `to_xml_string()`. `contents` is a + bytestring. + + This dict can be used together with the result of `to_xml_string()` to + construct a `mujoco.Physics` instance: + + ```python + physics = mujoco.Physics.from_xml_string( + xml_string=mjcf_model.to_xml_string(), + assets=mjcf_model.get_assets()) + ``` + """ + ... + @property + def full_identifier(self): ... + def __copy__(self): ... + def __deepcopy__(self, _): ... + def is_same_as(self, other): ... 
+ +class _ElementListView: + """A hybrid list/dict-like view to a group of repeated MJCF elements.""" + + def __init__(self, spec, parent) -> None: ... + @property + def spec(self): ... + @property + def tag(self): ... + @property + def namescope(self): ... + @property + def parent(self): ... + def __len__(self): ... + def __iter__(self): ... + def clear(self): ... + def __getitem__(self, index): ... + def __delitem__(self, index): ... + def __str__(self) -> str: ... + def __repr__(self): ... diff --git a/typings/dm_control/mjcf/export_with_assets.pyi b/typings/dm_control/mjcf/export_with_assets.pyi new file mode 100644 index 00000000..4bbfcbdb --- /dev/null +++ b/typings/dm_control/mjcf/export_with_assets.pyi @@ -0,0 +1,29 @@ +""" +This type stub file was generated by pyright. +""" + +""" +This type stub file was generated by pyright. +""" + +def export_with_assets(mjcf_model, out_dir, out_file_name=..., *, precision=..., zero_threshold=...): + """Saves mjcf.model in the given directory in MJCF (XML) format. + + Creates an MJCF XML file named `out_file_name` in the specified `out_dir`, and + writes all of its assets into the same directory. + + Args: + mjcf_model: `mjcf.RootElement` instance to export. + out_dir: Directory to save the model and assets. Will be created if it does + not already exist. + out_file_name: (Optional) Name of the XML file to create. Defaults to the + model name (`mjcf_model.model`) suffixed with '.xml'. + precision: (optional) Number of digits to output for floating point + quantities. + zero_threshold: (optional) When outputting XML, floating point quantities + whose absolute value falls below this threshold will be treated as zero. + + Raises: + ValueError: If `out_file_name` is a string that does not end with '.xml'. + """ + ... 
diff --git a/typings/dm_control/mjcf/export_with_assets_as_zip.pyi b/typings/dm_control/mjcf/export_with_assets_as_zip.pyi new file mode 100644 index 00000000..bc8347ca --- /dev/null +++ b/typings/dm_control/mjcf/export_with_assets_as_zip.pyi @@ -0,0 +1,30 @@ +""" +This type stub file was generated by pyright. +""" + +""" +This type stub file was generated by pyright. +""" + +def export_with_assets_as_zip(mjcf_model, out_dir, model_name=..., *, precision=..., zero_threshold=...): + """Saves mjcf_model and all its assets as a .zip file in the given directory. + + Creates a .zip file named `model_name`.zip in the specified `out_dir`, and a + directory inside of this file named `model_name`. The MJCF XML is written into + this directory with the name `model_name`.xml, and all the assets are also + written into this directory without changing their names. + + Args: + mjcf_model: `mjcf.RootElement` instance to export. + out_dir: Directory to save the .zip file. Will be created if it does not + already exist. + model_name: (Optional) Name of the .zip file, the name of the directory + inside the .zip root containing the model and assets, and name of the XML + file inside this directory. Defaults to the MJCF model name + (`mjcf_model.model`). + precision: (optional) Number of digits to output for floating point + quantities. + zero_threshold: (optional) When outputting XML, floating point quantities + whose absolute value falls below this threshold will be treated as zero. + """ + ... diff --git a/typings/dm_control/mjcf/namescope.pyi b/typings/dm_control/mjcf/namescope.pyi new file mode 100644 index 00000000..cff1129b --- /dev/null +++ b/typings/dm_control/mjcf/namescope.pyi @@ -0,0 +1,119 @@ +""" +This type stub file was generated by pyright. +""" + +""" +This type stub file was generated by pyright. +""" + +class NameScope: + """A name scoping context for an MJCF model. + + This object maintains the uniqueness of identifiers within each MJCF + namespace. 
Examples of MJCF namespaces include 'body', 'joint', and 'geom'. + Each namescope also carries a name, and can have a parent namescope. + When MJCF models are merged, all identifiers gain a hierarchical prefix + separated by '/', which is the concatenation of all scope names up to + the root namescope. + """ + + def __init__(self, name, mjcf_model, model_dir=..., assets=...) -> None: + """Initializes a scope with the given name. + + Args: + name: The scope's name + mjcf_model: The RootElement of the MJCF model associated with this scope. + model_dir: (optional) Path to the directory containing the model XML file. + This is used to prefix the paths of all asset files. + assets: (optional) A dictionary of pre-loaded assets, of the form + `{filename: bytestring}`. If present, PyMJCF will search for assets in + this dictionary before attempting to load them from the filesystem. + """ + ... + @property + def revision(self): ... + def increment_revision(self): ... + @property + def name(self): + """This scope's name.""" + ... + @property + def files(self): + """A set containing the `File` attributes registered in this scope.""" + ... + @property + def assets(self): + """A dictionary containing pre-loaded assets.""" + ... + @property + def model_dir(self): + """Path to the directory containing the model XML file.""" + ... + @name.setter + def name(self, new_name): ... + @property + def mjcf_model(self): ... + @property + def parent(self): + """This parent `NameScope`, or `None` if this is a root scope.""" + ... + @parent.setter + def parent(self, new_parent): ... + @property + def root(self): ... + def full_prefix(self, prefix_root=..., as_list=...): + """The prefix for identifiers belonging to this scope. + + Args: + prefix_root: (optional) A `NameScope` object to be treated as root + for the purpose of calculating the prefix. If `None` then no prefix + is produced. + as_list: (optional) A boolean, if `True` return the list of prefix + components. 
If `False`, return the full prefix string separated by + `mjcf.constants.PREFIX_SEPARATOR`. + + Returns: + The prefix string. + """ + ... + def add(self, namespace, identifier, obj): + """Add an identifier to this name scope. + + Args: + namespace: A string specifying the namespace to which the + identifier belongs. + identifier: The identifier string. + obj: The object referred to by the identifier. + + Raises: + ValueError: If `identifier` not valid. + """ + ... + def replace(self, namespace, identifier, obj): + """Reassociates an identifier with a different object. + + Args: + namespace: A string specifying the namespace to which the + identifier belongs. + identifier: The identifier string. + obj: The object referred to by the identifier. + + Raises: + ValueError: If `identifier` not valid. + """ + ... + def remove(self, namespace, identifier): + """Removes an identifier from this name scope. + + Args: + namespace: A string specifying the namespace to which the + identifier belongs. + identifier: The identifier string. + + Raises: + KeyError: If `identifier` does not exist in this scope. + """ + ... + def rename(self, namespace, old_identifier, new_identifier): ... + def get(self, namespace, identifier): ... + def has_identifier(self, namespace, identifier): ... diff --git a/typings/dm_control/mjcf/parser.pyi b/typings/dm_control/mjcf/parser.pyi new file mode 100644 index 00000000..9dbececd --- /dev/null +++ b/typings/dm_control/mjcf/parser.pyi @@ -0,0 +1,69 @@ +""" +This type stub file was generated by pyright. +""" + +""" +This type stub file was generated by pyright. +""" + +def from_xml_string(xml_string, escape_separators=..., model_dir=..., resolve_references=..., assets=...): + """Parses an XML string into an MJCF object model. + + Args: + xml_string: An XML string representing an MJCF model. + escape_separators: (optional) A boolean, whether to replace '/' characters + in element identifiers. 
If `False`, any '/' present in the XML causes + a ValueError to be raised. + model_dir: (optional) Path to the directory containing the model XML file. + This is used to prefix the paths of all asset files. + resolve_references: (optional) A boolean indicating whether the parser + should attempt to resolve reference attributes to a corresponding element. + assets: (optional) A dictionary of pre-loaded assets, of the form + `{filename: bytestring}`. If present, PyMJCF will search for assets in + this dictionary before attempting to load them from the filesystem. + + Returns: + An `mjcf.RootElement`. + """ + ... + +def from_file(file_handle, escape_separators=..., model_dir=..., resolve_references=..., assets=...): + """Parses an XML file into an MJCF object model. + + Args: + file_handle: A Python file-like handle. + escape_separators: (optional) A boolean, whether to replace '/' characters + in element identifiers. If `False`, any '/' present in the XML causes + a ValueError to be raised. + model_dir: (optional) Path to the directory containing the model XML file. + This is used to prefix the paths of all asset files. + resolve_references: (optional) A boolean indicating whether the parser + should attempt to resolve reference attributes to a corresponding element. + assets: (optional) A dictionary of pre-loaded assets, of the form + `{filename: bytestring}`. If present, PyMJCF will search for assets in + this dictionary before attempting to load them from the filesystem. + + Returns: + An `mjcf.RootElement`. + """ + ... + +def from_path(path, escape_separators=..., resolve_references=..., assets=...): + """Parses an XML file into an MJCF object model. + + Args: + path: A path to an XML file. This path should be loadable using + `resources.GetResource`. + escape_separators: (optional) A boolean, whether to replace '/' characters + in element identifiers. If `False`, any '/' present in the XML causes + a ValueError to be raised. 
+ resolve_references: (optional) A boolean indicating whether the parser + should attempt to resolve reference attributes to a corresponding element. + assets: (optional) A dictionary of pre-loaded assets, of the form + `{filename: bytestring}`. If present, PyMJCF will search for assets in + this dictionary before attempting to load them from the filesystem. + + Returns: + An `mjcf.RootElement`. + """ + ... diff --git a/typings/dm_control/mjcf/physics.pyi b/typings/dm_control/mjcf/physics.pyi new file mode 100644 index 00000000..f28c4209 --- /dev/null +++ b/typings/dm_control/mjcf/physics.pyi @@ -0,0 +1,214 @@ +""" +This type stub file was generated by pyright. +""" + +import numpy as np +from dm_control import mujoco + +""" +This type stub file was generated by pyright. +""" +FLAGS = ... +_XML_PRINT_SHARD_SIZE = ... +_PICKLING_NOT_SUPPORTED = ... +_Attribute = ... +_TUPLE = tuple +_ARRAY_LIKE = ... +_ATTRIBUTES = ... + +def names_from_elements(mjcf_elements): + """Returns `namespace` and `named_index` for `mjcf_elements`. + + Args: + mjcf_elements: Either an `mjcf.Element`, or an iterable of `mjcf.Element` + of the same kind. + + Returns: + A tuple of `(namespace, named_indices)` where + -`namespace` is the Mujoco element type (eg: 'geom', 'body', etc.) + -`named_indices` are the names of `mjcf_elements`, either as a single + string or an iterable of strings depending on whether `mjcf_elements` + was an `mjcf.Element` or an iterable of `mjcf_Element`s. + + Raises: + ValueError: If `mjcf_elements` cannot be bound to this Physics. + """ + ... + +class SynchronizingArrayWrapper(np.ndarray): + """A non-contiguous view of an ndarray that synchronizes with the original. + + Note: this class should not be instantiated directly. + """ + + __slots__ = ... + def __new__(cls, backing_array, backing_index, physics, triggers_dirty, disable_on_write): ... + def copy(self, order=...): ... + def __copy__(self): ... + def __deepcopy__(self, memo): ... + def __reduce__(self): ... 
+ def __setitem__(self, index, value): ... + def __setslice__(self, start, stop, value): ... + +class Binding: + """Binding between a mujoco.Physics and an mjcf.Element or a list of Elements. + + This object should normally be created by calling `physics.bind(element)` + where `physics` is an instance of `mjcf.Physics`. See docstring for that + function for details. + """ + + __slots__ = ... + def __init__(self, physics, namespace, named_index) -> None: ... + def __dir__(self): ... + @property + def element_id(self): + """The ID number of this element within MuJoCo's data structures.""" + ... + def __getattr__(self, name): ... + def __setattr__(self, name, value): ... + def __getitem__(self, index): ... + def __setitem__(self, index, value): ... + +class _EmptyBinding: + """The result of binding no `mjcf.Elements` to an `mjcf.Physics` instance.""" + + __slots__ = ... + def __init__(self) -> None: ... + def __getattr__(self, name): ... + def __setattr__(self, name, value): ... + +_EMPTY_BINDING = ... + +class Physics(mujoco.Physics): + """A specialized `mujoco.Physics` that supports binding to MJCF elements.""" + + @classmethod + def from_mjcf_model(cls, mjcf_model): + """Constructs a new `mjcf.Physics` from an `mjcf.RootElement`. + + Args: + mjcf_model: An `mjcf.RootElement` instance. + + Returns: + A new `mjcf.Physics` instance. + """ + ... + def reload_from_mjcf_model(self, mjcf_model): + """Reloads this `mjcf.Physics` from an `mjcf.RootElement`. + + After calling this method, the state of this `Physics` instance is the same + as a new `Physics` instance created with the `from_mjcf_model` named + constructor. + + Args: + mjcf_model: An `mjcf.RootElement` instance. + """ + ... + @property + def is_dirty(self): + """Whether this physics' internal state needs to be recalculated.""" + ... + def mark_as_dirty(self): + """Marks this physics as dirty, thus requiring recalculation.""" + ... 
+ def forward(self): + """Recomputes the forward dynamics without advancing the simulation.""" + ... + def bind(self, mjcf_elements): + """Creates a binding between this `Physics` instance and `mjcf.Element`s. + + The binding allows for easier interaction with the `Physics` data structures + related to an MJCF element. For example, in order to access the Cartesian + position of a geom, we can use: + + ```python + physics.bind(geom_element).pos + ``` + + instead of the more cumbersome: + + ```python + physics.named.model.geom_pos[geom_element.full_identifier] + ``` + + Note that the binding takes into account the type of element. This allows us + to remove prefixes from certain common attributes in order to unify access. + For example, we can use: + + ```python + physics.bind(geom_element).pos = [1, 2, 3] + physics.bind(site_element).pos = [4, 5, 6] + ``` + + instead of: + + ```python + physics.named.model.geom_pos[geom_element.full_identifier] = [1, 2, 3] + physics.named.model.site_pos[site_element.full_identifier] = [4, 5, 6] + ``` + + This in turn allows for the creation of common algorithms that can operate + across a wide range of element type. + + When attribute values are modified through the binding, future queries of + derived values are automatically recalculated if necessary. For example, + if a joint's `qpos` is modified and a site's `xpos` is later read, the value + of the `xpos` is updated according to the new joint configuration. This is + done lazily when an updated value is required, so repeated value + modifications do not incur a performance penalty. + + It is also possible to bind a sequence containing one or more elements, + provided they are all of the same type. In this case the binding exposes + `SynchronizingArrayWrapper`s, which are array-like objects that provide + writeable views onto the corresponding memory addresses in MuJoCo. 
Writing + into a `SynchronizingArrayWrapper` causes the underlying values in MuJoCo + to be updated, and if necessary causes derived values to be recalculated. + Note that in order to trigger recalculation it is necessary to reference a + derived attribute of a binding. + + ```python + bound_joints = physics.bind([joint1, joint2]) + bound_bodies = physics.bind([body1, body2]) + # `qpos_view` and `xpos_view` are `SynchronizingArrayWrapper`s providing + # views onto `physics.data.qpos` and `physics.data.xpos` respectively. + qpos_view = bound_joints.qpos + xpos_view = bound_bodies.xpos + # This updates the corresponding values in `physics.data.qpos`, and marks + # derived values (such as `physics.data.xpos`) as needing recalculation. + qpos_view[0] += 1. + # Note: at this point `xpos_view` still contains the old values, since we + # need to actually read the value of a derived attribute in order to + # trigger recalculation. + another_xpos_view = bound_bodies.xpos # Triggers recalculation of `xpos`. + # Now both `xpos_view` and `another_xpos_view` will contain the updated + # values. + ``` + + Note that `SynchronizingArrayWrapper`s cannot be pickled. We also do not + recommend holding references to them - instead hold a reference to the + binding object, or call `physics.bind` again. + + Bindings also support numpy-style square bracket indexing. The first element + in the indexing expression should be an attribute name, and the second + element (if present) is used to index into the columns of the underlying + array. Named indexing into columns is also allowed, provided that the + corresponding field in `physics.named` supports it. + + ```python + physics.bind([geom1, geom2])['pos'] = [[1, 2, 3], [4, 5, 6]] + physics.bind([geom1, geom2])['pos', ['x', 'z']] = [[1, 3], [4, 6]] + ``` + + Args: + mjcf_elements: Either an `mjcf.Element`, or an iterable of `mjcf.Element` + of the same kind. 
+ + Returns: + A binding between this Physics instance an `mjcf_elements`, as described + above. + + Raises: + ValueError: If `mjcf_elements` cannot be bound to this Physics. + """ + ... diff --git a/typings/dm_control/mjcf/schema.pyi b/typings/dm_control/mjcf/schema.pyi new file mode 100644 index 00000000..7db9dc01 --- /dev/null +++ b/typings/dm_control/mjcf/schema.pyi @@ -0,0 +1,52 @@ +""" +This type stub file was generated by pyright. +""" + +""" +This type stub file was generated by pyright. +""" +_SCHEMA_XML_PATH = ... +_ARRAY_DTYPE_MAP = ... +_SCALAR_TYPE_MAP = ... +ElementSpec = ... +AttributeSpec = ... +_ADDITIONAL_FINDABLE_NAMESPACES = ... + +def parse_schema(schema_path): + """Parses the schema XML. + + Args: + schema_path: Path to the schema XML file. + + Returns: + An `ElementSpec` for the root element in the schema. + """ + ... + +def collect_namespaces(root_spec): + """Constructs a set of namespaces in a given ElementSpec. + + Args: + root_spec: An `ElementSpec` for the root element in the schema. + + Returns: + A set of strings specifying the names of all the namespaces that are present + in the spec. + """ + ... + +MUJOCO = ... +FINDABLE_NAMESPACES = ... +ATTACHMENT_FRAME = ... +WORLD_ATTACHMENT_FRAME = ... + +def override_schema(schema_xml_path): + """Override the schema with a custom xml. + + This method updates several global variables and care should be taken not to + call it if the pre-update values have already been used. + + Args: + schema_xml_path: Path to schema xml file. + """ + ... diff --git a/typings/dm_control/mjcf/skin.pyi b/typings/dm_control/mjcf/skin.pyi new file mode 100644 index 00000000..9a58e355 --- /dev/null +++ b/typings/dm_control/mjcf/skin.pyi @@ -0,0 +1,34 @@ +""" +This type stub file was generated by pyright. +""" + +""" +This type stub file was generated by pyright. +""" +MAX_BODY_NAME_LENGTH = ... +Skin = ... +Bone = ... + +def parse(contents, body_getter): + """Parses the contents of a MuJoCo skin file. 
+ + Args: + contents: a bytes-like object containing the contents of a skin file. + body_getter: a callable that takes a string and returns the `mjcf.Element` + instance of a MuJoCo body of the specified name. + + Returns: + A `Skin` named tuple. + """ + ... + +def serialize(skin): + """Serializes a `Skin` named tuple into the contents of a MuJoCo skin file. + + Args: + skin: a `Skin` named tuple. + + Returns: + A `bytes` object representing the content of a MuJoCo skin file. + """ + ... diff --git a/typings/dm_control/mjcf/traversal_utils.pyi b/typings/dm_control/mjcf/traversal_utils.pyi new file mode 100644 index 00000000..238ab3b8 --- /dev/null +++ b/typings/dm_control/mjcf/traversal_utils.pyi @@ -0,0 +1,28 @@ +""" +This type stub file was generated by pyright. +""" + +""" +This type stub file was generated by pyright. +""" +_ACTUATOR_TAGS = ... + +def get_freejoint(element): + """Retrieves the free joint of a body. Returns `None` if there isn't one.""" + ... + +def get_attachment_frame(mjcf_model): ... +def get_frame_freejoint(mjcf_model): ... +def get_frame_joints(mjcf_model): + """Retrieves all joints belonging to the attachment frame of an MJCF model.""" + ... + +def commit_defaults(element, attributes=...): + """Commits default values into attributes of the specified element. + + Args: + element: A PyMJCF element. + attributes: (optional) A list of strings specifying the attributes to be + copied from defaults, or `None` if all attributes should be copied. + """ + ... diff --git a/typings/dm_control/mujoco/__init__.pyi b/typings/dm_control/mujoco/__init__.pyi new file mode 100644 index 00000000..5201f4ed --- /dev/null +++ b/typings/dm_control/mujoco/__init__.pyi @@ -0,0 +1,10 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control.mujoco.engine import Camera, MovableCamera, Physics, TextOverlay, action_spec +from mujoco import * + +""" +This type stub file was generated by pyright. 
+""" diff --git a/typings/dm_control/mujoco/engine.pyi b/typings/dm_control/mujoco/engine.pyi new file mode 100644 index 00000000..b45ad9fb --- /dev/null +++ b/typings/dm_control/mujoco/engine.pyi @@ -0,0 +1,527 @@ +""" +This type stub file was generated by pyright. +""" + +import contextlib +from typing import Callable, NamedTuple, Optional, Union + +import mujoco +import numpy as np +from dm_control.rl import control as _control + +""" +This type stub file was generated by pyright. +""" +_FONT_STYLES = ... +_GRID_POSITIONS = ... +Contexts = ... +Selected = ... +NamedIndexStructs = ... +Pose = ... +_BOTH_SEGMENTATION_AND_DEPTH_ENABLED = ... +_INVALID_PHYSICS_STATE = ... +_OVERLAYS_NOT_SUPPORTED_FOR_DEPTH_OR_SEGMENTATION = ... +_RENDER_FLAG_OVERRIDES_NOT_SUPPORTED_FOR_DEPTH_OR_SEGMENTATION = ... +_KEYFRAME_ID_OUT_OF_RANGE = ... + +class Physics(_control.Physics): + """Encapsulates a MuJoCo model. + + A MuJoCo model is typically defined by an MJCF XML file [0] + + ```python + physics = Physics.from_xml_path('/path/to/model.xml') + + with physics.reset_context(): + physics.named.data.qpos['hinge'] = np.random.rand() + + # Apply controls and advance the simulation state. + physics.set_control(np.random.random_sample(size=N_ACTUATORS)) + physics.step() + + # Render a camera defined in the XML file to a NumPy array. + rgb = physics.render(height=240, width=320, id=0) + ``` + + [0] http://www.mujoco.org/book/modeling.html + """ + + _contexts = ... + def __new__(cls, *args, **kwargs): ... + def __init__(self, data) -> None: + """Initializes a new `Physics` instance. + + Args: + data: Instance of `wrapper.MjData`. + """ + ... + @contextlib.contextmanager + def suppress_physics_errors(self): + """Physics warnings will be logged rather than raise exceptions.""" + ... + def enable_profiling(self): + """Enables Mujoco timing profiling.""" + ... + def set_control(self, control): + """Sets the control signal for the actuators. 
+ + Args: + control: NumPy array or array-like actuation values. + """ + ... + def step(self, nstep: int = ...) -> None: + """Advances the physics state by `nstep`s. + + Args: + nstep: Optional integer, number of steps to take. + + The actuation can be updated by calling the `set_control` function first. + """ + ... + def render( + self, + height=..., + width=..., + camera_id=..., + overlays=..., + depth=..., + segmentation=..., + scene_option=..., + render_flag_overrides=..., + scene_callback: Optional[Callable[[Physics, mujoco.MjvScene], None]] = ..., + ): + """Returns a camera view as a NumPy array of pixel values. + + Args: + height: Viewport height (number of pixels). Optional, defaults to 240. + width: Viewport width (number of pixels). Optional, defaults to 320. + camera_id: Optional camera name or index. Defaults to -1, the free + camera, which is always defined. A nonnegative integer or string + corresponds to a fixed camera, which must be defined in the model XML. + If `camera_id` is a string then the camera must also be named. + overlays: An optional sequence of `TextOverlay` instances to draw. Only + supported if `depth` is False. + depth: If `True`, this method returns a NumPy float array of depth values + (in meters). Defaults to `False`, which results in an RGB image. + segmentation: If `True`, this method returns a 2-channel NumPy int32 array + of label values where the pixels of each object are labeled with the + pair (mjModel ID, mjtObj enum object type). Background pixels are + labeled (-1, -1). Defaults to `False`, which returns an RGB image. + scene_option: An optional `wrapper.MjvOption` instance that can be used to + render the scene with custom visualization options. If None then the + default options will be used. + render_flag_overrides: Optional mapping specifying rendering flags to + override. The keys can be either lowercase strings or `mjtRndFlag` enum + values, and the values are the overridden flag values, e.g. 
+ `{'wireframe': True}` or `{mujoco.mjtRndFlag.mjRND_WIREFRAME: True}`. + See `mujoco.mjtRndFlag` for the set of valid flags. Must be None if + either `depth` or `segmentation` is True. + scene_callback: Called after the scene has been created and before + it is rendered. Can be used to add more geoms to the scene. + + Returns: + The rendered RGB, depth or segmentation image. + """ + ... + def get_state(self): + """Returns the physics state. + + Returns: + NumPy array containing full physics simulation state. + """ + ... + def set_state(self, physics_state): + """Sets the physics state. + + Args: + physics_state: NumPy array containing the full physics simulation state. + + Raises: + ValueError: If `physics_state` has invalid size. + """ + ... + def copy(self, share_model=...): + """Creates a copy of this `Physics` instance. + + Args: + share_model: If True, the copy and the original will share a common + MjModel instance. By default, both model and data will both be copied. + + Returns: + A `Physics` instance. + """ + ... + def reset(self, keyframe_id=...): + """Resets internal variables of the simulation, possibly to a keyframe. + + Args: + keyframe_id: Optional integer specifying the index of a keyframe defined + in the model XML to which the simulation state should be initialized. + Must be between 0 and `self.model.nkey - 1` (inclusive). + + Raises: + ValueError: If `keyframe_id` is out of range. + """ + ... + def after_reset(self): + """Runs after resetting internal variables of the physics simulation.""" + ... + def forward(self): + """Recomputes the forward dynamics without advancing the simulation.""" + ... + @contextlib.contextmanager + def check_invalid_state(self): + """Checks whether the physics state is invalid at exit. + + Yields: + None + + Raises: + PhysicsError: if the simulation state is invalid at exit, unless this + context is nested inside a `suppress_physics_errors` context, in which + case a warning will be logged instead. + """ + ... 
+ def __getstate__(self): ... + def __setstate__(self, data): ... + def free(self): + """Frees the native MuJoCo data structures held by this `Physics` instance. + + This is an advanced feature for use when manual memory management is + necessary. This `Physics` object MUST NOT be used after this function has + been called. + """ + ... + @classmethod + def from_model(cls, model): + """A named constructor from a `wrapper.MjModel` instance.""" + ... + @classmethod + def from_xml_string(cls, xml_string, assets=...): + """A named constructor from a string containing an MJCF XML file. + + Args: + xml_string: XML string containing an MJCF model description. + assets: Optional dict containing external assets referenced by the model + (such as additional XML files, textures, meshes etc.), in the form of + `{filename: contents_string}` pairs. The keys should correspond to the + filenames specified in the model XML. + + Returns: + A new `Physics` instance. + """ + ... + @classmethod + def from_byte_string(cls, byte_string): + """A named constructor from a model binary as a byte string.""" + ... + @classmethod + def from_xml_path(cls, file_path): + """A named constructor from a path to an MJCF XML file. + + Args: + file_path: String containing path to model definition file. + + Returns: + A new `Physics` instance. + """ + ... + @classmethod + def from_binary_path(cls, file_path): + """A named constructor from a path to an MJB model binary file. + + Args: + file_path: String containing path to model definition file. + + Returns: + A new `Physics` instance. + """ + ... + def reload_from_xml_string(self, xml_string, assets=...): + """Reloads the `Physics` instance from a string containing an MJCF XML file. + + After calling this method, the state of the `Physics` instance is the same + as a new `Physics` instance created with the `from_xml_string` named + constructor. + + Args: + xml_string: XML string containing an MJCF model description. 
+ assets: Optional dict containing external assets referenced by the model + (such as additional XML files, textures, meshes etc.), in the form of + `{filename: contents_string}` pairs. The keys should correspond to the + filenames specified in the model XML. + """ + ... + def reload_from_xml_path(self, file_path): + """Reloads the `Physics` instance from a path to an MJCF XML file. + + After calling this method, the state of the `Physics` instance is the same + as a new `Physics` instance created with the `from_xml_path` + named constructor. + + Args: + file_path: String containing path to model definition file. + """ + ... + @property + def named(self): ... + @property + def contexts(self): + """Returns a `Contexts` namedtuple, used in `Camera`s and rendering code.""" + ... + @property + def model(self): ... + @property + def data(self): ... + def control(self): + """Returns a copy of the control signals for the actuators.""" + ... + def activation(self): + """Returns a copy of the internal states of actuators. + + For details, please refer to + http://www.mujoco.org/book/computation.html#geActuation + + Returns: + Activations in a numpy array. + """ + ... + def state(self): + """Returns the full physics state. Alias for `get_physics_state`.""" + ... + def position(self): + """Returns a copy of the generalized positions (system configuration).""" + ... + def velocity(self): + """Returns a copy of the generalized velocities.""" + ... + def timestep(self): + """Returns the simulation timestep.""" + ... + def time(self): + """Returns episode time in seconds.""" + ... + +class CameraMatrices(NamedTuple): + """Component matrices used to construct the camera matrix. + + The matrix product over these components yields the camera matrix. + + Attributes: + image: (3, 3) image matrix. + focal: (3, 4) focal matrix. + rotation: (4, 4) rotation matrix. + translation: (4, 4) translation matrix. 
+ """ + + image: np.ndarray + focal: np.ndarray + rotation: np.ndarray + translation: np.ndarray + ... + +class Camera: + """Mujoco scene camera. + + Holds rendering properties such as the width and height of the viewport. The + camera position and rotation is defined by the Mujoco camera corresponding to + the `camera_id`. Multiple `Camera` instances may exist for a single + `camera_id`, for example to render the same view at different resolutions. + """ + + def __init__( + self, + physics: Physics, + height: int = ..., + width: int = ..., + camera_id: Union[int, str] = ..., + max_geom: Optional[int] = ..., + scene_callback: Optional[Callable[[Physics, mujoco.MjvScene], None]] = ..., + ) -> None: + """Initializes a new `Camera`. + + Args: + physics: Instance of `Physics`. + height: Optional image height. Defaults to 240. + width: Optional image width. Defaults to 320. + camera_id: Optional camera name or index. Defaults to -1, the free + camera, which is always defined. A nonnegative integer or string + corresponds to a fixed camera, which must be defined in the model XML. + If `camera_id` is a string then the camera must also be named. + max_geom: Optional integer specifying the maximum number of geoms that can + be rendered in the same scene. If None this will be chosen automatically + based on the estimated maximum number of renderable geoms in the model. + scene_callback: Called after the scene has been created and before + it is rendered. Can be used to add more geoms to the scene. + Raises: + ValueError: If `camera_id` is outside the valid range, or if `width` or + `height` exceed the dimensions of MuJoCo's offscreen framebuffer. + """ + ... + @property + def width(self): + """Returns the image width (number of pixels).""" + ... + @property + def height(self): + """Returns the image height (number of pixels).""" + ... + @property + def option(self): + """Returns the camera's visualization options.""" + ... 
+ @property + def scene(self): + """Returns the `mujoco.MjvScene` instance used by the camera.""" + ... + def matrices(self) -> CameraMatrices: + """Computes the component matrices used to compute the camera matrix. + + Returns: + An instance of `CameraMatrices` containing the image, focal, rotation, and + translation matrices of the camera. + """ + ... + @property + def matrix(self): + """Returns the 3x4 camera matrix. + + For a description of the camera matrix see, e.g., + https://en.wikipedia.org/wiki/Camera_matrix. + For a usage example, see the associated test. + """ + ... + def update(self, scene_option=...): + """Updates geometry used for rendering. + + Args: + scene_option: A custom `wrapper.MjvOption` instance to use to render + the scene instead of the default. If None, will use the default. + """ + ... + def render(self, overlays=..., depth=..., segmentation=..., scene_option=..., render_flag_overrides=...): + """Renders the camera view as a numpy array of pixel values. + + Args: + overlays: An optional sequence of `TextOverlay` instances to draw. Only + supported if `depth` and `segmentation` are both False. + depth: An optional boolean. If True, makes the camera return depth + measurements. Cannot be enabled if `segmentation` is True. + segmentation: An optional boolean. If True, make the camera return a + pixel-wise segmentation of the scene. Cannot be enabled if `depth` is + True. + scene_option: A custom `wrapper.MjvOption` instance to use to render + the scene instead of the default. If None, will use the default. + render_flag_overrides: Optional mapping containing rendering flags to + override. The keys can be either lowercase strings or `mjtRndFlag` enum + values, and the values are the overridden flag values, e.g. + `{'wireframe': True}` or `{mujoco.mjtRndFlag.mjRND_WIREFRAME: True}`. + See `mujoco.mjtRndFlag` for the set of valid flags. Must be empty if + either `depth` or `segmentation` is True. + + Returns: + The rendered scene. 
+ * If `depth` and `segmentation` are both False (default), this is a + (height, width, 3) uint8 numpy array containing RGB values. + * If `depth` is True, this is a (height, width) float32 numpy array + containing depth values (in meters). + * If `segmentation` is True, this is a (height, width, 2) int32 numpy + array where the first channel contains the integer ID of the object at + each pixel, and the second channel contains the corresponding object + type (a value in the `mjtObj` enum). Background pixels are labeled + (-1, -1). + + Raises: + ValueError: If either `overlays` or `render_flag_overrides` is requested + when `depth` or `segmentation` rendering is enabled. + ValueError: If both depth and segmentation flags are set together. + """ + ... + def select(self, cursor_position): + """Returns bodies and geoms visible at given coordinates in the frame. + + Args: + cursor_position: A `tuple` containing x and y coordinates, normalized to + between 0 and 1, and where (0, 0) is bottom-left. + + Returns: + A `Selected` namedtuple. Fields are None if nothing is selected. + """ + ... + +class MovableCamera(Camera): + """Subclass of `Camera` that can be moved by changing its pose. + + A `MovableCamera` always corresponds to a MuJoCo free camera with id -1. + """ + + def __init__( + self, + physics: Physics, + height: int = ..., + width: int = ..., + max_geom: Optional[int] = ..., + scene_callback: Optional[Callable[[Physics, mujoco.MjvScene], None]] = ..., + ) -> None: + """Initializes a new `MovableCamera`. + + Args: + physics: Instance of `Physics`. + height: Optional image height. Defaults to 240. + width: Optional image width. Defaults to 320. + max_geom: Optional integer specifying the maximum number of geoms that can + be rendered in the same scene. If None this will be chosen automatically + based on the estimated maximum number of renderable geoms in the model. + scene_callback: Called after the scene has been created and before + it is rendered. 
Can be used to add more geoms to the scene. + """ + ... + def get_pose(self): + """Returns the pose of the camera. + + Returns: + A `Pose` named tuple with fields: + lookat: NumPy array specifying lookat point. + distance: Float specifying distance to `lookat`. + azimuth: Azimuth in degrees. + elevation: Elevation in degrees. + """ + ... + def set_pose(self, lookat, distance, azimuth, elevation): + """Sets the pose of the camera. + + Args: + lookat: NumPy array or list specifying lookat point. + distance: Float specifying distance to `lookat`. + azimuth: Azimuth in degrees. + elevation: Elevation in degrees. + """ + ... + +class TextOverlay: + """A text overlay that can be drawn on top of a camera view.""" + + __slots__ = ... + def __init__(self, title=..., body=..., style=..., position=...) -> None: + """Initializes a new TextOverlay instance. + + Args: + title: Title text. + body: Body text. + style: The font style. Can be either "normal", "shadow", or "big". + position: The grid position of the overlay. Can be either "top left", + "top right", "bottom left", or "bottom right". + """ + ... + def draw(self, context, rect): + """Draws the overlay. + + Args: + context: A `mujoco.MjrContext` pointer. + rect: A `mujoco.MjrRect`. + """ + ... + +def action_spec(physics): + """Returns a `BoundedArraySpec` matching the `physics` actuators.""" + ... diff --git a/typings/dm_control/mujoco/index.pyi b/typings/dm_control/mujoco/index.pyi new file mode 100644 index 00000000..ad9782e3 --- /dev/null +++ b/typings/dm_control/mujoco/index.pyi @@ -0,0 +1,231 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +"""Mujoco functions to support named indexing. 
+ +The Mujoco name structure works as follows: + +In mjxmacro.h, each "X" entry denotes a type (a), a field name (b) and a list +of dimension size metadata (c) which may contain both numbers and names, for +example + + X(int, name_bodyadr, nbody, 1) // or + X(mjtNum, body_pos, nbody, 3) + a b c -----> + +The second declaration states that the field `body_pos` has type `mjtNum` and +dimension sizes `(nbody, 3)`, i.e. the first axis is indexed by body number. +These and other named dimensions are sized based on the loaded model. This +information is parsed and stored in `mjbindings.sizes`. + +In mjmodel.h, the struct mjModel contains an array of element name addresses +for each size name. + + int* name_bodyadr; // body name pointers (nbody x 1) + +By iterating over each of these element name address arrays, we first obtain a +mapping from size names to a list of element names. + + {'nbody': ['cart', 'pole'], 'njnt': ['free', 'ball', 'hinge'], ...} + +In addition to the element names that are derived from the mjModel struct at +runtime, we also assign hard-coded names to certain dimensions where there is an +established naming convention (e.g. 'x', 'y', 'z' for dimensions that correspond +to Cartesian positions). + +For some dimensions, a single element name maps to multiple indices within the +underlying field. For example, a single joint name corresponds to a variable +number of indices within `qpos` that depends on the number of degrees of freedom +associated with that joint type. These are referred to as "ragged" dimensions. + +In such cases we determine the size of each named element by examining the +address arrays (e.g. `jnt_qposadr`), and construct a mapping from size name to +element sizes: + + {'nq': [7, 3, 1], 'nv': [6, 3, 1], ...} + +Given these two dictionaries, we then create an `Axis` instance for each size +name. 
These objects have a `convert_key_item` method that handles the conversion +from indexing expressions containing element names to valid numpy indices. +Different implementations of `Axis` are used to handle "ragged" and "non-ragged" +dimensions. + + {'nbody': RegularNamedAxis(names=['cart', 'pole']), + 'nq': RaggedNamedAxis(names=['free', 'ball', 'hinge'], sizes=[7, 4, 1])} + +We construct this dictionary once using `make_axis_indexers`. + +Finally, for each field we construct a `FieldIndexer` class. A `FieldIndexer` +instance encapsulates a field together with a list of `Axis` instances (one per +dimension), and implements the named indexing logic by calling their respective +`convert_key_item` methods. + +Summary of terminology: + +* _size name_ or _size_ A dimension size name, e.g. `nbody` or `ngeom`. +* _element name_ or _name_ A named element in a Mujoco model, e.g. 'cart' or + 'pole'. +* _element index_ or _index_ The index of an element name, for a specific size + name. +""" +_RAGGED_ADDRS = ... +_COLUMN_NAMES = ... +_COLUMN_ID_TO_FIELDS = ... + +def make_axis_indexers(model): # -> defaultdict[Unknown, UnnamedAxis]: + """Returns a dict that maps size names to `Axis` indexers. + + Args: + model: An instance of `mjbindings.MjModelWrapper`. + + Returns: + A `dict` mapping from a size name (e.g. `'nbody'`) to an `Axis` instance. + """ + ... + +class Axis(metaclass=abc.ABCMeta): + """Handles the conversion of named indexing expressions into numpy indices.""" + + @abc.abstractmethod + def convert_key_item(self, key_item): # -> None: + """Converts a (possibly named) indexing expression to a numpy index.""" + ... + +class UnnamedAxis(Axis): + """An object representing an axis where the elements are not named.""" + + def convert_key_item(self, key_item): + """Validate the indexing expression and return it unmodified.""" + ... 
+ +class RegularNamedAxis(Axis): + """Represents an axis where each named element has a fixed size of 1.""" + + def __init__(self, names) -> None: + """Initializes a new `RegularNamedAxis` instance. + + Args: + names: A list or array of element names. + """ + ... + def convert_key_item(self, key_item): # -> int | NDArray[Any] | NDArray[Unknown]: + """Converts a named indexing expression to a numpy-friendly index.""" + ... + @property + def names(self): # -> Unknown: + """Returns a list of element names.""" + ... + +class RaggedNamedAxis(Axis): + """Represents an axis where the named elements may vary in size.""" + + def __init__(self, element_names, element_sizes, singleton=...) -> None: + """Initializes a new `RaggedNamedAxis` instance. + + Args: + element_names: A list or array containing the element names. + element_sizes: A list or array containing the size of each element. + singleton: Whether to reduce singleton slices to scalars. + """ + ... + def convert_key_item(self, key_item): # -> list[Unknown] | ndarray[Unknown, Unknown]: + """Converts a named indexing expression to a numpy-friendly index.""" + ... + @property + def names(self): # -> Unknown: + """Returns a list of element names.""" + ... + +Axes = ... + +class FieldIndexer: + """An array-like object providing named access to a field in a MuJoCo struct. + + FieldIndexers expose the same attributes and methods as an `np.ndarray`. + + They may be indexed with strings or lists of strings corresponding to element + names. They also support standard numpy indexing expressions, with the + exception of indices containing `Ellipsis` or `None`. + """ + + __slots__ = ... + def __init__(self, parent_struct, field_name, axis_indexers) -> None: + """Initializes a new `FieldIndexer`. + + Args: + parent_struct: Wrapped ctypes structure, as generated by `mjbindings`. + field_name: String containing field name in `parent_struct`. + axis_indexers: A list of `Axis` instances, one per dimension. + """ + ... 
+ def __dir__(self): # -> list[str]: + ... + def __getattr__(self, name): # -> Any: + ... + def __getitem__(self, key): + """Converts the key to a numeric index and returns the indexed array. + + Args: + key: Indexing expression. + + Raises: + IndexError: If an indexing tuple has too many elements, or if it contains + `Ellipsis`, `None`, or an empty string. + + Returns: + The indexed array. + """ + ... + def __setitem__(self, key, value): # -> None: + """Converts the key and assigns to the indexed array. + + Args: + key: Indexing expression. + value: Value to assign. + + Raises: + IndexError: If an indexing tuple has too many elements, or if it contains + `Ellipsis`, `None`, or an empty string. + """ + ... + @property + def axes(self): # -> Axes: + """A namedtuple containing the row and column indexers for this field.""" + ... + def __repr__(self): # -> LiteralString: + """Returns a pretty string representation of the `FieldIndexer`.""" + ... + +def struct_indexer(struct, struct_name, size_to_axis_indexer): # -> StructIndexer: + """Returns an object with a `FieldIndexer` attribute for each dynamic field. + + Usage example + + ```python + named_data = struct_indexer(mjdata, 'mjdata', size_to_axis_indexer) + fingertip_xpos = named_data.xpos['fingertip'] + elbow_qvel = named_data.qvel['elbow'] + ``` + + Args: + struct: Wrapped ctypes structure as generated by `mjbindings`. + struct_name: String containing corresponding Mujoco name of struct. + size_to_axis_indexer: dict that maps size names to `Axis` instances. + + Returns: + An object with a field for every dynamically sized array field, mapping to a + `FieldIndexer`. The returned object is immutable and has an `_asdict` + method. + + Raises: + ValueError: If `struct_name` is not recognized. + """ + ... + +def make_struct_indexer(field_indexers): # -> StructIndexer: + """Returns an immutable container exposing named indexers as attributes.""" + + class StructIndexer: ... 
diff --git a/typings/dm_control/mujoco/wrapper/__init__.pyi b/typings/dm_control/mujoco/wrapper/__init__.pyi new file mode 100644 index 00000000..056bb59c --- /dev/null +++ b/typings/dm_control/mujoco/wrapper/__init__.pyi @@ -0,0 +1,23 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control.mujoco.wrapper import mjbindings +from dm_control.mujoco.wrapper.core import ( + Error, + MjData, + MjModel, + MjrContext, + MjvCamera, + MjvFigure, + MjvOption, + MjvPerturb, + MjvScene, + callback_context, + enable_timer, + get_schema, + save_last_parsed_model_to_xml, + set_callback, +) + +"""Python bindings and wrapper classes for MuJoCo.""" diff --git a/typings/dm_control/mujoco/wrapper/core.pyi b/typings/dm_control/mujoco/wrapper/core.pyi new file mode 100644 index 00000000..069bb2ed --- /dev/null +++ b/typings/dm_control/mujoco/wrapper/core.pyi @@ -0,0 +1,387 @@ +""" +This type stub file was generated by pyright. +""" + +import contextlib + +import mujoco + +"""Main user-facing classes and utility functions for loading MuJoCo models.""" +_FAKE_BINARY_FILENAME = ... +_CONTACT_ID_OUT_OF_RANGE = ... + +class Error(Exception): + """Base class for MuJoCo exceptions.""" + + ... + +if mujoco.mjVERSION_HEADER != mujoco.mj_version(): ... +_LAST_PARSED_MODEL_PTR = ... +_NOT_LAST_PARSED_ERROR = ... + +def enable_timer(enabled=...): # -> None: + ... + +def set_callback(name, new_callback=...): # -> None: + """Sets a user-defined callback function to modify MuJoCo's behavior. + + Callback functions should have the following signature: + func(const_mjmodel_ptr, mjdata_ptr) -> None + + Args: + name: Name of the callback to set. Must be a field in + `functions.function_pointers`. + new_callback: The new callback. This can be one of the following: + * A Python callable + * A C function exposed by a `ctypes.CDLL` object + * An integer specifying the address of a callback function + * None, in which case any existing callback of that name is removed + """ + ... 
+ +@contextlib.contextmanager +def callback_context(name, new_callback=...): # -> Generator[None, Any, None]: + """Context manager that temporarily overrides a MuJoCo callback function. + + On exit, the callback will be restored to its original value (None if the + callback was not already overridden when the context was entered). + + Args: + name: Name of the callback to set. Must be a field in + `mjbindings.function_pointers`. + new_callback: The new callback. This can be one of the following: + * A Python callable + * A C function exposed by a `ctypes.CDLL` object + * An integer specifying the address of a callback function + * None, in which case any existing callback of that name is removed + + Yields: + None + """ + ... + +def get_schema(): # -> Any: + """Returns a string containing the schema used by the MuJoCo XML parser.""" + ... + +def save_last_parsed_model_to_xml(xml_path, check_model=...): # -> None: + """Writes a description of the most recently loaded model to an MJCF XML file. + + Args: + xml_path: Path to the output XML file. + check_model: Optional `MjModel` instance. If specified, this model will be + checked to see if it is the most recently parsed one, and a ValueError + will be raised otherwise. + Raises: + Error: If MuJoCo encounters an error while writing the XML file. + ValueError: If `check_model` was passed, and this model is not the most + recently parsed one. + """ + ... + +class _MjModelMeta(type): + """Metaclass which allows MjModel below to delegate to mujoco.MjModel.""" + + def __new__(cls, name, bases, dct): # -> Self@_MjModelMeta: + ... + +class MjModel(metaclass=_MjModelMeta): + """Wrapper class for a MuJoCo 'mjModel' instance. + + MjModel encapsulates features of the model that are expected to remain + constant. It also contains simulation and visualization options which may be + changed occasionally, although this is done explicitly by the user. + """ + + _HAS_DYNAMIC_ATTRIBUTES = ... 
+ def __init__(self, model_ptr) -> None: + """Creates a new MjModel instance from a mujoco.MjModel.""" + ... + @property + def ptr(self): # -> Unknown: + """The lower level MjModel instance.""" + ... + def __getstate__(self): # -> Unknown: + ... + def __setstate__(self, state): # -> None: + ... + def __copy__(self): # -> MjModel: + ... + @classmethod + def from_xml_string(cls, xml_string, assets=...): # -> Self@MjModel: + """Creates an `MjModel` instance from a model description XML string. + + Args: + xml_string: String containing an MJCF or URDF model description. + assets: Optional dict containing external assets referenced by the model + (such as additional XML files, textures, meshes etc.), in the form of + `{filename: contents_string}` pairs. The keys should correspond to the + filenames specified in the model XML. + + Returns: + An `MjModel` instance. + """ + ... + @classmethod + def from_byte_string(cls, byte_string): # -> Self@MjModel: + """Creates an MjModel instance from a model binary as a string of bytes.""" + ... + @classmethod + def from_xml_path(cls, xml_path): # -> Self@MjModel: + """Creates an MjModel instance from a path to a model XML file.""" + ... + @classmethod + def from_binary_path(cls, binary_path): # -> Self@MjModel: + """Creates an MjModel instance from a path to a compiled model binary.""" + ... + def save_binary(self, binary_path): # -> None: + """Saves the MjModel instance to a binary file.""" + ... + def to_bytes(self): # -> bytes: + """Serialize the model to a string of bytes.""" + ... + def copy(self): # -> MjModel: + """Returns a copy of this MjModel instance.""" + ... + def free(self): # -> None: + """Frees the native resources held by this MjModel. + + This is an advanced feature for use when manual memory management is + necessary. This MjModel object MUST NOT be used after this function has + been called. + """ + ... + def name2id(self, name, object_type): + """Returns the integer ID of a specified MuJoCo object. 
+ + Args: + name: String specifying the name of the object to query. + object_type: The type of the object. Can be either a lowercase string + (e.g. 'body', 'geom') or an `mjtObj` enum value. + + Returns: + An integer object ID. + + Raises: + Error: If `object_type` is not a valid MuJoCo object type, or if no object + with the corresponding name and type was found. + """ + ... + def id2name(self, object_id, object_type): # -> str: + """Returns the name associated with a MuJoCo object ID, if there is one. + + Args: + object_id: Integer ID. + object_type: The type of the object. Can be either a lowercase string + (e.g. 'body', 'geom') or an `mjtObj` enum value. + + Returns: + A string containing the object name, or an empty string if the object ID + either doesn't exist or has no name. + + Raises: + Error: If `object_type` is not a valid MuJoCo object type. + """ + ... + @contextlib.contextmanager + def disable(self, *flags): # -> Generator[None, Any, None]: + """Context manager for temporarily disabling MuJoCo flags. + + Args: + *flags: Positional arguments specifying flags to disable. Can be either + lowercase strings (e.g. 'gravity', 'contact') or `mjtDisableBit` enum + values. + + Yields: + None + + Raises: + ValueError: If any item in `flags` is neither a valid name nor a value + from `mujoco.mjtDisableBit`. + """ + ... + @property + def name(self): # -> str: + """Returns the name of the model.""" + ... + +class _MjDataMeta(type): + """Metaclass which allows MjData below to delegate to mujoco.MjData.""" + + def __new__(cls, name, bases, dct): # -> Self@_MjDataMeta: + ... + +class MjData(metaclass=_MjDataMeta): + """Wrapper class for a MuJoCo 'mjData' instance. + + MjData contains all of the dynamic variables and intermediate results produced + by the simulation. These are expected to change on each simulation timestep. + """ + + _HAS_DYNAMIC_ATTRIBUTES = ... + def __init__(self, model) -> None: + """Construct a new MjData instance. 
+ + Args: + model: An MjModel instance. + """ + ... + def __getstate__(self): # -> tuple[Unknown, Unknown]: + ... + def __setstate__(self, state): # -> None: + ... + def __copy__(self): # -> MjData: + ... + def copy(self): # -> MjData: + """Returns a copy of this MjData instance with the same parent MjModel.""" + ... + def object_velocity(self, object_id, object_type, local_frame=...): # -> ndarray[Any, dtype[float64]]: + """Returns the 6D velocity (linear, angular) of a MuJoCo object. + + Args: + object_id: Object identifier. Can be either integer ID or String name. + object_type: The type of the object. Can be either a lowercase string + (e.g. 'body', 'geom') or an `mjtObj` enum value. + local_frame: Boolean specifiying whether the velocity is given in the + global (worldbody), or local (object) frame. + + Returns: + 2x3 array with stacked (linear_velocity, angular_velocity) + + Raises: + Error: If `object_type` is not a valid MuJoCo object type, or if no object + with the corresponding name and type was found. + """ + ... + def contact_force(self, contact_id): # -> ndarray[Any, dtype[float64]]: + """Returns the wrench of a contact as a 2 x 3 array of (forces, torques). + + Args: + contact_id: Integer, the index of the contact within the contact buffer + (`self.contact`). + + Returns: + 2x3 array with stacked (force, torque). Note that the order of dimensions + is (normal, tangent, tangent), in the contact's frame. + + Raises: + ValueError: If `contact_id` is negative or bigger than ncon-1. + """ + ... + @property + def ptr(self): + """The lower level MjData instance.""" + ... + @property + def model(self): # -> Unknown: + """The parent MjModel for this MjData instance.""" + ... + @property + def contact(self): + """Variable-length recarray containing all current contacts.""" + ... + +class MjvCamera(mujoco.MjvCamera): + @property + def type_(self): ... + @type_.setter + def type_(self, t): # -> None: + ... + @property + def ptr(self): # -> Self@MjvCamera: + ... 
+ +class MjvOption(mujoco.MjvOption): + def __init__(self) -> None: ... + @property + def ptr(self): # -> Self@MjvOption: + ... + +class MjrContext: + """Wrapper for mujoco.MjrContext.""" + + def __init__(self, model, gl_context, font_scale=...) -> None: + """Initializes this MjrContext instance. + + Args: + model: An `MjModel` instance. + gl_context: A `render.ContextBase` instance. + font_scale: Integer controlling the font size for text. Must be a value + in `mujoco.mjtFontScale`. + + Raises: + ValueError: If `font_scale` is invalid. + """ + ... + @property + def ptr(self): # -> None: + ... + def free(self): # -> None: + """Frees the native resources held by this MjrContext. + + This is an advanced feature for use when manual memory management is + necessary. This MjrContext object MUST NOT be used after this function has + been called. + """ + ... + def __del__(self): # -> None: + ... + +_NAME_TO_RENDER_FLAG_ENUM_VALUE = ... + +class MjvScene(mujoco.MjvScene): + def __init__(self, model=..., max_geom=...) -> None: + """Initializes a new `MjvScene` instance. + + Args: + model: (optional) An `MjModel` instance. + max_geom: (optional) An integer specifying the maximum number of geoms + that can be represented in the scene. If None, this will be chosen + automatically based on `model`. + """ + ... + @property + def ptr(self): # -> Self@MjvScene: + ... + @contextlib.contextmanager + def override_flags(self, overrides): # -> Generator[None, Any, None]: + """Context manager for temporarily overriding rendering flags. + + Args: + overrides: A mapping specifying rendering flags to override. The keys can + be either lowercase strings or `mjtRndFlag` enum values, and the values + are the overridden flag values, e.g. `{'wireframe': True}` or + `{mujoco.mjtRndFlag.mjRND_WIREFRAME: True}`. See `mujoco.mjtRndFlag` for + the set of valid flags. + + Yields: + None + """ + ... + def free(self): # -> None: + """Frees the native resources held by this MjvScene. 
+ + This is an advanced feature for use when manual memory management is + necessary. This MjvScene object MUST NOT be used after this function has + been called. + """ + ... + @property + def geoms(self): + """Variable-length recarray containing all geoms currently in the buffer.""" + ... + +class MjvPerturb(mujoco.MjvPerturb): + @property + def ptr(self): # -> Self@MjvPerturb: + ... + +class MjvFigure(mujoco.MjvFigure): + @property + def ptr(self): # -> Self@MjvFigure: + ... + @property + def range_(self): ... + @range_.setter + def range_(self, value): # -> None: + ... diff --git a/typings/dm_control/mujoco/wrapper/mjbindings/__init__.pyi b/typings/dm_control/mujoco/wrapper/mjbindings/__init__.pyi new file mode 100644 index 00000000..f252b889 --- /dev/null +++ b/typings/dm_control/mujoco/wrapper/mjbindings/__init__.pyi @@ -0,0 +1,9 @@ +""" +This type stub file was generated by pyright. +""" + +from absl import logging +from dm_control.mujoco.wrapper.mjbindings import constants, enums, functions, sizes +from dm_control.mujoco.wrapper.mjbindings.functions import mjlib + +"""Import core names of MuJoCo ctypes bindings.""" diff --git a/typings/dm_control/mujoco/wrapper/mjbindings/constants.pyi b/typings/dm_control/mujoco/wrapper/mjbindings/constants.pyi new file mode 100644 index 00000000..647a0609 --- /dev/null +++ b/typings/dm_control/mujoco/wrapper/mjbindings/constants.pyi @@ -0,0 +1,80 @@ +""" +This type stub file was generated by pyright. +""" + +"""Automatically generated by binding_generator.py. + +MuJoCo header version: 300 +""" +MUJOCO_MJMODEL_H_ = ... +mjPI = ... +mjMAXVAL = ... +mjMINMU = ... +mjMINIMP = ... +mjMAXIMP = ... +mjMAXCONPAIR = ... +mjMAXTREEDEPTH = ... +mjMAXVFS = ... +mjMAXVFSNAME = ... +mjNEQDATA = ... +mjNDYN = ... +mjNGAIN = ... +mjNBIAS = ... +mjNFLUID = ... +mjNREF = ... +mjNIMP = ... +mjNSOLVER = ... +mjNISLAND = ... +MUJOCO_MJDATA_H_ = ... +MUJOCO_MJRENDER_H_ = ... +mjNAUX = ... +mjMAXTEXTURE = ... +MUJOCO_INCLUDE_MJTNUM_H_ = ... 
+mjUSEDOUBLE = ... +mjMINVAL = ... +MUJOCO_MJUI_H_ = ... +mjMAXUISECT = ... +mjMAXUIITEM = ... +mjMAXUITEXT = ... +mjMAXUINAME = ... +mjMAXUIMULTI = ... +mjMAXUIEDIT = ... +mjMAXUIRECT = ... +mjSEPCLOSED = ... +mjKEY_ESCAPE = ... +mjKEY_ENTER = ... +mjKEY_TAB = ... +mjKEY_BACKSPACE = ... +mjKEY_INSERT = ... +mjKEY_DELETE = ... +mjKEY_RIGHT = ... +mjKEY_LEFT = ... +mjKEY_DOWN = ... +mjKEY_UP = ... +mjKEY_PAGE_UP = ... +mjKEY_PAGE_DOWN = ... +mjKEY_HOME = ... +mjKEY_END = ... +mjKEY_F1 = ... +mjKEY_F2 = ... +mjKEY_F3 = ... +mjKEY_F4 = ... +mjKEY_F5 = ... +mjKEY_F6 = ... +mjKEY_F7 = ... +mjKEY_F8 = ... +mjKEY_F9 = ... +mjKEY_F10 = ... +mjKEY_F11 = ... +mjKEY_F12 = ... +mjKEY_NUMPAD_0 = ... +mjKEY_NUMPAD_9 = ... +MUJOCO_MJVISUALIZE_H_ = ... +mjNGROUP = ... +mjMAXLIGHT = ... +mjMAXOVERLAY = ... +mjMAXLINE = ... +mjMAXLINEPNT = ... +mjMAXPLANEGRID = ... +MUJOCO_MUJOCO_H_ = ... +mjVERSION_HEADER = ... diff --git a/typings/dm_control/mujoco/wrapper/mjbindings/enums.pyi b/typings/dm_control/mujoco/wrapper/mjbindings/enums.pyi new file mode 100644 index 00000000..d77e3e29 --- /dev/null +++ b/typings/dm_control/mujoco/wrapper/mjbindings/enums.pyi @@ -0,0 +1,50 @@ +""" +This type stub file was generated by pyright. +""" + +"""Automatically generated by binding_generator.py. + +MuJoCo header version: 300 +""" +mjtDisableBit = ... +mjtEnableBit = ... +mjtJoint = ... +mjtGeom = ... +mjtCamLight = ... +mjtTexture = ... +mjtIntegrator = ... +mjtCone = ... +mjtJacobian = ... +mjtSolver = ... +mjtEq = ... +mjtWrap = ... +mjtTrn = ... +mjtDyn = ... +mjtGain = ... +mjtBias = ... +mjtObj = ... +mjtConstraint = ... +mjtConstraintState = ... +mjtSensor = ... +mjtStage = ... +mjtDataType = ... +mjtLRMode = ... +mjtFlexSelf = ... +mjtWarning = ... +mjtTimer = ... +mjtGridPos = ... +mjtFramebuffer = ... +mjtDepthMap = ... +mjtFontScale = ... +mjtFont = ... +mjtButton = ... +mjtEvent = ... +mjtCatBit = ... +mjtMouse = ... +mjtPertBit = ... +mjtCamera = ... +mjtLabel = ... +mjtFrame = ... 
+mjtVisFlag = ... +mjtRndFlag = ... +mjtStereo = ... diff --git a/typings/dm_control/mujoco/wrapper/mjbindings/functions.pyi b/typings/dm_control/mujoco/wrapper/mjbindings/functions.pyi new file mode 100644 index 00000000..8a242da9 --- /dev/null +++ b/typings/dm_control/mujoco/wrapper/mjbindings/functions.pyi @@ -0,0 +1,19 @@ +""" +This type stub file was generated by pyright. +""" + +import mujoco + +"""Aliases for the mujoco library, provided for backwards compatibility. + +New code should import mujoco directly, instead of accessing these constants or +mjlib through this module. +""" +mjlib = mujoco +mjDISABLESTRING = ... +mjENABLESTRING = ... +mjTIMERSTRING = ... +mjLABELSTRING = ... +mjFRAMESTRING = ... +mjVISSTRING = ... +mjRNDSTRING = ... diff --git a/typings/dm_control/mujoco/wrapper/mjbindings/sizes.pyi b/typings/dm_control/mujoco/wrapper/mjbindings/sizes.pyi new file mode 100644 index 00000000..00a44bf4 --- /dev/null +++ b/typings/dm_control/mujoco/wrapper/mjbindings/sizes.pyi @@ -0,0 +1,9 @@ +""" +This type stub file was generated by pyright. +""" + +"""Automatically generated by binding_generator.py. + +MuJoCo header version: 300 +""" +array_sizes = ... diff --git a/typings/dm_control/mujoco/wrapper/util.pyi b/typings/dm_control/mujoco/wrapper/util.pyi new file mode 100644 index 00000000..2c51ea7f --- /dev/null +++ b/typings/dm_control/mujoco/wrapper/util.pyi @@ -0,0 +1,27 @@ +""" +This type stub file was generated by pyright. +""" + +import functools + +import numpy as np + +"""Various helper functions and classes.""" +ENV_MJLIB_PATH = ... +DEFAULT_ENCODING = ... + +def to_binary_string(s): # -> bytes: + """Convert text string to binary.""" + ... + +def to_native_string(s): # -> str: + """Convert a text or binary string to the native string format.""" + ... + +def get_mjlib(): # -> Module("mujoco"): + ... 
+ +@functools.wraps(np.ctypeslib.ndpointer) +def ndptr(*args, **kwargs): # -> Any: + """Wraps `np.ctypeslib.ndpointer` to allow passing None for NULL pointers.""" + ... diff --git a/typings/dm_control/rl/__init__.pyi b/typings/dm_control/rl/__init__.pyi new file mode 100644 index 00000000..e897159a --- /dev/null +++ b/typings/dm_control/rl/__init__.pyi @@ -0,0 +1,7 @@ +""" +This type stub file was generated by pyright. +""" + +""" +This type stub file was generated by pyright. +""" diff --git a/typings/dm_control/rl/control.pyi b/typings/dm_control/rl/control.pyi new file mode 100644 index 00000000..1d15e5d7 --- /dev/null +++ b/typings/dm_control/rl/control.pyi @@ -0,0 +1,273 @@ +""" +This type stub file was generated by pyright. +""" + +import abc +import contextlib + +import dm_env + +""" +This type stub file was generated by pyright. +""" +FLAT_OBSERVATION_KEY = ... + +class Environment(dm_env.Environment): + """Class for physics-based reinforcement learning environments.""" + + def __init__( + self, + physics, + task, + time_limit=..., + control_timestep=..., + n_sub_steps=..., + flat_observation=..., + legacy_step: bool = ..., + ) -> None: + """Initializes a new `Environment`. + + Args: + physics: Instance of `Physics`. + task: Instance of `Task`. + time_limit: Optional `int`, maximum time for each episode in seconds. By + default this is set to infinite. + control_timestep: Optional control time-step, in seconds. + n_sub_steps: Optional number of physical time-steps in one control + time-step, aka "action repeats". Can only be supplied if + `control_timestep` is not specified. + flat_observation: If True, observations will be flattened and concatenated + into a single numpy array. + legacy_step: If True, steps the state with up-to-date position and + velocity dependent fields. See Page 6 of + https://arxiv.org/abs/2006.12983 for more information. + + Raises: + ValueError: If both `n_sub_steps` and `control_timestep` are supplied. + """ + ... 
+ def reset(self): + """Starts a new episode and returns the first `TimeStep`.""" + ... + def step(self, action): + """Updates the environment using the action and returns a `TimeStep`.""" + ... + def action_spec(self): + """Returns the action specification for this environment.""" + ... + def step_spec(self): + """May return a specification for the values returned by `step`.""" + ... + def observation_spec(self): + """Returns the observation specification for this environment. + + Infers the spec from the observation, unless the Task implements the + `observation_spec` method. + + Returns: + An dict mapping observation name to `ArraySpec` containing observation + shape and dtype. + """ + ... + @property + def physics(self): ... + @property + def task(self): ... + def control_timestep(self): + """Returns the interval between agent actions in seconds.""" + ... + +def compute_n_steps(control_timestep, physics_timestep, tolerance=...): + """Returns the number of physics timesteps in a single control timestep. + + Args: + control_timestep: Control time-step, should be an integer multiple of the + physics timestep. + physics_timestep: The time-step of the physics simulation. + tolerance: Optional tolerance value for checking if `physics_timestep` + divides `control_timestep`. + + Returns: + The number of physics timesteps in a single control timestep. + + Raises: + ValueError: If `control_timestep` is smaller than `physics_timestep` or if + `control_timestep` is not an integer multiple of `physics_timestep`. + """ + ... + +class Physics(metaclass=abc.ABCMeta): + """Simulates a physical environment.""" + + legacy_step: bool = ... + @abc.abstractmethod + def step(self, n_sub_steps=...): + """Updates the simulation state. + + Args: + n_sub_steps: Optional number of times to repeatedly update the simulation + state. Defaults to 1. + """ + ... + @abc.abstractmethod + def time(self): + """Returns the elapsed simulation time in seconds.""" + ... 
+ @abc.abstractmethod + def timestep(self): + """Returns the simulation timestep.""" + ... + def set_control(self, control): + """Sets the control signal for the actuators.""" + ... + @contextlib.contextmanager + def reset_context(self): + """Context manager for resetting the simulation state. + + Sets the internal simulation to a default state when entering the block. + + ```python + with physics.reset_context(): + # Set joint and object positions. + + physics.step() + ``` + + Yields: + The `Physics` instance. + """ + ... + @abc.abstractmethod + def reset(self): + """Resets internal variables of the physics simulation.""" + ... + @abc.abstractmethod + def after_reset(self): + """Runs after resetting internal variables of the physics simulation.""" + ... + def check_divergence(self): + """Raises a `PhysicsError` if the simulation state is divergent. + + The default implementation is a no-op. + """ + ... + +class PhysicsError(RuntimeError): + """Raised if the state of the physics simulation becomes divergent.""" + + ... + +class Task(metaclass=abc.ABCMeta): + """Defines a task in a `control.Environment`.""" + + @abc.abstractmethod + def initialize_episode(self, physics): + """Sets the state of the environment at the start of each episode. + + Called by `control.Environment` at the start of each episode *within* + `physics.reset_context()` (see the documentation for `base.Physics`). + + Args: + physics: Instance of `Physics`. + """ + ... + @abc.abstractmethod + def before_step(self, action, physics): + """Updates the task from the provided action. + + Called by `control.Environment` before stepping the physics engine. + + Args: + action: numpy array or array-like action values, or a nested structure of + such arrays. Should conform to the specification returned by + `self.action_spec(physics)`. + physics: Instance of `Physics`. + """ + ... + def after_step(self, physics): + """Optional method to update the task after the physics engine has stepped. 
+ + Called by `control.Environment` after stepping the physics engine and before + `control.Environment` calls `get_observation, `get_reward` and + `get_termination`. + + The default implementation is a no-op. + + Args: + physics: Instance of `Physics`. + """ + ... + @abc.abstractmethod + def action_spec(self, physics): + """Returns a specification describing the valid actions for this task. + + Args: + physics: Instance of `Physics`. + + Returns: + A `BoundedArraySpec`, or a nested structure containing `BoundedArraySpec`s + that describe the shapes, dtypes and elementwise lower and upper bounds + for the action array(s) passed to `self.step`. + """ + ... + def step_spec(self, physics): + """Returns a specification describing the time_step for this task. + + Args: + physics: Instance of `Physics`. + + Returns: + A `BoundedArraySpec`, or a nested structure containing `BoundedArraySpec`s + that describe the shapes, dtypes and elementwise lower and upper bounds + for the array(s) returned by `self.step`. + """ + ... + @abc.abstractmethod + def get_observation(self, physics): + """Returns an observation from the environment. + + Args: + physics: Instance of `Physics`. + """ + ... + @abc.abstractmethod + def get_reward(self, physics): + """Returns a reward from the environment. + + Args: + physics: Instance of `Physics`. + """ + ... + def get_termination(self, physics): + """If the episode should end, returns a final discount, otherwise None.""" + ... + def observation_spec(self, physics): + """Optional method that returns the observation spec. + + If not implemented, the Environment infers the spec from the observation. + + Args: + physics: Instance of `Physics`. + + Returns: + A dict mapping observation name to `ArraySpec` containing observation + shape and dtype. + """ + ... + +def flatten_observation(observation, output_key=...): + """Flattens multiple observation arrays into a single numpy array. 
+ + Args: + observation: A mutable mapping from observation names to numpy arrays. + output_key: The key for the flattened observation array in the output. + + Returns: + A mutable mapping of the same type as `observation`. This will contain a + single key-value pair consisting of `output_key` and the flattened + and concatenated observation array. + + Raises: + ValueError: If `observation` is not a `collections.abc.MutableMapping`. + """ + ... diff --git a/typings/dm_control/suite/__init__.pyi b/typings/dm_control/suite/__init__.pyi new file mode 100644 index 00000000..a076307a --- /dev/null +++ b/typings/dm_control/suite/__init__.pyi @@ -0,0 +1,82 @@ +""" +This type stub file was generated by pyright. +""" + +import collections +import inspect +import itertools + +from dm_control.rl import control +from dm_control.suite import ( + acrobot, + ball_in_cup, + cartpole, + cheetah, + dog, + finger, + fish, + hopper, + humanoid, + humanoid_CMU, + lqr, + manipulator, + pendulum, + point_mass, + quadruped, + reacher, + stacker, + swimmer, + walker, +) + +"""A collection of MuJoCo-based Reinforcement Learning environments.""" +_DOMAINS = ... +ALL_TASKS = ... +BENCHMARKING = ... +EASY = ... +HARD = ... +EXTRA = ... +NO_REWARD_VIZ = ... +REWARD_VIZ = ... +TASKS_BY_DOMAIN = ... + +def load(domain_name, task_name, task_kwargs=..., environment_kwargs=..., visualize_reward=...): # -> Any: + """Returns an environment from a domain name, task name and optional settings. + + ```python + env = suite.load('cartpole', 'balance') + ``` + + Args: + domain_name: A string containing the name of a domain. + task_name: A string containing the name of a task. + task_kwargs: Optional `dict` of keyword arguments for the task. + environment_kwargs: Optional `dict` specifying keyword arguments for the + environment. + visualize_reward: Optional `bool`. If `True`, object colours in rendered + frames are set to indicate the reward at each step. Default `False`. 
+ + Returns: + The requested environment. + """ + ... + +def build_environment(domain_name, task_name, task_kwargs=..., environment_kwargs=..., visualize_reward=...): # -> Any: + """Returns an environment from the suite given a domain name and a task name. + + Args: + domain_name: A string containing the name of a domain. + task_name: A string containing the name of a task. + task_kwargs: Optional `dict` specifying keyword arguments for the task. + environment_kwargs: Optional `dict` specifying keyword arguments for the + environment. + visualize_reward: Optional `bool`. If `True`, object colours in rendered + frames are set to indicate the reward at each step. Default `False`. + + Raises: + ValueError: If the domain or task doesn't exist. + + Returns: + An instance of the requested environment. + """ + ... diff --git a/typings/dm_control/suite/acrobot.pyi b/typings/dm_control/suite/acrobot.pyi new file mode 100644 index 00000000..736156ae --- /dev/null +++ b/typings/dm_control/suite/acrobot.pyi @@ -0,0 +1,69 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Acrobot domain.""" +_DEFAULT_TIME_LIMIT = ... +SUITE = ... + +def get_model_and_assets(): # -> tuple[Any, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets.""" + ... + +@SUITE.add("benchmarking") +def swingup(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns Acrobot balance task.""" + ... + +@SUITE.add("benchmarking") +def swingup_sparse(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns Acrobot sparse balance.""" + ... + +class Physics(mujoco.Physics): + """Physics simulation with additional features for the Acrobot domain.""" + + def horizontal(self): + """Returns horizontal (x) component of body frame z-axes.""" + ... + def vertical(self): + """Returns vertical (z) component of body frame z-axes.""" + ... 
+ def to_target(self): # -> floating[Any]: + """Returns the distance from the tip to the target.""" + ... + def orientations(self): # -> NDArray[Unknown]: + """Returns the sines and cosines of the pole angles.""" + ... + +class Balance(base.Task): + """An Acrobot `Task` to swing up and balance the pole.""" + + def __init__(self, sparse, random=...) -> None: + """Initializes an instance of `Balance`. + + Args: + sparse: A `bool` specifying whether to use a sparse (indicator) reward. + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode. + + Shoulder and elbow are set to a random position between [-pi, pi). + + Args: + physics: An instance of `Physics`. + """ + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns an observation of pole orientation and angular velocities.""" + ... + def get_reward(self, physics): # -> float | NDArray[Any]: + """Returns a sparse or a smooth reward, as specified in the constructor.""" + ... diff --git a/typings/dm_control/suite/ball_in_cup.pyi b/typings/dm_control/suite/ball_in_cup.pyi new file mode 100644 index 00000000..e64df342 --- /dev/null +++ b/typings/dm_control/suite/ball_in_cup.pyi @@ -0,0 +1,48 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Ball-in-Cup Domain.""" +_DEFAULT_TIME_LIMIT = ... +_CONTROL_TIMESTEP = ... +SUITE = ... + +def get_model_and_assets(): # -> tuple[Any, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets.""" + ... + +@SUITE.add("benchmarking", "easy") +def catch(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Ball-in-Cup task.""" + ... 
+ +class Physics(mujoco.Physics): + """Physics with additional features for the Ball-in-Cup domain.""" + + def ball_to_target(self): + """Returns the vector from the ball to the target.""" + ... + def in_target(self): # -> float: + """Returns 1 if the ball is in the target, 0 otherwise.""" + ... + +class BallInCup(base.Task): + """The Ball-in-Cup task. Put the ball in the cup.""" + + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode. + + Args: + physics: An instance of `Physics`. + + """ + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns an observation of the state.""" + ... + def get_reward(self, physics): + """Returns a sparse reward.""" + ... diff --git a/typings/dm_control/suite/base.pyi b/typings/dm_control/suite/base.pyi new file mode 100644 index 00000000..679ec81a --- /dev/null +++ b/typings/dm_control/suite/base.pyi @@ -0,0 +1,66 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control.rl import control + +"""Base class for tasks in the Control Suite.""" + +class Task(control.Task): + """Base class for tasks in the Control Suite. + + Actions are mapped directly to the states of MuJoCo actuators: each element of + the action array is used to set the control input for a single actuator. The + ordering of the actuators is the same as in the corresponding MJCF XML file. + + Attributes: + random: A `numpy.random.RandomState` instance. This should be used to + generate all random variables associated with the task, such as random + starting states, observation noise* etc. + + *If sensor noise is enabled in the MuJoCo model then this will be generated + using MuJoCo's internal RNG, which has its own independent state. + """ + + def __init__(self, random=...) -> None: + """Initializes a new continuous control task. 
+ + Args: + random: Optional, either a `numpy.random.RandomState` instance, an integer + seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + @property + def random(self): # -> RandomState: + """Task-specific `numpy.random.RandomState` instance.""" + ... + def action_spec(self, physics): # -> BoundedArray: + """Returns a `BoundedArraySpec` matching the `physics` actuators.""" + ... + def initialize_episode(self, physics): # -> None: + """Resets geom colors to their defaults after starting a new episode. + + Subclasses of `base.Task` must delegate to this method after performing + their own initialization. + + Args: + physics: An instance of `mujoco.Physics`. + """ + ... + def before_step(self, action, physics): # -> None: + """Sets the control signal for the actuators to values in `action`.""" + ... + def after_step(self, physics): # -> None: + """Modifies colors according to the reward.""" + ... + @property + def visualize_reward(self): # -> bool: + ... + @visualize_reward.setter + def visualize_reward(self, value): # -> None: + ... + +_MATERIALS = ... +_DEFAULT = ... +_HIGHLIGHT = ... diff --git a/typings/dm_control/suite/cartpole.pyi b/typings/dm_control/suite/cartpole.pyi new file mode 100644 index 00000000..edb5a538 --- /dev/null +++ b/typings/dm_control/suite/cartpole.pyi @@ -0,0 +1,100 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Cartpole domain.""" +_DEFAULT_TIME_LIMIT = ... +SUITE = ... + +def get_model_and_assets(num_poles=...): # -> tuple[Any | Unknown, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets.""" + ... + +@SUITE.add("benchmarking") +def balance(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Cartpole Balance task.""" + ... 
+ +@SUITE.add("benchmarking") +def balance_sparse(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the sparse reward variant of the Cartpole Balance task.""" + ... + +@SUITE.add("benchmarking") +def swingup(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Cartpole Swing-Up task.""" + ... + +@SUITE.add("benchmarking") +def swingup_sparse(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the sparse reward variant of the Cartpole Swing-Up task.""" + ... + +@SUITE.add() +def two_poles(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Cartpole Balance task with two poles.""" + ... + +@SUITE.add() +def three_poles(time_limit=..., random=..., num_poles=..., sparse=..., environment_kwargs=...): # -> Environment: + """Returns the Cartpole Balance task with three or more poles.""" + ... + +class Physics(mujoco.Physics): + """Physics simulation with additional features for the Cartpole domain.""" + + def cart_position(self): + """Returns the position of the cart.""" + ... + def angular_vel(self): + """Returns the angular velocity of the pole.""" + ... + def pole_angle_cosine(self): + """Returns the cosine of the pole angle.""" + ... + def bounded_position(self): # -> NDArray[Unknown]: + """Returns the state, with pole angle split into sin/cos.""" + ... + +class Balance(base.Task): + """A Cartpole `Task` to balance the pole. + + State is initialized either close to the target configuration or at a random + configuration. + """ + + _CART_RANGE = ... + _ANGLE_COSINE_RANGE = ... + def __init__(self, swing_up, sparse, random=...) -> None: + """Initializes an instance of `Balance`. + + Args: + swing_up: A `bool`, which if `True` sets the cart to the middle of the + slider and the pole pointing towards the ground. Otherwise, sets the + cart to a random position on the slider and the pole to a random + near-vertical position. 
+ sparse: A `bool`, whether to return a sparse or a smooth reward. + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode. + + Initializes the cart and pole according to `swing_up`, and in both cases + adds a small random initial velocity to break symmetry. + + Args: + physics: An instance of `Physics`. + """ + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns an observation of the (bounded) physics state.""" + ... + def get_reward(self, physics): # -> Any: + """Returns a sparse or a smooth reward, as specified in the constructor.""" + ... diff --git a/typings/dm_control/suite/cheetah.pyi b/typings/dm_control/suite/cheetah.pyi new file mode 100644 index 00000000..77cbe56f --- /dev/null +++ b/typings/dm_control/suite/cheetah.pyi @@ -0,0 +1,40 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Cheetah Domain.""" +_DEFAULT_TIME_LIMIT = ... +_RUN_SPEED = ... +SUITE = ... + +def get_model_and_assets(): # -> tuple[Any, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets.""" + ... + +@SUITE.add("benchmarking") +def run(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the run task.""" + ... + +class Physics(mujoco.Physics): + """Physics simulation with additional features for the Cheetah domain.""" + + def speed(self): + """Returns the horizontal speed of the Cheetah.""" + ... + +class Cheetah(base.Task): + """A `Task` to train a running Cheetah.""" + + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode.""" + ... 
+ def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns an observation of the state, ignoring horizontal position.""" + ... + def get_reward(self, physics): # -> float | NDArray[Any]: + """Returns a reward to the agent.""" + ... diff --git a/typings/dm_control/suite/common/__init__.pyi b/typings/dm_control/suite/common/__init__.pyi new file mode 100644 index 00000000..bf485d30 --- /dev/null +++ b/typings/dm_control/suite/common/__init__.pyi @@ -0,0 +1,16 @@ +""" +This type stub file was generated by pyright. +""" + +import os + +from dm_control.utils import io as resources + +"""Functions to manage the common assets for domains.""" +_SUITE_DIR = ... +_FILENAMES = ... +ASSETS = ... + +def read_model(model_filename): # -> Any: + """Reads a model XML file and returns its contents as a string.""" + ... diff --git a/typings/dm_control/suite/dog.pyi b/typings/dm_control/suite/dog.pyi new file mode 100644 index 00000000..e17c3549 --- /dev/null +++ b/typings/dm_control/suite/dog.pyi @@ -0,0 +1,180 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Dog Domain.""" +_DEFAULT_TIME_LIMIT = ... +_CONTROL_TIMESTEP = ... +_MAX_UPRIGHT_ANGLE = ... +_MIN_UPRIGHT_COSINE = ... +_STAND_HEIGHT_FRACTION = ... +_EXCESSIVE_LIMIT_TORQUES = ... +_WALK_SPEED = ... +_TROT_SPEED = ... +_RUN_SPEED = ... +_HINGE_TYPE = ... +_LIMIT_TYPE = ... +SUITE = ... +_ASSET_DIR = ... + +def make_model(floor_size, remove_ball): + """Sets floor size, removes ball and walls (Stand and Move tasks).""" + ... + +def get_model_and_assets(floor_size=..., remove_ball=...): # -> tuple[Unknown, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets.""" + ... + +@SUITE.add("no_reward_visualization") +def stand(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Stand task.""" + ... 
+ +@SUITE.add("no_reward_visualization") +def walk(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Walk task.""" + ... + +@SUITE.add("no_reward_visualization") +def trot(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Trot task.""" + ... + +@SUITE.add("no_reward_visualization") +def run(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Run task.""" + ... + +@SUITE.add("no_reward_visualization", "hard") +def fetch(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Fetch task.""" + ... + +class Physics(mujoco.Physics): + """Physics simulation with additional features for the Dog domain.""" + + def torso_pelvis_height(self): + """Returns the height of the torso.""" + ... + def z_projection(self): # -> NDArray[Unknown]: + """Returns rotation-invariant projection of local frames to the world z.""" + ... + def upright(self): # -> ndarray[Any, dtype[Unknown]]: + """Returns projection from local z-axes to the z-axis of world.""" + ... + def center_of_mass_velocity(self): + """Returns the velocity of the center-of-mass.""" + ... + def torso_com_velocity(self): + """Returns the velocity of the center-of-mass in the torso frame.""" + ... + def com_forward_velocity(self): + """Returns the com velocity in the torso's forward direction.""" + ... + def joint_angles(self): + """Returns the configuration of all hinge joints (skipping free joints).""" + ... + def joint_velocities(self): + """Returns the velocity of all hinge joints (skipping free joints).""" + ... + def inertial_sensors(self): + """Returns inertial sensor readings.""" + ... + def touch_sensors(self): + """Returns touch readings.""" + ... + def foot_forces(self): + """Returns touch readings.""" + ... + def ball_in_head_frame(self): # -> NDArray[Unknown | Any]: + """Returns the ball position and velocity in the frame of the head.""" + ... 
+ def target_in_head_frame(self): + """Returns the target position in the frame of the head.""" + ... + def ball_to_mouth_distance(self): # -> floating[Any]: + """Returns the distance from the ball to the mouth.""" + ... + def ball_to_target_distance(self): # -> floating[Any]: + """Returns the distance from the ball to the target.""" + ... + +class Stand(base.Task): + """A dog stand task generating upright posture.""" + + def __init__(self, random=..., observe_reward_factors=...) -> None: + """Initializes an instance of `Stand`. + + Args: + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + observe_reward_factors: Boolean, whether the factorised reward is a + key in the observation dict returned to the agent. + """ + ... + def initialize_episode(self, physics): # -> None: + """Randomizes initial root velocities and actuator states. + + Args: + physics: An instance of `Physics`. + + """ + ... + def get_observation_components(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns the observations for the Stand task.""" + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns the observation, possibly adding reward factors.""" + ... + def get_reward_factors(self, physics): # -> NDArray[Any]: + """Returns the factorized reward.""" + ... + def get_reward(self, physics): # -> int_: + """Returns the reward, product of reward factors.""" + ... + +class Move(Stand): + """A dog move task for generating locomotion.""" + + def __init__(self, move_speed, random, observe_reward_factors=...) -> None: + """Initializes an instance of `Move`. + + Args: + move_speed: A float. Specifies a target horizontal velocity. + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). 
+ observe_reward_factors: Boolean, whether the factorised reward is a + component of the observation dict. + """ + ... + def get_reward_factors(self, physics): # -> NDArray[Any]: + """Returns the factorized reward.""" + ... + +class Fetch(Stand): + """A dog fetch task to fetch a thrown ball.""" + + def __init__(self, random, observe_reward_factors=...) -> None: + """Initializes an instance of `Move`. + + Args: + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + observe_reward_factors: Boolean, whether the factorised reward is a + component of the observation dict. + """ + ... + def initialize_episode(self, physics): # -> None: + ... + def get_observation_components(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns the common observations for the Stand task.""" + ... + def get_reward_factors(self, physics): # -> NDArray[Any]: + """Returns a reward to the agent.""" + ... diff --git a/typings/dm_control/suite/finger.pyi b/typings/dm_control/suite/finger.pyi new file mode 100644 index 00000000..91296b08 --- /dev/null +++ b/typings/dm_control/suite/finger.pyi @@ -0,0 +1,105 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Finger Domain.""" +_DEFAULT_TIME_LIMIT = ... +_CONTROL_TIMESTEP = ... +_EASY_TARGET_SIZE = ... +_HARD_TARGET_SIZE = ... +_INITIAL_SPIN_VELOCITY = ... +_STOP_VELOCITY = ... +_SPIN_VELOCITY = ... +SUITE = ... + +def get_model_and_assets(): # -> tuple[Any, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets.""" + ... + +@SUITE.add("benchmarking") +def spin(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Spin task.""" + ... 
+ +@SUITE.add("benchmarking") +def turn_easy(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the easy Turn task.""" + ... + +@SUITE.add("benchmarking") +def turn_hard(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the hard Turn task.""" + ... + +class Physics(mujoco.Physics): + """Physics simulation with additional features for the Finger domain.""" + + def touch(self): # -> Any: + """Returns logarithmically scaled signals from the two touch sensors.""" + ... + def hinge_velocity(self): + """Returns the velocity of the hinge joint.""" + ... + def tip_position(self): + """Returns the (x,z) position of the tip relative to the hinge.""" + ... + def bounded_position(self): # -> NDArray[Unknown]: + """Returns the positions, with the hinge angle replaced by tip position.""" + ... + def velocity(self): + """Returns the velocities (extracted from sensordata).""" + ... + def target_position(self): + """Returns the (x,z) position of the target relative to the hinge.""" + ... + def to_target(self): + """Returns the vector from the tip to the target.""" + ... + def dist_to_target(self): + """Returns the signed distance to the target surface, negative is inside.""" + ... + +class Spin(base.Task): + """A Finger `Task` to spin the stopped body.""" + + def __init__(self, random=...) -> None: + """Initializes a new `Spin` instance. + + Args: + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + def initialize_episode(self, physics): # -> None: + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns state and touch sensors, and target info.""" + ... + def get_reward(self, physics): # -> float: + """Returns a sparse reward.""" + ... 
+ +class Turn(base.Task): + """A Finger `Task` to turn the body to a target angle.""" + + def __init__(self, target_radius, random=...) -> None: + """Initializes a new `Turn` instance. + + Args: + target_radius: Radius of the target site, which specifies the goal angle. + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + def initialize_episode(self, physics): # -> None: + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns state, touch sensors, and target info.""" + ... + def get_reward(self, physics): # -> float: + ... diff --git a/typings/dm_control/suite/fish.pyi b/typings/dm_control/suite/fish.pyi new file mode 100644 index 00000000..3641bd3e --- /dev/null +++ b/typings/dm_control/suite/fish.pyi @@ -0,0 +1,89 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Fish Domain.""" +_DEFAULT_TIME_LIMIT = ... +_CONTROL_TIMESTEP = ... +_JOINTS = ... +SUITE = ... + +def get_model_and_assets(): # -> tuple[Any, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets.""" + ... + +@SUITE.add("benchmarking") +def upright(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Fish Upright task.""" + ... + +@SUITE.add("benchmarking") +def swim(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Fish Swim task.""" + ... + +class Physics(mujoco.Physics): + """Physics simulation with additional features for the Fish domain.""" + + def upright(self): + """Returns projection from z-axes of torso to the z-axes of worldbody.""" + ... + def torso_velocity(self): + """Returns velocities and angular velocities of the torso.""" + ... + def joint_velocities(self): + """Returns the joint velocities.""" + ... 
+ def joint_angles(self): + """Returns the joint positions.""" + ... + def mouth_to_target(self): + """Returns a vector, from mouth to target in local coordinate of mouth.""" + ... + +class Upright(base.Task): + """A Fish `Task` for getting the torso upright with smooth reward.""" + + def __init__(self, random=...) -> None: + """Initializes an instance of `Upright`. + + Args: + random: Either an existing `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically. + """ + ... + def initialize_episode(self, physics): # -> None: + """Randomizes the tail and fin angles and the orientation of the Fish.""" + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns an observation of joint angles, velocities and uprightness.""" + ... + def get_reward(self, physics): # -> float | NDArray[Any]: + """Returns a smooth reward.""" + ... + +class Swim(base.Task): + """A Fish `Task` for swimming with smooth reward.""" + + def __init__(self, random=...) -> None: + """Initializes an instance of `Swim`. + + Args: + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode.""" + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns an observation of joints, target direction and velocities.""" + ... + def get_reward(self, physics): + """Returns a smooth reward.""" + ... diff --git a/typings/dm_control/suite/hopper.pyi b/typings/dm_control/suite/hopper.pyi new file mode 100644 index 00000000..30f38975 --- /dev/null +++ b/typings/dm_control/suite/hopper.pyi @@ -0,0 +1,64 @@ +""" +This type stub file was generated by pyright. 
+""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Hopper domain.""" +SUITE = ... +_CONTROL_TIMESTEP = ... +_DEFAULT_TIME_LIMIT = ... +_STAND_HEIGHT = ... +_HOP_SPEED = ... + +def get_model_and_assets(): # -> tuple[Any, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets.""" + ... + +@SUITE.add("benchmarking") +def stand(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns a Hopper that strives to stand upright, balancing its pose.""" + ... + +@SUITE.add("benchmarking") +def hop(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns a Hopper that strives to hop forward.""" + ... + +class Physics(mujoco.Physics): + """Physics simulation with additional features for the Hopper domain.""" + + def height(self): + """Returns height of torso with respect to foot.""" + ... + def speed(self): + """Returns horizontal speed of the Hopper.""" + ... + def touch(self): # -> Any: + """Returns the signals from two foot touch sensors.""" + ... + +class Hopper(base.Task): + """A Hopper's `Task` to train a standing and a jumping Hopper.""" + + def __init__(self, hopping, random=...) -> None: + """Initialize an instance of `Hopper`. + + Args: + hopping: Boolean, if True the task is to hop forwards, otherwise it is to + balance upright. + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode.""" + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns an observation of positions, velocities and touch sensors.""" + ... + def get_reward(self, physics): # -> float | NDArray[floating[Any]] | NDArray[bool_] | Any: + """Returns a reward applicable to the performed task.""" + ... 
diff --git a/typings/dm_control/suite/humanoid.pyi b/typings/dm_control/suite/humanoid.pyi new file mode 100644 index 00000000..ab173f2b --- /dev/null +++ b/typings/dm_control/suite/humanoid.pyi @@ -0,0 +1,95 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Humanoid Domain.""" +_DEFAULT_TIME_LIMIT = ... +_CONTROL_TIMESTEP = ... +_STAND_HEIGHT = ... +_WALK_SPEED = ... +_RUN_SPEED = ... +SUITE = ... + +def get_model_and_assets(): # -> tuple[Any, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets.""" + ... + +@SUITE.add("benchmarking") +def stand(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Stand task.""" + ... + +@SUITE.add("benchmarking") +def walk(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Walk task.""" + ... + +@SUITE.add("benchmarking") +def run(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Run task.""" + ... + +@SUITE.add() +def run_pure_state(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Run task.""" + ... + +class Physics(mujoco.Physics): + """Physics simulation with additional features for the Walker domain.""" + + def torso_upright(self): + """Returns projection from z-axes of torso to the z-axes of world.""" + ... + def head_height(self): + """Returns the height of the torso.""" + ... + def center_of_mass_position(self): + """Returns position of the center-of-mass.""" + ... + def center_of_mass_velocity(self): + """Returns the velocity of the center-of-mass.""" + ... + def torso_vertical_orientation(self): + """Returns the z-projection of the torso orientation matrix.""" + ... + def joint_angles(self): + """Returns the state without global orientation or position.""" + ... 
+ def extremities(self): # -> NDArray[Unknown]: + """Returns end effector positions in egocentric frame.""" + ... + +class Humanoid(base.Task): + """A humanoid task.""" + + def __init__(self, move_speed, pure_state, random=...) -> None: + """Initializes an instance of `Humanoid`. + + Args: + move_speed: A float. If this value is zero, reward is given simply for + standing up. Otherwise this specifies a target horizontal velocity for + the walking task. + pure_state: A bool. Whether the observations consist of the pure MuJoCo + state or includes some useful features thereof. + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode. + + Args: + physics: An instance of `Physics`. + + """ + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns either the pure state or a set of egocentric features.""" + ... + def get_reward(self, physics): # -> Any: + """Returns a reward to the agent.""" + ... diff --git a/typings/dm_control/suite/humanoid_CMU.pyi b/typings/dm_control/suite/humanoid_CMU.pyi new file mode 100644 index 00000000..14b43d7a --- /dev/null +++ b/typings/dm_control/suite/humanoid_CMU.pyi @@ -0,0 +1,87 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Humanoid_CMU Domain.""" +_DEFAULT_TIME_LIMIT = ... +_CONTROL_TIMESTEP = ... +_STAND_HEIGHT = ... +_WALK_SPEED = ... +_RUN_SPEED = ... +SUITE = ... + +def get_model_and_assets(): # -> tuple[Any, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets.""" + ... + +@SUITE.add() +def stand(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Stand task.""" + ... 
+ +@SUITE.add() +def walk(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Walk task.""" + ... + +@SUITE.add() +def run(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Run task.""" + ... + +class Physics(mujoco.Physics): + """Physics simulation with additional features for the humanoid_CMU domain.""" + + def thorax_upright(self): + """Returns projection from y-axes of thorax to the z-axes of world.""" + ... + def head_height(self): + """Returns the height of the head.""" + ... + def center_of_mass_position(self): + """Returns position of the center-of-mass.""" + ... + def center_of_mass_velocity(self): + """Returns the velocity of the center-of-mass.""" + ... + def torso_vertical_orientation(self): + """Returns the z-projection of the thorax orientation matrix.""" + ... + def joint_angles(self): + """Returns the state without global orientation or position.""" + ... + def extremities(self): # -> NDArray[Unknown]: + """Returns end effector positions in egocentric frame.""" + ... + +class HumanoidCMU(base.Task): + """A task for the CMU Humanoid.""" + + def __init__(self, move_speed, random=...) -> None: + """Initializes an instance of `Humanoid_CMU`. + + Args: + move_speed: A float. If this value is zero, reward is given simply for + standing up. Otherwise this specifies a target horizontal velocity for + the walking task. + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + def initialize_episode(self, physics): # -> None: + """Sets a random collision-free configuration at the start of each episode. + + Args: + physics: An instance of `Physics`. + """ + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns a set of egocentric features.""" + ... 
+ def get_reward(self, physics): # -> Any: + """Returns a reward to the agent.""" + ... diff --git a/typings/dm_control/suite/lqr.pyi b/typings/dm_control/suite/lqr.pyi new file mode 100644 index 00000000..ab441545 --- /dev/null +++ b/typings/dm_control/suite/lqr.pyi @@ -0,0 +1,79 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Procedurally generated LQR domain.""" +_DEFAULT_TIME_LIMIT = ... +_CONTROL_COST_COEF = ... +SUITE = ... + +def get_model_and_assets(n_bodies, n_actuators, random): # -> tuple[Unknown, dict[str, Any]]: + """Returns the model description as an XML string and a dict of assets. + + Args: + n_bodies: An int, number of bodies of the LQR. + n_actuators: An int, number of actuated bodies of the LQR. `n_actuators` + should be less or equal than `n_bodies`. + random: A `numpy.random.RandomState` instance. + + Returns: + A tuple `(model_xml_string, assets)`, where `assets` is a dict consisting of + `{filename: contents_string}` pairs. + """ + ... + +@SUITE.add() +def lqr_2_1(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns an LQR environment with 2 bodies of which the first is actuated.""" + ... + +@SUITE.add() +def lqr_6_2(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns an LQR environment with 6 bodies of which first 2 are actuated.""" + ... + +class Physics(mujoco.Physics): + """Physics simulation with additional features for the LQR domain.""" + + def state_norm(self): # -> floating[Any]: + """Returns the norm of the physics state.""" + ... + +class LQRLevel(base.Task): + """A Linear Quadratic Regulator `Task`.""" + + _TERMINAL_TOL = ... + def __init__(self, control_cost_coef, random=...) -> None: + """Initializes an LQR level with cost = sum(states^2) + c*sum(controls^2). + + Args: + control_cost_coef: The coefficient of the control cost. 
+ random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + + Raises: + ValueError: If the control cost coefficient is not positive. + """ + ... + @property + def control_cost_coef(self): # -> Unknown: + ... + def initialize_episode(self, physics): # -> None: + """Random state sampled from a unit sphere.""" + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns an observation of the state.""" + ... + def get_reward(self, physics): # -> Any: + """Returns a quadratic state and control reward.""" + ... + def get_evaluation(self, physics): # -> float: + """Returns a sparse evaluation reward that is not used for learning.""" + ... + def get_termination(self, physics): # -> float | None: + """Terminates when the state norm is smaller than epsilon.""" + ... diff --git a/typings/dm_control/suite/manipulator.pyi b/typings/dm_control/suite/manipulator.pyi new file mode 100644 index 00000000..026c90c6 --- /dev/null +++ b/typings/dm_control/suite/manipulator.pyi @@ -0,0 +1,85 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Planar Manipulator domain.""" +_CLOSE = ... +_CONTROL_TIMESTEP = ... +_TIME_LIMIT = ... +_P_IN_HAND = ... +_P_IN_TARGET = ... +_ARM_JOINTS = ... +_ALL_PROPS = ... +_TOUCH_SENSORS = ... +SUITE = ... + +def make_model(use_peg, insert): # -> tuple[Unknown, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets.""" + ... + +@SUITE.add("benchmarking", "hard") +def bring_ball(fully_observable=..., time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns manipulator bring task with the ball prop.""" + ... 
+ +@SUITE.add("hard") +def bring_peg(fully_observable=..., time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns manipulator bring task with the peg prop.""" + ... + +@SUITE.add("hard") +def insert_ball(fully_observable=..., time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns manipulator insert task with the ball prop.""" + ... + +@SUITE.add("hard") +def insert_peg(fully_observable=..., time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns manipulator insert task with the peg prop.""" + ... + +class Physics(mujoco.Physics): + """Physics with additional features for the Planar Manipulator domain.""" + + def bounded_joint_pos(self, joint_names): # -> NDArray[Any]: + """Returns joint positions as (sin, cos) values.""" + ... + def joint_vel(self, joint_names): + """Returns joint velocities.""" + ... + def body_2d_pose(self, body_names, orientation=...): # -> NDArray[Unknown]: + """Returns positions and/or orientations of bodies.""" + ... + def touch(self): # -> Any: + ... + def site_distance(self, site1, site2): # -> floating[Any]: + ... + +class Bring(base.Task): + """A Bring `Task`: bring the prop to the target.""" + + def __init__(self, use_peg, insert, fully_observable, random=...) -> None: + """Initialize an instance of the `Bring` task. + + Args: + use_peg: A `bool`, whether to replace the ball prop with the peg prop. + insert: A `bool`, whether to insert the prop in a receptacle. + fully_observable: A `bool`, whether the observation should contain the + position and velocity of the object being manipulated and the target + location. + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode.""" + ... 
+ def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns either features or only sensors (to be used with pixels).""" + ... + def get_reward(self, physics): # -> float | NDArray[Any]: + """Returns a reward to the agent.""" + ... diff --git a/typings/dm_control/suite/pendulum.pyi b/typings/dm_control/suite/pendulum.pyi new file mode 100644 index 00000000..420d2681 --- /dev/null +++ b/typings/dm_control/suite/pendulum.pyi @@ -0,0 +1,72 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Pendulum domain.""" +_DEFAULT_TIME_LIMIT = ... +_ANGLE_BOUND = ... +_COSINE_BOUND = ... +SUITE = ... + +def get_model_and_assets(): # -> tuple[Any, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets.""" + ... + +@SUITE.add("benchmarking") +def swingup(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns pendulum swingup task .""" + ... + +class Physics(mujoco.Physics): + """Physics simulation with additional features for the Pendulum domain.""" + + def pole_vertical(self): + """Returns vertical (z) component of pole frame.""" + ... + def angular_velocity(self): + """Returns the angular velocity of the pole.""" + ... + def pole_orientation(self): + """Returns both horizontal and vertical components of pole frame.""" + ... + +class SwingUp(base.Task): + """A Pendulum `Task` to swing up and balance the pole.""" + + def __init__(self, random=...) -> None: + """Initialize an instance of `Pendulum`. + + Args: + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode. + + Pole is set to a random angle between [-pi, pi). + + Args: + physics: An instance of `Physics`. 
+ + """ + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns an observation. + + Observations are states concatenating pole orientation and angular velocity + and pixels from fixed camera. + + Args: + physics: An instance of `physics`, Pendulum physics. + + Returns: + A `dict` of observation. + """ + ... + def get_reward(self, physics): # -> float | NDArray[Any]: + ... diff --git a/typings/dm_control/suite/point_mass.pyi b/typings/dm_control/suite/point_mass.pyi new file mode 100644 index 00000000..6486cf4e --- /dev/null +++ b/typings/dm_control/suite/point_mass.pyi @@ -0,0 +1,65 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Point-mass domain.""" +_DEFAULT_TIME_LIMIT = ... +SUITE = ... + +def get_model_and_assets(): # -> tuple[Any, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets.""" + ... + +@SUITE.add("benchmarking", "easy") +def easy(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the easy point_mass task.""" + ... + +@SUITE.add() +def hard(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the hard point_mass task.""" + ... + +class Physics(mujoco.Physics): + """physics for the point_mass domain.""" + + def mass_to_target(self): + """Returns the vector from mass to target in global coordinate.""" + ... + def mass_to_target_dist(self): # -> floating[Any]: + """Returns the distance from mass to the target.""" + ... + +class PointMass(base.Task): + """A point_mass `Task` to reach target with smooth reward.""" + + def __init__(self, randomize_gains, random=...) -> None: + """Initialize an instance of `PointMass`. + + Args: + randomize_gains: A `bool`, whether to randomize the actuator gains. 
+ random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode. + + If _randomize_gains is True, the relationship between the controls and + the joints is randomized, so that each control actuates a random linear + combination of joints. + + Args: + physics: An instance of `mujoco.Physics`. + """ + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns an observation of the state.""" + ... + def get_reward(self, physics): # -> Any: + """Returns a reward to the agent.""" + ... diff --git a/typings/dm_control/suite/quadruped.pyi b/typings/dm_control/suite/quadruped.pyi new file mode 100644 index 00000000..46fd9935 --- /dev/null +++ b/typings/dm_control/suite/quadruped.pyi @@ -0,0 +1,154 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.mujoco.wrapper import mjbindings +from dm_control.suite import base + +"""Quadruped Domain.""" +enums = mjbindings.enums +mjlib = mjbindings.mjlib +_DEFAULT_TIME_LIMIT = ... +_CONTROL_TIMESTEP = ... +_RUN_SPEED = ... +_WALK_SPEED = ... +_HEIGHTFIELD_ID = ... +_TERRAIN_SMOOTHNESS = ... +_TERRAIN_BUMP_SCALE = ... +_TOES = ... +_WALLS = ... +SUITE = ... + +def make_model(floor_size=..., terrain=..., rangefinders=..., walls_and_ball=...): + """Returns the model XML string.""" + ... + +@SUITE.add() +def walk(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Walk task.""" + ... + +@SUITE.add() +def run(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Run task.""" + ... + +@SUITE.add() +def escape(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Escape task.""" + ... 
+ +@SUITE.add() +def fetch(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Fetch task.""" + ... + +class Physics(mujoco.Physics): + """Physics simulation with additional features for the Quadruped domain.""" + + def torso_upright(self): # -> NDArray[Unknown]: + """Returns the dot-product of the torso z-axis and the global z-axis.""" + ... + def torso_velocity(self): + """Returns the velocity of the torso, in the local frame.""" + ... + def egocentric_state(self): # -> NDArray[Unknown]: + """Returns the state without global orientation or position.""" + ... + def toe_positions(self): + """Returns toe positions in egocentric frame.""" + ... + def force_torque(self): # -> Any: + """Returns scaled force/torque sensor readings at the toes.""" + ... + def imu(self): + """Returns IMU-like sensor readings.""" + ... + def rangefinder(self): # -> NDArray[Any]: + """Returns scaled rangefinder sensor readings.""" + ... + def origin_distance(self): # -> NDArray[floating[Any]]: + """Returns the distance from the origin to the workspace.""" + ... + def origin(self): + """Returns origin position in the torso frame.""" + ... + def ball_state(self): # -> ndarray[Any, Unknown]: + """Returns ball position and velocity relative to the torso frame.""" + ... + def target_position(self): + """Returns target position in torso frame.""" + ... + def ball_to_target_distance(self): # -> floating[Any]: + """Returns horizontal distance from the ball to the target.""" + ... + def self_to_ball_distance(self): # -> floating[Any]: + """Returns horizontal distance from the quadruped workspace to the ball.""" + ... + +class Move(base.Task): + """A quadruped task solved by moving forward at a designated speed.""" + + def __init__(self, desired_speed, random=...) -> None: + """Initializes an instance of `Move`. + + Args: + desired_speed: A float. If this value is zero, reward is given simply + for standing upright. 
Otherwise this specifies the horizontal velocity + at which the velocity-dependent reward component is maximized. + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode. + + Args: + physics: An instance of `Physics`. + + """ + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns an observation to the agent.""" + ... + def get_reward(self, physics): # -> float | NDArray[floating[Any]] | NDArray[bool_]: + """Returns a reward to the agent.""" + ... + +class Escape(base.Task): + """A quadruped task solved by escaping a bowl-shaped terrain.""" + + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode. + + Args: + physics: An instance of `Physics`. + + """ + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns an observation to the agent.""" + ... + def get_reward(self, physics): # -> float | NDArray[floating[Any]] | NDArray[bool_]: + """Returns a reward to the agent.""" + ... + +class Fetch(base.Task): + """A quadruped task solved by bringing a ball to the origin.""" + + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode. + + Args: + physics: An instance of `Physics`. + + """ + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns an observation to the agent.""" + ... + def get_reward(self, physics): # -> float | NDArray[floating[Any]]: + """Returns a reward to the agent.""" + ... 
diff --git a/typings/dm_control/suite/reacher.pyi b/typings/dm_control/suite/reacher.pyi new file mode 100644 index 00000000..d9adcbbd --- /dev/null +++ b/typings/dm_control/suite/reacher.pyi @@ -0,0 +1,59 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Reacher domain.""" +SUITE = ... +_DEFAULT_TIME_LIMIT = ... +_BIG_TARGET = ... +_SMALL_TARGET = ... + +def get_model_and_assets(): # -> tuple[Any, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets.""" + ... + +@SUITE.add("benchmarking", "easy") +def easy(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns reacher with sparse reward with 5e-2 tol and randomized target.""" + ... + +@SUITE.add("benchmarking") +def hard(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns reacher with sparse reward with 1e-2 tol and randomized target.""" + ... + +class Physics(mujoco.Physics): + """Physics simulation with additional features for the Reacher domain.""" + + def finger_to_target(self): + """Returns the vector from target to finger in global coordinates.""" + ... + def finger_to_target_dist(self): # -> floating[Any]: + """Returns the signed distance between the finger and target surface.""" + ... + +class Reacher(base.Task): + """A reacher `Task` to reach the target.""" + + def __init__(self, target_size, random=...) -> None: + """Initialize an instance of `Reacher`. + + Args: + target_size: A `float`, tolerance to determine whether finger reached the + target. + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode.""" + ... 
+ def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns an observation of the state and the target position.""" + ... + def get_reward(self, physics): # -> float | NDArray[Any]: + ... diff --git a/typings/dm_control/suite/stacker.pyi b/typings/dm_control/suite/stacker.pyi new file mode 100644 index 00000000..89fd34cd --- /dev/null +++ b/typings/dm_control/suite/stacker.pyi @@ -0,0 +1,69 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Planar Stacker domain.""" +_CLOSE = ... +_CONTROL_TIMESTEP = ... +_TIME_LIMIT = ... +_ARM_JOINTS = ... +SUITE = ... + +def make_model(n_boxes): # -> tuple[Unknown, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets.""" + ... + +@SUITE.add("hard") +def stack_2(fully_observable=..., time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns stacker task with 2 boxes.""" + ... + +@SUITE.add("hard") +def stack_4(fully_observable=..., time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns stacker task with 4 boxes.""" + ... + +class Physics(mujoco.Physics): + """Physics with additional features for the Planar Manipulator domain.""" + + def bounded_joint_pos(self, joint_names): # -> NDArray[Any]: + """Returns joint positions as (sin, cos) values.""" + ... + def joint_vel(self, joint_names): + """Returns joint velocities.""" + ... + def body_2d_pose(self, body_names, orientation=...): # -> NDArray[Unknown]: + """Returns positions and/or orientations of bodies.""" + ... + def touch(self): # -> Any: + ... + def site_distance(self, site1, site2): # -> floating[Any]: + ... + +class Stack(base.Task): + """A Stack `Task`: stack the boxes.""" + + def __init__(self, n_boxes, fully_observable, random=...) -> None: + """Initialize an instance of the `Stack` task. + + Args: + n_boxes: An `int`, number of boxes to stack. 
+ fully_observable: A `bool`, whether the observation should contain the + positions and velocities of the boxes and the location of the target. + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode.""" + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns either features or only sensors (to be used with pixels).""" + ... + def get_reward(self, physics): # -> float | NDArray[floating[Any]] | NDArray[bool_]: + """Returns a reward to the agent.""" + ... diff --git a/typings/dm_control/suite/swimmer.pyi b/typings/dm_control/suite/swimmer.pyi new file mode 100644 index 00000000..5439eb27 --- /dev/null +++ b/typings/dm_control/suite/swimmer.pyi @@ -0,0 +1,82 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Procedurally generated Swimmer domain.""" +_DEFAULT_TIME_LIMIT = ... +_CONTROL_TIMESTEP = ... +SUITE = ... + +def get_model_and_assets(n_joints): # -> tuple[Unknown, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets. + + Args: + n_joints: An integer specifying the number of joints in the swimmer. + + Returns: + A tuple `(model_xml_string, assets)`, where `assets` is a dict consisting of + `{filename: contents_string}` pairs. + """ + ... + +@SUITE.add("benchmarking") +def swimmer6(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns a 6-link swimmer.""" + ... + +@SUITE.add("benchmarking") +def swimmer15(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns a 15-link swimmer.""" + ... 
+ +def swimmer(n_links=..., time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns a swimmer with n links.""" + ... + +class Physics(mujoco.Physics): + """Physics simulation with additional features for the swimmer domain.""" + + def nose_to_target(self): + """Returns a vector from nose to target in local coordinate of the head.""" + ... + def nose_to_target_dist(self): # -> floating[Any]: + """Returns the distance from the nose to the target.""" + ... + def body_velocities(self): + """Returns local body velocities: x,y linear, z rotational.""" + ... + def joints(self): + """Returns all internal joint angles (excluding root joints).""" + ... + +class Swimmer(base.Task): + """A swimmer `Task` to reach the target or just swim.""" + + def __init__(self, random=...) -> None: + """Initializes an instance of `Swimmer`. + + Args: + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode. + + Initializes the swimmer orientation to [-pi, pi) and the relative joint + angle of each joint uniformly within its range. + + Args: + physics: An instance of `Physics`. + """ + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns an observation of joint angles, body velocities and target.""" + ... + def get_reward(self, physics): # -> float | NDArray[Any]: + """Returns a smooth reward.""" + ... diff --git a/typings/dm_control/suite/utils/__init__.pyi b/typings/dm_control/suite/utils/__init__.pyi new file mode 100644 index 00000000..01042072 --- /dev/null +++ b/typings/dm_control/suite/utils/__init__.pyi @@ -0,0 +1,5 @@ +""" +This type stub file was generated by pyright. 
+""" + +"""Utility functions used in the control suite.""" diff --git a/typings/dm_control/suite/utils/randomizers.pyi b/typings/dm_control/suite/utils/randomizers.pyi new file mode 100644 index 00000000..ba722e6c --- /dev/null +++ b/typings/dm_control/suite/utils/randomizers.pyi @@ -0,0 +1,28 @@ +""" +This type stub file was generated by pyright. +""" + +"""Randomization functions.""" + +def random_limited_quaternion(random, limit): # -> NDArray[float64]: + """Generates a random quaternion limited to the specified rotations.""" + ... + +def randomize_limited_and_rotational_joints(physics, random=...): # -> None: + """Randomizes the positions of joints defined in the physics body. + + The following randomization rules apply: + - Bounded joints (hinges or sliders) are sampled uniformly in the bounds. + - Unbounded hinges are samples uniformly in [-pi, pi] + - Quaternions for unlimited free joints and ball joints are sampled + uniformly on the unit 3-sphere. + - Quaternions for limited ball joints are sampled uniformly on a sector + of the unit 3-sphere. + - The linear degrees of freedom of free joints are not randomized. + + Args: + physics: Instance of 'Physics' class that holds a loaded model. + random: Optional instance of 'np.random.RandomState'. Defaults to the global + NumPy random state. + """ + ... diff --git a/typings/dm_control/suite/walker.pyi b/typings/dm_control/suite/walker.pyi new file mode 100644 index 00000000..b0c34eb5 --- /dev/null +++ b/typings/dm_control/suite/walker.pyi @@ -0,0 +1,82 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import mujoco +from dm_control.suite import base + +"""Planar Walker Domain.""" +_DEFAULT_TIME_LIMIT = ... +_CONTROL_TIMESTEP = ... +_STAND_HEIGHT = ... +_WALK_SPEED = ... +_RUN_SPEED = ... +SUITE = ... + +def get_model_and_assets(): # -> tuple[Any, dict[str, Any]]: + """Returns a tuple containing the model XML string and a dict of assets.""" + ... 
+ +@SUITE.add("benchmarking") +def stand(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Stand task.""" + ... + +@SUITE.add("benchmarking") +def walk(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Walk task.""" + ... + +@SUITE.add("benchmarking") +def run(time_limit=..., random=..., environment_kwargs=...): # -> Environment: + """Returns the Run task.""" + ... + +class Physics(mujoco.Physics): + """Physics simulation with additional features for the Walker domain.""" + + def torso_upright(self): + """Returns projection from z-axes of torso to the z-axes of world.""" + ... + def torso_height(self): + """Returns the height of the torso.""" + ... + def horizontal_velocity(self): + """Returns the horizontal velocity of the center-of-mass.""" + ... + def orientations(self): + """Returns planar orientations of all bodies.""" + ... + +class PlanarWalker(base.Task): + """A planar walker task.""" + + def __init__(self, move_speed, random=...) -> None: + """Initializes an instance of `PlanarWalker`. + + Args: + move_speed: A float. If this value is zero, reward is given simply for + standing up. Otherwise this specifies a target horizontal velocity for + the walking task. + random: Optional, either a `numpy.random.RandomState` instance, an + integer seed for creating a new `RandomState`, or None to select a seed + automatically (default). + """ + ... + def initialize_episode(self, physics): # -> None: + """Sets the state of the environment at the start of each episode. + + In 'standing' mode, use initial orientation and small velocities. + In 'random' mode, randomize joint angles and let fall to the floor. + + Args: + physics: An instance of `Physics`. + + """ + ... + def get_observation(self, physics): # -> OrderedDict[Unknown, Unknown]: + """Returns an observation of body orientations, height and velocites.""" + ... + def get_reward(self, physics): + """Returns a reward to the agent.""" + ... 
diff --git a/typings/dm_control/utils/__init__.pyi b/typings/dm_control/utils/__init__.pyi new file mode 100644 index 00000000..cea7ef96 --- /dev/null +++ b/typings/dm_control/utils/__init__.pyi @@ -0,0 +1,3 @@ +""" +This type stub file was generated by pyright. +""" diff --git a/typings/dm_control/utils/containers.pyi b/typings/dm_control/utils/containers.pyi new file mode 100644 index 00000000..72ee8325 --- /dev/null +++ b/typings/dm_control/utils/containers.pyi @@ -0,0 +1,74 @@ +""" +This type stub file was generated by pyright. +""" + +import collections + +"""Container classes used in control domains.""" +_NAME_ALREADY_EXISTS = ... + +class TaggedTasks(collections.abc.Mapping): + """Maps task names to their corresponding factory functions with tags. + + To store a function in a `TaggedTasks` container, we can use its `.add` + decorator: + + ```python + tasks = TaggedTasks() + + @tasks.add('easy', 'stable') + def example_task(): + ... + return environment + + environment_factory = tasks['example_task'] + + # Or to restrict to a given tag: + environment_factory = tasks.tagged('easy')['example_task'] + ``` + """ + + def __init__(self, allow_overriding_keys=...) -> None: + """Initializes a new `TaggedTasks` container. + + Args: + allow_overriding_keys: Boolean, whether `add` can override existing keys + within the container. If False (default), calling `add` multiple times + with the same function name will result in a `ValueError`. + """ + ... + def add(self, *tags): # -> (factory_func: Unknown) -> Unknown: + """Decorator that adds a factory function to the container with tags. + + Args: + *tags: Strings specifying the tags for this function. + + Returns: + The same function. + + Raises: + ValueError: if a function with the same name already exists within the + container and `allow_overriding_keys` is False. + """ + ... + def tagged(self, *tags): # -> dict[Unknown, Unknown]: + """Returns a (possibly empty) dict of functions matching all the given tags. 
+ + Args: + *tags: Strings specifying tags to query by. + + Returns: + A dict of `{name: function}` containing all the functions that are tagged + by all of the strings in `tags`. + """ + ... + def tags(self): # -> list[Unknown]: + """Returns a list of all the tags in this container.""" + ... + def __getitem__(self, k): ... + def __iter__(self): # -> Iterator[Unknown]: + ... + def __len__(self): # -> int: + ... + def __repr__(self): # -> str: + ... diff --git a/typings/dm_control/utils/inverse_kinematics.pyi b/typings/dm_control/utils/inverse_kinematics.pyi new file mode 100644 index 00000000..e2f2cd3d --- /dev/null +++ b/typings/dm_control/utils/inverse_kinematics.pyi @@ -0,0 +1,96 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control.mujoco.wrapper import mjbindings + +"""Functions for computing inverse kinematics on MuJoCo models.""" +mjlib = mjbindings.mjlib +_INVALID_JOINT_NAMES_TYPE = ... +_REQUIRE_TARGET_POS_OR_QUAT = ... +IKResult = ... + +def qpos_from_site_pose( + physics, + site_name, + target_pos=..., + target_quat=..., + joint_names=..., + tol=..., + rot_weight=..., + regularization_threshold=..., + regularization_strength=..., + max_update_norm=..., + progress_thresh=..., + max_steps=..., + inplace=..., +): # -> IKResult: + """Find joint positions that satisfy a target site position and/or rotation. + + Args: + physics: A `mujoco.Physics` instance. + site_name: A string specifying the name of the target site. + target_pos: A (3,) numpy array specifying the desired Cartesian position of + the site, or None if the position should be unconstrained (default). + One or both of `target_pos` or `target_quat` must be specified. + target_quat: A (4,) numpy array specifying the desired orientation of the + site as a quaternion, or None if the orientation should be unconstrained + (default). One or both of `target_pos` or `target_quat` must be specified. 
+ joint_names: (optional) A list, tuple or numpy array specifying the names of + one or more joints that can be manipulated in order to achieve the target + site pose. If None (default), all joints may be manipulated. + tol: (optional) Precision goal for `qpos` (the maximum value of `err_norm` + in the stopping criterion). + rot_weight: (optional) Determines the weight given to rotational error + relative to translational error. + regularization_threshold: (optional) L2 regularization will be used when + inverting the Jacobian whilst `err_norm` is greater than this value. + regularization_strength: (optional) Coefficient of the quadratic penalty + on joint movements. + max_update_norm: (optional) The maximum L2 norm of the update applied to + the joint positions on each iteration. The update vector will be scaled + such that its magnitude never exceeds this value. + progress_thresh: (optional) If `err_norm` divided by the magnitude of the + joint position update is greater than this value then the optimization + will terminate prematurely. This is a useful heuristic to avoid getting + stuck in local minima. + max_steps: (optional) The maximum number of iterations to perform. + inplace: (optional) If True, `physics.data` will be modified in place. + Default value is False, i.e. a copy of `physics.data` will be made. + + Returns: + An `IKResult` namedtuple with the following fields: + qpos: An (nq,) numpy array of joint positions. + err_norm: A float, the weighted sum of L2 norms for the residual + translational and rotational errors. + steps: An int, the number of iterations that were performed. + success: Boolean, True if we converged on a solution within `max_steps`, + False otherwise. + + Raises: + ValueError: If both `target_pos` and `target_quat` are None, or if + `joint_names` has an invalid type. + """ + ... + +def nullspace_method(jac_joints, delta, regularization_strength=...): + """Calculates the joint velocities to achieve a specified end effector delta. 
+ + Args: + jac_joints: The Jacobian of the end effector with respect to the joints. A + numpy array of shape `(ndelta, nv)`, where `ndelta` is the size of `delta` + and `nv` is the number of degrees of freedom. + delta: The desired end-effector delta. A numpy array of shape `(3,)` or + `(6,)` containing either position deltas, rotation deltas, or both. + regularization_strength: (optional) Coefficient of the quadratic penalty + on joint movements. Default is zero, i.e. no regularization. + + Returns: + An `(nv,)` numpy array of joint velocities. + + Reference: + Buss, S. R. S. (2004). Introduction to inverse kinematics with jacobian + transpose, pseudoinverse and damped least squares methods. + https://www.math.ucsd.edu/~sbuss/ResearchWeb/ikmethods/iksurvey.pdf + """ + ... diff --git a/typings/dm_control/utils/io.pyi b/typings/dm_control/utils/io.pyi new file mode 100644 index 00000000..d270a608 --- /dev/null +++ b/typings/dm_control/utils/io.pyi @@ -0,0 +1,14 @@ +""" +This type stub file was generated by pyright. +""" + +"""IO functions.""" + +def GetResource(name, mode=...): # -> Any: + ... + +def GetResourceFilename(name, mode=...): ... +def WalkResources(path): # -> Iterator[tuple[Unknown, list[Unknown], list[Unknown]]]: + ... + +GetResourceAsFile = ... diff --git a/typings/dm_control/utils/rewards.pyi b/typings/dm_control/utils/rewards.pyi new file mode 100644 index 00000000..b8bb8021 --- /dev/null +++ b/typings/dm_control/utils/rewards.pyi @@ -0,0 +1,36 @@ +""" +This type stub file was generated by pyright. +""" + +"""Soft indicator function evaluating whether a number is within bounds.""" +_DEFAULT_VALUE_AT_MARGIN = ... + +def tolerance(x, bounds=..., margin=..., sigmoid=..., value_at_margin=...): # -> float | NDArray[Any]: + """Returns 1 when `x` falls inside the bounds, between 0 and 1 otherwise. + + Args: + x: A scalar or numpy array. + bounds: A tuple of floats specifying inclusive `(lower, upper)` bounds for + the target interval. 
These can be infinite if the interval is unbounded + at one or both ends, or they can be equal to one another if the target + value is exact. + margin: Float. Parameter that controls how steeply the output decreases as + `x` moves out-of-bounds. + * If `margin == 0` then the output will be 0 for all values of `x` + outside of `bounds`. + * If `margin > 0` then the output will decrease sigmoidally with + increasing distance from the nearest bound. + sigmoid: String, choice of sigmoid type. Valid values are: 'gaussian', + 'linear', 'hyperbolic', 'long_tail', 'cosine', 'tanh_squared'. + value_at_margin: A float between 0 and 1 specifying the output value when + the distance from `x` to the nearest bound is equal to `margin`. Ignored + if `margin == 0`. + + Returns: + A float or numpy array with values between 0.0 and 1.0. + + Raises: + ValueError: If `bounds[0] > bounds[1]`. + ValueError: If `margin` is negative. + """ + ... diff --git a/typings/dm_control/utils/xml_tools.pyi b/typings/dm_control/utils/xml_tools.pyi new file mode 100644 index 00000000..f990a4ec --- /dev/null +++ b/typings/dm_control/utils/xml_tools.pyi @@ -0,0 +1,61 @@ +""" +This type stub file was generated by pyright. +""" + +"""Helper functions for model xml creation and modification.""" + +def find_element(root, tag, name): + """Finds and returns the first element of specified tag and name. + + Args: + root: `etree.Element` to be searched recursively. + tag: The `tag` property of the sought element. + name: The `name` attribute of the sought element. + + Returns: + An `etree.Element` with the specified properties. + + Raises: + ValueError: If no matching element is found. + """ + ... + +def nested_element(element, depth): + """Makes a nested `tree.Element` given a single element. + + If `depth=2`, the new tree will look like + + ```xml + + + + + + + ``` + + Args: + element: The `etree.Element` used to create a nested structure. + depth: An `int` denoting the nesting depth. 
The resulting will contain + `element` nested `depth` times. + + + Returns: + A nested `etree.Element`. + """ + ... + +def parse(file_obj): + """Reads xml from a file and returns an `etree.Element`. + + Compared to the `etree.fromstring()`, this function removes the whitespace in + the xml file. This means later on, a user can pretty print the `etree.Element` + with `etree.tostring(element, pretty_print=True)`. + + Args: + file_obj: A file or file-like object. + + Returns: + `etree.Element` of the xml file. + """ + ... diff --git a/typings/dm_control/viewer/__init__.pyi b/typings/dm_control/viewer/__init__.pyi new file mode 100644 index 00000000..a93cb0a6 --- /dev/null +++ b/typings/dm_control/viewer/__init__.pyi @@ -0,0 +1,25 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control.viewer import application + +"""Suite environments viewer package.""" + +def launch(environment_loader, policy=..., title=..., width=..., height=...): # -> None: + """Launches an environment viewer. + + Args: + environment_loader: An environment loader (a callable that returns an + instance of dm_control.rl.control.Environment), an instance of + dm_control.rl.control.Environment. + policy: An optional callable corresponding to a policy to execute within the + environment. It should accept a `TimeStep` and return a numpy array of + actions conforming to the output of `environment.action_spec()`. + title: Application title to be displayed in the title bar. + width: Window width, in pixels. + height: Window height, in pixels. + Raises: + ValueError: When 'environment_loader' argument is set to None. + """ + ... diff --git a/typings/dm_control/viewer/application.pyi b/typings/dm_control/viewer/application.pyi new file mode 100644 index 00000000..6116f242 --- /dev/null +++ b/typings/dm_control/viewer/application.pyi @@ -0,0 +1,98 @@ +""" +This type stub file was generated by pyright. 
+""" + +import collections + +from dm_control.viewer import views + +"""Viewer application module.""" +_DOUBLE_BUFFERING = ... +_PAUSE = ... +_RESTART = ... +_ADVANCE_SIMULATION = ... +_SPEED_UP_TIME = ... +_SLOW_DOWN_TIME = ... +_HELP = ... +_STATUS = ... +_MAX_FRONTBUFFER_SIZE = ... +_MISSING_STATUS_ENTRY = ... +_RUNTIME_STOPPED_LABEL = ... +_STATUS_LABEL = ... +_TIME_LABEL = ... +_CPU_LABEL = ... +_FPS_LABEL = ... +_CAMERA_LABEL = ... +_PAUSED_LABEL = ... +_ERROR_LABEL = ... + +class Help(views.ColumnTextModel): + """Contains the description of input map employed in the application.""" + + def __init__(self) -> None: + """Instance initializer.""" + ... + def get_columns(self): # -> list[list[str]]: + """Returns the text to display in two columns.""" + ... + +class Status(views.ColumnTextModel): + """Monitors and returns the status of the application.""" + + def __init__(self, time_multiplier, pause, frame_timer) -> None: + """Instance initializer. + + Args: + time_multiplier: Instance of util.TimeMultiplier. + pause: An observable pause subject, instance of util.ObservableFlag. + frame_timer: A Timer instance counting duration of frames. + """ + ... + def set_camera(self, camera): # -> None: + """Updates the active camera instance. + + Args: + camera: Instance of renderer.SceneCamera. + """ + ... + def set_runtime(self, instance): # -> None: + """Updates the active runtime instance. + + Args: + instance: Instance of runtime.Base. + """ + ... + def get_columns( + self, + ): # -> list[tuple[Literal['Status', 'Time', 'CPU', 'FPS', 'Camera', 'Paused', 'Error'], Literal['--']]]: + """Returns the text to display in two columns.""" + ... + +class ReloadParams(collections.namedtuple("RefreshParams", ["zoom_to_scene"])): + """Parameters of a reload request.""" + + ... + +class Application: + """Viewer application.""" + + def __init__(self, title=..., width=..., height=...) -> None: + """Instance initializer.""" + ... 
+ def launch(self, environment_loader, policy=...): # -> None: + """Starts the viewer with the specified policy and environment. + + Args: + environment_loader: Either a callable that takes no arguments and returns + an instance of dm_control.rl.control.Environment, or an instance of + dm_control.rl.control.Environment. + policy: An optional callable corresponding to a policy to execute + within the environment. It should accept a `TimeStep` and return + a numpy array of actions conforming to the output of + `environment.action_spec()`. If the callable implements a method `reset` + then this method is called when the viewer is reset. + + Raises: + ValueError: If `environment_loader` is None. + """ + ... diff --git a/typings/dm_control/viewer/gui/__init__.pyi b/typings/dm_control/viewer/gui/__init__.pyi new file mode 100644 index 00000000..236eae27 --- /dev/null +++ b/typings/dm_control/viewer/gui/__init__.pyi @@ -0,0 +1,14 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control import _render +from dm_control.viewer.gui import glfw_gui + +"""Viewer's windowing systems.""" +RenderWindow = ... +RenderWindow = ... +if RenderWindow is None: + def ErrorRenderWindow(*args, **kwargs): ... + + RenderWindow = ... diff --git a/typings/dm_control/viewer/gui/base.pyi b/typings/dm_control/viewer/gui/base.pyi new file mode 100644 index 00000000..6f629986 --- /dev/null +++ b/typings/dm_control/viewer/gui/base.pyi @@ -0,0 +1,34 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +"""Utilities and base classes used exclusively in the gui package.""" +_DOUBLE_CLICK_INTERVAL = ... + +class InputEventsProcessor(metaclass=abc.ABCMeta): + """Thread safe input events processor.""" + + def __init__(self) -> None: + """Instance initializer.""" + ... + def add_event(self, receivers, *args): # -> None: + """Adds a new event to the processing queue.""" + ... + def process_events(self): # -> None: + """Invokes each of the events in the queue. 
+ + Thread safe for queue access but not during event invocations. + + This method must be called regularly on the main thread. + """ + ... + +class DoubleClickDetector: + """Detects double click events.""" + + def __init__(self) -> None: ... + def process(self, button, action): # -> bool: + """Attempts to identify a mouse button click as a double click event.""" + ... diff --git a/typings/dm_control/viewer/gui/fullscreen_quad.pyi b/typings/dm_control/viewer/gui/fullscreen_quad.pyi new file mode 100644 index 00000000..0c35423c --- /dev/null +++ b/typings/dm_control/viewer/gui/fullscreen_quad.pyi @@ -0,0 +1,34 @@ +""" +This type stub file was generated by pyright. +""" + +import numpy as np + +"""OpenGL utility for rendering numpy arrays as images on a quad surface.""" +_FULLSCREEN_QUAD_VERTEX_POSITONS_AND_TEXTURE_COORDS = np.array( + [-1, -1, 0, 1, -1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0], dtype=np.float32 +) +_FLOATS_PER_XY = ... +_FLOATS_PER_VERTEX = ... +_SIZE_OF_FLOAT = ... +_VERTEX_SHADER = ... +_FRAGMENT_SHADER = ... +_VAR_POSITION = ... +_VAR_UV = ... +_VAR_TEXTURE_SAMPLER = ... + +class FullscreenQuadRenderer: + """Renders pixmaps on a fullscreen quad using OpenGL.""" + + def __init__(self) -> None: + """Initializes the fullscreen quad renderer.""" + ... + def render(self, pixmap, viewport_shape): # -> None: + """Renders the pixmap on a fullscreen quad. + + Args: + pixmap: A 3D numpy array of bytes (np.uint8), with dimensions + (width, height, 3). + viewport_shape: A tuple of two elements, (width, height). + """ + ... diff --git a/typings/dm_control/viewer/gui/glfw_gui.pyi b/typings/dm_control/viewer/gui/glfw_gui.pyi new file mode 100644 index 00000000..8b3612f2 --- /dev/null +++ b/typings/dm_control/viewer/gui/glfw_gui.pyi @@ -0,0 +1,138 @@ +""" +This type stub file was generated by pyright. 
+""" + +from dm_control._render import glfw_renderer +from dm_control.viewer.gui import base + +"""Windowing system that uses GLFW library.""" + +class DoubleBufferedGlfwContext(glfw_renderer.GLFWContext): + """Custom context manager for the GLFW based GUI.""" + + def __init__(self, width, height, title) -> None: ... + @property + def window(self): # -> Any: + ... + +class GlfwKeyboard(base.InputEventsProcessor): + """Glfw keyboard device handler. + + Handles the keyboard input in a thread-safe way, and forwards the events + to the registered callbacks. + + Attributes: + on_key: Observable subject triggered when a key event is triggered. + Expects a callback with signature: (key, scancode, activity, modifiers) + """ + + def __init__(self, context) -> None: ... + +class GlfwMouse(base.InputEventsProcessor): + """Glfw mouse device handler. + + Handles the mouse input in a thread-safe way, forwarding the events to the + registered callbacks. + + Attributes: + on_move: Observable subject triggered when a mouse move is detected. + Expects a callback with signature (position, translation). + on_click: Observable subject triggered when a mouse click is detected. + Expects a callback with signature (button, action, modifiers). + on_double_click: Observable subject triggered when a mouse double click is + detected. Expects a callback with signature (button, modifiers). + on_scroll: Observable subject triggered when a mouse scroll is detected. + Expects a callback with signature (scroll_value). + """ + + def __init__(self, context) -> None: ... + @property + def position(self): # -> NDArray[Any]: + ... + +class GlfwWindow: + """A GLFW based application window. + + Attributes: + on_files_drop: An observable subject, instance of util.QuietSet. Attached + listeners, callables taking one argument, will be invoked every time the + user drops files onto the window. The callable will be passed an iterable + with dropped file paths. 
+ is_full_screen: Boolean, whether the window is currently full-screen. + """ + + def __init__(self, width, height, title, context=...) -> None: + """Instance initializer. + + Args: + width: Initial window width, in pixels. + height: Initial window height, in pixels. + title: A string with a window title. + context: (Optional) A `render.GLFWContext` instance. + + Raises: + RuntimeError: If GLFW initialization or window initialization fails. + """ + ... + @property + def shape(self): # -> tuple[int, int]: + """Returns a tuple with the shape of the window, (width, height).""" + ... + @property + def position(self): # -> tuple[int, int]: + """Returns a tuple with top-left window corner's coordinates, (x, y).""" + ... + @property + def keyboard(self): # -> GlfwKeyboard: + """Returns a GlfwKeyboard instance associated with the window.""" + ... + @property + def mouse(self): # -> GlfwMouse: + """Returns a GlfwMouse instance associated with the window.""" + ... + def set_title(self, title): # -> None: + """Sets the window title. + + Args: + title: A string, title of the window. + """ + ... + def set_full_screen(self, enable): # -> None: + """Expands the main application window to full screen or minimizes it. + + Args: + enable: Boolean flag, True expands the window to full-screen mode, False + minimizes it to its former size. + """ + ... + def toggle_full_screen(self): # -> None: + """Expands the main application window to full screen or minimizes it.""" + ... + @property + def is_full_screen(self): # -> bool: + ... + def free(self): # -> None: + """Closes the deleted window.""" + ... + def event_loop(self, tick_func): # -> None: + """Runs the window's event loop. + + This is a blocking call that won't exit until the window is closed. + + Args: + tick_func: A callable, function to call every frame. + """ + ... + def update(self, render_func): # -> None: + """Updates the window and renders a new image. 
+ + Args: + render_func: A callable returning a 3D numpy array of bytes (np.uint8), + with dimensions (width, height, 3). + """ + ... + def close(self): # -> None: + """Closes the window and releases associated resources.""" + ... + def __del__(self): # -> None: + ... diff --git a/typings/dm_control/viewer/renderer.pyi b/typings/dm_control/viewer/renderer.pyi new file mode 100644 index 00000000..6370671d --- /dev/null +++ b/typings/dm_control/viewer/renderer.pyi @@ -0,0 +1,355 @@ +""" +This type stub file was generated by pyright. +""" + +import abc +import contextlib + +import mujoco +import numpy as np + +"""Renderer module.""" +_FREE_CAMERA_INDEX = ... +_NO_BODY_TRACKED_INDEX = ... +_INVALID_BODY_INDEX = ... +_FULL_SCENE_ZOOM_FACTOR = ... +_DEFAULT_RENDER_FLAGS = np.zeros(mujoco.mjtRndFlag.mjNRNDFLAG, dtype=np.ubyte) +_DEFAULT_FONT_SCALE = ... +_HIDPI_FONT_SCALE = ... + +class BaseRenderer(metaclass=abc.ABCMeta): + """A base class for component-based Mujoco Renderers implementations. + + Attributes: + components: A set of RendererComponent the renderer will render in addition + to rendering the physics scene. Being a QuietSet instance, it supports + adding and removing of components using += and -= operators. + screen_capture_components: Components that perform screen capture and need + a guarantee to be called when all other elements have been rendered. + """ + + def __init__(self) -> None: + """Instance initializer.""" + ... + +class Component(metaclass=abc.ABCMeta): + """Components are a way to introduce extra rendering content. + + They are invoked after the main rendering pass, allowing to draw extra images + into the render buffer, such as overlays. + """ + + @abc.abstractmethod + def render(self, context, viewport): # -> None: + """Renders the component. + + Args: + context: MjrContext instance. + viewport: Viewport instance. + """ + ... 
+ +class NullRenderer: + """A stub off-screen renderer used when no other renderer is available.""" + + def __init__(self) -> None: + """Instance initializer.""" + ... + def release(self): # -> None: + ... + @property + def pixels(self): # -> NDArray[uint8]: + """Returns a black pixel map.""" + ... + +class OffScreenRenderer(BaseRenderer): + """A Mujoco renderer that renders to an off-screen surface.""" + + def __init__(self, model, surface) -> None: + """Instance initializer. + + Args: + model: instance of MjModel. + surface: instance of dm_control.render.BaseContext. + """ + ... + def render(self, viewport, scene): # -> None: + """Renders the scene to the specified viewport. + + Args: + viewport: Instance of Viewport. + scene: Instance of MjvScene. + Returns: + A 3-dimensional array of shape (viewport.width, viewport.height, 3), + with the contents of the front buffer. + """ + ... + def release(self): # -> None: + """Releases the render context and related resources.""" + ... + @property + def pixels(self): # -> NDArray[uint8]: + """Returns the rendered image.""" + ... + +class Perturbation: + """A proxy that allows to move a scene object.""" + + def __init__(self, body_id, model, data, scene) -> None: + """Instance initializer. + + Args: + body_id: A positive integer, ID of the body to manipulate. + model: MjModel instance. + data: MjData instance. + scene: MjvScene instance. + """ + ... + def start_move(self, action, grab_pos): # -> None: + """Starts a movement action.""" + ... + def tick_move(self, viewport_offset): # -> None: + """Transforms object's location/rotation by the specified amount.""" + ... + def end_move(self): # -> None: + """Ends a movement operation.""" + ... + @contextlib.contextmanager + def apply(self, paused): # -> Generator[None, Any, None]: + """Applies the modifications introduced by performing the move operation.""" + ... + @property + def ptr(self): # -> MjvPerturb: + """Returns the underlying Mujoco Perturbation object.""" + ... 
+ @property + def body_id(self): # -> Unknown: + """A positive integer, ID of the manipulated body.""" + ... + +class NullPerturbation: + """An empty perturbation. + + A null-object pattern, used to avoid cumbersome if clauses. + """ + + @contextlib.contextmanager + def apply(self, paused): # -> Generator[None, Any, None]: + """Activates/deactivates the null context.""" + ... + @property + def ptr(self): # -> None: + """Returns None, because this class represents an empty perturbation.""" + ... + +class RenderSettings: + """Renderer settings.""" + + def __init__(self) -> None: ... + @property + def visualization(self): # -> MjvOption: + """Returns scene visualization options.""" + ... + @property + def render_flags(self): # -> NDArray[ubyte]: + """Returns the render flags.""" + ... + @property + def visualization_flags(self): + """Returns scene visualization flags.""" + ... + @property + def geom_groups(self): + """Returns geom groups visibility flags.""" + ... + @property + def site_groups(self): + """Returns site groups visibility flags.""" + ... + def apply_settings(self, scene): # -> None: + """Applies settings to the specified scene. + + Args: + scene: Instance of MjvScene. + """ + ... + def toggle_rendering_flag(self, flag_index): # -> None: + """Toggles the specified rendering flag.""" + ... + def toggle_visualization_flag(self, flag_index): # -> None: + """Toggles the specified visualization flag.""" + ... + def toggle_geom_group(self, group_index): # -> None: + """Toggles the specified geom group visible or not.""" + ... + def toggle_site_group(self, group_index): # -> None: + """Toggles the specified site group visible or not.""" + ... + def toggle_stereo_buffering(self): # -> None: + """Toggles the double buffering mode on/off.""" + ... + def select_next_rendering_mode(self): # -> None: + """Cycles to the next rendering mode.""" + ... + def select_prev_rendering_mode(self): # -> None: + """Cycles to the previous rendering mode.""" + ... 
+ def select_next_labeling_mode(self): # -> None: + """Cycles to the next scene object labeling mode.""" + ... + def select_prev_labeling_mode(self): # -> None: + """Cycles to the previous scene object labeling mode.""" + ... + +class SceneCamera: + """A camera used to navigate around and render the scene.""" + + def __init__(self, model, data, options, settings=..., zoom_factor=..., scene_callback=...) -> None: + """Instance initializer. + + Args: + model: MjModel instance. + data: MjData instance. + options: RenderSettings instance. + settings: Optional, internal camera settings obtained from another + SceneCamera instance using 'settings' property. + zoom_factor: The initial zoom factor for zooming into the scene. + scene_callback: Scene callback. + This is a callable of the form: `my_callable(MjModel, MjData, MjvScene)` + that gets applied to every rendered scene. + """ + ... + def set_freelook_mode(self): # -> None: + """Enables 6 degrees of freedom of movement for the camera.""" + ... + def set_tracking_mode(self, body_id): # -> None: + """Latches the camera onto the specified body. + + Leaves the user only 3 degrees of freedom to rotate the camera. + + Args: + body_id: A positive integer, ID of the body to track. + """ + ... + def set_fixed_mode(self, fixed_camera_id): # -> None: + """Fixes the camera in a pre-defined position, taking away all DOF. + + Args: + fixed_camera_id: A positive integer, Id of a fixed camera defined in the + scene. + """ + ... + def look_at(self, position, distance): # -> None: + """Positions the camera so that it's focused on the specified point.""" + ... + def move(self, action, viewport_offset): # -> None: + """Moves the camera around the scene.""" + ... + def new_perturbation(self, body_id): # -> Perturbation: + """Creates a proxy that allows to manipulate the specified object.""" + ... 
+ def raycast( + self, viewport, screen_pos + ): # -> tuple[Literal[-1], None] | tuple[Unknown | Literal[-1], NDArray[double] | None]: + """Shoots a ray from the specified viewport position into the scene.""" + ... + def render(self, perturbation=...): # -> MjvScene: + """Renders the scene form this camera's perspective. + + Args: + perturbation: (Optional), instance of Perturbation. + Returns: + Rendered scene, instance of MjvScene. + """ + ... + def zoom_to_scene(self): # -> None: + """Zooms in on the entire scene.""" + ... + @property + def transform(self): # -> tuple[NDArray[float64], NDArray[float64]]: + """Returns a tuple with camera transform. + + The transform comes in form: (3x3 rotation mtx, 3-component position). + """ + ... + @property + def settings(self): # -> MjvCamera: + """Returns internal camera settings.""" + ... + @settings.setter + def settings(self, value): # -> None: + """Restores the camera settings.""" + ... + @property + def name(self): # -> str: + """Name of the active camera.""" + ... + @property + def mode(self): + """Index of the mode the camera is currently in.""" + ... + @property + def is_initialized(self): # -> Literal[False]: + """Returns True if camera is properly initialized.""" + ... + +class Viewport: + """Render viewport.""" + + def __init__(self, width=..., height=...) -> None: + """Instance initializer. + + Args: + width: Viewport width, in pixels. + height: Viewport height, in pixels. + """ + ... + def set_size(self, width, height): # -> None: + """Changes the viewport size. + + Args: + width: Viewport width, in pixels. + height: Viewport height, in pixels. + """ + ... + def screen_to_viewport(self, screen_coordinates): # -> NDArray[float32]: + """Converts screen coordinates to viewport coordinates. + + Args: + screen_coordinates: 2-component tuple, with components being integral + numbers in range defined by the screen/window resolution. 
+ Returns: + A 2-component tuple, with components being floating point values in range + [0, 1]. + """ + ... + def screen_to_inverse_viewport(self, screen_coordinates): # -> NDArray[float32]: + """Converts screen coordinates to viewport coordinates flipped vertically. + + Args: + screen_coordinates: 2-component tuple, with components being integral + numbers in range defined by the screen/window resolution. + Returns: + A 2-component tuple, with components being floating point values in range + [0, 1]. The height component value will be flipped, with 1 at the top, and + 0 at the bottom of the viewport. + """ + ... + @property + def aspect_ratio(self): ... + @property + def mujoco_rect(self): + """Instance of MJRRECT with viewport dimensions.""" + ... + @property + def dimensions(self): # -> NDArray[Unknown]: + """Viewport dimensions in form of a 2-component vector.""" + ... + @property + def width(self): + """Viewport width.""" + ... + @property + def height(self): + """Viewport height.""" + ... diff --git a/typings/dm_control/viewer/runtime.pyi b/typings/dm_control/viewer/runtime.pyi new file mode 100644 index 00000000..23d8d2de --- /dev/null +++ b/typings/dm_control/viewer/runtime.pyi @@ -0,0 +1,87 @@ +""" +This type stub file was generated by pyright. +""" + +import enum + +from dm_control.mujoco.wrapper import mjbindings + +"""Environment's execution runtime.""" +mjlib = mjbindings.mjlib +_SIMULATION_STEP_INTERVAL = ... +_DEFAULT_MAX_SIM_STEP = ... + +class State(enum.Enum): + """State of the Runtime class.""" + + START = ... + RUNNING = ... + STOP = ... + STOPPED = ... + RESTARTING = ... + +class Runtime: + """Base Runtime class. + + Attributes: + simulation_time_budget: Float value, how much time can be spent on physics + simulation every frame, in seconds. + on_episode_begin: An observable subject, an instance of util.QuietSet. + It contains argumentless callables, invoked, when a new episode begins. 
+ on_error: An observable subject, an instance of util.QuietSet. It contains + single argument callables, invoked, when the environment or the agent + throw an error. + on_physics_changed: An observable subject, an instance of util.QuietSet. + During episode restarts, the underlying physics instance may change. If + you are interested in learning about those changes, attach a listener + using the += operator. The listener should be a callable with no required + arguments. + """ + + def __init__(self, environment, policy=...) -> None: + """Instance initializer. + + Args: + environment: An instance of dm_control.rl.control.Environment. + policy: Either a callable that accepts a `TimeStep` and returns a numpy + array of actions conforming to `environment.action_spec()`, or None, in + which case a default action will be generated for each environment step. + """ + ... + def tick(self, time_elapsed, paused): # -> None: + """Advances the simulation by one frame. + + Args: + time_elapsed: Time elapsed since the last time this method was called. + paused: A boolean flag telling if the simulation is paused. + Returns: + A boolean flag to determine if the episode has finished. + """ + ... + def single_step(self): # -> None: + """Performs a single step of simulation.""" + ... + def stop(self): # -> None: + """Stops the runtime.""" + ... + def restart(self): # -> None: + """Restarts the episode, resetting environment, model, and data.""" + ... + def get_time(self): + """Elapsed simulation time.""" + ... + @property + def state(self): # -> State: + """Returns the current state of the state machine. + + Returned states are values of runtime.State enum. + """ + ... + @property + def simulation_time(self): # -> float: + """Returns the amount of time spent running the simulation.""" + ... + @property + def last_action(self): # -> tuple[Unknown, ...] | NDArray[Unknown] | None: + """Action passed to the environment on the last step.""" + ... 
diff --git a/typings/dm_control/viewer/user_input.pyi b/typings/dm_control/viewer/user_input.pyi new file mode 100644 index 00000000..1162c995 --- /dev/null +++ b/typings/dm_control/viewer/user_input.pyi @@ -0,0 +1,205 @@ +""" +This type stub file was generated by pyright. +""" + +import collections + +"""Utilities for handling keyboard events.""" +RELEASE = ... +PRESS = ... +REPEAT = ... +KEY_UNKNOWN = ... +KEY_SPACE = ... +KEY_APOSTROPHE = ... +KEY_COMMA = ... +KEY_MINUS = ... +KEY_PERIOD = ... +KEY_SLASH = ... +KEY_0 = ... +KEY_1 = ... +KEY_2 = ... +KEY_3 = ... +KEY_4 = ... +KEY_5 = ... +KEY_6 = ... +KEY_7 = ... +KEY_8 = ... +KEY_9 = ... +KEY_SEMICOLON = ... +KEY_EQUAL = ... +KEY_A = ... +KEY_B = ... +KEY_C = ... +KEY_D = ... +KEY_E = ... +KEY_F = ... +KEY_G = ... +KEY_H = ... +KEY_I = ... +KEY_J = ... +KEY_K = ... +KEY_L = ... +KEY_M = ... +KEY_N = ... +KEY_O = ... +KEY_P = ... +KEY_Q = ... +KEY_R = ... +KEY_S = ... +KEY_T = ... +KEY_U = ... +KEY_V = ... +KEY_W = ... +KEY_X = ... +KEY_Y = ... +KEY_Z = ... +KEY_LEFT_BRACKET = ... +KEY_BACKSLASH = ... +KEY_RIGHT_BRACKET = ... +KEY_GRAVE_ACCENT = ... +KEY_ESCAPE = ... +KEY_ENTER = ... +KEY_TAB = ... +KEY_BACKSPACE = ... +KEY_INSERT = ... +KEY_DELETE = ... +KEY_RIGHT = ... +KEY_LEFT = ... +KEY_DOWN = ... +KEY_UP = ... +KEY_PAGE_UP = ... +KEY_PAGE_DOWN = ... +KEY_HOME = ... +KEY_END = ... +KEY_CAPS_LOCK = ... +KEY_SCROLL_LOCK = ... +KEY_NUM_LOCK = ... +KEY_PRINT_SCREEN = ... +KEY_PAUSE = ... +KEY_F1 = ... +KEY_F2 = ... +KEY_F3 = ... +KEY_F4 = ... +KEY_F5 = ... +KEY_F6 = ... +KEY_F7 = ... +KEY_F8 = ... +KEY_F9 = ... +KEY_F10 = ... +KEY_F11 = ... +KEY_F12 = ... +KEY_KP_0 = ... +KEY_KP_1 = ... +KEY_KP_2 = ... +KEY_KP_3 = ... +KEY_KP_4 = ... +KEY_KP_5 = ... +KEY_KP_6 = ... +KEY_KP_7 = ... +KEY_KP_8 = ... +KEY_KP_9 = ... +KEY_KP_DECIMAL = ... +KEY_KP_DIVIDE = ... +KEY_KP_MULTIPLY = ... +KEY_KP_SUBTRACT = ... +KEY_KP_ADD = ... +KEY_KP_ENTER = ... +KEY_KP_EQUAL = ... +KEY_LEFT_SHIFT = ... +KEY_LEFT_CONTROL = ... 
+KEY_LEFT_ALT = ... +KEY_LEFT_SUPER = ... +KEY_RIGHT_SHIFT = ... +KEY_RIGHT_CONTROL = ... +KEY_RIGHT_ALT = ... +KEY_RIGHT_SUPER = ... +MOD_NONE = ... +MOD_SHIFT = ... +MOD_CONTROL = ... +MOD_ALT = ... +MOD_SUPER = ... +MOD_SHIFT_CONTROL = ... +MOUSE_BUTTON_LEFT = ... +MOUSE_BUTTON_RIGHT = ... +MOUSE_BUTTON_MIDDLE = ... +_NO_EXCLUSIVE_KEY = ... +_NO_CALLBACK = ... + +class Exclusive(collections.namedtuple("Exclusive", "combination")): + """Defines an exclusive action. + + Exclusive actions can be invoked in response to single key clicks only. The + callback will be called twice. The first time when the key combination is + pressed, passing True as the argument to the callback. The second time when + the key is released (the modifiers don't have to be present then), passing + False as the callback argument. + + Attributes: + combination: A list of integers interpreted as key codes, or tuples + in format (keycode, modifier). + """ + + ... + +class DoubleClick(collections.namedtuple("DoubleClick", "combination")): + """Defines a mouse double click action. + + It will define a requirement to double click the mouse button specified in the + combination in order to be triggered. + + Attributes: + combination: A list of integers interpreted as key codes, or tuples + in format (keycode, modifier). The keycodes are limited only to mouse + button codes. + """ + + ... + +class Range(collections.namedtuple("Range", "collection")): + """Binds a number of key combinations to a callback. + + When triggered, the index of the triggering key combination will be passed + as an argument to the callback. + + Attributes: + callback: A callable accepting a single argument - an integer index of the + triggered callback. + collection: A collection of combinations. Combinations may either be raw key + codes, tuples in format (keycode, modifier), or one of the Exclusive or + DoubleClick instances. + """ + + ... 
+ +class InputMap: + """Provides ability to alias key combinations and map them to actions.""" + + def __init__(self, mouse, keyboard) -> None: + """Instance initializer. + + Args: + mouse: GlfwMouse instance. + keyboard: GlfwKeyboard instance. + """ + ... + def __del__(self): # -> None: + """Instance deleter.""" + ... + def clear_bindings(self): # -> None: + """Clears registered action bindings, while keeping key aliases.""" + ... + def bind(self, callback, key_binding): # -> None: + """Binds a key combination to a callback. + + Args: + callback: An argument-less callable. + key_binding: A integer with a key code, a tuple (keycode, modifier) or one + of the actions Exclusive|DoubleClick|Range carrying the key combination. + """ + ... + def bind_plane(self, callback): # -> None: + """Binds a callback to a planar motion action (mouse movement).""" + ... + def bind_z_axis(self, callback): # -> None: + """Binds a callback to a z-axis motion action (mouse scroll).""" + ... diff --git a/typings/dm_control/viewer/util.pyi b/typings/dm_control/viewer/util.pyi new file mode 100644 index 00000000..dcbacc65 --- /dev/null +++ b/typings/dm_control/viewer/util.pyi @@ -0,0 +1,212 @@ +""" +This type stub file was generated by pyright. +""" + +import contextlib + +"""Utility classes.""" +_MIN_TIME_MULTIPLIER = ... +_MAX_TIME_MULTIPLIER = ... + +def is_scalar(value): # -> bool: + """Checks if the supplied value can be converted to a scalar.""" + ... + +def to_iterable(item): # -> list[str] | list[Unknown]: + """Converts an item or iterable into an iterable.""" + ... + +class QuietSet: + """A set-like container that quietly processes removals of missing keys.""" + + def __init__(self) -> None: ... + def __iadd__(self, items): # -> Self@QuietSet: + """Adds `items`, avoiding duplicates. + + Args: + items: An iterable of items to add, or a single item to add. + + Returns: + This instance of `QuietSet`. + """ + ... + def __isub__(self, items): # -> Self@QuietSet: + """Detaches `items`. 
+ + Args: + items: An iterable of items to detach, or a single item to detach. + + Returns: + This instance of `QuietSet`. + """ + ... + def __len__(self): # -> int: + ... + def __iter__(self): # -> Iterator[Unknown]: + ... + +def interleave(a, b): # -> chain[Unknown]: + """Interleaves the contents of two iterables.""" + ... + +class TimeMultiplier: + """Controls the relative speed of the simulation compared to realtime.""" + + def __init__(self, initial_time_multiplier) -> None: + """Instance initializer. + + Args: + initial_time_multiplier: A float scalar specifying the initial speed of + the simulation with 1.0 corresponding to realtime. + """ + ... + def get(self): # -> float: + """Returns the current time factor value.""" + ... + def set(self, value): # -> None: + """Modifies the time factor. + + Args: + value: A float scalar, new value of the time factor. + """ + ... + def __str__(self) -> str: + """Returns a formatted string containing the time factor.""" + ... + def increase(self): # -> None: + """Doubles the current time factor value.""" + ... + def decrease(self): # -> None: + """Halves the current time factor value.""" + ... + +class Integrator: + """Integrates a value and averages it for the specified period of time.""" + + def __init__(self, refresh_rate=...) -> None: + """Instance initializer. + + Args: + refresh_rate: How often, in seconds, is the integrated value averaged. + """ + ... + @property + def value(self): # -> int: + """Returns the averaged value.""" + ... + @value.setter + def value(self, val): # -> None: + """Integrates the new value.""" + ... + +class AtomicAction: + """An action that cannot be interrupted.""" + + def __init__(self, state_change_callback=...) -> None: + """Instance initializer. + + Args: + state_change_callback: Callable invoked when action changes its state. + """ + ... + def begin(self, watermark): # -> None: + """Begins the action, signing it with the specified watermark.""" + ... 
+ def end(self, watermark): # -> None: + """Ends a started action, provided the watermarks match.""" + ... + @property + def in_progress(self): # -> bool: + """Returns a boolean value to indicate if the being method was called.""" + ... + @property + def watermark(self): # -> None: + """Returns the watermark passed to begin() method call, or None. + + None will be returned if the action is not in progress. + """ + ... + +class ObservableFlag(QuietSet): + """Observable boolean flag. + + The QuietState provides necessary functionality for managing listeners. + + A listener is a callable that takes one boolean parameter. + """ + + def __init__(self, initial_value) -> None: + """Instance initializer. + + Args: + initial_value: A boolean value with the initial state of the flag. + """ + ... + def toggle(self): # -> None: + """Toggles the value True/False.""" + ... + def __iadd__(self, value): # -> Self@ObservableFlag: + """Add new listeners and update them about the state.""" + ... + @property + def value(self): # -> Unknown | bool: + """Value of the flag.""" + ... + @value.setter + def value(self, val): # -> None: + ... + +class Timer: + """Measures time elapsed between two ticks.""" + + def __init__(self) -> None: + """Instance initializer.""" + ... + def tick(self): # -> float: + """Updates the timer. + + Returns: + Time elapsed since the last call to this method. + """ + ... + @contextlib.contextmanager + def measure_time(self): # -> Generator[None, Any, None]: + ... + @property + def measured_time(self): # -> float: + ... + +class ErrorLogger: + """A context manager that catches and logs all errors.""" + + def __init__(self, listeners) -> None: + """Instance initializer. + + Args: + listeners: An iterable of callables, listeners to inform when an error + is caught. Each callable should accept a single string argument. + """ + ... + def __enter__(self, *args): # -> None: + ... 
+ def __exit__(self, exception_type, exception_value, tb): # -> Literal[True] | None: + ... + @property + def errors_found(self): # -> bool: + """Returns True if any errors were caught.""" + ... + +class NullErrorLogger: + """A context manager that replaces an ErrorLogger. + + This error logger will pass all thrown errors through. + """ + + def __enter__(self, *args): # -> None: + ... + def __exit__(self, error_type, value, tb): # -> None: + ... + @property + def errors_found(self): # -> Literal[False]: + """Returns True if any errors were caught.""" + ... diff --git a/typings/dm_control/viewer/viewer.pyi b/typings/dm_control/viewer/viewer.pyi new file mode 100644 index 00000000..b812236c --- /dev/null +++ b/typings/dm_control/viewer/viewer.pyi @@ -0,0 +1,233 @@ +""" +This type stub file was generated by pyright. +""" + +from dm_control.mujoco.wrapper import mjbindings + +"""Mujoco Physics viewer, with custom input controllers.""" +functions = mjbindings.functions +_NUM_GROUP_KEYS = ... +_PAN_CAMERA_VERTICAL_MOUSE = ... +_PAN_CAMERA_HORIZONTAL_MOUSE = ... +_ROTATE_OBJECT_MOUSE = ... +_MOVE_OBJECT_VERTICAL_MOUSE = ... +_MOVE_OBJECT_HORIZONTAL_MOUSE = ... +_PAN_CAMERA_VERTICAL_TOUCHPAD = ... +_PAN_CAMERA_HORIZONTAL_TOUCHPAD = ... +_ROTATE_OBJECT_TOUCHPAD = ... +_MOVE_OBJECT_VERTICAL_TOUCHPAD = ... +_MOVE_OBJECT_HORIZONTAL_TOUCHPAD = ... +_ROTATE_CAMERA = ... +_CENTER_CAMERA = ... +_SELECT_OBJECT = ... +_TRACK_OBJECT = ... +_FREE_LOOK = ... +_NEXT_CAMERA = ... +_PREVIOUS_CAMERA = ... +_ZOOM_TO_SCENE = ... +_DOUBLE_BUFFERING = ... +_PREV_RENDERING_MODE = ... +_NEXT_RENDERING_MODE = ... +_PREV_LABELING_MODE = ... +_NEXT_LABELING_MODE = ... +_PRINT_CAMERA = ... +_VISUALIZATION_FLAGS = ... +_GEOM_GROUPS = ... +_SITE_GROUPS = ... +_RENDERING_FLAGS = ... +_CAMERA_MOVEMENT_ACTIONS = ... +_SCROLL_SPEED_FACTOR = ... +_LOOK_AT_DISTANCE = ... +_FULL_SCENE_ZOOM_FACTOR = ... 
+ +class Viewer: + """Viewport displaying the contents of a physics world.""" + + def __init__(self, viewport, mouse, keyboard, camera_settings=..., zoom_factor=..., scene_callback=...) -> None: + """Instance initializer. + + Args: + viewport: Render viewport, instance of renderer.Viewport. + mouse: A mouse device. + keyboard: A keyboard device. + camera_settings: Properties of the scene MjvCamera. + zoom_factor: Initial scale factor for zooming into the scene. + scene_callback: Scene callback. + This is a callable of the form: `my_callable(MjModel, MjData, MjvScene)` + that gets applied to every rendered scene. + """ + ... + def __del__(self): # -> None: + ... + def initialize(self, physics, renderer_instance, touchpad): # -> None: + """Initialize the viewer. + + Args: + physics: Physics instance. + renderer_instance: A renderer.Base instance. + touchpad: A boolean, use input dedicated to touchpad. + """ + ... + def deinitialize(self): # -> None: + """Deinitializes the viewer instance.""" + ... + def render(self): # -> None: + """Renders the visualized scene.""" + ... + def zoom_to_scene(self): # -> None: + """Utility method that set the camera to embrace the entire scene.""" + ... + @property + def perturbation(self): # -> NullPerturbation: + """Returns an active renderer.Perturbation object.""" + ... + @property + def camera(self): # -> SceneCamera | None: + """Returns an active renderer.SceneCamera instance.""" + ... + @property + def render_settings(self): # -> RenderSettings: + """Returns renderer.RenderSettings used by this viewer.""" + ... + +class CameraSelector: + """Binds camera behavior to user input.""" + + def __init__(self, model, camera, free_camera, **unused) -> None: + """Instance initializer. + + Args: + model: Instance of MjModel. + camera: Instance of SceneCamera. + free_camera: Instance of FreeCameraController. + **unused: Other arguments, not used by this class. + """ + ... 
+ def select_previous(self): # -> None: + """Cycles to the previous scene camera.""" + ... + def select_next(self): # -> None: + """Cycles to the next scene camera.""" + ... + def escape(self) -> None: + """Unconditionally switches to the free camera.""" + ... + +class FreeCameraController: + """Implements the free camera behavior.""" + + def __init__(self, viewport, camera, pointer, selection_service, **unused) -> None: + """Instance initializer. + + Args: + viewport: Instance of mujoco_viewer.Viewport. + camera: Instance of mujoco_viewer.SceneCamera. + pointer: A pointer that moves around the screen and is used to point at + bodies. Implements a single attribute - 'position' - that returns a + 2-component vector of pointer's screen space position. + selection_service: An instance of a class implementing a + 'selected_body_id' property. + **unused: Other optional parameters not used by this class. + """ + ... + def activate(self): # -> None: + """Activates the controller.""" + ... + def deactivate(self): # -> None: + """Deactivates the controller.""" + ... + def set_pan_vertical_mode(self, enable): # -> None: + """Starts/ends the camera panning action along the vertical plane. + + Args: + enable: A boolean flag, True to start the action, False to end it. + """ + ... + def set_pan_horizontal_mode(self, enable): # -> None: + """Starts/ends the camera panning action along the horizontal plane. + + Args: + enable: A boolean flag, True to start the action, False to end it. + """ + ... + def set_rotate_mode(self, enable): # -> None: + """Starts/ends the camera rotation action. + + Args: + enable: A boolean flag, True to start the action, False to end it. + """ + ... + def center(self): # -> None: + """Focuses camera on the object the pointer is currently pointing at.""" + ... + def on_move(self, position, translation): # -> None: + """Translates mouse moves onto camera movements.""" + ... + def zoom(self, zoom_factor): # -> None: + """Zooms the camera in/out. 
+ + Args: + zoom_factor: A floating point value, by how much to zoom the camera. + Positive values zoom the camera in, negative values zoom it out. + """ + ... + def track(self): # -> None: + """Makes the camera track the currently selected object. + + The selection is managed by the selection service. + """ + ... + def free_look(self): # -> None: + """Switches the camera to a free-look mode.""" + ... + +class ManipulationController: + """Binds control over scene objects to user input.""" + + def __init__(self, viewport, camera, pointer, **unused) -> None: + """Instance initializer. + + Args: + viewport: Instance of mujoco_viewer.Viewport. + camera: Instance of mujoco_viewer.SceneCamera. + pointer: A pointer that moves around the screen and is used to point at + bodies. Implements a single attribute - 'position' - that returns a + 2-component vector of pointer's screen space position. + **unused: Other arguments, unused by this class. + """ + ... + def select(self): # -> None: + """Translates mouse double-clicks to object selection action.""" + ... + def set_move_vertical_mode(self, enable): # -> None: + """Begins/ends an object translation action along the vertical plane. + + Args: + enable: A boolean flag, True begins the action, False ends it. + """ + ... + def set_move_horizontal_mode(self, enable): # -> None: + """Begins/ends an object translation action along the horizontal plane. + + Args: + enable: A boolean flag, True begins the action, False ends it. + """ + ... + def set_rotate_mode(self, enable): # -> None: + """Begins/ends an object rotation action. + + Args: + enable: A boolean flag, True begins the action, False ends it. + """ + ... + def on_move(self, position, translation): # -> None: + """Translates mouse moves to selected object movements.""" + ... + @property + def perturbation(self): # -> None: + """Returns the Perturbation object that represents the manipulated body.""" + ... 
+ @property + def selected_body_id(self): # -> Literal[-1]: + """Returns the id of the selected body, or -1 if none is selected.""" + ... diff --git a/typings/dm_control/viewer/views.pyi b/typings/dm_control/viewer/views.pyi new file mode 100644 index 00000000..170cbcf8 --- /dev/null +++ b/typings/dm_control/viewer/views.pyi @@ -0,0 +1,118 @@ +""" +This type stub file was generated by pyright. +""" + +import abc +import enum + +from dm_control.viewer import renderer + +"""Components and views that render custom images into Mujoco render frame.""" + +class PanelLocation(enum.Enum): + TOP_LEFT = ... + TOP_RIGHT = ... + BOTTOM_LEFT = ... + BOTTOM_RIGHT = ... + +class BaseViewportView(metaclass=abc.ABCMeta): + """Base abstract view class.""" + + @abc.abstractmethod + def render(self, context, viewport, location): # -> None: + """Renders the view on screen. + + Args: + context: MjrContext instance. + viewport: Viewport instance. + location: Value defined in PanelLocation enum. + """ + ... + +class ColumnTextModel(metaclass=abc.ABCMeta): + """Data model that returns 2 columns of text.""" + + @abc.abstractmethod + def get_columns(self): # -> None: + """Returns the text to display in two columns. + + Returns: + Returns an iterable of tuples of 2 strings. Each tuple has format + (left_column_label, right_column_label). + """ + ... + +class ColumnTextView(BaseViewportView): + """A view displayed in Mujoco render window.""" + + def __init__(self, model) -> None: + """Instance initializer. + + Args: + model: Instance of ColumnTextModel. + """ + ... + def render(self, context, viewport, location): # -> None: + """Renders the overlay on screen. + + Args: + context: MjrContext instance. + viewport: Viewport instance. + location: Value defined in PanelLocation enum. + """ + ... + +class MujocoDepthBuffer(renderer.Component): + """Displays the contents of the scene's depth buffer.""" + + def __init__(self) -> None: ... 
+ def render(self, context, viewport): # -> None: + """Renders the overlay on screen. + + Args: + context: MjrContext instance. + viewport: MJRRECT instance. + """ + ... + +class ViewportLayout(renderer.Component): + """Layout manager for the render viewport. + + Allows to create a viewport layout by injecting renderer component even in + absence of a renderer, and then easily reattach it between renderers. + """ + + def __init__(self) -> None: + """Instance initializer.""" + ... + def __len__(self): # -> int: + ... + def __contains__(self, key): # -> bool: + ... + def add(self, view, location): # -> None: + """Adds a new view. + + Args: + view: renderer.BaseViewportView instance. + location: Value defined in PanelLocation enum, location of the view in the + viewport. + """ + ... + def remove(self, view): # -> None: + """Removes a view. + + Args: + view: renderer.BaseViewportView instance. + """ + ... + def clear(self): # -> None: + """Removes all attached components.""" + ... + def render(self, context, viewport): # -> None: + """Renders the overlay on screen. + + Args: + context: MjrContext instance. + viewport: MJRRECT instance. + """ + ... diff --git a/typings/mujoco/__init__.pyi b/typings/mujoco/__init__.pyi new file mode 100644 index 00000000..610fa6e9 --- /dev/null +++ b/typings/mujoco/__init__.pyi @@ -0,0 +1,44 @@ +""" +This type stub file was generated by pyright. +""" + +import ctypes +import ctypes.util +import os +import platform +import subprocess +import warnings + +import mujoco +from mujoco import mjtOBJ +from mujoco._callbacks import * +from mujoco._constants import * +from mujoco._enums import * +from mujoco._errors import * +from mujoco._functions import * +from mujoco._render import * +from mujoco._structs import * +from mujoco.gl_context import * +from mujoco.renderer import Renderer + +""" +This type stub file was generated by pyright. +""" +_SYSTEM = ... +if _SYSTEM == "Windows": ... +else: + proc_translated = ... + is_rosetta = ... 
+HEADERS_DIR = ...
+PLUGINS_DIR = ...
+PLUGIN_HANDLES = ...
+__version__ = ...
+
+# these are just hacks to get pyright to stop complaining; NOTE(review): mujoco has no "engine" submodule (that is dm_control.mujoco.engine) -- verify the MjModel/MjData aliases resolve
+MjModel: mujoco.engine.MjModel
+MjData: mujoco.engine.MjData
+mjtSolver: mujoco.mjtSolver
+mjtObj: mujoco.mjtObj
+
+def mj_saveLastXML(filename: str, model: mujoco.MjModel) -> None: ...
+def mj_id2name(model: mujoco.MjModel, type: mujoco.mjtObj, id: int) -> str: ...
diff --git a/typings/mujoco/bindings_test.pyi b/typings/mujoco/bindings_test.pyi
new file mode 100644
index 00000000..0127fa47
--- /dev/null
+++ b/typings/mujoco/bindings_test.pyi
@@ -0,0 +1,226 @@
+"""
+This type stub file was generated by pyright.
+"""
+
+import contextlib
+import sys
+
+import mujoco
+from absl.testing import absltest, parameterized
+
+"""Tests for MuJoCo Python bindings."""
+TEST_XML = ...
+TEST_XML_SENSOR = ...
+TEST_XML_PLUGIN = ...
+
+@contextlib.contextmanager
+def temporary_callback(setter, callback): # -> Generator[None, Any, None]:
+    ...
+
+class MuJoCoBindingsTest(parameterized.TestCase):
+    def setUp(self): # -> None:
+        ...
+    def test_load_xml_can_handle_name_clash(self): # -> None:
+        ...
+    def test_can_read_array(self): # -> None:
+        ...
+    def test_can_set_array(self): # -> None:
+        ...
+    def test_array_is_a_view(self): # -> None:
+        ...
+    @absltest.skipIf(sys.implementation.name == "pypy", reason="requires sys.getrefcount")
+    def test_array_keeps_struct_alive(self): # -> None:
+        ...
+    def test_named_indexing_actuator_ctrl(self): # -> None:
+        ...
+    def test_named_indexing_invalid_names_in_model(self): # -> None:
+        ...
+    def test_named_indexing_no_name_argument_in_model(self): # -> None:
+        ...
+    def test_named_indexing_invalid_names_in_data(self): # -> None:
+        ...
+    def test_named_indexing_no_name_argument_in_data(self): # -> None:
+        ...
+    def test_named_indexing_invalid_index_in_model(self): # -> None:
+        ...
+    def test_named_indexing_invalid_index_in_data(self): # -> None:
+        ...
+    def test_named_indexing_geom_size(self): # -> None:
+        ...
+ def test_named_indexing_geom_quat(self): # -> None: + ... + def test_named_indexing_ragged_qpos(self): # -> None: + ... + def test_named_indexing_ragged2d_cdof(self): # -> None: + ... + def test_named_indexing_repr_in_data(self): # -> None: + ... + def test_named_indexing_body_repr_in_data(self): # -> None: + ... + def test_named_indexing_repr_in_model(self): # -> None: + ... + def test_addresses_differ_between_structs(self): # -> None: + ... + def test_mjmodel_can_read_and_write_opt(self): # -> None: + ... + def test_mjmodel_can_read_and_write_stat(self): # -> None: + ... + def test_mjmodel_can_read_and_write_vis(self): # -> None: + ... + def test_mjmodel_can_access_names_directly(self): # -> None: + ... + def test_mjmodel_names_doesnt_copy(self): # -> None: + ... + def test_vis_global_exposed_as_global_(self): # -> None: + ... + def test_mjoption_can_make_default(self): # -> None: + ... + def test_mjoption_can_copy(self): # -> None: + ... + def test_mjmodel_can_copy(self): # -> None: + ... + def test_assets_array_filename_too_long(self): # -> None: + ... + def test_mjdata_can_copy(self): # -> None: + ... + def test_mjdata_can_read_warning_array(self): # -> None: + ... + def test_mjcontact_can_copy(self): # -> None: + ... + def test_mj_step(self): # -> None: + ... + def test_mj_step_multiple(self): # -> None: + ... + def test_mj_contact_list(self): # -> None: + ... + def test_realloc_con_efc(self): # -> None: + ... + def test_mj_struct_list_equality(self): # -> None: + ... 
+ @parameterized.named_parameters( + [ + ("MjOption", mujoco.MjOption, "tolerance"), + ("MjWarningStat", mujoco.MjWarningStat, "number"), + ("MjTimerStat", mujoco.MjTimerStat, "number"), + ("MjSolverStat", mujoco.MjSolverStat, "neval"), + ("MjContact", mujoco.MjContact, "dist"), + ("MjStatistic", mujoco.MjStatistic, "extent"), + ("MjLROpt", mujoco.MjLROpt, "maxforce"), + ("MjvPerturb", mujoco.MjvPerturb, "select"), + ("MjvCamera", mujoco.MjvCamera, "fixedcamid"), + ] + ) + def test_mj_struct_equality(self, cls, attr): # -> None: + ... + def test_mj_struct_equality_array(self): # -> None: + ... + @parameterized.named_parameters( + [ + ("MjOption", mujoco.MjOption, "tolerance"), + ("MjWarningStat", mujoco.MjWarningStat, "number"), + ("MjTimerStat", mujoco.MjTimerStat, "number"), + ("MjSolverStat", mujoco.MjSolverStat, "neval"), + ("MjContact", mujoco.MjContact, "dist"), + ("MjStatistic", mujoco.MjStatistic, "extent"), + ("MjLROpt", mujoco.MjLROpt, "maxforce"), + ("MjvPerturb", mujoco.MjvPerturb, "select"), + ("MjvCamera", mujoco.MjvCamera, "fixedcamid"), + ] + ) + def test_mj_struct_repr(self, cls, attr): # -> None: + ... + def test_mj_struct_repr_for_subclass(self): # -> None: + class MjWarningStatSubclass(mujoco.MjWarningStat): ... + + def test_mju_rotVecQuat(self): # -> None: + ... + def test_getsetstate(self): # -> None: + ... + def test_mj_jacSite(self): # -> None: + ... + def test_docstrings(self): # -> None: + ... + def test_int_constant(self): # -> None: + ... + def test_float_constant(self): # -> None: + ... + def test_string_constants(self): # -> None: + ... + def test_enum_values(self): # -> None: + ... + def test_enum_from_int(self): # -> None: + ... + def test_enum_as_index(self): # -> None: + ... + def test_enum_ops(self): # -> None: + ... + def test_can_raise_error(self): # -> None: + ... + def test_mjcb_time(self): # -> None: + class CallCounter: ... + + def test_mjcb_time_exception(self): # -> None: + class TestError(RuntimeError): ... 
+ + def test_mjcb_time_wrong_return_type(self): # -> None: + ... + def test_mjcb_time_not_callable(self): # -> None: + ... + def test_mjcb_sensor(self): # -> None: + class SensorCallback: ... + + @absltest.skipIf(sys.implementation.name == "pypy", reason="requires sys.getrefcount") + def test_mjcb_control_not_leak_memory(self): # -> None: + ... + @absltest.skipIf(sys.implementation.name == "pypy", reason="requires sys.getrefcount") + def test_mjdata_holds_ref_to_model(self): # -> None: + ... + def test_can_initialize_mjv_structs(self): # -> None: + ... + def test_mjv_camera(self): # -> None: + ... + def test_mjv_scene(self): # -> None: + ... + def test_mjv_scene_without_model(self): # -> None: + ... + def test_mj_ray(self): # -> None: + ... + def test_inverse_fd_none(self): # -> None: + ... + def test_inverse_fd(self): # -> None: + ... + def test_mjd_sub_quat(self): # -> None: + ... + def test_mjd_quat_intergrate(self): # -> None: + ... + def test_banded(self): # -> None: + ... + def test_mju_box_qp(self): # -> None: + ... + def test_mju_fill(self): # -> None: + ... + def test_mju_eye(self): # -> None: + ... + def test_mju_symmetrize(self): # -> None: + ... + def test_mju_clip(self): # -> None: + ... + def test_mju_mul_vec_mat_vec(self): # -> None: + ... + @parameterized.product(flg_html=(False, True), flg_pad=(False, True)) + def test_mj_printSchema(self, flg_html, flg_pad): # -> None: + ... + def test_pickle_mjdata(self): # -> None: + ... + def test_pickle_mjmodel(self): # -> None: + ... + def test_indexer_name_id(self): # -> None: + ... + def test_load_plugin(self): # -> None: + ... + def test_copy_mjdata_with_plugin(self): # -> None: + ... + def test_deepcopy_mjdata_with_plugin(self): # -> None: + ... + +if __name__ == "__main__": ... 
diff --git a/typings/mujoco/cgl/__init__.pyi b/typings/mujoco/cgl/__init__.pyi new file mode 100644 index 00000000..68c73ce3 --- /dev/null +++ b/typings/mujoco/cgl/__init__.pyi @@ -0,0 +1,25 @@ +""" +This type stub file was generated by pyright. +""" + +import atexit +import ctypes +import os + +from mujoco.cgl import cgl + +"""An Apple CGL context for offscreen rendering on macOS.""" +_ATTRIB = cgl.CGLPixelFormatAttribute +_PROFILE = cgl.CGLOpenGLProfile + +class GLContext: + """An EGL context for headless accelerated OpenGL rendering on GPU devices.""" + + def __init__(self, max_width, max_height) -> None: ... + def make_current(self): # -> None: + ... + def free(self): # -> None: + """Frees resources associated with this context.""" + ... + def __del__(self): # -> None: + ... diff --git a/typings/mujoco/cgl/cgl.pyi b/typings/mujoco/cgl/cgl.pyi new file mode 100644 index 00000000..5a215b84 --- /dev/null +++ b/typings/mujoco/cgl/cgl.pyi @@ -0,0 +1,81 @@ +""" +This type stub file was generated by pyright. +""" + +import ctypes +import enum + +"""Bindings for Apple CGL.""" +_CGL = ... +CGLContextObj = ctypes.c_void_p +CGLPixelFormatObj = ctypes.c_void_p +GLint = ctypes.c_int +_CGLChoosePixelFormat = ... +_CGLCreateContext = ... +_CGLErrorString = ... +_CGLLockContext = ... +_CGLReleaseContext = ... +_CGLReleasePixelFormat = ... +_CGLSetCurrentContext = ... +_CGLUnlockContext = ... + +class CGLOpenGLProfile(enum.IntEnum): + CGLOGLPVersion_Legacy = ... + CGLOGLPVersion_3_2_Core = ... + CGLOGLPVersion_GL3_Core = ... + CGLOGLPVersion_GL4_Core = ... + +class CGLPixelFormatAttribute(enum.IntEnum): + """CGLPixelFormatAttribute enum values.""" + + CGLPFAAllRenderers = ... + CGLPFATripleBuffer = ... + CGLPFADoubleBuffer = ... + CGLPFAColorSize = ... + CGLPFAAlphaSize = ... + CGLPFADepthSize = ... + CGLPFAStencilSize = ... + CGLPFAMinimumPolicy = ... + CGLPFAMaximumPolicy = ... + CGLPFASampleBuffers = ... + CGLPFASample = ... + CGLPFAColorFloat = ... + CGLPFAMultisample = ... 
+ CGLPFASupersample = ... + CGLPFASampleAlpha = ... + CGLPFARendererID = ... + CGLPFANoRecovery = ... + CGLPFAAccelerated = ... + CGLPFAClosestPolicy = ... + CGLPFABackingStore = ... + CGLPFABackingVolatile = ... + CGLPFADisplayMask = ... + CGLPFAAllowOfflineRenderers = ... + CGLPFAAcceleratedCompute = ... + CGLPFAOpenGLProfile = ... + CGLPFASupportsAutomaticGraphicsSwitching = ... + CGLPFAVirtualScreenCount = ... + CGLPFAAuxBuffers = ... + CGLPFAAccumSize = ... + CGLPFAAuxDepthStencil = ... + CGLPFAStereo = ... + CGLPFAOffScreen = ... + CGLPFAWindow = ... + CGLPFACompliant = ... + CGLPFAPBuffer = ... + CGLPFARemotePBuffer = ... + CGLPFASingleRenderer = ... + CGLPFARobust = ... + CGLPFAMPSafe = ... + CGLPFAMultiScreen = ... + CGLPFAFullScreen = ... + +class CGLError(RuntimeError): ... + +CGLChoosePixelFormat = ... +CGLCreateContext = ... +CGLLockContext = ... +CGLReleaseContext = ... +CGLReleasePixelFormat = ... +CGLSetCurrentContext = ... +CGLUnlockContext = ... diff --git a/typings/mujoco/egl/__init__.pyi b/typings/mujoco/egl/__init__.pyi new file mode 100644 index 00000000..7dcf794e --- /dev/null +++ b/typings/mujoco/egl/__init__.pyi @@ -0,0 +1,35 @@ +""" +This type stub file was generated by pyright. +""" + +import atexit +import ctypes +import os + +from mujoco.egl import egl_ext as EGL +from OpenGL import error + +"""An EGL context for headless accelerated OpenGL rendering on GPU devices.""" +PYOPENGL_PLATFORM = ... +if not PYOPENGL_PLATFORM: ... +else: ... + +def create_initialized_egl_device_display(): # -> Any | None: + """Creates an initialized EGL display directly on a device.""" + ... + +EGL_DISPLAY = ... +if EGL_DISPLAY == EGL.EGL_NO_DISPLAY: ... +EGL_ATTRIBUTES = ... + +class GLContext: + """An EGL context for headless accelerated OpenGL rendering on GPU devices.""" + + def __init__(self, max_width, max_height) -> None: ... + def make_current(self): # -> None: + ... 
+ def free(self): # -> None: + """Frees resources associated with this context.""" + ... + def __del__(self): # -> None: + ... diff --git a/typings/mujoco/egl/egl_ext.pyi b/typings/mujoco/egl/egl_ext.pyi new file mode 100644 index 00000000..3e7827a1 --- /dev/null +++ b/typings/mujoco/egl/egl_ext.pyi @@ -0,0 +1,15 @@ +""" +This type stub file was generated by pyright. +""" + +from OpenGL.EGL import * + +"""Extends OpenGL.EGL with definitions necessary for headless rendering.""" +PFNEGLQUERYDEVICESEXTPROC = ... +_eglQueryDevicesEXT = ... +EGL_PLATFORM_DEVICE_EXT = ... +PFNEGLGETPLATFORMDISPLAYEXTPROC = ... +eglGetPlatformDisplayEXT = ... + +def eglQueryDevicesEXT(max_devices=...): # -> list[Any]: + ... diff --git a/typings/mujoco/gl_context.pyi b/typings/mujoco/gl_context.pyi new file mode 100644 index 00000000..72e1e4af --- /dev/null +++ b/typings/mujoco/gl_context.pyi @@ -0,0 +1,11 @@ +""" +This type stub file was generated by pyright. +""" + +""" +This type stub file was generated by pyright. +""" +_SYSTEM = ... +_MUJOCO_GL = ... +if _MUJOCO_GL not in ("disable", "disabled", "off", "false", "0"): + _VALID_MUJOCO_GL = ... diff --git a/typings/mujoco/glfw/__init__.pyi b/typings/mujoco/glfw/__init__.pyi new file mode 100644 index 00000000..2bdf0663 --- /dev/null +++ b/typings/mujoco/glfw/__init__.pyi @@ -0,0 +1,18 @@ +""" +This type stub file was generated by pyright. +""" + +import glfw + +"""An OpenGL context created via GLFW.""" + +class GLContext: + """An OpenGL context created via GLFW.""" + + def __init__(self, max_width, max_height) -> None: ... + def make_current(self): # -> None: + ... + def free(self): # -> None: + ... + def __del__(self): # -> None: + ... diff --git a/typings/mujoco/mjx/__init__.pyi b/typings/mujoco/mjx/__init__.pyi new file mode 100644 index 00000000..7b356936 --- /dev/null +++ b/typings/mujoco/mjx/__init__.pyi @@ -0,0 +1,16 @@ +""" +This type stub file was generated by pyright. 
+""" + +from mujoco.mjx._src.collision_driver import collision +from mujoco.mjx._src.constraint import make_constraint +from mujoco.mjx._src.device import device_get_into, device_put +from mujoco.mjx._src.forward import forward, step +from mujoco.mjx._src.io import make_data +from mujoco.mjx._src.passive import passive +from mujoco.mjx._src.smooth import com_pos, com_vel, crb, factor_m, kinematics, mul_m, rne, transmission +from mujoco.mjx._src.types import * + +""" +This type stub file was generated by pyright. +""" diff --git a/typings/mujoco/mjx/_src/__init__.pyi b/typings/mujoco/mjx/_src/__init__.pyi new file mode 100644 index 00000000..e897159a --- /dev/null +++ b/typings/mujoco/mjx/_src/__init__.pyi @@ -0,0 +1,7 @@ +""" +This type stub file was generated by pyright. +""" + +""" +This type stub file was generated by pyright. +""" diff --git a/typings/mujoco/mjx/_src/collision_base.pyi b/typings/mujoco/mjx/_src/collision_base.pyi new file mode 100644 index 00000000..ec1adfe9 --- /dev/null +++ b/typings/mujoco/mjx/_src/collision_base.pyi @@ -0,0 +1,51 @@ +""" +This type stub file was generated by pyright. +""" + +import dataclasses +from typing import Dict, List, Optional, Tuple + +import jax +from mujoco.mjx._src.dataclasses import PyTreeNode +from mujoco.mjx._src.types import GeomType + +""" +This type stub file was generated by pyright. +""" +Contact = Tuple[jax.Array, jax.Array, jax.Array] + +@dataclasses.dataclass +class Candidate: + geom1: int + geom2: int + ipair: int + geomp: int + dim: int + ... + +CandidateSet = Dict[ + Tuple[GeomType, GeomType, Tuple[int, ...], Tuple[int, ...]], + List[Candidate], +] + +class GeomInfo(PyTreeNode): + """Collision info for a geom.""" + + pos: jax.Array + mat: jax.Array + size: jax.Array + face: Optional[jax.Array] = ... + vert: Optional[jax.Array] = ... + edge: Optional[jax.Array] = ... + facenorm: Optional[jax.Array] = ... 
+ +class SolverParams(PyTreeNode): + """Contact solver params.""" + + friction: jax.Array + solref: jax.Array + solreffriction: jax.Array + solimp: jax.Array + margin: jax.Array + gap: jax.Array + ... diff --git a/typings/mujoco/mjx/_src/collision_convex.pyi b/typings/mujoco/mjx/_src/collision_convex.pyi new file mode 100644 index 00000000..f1800419 --- /dev/null +++ b/typings/mujoco/mjx/_src/collision_convex.pyi @@ -0,0 +1,23 @@ +""" +This type stub file was generated by pyright. +""" + +from mujoco.mjx._src.collision_base import Contact, GeomInfo + +"""Convex collisions.""" + +def plane_convex(plane: GeomInfo, convex: GeomInfo) -> Contact: + """Calculates contacts between a plane and a convex object.""" + ... + +def sphere_convex(sphere: GeomInfo, convex: GeomInfo) -> Contact: + """Calculates contact between a sphere and a convex object.""" + ... + +def capsule_convex(cap: GeomInfo, convex: GeomInfo) -> Contact: + """Calculates contacts between a capsule and a convex object.""" + ... + +def convex_convex(c1: GeomInfo, c2: GeomInfo) -> Contact: + """Calculates contacts between two convex objects.""" + ... diff --git a/typings/mujoco/mjx/_src/collision_driver.pyi b/typings/mujoco/mjx/_src/collision_driver.pyi new file mode 100644 index 00000000..bac04394 --- /dev/null +++ b/typings/mujoco/mjx/_src/collision_driver.pyi @@ -0,0 +1,33 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Callable, Optional, Tuple, Union + +import mujoco +from mujoco.mjx._src import collision_base +from mujoco.mjx._src.collision_base import CandidateSet, GeomInfo +from mujoco.mjx._src.types import Contact, Data, GeomType, Model + +""" +This type stub file was generated by pyright. +""" +_COLLISION_FUNC = ... + +def get_collision_fn( + key: Tuple[Union[GeomType, mujoco.mjtGeom], Union[GeomType, mujoco.mjtGeom]] +) -> Optional[Callable[[GeomInfo, GeomInfo], collision_base.Contact]]: + """Returns a collision function given a pair of geom types.""" + ... 
+ +def collision_candidates(m: Union[Model, mujoco.MjModel]) -> CandidateSet: + """Returns candidates for collision checking.""" + ... + +def ncon(m: Model) -> int: + """Returns the number of contacts computed in MJX given a model.""" + ... + +def collision(m: Model, d: Data) -> Data: + """Collides geometries.""" + ... diff --git a/typings/mujoco/mjx/_src/collision_primitive.pyi b/typings/mujoco/mjx/_src/collision_primitive.pyi new file mode 100644 index 00000000..c27e10f3 --- /dev/null +++ b/typings/mujoco/mjx/_src/collision_primitive.pyi @@ -0,0 +1,27 @@ +""" +This type stub file was generated by pyright. +""" + +from mujoco.mjx._src.collision_base import Contact, GeomInfo + +"""Collision primitives.""" + +def plane_sphere(plane: GeomInfo, sphere: GeomInfo) -> Contact: + """Calculates contact between a plane and a sphere.""" + ... + +def plane_capsule(plane: GeomInfo, cap: GeomInfo) -> Contact: + """Calculates two contacts between a capsule and a plane.""" + ... + +def sphere_sphere(s1: GeomInfo, s2: GeomInfo) -> Contact: + """Calculates contact between two spheres.""" + ... + +def sphere_capsule(sphere: GeomInfo, cap: GeomInfo) -> Contact: + """Calculates one contact between a sphere and a capsule.""" + ... + +def capsule_capsule(cap1: GeomInfo, cap2: GeomInfo) -> Contact: + """Calculates one contact between two capsules.""" + ... diff --git a/typings/mujoco/mjx/_src/constraint.pyi b/typings/mujoco/mjx/_src/constraint.pyi new file mode 100644 index 00000000..fa23fd8f --- /dev/null +++ b/typings/mujoco/mjx/_src/constraint.pyi @@ -0,0 +1,33 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Tuple + +import jax +from mujoco.mjx._src.dataclasses import PyTreeNode +from mujoco.mjx._src.types import Data, Model + +""" +This type stub file was generated by pyright. 
+""" + +class _Efc(PyTreeNode): + """Support data for creating constraint matrices.""" + + J: jax.Array + pos: jax.Array + pos_norm: jax.Array + invweight: jax.Array + solref: jax.Array + solimp: jax.Array + frictionloss: jax.Array + ... + +def count_constraints(m: Model, d: Data) -> Tuple[int, int, int, int]: + """Returns equality, friction, limit, and contact constraint counts.""" + ... + +def make_constraint(m: Model, d: Data) -> Data: + """Creates constraint jacobians and other supporting data.""" + ... diff --git a/typings/mujoco/mjx/_src/dataclasses.pyi b/typings/mujoco/mjx/_src/dataclasses.pyi new file mode 100644 index 00000000..5caebe0d --- /dev/null +++ b/typings/mujoco/mjx/_src/dataclasses.pyi @@ -0,0 +1,41 @@ +""" +This type stub file was generated by pyright. +""" + +import typing +from typing import Dict, Optional, TypeVar + +import jax + +""" +This type stub file was generated by pyright. +""" +_T = TypeVar("_T") + +def dataclass(clz: _T) -> _T: + """Wraps a dataclass with metadata for which fields are pytrees. + + This is based off flax.struct.dataclass, but instead of using field + descriptors to specify which fields are pytrees, we follow a simple rule: + a leaf field is a pytree node if and only if it's a jax.Array + + Args: + clz: the class to register as a dataclass + + Returns: + the resulting dataclass, registered with Jax + """ + ... + +TNode = TypeVar("TNode", bound="PyTreeNode") + +class PyTreeNode: + """Base class for dataclasses that should act like a JAX pytree node. + + This base class additionally avoids type checking errors when using PyType. + """ + + def __init_subclass__(cls): ... + def __init__(self, *args, **kwargs) -> None: ... + def replace(self: TNode, **overrides) -> TNode: ... + def tree_replace(self, params: Dict[str, Optional[jax.typing.ArrayLike]]) -> PyTreeNode: ... 
diff --git a/typings/mujoco/mjx/_src/device.pyi b/typings/mujoco/mjx/_src/device.pyi new file mode 100644 index 00000000..02da7d02 --- /dev/null +++ b/typings/mujoco/mjx/_src/device.pyi @@ -0,0 +1,53 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import List, Union, overload + +import mujoco +from mujoco.mjx._src import types + +""" +This type stub file was generated by pyright. +""" +_MJ_TYPE_ATTR = ... +_TYPE_MAP = ... +_TRANSFORMS = ... +_INVERSE_TRANSFORMS = ... +_DERIVED = ... + +@overload +def device_put(value: mujoco.MjModel) -> types.Model: ... +@overload +def device_put(value: mujoco.MjData) -> types.Data: ... +def device_put(value): + """Places mujoco data onto a device. + + Args: + value: a mujoco struct to transfer + + Returns: + on-device MJX struct reflecting the input value + """ + ... + +@overload +def device_get_into(result: Union[mujoco.MjData, List[mujoco.MjData]], value: types.Data): ... +def device_get_into(result, value): + """Transfers data off device into a mujoco MjData. + + Data on device often has a batch dimension which adds (N,) to the beginning + of each array shape where N = batch size. + + If result is a single MjData, arrays are copied over with the batch dimension + intact. If result is a list, the list must be length N and will be populated + with distinct MjData structs where the batch dimension is stripped. + + Args: + result: struct (or list of structs) to transfer into + value: device value to transfer + + Raises: + RuntimeError: if result length doesn't match data batch size + """ + ... diff --git a/typings/mujoco/mjx/_src/forward.pyi b/typings/mujoco/mjx/_src/forward.pyi new file mode 100644 index 00000000..a5412f17 --- /dev/null +++ b/typings/mujoco/mjx/_src/forward.pyi @@ -0,0 +1,23 @@ +""" +This type stub file was generated by pyright. +""" + +import numpy as np +from mujoco.mjx._src.types import Data, Model + +""" +This type stub file was generated by pyright. 
+""" +_RK4_A = np.array([[0.5, 0, 0], [0, 0.5, 0], [0, 0, 1]]) +_RK4_B = np.array([1 / 6, 1 / 3, 1 / 3, 1 / 6]) + +def named_scope(fn, name: str = ...): ... +@named_scope +def forward(m: Model, d: Data) -> Data: + """Forward dynamics.""" + ... + +@named_scope +def step(m: Model, d: Data) -> Data: + """Advance simulation.""" + ... diff --git a/typings/mujoco/mjx/_src/io.pyi b/typings/mujoco/mjx/_src/io.pyi new file mode 100644 index 00000000..c1136619 --- /dev/null +++ b/typings/mujoco/mjx/_src/io.pyi @@ -0,0 +1,13 @@ +""" +This type stub file was generated by pyright. +""" + +from mujoco.mjx._src.types import Data, Model + +""" +This type stub file was generated by pyright. +""" + +def make_data(m: Model) -> Data: + """Allocate and initialize Data.""" + ... diff --git a/typings/mujoco/mjx/_src/math.pyi b/typings/mujoco/mjx/_src/math.pyi new file mode 100644 index 00000000..ad3b00f7 --- /dev/null +++ b/typings/mujoco/mjx/_src/math.pyi @@ -0,0 +1,194 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Optional, Tuple, Union + +import jax +from jax import numpy as jp + +"""Some useful math functions.""" + +def norm(x: jax.Array, axis: Optional[Union[Tuple[int, ...], int]] = ...) -> jax.Array: + """Calculates a linalg.norm(x) that's safe for gradients at x=0. + + Avoids a poorly defined gradient for jnp.linal.norm(0) see + https://github.com/google/jax/issues/3058 for details + Args: + x: A jnp.array + axis: The axis along which to compute the norm + + Returns: + Norm of the array x. + """ + ... + +def normalize_with_norm(x: jax.Array, axis: Optional[Union[Tuple[int, ...], int]] = ...) -> Tuple[jax.Array, jax.Array]: + """Normalizes an array. + + Args: + x: A jnp.array + axis: The axis along which to compute the norm + + Returns: + A tuple of (normalized array x, the norm). + """ + ... + +def normalize(x: jax.Array, axis: Optional[Union[Tuple[int, ...], int]] = ...) -> jax.Array: + """Normalizes an array. 
+ + Args: + x: A jnp.array + axis: The axis along which to compute the norm + + Returns: + normalized array x + """ + ... + +def rotate(vec: jax.Array, quat: jax.Array) -> jax.Array: + """Rotates a vector vec by a unit quaternion quat. + + Args: + vec: (3,) a vector + quat: (4,) a quaternion + + Returns: + ndarray(3) containing vec rotated by quat. + """ + ... + +def quat_inv(q: jp.ndarray) -> jp.ndarray: + """Calculates the inverse of quaternion q. + + Args: + q: (4,) quaternion [w, x, y, z] + + Returns: + The inverse of q, where qmult(q, inv_quat(q)) = [1, 0, 0, 0]. + """ + ... + +def quat_sub(u: jax.Array, v: jax.Array) -> jax.Array: + """Subtracts two quaternions (u - v) as a 3D velocity.""" + ... + +def quat_mul(u: jax.Array, v: jax.Array) -> jax.Array: + """Multiplies two quaternions. + + Args: + u: (4,) quaternion (w,x,y,z) + v: (4,) quaternion (w,x,y,z) + + Returns: + A quaternion u * v. + """ + ... + +def quat_mul_axis(q: jax.Array, axis: jax.Array) -> jax.Array: + """Multiplies a quaternion and an axis. + + Args: + q: (4,) quaternion (w,x,y,z) + axis: (3,) axis (x,y,z) + + Returns: + A quaternion q * axis + """ + ... + +def quat_to_mat(q: jax.Array) -> jax.Array: + """Converts a quaternion into a 9-dimensional rotation matrix.""" + ... + +def quat_to_axis_angle(q: jax.Array) -> Tuple[jax.Array, jax.Array]: + """Converts a quaternion into axis and angle.""" + ... + +def axis_angle_to_quat(axis: jax.Array, angle: jax.Array) -> jax.Array: + """Provides a quaternion that describes rotating around axis by angle. + + Args: + axis: (3,) axis (x,y,z) + angle: () float angle to rotate by + + Returns: + A quaternion that rotates around axis by angle + """ + ... + +def quat_integrate(q: jax.Array, v: jax.Array, dt: jax.Array) -> jax.Array: + """Integrates a quaternion given angular velocity and dt.""" + ... + +def inert_mul(i: jax.Array, v: jax.Array) -> jax.Array: + """Multiply inertia by motion, producing force. 
+ + Args: + i: (10,) inertia (inertia matrix, position, mass) + v: (6,) spatial motion + + Returns: + resultant force + """ + ... + +def transform_motion(vel: jax.Array, offset: jax.Array, rotmat: jax.Array): # -> Array: + """Transform spatial motion. + + Args: + vel: (6,) spatial motion (3 angular, 3 linear) + offset: (3,) translation + rotmat: (3, 3) rotation + + Returns: + 6d spatial velocity + """ + ... + +def motion_cross(u, v): # -> Array: + """Cross product of two motions. + + Args: + u: (6,) spatial motion + v: (6,) spatial motion + + Returns: + resultant spatial motion + """ + ... + +def motion_cross_force(v, f): # -> Array: + """Cross product of a motion and force. + + Args: + v: (6,) spatial motion + f: (6,) force + + Returns: + resultant force + """ + ... + +def orthogonals(a: jax.Array) -> Tuple[jax.Array, jax.Array]: + """Returns orthogonal vectors `b` and `c`, given a vector `a`.""" + ... + +def make_frame(a: jax.Array) -> jax.Array: + """Makes a right-handed 3D frame given a direction.""" + ... + +def closest_segment_point(a: jax.Array, b: jax.Array, pt: jax.Array) -> jax.Array: + """Returns the closest point on the a-b line segment to a point pt.""" + ... + +def closest_segment_point_and_dist(a: jax.Array, b: jax.Array, pt: jax.Array) -> Tuple[jax.Array, jax.Array]: + """Returns closest point on the line segment and the distance squared.""" + ... + +def closest_segment_to_segment_points( + a0: jax.Array, a1: jax.Array, b0: jax.Array, b1: jax.Array +) -> Tuple[jax.Array, jax.Array]: + """Returns closest points between two line segments.""" + ... diff --git a/typings/mujoco/mjx/_src/mesh.pyi b/typings/mujoco/mjx/_src/mesh.pyi new file mode 100644 index 00000000..7138e590 --- /dev/null +++ b/typings/mujoco/mjx/_src/mesh.pyi @@ -0,0 +1,20 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Dict, Optional, Sequence, Tuple + +import mujoco +import numpy as np + +"""Mesh processing.""" +_BOX_CORNERS = ... +_BOX_FACES = ... 
+_MAX_HULL_FACE_VERTICES = ... +_CONVEX_CACHE: Dict[Tuple[int, int], Dict[str, np.ndarray]] = ... +_DERIVED_ARGS = ... +DERIVED = ... + +def get(m: mujoco.MjModel) -> Dict[str, Sequence[Optional[np.ndarray]]]: + """Derives geom mesh attributes for mjx.Model from MjModel.""" + ... diff --git a/typings/mujoco/mjx/_src/passive.pyi b/typings/mujoco/mjx/_src/passive.pyi new file mode 100644 index 00000000..300d7555 --- /dev/null +++ b/typings/mujoco/mjx/_src/passive.pyi @@ -0,0 +1,13 @@ +""" +This type stub file was generated by pyright. +""" + +from mujoco.mjx._src.types import Data, Model + +""" +This type stub file was generated by pyright. +""" + +def passive(m: Model, d: Data) -> Data: + """Adds all passive forces.""" + ... diff --git a/typings/mujoco/mjx/_src/scan.pyi b/typings/mujoco/mjx/_src/scan.pyi new file mode 100644 index 00000000..6c3c5d13 --- /dev/null +++ b/typings/mujoco/mjx/_src/scan.pyi @@ -0,0 +1,73 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Callable, TypeVar + +from mujoco.mjx._src.types import Model + +"""Scan across data ordered by body joint types and kinematic tree order.""" +Y = TypeVar("Y") + +def flat(m: Model, f: Callable[..., Y], in_types: str, out_types: str, *args, group_by: str = ...) -> Y: + r"""Scan a function across bodies or actuators. 
+ + Scan group data according to type and batch shape then calls vmap(f) on it.\ + + Args: + m: an mjx model + f: a function to be scanned with the following type signature: + def f(key, *args) -> y + where + ``key`` gives grouping key for this function instance + ``*args`` are input arguments with types matching ``in_types`` + ``y`` is an output arguments with types matching ``out_type`` + in_types: string specifying the type of each input arg: + 'b': split according to bodies + 'j': split according to joint types + 'q': split according to generalized coordinates (len(qpos)) + 'v': split according to degrees of freedom (len(qvel)) + 'u': split according to actuators + 'a': split according to actuator activations + out_types: string specifying the types the output dimension matches + *args: the input arguments corresponding to ``in_types`` + group_by: the type to group by, either joints or actuators + + Returns: + The stacked outputs of ``f`` matching the model's order. + + Raises: + IndexError: if function output shape does not match out_types shape + """ + ... + +def body_tree(m: Model, f: Callable[..., Y], in_types: str, out_types: str, *args, reverse: bool = ...) -> Y: + r"""Scan ``f`` across bodies in tree order, carrying results up/down the tree. 
+ + This function groups bodies according to level and attached joints, then calls + vmap(f) on them.\ + + Args: + m: an mjx mjmodel + f: a function to be scanned with the following type signature: + def f(y, *args) -> y + where + ``y`` is the carry value and return value + ``*args`` are input arguments with types matching ``in_types`` + in_types: string specifying the type of each input arg: + 'b': split according to bodies + 'j': split according to joint types + 'q': split according to generalized coordinates (len(qpos)) + 'v': split according to degrees of freedom (len(qvel)) + out_types: string specifying the types the output dimension matches + *args: the input arguments corresponding to ``in_types`` + reverse: if True, scans up the body tree from leaves to root, otherwise + root to leaves + + Returns: + The stacked outputs of ``f`` matching the model's body order. + + Raises: + IndexError: if function output shape does not match out_types shape + """ + ... diff --git a/typings/mujoco/mjx/_src/smooth.pyi b/typings/mujoco/mjx/_src/smooth.pyi new file mode 100644 index 00000000..bebe3d9b --- /dev/null +++ b/typings/mujoco/mjx/_src/smooth.pyi @@ -0,0 +1,50 @@ +""" +This type stub file was generated by pyright. +""" + +import jax +from mujoco.mjx._src.types import Data, Model + +""" +This type stub file was generated by pyright. +""" + +def kinematics(m: Model, d: Data) -> Data: + """Converts position/velocity from generalized coordinates to maximal.""" + ... + +def com_pos(m: Model, d: Data) -> Data: + """Maps inertias and motion dofs to global frame centered at subtree-CoM.""" + ... + +def crb(m: Model, d: Data) -> Data: + """Runs composite rigid body inertia algorithm.""" + ... + +def factor_m(m: Model, d: Data, qM: jax.Array) -> Data: + """Gets sparse L'*D*L factorizaton of inertia-like matrix M, assumed spd.""" + ... + +def solve_m(m: Model, d: Data, x: jax.Array) -> jax.Array: + """Computes sparse backsubstitution: x = inv(L'*D*L)*y .""" + ... 
+ +def dense_m(m: Model, d: Data) -> jax.Array: + """Reconstitute dense mass matrix from qM.""" + ... + +def mul_m(m: Model, d: Data, vec: jax.Array) -> jax.Array: + """Multiply vector by inertia matrix.""" + ... + +def com_vel(m: Model, d: Data) -> Data: + """Computes cvel, cdof_dot.""" + ... + +def rne(m: Model, d: Data) -> Data: + """Computes inverse dynamics using the recursive Newton-Euler algorithm.""" + ... + +def transmission(m: Model, d: Data) -> Data: + """Computes actuator/transmission lengths and moments.""" + ... diff --git a/typings/mujoco/mjx/_src/solver.pyi b/typings/mujoco/mjx/_src/solver.pyi new file mode 100644 index 00000000..368f5784 --- /dev/null +++ b/typings/mujoco/mjx/_src/solver.pyi @@ -0,0 +1,87 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Optional + +import jax +from mujoco.mjx._src.dataclasses import PyTreeNode +from mujoco.mjx._src.types import Data, Model + +"""Constraint solvers.""" + +class _Context(PyTreeNode): + """Data updated during each solver iteration. + + Attributes: + qacc: acceleration (from Data) (nv,) + qfrc_constraint: constraint force (from Data) (nv,) + Jaref: Jac*qacc - aref (nefc,) + efc_force: constraint force in constraint space (nefc,) + M: dense mass matrix, populated for nv < 100 (nv, nv) + Ma: M*qacc (nv,) + grad: gradient of master cost (nv,) + Mgrad: M / grad (nv,) + search: linesearch vector (nv,) + gauss: gauss Cost + cost: constraint + Gauss cost + prev_cost: cost from previous iter + solver_niter: number of solver iterations + """ + + qacc: jax.Array + qfrc_constraint: jax.Array + Jaref: jax.Array + efc_force: jax.Array + M: Optional[jax.Array] + Ma: jax.Array + grad: jax.Array + Mgrad: jax.Array + search: jax.Array + gauss: jax.Array + cost: jax.Array + prev_cost: jax.Array + solver_niter: jax.Array + @classmethod + def create(cls, m: Model, d: Data, grad: bool = ...) -> _Context: ... + +class _LSPoint(PyTreeNode): + """Line search evaluation point. 
+ + Attributes: + alpha: step size that reduces f(x + alpha * p) given search direction p + cost: line search cost + deriv_0: first derivative of quadratic + deriv_1: second derivative of quadratic + """ + + alpha: jax.Array + cost: jax.Array + deriv_0: jax.Array + deriv_1: jax.Array + @classmethod + def create( + cls, d: Data, ctx: _Context, alpha: jax.Array, jv: jax.Array, quad: jax.Array, quad_gauss: jax.Array + ) -> _LSPoint: + """Creates a linesearch point with first and second derivatives.""" + ... + +class _LSContext(PyTreeNode): + """Data updated during each line search iteration. + + Attributes: + lo: low point bounding the line search interval + hi: high point bounding the line search interval + swap: True if low or hi was swapped in the line search iteration + ls_iter: number of linesearch iterations + """ + + lo: _LSPoint + hi: _LSPoint + swap: jax.Array + ls_iter: jax.Array + ... + +def solve(m: Model, d: Data) -> Data: + """Finds forces that satisfy constraints using conjugate gradient descent.""" + ... diff --git a/typings/mujoco/mjx/_src/support.pyi b/typings/mujoco/mjx/_src/support.pyi new file mode 100644 index 00000000..77e17d94 --- /dev/null +++ b/typings/mujoco/mjx/_src/support.pyi @@ -0,0 +1,26 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Tuple + +import jax +from mujoco.mjx._src.types import Data, Model + +"""Engine support functions.""" + +def jac(m: Model, d: Data, point: jax.Array, body_id: jax.Array) -> Tuple[jax.Array, jax.Array]: + """Compute pair of (NV, 3) Jacobians of global point attached to body.""" + ... + +def jac_dif_pair(m: Model, d: Data, pos: jax.Array, body_1: jax.Array, body_2: jax.Array) -> jax.Array: + """Compute Jacobian difference for two body points.""" + ... + +def apply_ft(m: Model, d: Data, force: jax.Array, torque: jax.Array, point: jax.Array, body_id: jax.Array) -> jax.Array: + """Apply Cartesian force and torque.""" + ... 
+ +def xfrc_accumulate(m: Model, d: Data) -> jax.Array: + """Accumulate xfrc_applied into a qfrc.""" + ... diff --git a/typings/mujoco/mjx/_src/types.pyi b/typings/mujoco/mjx/_src/types.pyi new file mode 100644 index 00000000..d683ea1a --- /dev/null +++ b/typings/mujoco/mjx/_src/types.pyi @@ -0,0 +1,608 @@ +""" +This type stub file was generated by pyright. +""" + +import enum +from typing import Sequence + +import jax +import numpy as np +from mujoco.mjx._src.dataclasses import PyTreeNode + +""" +This type stub file was generated by pyright. +""" + +class DisableBit(enum.IntFlag): + """Disable default feature bitflags. + + Attributes: + CONSTRAINT: entire constraint solver + EQUALITY: equality constraints + FRICTIONLOSS: joint and tendon frictionloss constraints + LIMIT: joint and tendon limit constraints + CONTACT: contact constraints + PASSIVE: passive forces + GRAVITY: gravitational forces + CLAMPCTRL: clamp control to specified range + WARMSTART: warmstart constraint solver + ACTUATION: apply actuation forces + REFSAFE: integrator safety: make ref[0]>=2*timestep + """ + + CONSTRAINT = ... + EQUALITY = ... + LIMIT = ... + CONTACT = ... + PASSIVE = ... + GRAVITY = ... + CLAMPCTRL = ... + WARMSTART = ... + ACTUATION = ... + REFSAFE = ... + EULERDAMP = ... + FILTERPARENT = ... + +class JointType(enum.IntEnum): + """Type of degree of freedom. + + Attributes: + FREE: global position and orientation (quat) (7,) + BALL: orientation (quat) relative to parent (4,) + SLIDE: sliding distance along body-fixed axis (1,) + HINGE: rotation angle (rad) around body-fixed axis (1,) + """ + + FREE = ... + BALL = ... + SLIDE = ... + HINGE = ... + def dof_width(self) -> int: ... + def qpos_width(self) -> int: ... + +class IntegratorType(enum.IntEnum): + """Integrator mode. + + Attributes: + EULER: semi-implicit Euler + RK4: 4th-order Runge Kutta + """ + + EULER = ... + RK4 = ... + +class GeomType(enum.IntEnum): + """Type of geometry. 
+ + Attributes: + PLANE: plane + HFIELD: height field + SPHERE: sphere + CAPSULE: capsule + ELLIPSOID: ellipsoid + CYLINDER: cylinder + BOX: box + MESH: mesh + """ + + PLANE = ... + HFIELD = ... + SPHERE = ... + CAPSULE = ... + ELLIPSOID = ... + CYLINDER = ... + BOX = ... + MESH = ... + +class ConeType(enum.IntEnum): + """Type of friction cone. + + Attributes: + PYRAMIDAL: pyramidal + """ + + PYRAMIDAL = ... + +class SolverType(enum.IntEnum): + """Constraint solver algorithm. + + Attributes: + CG: Conjugate gradient (primal) + """ + + CG = ... + NEWTON = ... + +class EqType(enum.IntEnum): + """Type of equality constraint. + + Attributes: + CONNECT: connect two bodies at a point (ball joint) + WELD: fix relative position and orientation of two bodies + JOINT: couple the values of two scalar joints with cubic + """ + + CONNECT = ... + WELD = ... + JOINT = ... + +class TrnType(enum.IntEnum): + """Type of actuator transmission. + + Attributes: + JOINT: force on joint + """ + + JOINT = ... + +class DynType(enum.IntEnum): + """Type of actuator dynamics. + + Attributes: + NONE: no internal dynamics; ctrl specifies force + INTEGRATOR: integrator: da/dt = u + """ + + NONE = ... + INTEGRATOR = ... + FILTER = ... + +class GainType(enum.IntEnum): + """Type of actuator gain. + + Attributes: + FIXED: fixed gain + AFFINE: const + kp*length + kv*velocity + """ + + FIXED = ... + AFFINE = ... + +class BiasType(enum.IntEnum): + """Type of actuator bias. + + Attributes: + NONE: no bias + AFFINE: const + kp*length + kv*velocity + """ + + NONE = ... + AFFINE = ... + +class Option(PyTreeNode): + """Physics options. + + Attributes: + timestep: timestep + tolerance: main solver tolerance + ls_tolerance: CG/Newton linesearch tolerance + gravity: gravitational acceleration (3,) + wind: wind (for lift, drag and viscosity) + density: density of medium + viscosity: viscosity of medium + has_fluid_params: automatically set by mjx if wind/density/viscosity are + nonzero. 
Not used by mj + integrator: integration mode + cone: type of friction cone + solver: solver algorithm + integrator: integration mode + iterations: number of main solver iterations + ls_iterations: maximum number of CG/Newton linesearch iterations + disableflags: bit flags for disabling standard features + """ + + timestep: jax.Array + tolerance: jax.Array + ls_tolerance: jax.Array + gravity: jax.Array + wind: jax.Array + density: jax.Array + viscosity: jax.Array + has_fluid_params: bool + integrator: IntegratorType + cone: ConeType + solver: SolverType + iterations: int + ls_iterations: int + disableflags: DisableBit + ... + +class Statistic(PyTreeNode): + """Model statistics (in qpos0). + + Attributes: + meaninertia: mean diagonal inertia + """ + + meaninertia: jax.Array + ... + +class Model(PyTreeNode): + """Static model of the scene that remains unchanged with each physics step. + + Attributes: + nq: number of generalized coordinates = dim(qpos) + nv: number of degrees of freedom = dim(qvel) + nu: number of actuators/controls = dim(ctrl) + na: number of activation states = dim(act) + nbody: number of bodies + njnt: number of joints + ngeom: number of geoms + nmesh: number of meshes + npair: number of predefined geom pairs + nexclude: number of excluded geom pairs + neq: number of equality constraints + nnumeric: number of numeric custom fields + nM: number of non-zeros in sparse inertia matrix + opt: physics options + stat: model statistics + qpos0: qpos values at default pose (nq,) + qpos_spring: reference pose for springs (nq,) + body_parentid: id of body's parent (nbody,) + body_rootid: id of root above body (nbody,) + body_weldid: id of body that this body is welded to (nbody,) + body_jntnum: number of joints for this body (nbody,) + body_jntadr: start addr of joints; -1: no joints (nbody,) + body_dofnum: number of motion degrees of freedom (nbody,) + body_dofadr: start addr of dofs; -1: no dofs (nbody,) + body_geomnum: number of geoms (nbody,) + 
body_geomadr: start addr of geoms; -1: no geoms (nbody,) + body_pos: position offset rel. to parent body (nbody, 3) + body_quat: orientation offset rel. to parent body (nbody, 4) + body_ipos: local position of center of mass (nbody, 3) + body_iquat: local orientation of inertia ellipsoid (nbody, 4) + body_mass: mass (nbody,) + body_subtreemass: mass of subtree starting at this body (nbody,) + body_inertia: diagonal inertia in ipos/iquat frame (nbody, 3) + body_invweight0: mean inv inert in qpos0 (trn, rot) (nbody, 2) + jnt_type: type of joint (mjtJoint) (njnt,) + jnt_qposadr: start addr in 'qpos' for joint's data (njnt,) + jnt_dofadr: start addr in 'qvel' for joint's data (njnt,) + jnt_bodyid: id of joint's body (njnt,) + jnt_group: group for visibility (njnt,) + jnt_limited: does joint have limits (njnt,) + jnt_solref: constraint solver reference: limit (njnt, mjNREF) + jnt_solimp: constraint solver impedance: limit (njnt, mjNIMP) + jnt_pos: local anchor position (njnt, 3) + jnt_axis: local joint axis (njnt, 3) + jnt_stiffness: stiffness coefficient (njnt,) + jnt_range: joint limits (njnt, 2) + jnt_actfrcrange: range of total actuator force (njnt, 2) + jnt_margin: min distance for limit detection (njnt,) + dof_bodyid: id of dof's body (nv,) + dof_jntid: id of dof's joint (nv,) + dof_parentid: id of dof's parent; -1: none (nv,) + dof_Madr: dof address in M-diagonal (nv,) + dof_solref: constraint solver reference:frictionloss (nv, mjNREF) + dof_solimp: constraint solver impedance:frictionloss (nv, mjNIMP) + dof_frictionloss: dof friction loss (nv,) + dof_armature: dof armature inertia/mass (nv,) + dof_damping: damping coefficient (nv,) + dof_invweight0: diag. inverse inertia in qpos0 (nv,) + dof_M0: diag. 
inertia in qpos0 (nv,) + geom_type: geometric type (mjtGeom) (ngeom,) + geom_contype: geom contact type (ngeom,) + geom_conaffinity: geom contact affinity (ngeom,) + geom_condim: contact dimensionality (1, 3, 4, 6) (ngeom,) + geom_bodyid: id of geom's body (ngeom,) + geom_priority: geom contact priority (ngeom,) + geom_solmix: mixing coef for solref/imp in geom pair (ngeom,) + geom_solref: constraint solver reference: contact (ngeom, mjNREF) + geom_solimp: constraint solver impedance: contact (ngeom, mjNIMP) + geom_size: geom-specific size parameters (ngeom, 3) + geom_pos: local position offset rel. to body (ngeom, 3) + geom_quat: local orientation offset rel. to body (ngeom, 4) + geom_friction: friction for (slide, spin, roll) (ngeom, 3) + geom_margin: include in solver if dist Contact: + """Returns a contact filled with zeros.""" + ... + +class Data(PyTreeNode): + """Dynamic state that updates each step. + + Attributes: + solver_niter: number of solver iterations, per island (mjNISLAND,) + ne: number of equality constraints + nf: number of friction constraints + nl: number of limit constraints + nefc: number of constraints + ncon: nubmer of contacts + time: simulation time + qpos: position (nq,) + qvel: velocity (nv,) + act: actuator activation (na,) + qacc_warmstart: acceleration used for warmstart (nv,) + ctrl: control (nu,) + qfrc_applied: applied generalized force (nv,) + xfrc_applied: applied Cartesian force/torque (nbody, 6) + eq_active: enable/disable constraints (neq,) + qacc: acceleration (nv,) + act_dot: time-derivative of actuator activation (na,) + xpos: Cartesian position of body frame (nbody, 3) + xquat: Cartesian orientation of body frame (nbody, 4) + xmat: Cartesian orientation of body frame (nbody, 3, 3) + xipos: Cartesian position of body com (nbody, 3) + ximat: Cartesian orientation of body inertia (nbody, 3, 3) + xanchor: Cartesian position of joint anchor (njnt, 3) + xaxis: Cartesian joint axis (njnt, 3) + geom_xpos: Cartesian geom position 
(ngeom, 3) + geom_xmat: Cartesian geom orientation (ngeom, 3, 3) + subtree_com: center of mass of each subtree (nbody, 3) + cdof: com-based motion axis of each dof (nv, 6) + cinert: com-based body inertia and mass (nbody, 10) + actuator_length: actuator lengths (nu,) + actuator_moment: actuator moments (nu, nv) + crb: com-based composite inertia and mass (nbody, 10) + qM: total inertia (sparse) (nM,) + qLD: L'*D*L factorization of M (sparse) (nM,) + qLDiagInv: 1/diag(D) (nv,) + qLDiagSqrtInv: 1/sqrt(diag(D)) (nv,) + contact: list of all detected contacts (ncon,) + efc_J: constraint Jacobian (nefc, nv) + efc_frictionloss: frictionloss (friction) (nefc,) + efc_D: constraint mass (nefc,) + actuator_velocity: actuator velocities (nu,) + cvel: com-based velocity [3D rot; 3D tran] (nbody, 6) + cdof_dot: time-derivative of cdof (nv, 6) + qfrc_bias: C(qpos,qvel) (nv,) + qfrc_passive: passive force (nv,) + efc_aref: reference pseudo-acceleration (nefc,) + actuator_force: actuator force in actuation space (nu,) + qfrc_actuator: actuator force (nv,) + qfrc_smooth: net unconstrained force (nv,) + qacc_smooth: unconstrained acceleration (nv,) + qfrc_constraint: constraint force (nv,) + qfrc_inverse: net external force; should equal: (nv,) + qfrc_applied + J'*xfrc_applied + qfrc_actuator + efc_force: constraint force in constraint space (nefc,) + """ + + solver_niter: jax.Array + ne: int + nf: int + nl: int + nefc: int + ncon: int + time: jax.Array + qpos: jax.Array + qvel: jax.Array + act: jax.Array + qacc_warmstart: jax.Array + ctrl: jax.Array + qfrc_applied: jax.Array + xfrc_applied: jax.Array + eq_active: jax.Array + qacc: jax.Array + act_dot: jax.Array + xpos: jax.Array + xquat: jax.Array + xmat: jax.Array + xipos: jax.Array + ximat: jax.Array + xanchor: jax.Array + xaxis: jax.Array + geom_xpos: jax.Array + geom_xmat: jax.Array + subtree_com: jax.Array + cdof: jax.Array + cinert: jax.Array + crb: jax.Array + actuator_length: jax.Array + actuator_moment: jax.Array + qM: 
jax.Array + qLD: jax.Array + qLDiagInv: jax.Array + qLDiagSqrtInv: jax.Array + contact: Contact + efc_J: jax.Array + efc_frictionloss: jax.Array + efc_D: jax.Array + actuator_velocity: jax.Array + cvel: jax.Array + cdof_dot: jax.Array + qfrc_bias: jax.Array + qfrc_passive: jax.Array + efc_aref: jax.Array + actuator_force: jax.Array + qfrc_actuator: jax.Array + qfrc_smooth: jax.Array + qacc_smooth: jax.Array + qfrc_constraint: jax.Array + qfrc_inverse: jax.Array + efc_force: jax.Array + ... diff --git a/typings/mujoco/msh2obj.pyi b/typings/mujoco/msh2obj.pyi new file mode 100644 index 00000000..f07a5338 --- /dev/null +++ b/typings/mujoco/msh2obj.pyi @@ -0,0 +1,35 @@ +""" +This type stub file was generated by pyright. +""" + +import dataclasses +import pathlib + +import numpy as np + +"""CLI for converting legacy MSH files to Wavefront OBJ files. + +Usage: + python -m mujoco.msh2obj -i -o +""" + +@dataclasses.dataclass(frozen=True) +class Msh: + """MuJoCo legacy binary msh file.""" + + vertex_positions: np.ndarray + vertex_normals: np.ndarray + vertex_texcoords: np.ndarray + face_vertex_indices: np.ndarray + @staticmethod + def create(file: pathlib.Path) -> Msh: + """Create a Msh object from a .msh file.""" + ... + +def msh_to_obj(msh_file: pathlib.Path) -> str: + """Convert a legacy .msh file to the .obj format.""" + ... + +if __name__ == "__main__": + parser = ... + args = ... diff --git a/typings/mujoco/msh2obj_test.pyi b/typings/mujoco/msh2obj_test.pyi new file mode 100644 index 00000000..52f1de73 --- /dev/null +++ b/typings/mujoco/msh2obj_test.pyi @@ -0,0 +1,14 @@ +""" +This type stub file was generated by pyright. +""" + +from absl.testing import absltest + +"""Tests for msh2obj.py.""" +_MESH_FIELDS = ... +_XML = ... + +class MshTest(absltest.TestCase): + def test_obj_model_matches_msh_model(self) -> None: ... + +if __name__ == "__main__": ... 
diff --git a/typings/mujoco/osmesa/__init__.pyi b/typings/mujoco/osmesa/__init__.pyi new file mode 100644 index 00000000..fa1a4b57 --- /dev/null +++ b/typings/mujoco/osmesa/__init__.pyi @@ -0,0 +1,30 @@ +""" +This type stub file was generated by pyright. +""" + +import os + +from OpenGL import GL, osmesa +from OpenGL.GL import arrays + +"""An OSMesa context for software-based OpenGL rendering.""" +PYOPENGL_PLATFORM = ... +if not PYOPENGL_PLATFORM: ... +else: ... +_DEPTH_BITS = ... +_STENCIL_BITS = ... +_ACCUM_BITS = ... + +class GLContext: + """An OSMesa context for software-based OpenGL rendering.""" + + def __init__(self, max_width, max_height) -> None: + """Initializes this OSMesa context.""" + ... + def make_current(self): # -> None: + ... + def free(self): # -> None: + """Frees resources associated with this context.""" + ... + def __del__(self): # -> None: + ... diff --git a/typings/mujoco/render_test.pyi b/typings/mujoco/render_test.pyi new file mode 100644 index 00000000..b03eb02e --- /dev/null +++ b/typings/mujoco/render_test.pyi @@ -0,0 +1,32 @@ +""" +This type stub file was generated by pyright. +""" + +import mujoco +from absl.testing import absltest + +"""Tests for MuJoCo Python rendering.""" + +@absltest.skipUnless(hasattr(mujoco, "GLContext"), "MuJoCo rendering is disabled") +class MuJoCoRenderTest(absltest.TestCase): + def setUp(self): # -> None: + ... + def tearDown(self): # -> None: + ... + def test_can_render(self): # -> None: + """Test that the bindings can successfully render a simple image. + + This test sets up a basic MuJoCo rendering context similar to the example in + https://mujoco.readthedocs.io/en/latest/programming#visualization + It calls `mjr_rectangle` rather than `mjr_render` so that we can assert an + exact rendered image without needing golden data. The purpose of this test + is to ensure that the bindings can correctly return pixels in Python, rather + than to test MuJoCo's rendering pipeline itself. + """ + ... 
+ def test_safe_to_free_context_twice(self): # -> None: + ... + def test_mjrrect_repr(self): # -> None: + ... + +if __name__ == "__main__": ... diff --git a/typings/mujoco/renderer.pyi b/typings/mujoco/renderer.pyi new file mode 100644 index 00000000..b388d49f --- /dev/null +++ b/typings/mujoco/renderer.pyi @@ -0,0 +1,78 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Optional, Union + +import numpy as np +from mujoco import _structs + +""" +This type stub file was generated by pyright. +""" + +class Renderer: + """Renders MuJoCo scenes.""" + + def __init__(self, model: _structs.MjModel, height: int = ..., width: int = ..., max_geom: int = ...) -> None: + """Initializes a new `Renderer`. + + Args: + model: an MjModel instance. + height: image height in pixels. + width: image width in pixels. + max_geom: Optional integer specifying the maximum number of geoms that can + be rendered in the same scene. If None this will be chosen automatically + based on the estimated maximum number of renderable geoms in the model. + Raises: + ValueError: If `camera_id` is outside the valid range, or if `width` or + `height` exceed the dimensions of MuJoCo's offscreen framebuffer. + """ + ... + @property + def model(self): ... + @property + def scene(self) -> _structs.MjvScene: ... + @property + def height(self): ... + @property + def width(self): ... + def enable_depth_rendering(self): ... + def disable_depth_rendering(self): ... + def enable_segmentation_rendering(self): ... + def disable_segmentation_rendering(self): ... + def render(self, *, out: Optional[np.ndarray] = ...) -> np.ndarray: + """Renders the scene as a numpy array of pixel values. + + Args: + out: Alternative output array in which to place the resulting pixels. It + must have the same shape as the expected output but the type will be + cast if necessary. 
The expted shape depends on the value of + `self._depth_rendering`: when `True`, we expect `out.shape == (width, + height)`, and `out.shape == (width, height, 3)` when `False`. + + Returns: + A new numpy array holding the pixels with shape `(H, W)` or `(H, W, 3)`, + depending on the value of `self._depth_rendering` unless + `out is None`, in which case a reference to `out` is returned. + """ + ... + def update_scene( + self, + data: _structs.MjData, + camera: Union[int, str, _structs.MjvCamera] = ..., + scene_option: Optional[_structs.MjvOption] = ..., + ): + """Updates geometry used for rendering. + + Args: + data: An instance of `MjData`. + camera: An instance of `MjvCamera`, a string or an integer + scene_option: A custom `MjvOption` instance to use to render + the scene instead of the default. + + Raises: + ValueError: If `camera_id` is outside the valid range, or if camera does + not exist. + """ + ... diff --git a/typings/mujoco/renderer_test.pyi b/typings/mujoco/renderer_test.pyi new file mode 100644 index 00000000..f24be813 --- /dev/null +++ b/typings/mujoco/renderer_test.pyi @@ -0,0 +1,25 @@ +""" +This type stub file was generated by pyright. +""" + +import mujoco +from absl.testing import absltest, parameterized + +"""Tests for the MuJoCo renderer.""" + +@absltest.skipUnless(hasattr(mujoco, "GLContext"), "MuJoCo rendering is disabled") +class MuJoCoRendererTest(parameterized.TestCase): + def test_renderer_unknown_camera_name(self): # -> None: + ... + def test_renderer_camera_under_range(self): # -> None: + ... + def test_renderer_camera_over_range(self): # -> None: + ... + def test_renderer_renders_scene(self): # -> None: + ... + def test_renderer_output_without_out(self): # -> None: + ... + def test_renderer_output_with_out(self): # -> None: + ... + +if __name__ == "__main__": ... 
diff --git a/typings/mujoco/rollout.pyi b/typings/mujoco/rollout.pyi new file mode 100644 index 00000000..f31a8f19 --- /dev/null +++ b/typings/mujoco/rollout.pyi @@ -0,0 +1,31 @@ +""" +This type stub file was generated by pyright. +""" + +"""Roll out open-loop trajectories from initial states, get subsequent states and sensor values.""" + +def rollout( + model, + data, + initial_state=..., + ctrl=..., + *, + skip_checks=..., + nstate=..., + nstep=..., + initial_time=..., + initial_warmstart=..., + qfrc_applied=..., + xfrc_applied=..., + mocap=..., + state=..., + sensordata=..., +): # -> tuple[Unknown | None, Unknown | None] | tuple[ndarray[Any, dtype[float64]], ndarray[Any, dtype[float64]]]: + """Roll out open-loop trajectories from initial states, get subsequent states and sensor values. + + This function serves as a Python wrapper for the C++ functionality in + `rollout.cc`, please see documentation therein. This python funtion will + infer `nstate` and `nstep`, tile input arguments with singleton dimensions, + and allocate output arguments if none are given. + """ + ... diff --git a/typings/mujoco/rollout_test.pyi b/typings/mujoco/rollout_test.pyi new file mode 100644 index 00000000..88dab76c --- /dev/null +++ b/typings/mujoco/rollout_test.pyi @@ -0,0 +1,73 @@ +""" +This type stub file was generated by pyright. +""" + +from absl.testing import parameterized + +"""tests for rollout function.""" +TEST_XML = ... +TEST_XML_NO_SENSORS = ... +TEST_XML_NO_ACTUATORS = ... +TEST_XML_MOCAP = ... +TEST_XML_EMPTY = ... +ALL_MODELS = ... + +class MuJoCoRolloutTest(parameterized.TestCase): + def setUp(self): # -> None: + ... + @parameterized.parameters(ALL_MODELS.keys()) + def test_single_step(self, model_name): # -> None: + ... + @parameterized.parameters(ALL_MODELS.keys()) + def test_single_rollout(self, model_name): # -> None: + ... + @parameterized.parameters(ALL_MODELS.keys()) + def test_multi_step(self, model_name): # -> None: + ... 
+ @parameterized.parameters(ALL_MODELS.keys()) + def test_single_rollout_fixed_ctrl(self, model_name): # -> None: + ... + @parameterized.parameters(ALL_MODELS.keys()) + def test_multi_rollout(self, model_name): # -> None: + ... + @parameterized.parameters(ALL_MODELS.keys()) + def test_multi_rollout_fixed_ctrl_infer_from_output(self, model_name): # -> None: + ... + @parameterized.product(arg_nstep=[[3, 1, 1], [3, 3, 1], [3, 1, 3]], model_name=list(ALL_MODELS.keys())) + def test_multi_rollout_multiple_inputs(self, arg_nstep, model_name): # -> None: + ... + def test_threading(self): # -> None: + ... + def test_time(self): # -> None: + ... + def test_warmstart(self): # -> None: + ... + def test_mocap(self): # -> None: + ... + def test_intercept_mj_errors(self): # -> None: + ... + def test_invalid(self): # -> None: + ... + def test_bad_sizes(self): # -> None: + ... + def test_stateless(self): # -> None: + ... + +def get_state(data): # -> NDArray[Unknown]: + ... + +def set_state(model, data, state): # -> None: + ... + +def step(model, data, state, **kwargs): # -> tuple[NDArray[Unknown], Unknown]: + ... + +def single_rollout(model, data, initial_state, **kwargs): # -> tuple[NDArray[float64], NDArray[float64]]: + ... + +def multi_rollout( + model, data, initial_state, **kwargs +): # -> tuple[ndarray[Any, dtype[float64]], ndarray[Any, dtype[float64]]]: + ... + +if __name__ == "__main__": ... diff --git a/typings/mujoco/viewer.pyi b/typings/mujoco/viewer.pyi new file mode 100644 index 00000000..c8fae3d2 --- /dev/null +++ b/typings/mujoco/viewer.pyi @@ -0,0 +1,103 @@ +""" +This type stub file was generated by pyright. +""" + +import abc +import queue +from typing import Callable, Optional, Tuple, Union + +import glfw +import mujoco + +"""Interactive GUI viewer for MuJoCo.""" +if not glfw._glfw: ... +else: ... +PERCENT_REALTIME = ... +MAX_SYNC_MISALIGN = ... +SIM_REFRESH_FRACTION = ... 
+CallbackType = Callable[[mujoco.MjModel, mujoco.MjData], None] +LoaderType = Callable[[], Tuple[mujoco.MjModel, mujoco.MjData]] +KeyCallbackType = Callable[[int], None] +_LoaderWithPathType = Callable[[], Tuple[mujoco.MjModel, mujoco.MjData, str]] +_InternalLoaderType = Union[LoaderType, _LoaderWithPathType] +_Simulate = ... + +class Handle: + """A handle for interacting with a MuJoCo viewer.""" + + def __init__( + self, + sim: _Simulate, + cam: mujoco.MjvCamera, + opt: mujoco.MjvOption, + pert: mujoco.MjvPerturb, + user_scn: Optional[mujoco.MjvScene], + ) -> None: ... + @property + def cam(self): ... + @property + def opt(self): ... + @property + def perturb(self): ... + @property + def user_scn(self): # -> None: + ... + def close(self): # -> None: + ... + def is_running(self) -> bool: ... + def lock(self): # -> nullcontext[None]: + ... + def sync(self): # -> None: + ... + def update_hfield(self, hfieldid: int): # -> None: + ... + def update_mesh(self, meshid: int): # -> None: + ... + def update_texture(self, texid: int): # -> None: + ... + def __enter__(self): # -> Self@Handle: + ... + def __exit__(self, exc_type, exc_val, exc_tb): # -> None: + ... + +class _MjPythonBase(metaclass=abc.ABCMeta): + def launch_on_ui_thread( + self, + model: mujoco.MjModel, + data: mujoco.MjData, + handle_return: Optional[queue.Queue[Handle]], + key_callback: Optional[KeyCallbackType], + ): # -> None: + ... + +_MJPYTHON: Optional[_MjPythonBase] = ... + +def launch( + model: Optional[mujoco.MjModel] = ..., + data: Optional[mujoco.MjData] = ..., + *, + loader: Optional[LoaderType] = ..., + show_left_ui: bool = ..., + show_right_ui: bool = ..., +) -> None: + """Launches the Simulate GUI.""" + ... + +def launch_from_path(path: str) -> None: + """Launches the Simulate GUI from file path.""" + ... 
+ +def launch_passive( + model: mujoco.MjModel, + data: mujoco.MjData, + *, + key_callback: Optional[KeyCallbackType] = ..., + show_left_ui: bool = ..., + show_right_ui: bool = ..., +) -> Handle: + """Launches a passive Simulate GUI without blocking the running thread.""" + ... + +if __name__ == "__main__": + _MJCF_PATH = ... + def main(argv) -> None: ... diff --git a/typings/mujoco/viewer_test.pyi b/typings/mujoco/viewer_test.pyi new file mode 100644 index 00000000..66645387 --- /dev/null +++ b/typings/mujoco/viewer_test.pyi @@ -0,0 +1,13 @@ +""" +This type stub file was generated by pyright. +""" + +from absl.testing import absltest + +"""Extremely minimal test of mujoco.viewer that just tries to import it.""" + +class ViewerTest(absltest.TestCase): + def test_launch_function_exists(self): # -> None: + ... + +if __name__ == "__main__": ... diff --git a/typings/trimesh/__init__.pyi b/typings/trimesh/__init__.pyi new file mode 100644 index 00000000..ab4e805f --- /dev/null +++ b/typings/trimesh/__init__.pyi @@ -0,0 +1,98 @@ +""" +This type stub file was generated by pyright. +""" + +from . import ( + boolean, + caching, + collision, + comparison, + convex, + creation, + curvature, + decomposition, + geometry, + graph, + grouping, + inertia, + intersections, + path, + permutate, + poses, + primitives, + proximity, + ray, + registration, + remesh, + repair, + sample, + smoothing, + transformations, + triangles, + units, + util, +) +from .base import Trimesh +from .constants import tol +from .exchange.load import available_formats, load, load_mesh, load_path, load_remote +from .points import PointCloud +from .scene.scene import Scene +from .transformations import transform_points +from .util import unitize +from .version import __version__ + +""" +https://github.com/mikedh/trimesh +------------------------------------ + +Trimesh is a pure Python (2.7- 3.3+) library for loading and using triangular +meshes with an emphasis on watertight meshes. 
The goal of the library is to +provide a fully featured Trimesh object which allows for easy manipulation +and analysis, in the style of the Polygon object in the Shapely library. +""" +__all__ = [ + "PointCloud", + "Trimesh", + "Scene", + "util", + "__version__", + "available_formats", + "boolean", + "bounds", + "caching", + "collision", + "comparison", + "convex", + "creation", + "curvature", + "decomposition", + "geometry", + "graph", + "grouping", + "inertia", + "intersections", + "load", + "load_mesh", + "load_path", + "load_remote", + "nsphere", + "path", + "permutate", + "poses", + "primitives", + "proximity", + "ray", + "registration", + "remesh", + "repair", + "sample", + "smoothing", + "tol", + "transform_points", + "transformations", + "triangles", + "unitize", + "units", + "utilScene", + "voxel", +] diff --git a/typings/trimesh/base.pyi b/typings/trimesh/base.pyi new file mode 100644 index 00000000..77e31cba --- /dev/null +++ b/typings/trimesh/base.pyi @@ -0,0 +1,2030 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Any, Dict, List, Optional, Tuple, Union + +from networkx import Graph +from numpy import float64, int64, ndarray +from rtree import Index +from scipy.sparse import coo_matrix +from scipy.spatial import cKDTree + +from . import caching, units +from .parent import Geometry3D +from .path import Path2D, Path3D +from .scene import Scene +from .triangles import MassProperties +from .typed import ArrayLike, NDArray +from .visual import ColorVisuals, TextureVisuals + +""" +github.com/mikedh/trimesh +---------------------------- + +Library for importing, exporting and doing simple operations on triangular meshes. 
+""" + +class Trimesh(Geometry3D): + def __init__( + self, + vertices: Optional[NDArray[float64]] = ..., + faces: Optional[NDArray[int64]] = ..., + face_normals: Optional[NDArray[float64]] = ..., + vertex_normals: Optional[NDArray[float64]] = ..., + face_colors: Optional[NDArray[float64]] = ..., + vertex_colors: Optional[NDArray[float64]] = ..., + face_attributes: Optional[Dict[str, NDArray]] = ..., + vertex_attributes: Optional[Dict[str, NDArray]] = ..., + metadata: Optional[Dict[str, Any]] = ..., + process: bool = ..., + validate: bool = ..., + merge_tex: Optional[bool] = ..., + merge_norm: Optional[bool] = ..., + use_embree: bool = ..., + initial_cache: Optional[Dict[str, ndarray]] = ..., + visual: Optional[Union[ColorVisuals, TextureVisuals]] = ..., + **kwargs, + ) -> None: + """ + A Trimesh object contains a triangular 3D mesh. + + Parameters + ------------ + vertices : (n, 3) float + Array of vertex locations + faces : (m, 3) or (m, 4) int + Array of triangular or quad faces (triangulated on load) + face_normals : (m, 3) float + Array of normal vectors corresponding to faces + vertex_normals : (n, 3) float + Array of normal vectors for vertices + metadata : dict + Any metadata about the mesh + process : bool + if True, Nan and Inf values will be removed + immediately and vertices will be merged + validate : bool + If True, degenerate and duplicate faces will be + removed immediately, and some functions will alter + the mesh to ensure consistent results. + use_embree : bool + If True try to use pyembree raytracer. + If pyembree is not available it will automatically fall + back to a much slower rtree/numpy implementation + initial_cache : dict + A way to pass things to the cache in case expensive + things were calculated before creating the mesh object. + visual : ColorVisuals or TextureVisuals + Assigned to self.visual + """ + ... + def process( + self, validate: bool = ..., merge_tex: Optional[bool] = ..., merge_norm: Optional[bool] = ... 
+ ) -> Trimesh: + """ + Do processing to make a mesh useful. + + Does this by: + 1) removing NaN and Inf values + 2) merging duplicate vertices + If validate: + 3) Remove triangles which have one edge + of their 2D oriented bounding box + shorter than tol.merge + 4) remove duplicated triangles + 5) ensure triangles are consistently wound + and normals face outwards + + Parameters + ------------ + validate : bool + Remove degenerate and duplicate faces. + + Returns + ------------ + self: trimesh.Trimesh + Current mesh + """ + ... + @property + def mutable(self) -> bool: + """ + Is the current mesh allowed to be altered in-place? + + Returns + ------------- + mutable + If data is allowed to be set for the mesh. + """ + ... + @mutable.setter + def mutable(self, value: bool) -> None: + """ + Set the mutability of the current mesh. + + Parameters + ---------- + value + Change whether the current mesh is allowed to be altered in-place. + """ + ... + @property + def faces(self) -> NDArray[int64]: + """ + The faces of the mesh. + + This is regarded as core information which cannot be + regenerated from cache and as such is stored in + `self._data` which tracks the array for changes and + clears cached values of the mesh altered. + + Returns + ---------- + faces : (n, 3) int64 + References for `self.vertices` for triangles. + """ + ... + @faces.setter + def faces(self, values: Union[List[List[int]], NDArray[int64]]): # -> None: + """ + Set the vertex indexes that make up triangular faces. + + Parameters + -------------- + values : (n, 3) int64 + Indexes of self.vertices + """ + ... + @caching.cache_decorator + def faces_sparse(self) -> coo_matrix: + """ + A sparse matrix representation of the faces. + + Returns + ---------- + sparse : scipy.sparse.coo_matrix + Has properties: + dtype : bool + shape : (len(self.vertices), len(self.faces)) + """ + ... + @property + def face_normals(self) -> NDArray[float64]: + """ + Return the unit normal vector for each face. 
+ + If a face is degenerate and a normal can't be generated + a zero magnitude unit vector will be returned for that face. + + Returns + ----------- + normals : (len(self.faces), 3) float64 + Normal vectors of each face + """ + ... + @face_normals.setter + def face_normals(self, values: NDArray[float64]) -> None: + """ + Assign values to face normals. + + Parameters + ------------- + values : (len(self.faces), 3) float + Unit face normals + """ + ... + @property + def vertices(self) -> NDArray[float64]: + """ + The vertices of the mesh. + + This is regarded as core information which cannot be + generated from cache and as such is stored in self._data + which tracks the array for changes and clears cached + values of the mesh if this is altered. + + Returns + ---------- + vertices : (n, 3) float + Points in cartesian space referenced by self.faces + """ + ... + @vertices.setter + def vertices(self, values: NDArray[float64]): # -> None: + """ + Assign vertex values to the mesh. + + Parameters + -------------- + values : (n, 3) float + Points in space + """ + ... + @caching.cache_decorator + def vertex_normals(self) -> NDArray[float64]: + """ + The vertex normals of the mesh. If the normals were loaded + we check to make sure we have the same number of vertex + normals and vertices before returning them. If there are + no vertex normals defined or a shape mismatch we calculate + the vertex normals from the mean normals of the faces the + vertex is used in. + + Returns + ---------- + vertex_normals : (n, 3) float + Represents the surface normal at each vertex. + Where n == len(self.vertices) + """ + ... + @vertex_normals.setter + def vertex_normals(self, values: NDArray[float64]) -> None: + """ + Assign values to vertex normals. + + Parameters + ------------- + values : (len(self.vertices), 3) float + Unit normal vectors for each vertex + """ + ... 
+ @caching.cache_decorator + def vertex_faces(self) -> NDArray[int64]: + """ + A representation of the face indices that correspond to each vertex. + + Returns + ---------- + vertex_faces : (n,m) int + Each row contains the face indices that correspond to the given vertex, + padded with -1 up to the max number of faces corresponding to any one vertex + Where n == len(self.vertices), m == max number of faces for a single vertex + """ + ... + @caching.cache_decorator + def bounds(self) -> NDArray[float64]: + """ + The axis aligned bounds of the faces of the mesh. + + Returns + ----------- + bounds : (2, 3) float or None + Bounding box with [min, max] coordinates + If mesh is empty will return None + """ + ... + @caching.cache_decorator + def extents(self) -> NDArray[float64]: + """ + The length, width, and height of the axis aligned + bounding box of the mesh. + + Returns + ----------- + extents : (3, ) float or None + Array containing axis aligned [length, width, height] + If mesh is empty returns None + """ + ... + @caching.cache_decorator + def scale(self) -> float: + """ + A metric for the overall scale of the mesh, the length of the + diagonal of the axis aligned bounding box of the mesh. + + Returns + ---------- + scale : float + The length of the meshes AABB diagonal + """ + ... + @caching.cache_decorator + def centroid(self) -> NDArray[float64]: + """ + The point in space which is the average of the triangle + centroids weighted by the area of each triangle. + + This will be valid even for non-watertight meshes, + unlike self.center_mass + + Returns + ---------- + centroid : (3, ) float + The average vertex weighted by face area + """ + ... + @property + def center_mass(self) -> NDArray[float64]: + """ + The point in space which is the center of mass/volume. + + Returns + ----------- + center_mass : (3, ) float + Volumetric center of mass of the mesh. + """ + ... 
+ @center_mass.setter + def center_mass(self, value: NDArray[float64]) -> None: + """ + Override the point in space which is the center of mass and volume. + + Parameters + ----------- + center_mass : (3, ) float + Volumetric center of mass of the mesh. + """ + ... + @property + def density(self) -> float: + """ + The density of the mesh used in inertia calculations. + + Returns + ----------- + density + The density of the primitive. + """ + ... + @density.setter + def density(self, value: float) -> None: + """ + Set the density of the primitive. + + Parameters + ------------- + density + Specify the density of the primitive to be + used in inertia calculations. + """ + ... + @property + def volume(self) -> float64: + """ + Volume of the current mesh calculated using a surface + integral. If the current mesh isn't watertight this is + garbage. + + Returns + --------- + volume : float + Volume of the current mesh + """ + ... + @property + def mass(self) -> float64: + """ + Mass of the current mesh, based on specified density and + volume. If the current mesh isn't watertight this is garbage. + + Returns + --------- + mass : float + Mass of the current mesh + """ + ... + @property + def moment_inertia(self) -> NDArray[float64]: + """ + Return the moment of inertia matrix of the current mesh. + If mesh isn't watertight this is garbage. The returned + moment of inertia is *axis aligned* at the mesh's center + of mass `mesh.center_mass`. If you want the moment at any + other frame including the origin call: + `mesh.moment_inertia_frame` + + Returns + --------- + inertia : (3, 3) float + Moment of inertia of the current mesh at the center of + mass and aligned with the cartesian axis. + """ + ... + def moment_inertia_frame(self, transform: NDArray[float64]) -> NDArray[float64]: + """ + Get the moment of inertia of this mesh with respect to + an arbitrary frame, versus with respect to the center + of mass as returned by `mesh.moment_inertia`. 
+ + For example if `transform` is an identity matrix `np.eye(4)` + this will give the moment at the origin. + + Uses the parallel axis theorum to move the center mass + tensor to this arbitrary frame. + + Parameters + ------------ + transform : (4, 4) float + Homogeneous transformation matrix. + + Returns + ------------- + inertia : (3, 3) + Moment of inertia in the requested frame. + """ + ... + @caching.cache_decorator + def principal_inertia_components(self) -> NDArray[float64]: + """ + Return the principal components of inertia + + Ordering corresponds to mesh.principal_inertia_vectors + + Returns + ---------- + components : (3, ) float + Principal components of inertia + """ + ... + @property + def principal_inertia_vectors(self) -> NDArray[float64]: + """ + Return the principal axis of inertia as unit vectors. + The order corresponds to `mesh.principal_inertia_components`. + + Returns + ---------- + vectors : (3, 3) float + Three vectors pointing along the + principal axis of inertia directions + """ + ... + @caching.cache_decorator + def principal_inertia_transform(self) -> NDArray[float64]: + """ + A transform which moves the current mesh so the principal + inertia vectors are on the X,Y, and Z axis, and the centroid is + at the origin. + + Returns + ---------- + transform : (4, 4) float + Homogeneous transformation matrix + """ + ... + @caching.cache_decorator + def symmetry(self) -> Optional[str]: + """ + Check whether a mesh has rotational symmetry around + an axis (radial) or point (spherical). + + Returns + ----------- + symmetry : None, 'radial', 'spherical' + What kind of symmetry does the mesh have. + """ + ... + @property + def symmetry_axis(self) -> NDArray[float64]: + """ + If a mesh has rotational symmetry, return the axis. + + Returns + ------------ + axis : (3, ) float + Axis around which a 2D profile was revolved to create this mesh. + """ + ... 
+ @property + def symmetry_section(self) -> NDArray[float64]: + """ + If a mesh has rotational symmetry return the two + vectors which make up a section coordinate frame. + + Returns + ---------- + section : (2, 3) float + Vectors to take a section along + """ + ... + @caching.cache_decorator + def triangles(self) -> NDArray[float64]: + """ + Actual triangles of the mesh (points, not indexes) + + Returns + --------- + triangles : (n, 3, 3) float + Points of triangle vertices + """ + ... + @caching.cache_decorator + def triangles_tree(self) -> Index: + """ + An R-tree containing each face of the mesh. + + Returns + ---------- + tree : rtree.index + Each triangle in self.faces has a rectangular cell + """ + ... + @caching.cache_decorator + def triangles_center(self) -> NDArray[float64]: + """ + The center of each triangle (barycentric [1/3, 1/3, 1/3]) + + Returns + --------- + triangles_center : (len(self.faces), 3) float + Center of each triangular face + """ + ... + @caching.cache_decorator + def triangles_cross(self) -> NDArray[float64]: + """ + The cross product of two edges of each triangle. + + Returns + --------- + crosses : (n, 3) float + Cross product of each triangle + """ + ... + @caching.cache_decorator + def edges(self) -> NDArray[int64]: + """ + Edges of the mesh (derived from faces). + + Returns + --------- + edges : (n, 2) int + List of vertex indices making up edges + """ + ... + @caching.cache_decorator + def edges_face(self) -> NDArray[int64]: + """ + Which face does each edge belong to. + + Returns + --------- + edges_face : (n, ) int + Index of self.faces + """ + ... + @caching.cache_decorator + def edges_unique(self) -> NDArray[int64]: + """ + The unique edges of the mesh. + + Returns + ---------- + edges_unique : (n, 2) int + Vertex indices for unique edges + """ + ... + @caching.cache_decorator + def edges_unique_length(self) -> NDArray[float64]: + """ + How long is each unique edge. 
+ + Returns + ---------- + length : (len(self.edges_unique), ) float + Length of each unique edge + """ + ... + @caching.cache_decorator + def edges_unique_inverse(self) -> NDArray[int64]: + """ + Return the inverse required to reproduce + self.edges_sorted from self.edges_unique. + + Useful for referencing edge properties: + mesh.edges_unique[mesh.edges_unique_inverse] == m.edges_sorted + + Returns + ---------- + inverse : (len(self.edges), ) int + Indexes of self.edges_unique + """ + ... + @caching.cache_decorator + def edges_sorted(self) -> NDArray[int64]: + """ + Edges sorted along axis 1 + + Returns + ---------- + edges_sorted : (n, 2) + Same as self.edges but sorted along axis 1 + """ + ... + @caching.cache_decorator + def edges_sorted_tree(self) -> cKDTree: + """ + A KDTree for mapping edges back to edge index. + + Returns + ------------ + tree : scipy.spatial.cKDTree + Tree when queried with edges will return + their index in mesh.edges_sorted + """ + ... + @caching.cache_decorator + def edges_sparse(self) -> coo_matrix: + """ + Edges in sparse bool COO graph format where connected + vertices are True. + + Returns + ---------- + sparse: (len(self.vertices), len(self.vertices)) bool + Sparse graph in COO format + """ + ... + @caching.cache_decorator + def body_count(self) -> int: + """ + How many connected groups of vertices exist in this mesh. + Note that this number may differ from result in mesh.split, + which is calculated from FACE rather than vertex adjacency. + + Returns + ----------- + count : int + Number of connected vertex groups + """ + ... + @caching.cache_decorator + def faces_unique_edges(self) -> NDArray[int64]: + """ + For each face return which indexes in mesh.unique_edges constructs + that face. 
+ + Returns + --------- + faces_unique_edges : (len(self.faces), 3) int + Indexes of self.edges_unique that + construct self.faces + + Examples + --------- + In [0]: mesh.faces[:2] + Out[0]: + TrackedArray([[ 1, 6946, 24224], + [ 6946, 1727, 24225]]) + + In [1]: mesh.edges_unique[mesh.faces_unique_edges[:2]] + Out[1]: + array([[[ 1, 6946], + [ 6946, 24224], + [ 1, 24224]], + [[ 1727, 6946], + [ 1727, 24225], + [ 6946, 24225]]]) + """ + ... + @caching.cache_decorator + def euler_number(self) -> int: + """ + Return the Euler characteristic (a topological invariant) for the mesh + In order to guarantee correctness, this should be called after + remove_unreferenced_vertices + + Returns + ---------- + euler_number : int + Topological invariant + """ + ... + @caching.cache_decorator + def referenced_vertices(self) -> NDArray[bool]: + """ + Which vertices in the current mesh are referenced by a face. + + Returns + ------------- + referenced : (len(self.vertices), ) bool + Which vertices are referenced by a face + """ + ... + @property + def units(self) -> Optional[str]: + """ + Definition of units for the mesh. + + Returns + ---------- + units : str + Unit system mesh is in, or None if not defined + """ + ... + @units.setter + def units(self, value: str) -> None: + """ + Define the units of the current mesh. + """ + ... + def convert_units(self, desired: str, guess: bool = ...) -> Trimesh: + """ + Convert the units of the mesh into a specified unit. + + Parameters + ------------ + desired : string + Units to convert to (eg 'inches') + guess : boolean + If self.units are not defined should we + guess the current units of the document and then convert? + """ + ... 
+ def merge_vertices( + self, + merge_tex: Optional[bool] = ..., + merge_norm: Optional[bool] = ..., + digits_vertex: Optional[bool] = ..., + digits_norm: Optional[bool] = ..., + digits_uv: Optional[bool] = ..., + ) -> None: + """ + Removes duplicate vertices grouped by position and + optionally texture coordinate and normal. + + Parameters + ------------- + mesh : Trimesh object + Mesh to merge vertices on + merge_tex : bool + If True textured meshes with UV coordinates will + have vertices merged regardless of UV coordinates + merge_norm : bool + If True, meshes with vertex normals will have + vertices merged ignoring different normals + digits_vertex : None or int + Number of digits to consider for vertex position + digits_norm : int + Number of digits to consider for unit normals + digits_uv : int + Number of digits to consider for UV coordinates + """ + ... + def update_vertices(self, mask: NDArray[bool], inverse: Optional[NDArray] = ...) -> None: + """ + Update vertices with a mask. + + Parameters + ------------ + vertex_mask : (len(self.vertices)) bool + Array of which vertices to keep + inverse : (len(self.vertices)) int + Array to reconstruct vertex references + such as output by np.unique + """ + ... + def update_faces(self, mask: NDArray[bool]) -> None: + """ + In many cases, we will want to remove specific faces. + However, there is additional bookkeeping to do this cleanly. + This function updates the set of faces with a validity mask, + as well as keeping track of normals and colors. + + Parameters + ------------ + valid : (m) int or (len(self.faces)) bool + Mask to remove faces + """ + ... + def remove_infinite_values(self) -> None: + """ + Ensure that every vertex and face consists of finite numbers. + This will remove vertices or faces containing np.nan and np.inf + + Alters `self.faces` and `self.vertices` + """ + ... + def unique_faces(self) -> NDArray[bool]: + """ + On the current mesh find which faces are unique. 
+
+        Returns
+        --------
+        unique : (len(faces),) bool
+          A mask where the first occurrence of a unique face is true.
+        """
+        ...
+    def remove_duplicate_faces(self) -> None:
+        """
+        DEPRECATED MARCH 2024 REPLACE WITH:
+        `mesh.update_faces(mesh.unique_faces())`
+        """
+        ...
+    def rezero(self) -> None:
+        """
+        Translate the mesh so that all vertices are positive.
+
+        Alters `self.vertices`.
+        """
+        ...
+    def split(self, **kwargs) -> List[Trimesh]:
+        """
+        Returns a list of Trimesh objects, based on face connectivity.
+        Splits into individual components, sometimes referred to as 'bodies'
+
+        Parameters
+        ------------
+        only_watertight : bool
+          Only return watertight meshes and discard remainder
+        adjacency : None or (n, 2) int
+          Override face adjacency with custom values
+
+        Returns
+        ---------
+        meshes : (n, ) trimesh.Trimesh
+          Separate bodies from original mesh
+        """
+        ...
+    @caching.cache_decorator
+    def face_adjacency(self) -> NDArray[int64]:
+        """
+        Find faces that share an edge i.e. 'adjacent' faces.
+
+        Returns
+        ----------
+        adjacency : (n, 2) int
+          Pairs of faces which share an edge
+
+        Examples
+        ---------
+
+        In [1]: mesh = trimesh.load('models/featuretype.STL')
+
+        In [2]: mesh.face_adjacency
+        Out[2]:
+        array([[   0,    1],
+               [   2,    3],
+               [   0,    3],
+               ...,
+               [1112,  949],
+               [3467, 3475],
+               [1113, 3475]])
+
+        In [3]: mesh.faces[mesh.face_adjacency[0]]
+        Out[3]:
+        TrackedArray([[   1,    0,  408],
+                      [1239,    0,    1]], dtype=int64)
+
+        In [4]: import networkx as nx
+
+        In [5]: graph = nx.from_edgelist(mesh.face_adjacency)
+
+        In [6]: groups = nx.connected_components(graph)
+        """
+        ...
+    @caching.cache_decorator
+    def face_neighborhood(self) -> NDArray[int64]:
+        """
+        Find faces that share a vertex i.e. 'neighbors' faces.
+
+        Returns
+        ----------
+        neighborhood : (n, 2) int
+          Pairs of faces which share a vertex
+        """
+        ...
+    @caching.cache_decorator
+    def face_adjacency_edges(self) -> NDArray[int64]:
+        """
+        Returns the edges that are shared by the adjacent faces.
+ + Returns + -------- + edges : (n, 2) int + Vertex indices which correspond to face_adjacency + """ + ... + @caching.cache_decorator + def face_adjacency_edges_tree(self) -> cKDTree: + """ + A KDTree for mapping edges back face adjacency index. + + Returns + ------------ + tree : scipy.spatial.cKDTree + Tree when queried with SORTED edges will return + their index in mesh.face_adjacency + """ + ... + @caching.cache_decorator + def face_adjacency_angles(self) -> NDArray[float64]: + """ + Return the angle between adjacent faces + + Returns + -------- + adjacency_angle : (n, ) float + Angle between adjacent faces + Each value corresponds with self.face_adjacency + """ + ... + @caching.cache_decorator + def face_adjacency_projections(self) -> NDArray[float64]: + """ + The projection of the non-shared vertex of a triangle onto + its adjacent face + + Returns + ---------- + projections : (len(self.face_adjacency), ) float + Dot product of vertex + onto plane of adjacent triangle. + """ + ... + @caching.cache_decorator + def face_adjacency_convex(self) -> NDArray[bool]: + """ + Return faces which are adjacent and locally convex. + + What this means is that given faces A and B, the one vertex + in B that is not shared with A, projected onto the plane of A + has a projection that is zero or negative. + + Returns + ---------- + are_convex : (len(self.face_adjacency), ) bool + Face pairs that are locally convex + """ + ... + @caching.cache_decorator + def face_adjacency_unshared(self) -> NDArray[int64]: + """ + Return the vertex index of the two vertices not in the shared + edge between two adjacent faces + + Returns + ----------- + vid_unshared : (len(mesh.face_adjacency), 2) int + Indexes of mesh.vertices + """ + ... + @caching.cache_decorator + def face_adjacency_radius(self) -> NDArray[float64]: + """ + The approximate radius of a cylinder that fits inside adjacent faces. 
+ + Returns + ------------ + radii : (len(self.face_adjacency), ) float + Approximate radius formed by triangle pair + """ + ... + @caching.cache_decorator + def face_adjacency_span(self) -> NDArray[float64]: + """ + The approximate perpendicular projection of the non-shared + vertices in a pair of adjacent faces onto the shared edge of + the two faces. + + Returns + ------------ + span : (len(self.face_adjacency), ) float + Approximate span between the non-shared vertices + """ + ... + @caching.cache_decorator + def integral_mean_curvature(self) -> float64: + """ + The integral mean curvature, or the surface integral of the mean curvature. + + Returns + --------- + area : float + Integral mean curvature of mesh + """ + ... + @caching.cache_decorator + def vertex_adjacency_graph(self) -> Graph: + """ + Returns a networkx graph representing the vertices and their connections + in the mesh. + + Returns + --------- + graph: networkx.Graph + Graph representing vertices and edges between + them where vertices are nodes and edges are edges + + Examples + ---------- + This is useful for getting nearby vertices for a given vertex, + potentially for some simple smoothing techniques. + + mesh = trimesh.primitives.Box() + graph = mesh.vertex_adjacency_graph + graph.neighbors(0) + > [1, 2, 3, 4] + """ + ... + @caching.cache_decorator + def vertex_neighbors(self) -> List[List[int64]]: + """ + The vertex neighbors of each vertex of the mesh, determined from + the cached vertex_adjacency_graph, if already existent. + + Returns + ---------- + vertex_neighbors : (len(self.vertices), ) int + Represents immediate neighbors of each vertex along + the edge of a triangle + + Examples + ---------- + This is useful for getting nearby vertices for a given vertex, + potentially for some simple smoothing techniques. + + >>> mesh = trimesh.primitives.Box() + >>> mesh.vertex_neighbors[0] + [1, 2, 3, 4] + """ + ... 
+    @caching.cache_decorator
+    def is_winding_consistent(self) -> bool:
+        """
+        Does the mesh have consistent winding or not.
+        A mesh with consistent winding has each shared edge
+        going in an opposite direction from the other in the pair.
+
+        Returns
+        --------
+        consistent : bool
+          Is winding consistent or not
+        """
+        ...
+    @caching.cache_decorator
+    def is_watertight(self) -> bool:
+        """
+        Check if a mesh is watertight by making sure every edge is
+        included in two faces.
+
+        Returns
+        ----------
+        is_watertight : bool
+          Is mesh watertight or not
+        """
+        ...
+    @caching.cache_decorator
+    def is_volume(self) -> bool:
+        """
+        Check if a mesh has all the properties required to represent
+        a valid volume, rather than just a surface.
+
+        These properties include being watertight, having consistent
+        winding and outward facing normals.
+
+        Returns
+        ---------
+        valid : bool
+          Does the mesh represent a volume
+        """
+        ...
+    @property
+    def is_empty(self) -> bool:
+        """
+        Does the current mesh have data defined.
+
+        Returns
+        --------
+        empty : bool
+          If True, no data is set on the current mesh
+        """
+        ...
+    @caching.cache_decorator
+    def is_convex(self) -> bool:
+        """
+        Check if a mesh is convex or not.
+
+        Returns
+        ----------
+        is_convex: bool
+          Is mesh convex or not
+        """
+        ...
+    @caching.cache_decorator
+    def kdtree(self) -> cKDTree:
+        """
+        Return a scipy.spatial.cKDTree of the vertices of the mesh.
+        Not cached as this led to observed memory issues and segfaults.
+
+        Returns
+        ---------
+        tree : scipy.spatial.cKDTree
+          Contains mesh.vertices
+        """
+        ...
+    def remove_degenerate_faces(self, height: float = ...) -> None:
+        """
+        DEPRECATED MARCH 2024 REPLACE WITH:
+        `self.update_faces(self.nondegenerate_faces(height=height))`
+        """
+        ...
+    def nondegenerate_faces(self, height: float = ...) -> NDArray[bool]:
+        """
+        Remove degenerate faces (faces without 3 unique vertex indices)
+        from the current mesh.
+ + If a height is specified, it will remove any face with a 2D oriented + bounding box with one edge shorter than that height. + + If not specified, it will remove any face with a zero normal. + + Parameters + ------------ + height : float + If specified removes faces with an oriented bounding + box shorter than this on one side. + + Returns + ------------- + nondegenerate : (len(self.faces), ) bool + Mask used to remove faces + """ + ... + @caching.cache_decorator + def facets(self) -> List[NDArray[int64]]: + """ + Return a list of face indices for coplanar adjacent faces. + + Returns + --------- + facets : (n, ) sequence of (m, ) int + Groups of indexes of self.faces + """ + ... + @caching.cache_decorator + def facets_area(self) -> NDArray[float64]: + """ + Return an array containing the area of each facet. + + Returns + --------- + area : (len(self.facets), ) float + Total area of each facet (group of faces) + """ + ... + @caching.cache_decorator + def facets_normal(self) -> NDArray[float64]: + """ + Return the normal of each facet + + Returns + --------- + normals: (len(self.facets), 3) float + A unit normal vector for each facet + """ + ... + @caching.cache_decorator + def facets_origin(self) -> NDArray[float64]: + """ + Return a point on the facet plane. + + Returns + ------------ + origins : (len(self.facets), 3) float + A point on each facet plane + """ + ... + @caching.cache_decorator + def facets_boundary(self) -> List[NDArray[int64]]: + """ + Return the edges which represent the boundary of each facet + + Returns + --------- + edges_boundary : sequence of (n, 2) int + Indices of self.vertices + """ + ... + @caching.cache_decorator + def facets_on_hull(self) -> NDArray[bool]: + """ + Find which facets of the mesh are on the convex hull. + + Returns + --------- + on_hull : (len(mesh.facets), ) bool + is A facet on the meshes convex hull or not + """ + ... + def fix_normals(self, multibody: Optional[bool] = ...) 
-> None:
+        """
+        Find and fix problems with self.face_normals and self.faces
+        winding direction.
+
+        For face normals ensure that vectors are consistently pointed
+        outwards, and that self.faces is wound in the correct direction
+        for all connected components.
+
+        Parameters
+        -------------
+        multibody : None or bool
+          Fix normals across multiple bodies
+          if None automatically pick from body_count
+        """
+        ...
+    def fill_holes(self) -> bool:
+        """
+        Fill single triangle and single quad holes in the current mesh.
+
+        Returns
+        ----------
+        watertight : bool
+          Is the mesh watertight after the function completes
+        """
+        ...
+    def register(self, other: Geometry3D, **kwargs): # -> tuple[NDArray[floating[Any]] | Any | NDArray[float64], Any]:
+        """
+        Align a mesh with another mesh or a PointCloud using
+        the principal axes of inertia as a starting point which
+        is refined by iterative closest point.
+
+        Parameters
+        ------------
+        mesh : trimesh.Trimesh object
+          Mesh to align with other
+        other : trimesh.Trimesh or (n, 3) float
+          Mesh or points in space
+        samples : int
+          Number of samples from mesh surface to align
+        icp_first : int
+          How many ICP iterations for the 9 possible
+          combinations of
+        icp_final : int
+          How many ICP iterations for the closest
+          candidate from the wider search
+
+        Returns
+        -----------
+        mesh_to_other : (4, 4) float
+          Transform to align mesh to the other object
+        cost : float
+          Average square distance per point
+        """
+        ...
+    def compute_stable_poses(
+        self,
+        center_mass: Optional[NDArray[float64]] = ...,
+        sigma: float = ...,
+        n_samples: int = ...,
+        threshold: float = ...,
+    ): # -> tuple[ndarray[Any, dtype[Unknown]], ndarray[Any, dtype[Unknown]]]:
+        """
+        Computes stable orientations of a mesh and their quasi-static probabilities.
+
+        This method samples the location of the center of mass from a multivariate
+        gaussian (mean at com, cov equal to identity times sigma) over n_samples.
+
+        For each sample, it computes the stable resting poses of the mesh on
+        a planar workspace and evaluates the probabilities of landing in
+        each pose if the object is dropped onto the table randomly.
+
+        This method returns the 4x4 homogeneous transform matrices that place
+        the shape against the planar surface with the z-axis pointing upwards
+        and a list of the probabilities for each pose.
+        The transforms and probabilities that are returned are sorted, with the
+        most probable pose first.
+
+        Parameters
+        ------------
+        center_mass : (3, ) float
+          The object center of mass (if None, this method
+          assumes uniform density and watertightness and
+          computes a center of mass explicitly)
+        sigma : float
+          The covariance for the multivariate gaussian used
+          to sample center of mass locations
+        n_samples : int
+          The number of samples of the center of mass location
+        threshold : float
+          The probability value at which to threshold
+          returned stable poses
+
+        Returns
+        -------
+        transforms : (n, 4, 4) float
+          The homogeneous matrices that transform the
+          object to rest in a stable pose, with the
+          new z-axis pointing upwards from the table
+          and the object just touching the table.
+
+        probs : (n, ) float
+          A probability ranging from 0.0 to 1.0 for each pose
+        """
+        ...
+    def subdivide(self, face_index: None = ...) -> Trimesh:
+        """
+        Subdivide a mesh, with each subdivided face replaced with four
+        smaller faces.
+
+        Parameters
+        ------------
+        face_index: (m, ) int or None
+          If None all faces of mesh will be subdivided
+          If (m, ) int array of indices: only specified faces will be
+          subdivided. Note that in this case the mesh will generally
+          no longer be manifold, as the additional vertex on the midpoint
+          will not be used by the adjacent faces to the faces specified,
+          and an additional postprocessing step will be required to
+          make resulting mesh watertight
+        """
+        ...
+    def subdivide_to_size(
+        self, max_edge, max_iter=..., return_index=...
+    ): # -> tuple[Trimesh, NDArray[Unknown] | Unbound] | Trimesh:
+        """
+        Subdivide a mesh until every edge is shorter than a
+        specified length.
+
+        Will return a triangle soup, not a nicely structured mesh.
+
+        Parameters
+        ------------
+        max_edge : float
+          Maximum length of any edge in the result
+        max_iter : int
+          The maximum number of times to run subdivision
+        return_index : bool
+          If True, return index of original face for new faces
+        """
+        ...
+    def subdivide_loop(self, iterations=...): # -> Trimesh:
+        """
+        Subdivide a mesh by dividing each triangle into four
+        triangles and approximating their smoothed surface
+        using loop subdivision. Loop subdivision often looks
+        better on triangular meshes than catmull-clark, which
+        operates primarily on quads.
+
+        Parameters
+        ------------
+        iterations : int
+          Number of iterations to run subdivision.
+        multibody : bool
+          If True will try to subdivide for each submesh
+        """
+        ...
+    def smoothed(self, **kwargs):
+        """
+        DEPRECATED: use `mesh.smooth_shaded` or `trimesh.graph.smooth_shade(mesh)`
+        """
+        ...
+    @property
+    def smooth_shaded(self):
+        """
+        Smooth shading in OpenGL relies on which vertices are shared,
+        this function will disconnect regions above an angle threshold
+        and return a non-watertight version which will look better
+        in an OpenGL rendering context.
+
+        If you would like to use non-default arguments see `graph.smooth_shade`.
+
+        Returns
+        ---------
+        smooth_shaded : trimesh.Trimesh
+          Non watertight version of current mesh.
+        """
+        ...
+    @property
+    def visual(self): # -> None:
+        """
+        Get the stored visuals for the current mesh.
+
+        Returns
+        -------------
+        visual : ColorVisuals or TextureVisuals
+          Contains visual information about the mesh
+        """
+        ...
+    @visual.setter
+    def visual(self, value): # -> None:
+        """
+        When setting a visual object, always make sure
+        that `visual.mesh` points back to the source mesh.
+ + Parameters + -------------- + visual : ColorVisuals or TextureVisuals + Contains visual information about the mesh + """ + ... + def section(self, plane_normal: List[int], plane_origin: List[int], **kwargs) -> Path3D: + """ + Returns a 3D cross section of the current mesh and a plane + defined by origin and normal. + + Parameters + ------------ + plane_normal: (3) vector for plane normal + Normal vector of section plane + plane_origin : (3, ) float + Point on the cross section plane + + Returns + --------- + intersections: Path3D or None + Curve of intersection + """ + ... + def section_multiplane( + self, plane_origin: NDArray[float64], plane_normal: NDArray[float64], heights: NDArray[float64] + ): # -> list[None]: + """ + Return multiple parallel cross sections of the current + mesh in 2D. + + Parameters + ------------ + plane_origin : (3, ) float + Point on the cross section plane + plane_normal: (3) vector for plane normal + Normal vector of section plane + heights : (n, ) float + Each section is offset by height along + the plane normal. + + Returns + --------- + paths : (n, ) Path2D or None + 2D cross sections at specified heights. + path.metadata['to_3D'] contains transform + to return 2D section back into 3D space. + """ + ... + def slice_plane( + self, plane_origin, plane_normal, cap=..., face_index=..., cached_dots=..., **kwargs + ): # -> Trimesh | None: + """ + Slice the mesh with a plane, returning a new mesh that is the + portion of the original mesh to the positive normal side of the plane + + plane_origin : (3,) float + Point on plane to intersect with mesh + plane_normal : (3,) float + Normal vector of plane to intersect with mesh + cap : bool + If True, cap the result with a triangulated polygon + face_index : ((m,) int) + Indexes of mesh.faces to slice. When no mask is + provided, the default is to slice all faces. 
+ cached_dots : (n, 3) float + If an external function has stored dot + products pass them here to avoid recomputing + + Returns + --------- + new_mesh: trimesh.Trimesh or None + Subset of current mesh that intersects the half plane + to the positive normal side of the plane + """ + ... + def unwrap(self, image=...): # -> Trimesh: + """ + Returns a Trimesh object equivalent to the current mesh where + the vertices have been assigned uv texture coordinates. Vertices + may be split into as many as necessary by the unwrapping + algorithm, depending on how many uv maps they appear in. + + Requires `pip install xatlas` + + Parameters + ------------ + image : None or PIL.Image + Image to assign to the material + + Returns + -------- + unwrapped : trimesh.Trimesh + Mesh with unwrapped uv coordinates + """ + ... + @caching.cache_decorator + def convex_hull(self) -> Trimesh: + """ + Returns a Trimesh object representing the convex hull of + the current mesh. + + Returns + -------- + convex : trimesh.Trimesh + Mesh of convex hull of current mesh + """ + ... + def sample( + self, count: int, return_index: bool = ..., face_weight: Optional[NDArray[float64]] = ... + ): # -> tuple[Unknown, Unknown]: + """ + Return random samples distributed across the + surface of the mesh + + Parameters + ------------ + count : int + Number of points to sample + return_index : bool + If True will also return the index of which face each + sample was taken from. + face_weight : None or len(mesh.faces) float + Weight faces by a factor other than face area. + If None will be the same as face_weight=mesh.area + + Returns + --------- + samples : (count, 3) float + Points on surface of mesh + face_index : (count, ) int + Index of self.faces + """ + ... + def remove_unreferenced_vertices(self) -> None: + """ + Remove all vertices in the current mesh which are not + referenced by a face. + """ + ... 
+    def unmerge_vertices(self) -> None:
+        """
+        Removes all face references so that every face contains
+        three unique vertex indices and no faces are adjacent.
+        """
+        ...
+    def apply_transform(self, matrix: NDArray[float64]) -> Trimesh:
+        """
+        Transform mesh by a homogeneous transformation matrix.
+
+        Does the bookkeeping to avoid recomputing things so this function
+        should be used rather than directly modifying self.vertices
+        if possible.
+
+        Parameters
+        ------------
+        matrix : (4, 4) float
+          Homogeneous transformation matrix
+        """
+        ...
+    def voxelized(self, pitch, method=..., **kwargs):
+        """
+        Return a VoxelGrid object representing the current mesh
+        discretized into voxels at the specified pitch
+
+        Parameters
+        ------------
+        pitch : float
+          The edge length of a single voxel
+        method: implementation key. See `trimesh.voxel.creation.voxelizers`
+        **kwargs: additional kwargs passed to the specified implementation.
+
+        Returns
+        ----------
+        voxelized : VoxelGrid object
+          Representing the current mesh
+        """
+        ...
+    @caching.cache_decorator
+    def as_open3d(self):
+        """
+        Return an `open3d.geometry.TriangleMesh` version of
+        the current mesh.
+
+        Returns
+        ---------
+        open3d : open3d.geometry.TriangleMesh
+          Current mesh as an open3d object.
+        """
+        ...
+    def simplify_quadratic_decimation(self, *args, **kwargs): # -> Trimesh:
+        """
+        DEPRECATED MARCH 2024 REPLACE WITH:
+        `mesh.simplify_quadric_decimation`
+        """
+        ...
+    def simplify_quadric_decimation(self, face_count: int) -> Trimesh:
+        """
+        A thin wrapper around the `open3d` implementation of this:
+        `open3d.geometry.TriangleMesh.simplify_quadric_decimation`
+
+        Parameters
+        -----------
+        face_count : int
+          Number of faces desired in the resulting mesh.
+
+        Returns
+        ---------
+        simple : trimesh.Trimesh
+          Simplified version of mesh.
+        """
+        ...
+ def outline(self, face_ids: Optional[NDArray[int64]] = ..., **kwargs) -> Path3D: + """ + Given a list of face indexes find the outline of those + faces and return it as a Path3D. + + The outline is defined here as every edge which is only + included by a single triangle. + + Note that this implies a non-watertight mesh as the + outline of a watertight mesh is an empty path. + + Parameters + ------------ + face_ids : (n, ) int + Indices to compute the outline of. + If None, outline of full mesh will be computed. + **kwargs: passed to Path3D constructor + + Returns + ---------- + path : Path3D + Curve in 3D of the outline + """ + ... + def projected(self, normal, **kwargs) -> Path2D: + """ + Project a mesh onto a plane and then extract the + polygon that outlines the mesh projection on that + plane. + + Parameters + ---------- + mesh : trimesh.Trimesh + Source geometry + check : bool + If True make sure is flat + normal : (3,) float + Normal to extract flat pattern along + origin : None or (3,) float + Origin of plane to project mesh onto + pad : float + Proportion to pad polygons by before unioning + and then de-padding result by to avoid zero-width gaps. + tol_dot : float + Tolerance for discarding on-edge triangles. + max_regions : int + Raise an exception if the mesh has more than this + number of disconnected regions to fail quickly before unioning. + + Returns + ---------- + projected : trimesh.path.Path2D + Outline of source mesh + """ + ... + @caching.cache_decorator + def area(self) -> float64: + """ + Summed area of all triangles in the current mesh. + + Returns + --------- + area : float + Surface area of mesh + """ + ... + @caching.cache_decorator + def area_faces(self) -> NDArray[float64]: + """ + The area of each face in the mesh. + + Returns + --------- + area_faces : (n, ) float + Area of each face + """ + ... + @caching.cache_decorator + def mass_properties(self) -> MassProperties: + """ + Returns the mass properties of the current mesh. 
+ + Assumes uniform density, and result is probably garbage if mesh + isn't watertight. + + Returns + ---------- + properties : dict + With keys: + 'volume' : in global units^3 + 'mass' : From specified density + 'density' : Included again for convenience (same as kwarg density) + 'inertia' : Taken at the center of mass and aligned with global + coordinate system + 'center_mass' : Center of mass location, in global coordinate system + """ + ... + def invert(self) -> None: + """ + Invert the mesh in-place by reversing the winding of every + face and negating normals without dumping the cache. + + Alters `self.faces` by reversing columns, and negating + `self.face_normals` and `self.vertex_normals`. + """ + ... + def scene(self, **kwargs) -> Scene: + """ + Returns a Scene object containing the current mesh. + + Returns + --------- + scene : trimesh.scene.scene.Scene + Contains just the current mesh + """ + ... + def show(self, **kwargs): # -> SceneViewer: + """ + Render the mesh in an opengl window. Requires pyglet. + + Parameters + ------------ + smooth : bool + Run smooth shading on mesh or not, + large meshes will be slow + + Returns + ----------- + scene : trimesh.scene.Scene + Scene with current mesh in it + """ + ... + def submesh( + self, faces_sequence: List[NDArray[int64]], **kwargs + ): # -> list[Unknown] | NDArray[Any] | Any | ndarray[Any, dtype[Unknown | Any]]: + """ + Return a subset of the mesh. + + Parameters + ------------ + faces_sequence : sequence (m, ) int + Face indices of mesh + only_watertight : bool + Only return submeshes which are watertight + append : bool + Return a single mesh which has the faces appended. + if this flag is set, only_watertight is ignored + + Returns + --------- + submesh : Trimesh or (n,) Trimesh + Single mesh if `append` or list of submeshes + """ + ... 
+ @caching.cache_decorator + def identifier(self) -> NDArray[float64]: + """ + Return a float vector which is unique to the mesh + and is robust to rotation and translation. + + Returns + ----------- + identifier : (7,) float + Identifying properties of the current mesh + """ + ... + @caching.cache_decorator + def identifier_hash(self) -> str: + """ + A hash of the rotation invariant identifier vector. + + Returns + --------- + hashed : str + Hex string of the SHA256 hash from + the identifier vector at hand-tuned sigfigs. + """ + ... + def export( + self, file_obj=..., file_type: Optional[str] = ..., **kwargs + ) -> Union[ + Dict[str, Union[Dict[str, str], List[List[int]], List[List[float]]]], + str, + bytes, + Dict[str, Union[Dict[str, str], Dict[str, Union[str, Tuple[int, int]]]]], + ]: + """ + Export the current mesh to a file object. + If file_obj is a filename, file will be written there. + + Supported formats are stl, off, ply, collada, json, + dict, glb, dict64, msgpack. + + Parameters + ------------ + file_obj : open writeable file object + str, file name where to save the mesh + None, return the export blob + file_type : str + Which file type to export as, if `file_name` + is passed this is not required. + """ + ... + def to_dict(self) -> Dict[str, Union[str, List[List[float]], List[List[int]]]]: + """ + Return a dictionary representation of the current mesh + with keys that can be used as the kwargs for the + Trimesh constructor and matches the schema in: + `trimesh/resources/schema/primitive/trimesh.schema.json` + + Returns + ---------- + result : dict + Matches schema and Trimesh constructor. + """ + ... + def convex_decomposition(self, **kwargs) -> List[Trimesh]: + """ + Compute an approximate convex decomposition of a mesh + using `pip install pyVHACD`. + + Returns + ------- + meshes + List of convex meshes that approximate the original + **kwargs : VHACD keyword arguments + """ + ... 
+ def union(self, other: Trimesh, engine: Optional[str] = ..., **kwargs) -> Trimesh: + """ + Boolean union between this mesh and n other meshes + + Parameters + ------------ + other : Trimesh or (n, ) Trimesh + Other meshes to union + engine : None or str + Which backend to use + + Returns + --------- + union : trimesh.Trimesh + Union of self and other Trimesh objects + """ + ... + def difference(self, other: Trimesh, engine: Optional[str] = ..., **kwargs) -> Trimesh: + """ + Boolean difference between this mesh and n other meshes + + Parameters + ------------ + other : trimesh.Trimesh, or list of trimesh.Trimesh objects + Meshes to difference + + Returns + --------- + difference : trimesh.Trimesh + Difference between self and other Trimesh objects + """ + ... + def intersection(self, other: Trimesh, engine: Optional[str] = ..., **kwargs) -> Trimesh: + """ + Boolean intersection between this mesh and n other meshes + + Parameters + ------------ + other : trimesh.Trimesh, or list of trimesh.Trimesh objects + Meshes to calculate intersections with + + Returns + --------- + intersection : trimesh.Trimesh + Mesh of the volume contained by all passed meshes + """ + ... + def contains(self, points: ArrayLike) -> NDArray[bool]: + """ + Given an array of points determine whether or not they + are inside the mesh. This raises an error if called on a + non-watertight mesh. + + Parameters + ------------ + points : (n, 3) float + Points in cartesian space + + Returns + --------- + contains : (n, ) bool + Whether or not each point is inside the mesh + """ + ... + @caching.cache_decorator + def face_angles(self) -> NDArray[float64]: + """ + Returns the angle at each vertex of a face. + + Returns + -------- + angles : (len(self.faces), 3) float + Angle at each vertex of a face + """ + ... + @caching.cache_decorator + def face_angles_sparse(self) -> coo_matrix: + """ + A sparse matrix representation of the face angles. 
+ + Returns + ---------- + sparse : scipy.sparse.coo_matrix + Float sparse matrix with with shape: + (len(self.vertices), len(self.faces)) + """ + ... + @caching.cache_decorator + def vertex_defects(self) -> NDArray[float64]: + """ + Return the vertex defects, or (2*pi) minus the sum of the angles + of every face that includes that vertex. + + If a vertex is only included by coplanar triangles, this + will be zero. For convex regions this is positive, and + concave negative. + + Returns + -------- + vertex_defect : (len(self.vertices), ) float + Vertex defect at the every vertex + """ + ... + @caching.cache_decorator + def vertex_degree(self) -> NDArray[int64]: + """ + Return the number of faces each vertex is included in. + + Returns + ---------- + degree : (len(self.vertices), ) int + Number of faces each vertex is included in + """ + ... + @caching.cache_decorator + def face_adjacency_tree(self) -> Index: + """ + An R-tree of face adjacencies. + + Returns + -------- + tree + Where each edge in self.face_adjacency has a + rectangular cell + """ + ... + def copy(self, include_cache: bool = ...) -> Trimesh: + """ + Safely return a copy of the current mesh. + + By default, copied meshes will have emptied cache + to avoid memory issues and so may be slow on initial + operations until caches are regenerated. + + Current object will *never* have its cache cleared. + + Parameters + ------------ + include_cache : bool + If True, will shallow copy cached data to new mesh + + Returns + --------- + copied : trimesh.Trimesh + Copy of current mesh + """ + ... + def __deepcopy__(self, *args) -> Trimesh: ... + def __copy__(self, *args) -> Trimesh: ... + def eval_cached(self, statement: str, *args): # -> Any | None: + """ + Evaluate a statement and cache the result before returning. 
+ + Statements are evaluated inside the Trimesh object, and + + Parameters + ------------ + statement : str + Statement of valid python code + *args : list + Available inside statement as args[0], etc + + Returns + ----------- + result : result of running eval on statement with args + + Examples + ----------- + r = mesh.eval_cached('np.dot(self.vertices, args[0])', [0, 0, 1]) + """ + ... + def __add__(self, other: Trimesh) -> Trimesh: + """ + Concatenate the mesh with another mesh. + + Parameters + ------------ + other : trimesh.Trimesh object + Mesh to be concatenated with self + + Returns + ---------- + concat : trimesh.Trimesh + Mesh object of combined result + """ + ... diff --git a/typings/trimesh/boolean.pyi b/typings/trimesh/boolean.pyi new file mode 100644 index 00000000..72a8224e --- /dev/null +++ b/typings/trimesh/boolean.pyi @@ -0,0 +1,73 @@ +""" +This type stub file was generated by pyright. +""" + +""" +boolean.py +------------- + +Do boolean operations on meshes using either Blender or Manifold. +""" + +def difference(meshes, engine=..., **kwargs): + """ + Compute the boolean difference between a mesh an n other meshes. + + Parameters + ---------- + meshes : list of trimesh.Trimesh + Meshes to be processed + engine : str + Which backend to use, i.e. 'blender' or 'manifold' + + Returns + ---------- + difference : a - (other meshes), **kwargs for a Trimesh + """ + ... + +def union(meshes, engine=..., **kwargs): + """ + Compute the boolean union between a mesh an n other meshes. + + Parameters + ---------- + meshes : list of trimesh.Trimesh + Meshes to be processed + engine : str + Which backend to use, i.e. 'blender' or 'manifold' + + Returns + ---------- + union : a + (other meshes), **kwargs for a Trimesh + """ + ... + +def intersection(meshes, engine=..., **kwargs): + """ + Compute the boolean intersection between a mesh an n other meshes. 
+ + Parameters + ---------- + meshes : list of trimesh.Trimesh + Meshes to be processed + engine : str + Which backend to use, i.e. 'blender' or 'manifold' + + Returns + ---------- + intersection : **kwargs for a Trimesh object of the + volume that is contained by all meshes + """ + ... + +def boolean_manifold(meshes, operation, debug=..., **kwargs): # -> Trimesh: + """ + Run an operation on a set of meshes using the Manifold engine. + """ + ... + +def boolean_scad(*args, **kwargs): # -> Trimesh: + ... + +_engines = ... diff --git a/typings/trimesh/bounds.pyi b/typings/trimesh/bounds.pyi new file mode 100644 index 00000000..6a8bb5fa --- /dev/null +++ b/typings/trimesh/bounds.pyi @@ -0,0 +1,143 @@ +""" +This type stub file was generated by pyright. +""" + +_flip = ... + +def oriented_bounds_2D(points, qhull_options=...): # -> tuple[Any | NDArray[float64], ndarray[Any, dtype[Any]]]: + """ + Find an oriented bounding box for an array of 2D points. + + Parameters + ---------- + points : (n,2) float + Points in 2D. + + Returns + ---------- + transform : (3,3) float + Homogeneous 2D transformation matrix to move the + input points so that the axis aligned bounding box + is CENTERED AT THE ORIGIN. + rectangle : (2,) float + Size of extents once input points are transformed + by transform + """ + ... + +def oriented_bounds(obj, angle_digits=..., ordered=..., normal=..., coplanar_tol=...): + """ + Find the oriented bounding box for a Trimesh + + Parameters + ---------- + obj : trimesh.Trimesh, (n, 2) float, or (n, 3) float + Mesh object or points in 2D or 3D space + angle_digits : int + How much angular precision do we want on our result. + Even with less precision the returned extents will cover + the mesh albeit with larger than minimal volume, and may + experience substantial speedups. + ordered : bool + Return a consistent order for bounds + normal : None or (3,) float + Override search for normal on 3D meshes. 
+ coplanar_tol : float + If a convex hull fails and we are checking to see if the + points are coplanar this is the maximum deviation from + a plane where the points will be considered coplanar. + + Returns + ---------- + to_origin : (4,4) float + Transformation matrix which will move the center of the + bounding box of the input mesh to the origin. + extents: (3,) float + The extents of the mesh once transformed with to_origin + """ + ... + +def minimum_cylinder(obj, sample_count=..., angle_tol=...): # -> dict[str, Unknown] | dict[str, Unknown | Any]: + """ + Find the approximate minimum volume cylinder which contains + a mesh or a a list of points. + + Samples a hemisphere then uses scipy.optimize to pick the + final orientation of the cylinder. + + A nice discussion about better ways to implement this is here: + https://www.staff.uni-mainz.de/schoemer/publications/ALGO00.pdf + + + Parameters + ---------- + obj : trimesh.Trimesh, or (n, 3) float + Mesh object or points in space + sample_count : int + How densely should we sample the hemisphere. + Angular spacing is 180 degrees / this number + + Returns + ---------- + result : dict + With keys: + 'radius' : float, radius of cylinder + 'height' : float, height of cylinder + 'transform' : (4,4) float, transform from the origin + to centered cylinder + """ + ... + +def to_extents(bounds): # -> tuple[Any, NDArray[float64]]: + """ + Convert an axis aligned bounding box to extents and + transform. + + Parameters + ------------ + bounds : (2, 3) float + Axis aligned bounds in space + + Returns + ------------ + extents : (3,) float + Extents of the bounding box + transform : (4, 4) float + Homogeneous transform moving extents to bounds + """ + ... + +def corners(bounds): # -> ndarray[Any, dtype[float64]] | ndarray[Any, dtype[Any]]: + """ + Given a pair of axis aligned bounds, return all + 8 corners of the bounding box. 
+ + Parameters + ---------- + bounds : (2,3) or (2,2) float + Axis aligned bounds + + Returns + ---------- + corners : (8,3) float + Corner vertices of the cube + """ + ... + +def contains(bounds, points): # -> Any: + """ + Do an axis aligned bounding box check on a list of points. + + Parameters + ----------- + bounds : (2, dimension) float + Axis aligned bounding box + points : (n, dimension) float + Points in space + + Returns + ----------- + points_inside : (n,) bool + True if points are inside the AABB + """ + ... diff --git a/typings/trimesh/caching.pyi b/typings/trimesh/caching.pyi new file mode 100644 index 00000000..05a59482 --- /dev/null +++ b/typings/trimesh/caching.pyi @@ -0,0 +1,366 @@ +""" +This type stub file was generated by pyright. +""" + +from collections.abc import Mapping + +import numpy as np + +""" +caching.py +----------- + +Functions and classes that help with tracking changes +in `numpy.ndarray` and clearing cached values based +on those changes. + +You should really `pip install xxhash`: + +``` +In [23]: %timeit int(blake2b(d).hexdigest(), 16) +102 us +/- 684 ns per loop + +In [24]: %timeit int(sha256(d).hexdigest(), 16) +142 us +/- 3.73 us + +In [25]: %timeit xxh3_64_intdigest(d) +3.37 us +/- 116 ns per loop +``` +""" + +def sha256(item): # -> int: + ... + +def hash_fallback(item): # -> int: + ... + +def tracked_array(array, dtype=...): # -> TrackedArray: + """ + Properly subclass a numpy ndarray to track changes. + + Avoids some pitfalls of subclassing by forcing contiguous + arrays and does a view into a TrackedArray. + + Parameters + ------------ + array : array- like object + To be turned into a TrackedArray + dtype : np.dtype + Which dtype to use for the array + + Returns + ------------ + tracked : TrackedArray + Contains input array data. + """ + ... + +def cache_decorator(function): # -> property: + """ + A decorator for class methods, replaces @property + but will store and retrieve function return values + in object cache. 
+ + Parameters + ------------ + function : method + This is used as a decorator: + ``` + @cache_decorator + def foo(self, things): + return 'happy days' + ``` + """ + ... + +class TrackedArray(np.ndarray): + """ + Subclass of numpy.ndarray that provides hash methods + to track changes. + + General method is to aggressively set 'modified' flags + on operations which might (but don't necessarily) alter + the array, ideally we sometimes compute hashes when we + don't need to, but we don't return wrong hashes ever. + + We store boolean modified flag for each hash type to + make checks fast even for queries of different hashes. + + Methods + ---------- + __hash__ : int + Runs the fastest available hash in this order: + `xxh3_64, xxh_64, blake2b, sha256` + """ + + def __array_finalize__(self, obj): # -> None: + """ + Sets a modified flag on every TrackedArray + This flag will be set on every change as well as + during copies and certain types of slicing. + """ + ... + def __array_wrap__(self, out_arr, context=...): # -> ndarray[Unknown, Unknown]: + """ + Return a numpy scalar if array is 0d. + See https://github.com/numpy/numpy/issues/5819 + """ + ... + @property + def mutable(self): # -> bool: + ... + @mutable.setter + def mutable(self, value): # -> None: + ... + def __hash__(self) -> int: + """ + Return a fast hash of the contents of the array. + + Returns + ------------- + hash : long int + A hash of the array contents. + """ + ... + def __iadd__(self, *args, **kwargs): + """ + In-place addition. + + The i* operations are in- place and modify the array, + so we better catch all of them. + """ + ... + def __isub__(self, *args, **kwargs): ... + def fill(self, *args, **kwargs): # -> None: + ... + def partition(self, *args, **kwargs): # -> None: + ... + def put(self, *args, **kwargs): # -> None: + ... + def byteswap(self, *args, **kwargs): # -> Self@TrackedArray: + ... + def itemset(self, *args, **kwargs): # -> None: + ... 
+ def sort(self, *args, **kwargs): # -> None: + ... + def setflags(self, *args, **kwargs): # -> None: + ... + def __imul__(self, *args, **kwargs): ... + def __idiv__(self, *args, **kwargs): ... + def __itruediv__(self, *args, **kwargs): ... + def __imatmul__(self, *args, **kwargs): ... + def __ipow__(self, *args, **kwargs): ... + def __imod__(self, *args, **kwargs): ... + def __ifloordiv__(self, *args, **kwargs): ... + def __ilshift__(self, *args, **kwargs): ... + def __irshift__(self, *args, **kwargs): ... + def __iand__(self, *args, **kwargs): ... + def __ixor__(self, *args, **kwargs): ... + def __ior__(self, *args, **kwargs): ... + def __setitem__(self, *args, **kwargs): ... + def __setslice__(self, *args, **kwargs): ... + +class Cache: + """ + Class to cache values which will be stored until the + result of an ID function changes. + """ + + def __init__(self, id_function, force_immutable=...) -> None: + """ + Create a cache object. + + Parameters + ------------ + id_function : function + Returns hashable value + force_immutable : bool + If set will make all numpy arrays read-only + """ + ... + def delete(self, key): # -> None: + """ + Remove a key from the cache. + """ + ... + def verify(self): # -> None: + """ + Verify that the cached values are still for the same + value of id_function and delete all stored items if + the value of id_function has changed. + """ + ... + def clear(self, exclude=...): # -> None: + """ + Remove elements in the cache. + + Parameters + ----------- + exclude : list + List of keys in cache to not clear. + """ + ... + def update(self, items): # -> None: + """ + Update the cache with a set of key, value pairs without + checking id_function. + """ + ... + def id_set(self): # -> None: + """ + Set the current ID to the value of the ID function. + """ + ... + def __getitem__(self, key): # -> None: + """ + Get an item from the cache. 
If the item + is not in the cache, it will return None + + Parameters + ------------- + key : hashable + Key in dict + + Returns + ------------- + cached : object, or None + Object that was stored + """ + ... + def __setitem__(self, key, value): + """ + Add an item to the cache. + + Parameters + ------------ + key : hashable + Key to reference value + value : any + Value to store in cache + """ + ... + def __contains__(self, key): # -> bool: + ... + def __len__(self): # -> int: + ... + def __enter__(self): # -> None: + ... + def __exit__(self, *args): # -> None: + ... + +class DiskCache: + """ + Store results of expensive operations on disk + with an option to expire the results. This is used + to cache the multi-gigabyte test corpuses in + `tests/corpus.py` + """ + + def __init__(self, path, expire_days=...) -> None: + """ + Create a cache on disk for storing expensive results. + + Parameters + -------------- + path : str + A writeable location on the current file path. + expire_days : int or float + How old should results be considered expired. + + """ + ... + def get(self, key, fetch): # -> bytes: + """ + Get a key from the cache or run a calculation. + + Parameters + ----------- + key : str + Key to reference item with + fetch : function + If key isn't stored and recent run this + function and store its result on disk. + """ + ... + +class DataStore(Mapping): + """ + A class to store multiple numpy arrays and track them all + for changes. + + Operates like a dict that only stores numpy.ndarray + """ + + def __init__(self) -> None: ... + def __iter__(self): # -> Iterator[Unknown]: + ... + def pop(self, key): ... + def __delitem__(self, key): # -> None: + ... + @property + def mutable(self): # -> Any | bool: + """ + Is data allowed to be altered or not. + + Returns + ----------- + is_mutable : bool + Can data be altered in the DataStore + """ + ... + @mutable.setter + def mutable(self, value): # -> None: + """ + Is data allowed to be altered or not. 
+ + Parameters + ------------ + is_mutable : bool + Should data be allowed to be altered + """ + ... + def is_empty(self): # -> bool: + """ + Is the current DataStore empty or not. + + Returns + ---------- + empty : bool + False if there are items in the DataStore + """ + ... + def clear(self): # -> None: + """ + Remove all data from the DataStore. + """ + ... + def __getitem__(self, key): ... + def __setitem__(self, key, data): # -> None: + """ + Store an item in the DataStore. + + Parameters + ------------- + key + A hashable key to store under + data + Usually a numpy array which will be subclassed + but anything hashable should be able to be stored. + """ + ... + def __contains__(self, key): # -> bool: + ... + def __len__(self): # -> int: + ... + def update(self, values): # -> None: + ... + def __hash__(self) -> int: + """ + Get a hash reflecting everything in the DataStore. + + Returns + ---------- + hash : str + hash of data in hexadecimal + """ + ... diff --git a/typings/trimesh/collision.pyi b/typings/trimesh/collision.pyi new file mode 100644 index 00000000..0d34bde6 --- /dev/null +++ b/typings/trimesh/collision.pyi @@ -0,0 +1,401 @@ +""" +This type stub file was generated by pyright. +""" + +class ContactData: + """ + Data structure for holding information about a collision contact. + """ + + def __init__(self, names, contact) -> None: + """ + Initialize a ContactData. + + Parameters + ---------- + names : list of str + The names of the two objects in order. + contact : fcl.Contact + The contact in question. + """ + ... + @property + def normal(self): + """ + The 3D intersection normal for this contact. + + Returns + ------- + normal : (3,) float + The intersection normal. + """ + ... + @property + def point(self): + """ + The 3D point of intersection for this contact. + + Returns + ------- + point : (3,) float + The intersection point. + """ + ... 
+ @property + def depth(self): + """ + The penetration depth of the 3D point of intersection for this contact. + + Returns + ------- + depth : float + The penetration depth. + """ + ... + def index(self, name): + """ + Returns the index of the face in contact for the mesh with + the given name. + + Parameters + ---------- + name : str + The name of the target object. + + Returns + ------- + index : int + The index of the face in collision + """ + ... + +class DistanceData: + """ + Data structure for holding information about a distance query. + """ + + def __init__(self, names, result) -> None: + """ + Initialize a DistanceData. + + Parameters + ---------- + names : list of str + The names of the two objects in order. + contact : fcl.DistanceResult + The distance query result. + """ + ... + @property + def distance(self): + """ + Returns the distance between the two objects. + + Returns + ------- + distance : float + The euclidean distance between the objects. + """ + ... + def index(self, name): + """ + Returns the index of the closest face for the mesh with + the given name. + + Parameters + ---------- + name : str + The name of the target object. + + Returns + ------- + index : int + The index of the face in collisoin. + """ + ... + def point(self, name): + """ + The 3D point of closest distance on the mesh with the given name. + + Parameters + ---------- + name : str + The name of the target object. + + Returns + ------- + point : (3,) float + The closest point. + """ + ... + +class CollisionManager: + """ + A mesh-mesh collision manager. + """ + + def __init__(self) -> None: + """ + Initialize a mesh-mesh collision manager. + """ + ... + def add_object(self, name, mesh, transform=...): + """ + Add an object to the collision manager. + + If an object with the given name is already in the manager, + replace it. 
+ + Parameters + ---------- + name : str + An identifier for the object + mesh : Trimesh object + The geometry of the collision object + transform : (4,4) float + Homogeneous transform matrix for the object + """ + ... + def remove_object(self, name): # -> None: + """ + Delete an object from the collision manager. + + Parameters + ---------- + name : str + The identifier for the object + """ + ... + def set_transform(self, name, transform): # -> None: + """ + Set the transform for one of the manager's objects. + This replaces the prior transform. + + Parameters + ---------- + name : str + An identifier for the object already in the manager + transform : (4,4) float + A new homogeneous transform matrix for the object + """ + ... + def in_collision_single( + self, mesh, transform=..., return_names=..., return_data=... + ): # -> tuple[Unknown, set[Unknown], list[Unknown]] | tuple[Unknown, set[Unknown]] | tuple[Unknown, list[Unknown]]: + """ + Check a single object for collisions against all objects in the + manager. + + Parameters + ---------- + mesh : Trimesh object + The geometry of the collision object + transform : (4,4) float + Homogeneous transform matrix + return_names : bool + If true, a set is returned containing the names + of all objects in collision with the object + return_data : bool + If true, a list of ContactData is returned as well + + Returns + ------------ + is_collision : bool + True if a collision occurs and False otherwise + names : set of str + [OPTIONAL] The set of names of objects that collided with the + provided one + contacts : list of ContactData + [OPTIONAL] All contacts detected + """ + ... + def in_collision_internal( + self, return_names=..., return_data=... + ): # -> tuple[Unknown, set[Unknown], list[Unknown]] | tuple[Unknown, set[Unknown]] | tuple[Unknown, list[Unknown]]: + """ + Check if any pair of objects in the manager collide with one another. 
+ + Parameters + ---------- + return_names : bool + If true, a set is returned containing the names + of all pairs of objects in collision. + return_data : bool + If true, a list of ContactData is returned as well + + Returns + ------- + is_collision : bool + True if a collision occurred between any pair of objects + and False otherwise + names : set of 2-tup + The set of pairwise collisions. Each tuple + contains two names in alphabetical order indicating + that the two corresponding objects are in collision. + contacts : list of ContactData + All contacts detected + """ + ... + def in_collision_other( + self, other_manager, return_names=..., return_data=... + ): # -> tuple[Unknown, set[Unknown], list[Unknown]] | tuple[Unknown, set[Unknown]] | tuple[Unknown, list[Unknown]]: + """ + Check if any object from this manager collides with any object + from another manager. + + Parameters + ------------------- + other_manager : CollisionManager + Another collision manager object + return_names : bool + If true, a set is returned containing the names + of all pairs of objects in collision. + return_data : bool + If true, a list of ContactData is returned as well + + Returns + ------------- + is_collision : bool + True if a collision occurred between any pair of objects + and False otherwise + names : set of 2-tup + The set of pairwise collisions. Each tuple + contains two names (first from this manager, + second from the other_manager) indicating + that the two corresponding objects are in collision. + contacts : list of ContactData + All contacts detected + """ + ... + def min_distance_single( + self, mesh, transform=..., return_name=..., return_data=... + ): # -> tuple[Unknown, None, DistanceData | None] | tuple[Unknown, None] | tuple[Unknown, DistanceData | None]: + """ + Get the minimum distance between a single object and any + object in the manager. 
+ + Parameters + --------------- + mesh : Trimesh object + The geometry of the collision object + transform : (4,4) float + Homogeneous transform matrix for the object + return_names : bool + If true, return name of the closest object + return_data : bool + If true, a DistanceData object is returned as well + + Returns + ------------- + distance : float + Min distance between mesh and any object in the manager + name : str + The name of the object in the manager that was closest + data : DistanceData + Extra data about the distance query + """ + ... + def min_distance_internal( + self, return_names=..., return_data=... + ): # -> tuple[Unknown, tuple[Unknown, ...] | None, DistanceData | None] | tuple[Unknown, tuple[Unknown, ...] | None] | tuple[Unknown, DistanceData | None]: + """ + Get the minimum distance between any pair of objects in the manager. + + Parameters + ------------- + return_names : bool + If true, a 2-tuple is returned containing the names + of the closest objects. + return_data : bool + If true, a DistanceData object is returned as well + + Returns + ----------- + distance : float + Min distance between any two managed objects + names : (2,) str + The names of the closest objects + data : DistanceData + Extra data about the distance query + """ + ... + def min_distance_other( + self, other_manager, return_names=..., return_data=... + ): # -> tuple[Unknown, tuple[None, Unknown] | None, DistanceData | None] | tuple[Unknown, tuple[None, Unknown] | None] | tuple[Unknown, DistanceData | None]: + """ + Get the minimum distance between any pair of objects, + one in each manager. + + Parameters + ---------- + other_manager : CollisionManager + Another collision manager object + return_names : bool + If true, a 2-tuple is returned containing + the names of the closest objects. 
+ return_data : bool + If true, a DistanceData object is returned as well + + Returns + ----------- + distance : float + The min distance between a pair of objects, + one from each manager. + names : 2-tup of str + A 2-tuple containing two names (first from this manager, + second from the other_manager) indicating + the two closest objects. + data : DistanceData + Extra data about the distance query + """ + ... + +def mesh_to_BVH(mesh): + """ + Create a BVHModel object from a Trimesh object + + Parameters + ----------- + mesh : Trimesh + Input geometry + + Returns + ------------ + bvh : fcl.BVHModel + BVH of input geometry + """ + ... + +def mesh_to_convex(mesh): + """ + Create a Convex object from a Trimesh object + + Parameters + ----------- + mesh : Trimesh + Input geometry + + Returns + ------------ + convex : fcl.Convex + Convex of input geometry + """ + ... + +def scene_to_collision(scene): # -> tuple[CollisionManager, dict[Unknown, Unknown]]: + """ + Create collision objects from a trimesh.Scene object. + + Parameters + ------------ + scene : trimesh.Scene + Scene to create collision objects for + + Returns + ------------ + manager : CollisionManager + CollisionManager for objects in scene + objects: {node name: CollisionObject} + Collision objects for nodes in scene + """ + ... diff --git a/typings/trimesh/comparison.pyi b/typings/trimesh/comparison.pyi new file mode 100644 index 00000000..3d411d03 --- /dev/null +++ b/typings/trimesh/comparison.pyi @@ -0,0 +1,49 @@ +""" +This type stub file was generated by pyright. +""" + +import numpy as np + +""" +comparison.py +---------------- + +Provide methods for quickly hashing and comparing meshes. +""" +id_sigfig = np.array([5, 10, 5, 2, 2, 3, 1]) + +def identifier_simple(mesh): # -> NDArray[float64]: + """ + Return a basic identifier for a mesh consisting of + properties that have been hand tuned to be somewhat + robust to rigid transformations and different + tesselations. 
+ + Parameters + ------------ + mesh : trimesh.Trimesh + Source geometry + + Returns + ---------- + identifier : (7,) float + Identifying values of the mesh + """ + ... + +def identifier_hash(identifier): # -> str: + """ + Hash an identifier array in a way that is hand-tuned to be + somewhat robust to likely changes. + + Parameters + ------------ + identifier : (n,) float + Vector of properties + + Returns + ---------- + hash : (64,) str + A SHA256 of the identifier vector at hand-tuned precision. + """ + ... diff --git a/typings/trimesh/constants.pyi b/typings/trimesh/constants.pyi new file mode 100644 index 00000000..578a7b34 --- /dev/null +++ b/typings/trimesh/constants.pyi @@ -0,0 +1,125 @@ +""" +This type stub file was generated by pyright. +""" + +from dataclasses import dataclass + +@dataclass +class ToleranceMesh: + """ + ToleranceMesh objects hold tolerance information about meshes. + + Parameters + ---------------- + tol.zero : float + Floating point numbers smaller than this are considered zero + tol.merge : float + When merging vertices, consider vertices closer than this + to be the same vertex. Here we use the same value (1e-8) + as SolidWorks uses, according to their documentation. + tol.planar : float + The maximum distance from a plane a point can be and + still be considered to be on the plane + tol.facet_threshold : float + Threshold for two facets to be considered coplanar + tol.strict : bool + If True, run additional in- process checks (slower) + """ + + zero: float = ... + merge: float = ... + planar: float = ... + facet_threshold: int = ... + strict: bool = ... + +@dataclass +class TolerancePath: + """ + TolerancePath objects contain tolerance information used in + Path objects. + + Parameters + --------------- + tol.zero : float + Floating point numbers smaller than this are considered zero + tol.merge : float + When merging vertices, consider vertices closer than this + to be the same vertex. 
Here we use the same value (1e-8) + as SolidWorks uses, according to their documentation. + tol.planar : float + The maximum distance from a plane a point can be and + still be considered to be on the plane + tol.seg_frac : float + When simplifying line segments what percentage of the drawing + scale can a segment be and have a curve fitted + tol.seg_angle : float + When simplifying line segments to arcs, what angle + can a segment span to be acceptable. + tol.aspect_frac : float + When simplifying line segments to closed arcs (circles) + what percentage can the aspect ratio differfrom 1:1 + before escaping the fit early + tol.radius_frac : float + When simplifying line segments to arcs, what percentage + of the fit radius can vertices deviate to be acceptable + tol.radius_min : + When simplifying line segments to arcs, what is the minimum + radius multiplied by document scale for an acceptable fit + tol.radius_max : + When simplifying line segments to arcs, what is the maximum + radius multiplied by document scale for an acceptable fit + tol.tangent : + When simplifying line segments to curves, what is the maximum + angle the end sections can deviate from tangent that is + acceptable. + """ + + zero: float = ... + merge: float = ... + planar: float = ... + seg_frac: float = ... + seg_angle: float = ... + seg_angle_min: float = ... + seg_angle_frac: float = ... + aspect_frac: float = ... + radius_frac: float = ... + radius_min: float = ... + radius_max: float = ... + tangent: float = ... + strict: bool = ... 
+ +@dataclass +class ResolutionPath: + """ + res.seg_frac : float + When discretizing curves, what percentage of the drawing + scale should we aim to make a single segment + res.seg_angle : float + When discretizing curves, what angle should a section span + res.max_sections : int + When discretizing splines, what is the maximum number + of segments per control point + res.min_sections : int + When discretizing splines, what is the minimum number + of segments per control point + res.export : str + Format string to use when exporting floating point vertices + """ + + seg_frac: float = ... + seg_angle: float = ... + max_sections: float = ... + min_sections: float = ... + export: str = ... + +tol = ... +tol_path = ... +res_path = ... + +def log_time(method): # -> (*args: Unknown, **kwargs: Unknown) -> Unknown: + """ + A decorator for methods which will time the method + and then emit a log.debug message with the method name + and how long it took to execute. + """ + ... diff --git a/typings/trimesh/convex.pyi b/typings/trimesh/convex.pyi new file mode 100644 index 00000000..f1498cc8 --- /dev/null +++ b/typings/trimesh/convex.pyi @@ -0,0 +1,86 @@ +""" +This type stub file was generated by pyright. +""" + +""" +convex.py + +Deal with creating and checking convex objects in 2, 3 and N dimensions. + +Convex is defined as: +1) "Convex, meaning "curving out" or "extending outward" (compare to concave) +2) having an outline or surface curved like the exterior of a circle or sphere. +3) (of a polygon) having only interior angles measuring less than 180 +""" + +def convex_hull(obj, qhull_options=..., repair=...): # -> Trimesh: + """ + Get a new Trimesh object representing the convex hull of the + current mesh attempting to return a watertight mesh with correct + normals. 
+ + Details on qhull options: + http://www.qhull.org/html/qh-quick.htm#options + + Arguments + -------- + obj : Trimesh, or (n,3) float + Mesh or cartesian points + qhull_options : str + Options to pass to qhull. + + Returns + -------- + convex : Trimesh + Mesh of convex hull + """ + ... + +def adjacency_projections(mesh): # -> Any: + """ + Test if a mesh is convex by projecting the vertices of + a triangle onto the normal of its adjacent face. + + Parameters + ---------- + mesh : Trimesh + Input geometry + + Returns + ---------- + projection : (len(mesh.face_adjacency),) float + Distance of projection of adjacent vertex onto plane + """ + ... + +def is_convex(mesh): # -> bool: + """ + Check if a mesh is convex. + + Parameters + ----------- + mesh : Trimesh + Input geometry + + Returns + ----------- + convex : bool + Was passed mesh convex or not + """ + ... + +def hull_points(obj, qhull_options=...): # -> ndarray[Any, dtype[float64]]: + """ + Try to extract a convex set of points from multiple input formats. + + Parameters + --------- + obj: Trimesh object + (n,d) points + (m,) Trimesh objects + + Returns + -------- + points: (o,d) convex set of points + """ + ... diff --git a/typings/trimesh/creation.pyi b/typings/trimesh/creation.pyi new file mode 100644 index 00000000..0f5d92b9 --- /dev/null +++ b/typings/trimesh/creation.pyi @@ -0,0 +1,431 @@ +""" +This type stub file was generated by pyright. +""" + +""" +creation.py +-------------- + +Create meshes from primitives, or with operations. +""" + +def revolve(linestring, angle=..., sections=..., transform=..., **kwargs): # -> Trimesh: + """ + Revolve a 2D line string around the 2D Y axis, with a result with + the 2D Y axis pointing along the 3D Z axis. + + This function is intended to handle the complexity of indexing + and is intended to be used to create all radially symmetric primitives, + eventually including cylinders, annular cylinders, capsules, cones, + and UV spheres. 
+ + Note that if your linestring is closed, it needs to be counterclockwise + if you would like face winding and normals facing outwards. + + Parameters + ------------- + linestring : (n, 2) float + Lines in 2D which will be revolved + angle : None or float + Angle in radians to revolve curve by + sections : None or int + Number of sections result should have + If not specified default is 32 per revolution + transform : None or (4, 4) float + Transform to apply to mesh after construction + **kwargs : dict + Passed to Trimesh constructor + + Returns + -------------- + revolved : Trimesh + Mesh representing revolved result + """ + ... + +def extrude_polygon(polygon, height, transform=..., **kwargs): # -> Trimesh: + """ + Extrude a 2D shapely polygon into a 3D mesh + + Parameters + ---------- + polygon : shapely.geometry.Polygon + 2D geometry to extrude + height : float + Distance to extrude polygon along Z + triangle_args : str or None + Passed to triangle + **kwargs : dict + Passed to `triangulate_polygon` + + Returns + ---------- + mesh : trimesh.Trimesh + Resulting extrusion as watertight body + """ + ... + +def sweep_polygon(polygon, path, angles=..., **kwargs): # -> Trimesh: + """ + Extrude a 2D shapely polygon into a 3D mesh along an + arbitrary 3D path. Doesn't handle sharp curvature well. + + + Parameters + ---------- + polygon : shapely.geometry.Polygon + Profile to sweep along path + path : (n, 3) float + A path in 3D + angles : (n,) float + Optional rotation angle relative to prior vertex + at each vertex + **kwargs : dict + Passed to `triangulate_polygon`. + Returns + ------- + mesh : trimesh.Trimesh + Geometry of result + """ + ... + +def extrude_triangulation(vertices, faces, height, transform=..., **kwargs): # -> Trimesh: + """ + Extrude a 2D triangulation into a watertight mesh. 
+ + Parameters + ---------- + vertices : (n, 2) float + 2D vertices + faces : (m, 3) int + Triangle indexes of vertices + height : float + Distance to extrude triangulation + **kwargs : dict + Passed to Trimesh constructor + + Returns + --------- + mesh : trimesh.Trimesh + Mesh created from extrusion + """ + ... + +def triangulate_polygon( + polygon, triangle_args=..., engine=..., **kwargs +): # -> tuple[NDArray[Unknown], Unknown] | tuple[Unknown, Unknown]: + """ + Given a shapely polygon create a triangulation using a + python interface to `triangle.c` or mapbox-earcut. + > pip install triangle + > pip install mapbox_earcut + + Parameters + --------- + polygon : Shapely.geometry.Polygon + Polygon object to be triangulated. + triangle_args : str or None + Passed to triangle.triangulate i.e: 'p', 'pq30' + engine : None or str + Any value other than 'earcut' will use `triangle` + + Returns + -------------- + vertices : (n, 2) float + Points in space + faces : (n, 3) int + Index of vertices that make up triangles + """ + ... + +def box(extents=..., transform=..., bounds=..., **kwargs): # -> Trimesh: + """ + Return a cuboid. + + Parameters + ------------ + extents : float, or (3,) float + Edge lengths + transform: (4, 4) float + Transformation matrix + bounds : None or (2, 3) float + Corners of AABB, overrides extents and transform. + **kwargs: + passed to Trimesh to create box + + Returns + ------------ + geometry : trimesh.Trimesh + Mesh of a cuboid + """ + ... + +def icosahedron(**kwargs): # -> Trimesh: + """ + Create an icosahedron, one of the platonic solids which is has 20 faces. + + Parameters + ------------ + kwargs : dict + Passed through to `Trimesh` constructor. + + Returns + ------------- + ico : trimesh.Trimesh + Icosahederon centered at the origin. + """ + ... + +def icosphere(subdivisions=..., radius=..., **kwargs): # -> Trimesh: + """ + Create an isophere centered at the origin. 
+ + Parameters + ---------- + subdivisions : int + How many times to subdivide the mesh. + Note that the number of faces will grow as function of + 4 ** subdivisions, so you probably want to keep this under ~5 + radius : float + Desired radius of sphere + kwargs : dict + Passed through to `Trimesh` constructor. + + Returns + --------- + ico : trimesh.Trimesh + Meshed sphere + """ + ... + +def uv_sphere(radius=..., count=..., transform=..., **kwargs): # -> Trimesh: + """ + Create a UV sphere (latitude + longitude) centered at the + origin. Roughly one order of magnitude faster than an + icosphere but slightly uglier. + + Parameters + ---------- + radius : float + Radius of sphere + count : (2,) int + Number of latitude and longitude lines + kwargs : dict + Passed thgrough + Returns + ---------- + mesh : trimesh.Trimesh + Mesh of UV sphere with specified parameters + """ + ... + +def capsule(height=..., radius=..., count=..., transform=...): # -> Trimesh: + """ + Create a mesh of a capsule, or a cylinder with hemispheric ends. + + Parameters + ---------- + height : float + Center to center distance of two spheres + radius : float + Radius of the cylinder and hemispheres + count : (2,) int + Number of sections on latitude and longitude + + Returns + ---------- + capsule : trimesh.Trimesh + Capsule geometry with: + - cylinder axis is along Z + - one hemisphere is centered at the origin + - other hemisphere is centered along the Z axis at height + """ + ... + +def cone(radius, height, sections=..., transform=..., **kwargs): # -> Trimesh: + """ + Create a mesh of a cone along Z centered at the origin. 
+ + Parameters + ---------- + radius : float + The radius of the cylinder + height : float + The height of the cylinder + sections : int or None + How many pie wedges per revolution + transform : (4, 4) float or None + Transform to apply after creation + **kwargs : dict + Passed to Trimesh constructor + + Returns + ---------- + cone: trimesh.Trimesh + Resulting mesh of a cone + """ + ... + +def cylinder(radius, height=..., sections=..., segment=..., transform=..., **kwargs): # -> Trimesh: + """ + Create a mesh of a cylinder along Z centered at the origin. + + Parameters + ---------- + radius : float + The radius of the cylinder + height : float or None + The height of the cylinder + sections : int or None + How many pie wedges should the cylinder have + segment : (2, 3) float + Endpoints of axis, overrides transform and height + transform : (4, 4) float + Transform to apply + **kwargs: + passed to Trimesh to create cylinder + + Returns + ---------- + cylinder: trimesh.Trimesh + Resulting mesh of a cylinder + """ + ... + +def annulus(r_min, r_max, height=..., sections=..., transform=..., segment=..., **kwargs): # -> Trimesh: + """ + Create a mesh of an annular cylinder along Z centered at the origin. + + Parameters + ---------- + r_min : float + The inner radius of the annular cylinder + r_max : float + The outer radius of the annular cylinder + height : float + The height of the annular cylinder + sections : int or None + How many pie wedges should the annular cylinder have + transform : (4, 4) float or None + Transform to apply to move result from the origin + segment : None or (2, 3) float + Override transform and height with a line segment + **kwargs: + passed to Trimesh to create annulus + + Returns + ---------- + annulus : trimesh.Trimesh + Mesh of annular cylinder + """ + ... 
+ +def random_soup(face_count=...): # -> Trimesh: + """ + Return random triangles as a Trimesh + + Parameters + ----------- + face_count : int + Number of faces desired in mesh + + Returns + ----------- + soup : trimesh.Trimesh + Geometry with face_count random faces + """ + ... + +def axis(origin_size=..., transform=..., origin_color=..., axis_radius=..., axis_length=...): # -> list[Unknown] | Any: + """ + Return an XYZ axis marker as a Trimesh, which represents position + and orientation. If you set the origin size the other parameters + will be set relative to it. + + Parameters + ---------- + transform : (4, 4) float + Transformation matrix + origin_size : float + Radius of sphere that represents the origin + origin_color : (3,) float or int, uint8 or float + Color of the origin + axis_radius : float + Radius of cylinder that represents x, y, z axis + axis_length: float + Length of cylinder that represents x, y, z axis + + Returns + ------- + marker : trimesh.Trimesh + Mesh geometry of axis indicators + """ + ... + +def camera_marker(camera, marker_height=..., origin_size=...): # -> list[Unknown | list[Unknown] | Any]: + """ + Create a visual marker for a camera object, including an axis and FOV. + + Parameters + --------------- + camera : trimesh.scene.Camera + Camera object with FOV and transform defined + marker_height : float + How far along the camera Z should FOV indicators be + origin_size : float + Sphere radius of the origin (default: marker_height / 10.0) + + Returns + ------------ + meshes : list + Contains Trimesh and Path3D objects which can be visualized + """ + ... + +def truncated_prisms(tris, origin=..., normal=...): # -> Trimesh: + """ + Return a mesh consisting of multiple watertight prisms below + a list of triangles, truncated by a specified plane. 
+ + Parameters + ------------- + triangles : (n, 3, 3) float + Triangles in space + origin : None or (3,) float + Origin of truncation plane + normal : None or (3,) float + Unit normal vector of truncation plane + + Returns + ----------- + mesh : trimesh.Trimesh + Triangular mesh + """ + ... + +def torus(major_radius, minor_radius, major_sections=..., minor_sections=..., transform=..., **kwargs): # -> Trimesh: + """Create a mesh of a torus around Z centered at the origin. + + Parameters + ------------ + major_radius: (float) + Radius from the center of the torus to the center of the tube. + minor_radius: (float) + Radius of the tube. + major_sections: int + Number of sections around major radius result should have + If not specified default is 32 per revolution + minor_sections: int + Number of sections around minor radius result should have + If not specified default is 32 per revolution + transform: (4, 4) float + Transformation matrix + **kwargs: + passed to Trimesh to create torus + + Returns + ------------ + geometry : trimesh.Trimesh + Mesh of a torus + """ + ... diff --git a/typings/trimesh/curvature.pyi b/typings/trimesh/curvature.pyi new file mode 100644 index 00000000..0aa25066 --- /dev/null +++ b/typings/trimesh/curvature.pyi @@ -0,0 +1,118 @@ +""" +This type stub file was generated by pyright. +""" + +""" +curvature.py +--------------- + +Query mesh curvature. +""" + +def face_angles_sparse(mesh): # -> coo_matrix: + """ + A sparse matrix representation of the face angles. + + Returns + ---------- + sparse : scipy.sparse.coo_matrix + matrix is float shaped (len(vertices), len(faces)) + """ + ... + +def vertex_defects(mesh): # -> NDArray[floating[Any]]: + """ + Return the vertex defects, or (2*pi) minus the sum of the + angles of every face that includes that vertex. + + If a vertex is only included by coplanar triangles, this + will be zero. For convex regions this is positive, and + concave negative. 
+ + Returns + -------- + vertex_defect : (len(self.vertices), ) float + Vertex defect at the every vertex + """ + ... + +def discrete_gaussian_curvature_measure(mesh, points, radius): # -> NDArray[Unknown]: + """ + Return the discrete gaussian curvature measure of a sphere + centered at a point as detailed in 'Restricted Delaunay + triangulations and normal cycle'- Cohen-Steiner and Morvan. + + This is the sum of the vertex defects at all vertices + within the radius for each point. + + Parameters + ---------- + points : (n, 3) float + Points in space + radius : float , + The sphere radius, which can be zero if vertices + passed are points. + + Returns + -------- + gaussian_curvature: (n,) float + Discrete gaussian curvature measure. + """ + ... + +def discrete_mean_curvature_measure(mesh, points, radius): # -> NDArray[float64]: + """ + Return the discrete mean curvature measure of a sphere + centered at a point as detailed in 'Restricted Delaunay + triangulations and normal cycle'- Cohen-Steiner and Morvan. + + This is the sum of the angle at all edges contained in the + sphere for each point. + + Parameters + ---------- + points : (n, 3) float + Points in space + radius : float + Sphere radius which should typically be greater than zero + + Returns + -------- + mean_curvature : (n,) float + Discrete mean curvature measure. + """ + ... + +def line_ball_intersection(start_points, end_points, center, radius): # -> NDArray[float64]: + """ + Compute the length of the intersection of a line segment with a ball. + + Parameters + ---------- + start_points : (n,3) float, list of points in space + end_points : (n,3) float, list of points in space + center : (3,) float, the sphere center + radius : float, the sphere radius + + Returns + -------- + lengths: (n,) float, the lengths. + + """ + ... 
+ +def sphere_ball_intersection(R, r): # -> None: + """ + Compute the surface area of the intersection of sphere of radius R centered + at (0, 0, 0) with a ball of radius r centered at (R, 0, 0). + + Parameters + ---------- + R : float, sphere radius + r : float, ball radius + + Returns + -------- + area: float, the surface are. + """ + ... diff --git a/typings/trimesh/decomposition.pyi b/typings/trimesh/decomposition.pyi new file mode 100644 index 00000000..e9d310fe --- /dev/null +++ b/typings/trimesh/decomposition.pyi @@ -0,0 +1,38 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Dict, List + +def convex_decomposition(mesh, **kwargs) -> List[Dict]: + """ + Compute an approximate convex decomposition of a mesh. + + VHACD Parameters which can be passed as kwargs: + + Name Default + ----------------------------------------- + maxConvexHulls 64 + resolution 400000 + minimumVolumePercentErrorAllowed 1.0 + maxRecursionDepth 10 + shrinkWrap True + fillMode "flood" + maxNumVerticesPerCH 64 + asyncACD True + minEdgeLength 2 + findBestPlane False + + Parameters + ---------- + mesh : trimesh.Trimesh + Mesh to be decomposed into convex parts + **kwargs : VHACD keyword arguments + + Returns + ------- + mesh_args : list + List of **kwargs for Trimeshes that are nearly + convex and approximate the original. + """ + ... diff --git a/typings/trimesh/exceptions.pyi b/typings/trimesh/exceptions.pyi new file mode 100644 index 00000000..ea0b8671 --- /dev/null +++ b/typings/trimesh/exceptions.pyi @@ -0,0 +1,26 @@ +""" +This type stub file was generated by pyright. +""" + +""" +exceptions.py +---------------- + +Wrap exceptions. +""" + +class ExceptionWrapper: + """ + Create a dummy object which will raise an exception when attributes + are accessed (i.e. when used as a module) or when called (i.e. 
+ when used like a function) + + For soft dependencies we want to survive failing to import but + we would like to raise an appropriate error when the functionality is + actually requested so the user gets an easily debuggable message. + """ + + def __init__(self, exception) -> None: ... + def __getattribute__(self, *args, **kwargs): # -> type[None]: + ... + def __call__(self, *args, **kwargs): ... diff --git a/typings/trimesh/exchange/__init__.pyi b/typings/trimesh/exchange/__init__.pyi new file mode 100644 index 00000000..3d3b5b44 --- /dev/null +++ b/typings/trimesh/exchange/__init__.pyi @@ -0,0 +1,18 @@ +""" +This type stub file was generated by pyright. +""" + +""" +trimesh/exchange +---------------- + +Contains the importers and exporters for various mesh formats. + +Note that *you should probably not be using these directly*, if +you call `trimesh.load` it will then call and wrap the result +of the various loaders: + +``` +mesh = trimesh.load(file_name) +``` +""" diff --git a/typings/trimesh/exchange/binvox.pyi b/typings/trimesh/exchange/binvox.pyi new file mode 100644 index 00000000..65ce8b1a --- /dev/null +++ b/typings/trimesh/exchange/binvox.pyi @@ -0,0 +1,342 @@ +""" +This type stub file was generated by pyright. +""" + +""" +Parsing functions for Binvox files. + +https://www.patrickmin.com/binvox/binvox.html + +Exporting meshes as binvox files requires the +`binvox` executable to be in your path. +""" +binvox_encoder = ... +Binvox = ... + +def parse_binvox_header(fp): # -> tuple[tuple[int, ...], tuple[float, ...], float]: + """ + Read the header from a binvox file. + Spec available: + https://www.patrickmin.com/binvox/binvox.html + + Parameters + ------------ + fp: file-object + File like object with binvox file + + Returns + ---------- + shape : tuple + Shape of binvox according to binvox spec + translate : tuple + Translation + scale : float + Scale of voxels + + Raises + ------------ + IOError + If invalid binvox file. + """ + ... 
+ +def parse_binvox(fp, writeable=...): # -> Binvox: + """ + Read a binvox file, spec at + https://www.patrickmin.com/binvox/binvox.html + + Parameters + ------------ + fp: file-object + File like object with binvox file + + Returns + ---------- + binvox : namedtuple + Containing data + rle : numpy array + Run length encoded data + + Raises + ------------ + IOError + If invalid binvox file + """ + ... + +_binvox_header = ... + +def binvox_header(shape, translate, scale): # -> str: + """ + Get a binvox header string. + + Parameters + -------- + shape: length 3 iterable of ints denoting shape of voxel grid. + translate: length 3 iterable of floats denoting translation. + scale: num length of entire voxel grid. + + Returns + -------- + string including "data\n" line. + """ + ... + +def binvox_bytes(rle_data, shape, translate=..., scale=...): + """Get a binary representation of binvox data. + + Parameters + -------- + rle_data : numpy array + Run-length encoded numpy array. + shape : (3,) int + Shape of voxel grid. + translate : (3,) float + Translation of voxels + scale : float + Length of entire voxel grid. + + Returns + -------- + data : bytes + Suitable for writing to binary file + """ + ... + +def voxel_from_binvox(rle_data, shape, translate=..., scale=..., axis_order=...): # -> VoxelGrid: + """ + Factory for building from data associated with binvox files. + + Parameters + --------- + rle_data : numpy + Run-length-encoded of flat voxel + values, or a `trimesh.rle.RunLengthEncoding` object. + See `trimesh.rle` documentation for description of encoding + shape : (3,) int + Shape of voxel grid. + translate : (3,) float + Translation of voxels + scale : float + Length of entire voxel grid. + encoded_axes : iterable + With values in ('x', 'y', 'z', 0, 1, 2), + where x => 0, y => 1, z => 2 + denoting the order of axes in the encoded data. binvox by + default saves in xzy order, but using `xyz` (or (0, 1, 2)) will + be faster in some circumstances. 
+ + Returns + --------- + result : VoxelGrid + Loaded voxels + """ + ... + +def load_binvox(file_obj, resolver=..., axis_order=..., file_type=...): # -> VoxelGrid: + """ + Load trimesh `VoxelGrid` instance from file. + + Parameters + ----------- + file_obj : file-like object + Contains binvox data + resolver : unused + axis_order : str + Order of axes in encoded data. + Binvox default is 'xzy', but 'xyz' may be faster + where this is not relevant. + + Returns + --------- + result : trimesh.voxel.VoxelGrid + Loaded voxel data + """ + ... + +def export_binvox(voxel, axis_order=...): + """ + Export `trimesh.voxel.VoxelGrid` instance to bytes + + Parameters + ------------ + voxel : `trimesh.voxel.VoxelGrid` + Assumes axis ordering of `xyz` and encodes + in binvox default `xzy` ordering. + axis_order : str + Eements in ('x', 'y', 'z', 0, 1, 2), the order + of axes to encode data (standard is 'xzy' for binvox). `voxel` + data is assumed to be in order 'xyz'. + + Returns + ----------- + result : bytes + Representation according to binvox spec + """ + ... + +class Binvoxer: + """ + Interface for binvox CL tool. + + This class is responsible purely for making calls to the CL tool. It + makes no attempt to integrate with the rest of trimesh at all. + + Constructor args configure command line options. + + `Binvoxer.__call__` operates on the path to a mode file. + + If using this interface in published works, please cite the references + below. + + See CL tool website for further details. + + https://www.patrickmin.com/binvox/ + + @article{nooruddin03, + author = {Fakir S. 
Nooruddin and Greg Turk}, + title = {Simplification and Repair of Polygonal Models Using Volumetric + Techniques}, + journal = {IEEE Transactions on Visualization and Computer Graphics}, + volume = {9}, + number = {2}, + pages = {191--205}, + year = {2003} + } + + @Misc{binvox, + author = {Patrick Min}, + title = {binvox}, + howpublished = {{\tt http://www.patrickmin.com/binvox} or + {\tt https://www.google.com/search?q=binvox}}, + year = {2004 - 2019}, + note = {Accessed: yyyy-mm-dd} + } + """ + + SUPPORTED_INPUT_TYPES = ... + SUPPORTED_OUTPUT_TYPES = ... + def __init__( + self, + dimension=..., + file_type=..., + z_buffer_carving=..., + z_buffer_voting=..., + dilated_carving=..., + exact=..., + bounding_box=..., + remove_internal=..., + center=..., + rotate_x=..., + rotate_z=..., + wireframe=..., + fit=..., + block_id=..., + use_material_block_id=..., + use_offscreen_pbuffer=..., + downsample_factor=..., + downsample_threshold=..., + verbose=..., + binvox_path=..., + ) -> None: + """ + Configure the voxelizer. + + Parameters + ------------ + dimension: voxel grid size (max 1024 when not using exact) + file_type: str + Output file type, supported types are: + 'binvox' + 'hips' + 'mira' + 'vtk' + 'raw' + 'schematic' + 'msh' + z_buffer_carving : use z buffer based carving. At least one of + `z_buffer_carving` and `z_buffer_voting` must be True. + z_buffer_voting: use z-buffer based parity voting method. + dilated_carving: stop carving 1 voxel before intersection. + exact: any voxel with part of a triangle gets set. Does not use + graphics card. + bounding_box: 6-element float list/tuple of min, max values, + (minx, miny, minz, maxx, maxy, maxz) + remove_internal: remove internal voxels if True. Note there is some odd + behaviour if boundary voxels are occupied. + center: center model inside unit cube. + rotate_x: number of 90 degree ccw rotations around x-axis before + voxelizing. + rotate_z: number of 90 degree cw rotations around z-axis before + voxelizing. 
+ wireframe: also render the model in wireframe (helps with thin parts). + fit: only write voxels in the voxel bounding box. + block_id: when converting to schematic, use this as the block ID. + use_matrial_block_id: when converting from obj to schematic, parse + block ID from material spec "usemtl blockid_" (ids 1-255 only). + use_offscreen_pbuffer: use offscreen pbuffer instead of onscreen + window. + downsample_factor: downsample voxels by this factor in each dimension. + Must be a power of 2 or None. If not None/1 and `core dumped` + errors occur, try slightly adjusting dimensions. + downsample_threshold: when downsampling, destination voxel is on if + more than this number of voxels are on. + verbose : bool + If False, silences stdout/stderr from subprocess call. + binvox_path : str + Path to binvox executable. The default looks for an + executable called `binvox` on your `PATH`. + """ + ... + @property + def file_type(self): # -> str: + ... + def __call__(self, path, overwrite=...): # -> str: + """ + Create an voxel file in the same directory as model at `path`. + + Parameters + ------------ + path: string path to model file. Supported types: + 'ug' + 'obj' + 'off' + 'dfx' + 'xgl' + 'pov' + 'brep' + 'ply' + 'jot' (polygongs only) + overwrite: if False, checks the output path (head.file_type) is empty + before running. If True and a file exists, raises an IOError. + + Returns + ------------ + string path to voxel file. File type give by file_type in constructor. + """ + ... + +def voxelize_mesh(mesh, binvoxer=..., export_type=..., **binvoxer_kwargs): # -> VoxelGrid: + """ + Interface for voxelizing Trimesh object via the binvox tool. + + Implementation simply saved the mesh in the specified export_type then + runs the `Binvoxer.__call__` (using either the supplied `binvoxer` or + creating one via `binvoxer_kwargs`) + + Parameters + ------------ + mesh: Trimesh object to voxelize. + binvoxer: optional Binvoxer instance. 
+ export_type: file type to export mesh as temporarily for Binvoxer to + operate on. + **binvoxer_kwargs: kwargs for creating a new Binvoxer instance. If binvoxer + if provided, this must be empty. + + Returns + ------------ + `VoxelGrid` object resulting. + """ + ... + +_binvox_loaders = ... diff --git a/typings/trimesh/exchange/dae.pyi b/typings/trimesh/exchange/dae.pyi new file mode 100644 index 00000000..64b755c9 --- /dev/null +++ b/typings/trimesh/exchange/dae.pyi @@ -0,0 +1,74 @@ +""" +This type stub file was generated by pyright. +""" + +import numpy as np + +from .. import util + +_EYE = np.eye(4) + +def load_collada(file_obj, resolver=..., ignore_broken=..., **kwargs): # -> dict[str, Unknown]: + """ + Load a COLLADA (.dae) file into a list of trimesh kwargs. + + Parameters + ---------- + file_obj : file object + Containing a COLLADA file + resolver : trimesh.visual.Resolver or None + For loading referenced files, like texture images + ignore_broken: bool + Ignores broken references during loading: + [collada.common.DaeUnsupportedError, + collada.common.DaeBrokenRefError] + kwargs : ** + Passed to trimesh.Trimesh.__init__ + + Returns + ------- + loaded : list of dict + kwargs for Trimesh constructor + """ + ... + +def export_collada(mesh, **kwargs): # -> bytes: + """ + Export a mesh or a list of meshes as a COLLADA .dae file. + + Parameters + ----------- + mesh: Trimesh object or list of Trimesh objects + The mesh(es) to export. + + Returns + ----------- + export: str, string of COLLADA format output + """ + ... + +def load_zae(file_obj, resolver=..., **kwargs): # -> dict[str, Unknown]: + """ + Load a ZAE file, which is just a zipped DAE file. + + Parameters + ------------- + file_obj : file object + Contains ZAE data + resolver : trimesh.visual.Resolver + Resolver to load additional assets + kwargs : dict + Passed to load_collada + + Returns + ------------ + loaded : dict + Results of loading + """ + ... + +_collada_loaders = ... +_collada_exporters = ... 
+if util.has_module("collada"): ... +else: + _exc = ... diff --git a/typings/trimesh/exchange/export.pyi b/typings/trimesh/exchange/export.pyi new file mode 100644 index 00000000..de1abf62 --- /dev/null +++ b/typings/trimesh/exchange/export.pyi @@ -0,0 +1,87 @@ +""" +This type stub file was generated by pyright. +""" + +def export_mesh(mesh, file_obj, file_type=..., resolver=..., **kwargs): # -> dict[Unknown, Unknown]: + """ + Export a Trimesh object to a file- like object, or to a filename + + Parameters + ----------- + file_obj : str, file-like + Where should mesh be exported to + file_type : str or None + Represents file type (eg: 'stl') + resolver : None or trimesh.resolvers.Resolver + Resolver to write referenced assets to + + Returns + ---------- + exported : bytes or str + Result of exporter + """ + ... + +def export_dict64(mesh): # -> dict[str, Unknown]: + """ + Export a mesh as a dictionary, with data encoded + to base64. + """ + ... + +def export_dict(mesh, encoding=...): # -> dict[str, Unknown]: + """ + Export a mesh to a dict + + Parameters + ------------ + mesh : trimesh.Trimesh + Mesh to be exported + encoding : str or None + Such as 'base64' + + Returns + ------------- + export : dict + Data stored in dict + """ + ... + +def scene_to_dict(scene, use_base64=..., include_metadata=...): # -> dict[str, Unknown]: + """ + Export a Scene object as a dict. + + Parameters + ------------- + scene : trimesh.Scene + Scene object to be exported + + Returns + ------------- + as_dict : dict + Scene as a dict + """ + ... + +def export_scene( + scene, file_obj, file_type=..., resolver=..., **kwargs +): # -> dict[Unknown, Unknown] | dict[str, Unknown] | bytes | bytearray | memoryview | LiteralString | str | Any: + """ + Export a snapshot of the current scene. 
+ + Parameters + ---------- + file_obj : str, file-like, or None + File object to export to + file_type : str or None + What encoding to use for meshes + IE: dict, dict64, stl + + Returns + ---------- + export : bytes + Only returned if file_obj is None + """ + ... + +_mesh_exporters = ... diff --git a/typings/trimesh/exchange/gltf.pyi b/typings/trimesh/exchange/gltf.pyi new file mode 100644 index 00000000..e9be6ab9 --- /dev/null +++ b/typings/trimesh/exchange/gltf.pyi @@ -0,0 +1,184 @@ +""" +This type stub file was generated by pyright. +""" + +import numpy as np + +""" +gltf.py +------------ + +Provides GLTF 2.0 exports of trimesh.Trimesh objects +as GL_TRIANGLES, and trimesh.Path2D/Path3D as GL_LINES +""" +_magic = ... +_dtypes = ... +_dtypes_lookup = ... +_shapes = ... +_default_material = ... +_GL_LINES = ... +_GL_POINTS = ... +_GL_TRIANGLES = ... +_GL_STRIP = ... +_EYE = np.eye(4) +float32 = ... +uint32 = ... +uint8 = ... + +def export_gltf( + scene, + include_normals=..., + merge_buffers=..., + unitize_normals=..., + tree_postprocessor=..., + embed_buffers=..., + extension_webp=..., +): # -> dict[Unknown, Unknown]: + """ + Export a scene object as a GLTF directory. + + This puts each mesh into a separate file (i.e. a `buffer`) + as opposed to one larger file. + + Parameters + ----------- + scene : trimesh.Scene + Scene to be exported + include_normals : None or bool + Include vertex normals + merge_buffers : bool + Merge buffers into one blob. + unitize_normals + GLTF requires unit normals, however sometimes people + want to include non-unit normals for shading reasons. + resolver : trimesh.resolvers.Resolver + If passed will use to write each file. + tree_postprocesser : None or callable + Run this on the header tree before exiting. + embed_buffers : bool + Embed the buffer into JSON file as a base64 string in the URI + extension_webp : bool + Export textures as webP (using glTF's EXT_texture_webp extension). 
+ + Returns + ---------- + export : dict + Format: {file name : file data} + """ + ... + +def export_glb( + scene, + include_normals=..., + unitize_normals=..., + tree_postprocessor=..., + buffer_postprocessor=..., + extension_webp=..., +): # -> bytes: + """ + Export a scene as a binary GLTF (GLB) file. + + Parameters + ------------ + scene: trimesh.Scene + Input geometry + extras : JSON serializable + Will be stored in the extras field. + include_normals : bool + Include vertex normals in output file? + tree_postprocessor : func + Custom function to (in-place) post-process the tree + before exporting. + extension_webp : bool + Export textures as webP using EXT_texture_webp extension. + + Returns + ---------- + exported : bytes + Exported result in GLB 2.0 + """ + ... + +def load_gltf(file_obj=..., resolver=..., ignore_broken=..., merge_primitives=..., **mesh_kwargs): + """ + Load a GLTF file, which consists of a directory structure + with multiple files. + + Parameters + ------------- + file_obj : None or file-like + Object containing header JSON, or None + resolver : trimesh.visual.Resolver + Object which can be used to load other files by name + ignore_broken : bool + If there is a mesh we can't load and this + is True don't raise an exception but return + a partial result + merge_primitives : bool + If True, each GLTF 'mesh' will correspond + to a single Trimesh object + **mesh_kwargs : dict + Passed to mesh constructor + + Returns + -------------- + kwargs : dict + Arguments to create scene + """ + ... + +def load_glb(file_obj, resolver=..., ignore_broken=..., merge_primitives=..., **mesh_kwargs): + """ + Load a GLTF file in the binary GLB format into a trimesh.Scene. 
+ + Implemented from specification: + https://github.com/KhronosGroup/glTF/tree/master/specification/2.0 + + Parameters + ------------ + file_obj : file- like object + Containing GLB data + resolver : trimesh.visual.Resolver + Object which can be used to load other files by name + merge_primitives : bool + If True, each GLTF 'mesh' will correspond to a + single Trimesh object. + + Returns + ------------ + kwargs : dict + Kwargs to instantiate a trimesh.Scene + """ + ... + +def validate(header): # -> None: + """ + Validate a GLTF 2.0 header against the schema. + + Returns result from: + `jsonschema.validate(header, schema=get_schema())` + + Parameters + ------------- + header : dict + Populated GLTF 2.0 header + + Raises + -------------- + err : jsonschema.exceptions.ValidationError + If the tree is an invalid GLTF2.0 header + """ + ... + +def get_schema(): # -> list[Unknown] | dict[Unknown, Unknown] | Any: + """ + Get a copy of the GLTF 2.0 schema with references resolved. + + Returns + ------------ + schema : dict + A copy of the GLTF 2.0 schema without external references. + """ + ... + +_gltf_loaders = ... diff --git a/typings/trimesh/exchange/load.pyi b/typings/trimesh/exchange/load.pyi new file mode 100644 index 00000000..00a62f8d --- /dev/null +++ b/typings/trimesh/exchange/load.pyi @@ -0,0 +1,180 @@ +""" +This type stub file was generated by pyright. +""" + +def mesh_formats(): # -> set[Unknown]: + """ + Get a list of mesh formats available to load. + + Returns + ----------- + loaders : list + Extensions of available mesh loaders, + i.e. 'stl', 'ply', etc. + """ + ... + +def available_formats(): # -> set[Unknown]: + """ + Get a list of all available loaders + + Returns + ----------- + loaders : list + Extensions of available loaders + i.e. 'stl', 'ply', 'dxf', etc. + """ + ... 
+ +def load( + file_obj, file_type=..., resolver=..., force=..., **kwargs +): # -> Geometry | list[Unknown] | Any | Scene | Path: + """ + Load a mesh or vectorized path into objects like + Trimesh, Path2D, Path3D, Scene + + Parameters + ----------- + file_obj : str, or file- like object + The source of the data to be loadeded + file_type: str + What kind of file type do we have (eg: 'stl') + resolver : trimesh.visual.Resolver + Object to load referenced assets like materials and textures + force : None or str + For 'mesh': try to coerce scenes into a single mesh + For 'scene': try to coerce everything into a scene + kwargs : dict + Passed to geometry __init__ + + Returns + --------- + geometry : Trimesh, Path2D, Path3D, Scene + Loaded geometry as trimesh classes + """ + ... + +def load_mesh(file_obj, file_type=..., resolver=..., **kwargs): # -> list[Unknown]: + """ + Load a mesh file into a Trimesh object + + Parameters + ----------- + file_obj : str or file object + File name or file with mesh data + file_type : str or None + Which file type, e.g. 'stl' + kwargs : dict + Passed to Trimesh constructor + + Returns + ---------- + mesh : trimesh.Trimesh or trimesh.Scene + Loaded geometry data + """ + ... + +def load_compressed(file_obj, file_type=..., resolver=..., mixed=..., **kwargs): + """ + Given a compressed archive load all the geometry that + we can from it. + + Parameters + ---------- + file_obj : open file-like object + Containing compressed data + file_type : str + Type of the archive file + mixed : bool + If False, for archives containing both 2D and 3D + data will only load the 3D data into the Scene. + + Returns + ---------- + scene : trimesh.Scene + Geometry loaded in to a Scene object + """ + ... + +def load_remote(url, **kwargs): # -> Geometry | list[Unknown] | Any | Scene | Path: + """ + Load a mesh at a remote URL into a local trimesh object. 
+ + This must be called explicitly rather than automatically + from trimesh.load to ensure users don't accidentally make + network requests. + + Parameters + ------------ + url : string + URL containing mesh file + **kwargs : passed to `load` + + Returns + ------------ + loaded : Trimesh, Path, Scene + Loaded result + """ + ... + +def load_kwargs(*args, **kwargs): # -> Scene: + """ + Load geometry from a properly formatted dict or kwargs + """ + ... + +def parse_file_args(file_obj, file_type, resolver=..., **kwargs): + """ + Given a file_obj and a file_type try to magically convert + arguments to a file-like object and a lowercase string of + file type. + + Parameters + ----------- + file_obj : str + if string represents a file path, returns: + file_obj: an 'rb' opened file object of the path + file_type: the extension from the file path + + if string is NOT a path, but has JSON-like special characters: + file_obj: the same string passed as file_obj + file_type: set to 'json' + + if string is a valid-looking URL + file_obj: an open 'rb' file object with retrieved data + file_type: from the extension + + if string is none of those: + raise ValueError as we can't do anything with input + + if file like object: + ValueError will be raised if file_type is None + file_obj: same as input + file_type: same as input + + if other object: like a shapely.geometry.Polygon, etc: + file_obj: same as input + file_type: if None initially, set to the class name + (in lower case), otherwise passed through + + file_type : str + type of file and handled according to above + + Returns + ----------- + file_obj : file-like object + Contains data + file_type : str + Lower case of the type of file (eg 'stl', 'dae', etc) + metadata : dict + Any metadata gathered + opened : bool + Did we open the file or not + resolver : trimesh.visual.Resolver + Resolver to load other assets + """ + ... + +compressed_loaders = ... +mesh_loaders = ... +voxel_loaders = ... 
diff --git a/typings/trimesh/exchange/misc.pyi b/typings/trimesh/exchange/misc.pyi new file mode 100644 index 00000000..37d44bce --- /dev/null +++ b/typings/trimesh/exchange/misc.pyi @@ -0,0 +1,55 @@ +""" +This type stub file was generated by pyright. +""" + +def load_dict(data, **kwargs): # -> dict[Unknown, Unknown]: + """ + Load multiple input types into kwargs for a Trimesh constructor. + Tries to extract keys: + 'faces' + 'vertices' + 'face_normals' + 'vertex_normals' + + Parameters + ---------- + data : dict + accepts multiple forms + -dict: has keys for vertices and faces as (n,3) numpy arrays + -dict: has keys for vertices/faces (n,3) arrays encoded as dicts/base64 + with trimesh.util.array_to_encoded/trimesh.util.encoded_to_array + -str: json blob as dict with either straight array or base64 values + -file object: json blob of dict + file_type: not used + + Returns + ----------- + loaded: dict with keys + -vertices: (n,3) float + -faces: (n,3) int + -face_normals: (n,3) float (optional) + """ + ... + +def load_meshio(file_obj, file_type=..., **kwargs): # -> dict[Unknown, Unknown]: + """ + Load a meshio-supported file into the kwargs for a Trimesh + constructor. + + + Parameters + ---------- + file_obj : file object + Contains a meshio file + file_type : str + File extension, aka 'vtk' + + Returns + ---------- + loaded : dict + kwargs for Trimesh constructor + """ + ... + +_misc_loaders = ... +_meshio_loaders = ... diff --git a/typings/trimesh/exchange/obj.pyi b/typings/trimesh/exchange/obj.pyi new file mode 100644 index 00000000..bcca0d5b --- /dev/null +++ b/typings/trimesh/exchange/obj.pyi @@ -0,0 +1,103 @@ +""" +This type stub file was generated by pyright. +""" + +def load_obj(file_obj, resolver=..., group_material=..., skip_materials=..., maintain_order=..., **kwargs): + """ + Load a Wavefront OBJ file into kwargs for a trimesh.Scene + object. 
+ + Parameters + -------------- + file_obj : file like object + Contains OBJ data + resolver : trimesh.visual.resolvers.Resolver + Allow assets such as referenced textures and + material files to be loaded + group_material : bool + Group faces that share the same material + into the same mesh. + skip_materials : bool + Don't load any materials. + maintain_order : bool or None + Do not reorder faces or vertices which may result + in visual artifacts. + + Returns + ------------- + kwargs : dict + Keyword arguments which can be loaded by + trimesh.exchange.load.load_kwargs into a trimesh.Scene + """ + ... + +def parse_mtl(mtl, resolver=...): # -> dict[Unknown, Unknown]: + """ + Parse a loaded MTL file. + + Parameters + ------------- + mtl : str or bytes + Data from an MTL file + resolver : trimesh.Resolver + Fetch assets by name from file system, web, or other + + Returns + ------------ + mtllibs : list of dict + Each dict has keys: newmtl, map_Kd, Kd + """ + ... + +def export_obj( + mesh, + include_normals=..., + include_color=..., + include_texture=..., + return_texture=..., + write_texture=..., + resolver=..., + digits=..., + mtl_name=..., + header=..., +): + """ + Export a mesh as a Wavefront OBJ file. + TODO: scenes with textured meshes + + Parameters + ----------- + mesh : trimesh.Trimesh + Mesh to be exported + include_normals : Optional[bool] + Include vertex normals in export. If None + will only be included if vertex normals are in cache. 
+ include_color : bool + Include vertex color in export + include_texture : bool + Include `vt` texture in file text + return_texture : bool + If True, return a dict with texture files + write_texture : bool + If True and a writable resolver is passed + write the referenced texture files with resolver + resolver : None or trimesh.resolvers.Resolver + Resolver which can write referenced text objects + digits : int + Number of digits to include for floating point + mtl_name : None or str + If passed, the file name of the MTL file. + header : str or None + Header string for top of file or None for no header. + + Returns + ----------- + export : str + OBJ format output + texture : dict + Contains files that need to be saved in the same + directory as the exported mesh: {file name : bytes} + """ + ... + +_obj_loaders = ... diff --git a/typings/trimesh/exchange/off.pyi b/typings/trimesh/exchange/off.pyi new file mode 100644 index 00000000..e3ac9bee --- /dev/null +++ b/typings/trimesh/exchange/off.pyi @@ -0,0 +1,40 @@ +""" +This type stub file was generated by pyright. +""" + +def load_off(file_obj, **kwargs): # -> dict[str, Unknown]: + """ + Load an OFF file into the kwargs for a Trimesh constructor. + + Parameters + ---------- + file_obj : file object + Contains an OFF file + + Returns + ---------- + loaded : dict + kwargs for Trimesh constructor + """ + ... + +def export_off(mesh, digits=...): # -> str: + """ + Export a mesh as an OFF file, a simple text format + + Parameters + ----------- + mesh : trimesh.Trimesh + Geometry to export + digits : int + Number of digits to include on floats + + Returns + ----------- + export : str + OFF format output + """ + ... + +_off_loaders = ... +_off_exporters = ... diff --git a/typings/trimesh/exchange/openctm.pyi b/typings/trimesh/exchange/openctm.pyi new file mode 100644 index 00000000..8c688afa --- /dev/null +++ b/typings/trimesh/exchange/openctm.pyi @@ -0,0 +1,31 @@ +""" +This type stub file was generated by pyright. 
+""" + +import os + +_ctm_loaders = ... +_ctm_lib_name = ... +if os.name == "nt": ... +else: + _ctm_loader = ... +if _ctm_lib_name is None or len(_ctm_lib_name) == 0: ... + +def load_ctm(file_obj, file_type=..., **kwargs): # -> dict[str, Unknown]: + """ + Load OpenCTM files from a file object. + + Parameters + ---------- + file_obj : file object + Open file-like object with CTM data. + + Returns + ---------- + loaded : dict + Keyword arguments for the Trimesh constructor + """ + ... + +if _ctm_lib_name is not None: + _ctm_loaders = ... diff --git a/typings/trimesh/exchange/ply.pyi b/typings/trimesh/exchange/ply.pyi new file mode 100644 index 00000000..cd709a12 --- /dev/null +++ b/typings/trimesh/exchange/ply.pyi @@ -0,0 +1,94 @@ +""" +This type stub file was generated by pyright. +""" + +_dtypes = ... +_inverse_dtypes = ... + +def load_ply(file_obj, resolver=..., fix_texture=..., prefer_color=..., *args, **kwargs): + """ + Load a PLY file from an open file object. + + Parameters + --------- + file_obj : an open file- like object + Source data, ASCII or binary PLY + resolver : trimesh.visual.resolvers.Resolver + Object which can resolve assets + fix_texture : bool + If True, will re- index vertices and faces + so vertices with different UV coordinates + are disconnected. + prefer_color : None, 'vertex', or 'face' + Which kind of color to prefer if both defined + + Returns + --------- + mesh_kwargs : dict + Data which can be passed to + Trimesh constructor, eg: a = Trimesh(**mesh_kwargs) + """ + ... + +def export_ply(mesh, encoding=..., vertex_normal=..., include_attributes=...): + """ + Export a mesh in the PLY format. + + Parameters + ---------- + mesh : trimesh.Trimesh + Mesh to export. + encoding : str + PLY encoding: 'ascii' or 'binary_little_endian' + vertex_normal : None or include vertex normals + + Returns + ---------- + export : bytes of result + """ + ... 
+ +def export_draco(mesh, bits=...): # -> bytes: + """ + Export a mesh using Google's Draco compressed format. + + Only works if draco_encoder is in your PATH: + https://github.com/google/draco + + Parameters + ---------- + mesh : Trimesh object + Mesh to export + bits : int + Bits of quantization for position + tol.merge=1e-8 is roughly 25 bits + + Returns + ---------- + data : str or bytes + DRC file bytes + """ + ... + +def load_draco(file_obj, **kwargs): + """ + Load a mesh from Google's Draco format. + + Parameters + ---------- + file_obj : file- like object + Contains data + + Returns + ---------- + kwargs : dict + Keyword arguments to construct a Trimesh object + """ + ... + +_ply_loaders = ... +_ply_exporters = ... +draco_encoder = ... +draco_decoder = ... +if draco_decoder is not None: ... +if draco_encoder is not None: ... diff --git a/typings/trimesh/exchange/stl.pyi b/typings/trimesh/exchange/stl.pyi new file mode 100644 index 00000000..941bdbac --- /dev/null +++ b/typings/trimesh/exchange/stl.pyi @@ -0,0 +1,95 @@ +""" +This type stub file was generated by pyright. +""" + +class HeaderError(Exception): ... + +_stl_dtype = ... +_stl_dtype_header = ... + +def load_stl(file_obj, **kwargs): # -> dict[str, dict[Unknown, Unknown]] | dict[str, Unknown]: + """ + Load an STL file from a file object. + + Parameters + ---------- + file_obj : open file-like object + Containing STL data + + Returns + ---------- + loaded : dict + kwargs for a Trimesh constructor with keys: + vertices: (n,3) float, vertices + faces: (m,3) int, indexes of vertices + face_normals: (m,3) float, normal vector of each face + """ + ... + +def load_stl_binary(file_obj): # -> dict[str, dict[Unknown, Unknown]] | dict[str, Unknown]: + """ + Load a binary STL file from a file object. 
+ + Parameters + ---------- + file_obj : open file- like object + Containing STL data + + Returns + ---------- + loaded: kwargs for a Trimesh constructor with keys: + vertices: (n,3) float, vertices + faces: (m,3) int, indexes of vertices + face_normals: (m,3) float, normal vector of each face + """ + ... + +def load_stl_ascii(file_obj): # -> dict[str, dict[Unknown, Unknown]]: + """ + Load an ASCII STL file from a file object. + + Parameters + ---------- + file_obj : open file- like object + Containing input data + + Returns + ---------- + loaded : dict + kwargs for a Trimesh constructor with keys: + vertices: (n,3) float, vertices + faces: (m,3) int, indexes of vertices + face_normals: (m,3) float, normal vector of each face + """ + ... + +def export_stl(mesh): # -> bytes: + """ + Convert a Trimesh object into a binary STL file. + + Parameters + --------- + mesh: Trimesh object + + Returns + --------- + export: bytes, representing mesh in binary STL form + """ + ... + +def export_stl_ascii(mesh): # -> str: + """ + Convert a Trimesh object into an ASCII STL file. + + Parameters + --------- + mesh : trimesh.Trimesh + + Returns + --------- + export : str + Mesh represented as an ASCII STL file + """ + ... + +_stl_loaders = ... diff --git a/typings/trimesh/exchange/threedxml.pyi b/typings/trimesh/exchange/threedxml.pyi new file mode 100644 index 00000000..413925e5 --- /dev/null +++ b/typings/trimesh/exchange/threedxml.pyi @@ -0,0 +1,39 @@ +""" +This type stub file was generated by pyright. +""" + +""" +threedxml.py +------------- + +Load 3DXML files, a scene format from Dassault products like Solidworks, Abaqus, Catia +""" + +def load_3DXML(file_obj, *args, **kwargs): + """ + Load a 3DXML scene into kwargs. 3DXML is a CAD format + that can be exported from Solidworks + + Parameters + ------------ + file_obj : file object + Open and containing 3DXML data + + Returns + ----------- + kwargs : dict + Can be passed to trimesh.exchange.load.load_kwargs + """ + ... 
+ +def print_element(element): + """ + Pretty-print an lxml.etree element. + + Parameters + ------------ + element : etree element + """ + ... + +_threedxml_loaders = ... diff --git a/typings/trimesh/exchange/threemf.pyi b/typings/trimesh/exchange/threemf.pyi new file mode 100644 index 00000000..d936b8de --- /dev/null +++ b/typings/trimesh/exchange/threemf.pyi @@ -0,0 +1,44 @@ +""" +This type stub file was generated by pyright. +""" + +def load_3MF(file_obj, postprocess=..., **kwargs): # -> dict[str, Unknown]: + """ + Load a 3MF formatted file into a Trimesh scene. + + Parameters + ------------ + file_obj : file-like + Contains 3MF formatted data + + Returns + ------------ + kwargs : dict + Constructor arguments for `trimesh.Scene` + """ + ... + +def export_3MF(mesh, batch_size=..., compression=..., compresslevel=...): # -> bytes: + """ + Converts a Trimesh object into a 3MF file. + + Parameters + --------- + mesh trimesh.trimesh + Mesh or Scene to export. + batch_size : int + Number of nodes to write per batch. + compression : zipfile.ZIP_* + Type of zip compression to use in this export. + compresslevel : int + For Python > 3.7 specify the 0-9 compression level. + + Returns + --------- + export : bytes + Represents geometry as a 3MF file. + """ + ... + +_three_loaders = ... +_3mf_exporters = ... diff --git a/typings/trimesh/exchange/urdf.pyi b/typings/trimesh/exchange/urdf.pyi new file mode 100644 index 00000000..c69c2727 --- /dev/null +++ b/typings/trimesh/exchange/urdf.pyi @@ -0,0 +1,23 @@ +""" +This type stub file was generated by pyright. +""" + +def export_urdf(mesh, directory, scale=..., color=..., **kwargs): + """ + Convert a Trimesh object into a URDF package for physics + simulation. This breaks the mesh into convex pieces and + writes them to the same directory as the .urdf file. 
+ + Parameters + --------- + mesh : trimesh.Trimesh + Input geometry + directory : str + The directory path for the URDF package + + Returns + --------- + mesh : Trimesh + Multi-body mesh containing convex decomposition + """ + ... diff --git a/typings/trimesh/exchange/xaml.pyi b/typings/trimesh/exchange/xaml.pyi new file mode 100644 index 00000000..b04cba29 --- /dev/null +++ b/typings/trimesh/exchange/xaml.pyi @@ -0,0 +1,28 @@ +""" +This type stub file was generated by pyright. +""" + +""" +xaml.py +--------- + +Load 3D XAMl files, an export option from Solidworks. +""" + +def load_XAML(file_obj, *args, **kwargs): # -> dict[Unknown, Unknown]: + """ + Load a 3D XAML file. + + Parameters + ---------- + file_obj : file object + Open XAML file. + + Returns + ---------- + result : dict + Kwargs for a Trimesh constructor. + """ + ... + +_xaml_loaders = ... diff --git a/typings/trimesh/exchange/xyz.pyi b/typings/trimesh/exchange/xyz.pyi new file mode 100644 index 00000000..f2719fd3 --- /dev/null +++ b/typings/trimesh/exchange/xyz.pyi @@ -0,0 +1,45 @@ +""" +This type stub file was generated by pyright. +""" + +def load_xyz(file_obj, delimiter=..., **kwargs): # -> dict[str, Unknown]: + """ + Load an XYZ file into a PointCloud. + + Parameters + ------------ + file_obj : an open file-like object + Source data, ASCII XYZ + delimiter : None or string + Characters used to separate the columns of the file + If not passed will use whitespace or commas + + Returns + ---------- + kwargs : dict + Data which can be passed to PointCloud constructor + """ + ... + +def export_xyz(cloud, write_colors=..., delimiter=...): # -> str: + """ + Export a PointCloud object to an XYZ format string. + + Parameters + ------------- + cloud : trimesh.PointCloud + Geometry in space + write_colors : bool + Write colors or not + delimiter : None or str + What to separate columns with + + Returns + -------------- + export : str + Pointcloud in XYZ format + """ + ... + +_xyz_loaders = ... 
+_xyz_exporters = ... diff --git a/typings/trimesh/geometry.pyi b/typings/trimesh/geometry.pyi new file mode 100644 index 00000000..4a9ccdea --- /dev/null +++ b/typings/trimesh/geometry.pyi @@ -0,0 +1,221 @@ +""" +This type stub file was generated by pyright. +""" + +def plane_transform(origin, normal): # -> tuple[NDArray[float64], Any] | NDArray[float64]: + """ + Given the origin and normal of a plane find the transform + that will move that plane to be coplanar with the XY plane. + + Parameters + ---------- + origin : (3,) float + Point that lies on the plane + normal : (3,) float + Vector that points along normal of plane + + Returns + --------- + transform: (4,4) float + Transformation matrix to move points onto XY plane + """ + ... + +def align_vectors(a, b, return_angle=...): # -> tuple[NDArray[float64], Any] | NDArray[float64]: + """ + Find the rotation matrix that transforms one 3D vector + to another. + + Parameters + ------------ + a : (3,) float + Unit vector + b : (3,) float + Unit vector + return_angle : bool + Return the angle between vectors or not + + Returns + ------------- + matrix : (4, 4) float + Homogeneous transform to rotate from `a` to `b` + angle : float + If `return_angle` angle in radians between `a` and `b` + + """ + ... + +def faces_to_edges(faces, return_index=...): # -> tuple[Unknown, ndarray[Any, dtype[signedinteger[Any]]]]: + """ + Given a list of faces (n,3), return a list of edges (n*3,2) + + Parameters + ----------- + faces : (n, 3) int + Vertex indices representing faces + + Returns + ----------- + edges : (n*3, 2) int + Vertex indices representing edges + """ + ... + +def vector_angle(pairs): # -> NDArray[Any] | Any: + """ + Find the angles between pairs of unit vectors. + + Parameters + ---------- + pairs : (n, 2, 3) float + Unit vector pairs + + Returns + ---------- + angles : (n,) float + Angles between vectors in radians + """ + ... 
+ +def triangulate_quads(quads, dtype=...): # -> NDArray[int64]: + """ + Given an array of quad faces return them as triangle faces, + also handles pure triangles and mixed triangles and quads. + + Parameters + ----------- + quads: (n, 4) int + Vertex indices of quad faces. + + Returns + ----------- + faces : (m, 3) int + Vertex indices of triangular faces.c + """ + ... + +def vertex_face_indices(vertex_count, faces, faces_sparse): + """ + Find vertex face indices from the faces array of vertices + + Parameters + ----------- + vertex_count : int + The number of vertices faces refer to + faces : (n, 3) int + List of vertex indices + faces_sparse : scipy.sparse.COO + Sparse matrix + + Returns + ----------- + vertex_faces : (vertex_count, ) int + Face indices for every vertex + Array padded with -1 in each row for all vertices with fewer + face indices than the max number of face indices. + """ + ... + +def mean_vertex_normals(vertex_count, faces, face_normals, sparse=..., **kwargs): # -> tuple[Unknown, Any]: + """ + Find vertex normals from the mean of the faces that contain + that vertex. + + Parameters + ----------- + vertex_count : int + The number of vertices faces refer to + faces : (n, 3) int + List of vertex indices + face_normals : (n, 3) float + Normal vector for each face + + Returns + ----------- + vertex_normals : (vertex_count, 3) float + Normals for every vertex + Vertices unreferenced by faces will be zero. + """ + ... + +def weighted_vertex_normals(vertex_count, faces, face_normals, face_angles, use_loop=...): # -> tuple[Unknown, Any]: + """ + Compute vertex normals from the faces that contain that vertex. + The contibution of a face's normal to a vertex normal is the + ratio of the corner-angle in which the vertex is, with respect + to the sum of all corner-angles surrounding the vertex. + + Grit Thuerrner & Charles A. 
Wuethrich (1998) + Computing Vertex Normals from Polygonal Facets, + Journal of Graphics Tools, 3:1, 43-46 + + Parameters + ----------- + vertex_count : int + The number of vertices faces refer to + faces : (n, 3) int + List of vertex indices + face_normals : (n, 3) float + Normal vector for each face + face_angles : (n, 3) float + Angles at each vertex in the face + + Returns + ----------- + vertex_normals : (vertex_count, 3) float + Normals for every vertex + Vertices unreferenced by faces will be zero. + """ + ... + +def index_sparse(columns, indices, data=..., dtype=...): # -> coo_matrix: + """ + Return a sparse matrix for which vertices are contained in which faces. + A data vector can be passed which is then used instead of booleans + + Parameters + ------------ + columns : int + Number of columns, usually number of vertices + indices : (m, d) int + Usually mesh.faces + + Returns + --------- + sparse: scipy.sparse.coo_matrix of shape (columns, len(faces)) + dtype is boolean + + Examples + ---------- + In [1]: sparse = faces_sparse(len(mesh.vertices), mesh.faces) + + In [2]: sparse.shape + Out[2]: (12, 20) + + In [3]: mesh.faces.shape + Out[3]: (20, 3) + co + In [4]: mesh.vertices.shape + Out[4]: (12, 3) + + In [5]: dense = sparse.toarray().astype(int) + + In [6]: dense + Out[6]: + array([[1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0], + [0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0], + [0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1], + [1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0], + [0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0], + [0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 
0, 0, 0, 0, 1, 1], + [0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1]]) + + In [7]: dense.sum(axis=0) + Out[7]: array([3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3]) + """ + ... diff --git a/typings/trimesh/graph.pyi b/typings/trimesh/graph.pyi new file mode 100644 index 00000000..c77e0fb4 --- /dev/null +++ b/typings/trimesh/graph.pyi @@ -0,0 +1,449 @@ +""" +This type stub file was generated by pyright. +""" + +from .typed import Optional + +""" +graph.py +------------- + +Deal with graph operations. Primarily deal with graphs in (n, 2) +edge list form, and abstract the backend graph library being used. + +Currently uses networkx or scipy.sparse.csgraph backend. +""" + +def face_adjacency( + faces=..., mesh=..., return_edges=... +): # -> tuple[ndarray[Any, dtype[signedinteger[Any]]] | Any | Unknown, ndarray[Any, dtype[Any]] | Unknown | Any] | ndarray[Any, dtype[signedinteger[Any]]] | Any: + """ + Returns an (n, 2) list of face indices. + Each pair of faces in the list shares an edge, making them adjacent. + + + Parameters + ----------- + faces : (n, 3) int, or None + Vertex indices representing triangles + mesh : Trimesh object + If passed will used cached edges + instead of generating from faces + return_edges : bool + Return the edges shared by adjacent faces + + Returns + ---------- + adjacency : (m, 2) int + Indexes of faces that are adjacent + edges: (m, 2) int + Only returned if return_edges is True + Indexes of vertices which make up the + edges shared by the adjacent faces + + Examples + ---------- + This is useful for lots of things such as finding + face- connected components: + ```python + >>> graph = nx.Graph() + >>> graph.add_edges_from(mesh.face_adjacency) + >>> groups = nx.connected_components(graph_connected) + ``` + """ + ... + +def face_neighborhood(mesh): # -> NDArray[Unknown]: + """ + Find faces that share a vertex i.e. 'neighbors' faces. 
+ Relies on the fact that an adjacency matrix at a power p + contains the number of paths of length p connecting two nodes. + Here we take the bipartite graph from mesh.faces_sparse to the power 2. + The non-zeros are the faces connected by one vertex. + + Returns + ---------- + neighborhood : (n, 2) int + Pairs of faces which share a vertex + """ + ... + +def face_adjacency_unshared(mesh): # -> NDArray[signedinteger[Any]]: + """ + Return the vertex index of the two vertices not in the shared + edge between two adjacent faces + + Parameters + ---------- + mesh : Trimesh object + Input mesh + + Returns + ----------- + vid_unshared : (len(mesh.face_adjacency), 2) int + Indexes of mesh.vertices + for degenerate faces without exactly + one unshared vertex per face it will be -1 + """ + ... + +def face_adjacency_radius(mesh): # -> tuple[NDArray[floating[Any]], Any]: + """ + Compute an approximate radius between adjacent faces. + + Parameters + -------------- + mesh : trimesh.Trimesh + + Returns + ------------- + radii : (len(self.face_adjacency),) float + Approximate radius between faces + Parallel faces will have a value of np.inf + span : (len(self.face_adjacency),) float + Perpendicular projection distance of two + unshared vertices onto the shared edge + """ + ... + +def vertex_adjacency_graph(mesh): # -> Graph | None: + """ + Returns a networkx graph representing the vertices and + their connections in the mesh. + + Parameters + ---------- + mesh : Trimesh object + + Returns + --------- + graph : networkx.Graph + Graph representing vertices and edges between + them where vertices are nodes and edges are edges + + Examples + ---------- + This is useful for getting nearby vertices for a given vertex, + potentially for some simple smoothing techniques. + >>> graph = mesh.vertex_adjacency_graph + >>> graph.neighbors(0) + > [1, 3, 4] + """ + ... 
+ +def shared_edges(faces_a, faces_b): # -> ndarray[Any, dtype[int64]]: + """ + Given two sets of faces, find the edges which are in both sets. + + Parameters + --------- + faces_a : (n, 3) int + Array of faces + faces_b : (m, 3) int + Array of faces + + Returns + --------- + shared : (p, 2) int + Edges shared between faces + """ + ... + +def facets(mesh, engine=...): # -> list[Unknown] | Any | list[ndarray[Any, dtype[int64]]] | list[list[Unknown]]: + """ + Find the list of parallel adjacent faces. + + Parameters + ----------- + mesh : trimesh.Trimesh + engine : str + Which graph engine to use: + ('scipy', 'networkx') + + Returns + --------- + facets : sequence of (n,) int + Groups of face indexes of + parallel adjacent faces. + """ + ... + +def split(mesh, only_watertight=..., adjacency=..., engine=..., **kwargs): + """ + Split a mesh into multiple meshes from face + connectivity. + + If only_watertight is true it will only return + watertight meshes and will attempt to repair + single triangle or quad holes. + + Parameters + ---------- + mesh : trimesh.Trimesh + only_watertight: bool + Only return watertight components + adjacency : (n, 2) int + Face adjacency to override full mesh + engine : str or None + Which graph engine to use + + Returns + ---------- + meshes : (m,) trimesh.Trimesh + Results of splitting + """ + ... + +def connected_components( + edges, min_len=..., nodes=..., engine=... +): # -> list[Unknown] | Any | list[ndarray[Any, dtype[int64]]] | list[list[Unknown]]: + """ + Find groups of connected nodes from an edge list. + + Parameters + ----------- + edges : (n, 2) int + Edges between nodes + nodes : (m, ) int or None + List of nodes that exist + min_len : int + Minimum length of a component group to return + engine : str or None + Which graph engine to use (None for automatic): + (None, 'networkx', 'scipy') + + + Returns + ----------- + components : (n,) sequence of (*,) int + Nodes which are connected + """ + ... 
+ +def connected_component_labels(edges, node_count=...): + """ + Label graph nodes from an edge list, using scipy.sparse.csgraph + + Parameters + ----------- + edges : (n, 2) int + Edges of a graph + node_count : int, or None + The largest node in the graph. + + Returns + ---------- + labels : (node_count,) int + Component labels for each node + """ + ... + +def split_traversal(traversal, edges, edges_hash=...): # -> list[NDArray[int64]] | list[NDArray[Any]]: + """ + Given a traversal as a list of nodes, split the traversal + if a sequential index pair is not in the given edges. + + Parameters + -------------- + edges : (n, 2) int + Graph edge indexes + traversal : (m,) int + Traversal through edges + edge_hash : (n,) + Edges sorted on axis=1 and + passed to grouping.hashable_rows + + Returns + --------------- + split : sequence of (p,) int + """ + ... + +def fill_traversals(traversals, edges, edges_hash=...): # -> NDArray[int64] | list[Unknown]: + """ + Convert a traversal of a list of edges into a sequence of + traversals where every pair of consecutive node indexes + is an edge in a passed edge list + + Parameters + ------------- + traversals : sequence of (m,) int + Node indexes of traversals of a graph + edges : (n, 2) int + Pairs of connected node indexes + edges_hash : None, or (n,) int + Edges sorted along axis 1 then hashed + using grouping.hashable_rows + + Returns + -------------- + splits : sequence of (p,) int + Node indexes of connected traversals + """ + ... + +def traversals(edges, mode=...): # -> list[Unknown]: + """ + Given an edge list generate a sequence of ordered depth + first search traversals using scipy.csgraph routines. + + Parameters + ------------ + edges : (n, 2) int + Undirected edges of a graph + mode : str + Traversal type, 'bfs' or 'dfs' + + Returns + ----------- + traversals : (m,) sequence of (p,) int + Ordered DFS or BFS traversals of the graph. + """ + ... 
+ +def edges_to_coo(edges, count=..., data=...): # -> coo_matrix: + """ + Given an edge list, return a boolean scipy.sparse.coo_matrix + representing the edges in matrix form. + + Parameters + ------------ + edges : (n, 2) int + Edges of a graph + count : int + The total number of nodes in the graph + if None: count = edges.max() + 1 + data : (n,) any + Assign data to each edge, if None will + be bool True for each specified edge + + Returns + ------------ + matrix: (count, count) scipy.sparse.coo_matrix + Sparse COO + """ + ... + +def neighbors(edges, max_index=..., directed=...): # -> list[list[Unknown]]: + """ + Find the neighbors for each node in an edgelist graph. + + TODO : re-write this with sparse matrix operations + + Parameters + ------------ + edges : (n, 2) int + Connected nodes + directed : bool + If True, only connect edges in one direction + + Returns + --------- + neighbors : sequence + Vertex index corresponds to set of other vertex indices + """ + ... + +def smoothed(*args, **kwargs): + """ + DEPRECATED: use `trimesh.graph.smooth_shade(mesh, ...)` + """ + ... + +def smooth_shade(mesh, angle: Optional[float] = ..., facet_minarea: Optional[float] = ...): + """ + Return a non-watertight version of the mesh which + will render nicely with smooth shading by + disconnecting faces at sharp angles to each other. + + Parameters + ----------- + mesh : trimesh.Trimesh + Source geometry + angle : float or None + Angle in radians face pairs with angles + smaller than this will appear smoothed + facet_minarea : float or None + Minimum area fraction to consider + IE for `facets_minarea=25` only facets larger + than `mesh.area / 25` will be considered. + + Returns + --------- + smooth : trimesh.Trimesh + Geometry with disconnected face patches + """ + ... 
+ +def is_watertight(edges, edges_sorted=...): # -> tuple[bool, bool]: + """ + Parameters + ----------- + edges : (n, 2) int + List of vertex indices + edges_sorted : (n, 2) int + Pass vertex indices sorted on axis 1 as a speedup + + Returns + --------- + watertight : boolean + Whether every edge is shared by an even + number of faces + winding : boolean + Whether every shared edge is reversed + """ + ... + +def graph_to_svg(graph): # -> bytes: + """ + Turn a networkx graph into an SVG string + using graphviz `dot`. + + Parameters + ---------- + graph: networkx graph + + Returns + --------- + svg: string, pictoral layout in SVG format + """ + ... + +def multigraph_paths(G, source, cutoff=...): # -> list[Unknown]: + """ + For a networkx MultiDiGraph, find all paths from a source node + to leaf nodes. This function returns edge instance numbers + in addition to nodes, unlike networkx.all_simple_paths. + + Parameters + --------------- + G : networkx.MultiDiGraph + Graph to evaluate + source : hashable + Node to start traversal at + cutoff : int + Number of nodes to visit + If None will visit all nodes + + Returns + ---------- + traversals : (n,) list of [(node, edge instance index), ] paths + Traversals of the multigraph + """ + ... + +def multigraph_collect(G, traversal, attrib=...): # -> list[Unknown]: + """ + Given a MultiDiGraph traversal, collect attributes along it. + + Parameters + ------------- + G: networkx.MultiDiGraph + traversal: (n) list of (node, instance) tuples + attrib: dict key, name to collect. If None, will return all + + Returns + ------------- + collected: (len(traversal) - 1) list of attributes + """ + ... diff --git a/typings/trimesh/grouping.pyi b/typings/trimesh/grouping.pyi new file mode 100644 index 00000000..2a19be04 --- /dev/null +++ b/typings/trimesh/grouping.pyi @@ -0,0 +1,417 @@ +""" +This type stub file was generated by pyright. +""" + +""" +grouping.py +------------- + +Functions for grouping values and rows. 
+""" + +def merge_vertices(mesh, merge_tex=..., merge_norm=..., digits_vertex=..., digits_norm=..., digits_uv=...): # -> None: + """ + Removes duplicate vertices, grouped by position and + optionally texture coordinate and normal. + + Parameters + ------------- + mesh : Trimesh object + Mesh to merge vertices on + merge_tex : bool + If True textured meshes with UV coordinates will + have vertices merged regardless of UV coordinates + merge_norm : bool + If True, meshes with vertex normals will have + vertices merged ignoring different normals + digits_vertex : None or int + Number of digits to consider for vertex position + digits_norm : int + Number of digits to consider for unit normals + digits_uv : int + Number of digits to consider for UV coordinates + """ + ... + +def group(values, min_len=..., max_len=...): # -> list[Unknown]: + """ + Return the indices of values that are identical + + Parameters + ---------- + values : (n,) int + Values to group + min_len : int + The shortest group allowed + All groups will have len >= min_length + max_len : int + The longest group allowed + All groups will have len <= max_length + + Returns + ---------- + groups : sequence + Contains indices to form groups + IE [0,1,0,1] returns [[0,2], [1,3]] + """ + ... + +def hashable_rows(data, digits=...): # -> NDArray[Any] | NDArray[int64] | ndarray[Any, dtype[void]]: + """ + We turn our array into integers based on the precision + given by digits and then put them in a hashable format. + + Parameters + --------- + data : (n, m) array + Input data + digits : int or None + How many digits to add to hash if data is floating point + If None, tol.merge will be used + + Returns + --------- + hashable : (n,) array + Custom data type which can be sorted + or used as hash keys + """ + ... + +def float_to_int(data, digits=..., dtype=...): + """ + Given a numpy array of float/bool/int, return as integers. 
+ + Parameters + ------------- + data : (n, d) float, int, or bool + Input data + digits : float or int + Precision for float conversion + dtype : numpy.dtype + What datatype should result be returned as + + Returns + ------------- + as_int : (n, d) int + Data as integers + """ + ... + +def unique_ordered(data, return_index=..., return_inverse=...): # -> list[Unknown]: + """ + Returns the same as np.unique, but ordered as per the + first occurrence of the unique value in data. + + Examples + --------- + In [1]: a = [0, 3, 3, 4, 1, 3, 0, 3, 2, 1] + + In [2]: np.unique(a) + Out[2]: array([0, 1, 2, 3, 4]) + + In [3]: trimesh.grouping.unique_ordered(a) + Out[3]: array([0, 3, 4, 1, 2]) + """ + ... + +def unique_bincount( + values, minlength=..., return_inverse=..., return_counts=... +): # -> NDArray[intp] | tuple[NDArray[intp], ...] | tuple[NDArray[intp], ndarray[Any, dtype[signedinteger[Any]]]]: + """ + For arrays of integers find unique values using bin counting. + Roughly 10x faster for correct input than np.unique + + Parameters + -------------- + values : (n,) int + Values to find unique members of + minlength : int + Maximum value that will occur in values (values.max()) + return_inverse : bool + If True, return an inverse such that unique[inverse] == values + return_counts : bool + If True, also return the number of times each + unique item appears in values + + Returns + ------------ + unique : (m,) int + Unique values in original array + inverse : (n,) int, optional + An array such that unique[inverse] == values + Only returned if return_inverse is True + counts : (m,) int, optional + An array holding the counts of each unique item in values + Only returned if return_counts is True + """ + ... + +def merge_runs(data, digits=...): + """ + Merge duplicate sequential values. 
This differs from unique_ordered + in that values can occur in multiple places in the sequence, but + only consecutive repeats are removed + + Parameters + ----------- + data: (n,) float or int + + Returns + -------- + merged: (m,) float or int + + Examples + --------- + In [1]: a + Out[1]: + array([-1, -1, -1, 0, 0, 1, 1, 2, 0, + 3, 3, 4, 4, 5, 5, 6, 6, 7, + 7, 8, 8, 9, 9, 9]) + + In [2]: trimesh.grouping.merge_runs(a) + Out[2]: array([-1, 0, 1, 2, 0, 3, 4, 5, 6, 7, 8, 9]) + """ + ... + +def unique_float(data, return_index=..., return_inverse=..., digits=...): # -> tuple[Unknown, ...]: + """ + Identical to the numpy.unique command, except evaluates floating point + numbers, using a specified number of digits. + + If digits isn't specified, the library default TOL_MERGE will be used. + """ + ... + +def unique_rows(data, digits=..., keep_order=...): # -> list[Unknown] | tuple[NDArray[intp], NDArray[intp]]: + """ + Returns indices of unique rows. It will return the + first occurrence of a row that is duplicated: + [[1,2], [3,4], [1,2]] will return [0,1] + + Parameters + --------- + data : (n, m) array + Floating point data + digits : int or None + How many digits to consider + + Returns + -------- + unique : (j,) int + Index in data which is a unique row + inverse : (n,) int + Array to reconstruct original + Example: data[unique][inverse] == data + """ + ... + +def unique_value_in_row(data, unique=...): # -> NDArray[Any]: + """ + For a 2D array of integers find the position of a + value in each row which only occurs once. + + If there are more than one value per row which + occur once, the last one is returned. + + Parameters + ---------- + data : (n, d) int + Data to check values + unique : (m,) int + List of unique values contained in data. + Generated from np.unique if not passed + + Returns + --------- + result : (n, d) bool + With one or zero True values per row. 
+ + + Examples + ------------------------------------- + In [0]: r = np.array([[-1, 1, 1], + [-1, 1, -1], + [-1, 1, 1], + [-1, 1, -1], + [-1, 1, -1]], dtype=np.int8) + + In [1]: unique_value_in_row(r) + Out[1]: + array([[ True, False, False], + [False, True, False], + [ True, False, False], + [False, True, False], + [False, True, False]], dtype=bool) + + In [2]: unique_value_in_row(r).sum(axis=1) + Out[2]: array([1, 1, 1, 1, 1]) + + In [3]: r[unique_value_in_row(r)] + Out[3]: array([-1, 1, -1, 1, 1], dtype=int8) + """ + ... + +def group_rows(data, require_count=..., digits=...): # -> list[Unknown] | ndarray[Any, Unknown]: + """ + Returns index groups of duplicate rows, for example: + [[1,2], [3,4], [1,2]] will return [[0,2], [1]] + + + Note that using require_count allows numpy advanced + indexing to be used in place of looping and + checking hashes and is ~10x faster. + + + Parameters + ---------- + data : (n, m) array + Data to group + require_count : None or int + Only return groups of a specified length, eg: + require_count = 2 + [[1,2], [3,4], [1,2]] will return [[0,2]] + digits : None or int + If data is floating point how many decimals + to consider, or calculated from tol.merge + + Returns + ---------- + groups : sequence (*,) int + Indices from in indicating identical rows. + """ + ... + +def boolean_rows(a, b, operation=...): + """ + Find the rows in two arrays which occur in both rows. + + Parameters + --------- + a: (n, d) int + Array with row vectors + b: (m, d) int + Array with row vectors + operation : function + Numpy boolean set operation function: + -np.intersect1d + -np.setdiff1d + + Returns + -------- + shared: (p, d) array containing rows in both a and b + """ + ... + +def group_vectors(vectors, angle=..., include_negative=...): # -> tuple[NDArray[Any], list[Unknown]]: + """ + Group vectors based on an angle tolerance, with the option to + include negative vectors. 
+ + Parameters + ----------- + vectors : (n,3) float + Direction vector + angle : float + Group vectors closer than this angle in radians + include_negative : bool + If True consider the same: + [0,0,1] and [0,0,-1] + + Returns + ------------ + new_vectors : (m,3) float + Direction vector + groups : (m,) sequence of int + Indices of source vectors + """ + ... + +def group_distance(values, distance): # -> tuple[NDArray[Unknown], list[Unknown]]: + """ + Find groups of points which have neighbours closer than radius, + where no two points in a group are farther than distance apart. + + Parameters + --------- + points : (n, d) float + Points of dimension d + distance : float + Max distance between points in a cluster + + Returns + ---------- + unique : (m, d) float + Median value of each group + groups : (m) sequence of int + Indexes of points that make up a group + + """ + ... + +def clusters(points, radius): # -> list[Unknown] | Any | list[ndarray[Any, dtype[int64]]] | list[list[Unknown]]: + """ + Find clusters of points which have neighbours closer than radius + + Parameters + --------- + points : (n, d) float + Points of dimension d + radius : float + Max distance between points in a cluster + + Returns + ---------- + groups : (m,) sequence of int + Indices of points in a cluster + + """ + ... + +def blocks( + data, min_len=..., max_len=..., wrap=..., digits=..., only_nonzero=... +): # -> list[ndarray[Any, dtype[signedinteger[Any]]]]: + """ + Find the indices in an array of contiguous blocks + of equal values. 
+ + Parameters + ------------ + data : (n,) array + Data to find blocks on + min_len : int + The minimum length group to be returned + max_len : int + The maximum length group to be retuurned + wrap : bool + Combine blocks on both ends of 1D array + digits : None or int + If dealing with floats how many digits to consider + only_nonzero : bool + Only return blocks of non- zero values + + Returns + --------- + blocks : (m) sequence of (*,) int + Indices referencing data + """ + ... + +def group_min(groups, data): + """ + Given a list of groups find the minimum element of data + within each group + + Parameters + ----------- + groups : (n,) sequence of (q,) int + Indexes of each group corresponding to each element in data + data : (m,) + The data that groups indexes reference + + Returns + ----------- + minimums : (n,) + Minimum value of data per group + + """ + ... diff --git a/typings/trimesh/inertia.pyi b/typings/trimesh/inertia.pyi new file mode 100644 index 00000000..df11bc4a --- /dev/null +++ b/typings/trimesh/inertia.pyi @@ -0,0 +1,145 @@ +""" +This type stub file was generated by pyright. +""" + +""" +inertia.py +------------- + +Functions for dealing with inertia tensors. + +Results validated against known geometries and checked for +internal consistency. +""" + +def cylinder_inertia(mass, radius, height, transform=...): # -> Any | NDArray[floating[Any]]: + """ + Return the inertia tensor of a cylinder. + + Parameters + ------------ + mass : float + Mass of cylinder + radius : float + Radius of cylinder + height : float + Height of cylinder + transform : (4, 4) float + Transformation of cylinder + + Returns + ------------ + inertia : (3, 3) float + Inertia tensor + """ + ... + +def sphere_inertia(mass, radius): + """ + Return the inertia tensor of a sphere. + + Parameters + ------------ + mass : float + Mass of sphere + radius : float + Radius of sphere + + Returns + ------------ + inertia : (3, 3) float + Inertia tensor + """ + ... 
+ +def principal_axis(inertia): # -> tuple[NDArray[floating[Any]], NDArray[floating[Any]]]: + """ + Find the principal components and principal axis + of inertia from the inertia tensor. + + Parameters + ------------ + inertia : (3, 3) float + Inertia tensor + + Returns + ------------ + components : (3,) float + Principal components of inertia + vectors : (3, 3) float + Row vectors pointing along the + principal axes of inertia + """ + ... + +def transform_inertia(transform, inertia_tensor, parallel_axis=..., mass=...): # -> Any: + """ + Transform an inertia tensor to a new frame. + + Note that in trimesh `mesh.moment_inertia` is *axis aligned* + and at `mesh.center_mass`. + + So to transform to a new frame and get the moment of inertia at + the center of mass the translation should be ignored and only + rotation applied. + + If parallel axis is enabled it will compute the inertia + about a new location. + + More details in the MIT OpenCourseWare PDF: + ` MIT16_07F09_Lec26.pdf` + + + Parameters + ------------ + transform : (3, 3) or (4, 4) float + Transformation matrix + inertia_tensor : (3, 3) float + Inertia tensor. + parallel_axis : bool + Apply the parallel axis theorum or not. + If the passed inertia tensor is at the center of mass + and you want the new post-transform tensor also at the + center of mass you DON'T want this enabled as you *only* + want to apply the rotation. Use this to get moment of + inertia at an arbitrary frame that isn't the center of mass. + + Returns + ------------ + transformed : (3, 3) float + Inertia tensor in new frame. + """ + ... + +def radial_symmetry( + mesh, +): # -> tuple[None, None, None] | tuple[Literal['spherical'], Unknown, Unknown] | tuple[Literal['radial'], Unknown, Unknown]: + """ + Check whether a mesh has radial symmetry. 
+ + Returns + ----------- + symmetry : None or str + None No rotational symmetry + 'radial' Symmetric around an axis + 'spherical' Symmetric around a point + axis : None or (3,) float + Rotation axis or point + section : None or (3, 2) float + If radial symmetry provide vectors + to get cross section + """ + ... + +def scene_inertia(scene, transform): # -> Any: + """ + Calculate the inertia of a scene about a specific frame. + + Parameters + ------------ + scene : trimesh.Scene + Scene with geometry. + transform : None or (4, 4) float + Homogeneous transform to compute inertia at. + """ + ... diff --git a/typings/trimesh/interfaces/__init__.pyi b/typings/trimesh/interfaces/__init__.pyi new file mode 100644 index 00000000..93b8494a --- /dev/null +++ b/typings/trimesh/interfaces/__init__.pyi @@ -0,0 +1,7 @@ +""" +This type stub file was generated by pyright. +""" + +from . import blender, gmsh + +__all__ = ["blender", "gmsh"] diff --git a/typings/trimesh/interfaces/blender.pyi b/typings/trimesh/interfaces/blender.pyi new file mode 100644 index 00000000..a9757eeb --- /dev/null +++ b/typings/trimesh/interfaces/blender.pyi @@ -0,0 +1,27 @@ +""" +This type stub file was generated by pyright. +""" + +import platform + +_search_path = ... +if platform.system() == "Windows": + _search_path = ... + _search_path = ... +if platform.system() == "Darwin": + _search_path = ... + _search_path = ... +_blender_executable = ... +exists = ... + +def boolean(meshes, operation=..., debug=...): + """ + Run a boolean operation with multiple meshes using Blender. + """ + ... + +def unwrap(mesh, angle_limit=..., island_margin=..., debug=...): + """ + Run an unwrap operation using blender. + """ + ... diff --git a/typings/trimesh/interfaces/generic.pyi b/typings/trimesh/interfaces/generic.pyi new file mode 100644 index 00000000..b8d1fa26 --- /dev/null +++ b/typings/trimesh/interfaces/generic.pyi @@ -0,0 +1,11 @@ +""" +This type stub file was generated by pyright. 
+""" + +class MeshScript: + def __init__(self, meshes, script, exchange=..., debug=..., **kwargs) -> None: ... + def __enter__(self): # -> Self@MeshScript: + ... + def run(self, command): ... + def __exit__(self, *args, **kwargs): # -> None: + ... diff --git a/typings/trimesh/interfaces/gmsh.pyi b/typings/trimesh/interfaces/gmsh.pyi new file mode 100644 index 00000000..dac75ebf --- /dev/null +++ b/typings/trimesh/interfaces/gmsh.pyi @@ -0,0 +1,76 @@ +""" +This type stub file was generated by pyright. +""" + +def load_gmsh(file_name, gmsh_args=...): # -> dict[str, dict[Unknown, Unknown]] | dict[str, Unknown]: + """ + Returns a surface mesh from CAD model in Open Cascade + Breap (.brep), Step (.stp or .step) and Iges formats + Or returns a surface mesh from 3D volume mesh using gmsh. + + For a list of possible options to pass to GMSH, check: + http://gmsh.info/doc/texinfo/gmsh.html + + An easy way to install the GMSH SDK is through the `gmsh-sdk` + package on PyPi, which downloads and sets up gmsh: + >>> pip install gmsh-sdk + + Parameters + -------------- + file_name : str + Location of the file to be imported + gmsh_args : (n, 2) list + List of (parameter, value) pairs to be passed to + gmsh.option.setNumber + max_element : float or None + Maximum length of an element in the volume mesh + + Returns + ------------ + mesh : trimesh.Trimesh + Surface mesh of input geometry + """ + ... + +def to_volume(mesh, file_name=..., max_element=..., mesher_id=...): # -> bytes | None: + """ + Convert a surface mesh to a 3D volume mesh generated by gmsh. + + An easy way to install the gmsh sdk is through the gmsh-sdk + package on pypi, which downloads and sets up gmsh: + pip install gmsh-sdk + + Algorithm details, although check gmsh docs for more information: + The "Delaunay" algorithm is split into three separate steps. + First, an initial mesh of the union of all the volumes in the model is performed, + without inserting points in the volume. 
The surface mesh is then recovered using H. + Si's boundary recovery algorithm Tetgen/BR. Then a three-dimensional version of the + 2D Delaunay algorithm described above is applied to insert points in the volume to + respect the mesh size constraints. + + The Frontal" algorithm uses J. Schoeberl's Netgen algorithm. + The "HXT" algorithm is a new efficient and parallel reimplementaton + of the Delaunay algorithm. + The "MMG3D" algorithm (experimental) allows to generate + anisotropic tetrahedralizations + + + Parameters + -------------- + mesh : trimesh.Trimesh + Surface mesh of input geometry + file_name : str or None + Location to save output, in .msh (gmsh) or .bdf (Nastran) format + max_element : float or None + Maximum length of an element in the volume mesh + mesher_id : int + 3D unstructured algorithms: + 1: Delaunay, 3: Initial mesh only, 4: Frontal, 7: MMG3D, 9: R-tree, 10: HXT + + Returns + ------------ + data : None or bytes + MSH data, only returned if file_name is None + + """ + ... diff --git a/typings/trimesh/intersections.pyi b/typings/trimesh/intersections.pyi new file mode 100644 index 00000000..8bb75fb6 --- /dev/null +++ b/typings/trimesh/intersections.pyi @@ -0,0 +1,208 @@ +""" +This type stub file was generated by pyright. +""" + +""" +intersections.py +------------------ + +Primarily mesh-plane intersections (slicing). +""" + +def mesh_plane( + mesh, plane_normal, plane_origin, return_faces=..., local_faces=..., cached_dots=... +): # -> tuple[NDArray[Unknown], NDArray[intp]] | tuple[NDArray[Unknown], ndarray[Any, dtype[int64]]] | NDArray[Unknown]: + """ + Find a the intersections between a mesh and a plane, + returning a set of line segments on that plane. 
+ + Parameters + --------- + mesh : Trimesh object + Source mesh to slice + plane_normal : (3,) float + Normal vector of plane to intersect with mesh + plane_origin : (3,) float + Point on plane to intersect with mesh + return_faces : bool + If True return face index each line is from + local_faces : None or (m,) int + Limit section to just these faces. + cached_dots : (n, 3) float + If an external function has stored dot + products pass them here to avoid recomputing. + + Returns + ---------- + lines : (m, 2, 3) float + List of 3D line segments in space. + face_index : (m,) int + Index of mesh.faces for each line + Only returned if return_faces was True + """ + ... + +def mesh_multiplane( + mesh, plane_origin, plane_normal, heights +): # -> tuple[list[Unknown], NDArray[float64], list[Unknown]]: + """ + A utility function for slicing a mesh by multiple + parallel planes which caches the dot product operation. + + Parameters + ------------- + mesh : trimesh.Trimesh + Geometry to be sliced by planes + plane_origin : (3,) float + Point on a plane + plane_normal : (3,) float + Normal vector of plane + heights : (m,) float + Offset distances from plane to slice at: + at `height=0` it will be exactly on the passed plane. + + Returns + -------------- + lines : (m,) sequence of (n, 2, 2) float + Lines in space for m planes + to_3D : (m, 4, 4) float + Transform to move each section back to 3D + face_index : (m,) sequence of (n,) int + Indexes of mesh.faces for each segment + """ + ... + +def plane_lines(plane_origin, plane_normal, endpoints, line_segments=...): # -> tuple[Unknown, Any]: + """ + Calculate plane-line intersections + + Parameters + --------- + plane_origin : (3,) float + Point on plane + plane_normal : (3,) float + Plane normal vector + endpoints : (2, n, 3) float + Points defining lines to be tested + line_segments : bool + If True, only returns intersections as valid if + vertices from endpoints are on different sides + of the plane. 
+ + Returns + --------- + intersections : (m, 3) float + Cartesian intersection points + valid : (n, 3) bool + Indicate whether a valid intersection exists + for each input line segment + """ + ... + +def planes_lines( + plane_origins, plane_normals, line_origins, line_directions, return_distance=..., return_denom=... +): # -> list[Any]: + """ + Given one line per plane find the intersection points. + + Parameters + ----------- + plane_origins : (n,3) float + Point on each plane + plane_normals : (n,3) float + Normal vector of each plane + line_origins : (n,3) float + Point at origin of each line + line_directions : (n,3) float + Direction vector of each line + return_distance : bool + Return distance from origin to point also + return_denom : bool + Return denominator, so you can check for small values + + Returns + ---------- + on_plane : (n,3) float + Points on specified planes + valid : (n,) bool + Did plane intersect line or not + distance : (n,) float + [OPTIONAL] Distance from point + denom : (n,) float + [OPTIONAL] Denominator + """ + ... + +def slice_faces_plane( + vertices, faces, plane_normal, plane_origin, uv=..., face_index=..., cached_dots=... +): # -> tuple[Unknown, Unknown, Unknown | None] | tuple[NDArray[float64], NDArray[int64], NDArray[float64] | None] | tuple[Unknown, ndarray[Any, dtype[intp]] | Any | Unknown, Unknown | None] | tuple[ndarray[Any, dtype[Any]] | Unknown, ndarray[Any, dtype[intp]] | Any | Unknown, ndarray[Any, dtype[Unknown | Any]] | None]: + """ + Slice a mesh (given as a set of faces and vertices) with a plane, returning a + new mesh (again as a set of faces and vertices) that is the + portion of the original mesh to the positive normal side of the plane. 
+ + Parameters + --------- + vertices : (n, 3) float + Vertices of source mesh to slice + faces : (n, 3) int + Faces of source mesh to slice + plane_normal : (3,) float + Normal vector of plane to intersect with mesh + plane_origin : (3,) float + Point on plane to intersect with mesh + uv : (n, 2) float, optional + UV coordinates of source mesh to slice + face_index : ((m,) int) + Indexes of faces to slice. When no mask is provided, the + default is to slice all faces. + cached_dots : (n, 3) float + If an external function has stored dot + products pass them here to avoid recomputing + + Returns + ---------- + new_vertices : (n, 3) float + Vertices of sliced mesh + new_faces : (n, 3) int + Faces of sliced mesh + new_uv : (n, 2) int or None + UV coordinates of sliced mesh + """ + ... + +def slice_mesh_plane( + mesh, plane_normal, plane_origin, face_index=..., cap=..., cached_dots=..., engine=..., **kwargs +): # -> Trimesh | None: + """ + Slice a mesh with a plane returning a new mesh that is the + portion of the original mesh to the positive normal side + of the plane. + + Parameters + --------- + mesh : Trimesh object + Source mesh to slice + plane_normal : (3,) float + Normal vector of plane to intersect with mesh + plane_origin : (3,) float + Point on plane to intersect with mesh + cap : bool + If True, cap the result with a triangulated polygon + face_index : ((m,) int) + Indexes of mesh.faces to slice. When no mask is provided, the + default is to slice all faces. + cached_dots : (n, 3) float + If an external function has stored dot + products pass them here to avoid recomputing + engine : None or str + Triangulation engine passed to `triangulate_polygon` + kwargs : dict + Passed to the newly created sliced mesh + + Returns + ---------- + new_mesh : Trimesh object + Sliced mesh + """ + ... 
diff --git a/typings/trimesh/interval.pyi b/typings/trimesh/interval.pyi new file mode 100644 index 00000000..53362e52 --- /dev/null +++ b/typings/trimesh/interval.pyi @@ -0,0 +1,59 @@ +""" +This type stub file was generated by pyright. +""" + +from .typed import NDArray, float64 + +""" +interval.py +-------------- + +Deal with 1D intervals which are defined by: + [start position, end position] +""" + +def intersection(a: NDArray[float64], b: NDArray[float64]) -> NDArray[float64]: + """ + Given pairs of ranges merge them in to + one range if they overlap. + + Parameters + -------------- + a : (2, ) or (n, 2) + Start and end of a 1D interval + b : (2, ) float + Start and end of a 1D interval + + Returns + -------------- + inter : (2, ) or (2, 2) float + The unioned range from the two inputs, + if not `inter.ptp(axis=1)` will be zero. + """ + ... + +def union(intervals: NDArray[float64], sort: bool = ...) -> NDArray[float64]: + """ + For array of multiple intervals union them all into + the subset of intervals. + + For example: + `intervals = [[1,2], [2,3]] -> [[1, 3]]` + `intervals = [[1,2], [2.5,3]] -> [[1, 2], [2.5, 3]]` + + + Parameters + ------------ + intervals : (n, 2) + Pairs of `(min, max)` values. + sort + If the array is already ordered into (min, max) pairs + and then pairs sorted by minimum value you can skip the + sorting in this function. + + Returns + ---------- + unioned : (m, 2) + New intervals where `m <= n` + """ + ... diff --git a/typings/trimesh/nsphere.pyi b/typings/trimesh/nsphere.pyi new file mode 100644 index 00000000..3663d6ff --- /dev/null +++ b/typings/trimesh/nsphere.pyi @@ -0,0 +1,72 @@ +""" +This type stub file was generated by pyright. +""" + +""" +nsphere.py +-------------- + +Functions for fitting and minimizing nspheres: +circles, spheres, hyperspheres, etc. +""" + +def minimum_nsphere(obj): # -> tuple[Unknown, Unknown] | tuple[Any | Unknown, Any | Unknown]: + """ + Compute the minimum n- sphere for a mesh or a set of points. 
+ + Uses the fact that the minimum n- sphere will be centered at one of + the vertices of the furthest site voronoi diagram, which is n*log(n) + but should be pretty fast due to using the scipy/qhull implementations + of convex hulls and voronoi diagrams. + + Parameters + ---------- + obj : (n, d) float or trimesh.Trimesh + Points or mesh to find minimum bounding nsphere + + Returns + ---------- + center : (d,) float + Center of fitted n- sphere + radius : float + Radius of fitted n-sphere + """ + ... + +def fit_nsphere(points, prior=...): # -> tuple[Unknown, Any, Any]: + """ + Fit an n-sphere to a set of points using least squares. + + Parameters + ------------ + points : (n, d) float + Points in space + prior : (d,) float + Best guess for center of nsphere + + Returns + --------- + center : (d,) float + Location of center + radius : float + Mean radius across circle + error : float + Peak to peak value of deviation from mean radius + """ + ... + +def is_nsphere(points): # -> Any: + """ + Check if a list of points is an nsphere. + + Parameters + ----------- + points : (n, dimension) float + Points in space + + Returns + ----------- + check : bool + True if input points are on an nsphere + """ + ... diff --git a/typings/trimesh/parent.pyi b/typings/trimesh/parent.pyi new file mode 100644 index 00000000..28fcdb9f --- /dev/null +++ b/typings/trimesh/parent.pyi @@ -0,0 +1,201 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +from . import caching +from .util import ABC + +""" +parent.py +------------- + +The base class for Trimesh, PointCloud, and Scene objects +""" + +class Geometry(ABC): + """ + `Geometry` is the parent class for all geometry. + + By decorating a method with `abc.abstractmethod` it means + the objects that inherit from `Geometry` MUST implement + those methods. + """ + + @abc.abstractproperty + def bounds(self): # -> None: + ... + @abc.abstractproperty + def extents(self): # -> None: + ... 
+ @abc.abstractmethod + def apply_transform(self, matrix): # -> None: + ... + @abc.abstractmethod + def is_empty(self) -> bool: ... + def __hash__(self) -> int: + """ + Get a hash of the current geometry. + + Returns + --------- + hash : int + Hash of current graph and geometry. + """ + ... + @abc.abstractmethod + def copy(self): # -> None: + ... + @abc.abstractmethod + def show(self): # -> None: + ... + @abc.abstractmethod + def __add__(self, other): # -> None: + ... + @abc.abstractmethod + def export(self, file_obj, file_type=...): # -> None: + ... + def __repr__(self): # -> str: + """ + Print quick summary of the current geometry without + computing properties. + + Returns + ----------- + repr : str + Human readable quick look at the geometry. + """ + ... + def apply_translation(self, translation): # -> None: + """ + Translate the current mesh. + + Parameters + ---------- + translation : (3,) float + Translation in XYZ + """ + ... + def apply_scale(self, scaling): # -> None: + """ + Scale the mesh. + + Parameters + ---------- + scaling : float or (3,) float + Scale factor to apply to the mesh + """ + ... + def __radd__(self, other): # -> Self@Geometry | None: + """ + Concatenate the geometry allowing concatenation with + built in `sum()` function: + `sum(Iterable[trimesh.Trimesh])` + + Parameters + ------------ + other : Geometry + Geometry or 0 + + Returns + ---------- + concat : Geometry + Geometry of combined result + """ + ... + +class Geometry3D(Geometry): + """ + The `Geometry3D` object is the parent object of geometry objects + which are three dimensional, including Trimesh, PointCloud, + and Scene objects. + """ + + @caching.cache_decorator + def bounding_box(self): # -> Box: + """ + An axis aligned bounding box for the current mesh. + + Returns + ---------- + aabb : trimesh.primitives.Box + Box object with transform and extents defined + representing the axis aligned bounding box of the mesh + """ + ... 
+ @caching.cache_decorator + def bounding_box_oriented(self): # -> Box: + """ + An oriented bounding box for the current mesh. + + Returns + --------- + obb : trimesh.primitives.Box + Box object with transform and extents defined + representing the minimum volume oriented + bounding box of the mesh + """ + ... + @caching.cache_decorator + def bounding_sphere(self): # -> Sphere: + """ + A minimum volume bounding sphere for the current mesh. + + Note that the Sphere primitive returned has an unpadded + exact `sphere_radius` so while the distance of every vertex + of the current mesh from sphere_center will be less than + sphere_radius, the faceted sphere primitive may not + contain every vertex. + + Returns + -------- + minball : trimesh.primitives.Sphere + Sphere primitive containing current mesh + """ + ... + @caching.cache_decorator + def bounding_cylinder(self): # -> Cylinder: + """ + A minimum volume bounding cylinder for the current mesh. + + Returns + -------- + mincyl : trimesh.primitives.Cylinder + Cylinder primitive containing current mesh + """ + ... + @caching.cache_decorator + def bounding_primitive(self): # -> Any: + """ + The minimum volume primitive (box, sphere, or cylinder) that + bounds the mesh. + + Returns + --------- + bounding_primitive : object + Smallest primitive which bounds the mesh: + trimesh.primitives.Sphere + trimesh.primitives.Box + trimesh.primitives.Cylinder + """ + ... + def apply_obb(self, **kwargs): + """ + Apply the oriented bounding box transform to the current mesh. + + This will result in a mesh with an AABB centered at the + origin and the same dimensions as the OBB. + + Parameters + ------------ + kwargs + Passed through to `bounds.oriented_bounds` + + Returns + ---------- + matrix : (4, 4) float + Transformation matrix that was applied + to mesh to move it into OBB frame + """ + ... 
diff --git a/typings/trimesh/path/__init__.pyi b/typings/trimesh/path/__init__.pyi new file mode 100644 index 00000000..903c4ed0 --- /dev/null +++ b/typings/trimesh/path/__init__.pyi @@ -0,0 +1,14 @@ +""" +This type stub file was generated by pyright. +""" + +from .path import Path2D, Path3D + +""" +trimesh.path +------------- + +Handle 2D and 3D vector paths such as those contained in an +SVG or DXF file. +""" +__all__ = ["Path2D", "Path3D"] diff --git a/typings/trimesh/path/arc.pyi b/typings/trimesh/path/arc.pyi new file mode 100644 index 00000000..fa1492e1 --- /dev/null +++ b/typings/trimesh/path/arc.pyi @@ -0,0 +1,84 @@ +""" +This type stub file was generated by pyright. +""" + +from dataclasses import dataclass + +from ..typed import ArrayLike, NDArray, Optional, float64 + +_TOL_ZERO = ... + +@dataclass +class ArcInfo: + radius: float + center: NDArray[float64] + normal: Optional[NDArray[float64]] = ... + angles: Optional[NDArray[float64]] = ... + span: Optional[float] = ... + def __getitem__(self, item): # -> Any: + ... + +def arc_center(points: ArrayLike, return_normal: bool = ..., return_angle: bool = ...) -> ArcInfo: + """ + Given three points on a 2D or 3D arc find the center, + radius, normal, and angular span. + + Parameters + --------- + points : (3, dimension) float + Points in space, where dimension is either 2 or 3 + return_normal : bool + If True calculate the 3D normal unit vector + return_angle : bool + If True calculate the start and stop angle and span + + Returns + --------- + info + Arc center, radius, and other information. + """ + ... + +def discretize_arc(points, close=..., scale=...): # -> ndarray[Any, dtype[float64]] | Any: + """ + Returns a version of a three point arc consisting of + line segments. 
+ + Parameters + --------- + points : (3, d) float + Points on the arc where d in [2,3] + close : boolean + If True close the arc into a circle + scale : float + What is the approximate overall drawing scale + Used to establish order of magnitude for precision + + Returns + --------- + discrete : (m, d) float + Connected points in space + """ + ... + +def to_threepoint(center, radius, angles=...): + """ + For 2D arcs, given a center and radius convert them to three + points on the arc. + + Parameters + ----------- + center : (2,) float + Center point on the plane + radius : float + Radius of arc + angles : (2,) float + Angles in radians for start and end angle + if not specified, will default to (0.0, pi) + + Returns + ---------- + three : (3, 2) float + Arc control points + """ + ... diff --git a/typings/trimesh/path/creation.pyi b/typings/trimesh/path/creation.pyi new file mode 100644 index 00000000..5ca8eb3e --- /dev/null +++ b/typings/trimesh/path/creation.pyi @@ -0,0 +1,117 @@ +""" +This type stub file was generated by pyright. +""" + +def circle_pattern(pattern_radius, circle_radius, count, center=..., angle=..., **kwargs): # -> Path2D: + """ + Create a Path2D representing a circle pattern. + + Parameters + ------------ + pattern_radius : float + Radius of circle centers + circle_radius : float + The radius of each circle + count : int + Number of circles in the pattern + center : (2,) float + Center of pattern + angle : float + If defined pattern will span this angle + If None, pattern will be evenly spaced + + Returns + ------------- + pattern : trimesh.path.Path2D + Path containing circular pattern + """ + ... + +def circle(radius, center=..., **kwargs): # -> Path2D: + """ + Create a Path2D containing circle with the specified + radius. 
+ + Parameters + -------------- + radius : float + The radius of the circle + center : None or (2,) float + Center of the circle, origin by default + ** kwargs : dict + Passed to trimesh.path.Path2D constructor + + Returns + ------------- + circle : Path2D + Path containing specified circle + """ + ... + +def rectangle(bounds, **kwargs): # -> Path2D: + """ + Create a Path2D containing a single or multiple rectangles + with the specified bounds. + + Parameters + -------------- + bounds : (2, 2) float, or (m, 2, 2) float + Minimum XY, Maximum XY + + Returns + ------------- + rect : Path2D + Path containing specified rectangles + """ + ... + +def box_outline(extents=..., transform=..., **kwargs): # -> Path | Scene: + """ + Return a cuboid. + + Parameters + ------------ + extents : float, or (3,) float + Edge lengths + transform: (4, 4) float + Transformation matrix + **kwargs: + passed to Trimesh to create box + + Returns + ------------ + geometry : trimesh.Path3D + Path outline of a cuboid geometry + """ + ... + +def grid( + side, count=..., transform=..., plane_origin=..., plane_normal=..., include_circle=..., sections_circle=... +): # -> Path3D: + """ + Create a Path3D for a grid visualization of a plane. + + Parameters + ----------- + side : float + Length of half of a grid side + count : int + Number of grid lines per grid half + transform : None or (4, 4) float + Transformation matrix to move grid location. + Takes precedence over plane_origin if both are passed. + plane_origin : None or (3,) float + Plane origin + plane_normal : None or (3,) float + Unit normal vector + include_circle : bool + Include a circular pattern inside the grid + sections_circle : int + How many sections should the smallest circle have + + Returns + ---------- + grid : trimesh.path.Path3D + Path containing grid plane visualization + """ + ... 
diff --git a/typings/trimesh/path/curve.pyi b/typings/trimesh/path/curve.pyi new file mode 100644 index 00000000..c092f591 --- /dev/null +++ b/typings/trimesh/path/curve.pyi @@ -0,0 +1,61 @@ +""" +This type stub file was generated by pyright. +""" + +def discretize_bezier(points, count=..., scale=...): # -> Any: + """ + Parameters + ---------- + points : (order, dimension) float + Control points of the bezier curve + For a 2D cubic bezier, order=3, dimension=2 + count : int, or None + Number of segments + scale : float + Scale of curve + Returns + ---------- + discrete: (n, dimension) float + Points forming a a polyline representation + """ + ... + +def discretize_bspline(control, knots, count=..., scale=...): + """ + Given a B-Splines control points and knot vector, return + a sampled version of the curve. + + Parameters + ---------- + control : (o, d) float + Control points of the b- spline + knots : (j,) float + B-spline knots + count : int + Number of line segments to discretize the spline + If not specified will be calculated as something reasonable + + Returns + ---------- + discrete : (count, dimension) float + Points on a polyline version of the B-spline + """ + ... + +def binomial(n): # -> list[int] | Any: + """ + Return all binomial coefficients for a given order. + + For n > 5, scipy.special.binom is used, below we hardcode. + + Parameters + -------------- + n : int + Order of binomial + + Returns + --------------- + binom : (n + 1,) int + Binomial coefficients of a given order + """ + ... diff --git a/typings/trimesh/path/entities.pyi b/typings/trimesh/path/entities.pyi new file mode 100644 index 00000000..7a87b5d5 --- /dev/null +++ b/typings/trimesh/path/entities.pyi @@ -0,0 +1,519 @@ +""" +This type stub file was generated by pyright. +""" + +from ..util import ABC + +""" +entities.py +-------------- + +Basic geometric primitives which only store references to +vertex indices rather than vertices themselves. 
+""" + +class Entity(ABC): + def __init__(self, points, closed=..., layer=..., metadata=..., color=..., **kwargs) -> None: ... + @property + def metadata(self): # -> dict[Unknown, Unknown]: + """ + Get any metadata about the entity. + + Returns + --------- + metadata : dict + Bag of properties. + """ + ... + @property + def layer(self): # -> None: + """ + Set the layer the entity resides on as a shortcut + to putting it in the entity metadata. + + Returns + ---------- + layer : any + Hashable layer identifier. + """ + ... + @layer.setter + def layer(self, value): # -> None: + """ + Set the current layer of the entity. + + Returns + ---------- + layer : any + Hashable layer indicator + """ + ... + def to_dict(self): # -> dict[str, Unknown]: + """ + Returns a dictionary with all of the information + about the entity. + + Returns + ----------- + as_dict : dict + Has keys 'type', 'points', 'closed' + """ + ... + @property + def closed(self): # -> Any | Literal[False]: + """ + If the first point is the same as the end point + the entity is closed + + Returns + ----------- + closed : bool + Is the entity closed or not? + """ + ... + @property + def nodes(self): # -> ndarray[Any, dtype[int64]]: + """ + Returns an (n,2) list of nodes, or vertices on the path. + Note that this generic class function assumes that all of the + reference points are on the path which is true for lines and + three point arcs. + + If you were to define another class where that wasn't the case + (for example, the control points of a bezier curve), + you would need to implement an entity- specific version of this + function. + + The purpose of having a list of nodes is so that they can then be + added as edges to a graph so we can use functions to check + connectivity, extract paths, etc. + + The slicing on this function is essentially just tiling points + so the first and last vertices aren't repeated. Example: + + self.points = [0,1,2] + returns: [[0,1], [1,2]] + """ + ... 
+ @property + def end_points(self): + """ + Returns the first and last points. Also note that if you + define a new entity class where the first and last vertices + in self.points aren't the endpoints of the curve you need to + implement this function for your class. + + Returns + ------------- + ends : (2,) int + Indices of the two end points of the entity + """ + ... + @property + def is_valid(self): # -> Literal[True]: + """ + Is the current entity valid. + + Returns + ----------- + valid : bool + Is the current entity well formed + """ + ... + def reverse(self, direction=...): # -> None: + """ + Reverse the current entity in place. + + Parameters + ---------------- + direction : int + If positive will not touch direction + If negative will reverse self.points + """ + ... + def bounds(self, vertices): # -> NDArray[Unknown]: + """ + Return the AABB of the current entity. + + Parameters + ----------- + vertices : (n, dimension) float + Vertices in space + + Returns + ----------- + bounds : (2, dimension) float + Coordinates of AABB, in (min, max) form + """ + ... + def length(self, vertices): # -> Any: + """ + Return the total length of the entity. + + Parameters + -------------- + vertices : (n, dimension) float + Vertices in space + + Returns + --------- + length : float + Total length of entity + """ + ... + def explode(self): # -> list[Self@Entity]: + """ + Split the entity into multiple entities. + + Returns + ------------ + explode : list of Entity + Current entity split into multiple entities. + """ + ... + def copy(self): # -> Self@Entity: + """ + Return a copy of the current entity. + + Returns + ------------ + copied : Entity + Copy of current entity + """ + ... + def __hash__(self) -> int: + """ + Return a hash that represents the current entity. + + Returns + ---------- + hashed : int + Hash of current class name, points, and closed + """ + ... + +class Text(Entity): + """ + Text to annotate a 2D or 3D path. 
+ """ + + def __init__( + self, origin, text, height=..., vector=..., normal=..., align=..., layer=..., color=..., metadata=... + ) -> None: + """ + An entity for text labels. + + Parameters + -------------- + origin : int + Index of a single vertex for text origin + text : str + The text to label + height : float or None + The height of text + vector : int or None + An vertex index for which direction text + is written along unitized: vector - origin + normal : int or None + A vertex index for the plane normal: + vector is along unitized: normal - origin + align : (2,) str or None + Where to draw from for [horizontal, vertical]: + 'center', 'left', 'right' + """ + ... + @property + def origin(self): # -> Any: + """ + The origin point of the text. + + Returns + ----------- + origin : int + Index of vertices + """ + ... + @origin.setter + def origin(self, value): # -> None: + ... + @property + def vector(self): # -> Any: + """ + A point representing the text direction + along the vector: vertices[vector] - vertices[origin] + + Returns + ---------- + vector : int + Index of vertex + """ + ... + @vector.setter + def vector(self, value): # -> None: + ... + @property + def normal(self): # -> Any: + """ + A point representing the plane normal along the + vector: vertices[normal] - vertices[origin] + + Returns + ------------ + normal : int + Index of vertex + """ + ... + @normal.setter + def normal(self, value): # -> None: + ... + def plot(self, vertices, show=...): # -> None: + """ + Plot the text using matplotlib. + + Parameters + -------------- + vertices : (n, 2) float + Vertices in space + show : bool + If True, call plt.show() + """ + ... + def angle(self, vertices): # -> Any: + """ + If Text is 2D, get the rotation angle in radians. + + Parameters + ----------- + vertices : (n, 2) float + Vertices in space referenced by self.points + + Returns + --------- + angle : float + Rotation angle in radians + """ + ... + def length(self, vertices): # -> float: + ... 
+ def discrete(self, *args, **kwargs): # -> NDArray[Any]: + ... + @property + def closed(self): # -> Literal[False]: + ... + @property + def is_valid(self): # -> Literal[True]: + ... + @property + def nodes(self): # -> NDArray[Any]: + ... + @property + def end_points(self): # -> NDArray[Any]: + ... + +class Line(Entity): + """ + A line or poly-line entity + """ + + def discrete(self, vertices, scale=...): + """ + Discretize into a world- space path. + + Parameters + ------------ + vertices: (n, dimension) float + Points in space + scale : float + Size of overall scene for numerical comparisons + + Returns + ------------- + discrete: (m, dimension) float + Path in space composed of line segments + """ + ... + @property + def is_valid(self): # -> bool_: + """ + Is the current entity valid. + + Returns + ----------- + valid : bool + Is the current entity well formed + """ + ... + def explode(self): # -> list[Line]: + """ + If the current Line entity consists of multiple line + break it up into n Line entities. + + Returns + ---------- + exploded: (n,) Line entities + """ + ... + +class Arc(Entity): + @property + def closed(self): # -> Any | bool: + """ + A boolean flag for whether the arc is closed (a circle) or not. + + Returns + ---------- + closed : bool + If set True, Arc will be a closed circle + """ + ... + @closed.setter + def closed(self, value): # -> None: + """ + Set the Arc to be closed or not, without + changing the control points + + Parameters + ------------ + value : bool + Should this Arc be a closed circle or not + """ + ... + @property + def is_valid(self): # -> bool: + """ + Is the current Arc entity valid. + + Returns + ----------- + valid : bool + Does the current Arc have exactly 3 control points + """ + ... + def length(self, vertices): # -> float: + """ + Return the arc length of the 3-point arc. + + Parameter + ---------- + vertices : (n, d) float + Vertices for overall drawing. + + Returns + ----------- + length : float + Length of arc. 
+ """ + ... + def discrete(self, vertices, scale=...): # -> ndarray[Any, dtype[float64]] | Any: + """ + Discretize the arc entity into line sections. + + Parameters + ------------ + vertices : (n, dimension) float + Points in space + scale : float + Size of overall scene for numerical comparisons + + Returns + ------------- + discrete : (m, dimension) float + Path in space made up of line segments + """ + ... + def center(self, vertices, **kwargs): # -> ArcInfo: + """ + Return the center information about the arc entity. + + Parameters + ------------- + vertices : (n, dimension) float + Vertices in space + + Returns + ------------- + info : dict + With keys: 'radius', 'center' + """ + ... + def bounds(self, vertices): # -> NDArray[float64]: + """ + Return the AABB of the arc entity. + + Parameters + ----------- + vertices: (n, dimension) float + Vertices in space + + Returns + ----------- + bounds : (2, dimension) float + Coordinates of AABB in (min, max) form + """ + ... + +class Curve(Entity): + """ + The parent class for all wild curves in space. + """ + + @property + def nodes(self): # -> list[list[Any]]: + ... + +class Bezier(Curve): + """ + An open or closed Bezier curve + """ + + def discrete(self, vertices, scale=..., count=...): # -> Any: + """ + Discretize the Bezier curve. + + Parameters + ------------- + vertices : (n, 2) or (n, 3) float + Points in space + scale : float + Scale of overall drawings (for precision) + count : int + Number of segments to return + + Returns + ------------- + discrete : (m, 2) or (m, 3) float + Curve as line segments + """ + ... + +class BSpline(Curve): + """ + An open or closed B- Spline. + """ + + def __init__(self, points, knots, layer=..., metadata=..., color=..., **kwargs) -> None: ... + def discrete(self, vertices, count=..., scale=...): + """ + Discretize the B-Spline curve. 
+ + Parameters + ------------- + vertices : (n, 2) or (n, 3) float + Points in space + scale : float + Scale of overall drawings (for precision) + count : int + Number of segments to return + + Returns + ------------- + discrete : (m, 2) or (m, 3) float + Curve as line segments + """ + ... + def to_dict(self): # -> dict[str, Unknown]: + """ + Returns a dictionary with all of the information + about the entity. + """ + ... diff --git a/typings/trimesh/path/exchange/__init__.pyi b/typings/trimesh/path/exchange/__init__.pyi new file mode 100644 index 00000000..cea7ef96 --- /dev/null +++ b/typings/trimesh/path/exchange/__init__.pyi @@ -0,0 +1,3 @@ +""" +This type stub file was generated by pyright. +""" diff --git a/typings/trimesh/path/exchange/dxf.pyi b/typings/trimesh/path/exchange/dxf.pyi new file mode 100644 index 00000000..1aea2877 --- /dev/null +++ b/typings/trimesh/path/exchange/dxf.pyi @@ -0,0 +1,110 @@ +""" +This type stub file was generated by pyright. +""" + +_DXF_UNITS = ... +_UNITS_TO_DXF = ... +_SAFESPACE = ... +XRECORD_METADATA = ... +XRECORD_SENTINEL = ... +XRECORD_MAX_LINE = ... +XRECORD_MAX_INDEX = ... + +def load_dxf(file_obj, **kwargs): # -> dict[str, Unknown]: + """ + Load a DXF file to a dictionary containing vertices and + entities. + + Parameters + ---------- + file_obj: file or file- like object (has object.read method) + + Returns + ---------- + result: dict, keys are entities, vertices and metadata + """ + ... + +def convert_entities(blob, blob_raw=..., blocks=..., return_name=...): + """ + Convert a chunk of entities into trimesh entities. + + Parameters + ------------ + blob : (n, 2) str + Blob of entities uppercased + blob_raw : (n, 2) str + Blob of entities not uppercased + blocks : None or dict + Blocks referenced by INSERT entities + return_name : bool + If True return the first '2' value + + Returns + ---------- + """ + ... + +def export_dxf(path, only_layers=...): # -> str: + """ + Export a 2D path object to a DXF file. 
+ + Parameters + ---------- + path : trimesh.path.path.Path2D + Input geometry to export + only_layers : None or set + If passed only export the layers specified + + Returns + ---------- + export : str + Path formatted as a DXF file + """ + ... + +def bulge_to_arcs( + lines, bulge, bulge_idx, is_closed=..., metadata=... +): # -> tuple[NDArray[float64], list[Line]] | tuple[NDArray[float64], list[Unknown]]: + """ + Polylines can have "vertex bulge," which means the polyline + has an arc tangent to segments, rather than meeting at a + vertex. + + From Autodesk reference: + The bulge is the tangent of one fourth the included + angle for an arc segment, made negative if the arc + goes clockwise from the start point to the endpoint. + A bulge of 0 indicates a straight segment, and a + bulge of 1 is a semicircle. + + Parameters + ---------------- + lines : (n, 2) float + Polyline vertices in order + bulge : (m,) float + Vertex bulge value + bulge_idx : (m,) float + Which index of lines is bulge associated with + is_closed : bool + Is segment closed + metadata : None, or dict + Entity metadata to add + + Returns + --------------- + vertices : (a, 2) float + New vertices for poly-arc + entities : (b,) entities.Entity + New entities, either line or arc + """ + ... + +def get_key(blob, field, code): # -> int | None: + """ + Given a loaded (n, 2) blob and a field name + get a value by code. + """ + ... + +_dxf_loaders = ... diff --git a/typings/trimesh/path/exchange/export.pyi b/typings/trimesh/path/exchange/export.pyi new file mode 100644 index 00000000..0ac7690e --- /dev/null +++ b/typings/trimesh/path/exchange/export.pyi @@ -0,0 +1,30 @@ +""" +This type stub file was generated by pyright. 
+""" + +def export_path(path, file_type=..., file_obj=..., **kwargs): + """ + Export a Path object to a file- like object, or to a filename + + Parameters + --------- + file_obj: None, str, or file object + A filename string or a file-like object + file_type: None or str + File type, e.g.: 'svg', 'dxf' + kwargs : passed to loader + + Returns + --------- + exported : str or bytes + Data exported + """ + ... + +def export_dict(path): # -> dict[str, Unknown]: + """ + Export a path as a dict of kwargs for the Path constructor. + """ + ... + +_path_exporters = ... diff --git a/typings/trimesh/path/exchange/load.pyi b/typings/trimesh/path/exchange/load.pyi new file mode 100644 index 00000000..a612b7b2 --- /dev/null +++ b/typings/trimesh/path/exchange/load.pyi @@ -0,0 +1,42 @@ +""" +This type stub file was generated by pyright. +""" + +def load_path(file_obj, file_type=..., **kwargs): # -> Path | Scene: + """ + Load a file to a Path file_object. + + Parameters + ----------- + file_obj : One of the following: + - Path, Path2D, or Path3D file_objects + - open file file_object (dxf or svg) + - file name (dxf or svg) + - shapely.geometry.Polygon + - shapely.geometry.MultiLineString + - dict with kwargs for Path constructor + - (n,2,(2|3)) float, line segments + file_type : str + Type of file is required if file + file_object passed. + + Returns + --------- + path : Path, Path2D, Path3D file_object + Data as a native trimesh Path file_object + """ + ... + +def path_formats(): # -> set[str]: + """ + Get a list of supported path formats. + + Returns + ------------ + loaders : list of str + Extensions of loadable formats, ie: + ['svg', 'dxf'] + """ + ... + +path_loaders = ... diff --git a/typings/trimesh/path/exchange/misc.pyi b/typings/trimesh/path/exchange/misc.pyi new file mode 100644 index 00000000..51db2f06 --- /dev/null +++ b/typings/trimesh/path/exchange/misc.pyi @@ -0,0 +1,106 @@ +""" +This type stub file was generated by pyright. 
+""" + +def dict_to_path(as_dict): + """ + Turn a pure dict into a dict containing entity objects that + can be sent directly to a Path constructor. + + Parameters + ------------ + as_dict : dict + Has keys: 'vertices', 'entities' + + Returns + ------------ + kwargs : dict + Has keys: 'vertices', 'entities' + """ + ... + +def lines_to_path(lines): # -> dict[str, Unknown]: + """ + Turn line segments into a Path2D or Path3D object. + + Parameters + ------------ + lines : (n, 2, dimension) or (n, dimension) float + Line segments or connected polyline curve in 2D or 3D + + Returns + ----------- + kwargs : dict + kwargs for Path constructor + """ + ... + +def polygon_to_path(polygon): # -> dict[str, Unknown]: + """ + Load shapely Polygon objects into a trimesh.path.Path2D object + + Parameters + ------------- + polygon : shapely.geometry.Polygon + Input geometry + + Returns + ----------- + kwargs : dict + Keyword arguments for Path2D constructor + """ + ... + +def linestrings_to_path(multi): # -> dict[str, NDArray[Unknown]]: + """ + Load shapely LineString objects into a trimesh.path.Path2D object + + Parameters + ------------- + multi : shapely.geometry.LineString or MultiLineString + Input 2D geometry + + Returns + ------------- + kwargs : dict + Keyword arguments for Path2D constructor + """ + ... + +def faces_to_path(mesh, face_ids=..., **kwargs): # -> dict[str, Unknown]: + """ + Given a mesh and face indices find the outline edges and + turn them into a Path3D. + + Parameters + ------------ + mesh : trimesh.Trimesh + Triangulated surface in 3D + face_ids : (n,) int + Indexes referencing mesh.faces + + Returns + --------- + kwargs : dict + Kwargs for Path3D constructor + """ + ... + +def edges_to_path(edges, vertices, **kwargs): # -> dict[str, Unknown]: + """ + Given an edge list of indices and associated vertices + representing lines, generate kwargs for a Path object. 
+ + Parameters + ----------- + edges : (n, 2) int + Vertex indices of line segments + vertices : (m, dimension) float + Vertex positions where dimension is 2 or 3 + + Returns + ---------- + kwargs : dict + Kwargs for Path constructor + """ + ... diff --git a/typings/trimesh/path/exchange/svg_io.pyi b/typings/trimesh/path/exchange/svg_io.pyi new file mode 100644 index 00000000..1e3ec2e9 --- /dev/null +++ b/typings/trimesh/path/exchange/svg_io.pyi @@ -0,0 +1,75 @@ +""" +This type stub file was generated by pyright. +""" + +import numpy as np + +_ns_name = ... +_ns_url = ... +_ns = ... +_IDENTITY = np.eye(3) + +def svg_to_path( + file_obj=..., file_type=..., path_string=... +): # -> dict[str, list[Unknown]] | dict[str, dict[Unknown, dict[str, Unknown]]] | dict[str, Unknown]: + """ + Load an SVG file into a Path2D object. + + Parameters + ----------- + file_obj : open file object + Contains SVG data + file_type: None + Not used + path_string : None or str + If passed, parse a single path string and ignore `file_obj`. + + Returns + ----------- + loaded : dict + With kwargs for Path2D constructor + """ + ... + +def transform_to_matrices(transform): # -> list[Unknown]: + """ + Convert an SVG transform string to an array of matrices. + + i.e. "rotate(-10 50 100) + translate(-36 45.5) + skewX(40) + scale(1 0.5)" + + Parameters + ----------- + transform : str + Contains transformation information in SVG form + + Returns + ----------- + matrices : (n, 3, 3) float + Multiple transformation matrices from input transform string + """ + ... + +def export_svg(drawing, return_path=..., only_layers=..., digits=..., **kwargs): # -> LiteralString | str | Any: + """ + Export a Path2D object into an SVG file. 
+ + Parameters + ----------- + drawing : Path2D + Source geometry + return_path : bool + If True return only path string not wrapped in XML + only_layers : None or set + If passed only export the specified layers + digits : None or int + Number of digits for floating point values + + Returns + ----------- + as_svg : str + XML formatted SVG, or path string + """ + ... diff --git a/typings/trimesh/path/packing.pyi b/typings/trimesh/path/packing.pyi new file mode 100644 index 00000000..4f9d506b --- /dev/null +++ b/typings/trimesh/path/packing.pyi @@ -0,0 +1,301 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Optional + +""" +packing.py +------------ + +Pack rectangular regions onto larger rectangular regions. +""" +_TOL_ZERO = ... + +class RectangleBin: + """ + An N-dimensional binary space partition tree for packing + hyper-rectangles. Split logic is pure `numpy` but behaves + similarly to `scipy.spatial.Rectangle`. + + Mostly useful for packing 2D textures and 3D boxes and + has not been tested outside of 2 and 3 dimensions. + + Original article about using this for packing textures: + http://www.blackpawn.com/texts/lightmaps/ + """ + + def __init__(self, bounds) -> None: + """ + Create a rectangular bin. + + Parameters + ------------ + bounds : (2, dimension *) float + Bounds array are `[mins, maxes]` + """ + ... + @property + def extents(self): # -> Any: + """ + Bounding box size. + + Returns + ---------- + extents : (dimension,) float + Edge lengths of bounding box + """ + ... + def insert(self, size, rotate=...): # -> NDArray[float64] | None: + """ + Insert a rectangle into the bin. + + Parameters + ------------- + size : (dimension,) float + Size of rectangle to insert/ + + Returns + ---------- + inserted : (2,) float or None + Position of insertion in the tree or None + if the insertion was unsuccessful. + """ + ... + +def rectangles_single( + extents, size=..., shuffle=..., rotate=..., random=... 
+): # -> tuple[ndarray[Any, dtype[float64]], NDArray[Any]]: + """ + Execute a single insertion order of smaller rectangles onto + a larger rectangle using a binary space partition tree. + + Parameters + ---------- + extents : (n, dimension) float + The size of the hyper-rectangles to pack. + size : None or (dim,) float + Maximum size of container to pack onto. + If not passed it will re-root the tree when items + larger than any available node are inserted. + shuffle : bool + Whether or not to shuffle the insert order of the + smaller rectangles, as the final packing density depends + on insertion order. + rotate : bool + If True, allow integer-roll rotation. + + Returns + --------- + bounds : (m, 2, dim) float + Axis aligned resulting bounds in space + transforms : (m, dim + 1, dim + 1) float + Homogeneous transformation including rotation. + consume : (n,) bool + Which of the original rectangles were packed, + i.e. `consume.sum() == m` + """ + ... + +def paths(paths, **kwargs): # -> tuple[Unknown | Any, NDArray[Any], Unknown]: + """ + Pack a list of Path2D objects into a rectangle. + + Parameters + ------------ + paths: (n,) Path2D + Geometry to be packed + + Returns + ------------ + packed : trimesh.path.Path2D + All paths packed into a single path object. + transforms : (m, 3, 3) float + Homogeneous transforms to move paths from their + original position to the new one. + consume : (n,) bool + Which of the original paths were inserted, + i.e. `consume.sum() == m` + """ + ... + +def polygons(polygons, **kwargs): # -> tuple[NDArray[Any], Unknown]: + """ + Pack polygons into a rectangle by taking each Polygon's OBB + and then packing that as a rectangle. + + Parameters + ------------ + polygons : (n,) shapely.geometry.Polygon + Source geometry + **kwargs : dict + Passed through to `packing.rectangles`. + + Returns + ------------- + transforms : (m, 3, 3) float + Homogeonous transforms from original frame to + packed frame. 
+ consume : (n,) bool + Which of the original polygons was packed, + i.e. `consume.sum() == m` + """ + ... + +def rectangles( + extents, size=..., density_escape=..., spacing=..., iterations=..., rotate=..., quanta=..., seed=... +): # -> list[Unknown]: + """ + Run multiple iterations of rectangle packing, this is the + core function for all rectangular packing. + + Parameters + ------------ + extents : (n, dimension) float + Size of hyper-rectangle to be packed + size : None or (dimension,) float + Size of sheet to pack onto. If not passed tree will be allowed + to create new volume-minimizing parent nodes. + density_escape : float + Exit early if rectangular density is above this threshold. + spacing : float + Distance to allow between rectangles + iterations : int + Number of iterations to run + rotate : bool + Allow right angle rotations or not. + quanta : None or float + Discrete "snap" interval. + seed + If deterministic results are needed seed the RNG here. + + Returns + --------- + bounds : (m, 2, dimension) float + Axis aligned bounding boxes of inserted hyper-rectangle. + inserted : (n,) bool + Which of the original rect were packed. + """ + ... + +def images( + images, + power_resize: bool = ..., + deduplicate: bool = ..., + iterations: Optional[int] = ..., + seed: Optional[int] = ..., + spacing: Optional[float] = ..., + mode: Optional[str] = ..., +): # -> tuple[Image, Unknown]: + """ + Pack a list of images and return result and offsets. + + Parameters + ------------ + images : (n,) PIL.Image + Images to be packed + power_resize : bool + Should the result image be upsized to the nearest + power of two? Not every GPU supports materials that + aren't a power of two size. + deduplicate + Should images that have identical hashes be inserted + more than once? + mode + If passed return an output image with the + requested mode, otherwise will be picked + from the input images. 
+ + Returns + ----------- + packed : PIL.Image + Multiple images packed into result + offsets : (n, 2) int + Offsets for original image to pack + """ + ... + +def meshes(meshes, **kwargs): # -> tuple[list[Unknown], NDArray[float64], Unknown]: + """ + Pack 3D meshes into a rectangular volume using box packing. + + Parameters + ------------ + meshes : (n,) trimesh.Trimesh + Input geometry to pack + **kwargs : dict + Passed to `packing.rectangles` + + Returns + ------------ + placed : (m,) trimesh.Trimesh + Meshes moved into the rectangular volume. + transforms : (m, 4, 4) float + Homogeneous transform moving mesh from original + position to being packed in a rectangular volume. + consume : (n,) bool + Which of the original meshes were inserted, + i.e. `consume.sum() == m` + """ + ... + +def visualize(extents, bounds): # -> Scene: + """ + Visualize a 3D box packing. + + Parameters + ------------ + extents : (n, 3) float + AABB size before packing. + bounds : (n, 2, 3) float + AABB location after packing. + + Returns + ------------ + scene : trimesh.Scene + Scene with boxes at requested locations. + """ + ... + +def roll_transform(bounds, extents): # -> list[Unknown] | NDArray[float64]: + """ + Packing returns rotations with integer "roll" which + needs to be converted into a homogeneous rotation matrix. + + Currently supports `dimension=2` and `dimension=3`. + + Parameters + -------------- + bounds : (n, 2, dimension) float + Axis aligned bounding boxes of packed position + extents : (n, dimension) float + Original pre-rolled extents will be used + to determine rotation to move to `bounds`. + + Returns + ---------- + transforms : (n, dimension + 1, dimension + 1) float + Homogeneous transformation to move cuboid at the origin + into the position determined by `bounds`. + """ + ... + +def bounds_overlap(bounds, epsilon=...): # -> bool: + """ + Check to see if multiple axis-aligned bounding boxes + contains overlaps using `rtree`. 
+ + Parameters + ------------ + bounds : (n, 2, dimension) float + Axis aligned bounding boxes + epsilon : float + Amount to shrink AABB to avoid spurious floating + point hits. + + Returns + -------------- + overlap : bool + True if any bound intersects any other bound. + """ + ... diff --git a/typings/trimesh/path/path.pyi b/typings/trimesh/path/path.pyi new file mode 100644 index 00000000..af011248 --- /dev/null +++ b/typings/trimesh/path/path.pyi @@ -0,0 +1,886 @@ +""" +This type stub file was generated by pyright. +""" + +from .. import caching, parent, units +from ..typed import Dict, List, NDArray, Optional, float64 +from .entities import Entity + +""" +path.py +----------- + +A module designed to work with vector paths such as +those stored in a DXF or SVG file. +""" + +class Path(parent.Geometry): + """ + A Path object consists of vertices and entities. Vertices + are a simple (n, dimension) float array of points in space. + + Entities are a list of objects representing geometric + primitives, such as Lines, Arcs, BSpline, etc. All entities + reference vertices by index, so any transform applied to the + simple vertex array is applied to the entity. + """ + + def __init__( + self, + entities: Optional[List[Entity]] = ..., + vertices: Optional[NDArray[float64]] = ..., + metadata: Optional[Dict] = ..., + process: bool = ..., + colors=..., + **kwargs, + ) -> None: + """ + Instantiate a path object. + + Parameters + ----------- + entities : (m,) trimesh.path.entities.Entity + Contains geometric entities + vertices : (n, dimension) float + The vertices referenced by entities + metadata : dict + Any metadata about the path + process : bool + Run simple cleanup or not + """ + ... + def __repr__(self): # -> str: + """ + Print a quick summary of the number of vertices and entities. + """ + ... + def process(self): # -> Self@Path: + """ + Apply basic cleaning functions to the Path object in-place. + """ + ... 
+ @property + def colors(self): # -> NDArray[uint8] | None: + """ + Colors are stored per-entity. + + Returns + ------------ + colors : (len(entities), 4) uint8 + RGBA colors for each entity + """ + ... + @colors.setter + def colors(self, values): # -> None: + """ + Set the color for every entity in the Path. + + Parameters + ------------ + values : (len(entities), 4) uint8 + Color of each entity + """ + ... + @property + def vertices(self): # -> TrackedArray: + ... + @vertices.setter + def vertices(self, values: NDArray[float64]): # -> None: + ... + @property + def entities(self): # -> NDArray[Any] | NDArray[Unknown]: + """ + The actual entities making up the path. + + Returns + ----------- + entities : (n,) trimesh.path.entities.Entity + Entities such as Line, Arc, or BSpline curves + """ + ... + @entities.setter + def entities(self, values): # -> None: + ... + @property + def layers(self): # -> list[Any | Unknown]: + """ + Get a list of the layer for every entity. + + Returns + --------- + layers : (len(entities), ) any + Whatever is stored in each `entity.layer` + """ + ... + def __hash__(self) -> int: + """ + A hash of the current vertices and entities. + + Returns + ------------ + hash : long int + Appended hashes + """ + ... + @caching.cache_decorator + def paths(self): # -> Any: + """ + Sequence of closed paths, encoded by entity index. + + Returns + --------- + paths : (n,) sequence of (*,) int + Referencing self.entities + """ + ... + @caching.cache_decorator + def dangling(self): # -> NDArray[signedinteger[Any]]: + """ + List of entities that aren't included in a closed path + + Returns + ---------- + dangling : (n,) int + Index of self.entities + """ + ... + @caching.cache_decorator + def kdtree(self): # -> cKDTree[None]: + """ + A KDTree object holding the vertices of the path. + + Returns + ---------- + kdtree : scipy.spatial.cKDTree + Object holding self.vertices + """ + ... 
+ @property + def scale(self): # -> floating[Any]: + """ + What is a representitive number that reflects the magnitude + of the world holding the paths, for numerical comparisons. + + Returns + ---------- + scale : float + Approximate size of the world holding this path + """ + ... + @caching.cache_decorator + def length(self): # -> float: + """ + The total discretized length of every entity. + + Returns + -------- + length : float + Summed length of every entity + """ + ... + @caching.cache_decorator + def bounds(self): # -> NDArray[float64]: + """ + Return the axis aligned bounding box of the current path. + + Returns + ---------- + bounds : (2, dimension) float + AABB with (min, max) coordinates + """ + ... + @caching.cache_decorator + def centroid(self): # -> Any: + """ + Return the centroid of axis aligned bounding box enclosing + all entities of the path object. + + Returns + ----------- + centroid : (d,) float + Approximate centroid of the path + """ + ... + @property + def extents(self): # -> Any: + """ + The size of the axis aligned bounding box. + + Returns + --------- + extents : (dimension,) float + Edge length of AABB + """ + ... + @property + def units(self): # -> None: + """ + If there are units defined in self.metadata return them. + + Returns + ----------- + units : str + Current unit system + """ + ... + @units.setter + def units(self, units): # -> None: + ... + def convert_units(self, desired, guess=...): # -> None: + """ + Convert the units of the current drawing in place. + + Parameters + ----------- + desired : str + Unit system to convert to + guess : bool + If True will attempt to guess units + """ + ... + def explode(self): # -> None: + """ + Turn every multi- segment entity into single segment + entities in- place. + """ + ... + def fill_gaps(self, distance=...): # -> None: + """ + Find vertices without degree 2 and try to connect to + other vertices. Operations are done in-place. 
+ + Parameters + ---------- + distance : float + Connect vertices up to this distance + """ + ... + @property + def is_closed(self): # -> bool: + """ + Are all entities connected to other entities. + + Returns + ----------- + closed : bool + Every entity is connected at its ends + """ + ... + @property + def is_empty(self): # -> bool: + """ + Are any entities defined for the current path. + + Returns + ---------- + empty : bool + True if no entities are defined + """ + ... + @caching.cache_decorator + def vertex_graph(self): # -> Graph | None: + """ + Return a networkx.Graph object for the entity connectivity + + graph : networkx.Graph + Holds vertex indexes + """ + ... + @caching.cache_decorator + def vertex_nodes(self): # -> NDArray[Any | Unknown]: + """ + Get a list of which vertex indices are nodes, + which are either endpoints or points where the + entity makes a direction change. + + Returns + -------------- + nodes : (n, 2) int + Indexes of self.vertices which are nodes + """ + ... + def apply_transform(self, transform): # -> Self@Path: + """ + Apply a transformation matrix to the current path in- place + + Parameters + ----------- + transform : (d+1, d+1) float + Homogeneous transformations for vertices + """ + ... + def apply_layer(self, name): # -> None: + """ + Apply a layer name to every entity in the path. + + Parameters + ------------ + name : str + Apply layer name to every entity + """ + ... + def rezero(self): # -> NDArray[float64]: + """ + Translate so that every vertex is positive in the current + mesh is positive. + + Returns + ----------- + matrix : (dimension + 1, dimension + 1) float + Homogeneous transformations that was applied + to the current Path object. + """ + ... 
+ def merge_vertices(self, digits=...): # -> None: + """ + Merges vertices which are identical and replace references + by altering `self.entities` and `self.vertices` + + Parameters + -------------- + digits : None, or int + How many digits to consider when merging vertices + """ + ... + def replace_vertex_references(self, mask): # -> None: + """ + Replace the vertex index references in every entity. + + Parameters + ------------ + mask : (len(self.vertices), ) int + Contains new vertex indexes + + Notes + ------------ + entity.points in self.entities + Replaced by mask[entity.points] + """ + ... + def remove_entities(self, entity_ids): # -> None: + """ + Remove entities by index. + + Parameters + ----------- + entity_ids : (n,) int + Indexes of self.entities to remove + """ + ... + def remove_invalid(self): # -> None: + """ + Remove entities which declare themselves invalid + + Notes + ---------- + self.entities: shortened + """ + ... + def remove_duplicate_entities(self): # -> None: + """ + Remove entities that are duplicated + + Notes + ------- + self.entities: length same or shorter + """ + ... + @caching.cache_decorator + def referenced_vertices(self): # -> NDArray[int64]: + """ + Which vertices are referenced by an entity. + + Returns + ----------- + referenced_vertices: (n,) int, indexes of self.vertices + """ + ... + def remove_unreferenced_vertices(self): # -> None: + """ + Removes all vertices which aren't used by an entity. + + Notes + --------- + self.vertices : reordered and shortened + self.entities : entity.points references updated + """ + ... + @caching.cache_decorator + def discrete(self) -> List[NDArray[float64]]: + """ + A sequence of connected vertices in space, corresponding to + self.paths. + + Returns + --------- + discrete : (len(self.paths),) + A sequence of (m*, dimension) float + """ + ... + def export(self, file_obj=..., file_type=..., **kwargs): + """ + Export the path to a file object or return data. 
+ + Parameters + --------------- + file_obj : None, str, or file object + File object or string to export to + file_type : None or str + Type of file: dxf, dict, svg + + Returns + --------------- + exported : bytes or str + Exported as specified type + """ + ... + def to_dict(self): ... + def copy(self): # -> Self@Path: + """ + Get a copy of the current mesh + + Returns + --------- + copied : Path object + Copy of self + """ + ... + def scene(self): # -> Scene: + """ + Get a scene object containing the current Path3D object. + + Returns + -------- + scene: trimesh.scene.Scene object containing current path + """ + ... + def __add__(self, other): # -> Any: + """ + Concatenate two Path objects by appending vertices and + reindexing point references. + + Parameters + ----------- + other: Path object + + Returns + ----------- + concat: Path object, appended from self and other + """ + ... + +class Path3D(Path): + """ + Hold multiple vector curves (lines, arcs, splines, etc) in 3D. + """ + + def to_planar( + self, to_2D=..., normal=..., check=... + ): # -> tuple[Path2D, NDArray[float64]] | tuple[Path2D, Any | NDArray[floating[Any]]]: + """ + Check to see if current vectors are all coplanar. + + If they are, return a Path2D and a transform which will + transform the 2D representation back into 3 dimensions + + Parameters + ----------- + to_2D: (4,4) float + Homogeneous transformation matrix to apply, + If not passed a plane will be fitted to vertices. + normal: (3,) float, or None + Approximate normal of direction of plane + If to_2D is not specified sign + will be applied to fit plane normal + check: bool + If True: Raise a ValueError if + points aren't coplanar + + Returns + ----------- + planar : trimesh.path.Path2D + Current path transformed onto plane + to_3D : (4,4) float + Homeogenous transformations to move planar + back into 3D space + """ + ... + def show(self, **kwargs): # -> SceneViewer: + """ + Show the current Path3D object. + """ + ... 
+ +class Path2D(Path): + """ + Hold multiple vector curves (lines, arcs, splines, etc) in 3D. + """ + + def show(self, annotations=...): # -> None: + """ + Plot the current Path2D object using matplotlib. + """ + ... + def apply_obb(self): # -> Any: + """ + Transform the current path so that its OBB is axis aligned + and OBB center is at the origin. + + Returns + ----------- + obb : (3, 3) float + Homogeneous transformation matrix + """ + ... + def apply_scale(self, scale): # -> Self@Path2D: + """ + Apply a 2D scale to the current Path2D. + + Parameters + ------------- + scale : float or (2,) float + Scale to apply in-place. + """ + ... + @caching.cache_decorator + def obb(self): # -> Any | NDArray[float64]: + """ + Get a transform that centers and aligns the OBB of the + referenced vertices with the XY axis. + + Returns + ----------- + obb : (3, 3) float + Homogeneous transformation matrix + """ + ... + def rasterize(self, pitch=..., origin=..., resolution=..., fill=..., width=..., **kwargs): # -> Image | None: + """ + Rasterize a Path2D object into a boolean image ("mode 1"). + + Parameters + ------------ + pitch : float or (2,) float + Length(s) in model space of pixel edges + origin : (2,) float + Origin position in model space + resolution : (2,) int + Resolution in pixel space + fill : bool + If True will return closed regions as filled + width : int + If not None will draw outline this wide (pixels) + + Returns + ------------ + raster : PIL.Image object, mode 1 + Rasterized version of closed regions. + """ + ... + def sample(self, count, **kwargs): # -> NDArray[Any] | Any | None: + """ + Use rejection sampling to generate random points inside a + polygon. 
+ + Parameters + ----------- + count : int + Number of points to return + If there are multiple bodies, there will + be up to count * bodies points returned + factor : float + How many points to test per loop + IE, count * factor + max_iter : int, + Maximum number of intersection loops + to run, total points sampled is + count * factor * max_iter + + Returns + ----------- + hit : (n, 2) float + Random points inside polygon + """ + ... + @property + def body_count(self): # -> int: + """ + Returns a count of the number of unconnected polygons that + may contain other curves but aren't contained themselves. + + Returns + --------- + body_count : int + Number of unconnected independent polygons. + """ + ... + def to_3D(self, transform=...): # -> Path3D: + """ + Convert 2D path to 3D path on the XY plane. + + Parameters + ------------- + transform : (4, 4) float + If passed, will transform vertices. + If not passed and 'to_3D' is in self.metadata + that transform will be used. + + Returns + ----------- + path_3D : Path3D + 3D version of current path + """ + ... + @caching.cache_decorator + def polygons_closed(self): # -> NDArray[Any] | None: + """ + Cycles in the vertex graph, as shapely.geometry.Polygons. + These are polygon objects for every closed circuit, with no notion + of whether a polygon is a hole or an area. Every polygon in this + list will have an exterior, but NO interiors. + + Returns + --------- + polygons_closed: (n,) list of shapely.geometry.Polygon objects + """ + ... + @caching.cache_decorator + def polygons_full(self): # -> list[None]: + """ + A list of shapely.geometry.Polygon objects with interiors created + by checking which closed polygons enclose which other polygons. + + Returns + --------- + full : (len(self.root),) shapely.geometry.Polygon + Polygons containing interiors + """ + ... + @caching.cache_decorator + def area(self): # -> float: + """ + Return the area of the polygons interior. 
+ + Returns + --------- + area : float + Total area of polygons minus interiors + """ + ... + def extrude(self, height, **kwargs): # -> Extrusion | list[Extrusion]: + """ + Extrude the current 2D path into a 3D mesh. + + Parameters + ---------- + height: float, how far to extrude the profile + kwargs: passed directly to meshpy.triangle.build: + triangle.build(mesh_info, + verbose=False, + refinement_func=None, + attributes=False, + volume_constraints=True, + max_volume=None, + allow_boundary_steiner=True, + allow_volume_steiner=True, + quality_meshing=True, + generate_edges=None, + generate_faces=False, + min_angle=None) + Returns + -------- + mesh: trimesh object representing extruded polygon + """ + ... + def triangulate( + self, **kwargs + ): # -> tuple[Unknown | NDArray[Any] | NDArray[Unknown], Unknown | NDArray[Any] | NDArray[Unknown]]: + """ + Create a region- aware triangulation of the 2D path. + + Parameters + ------------- + **kwargs : dict + Passed to `trimesh.creation.triangulate_polygon` + + Returns + ------------- + vertices : (n, 2) float + 2D vertices of triangulation + faces : (n, 3) int + Indexes of vertices for triangles + """ + ... + def medial_axis(self, resolution=..., clip=...): # -> Path2D | Any: + """ + Find the approximate medial axis based + on a voronoi diagram of evenly spaced points on the + boundary of the polygon. + + Parameters + ---------- + resolution : None or float + Distance between each sample on the polygon boundary + clip : None, or (2,) float + Min, max number of samples + + Returns + ---------- + medial : Path2D object + Contains only medial axis of Path + """ + ... + def connected_paths(self, path_id, include_self=...): # -> NDArray[signedinteger[Any]]: + """ + Given an index of self.paths find other paths which + overlap with that path. 
+ + Parameters + ----------- + path_id : int + Index of self.paths + include_self : bool + Should the result include path_id or not + + Returns + ----------- + path_ids : (n, ) int + Indexes of self.paths that overlap input path_id + """ + ... + def simplify(self, **kwargs): # -> Self@Path2D: + """ + Return a version of the current path with colinear segments + merged, and circles entities replacing segmented circular paths. + + Returns + --------- + simplified : Path2D object + """ + ... + def simplify_spline(self, smooth=..., verbose=...): # -> Any: + """ + Convert paths into b-splines. + + Parameters + ----------- + smooth : float + How much the spline should smooth the curve + verbose : bool + Print detailed log messages + + Returns + ------------ + simplified : Path2D + Discrete curves replaced with splines + """ + ... + def split(self, **kwargs): # -> NDArray[Unknown]: + """ + If the current Path2D consists of n 'root' curves, + split them into a list of n Path2D objects + + Returns + ---------- + split: (n,) list of Path2D objects + Each connected region and interiors + """ + ... + def plot_discrete(self, show=..., annotations=...): # -> Axes: + """ + Plot the closed curves of the path. + """ + ... + def plot_entities(self, show=..., annotations=..., color=...): # -> None: + """ + Plot the entities of the path with no notion of topology. + + Parameters + ------------ + show : bool + Open a window immediately or not + annotations : bool + Call an entities custom plot function. + color : str + Override entity colors and make them all this color. + """ + ... + @property + def identifier(self): # -> NDArray[float64] | None: + """ + A unique identifier for the path. + + Returns + --------- + identifier : (5,) float + Unique identifier + """ + ... + @caching.cache_decorator + def identifier_hash(self): # -> str: + """ + Return a hash of the identifier. + + Returns + ---------- + hashed : (64,) str + SHA256 hash of the identifier vector. + """ + ... 
+ @property + def path_valid(self): # -> NDArray[Any]: + """ + Returns + ---------- + path_valid : (n,) bool + Indexes of self.paths self.polygons_closed + which are valid polygons. + """ + ... + @caching.cache_decorator + def root(self): # -> None: + """ + Which indexes of self.paths/self.polygons_closed + are root curves, also known as 'shell' or 'exterior. + + Returns + --------- + root : (n,) int + List of indexes + """ + ... + @caching.cache_decorator + def enclosure(self): # -> Any: + """ + Undirected graph object of polygon enclosure. + + Returns + ----------- + enclosure : networkx.Graph + Enclosure graph of self.polygons by index. + """ + ... + @caching.cache_decorator + def enclosure_directed(self): # -> DiGraph | None: + """ + Directed graph of polygon enclosure. + + Returns + ---------- + enclosure_directed : networkx.DiGraph + Directed graph: child nodes are fully + contained by their parent node. + """ + ... + @caching.cache_decorator + def enclosure_shell(self): # -> OrderedDict[Any, NDArray[signedinteger[Any]]]: + """ + A dictionary of path indexes which are 'shell' paths, and values + of 'hole' paths. + + Returns + ---------- + corresponding : dict + {index of self.paths of shell : [indexes of holes]} + """ + ... diff --git a/typings/trimesh/path/polygons.pyi b/typings/trimesh/path/polygons.pyi new file mode 100644 index 00000000..d99c1930 --- /dev/null +++ b/typings/trimesh/path/polygons.pyi @@ -0,0 +1,405 @@ +""" +This type stub file was generated by pyright. +""" + +from shapely.geometry import Polygon + +from ..typed import List, NDArray, Optional, float64 + +def enclosure_tree(polygons: List[Polygon]): # -> tuple[ndarray[Any, dtype[Any]], Unknown | DiGraph | None]: + """ + Given a list of shapely polygons with only exteriors, + find which curves represent the exterior shell or root curve + and which represent holes which penetrate the exterior. 
+ + This is done with an R-tree for rough overlap detection, + and then exact polygon queries for a final result. + + Parameters + ----------- + polygons : (n,) shapely.geometry.Polygon + Polygons which only have exteriors and may overlap + + Returns + ----------- + roots : (m,) int + Index of polygons which are root + contains : networkx.DiGraph + Edges indicate a polygon is + contained by another polygon + """ + ... + +def edges_to_polygons(edges, vertices): # -> list[Unknown]: + """ + Given an edge list of indices and associated vertices + representing lines, generate a list of polygons. + + Parameters + ----------- + edges : (n, 2) int + Indexes of vertices which represent lines + vertices : (m, 2) float + Vertices in 2D space + + Returns + ---------- + polygons : (p,) shapely.geometry.Polygon + Polygon objects with interiors + """ + ... + +def polygons_obb(polygons: List[Polygon]): # -> tuple[NDArray[Any], NDArray[Any]]: + """ + Find the OBBs for a list of shapely.geometry.Polygons + """ + ... + +def polygon_obb(polygon: Polygon): # -> tuple[Any | NDArray[float64], ndarray[Any, dtype[Any]]]: + """ + Find the oriented bounding box of a Shapely polygon. + + The OBB is always aligned with an edge of the convex hull of the polygon. + + Parameters + ------------- + polygons : shapely.geometry.Polygon + Input geometry + + Returns + ------------- + transform : (3, 3) float + Transformation matrix + which will move input polygon from its original position + to the first quadrant where the AABB is the OBB + extents : (2,) float + Extents of transformed polygon + """ + ... + +def transform_polygon(polygon, matrix): # -> list[Unknown]: + """ + Transform a polygon by a a 2D homogeneous transform. + + Parameters + ------------- + polygon : shapely.geometry.Polygon + 2D polygon to be transformed. + matrix : (3, 3) float + 2D homogeneous transformation. + + Returns + -------------- + result : shapely.geometry.Polygon + Polygon transformed by matrix. + """ + ... 
+ +def polygon_bounds(polygon, matrix=...): # -> NDArray[Any]: + """ + Get the transformed axis aligned bounding box of a + shapely Polygon object. + + Parameters + ------------ + polygon : shapely.geometry.Polygon + Polygon pre-transform + matrix : (3, 3) float or None. + Homogeneous transform moving polygon in space + + Returns + ------------ + bounds : (2, 2) float + Axis aligned bounding box of transformed polygon. + """ + ... + +def plot(polygon=..., show=..., axes=..., **kwargs): + """ + Plot a shapely polygon using matplotlib. + + Parameters + ------------ + polygon : shapely.geometry.Polygon + Polygon to be plotted + show : bool + If True will display immediately + **kwargs + Passed to plt.plot + """ + ... + +def resample_boundaries(polygon: Polygon, resolution: float, clip=...): # -> dict[str, Unknown]: + """ + Return a version of a polygon with boundaries re-sampled + to a specified resolution. + + Parameters + ------------- + polygon : shapely.geometry.Polygon + Source geometry + resolution : float + Desired distance between points on boundary + clip : (2,) int + Upper and lower bounds to clip + number of samples to avoid exploding count + + Returns + ------------ + kwargs : dict + Keyword args for a Polygon constructor `Polygon(**kwargs)` + """ + ... + +def stack_boundaries(boundaries): # -> NDArray[Unknown]: + """ + Stack the boundaries of a polygon into a single + (n, 2) list of vertices. + + Parameters + ------------ + boundaries : dict + With keys 'shell', 'holes' + + Returns + ------------ + stacked : (n, 2) float + Stacked vertices + """ + ... + +def medial_axis( + polygon: Polygon, resolution: Optional[float] = ..., clip=... +): # -> tuple[NDArray[int64], NDArray[float64]]: + """ + Given a shapely polygon, find the approximate medial axis + using a voronoi diagram of evenly spaced points on the + boundary of the polygon. 
+ + Parameters + ---------- + polygon : shapely.geometry.Polygon + The source geometry + resolution : float + Distance between each sample on the polygon boundary + clip : None, or (2,) int + Clip sample count to min of clip[0] and max of clip[1] + + Returns + ---------- + edges : (n, 2) int + Vertex indices representing line segments + on the polygon's medial axis + vertices : (m, 2) float + Vertex positions in space + """ + ... + +def identifier(polygon: Polygon) -> NDArray[float64]: + """ + Return a vector containing values representative of + a particular polygon. + + Parameters + --------- + polygon : shapely.geometry.Polygon + Input geometry + + Returns + --------- + identifier : (8,) float + Values which should be unique for this polygon. + """ + ... + +def random_polygon(segments=..., radius=...): + """ + Generate a random polygon with a maximum number of sides and approximate radius. + + Parameters + --------- + segments : int + The maximum number of sides the random polygon will have + radius : float + The approximate radius of the polygon desired + + Returns + --------- + polygon : shapely.geometry.Polygon + Geometry object with random exterior and no interiors. + """ + ... + +def polygon_scale(polygon): # -> Any: + """ + For a Polygon object return the diagonal length of the AABB. + + Parameters + ------------ + polygon : shapely.geometry.Polygon + Source geometry + + Returns + ------------ + scale : float + Length of AABB diagonal + """ + ... + +def paths_to_polygons(paths, scale=...): # -> NDArray[Any]: + """ + Given a sequence of connected points turn them into + valid shapely Polygon objects. + + Parameters + ----------- + paths : (n,) sequence + Of (m, 2) float closed paths + scale : float + Approximate scale of drawing for precision + + Returns + ----------- + polys : (p,) list + Filled with Polygon or None + + """ + ... 
+ +def sample(polygon, count, factor=..., max_iter=...): # -> Any | ndarray[Any, dtype[Any]]: + """ + Use rejection sampling to generate random points inside a + polygon. + + Parameters + ----------- + polygon : shapely.geometry.Polygon + Polygon that will contain points + count : int + Number of points to return + factor : float + How many points to test per loop + max_iter : int + Maximum number of intersection checks is: + > count * factor * max_iter + + Returns + ----------- + hit : (n, 2) float + Random points inside polygon + where n <= count + """ + ... + +def repair_invalid(polygon, scale=..., rtol=...): + """ + Given a shapely.geometry.Polygon, attempt to return a + valid version of the polygon through buffering tricks. + + Parameters + ----------- + polygon : shapely.geometry.Polygon + Source geometry + rtol : float + How close does a perimeter have to be + scale : float or None + For numerical precision reference + + Returns + ---------- + repaired : shapely.geometry.Polygon + Repaired polygon + + Raises + ---------- + ValueError + If polygon can't be repaired + """ + ... + +def projected(mesh, normal, origin=..., ignore_sign=..., rpad=..., apad=..., tol_dot=..., max_regions=...): # -> None: + """ + Project a mesh onto a plane and then extract the polygon + that outlines the mesh projection on that plane. + + Note that this will ignore back-faces, which is only + relevant if the source mesh isn't watertight. + + Also padding: this generates a result by unioning the + polygons of multiple connected regions, which requires + the polygons be padded by a distance so that a polygon + union produces a single coherent result. 
This distance + is calculated as: `apad + (rpad * scale)` + + Parameters + ---------- + mesh : trimesh.Trimesh + Source geometry + check : bool + If True make sure is flat + normal : (3,) float + Normal to extract flat pattern along + origin : None or (3,) float + Origin of plane to project mesh onto + ignore_sign : bool + Allow a projection from the normal vector in + either direction: this provides a substantial speedup + on watertight meshes where the direction is irrelevant + but if you have a triangle soup and want to discard + backfaces you should set this to False. + rpad : float + Proportion to pad polygons by before unioning + and then de-padding result by to avoid zero-width gaps. + apad : float + Absolute padding to pad polygons by before unioning + and then de-padding result by to avoid zero-width gaps. + tol_dot : float + Tolerance for discarding on-edge triangles. + max_regions : int + Raise an exception if the mesh has more than this + number of disconnected regions to fail quickly before + unioning. + + Returns + ---------- + projected : shapely.geometry.Polygon or None + Outline of source mesh + + Raises + --------- + ValueError + If max_regions is exceeded + """ + ... + +def second_moments( + polygon: Polygon, return_centered=... +): # -> list[Any] | tuple[list[Any], list[Any], float | Any | Literal[0], NDArray[float64]]: + """ + Calculate the second moments of area of a polygon + from the boundary. + + Parameters + ------------ + polygon : shapely.geometry.Polygon + Closed polygon. + return_centered : bool + Get second moments for a frame with origin at the centroid + and perform a principal axis transformation. + + Returns + ---------- + moments : (3,) float + The values of `[Ixx, Iyy, Ixy]` + principal_moments : (2,) float + Principal second moments of inertia: `[Imax, Imin]` + Only returned if `centered`. + alpha : float + Angle by which the polygon needs to be rotated, so the + principal axis align with the X and Y axis. 
+ Only returned if `centered`. + transform : (3, 3) float + Transformation matrix which rotates the polygon by alpha. + Only returned if `centered`. + """ + ... diff --git a/typings/trimesh/path/raster.pyi b/typings/trimesh/path/raster.pyi new file mode 100644 index 00000000..28a8fdc6 --- /dev/null +++ b/typings/trimesh/path/raster.pyi @@ -0,0 +1,36 @@ +""" +This type stub file was generated by pyright. +""" + +""" +raster.py +------------ + +Turn 2D vector paths into raster images using `pillow` +""" + +def rasterize(path, pitch=..., origin=..., resolution=..., fill=..., width=...): # -> Image | None: + """ + Rasterize a Path2D object into a boolean image ("mode 1"). + + Parameters + ------------ + path : Path2D + Original geometry + pitch : float or (2,) float + Length(s) in model space of pixel edges + origin : (2,) float + Origin position in model space + resolution : (2,) int + Resolution in pixel space + fill : bool + If True will return closed regions as filled + width : int + If not None will draw outline this wide in pixels + + Returns + ------------ + raster : PIL.Image + Rasterized version of input as `mode 1` image + """ + ... diff --git a/typings/trimesh/path/repair.pyi b/typings/trimesh/path/repair.pyi new file mode 100644 index 00000000..3a65b8e0 --- /dev/null +++ b/typings/trimesh/path/repair.pyi @@ -0,0 +1,33 @@ +""" +This type stub file was generated by pyright. +""" + +""" +repair.py +-------------- + +Try to fix problems with closed regions. +""" + +def fill_gaps(path, distance=...): # -> None: + """ + For 3D line segments defined by two points, turn + them in to an origin defined as the closest point along + the line to the zero origin as well as a direction vector + and start and end parameter. 
+ + Parameters + ------------ + segments : (n, 2, 3) float + Line segments defined by start and end points + + Returns + -------------- + origins : (n, 3) float + Point on line closest to [0, 0, 0] + vectors : (n, 3) float + Unit line directions + parameters : (n, 2) float + Start and end distance pairs for each line + """ + ... diff --git a/typings/trimesh/path/segments.pyi b/typings/trimesh/path/segments.pyi new file mode 100644 index 00000000..5173abb8 --- /dev/null +++ b/typings/trimesh/path/segments.pyi @@ -0,0 +1,238 @@ +""" +This type stub file was generated by pyright. +""" + +from ..typed import NDArray, float64 + +""" +segments.py +-------------- + +Deal with (n, 2, 3) line segments. +""" + +def segments_to_parameters( + segments: NDArray[float64], +): # -> tuple[Any, NDArray[floating[Any]] | Any, NDArray[floating[Any]] | Any]: + """ + For 3D line segments defined by two points, turn + them in to an origin defined as the closest point along + the line to the zero origin as well as a direction vector + and start and end parameter. + + Parameters + ------------ + segments : (n, 2, 3) float + Line segments defined by start and end points + + Returns + -------------- + origins : (n, 3) float + Point on line closest to [0, 0, 0] + vectors : (n, 3) float + Unit line directions + parameters : (n, 2) float + Start and end distance pairs for each line + """ + ... + +def parameters_to_segments( + origins: NDArray[float64], vectors: NDArray[float64], parameters: NDArray[float64] +): # -> ndarray[Any, dtype[floating[Any]]]: + """ + Convert a parametric line segment representation to + a two point line segment representation + + Parameters + ------------ + origins : (n, 3) float + Line origin point + vectors : (n, 3) float + Unit line directions + parameters : (n, 2) float + Start and end distance pairs for each line + + Returns + -------------- + segments : (n, 2, 3) float + Line segments defined by start and end points + """ + ... 
+ +def colinear_pairs(segments, radius=..., angle=..., length=...): # -> ndarray[Any, dtype[intp]]: + """ + Find pairs of segments which are colinear. + + Parameters + ------------- + segments : (n, 2, (2, 3)) float + Two or three dimensional line segments + radius : float + Maximum radius line origins can differ + and be considered colinear + angle : float + Maximum angle in radians segments can + differ and still be considered colinear + length : None or float + If specified, will additionally require + that pairs have a *vertex* within this distance. + + Returns + ------------ + pairs : (m, 2) int + Indexes of segments which are colinear + """ + ... + +def clean(segments: NDArray[float64], digits: int = ...) -> NDArray[float64]: + """ + Clean up line segments by unioning the ranges of colinear segments. + + Parameters + ------------ + segments : (n, 2, 2) or (n, 2, 3) + Line segments in space. + digits + How many digits to consider. + + Returns + ----------- + cleaned : (m, 2, 2) or (m, 2, 3) + Where `m <= n` + """ + ... + +def split(segments, points, atol=...): # -> NDArray[float64]: + """ + Find any points that lie on a segment (not an endpoint) + and then split that segment into two segments. + + We are basically going to find the distance between + point and both segment vertex, and see if it is with + tolerance of the segment length. + + Parameters + -------------- + segments : (n, 2, (2, 3) float + Line segments in space + points : (n, (2, 3)) float + Points in space + atol : float + Absolute tolerance for distances + + Returns + ------------- + split : (n, 2, (3 | 3) float + Line segments in space, split at vertices + """ + ... + +def unique(segments, digits=...): # -> ndarray[Any, dtype[float64]]: + """ + Find unique non-zero line segments. 
+ + Parameters + ------------ + segments : (n, 2, (2|3)) float + Line segments in space + digits : int + How many digits to consider when merging vertices + + Returns + ----------- + unique : (m, 2, (2|3)) float + Segments with duplicates merged + """ + ... + +def extrude(segments, height, double_sided=...): # -> tuple[NDArray[float64], NDArray[signedinteger[Any]]]: + """ + Extrude 2D line segments into 3D triangles. + + Parameters + ------------- + segments : (n, 2, 2) float + 2D line segments + height : float + Distance to extrude along Z + double_sided : bool + If true, return 4 triangles per segment + + Returns + ------------- + vertices : (n, 3) float + Vertices in space + faces : (n, 3) int + Indices of vertices forming triangles + """ + ... + +def length(segments, summed=...): # -> Any: + """ + Extrude 2D line segments into 3D triangles. + + Parameters + ------------- + segments : (n, 2, 2) float + 2D line segments + height : float + Distance to extrude along Z + double_sided : bool + If true, return 4 triangles per segment + + Returns + ------------- + vertices : (n, 3) float + Vertices in space + faces : (n, 3) int + Indices of vertices forming triangles + """ + ... + +def resample(segments, maxlen, return_index=..., return_count=...): # -> NDArray[Unknown] | list[NDArray[Unknown]]: + """ + Resample line segments until no segment + is longer than maxlen. + + Parameters + ------------- + segments : (n, 2, 2|3) float + 2D line segments + maxlen : float + The maximum length of a line segment + return_index : bool + Return the index of the source segment + return_count : bool + Return how many segments each original was split into + + Returns + ------------- + resampled : (m, 2, 2|3) float + Line segments where no segment is longer than maxlen + index : (m,) int + [OPTIONAL] The index of segments resampled came from + count : (n,) int + [OPTIONAL] The count of the original segments + """ + ... 
+ +def to_svg(segments, digits=..., matrix=..., merge=...): # -> str: + """ + Convert (n, 2, 2) line segments to an SVG path string. + + Parameters + ------------ + segments : (n, 2, 2) float + Line segments to convert + digits : int + Number of digits to include in SVG string + matrix : None or (3, 3) float + Homogeneous 2D transformation to apply before export + + Returns + ----------- + path : str + SVG path string with one line per segment + IE: 'M 0.1 0.2 L 10 12' + """ + ... diff --git a/typings/trimesh/path/simplify.pyi b/typings/trimesh/path/simplify.pyi new file mode 100644 index 00000000..dc2d1a5d --- /dev/null +++ b/typings/trimesh/path/simplify.pyi @@ -0,0 +1,153 @@ +""" +This type stub file was generated by pyright. +""" + +def fit_circle_check(points, scale, prior=..., final=..., verbose=...): # -> dict[str, Unknown] | None: + """ + Fit a circle, and reject the fit if: + * the radius is larger than tol.radius_min*scale or tol.radius_max*scale + * any segment spans more than tol.seg_angle + * any segment is longer than tol.seg_frac*scale + * the fit deviates by more than tol.radius_frac*radius + * the segments on the ends deviate from tangent by more than tol.tangent + + Parameters + --------- + points : (n, d) + List of points which represent a path + prior : (center, radius) tuple + Best guess or None if unknown + scale : float + What is the overall scale of the set of points + verbose : bool + Output log.debug messages for the reasons + for fit rejection only suggested for manual debugging + + Returns + ----------- + if fit is acceptable: + (center, radius) tuple + else: + None + """ + ... + +def is_circle(points, scale, verbose=...): # -> None: + """ + Given a set of points, quickly determine if they represent + a circle or not. 
+ + Parameters + ------------- + points : (n,2 ) float + Points in space + scale : float + Scale of overall drawing + verbose : bool + Print all fit messages or not + + Returns + ------------- + control: (3,2) float, points in space, OR + None, if not a circle + """ + ... + +def merge_colinear(points, scale): # -> NDArray[float64]: + """ + Given a set of points representing a path in space, + merge points which are colinear. + + Parameters + ---------- + points : (n, dimension) float + Points in space + scale : float + Scale of drawing for precision + + Returns + ---------- + merged : (j, d) float + Points with colinear and duplicate + points merged, where (j < n) + """ + ... + +def resample_spline(points, smooth=..., count=..., degree=...): + """ + Resample a path in space, smoothing along a b-spline. + + Parameters + ----------- + points : (n, dimension) float + Points in space + smooth : float + Smoothing distance + count : int or None + Number of samples desired in output + degree : int + Degree of spline polynomial + + Returns + --------- + resampled : (count, dimension) float + Points in space + """ + ... + +def points_to_spline_entity(points, smooth=..., count=...): # -> tuple[BSpline, ndarray[Any, dtype[Unknown]]]: + """ + Create a spline entity from a curve in space + + Parameters + ----------- + points : (n, dimension) float + Points in space + smooth : float + Smoothing distance + count : int or None + Number of samples desired in result + + Returns + --------- + entity : entities.BSpline + Entity object with points indexed at zero + control : (m, dimension) float + New vertices for entity + """ + ... + +def simplify_basic(drawing, process=..., **kwargs): # -> Any: + """ + Merge colinear segments and fit circles. + + Parameters + ----------- + drawing : Path2D + Source geometry, will not be modified + + Returns + ----------- + simplified : Path2D + Original path but with some closed line-loops converted to circles + """ + ... 
+ +def simplify_spline(path, smooth=..., verbose=...): # -> Any: + """ + Replace discrete curves with b-spline or Arc and + return the result as a new Path2D object. + + Parameters + ------------ + path : trimesh.path.Path2D + Input geometry + smooth : float + Distance to smooth + + Returns + ------------ + simplified : Path2D + Consists of Arc and BSpline entities + """ + ... diff --git a/typings/trimesh/path/traversal.pyi b/typings/trimesh/path/traversal.pyi new file mode 100644 index 00000000..ac95ae84 --- /dev/null +++ b/typings/trimesh/path/traversal.pyi @@ -0,0 +1,147 @@ +""" +This type stub file was generated by pyright. +""" + +def vertex_graph(entities): # -> tuple[Graph | None, NDArray[Unknown]]: + """ + Given a set of entity objects generate a networkx.Graph + that represents their vertex nodes. + + Parameters + -------------- + entities : list + Objects with 'closed' and 'nodes' attributes + + Returns + ------------- + graph : networkx.Graph + Graph where node indexes represent vertices + closed : (n,) int + Indexes of entities which are 'closed' + """ + ... + +def vertex_to_entity_path(vertex_path, graph, entities, vertices=...): # -> list[Unknown] | NDArray[Unknown]: + """ + Convert a path of vertex indices to a path of entity indices. + + Parameters + ---------- + vertex_path : (n,) int + Ordered list of vertex indices representing a path + graph : nx.Graph + Vertex connectivity + entities : (m,) list + Entity objects + vertices : (p, dimension) float + Vertex points in space + + Returns + ---------- + entity_path : (q,) int + Entity indices which make up vertex_path + """ + ... + +def closed_paths(entities, vertices): # -> Any: + """ + Paths are lists of entity indices. + We first generate vertex paths using graph cycle algorithms, + and then convert them to entity paths. + + This will also change the ordering of entity.points in place + so a path may be traversed without having to reverse the entity. 
+ + Parameters + ------------- + entities : (n,) entity objects + Entity objects + vertices : (m, dimension) float + Vertex points in space + + Returns + ------------- + entity_paths : sequence of (n,) int + Ordered traversals of entities + """ + ... + +def discretize_path(entities, vertices, path, scale=...): # -> NDArray[Unknown]: + """ + Turn a list of entity indices into a path of connected points. + + Parameters + ----------- + entities : (j,) entity objects + Objects like 'Line', 'Arc', etc. + vertices: (n, dimension) float + Vertex points in space. + path : (m,) int + Indexes of entities + scale : float + Overall scale of drawing used for + numeric tolerances in certain cases + + Returns + ----------- + discrete : (p, dimension) float + Connected points in space that lie on the + path and can be connected with line segments. + """ + ... + +class PathSample: + def __init__(self, points) -> None: ... + def sample(self, distances): ... + def truncate(self, distance): # -> ndarray[Any, dtype[Unknown]]: + """ + Return a truncated version of the path. + Only one vertex (at the endpoint) will be added. + """ + ... + +def resample_path(points, count=..., step=..., step_round=...): # -> NDArray[bool_]: + """ + Given a path along (n,d) points, resample them such that the + distance traversed along the path is constant in between each + of the resampled points. Note that this can produce clipping at + corners, as the original vertices are NOT guaranteed to be in the + new, resampled path. 
+ + ONLY ONE of count or step can be specified + Result can be uniformly distributed (np.linspace) by specifying count + Result can have a specific distance (np.arange) by specifying step + + + Parameters + ---------- + points: (n, d) float + Points in space + count : int, + Number of points to sample evenly (aka np.linspace) + step : float + Distance each step should take along the path (aka np.arange) + + Returns + ---------- + resampled : (j,d) float + Points on the path + """ + ... + +def split(path): # -> NDArray[Unknown]: + """ + Split a Path2D into multiple Path2D objects where each + one has exactly one root curve. + + Parameters + -------------- + path : trimesh.path.Path2D + Input geometry + + Returns + ------------- + split : list of trimesh.path.Path2D + Original geometry as separate paths + """ + ... diff --git a/typings/trimesh/path/util.pyi b/typings/trimesh/path/util.pyi new file mode 100644 index 00000000..015c077e --- /dev/null +++ b/typings/trimesh/path/util.pyi @@ -0,0 +1,19 @@ +""" +This type stub file was generated by pyright. +""" + +def concatenate(paths): # -> Any: + """ + Concatenate multiple paths into a single path. + + Parameters + ------------- + paths : (n,) Path + Path objects to concatenate + + Returns + ------------- + concat : Path, Path2D, or Path3D + Concatenated result + """ + ... diff --git a/typings/trimesh/permutate.pyi b/typings/trimesh/permutate.pyi new file mode 100644 index 00000000..206d6e7e --- /dev/null +++ b/typings/trimesh/permutate.pyi @@ -0,0 +1,81 @@ +""" +This type stub file was generated by pyright. +""" + +""" +permutate.py +------------- + +Randomly deform meshes in different ways. +""" + +def transform(mesh, translation_scale=...): # -> Any: + """ + Return a permutated variant of a mesh by randomly reording faces + and rotatating + translating a mesh by a random matrix. 
+ + Parameters + ---------- + mesh : trimesh.Trimesh + Mesh, will not be altered by this function + + Returns + ---------- + permutated : trimesh.Trimesh + Mesh with same faces as input mesh but reordered + and rigidly transformed in space. + """ + ... + +def noise(mesh, magnitude=...): # -> Any: + """ + Add gaussian noise to every vertex of a mesh, making + no effort to maintain topology or sanity. + + Parameters + ---------- + mesh : trimesh.Trimesh + Input geometry, will not be altered + magnitude : float + What is the maximum distance per axis we can displace a vertex. + If None, value defaults to (mesh.scale / 100.0) + + Returns + ---------- + permutated : trimesh.Trimesh + Input mesh with noise applied + """ + ... + +def tessellation(mesh): # -> Any: + """ + Subdivide each face of a mesh into three faces with the new vertex + randomly placed inside the old face. + + This produces a mesh with exactly the same surface area and volume + but with different tessellation. + + Parameters + ------------ + mesh : trimesh.Trimesh + Input geometry + + Returns + ---------- + permutated : trimesh.Trimesh + Mesh with remeshed facets + """ + ... + +class Permutator: + def __init__(self, mesh) -> None: + """ + A convenience object to get permutated versions of a mesh. + """ + ... + def transform(self, translation_scale=...): # -> Any: + ... + def noise(self, magnitude=...): # -> Any: + ... + def tessellation(self): # -> Any: + ... diff --git a/typings/trimesh/points.pyi b/typings/trimesh/points.pyi new file mode 100644 index 00000000..ae337b9e --- /dev/null +++ b/typings/trimesh/points.pyi @@ -0,0 +1,440 @@ +""" +This type stub file was generated by pyright. +""" + +from . import caching +from .parent import Geometry3D + +""" +points.py +------------- + +Functions dealing with (n, d) points. +""" + +def point_plane_distance(points, plane_normal, plane_origin=...): # -> Any: + """ + The minimum perpendicular distance of a point to a plane. 
+ + Parameters + ----------- + points : (n, 3) float + Points in space + plane_normal : (3,) float + Unit normal vector + plane_origin : (3,) float + Plane origin in space + + Returns + ------------ + distances : (n,) float + Distance from point to plane + """ + ... + +def major_axis(points): # -> tuple[Unknown, Any]: + """ + Returns an approximate vector representing the major + axis of the passed points. + + Parameters + ------------- + points : (n, dimension) float + Points in space + + Returns + ------------- + axis : (dimension,) float + Vector along approximate major axis + """ + ... + +def plane_fit(points): # -> tuple[Any, Unknown]: + """ + Fit a plane to points using SVD. + + Parameters + --------- + points : (n, 3) float or (p, n, 3,) float + 3D points in space + Second option allows to simultaneously compute + p centroids and normals + + Returns + --------- + C : (3,) float or (p, 3,) float + Point on the plane + N : (3,) float or (p, 3,) float + Unit normal vector of plane + """ + ... + +def radial_sort(points, origin, normal, start=...): + """ + Sorts a set of points radially (by angle) around an + axis specified by origin and normal vector. + + Parameters + -------------- + points : (n, 3) float + Points in space + origin : (3,) float + Origin to sort around + normal : (3,) float + Vector to sort around + start : (3,) float + Vector to specify start position in counter-clockwise + order viewing in direction of normal, MUST not be + parallel with normal + + Returns + -------------- + ordered : (n, 3) float + Same as input points but reordered + """ + ... + +def project_to_plane( + points, plane_normal, plane_origin, transform=..., return_transform=..., return_planar=... +): # -> tuple[ndarray[Any, dtype[float64]] | Any, NDArray[floating[Any]]] | ndarray[Any, dtype[float64]] | Any: + """ + Project (n, 3) points onto a plane. + + Parameters + ----------- + points : (n, 3) float + Points in space. 
+ plane_normal : (3,) float + Unit normal vector of plane + plane_origin : (3,) + Origin point of plane + transform : None or (4, 4) float + Homogeneous transform, if specified, normal+origin are overridden + return_transform : bool + Returns the (4, 4) matrix used or not + return_planar : bool + Return (n, 2) points rather than (n, 3) points + """ + ... + +def remove_close(points, radius): # -> tuple[Unknown, NDArray[Any]]: + """ + Given an (n, m) array of points return a subset of + points where no point is closer than radius. + + Parameters + ------------ + points : (n, dimension) float + Points in space + radius : float + Minimum radius between result points + + Returns + ------------ + culled : (m, dimension) float + Points in space + mask : (n,) bool + Which points from the original points were returned + """ + ... + +def k_means(points, k, **kwargs): # -> tuple[Any | Unknown, Any]: + """ + Find k centroids that attempt to minimize the k- means problem: + https://en.wikipedia.org/wiki/Metric_k-center + + Parameters + ---------- + points: (n, d) float + Points in space + k : int + Number of centroids to compute + **kwargs : dict + Passed directly to scipy.cluster.vq.kmeans + + Returns + ---------- + centroids : (k, d) float + Points in some space + labels: (n) int + Indexes for which points belong to which centroid + """ + ... + +def tsp(points, start=...): # -> tuple[NDArray[signedinteger[Any]], NDArray[floating[_64Bit]]]: + """ + Find an ordering of points where each is visited and + the next point is the closest in euclidean distance, + and if there are multiple points with equal distance + go to an arbitrary one. + + Assumes every point is visitable from every other point, + i.e. the travelling salesman problem on a fully connected + graph. It is not a MINIMUM traversal; rather it is a + "not totally goofy traversal, quickly." 
On random points + this traversal is often ~20x shorter than random ordering, + and executes on 1000 points in around 29ms on a 2014 i7. + + Parameters + --------------- + points : (n, dimension) float + ND points in space + start : int + The index of points we should start at + + Returns + --------------- + traversal : (n,) int + Ordered traversal visiting every point + distances : (n - 1,) float + The euclidean distance between points in traversal + """ + ... + +def plot_points(points, show=...): # -> None: + """ + Plot an (n, 3) list of points using matplotlib + + Parameters + ------------- + points : (n, 3) float + Points in space + show : bool + If False, will not show until plt.show() is called + """ + ... + +class PointCloud(Geometry3D): + """ + Hold 3D points in an object which can be visualized + in a scene. + """ + + def __init__(self, vertices, colors=..., metadata=..., **kwargs) -> None: + """ + Load an array of points into a PointCloud object. + + Parameters + ------------- + vertices : (n, 3) float + Points in space + colors : (n, 4) uint8 or None + RGBA colors for each point + metadata : dict or None + Metadata about points + """ + ... + def __setitem__(self, *args, **kwargs): ... + def __getitem__(self, *args, **kwargs): # -> ndarray[Any, dtype[float64]]: + ... + @property + def shape(self): # -> _Shape: + """ + Get the shape of the pointcloud + + Returns + ---------- + shape : (2,) int + Shape of vertex array + """ + ... + @property + def is_empty(self): # -> bool: + """ + Are there any vertices defined or not. + + Returns + ---------- + empty : bool + True if no vertices defined + """ + ... + def copy(self): # -> PointCloud: + """ + Safely get a copy of the current point cloud. + + Copied objects will have emptied caches to avoid memory + issues and so may be slow on initial operations until + caches are regenerated. + + Current object will *not* have its cache cleared. 
+ + Returns + --------- + copied : trimesh.PointCloud + Copy of current point cloud + """ + ... + def hash(self): # -> int: + """ + Get a hash of the current vertices. + + Returns + ---------- + hash : str + Hash of self.vertices + """ + ... + def crc(self): + """ + Get a CRC hash of the current vertices. + + Returns + ---------- + crc : int + Hash of self.vertices + """ + ... + def merge_vertices(self): # -> None: + """ + Merge vertices closer than tol.merge (default: 1e-8) + """ + ... + def apply_transform(self, transform): # -> Self@PointCloud: + """ + Apply a homogeneous transformation to the PointCloud + object in- place. + + Parameters + -------------- + transform : (4, 4) float + Homogeneous transformation to apply to PointCloud + """ + ... + @property + def bounds(self): # -> NDArray[Any | Unknown]: + """ + The axis aligned bounds of the PointCloud + + Returns + ------------ + bounds : (2, 3) float + Minimum, Maximum verteex + """ + ... + @property + def extents(self): # -> Any: + """ + The size of the axis aligned bounds + + Returns + ------------ + extents : (3,) float + Edge length of axis aligned bounding box + """ + ... + @property + def centroid(self): # -> Any: + """ + The mean vertex position + + Returns + ------------ + centroid : (3,) float + Mean vertex position + """ + ... + @property + def vertices(self): # -> NDArray[float64]: + """ + Vertices of the PointCloud + + Returns + ------------ + vertices : (n, 3) float + Points in the PointCloud + """ + ... + @vertices.setter + def vertices(self, values): # -> None: + """ + Assign vertex values to the point cloud. + + Parameters + -------------- + values : (n, 3) float + Points in space + """ + ... + @property + def colors(self): # -> ndarray[Any, Unknown] | TrackedArray: + """ + Stored per- point color + + Returns + ---------- + colors : (len(self.vertices), 4) np.uint8 + Per- point RGBA color + """ + ... + @colors.setter + def colors(self, data): # -> None: + ... 
+ @caching.cache_decorator + def kdtree(self): # -> cKDTree[None]: + """ + Return a scipy.spatial.cKDTree of the vertices of the mesh. + Not cached as this lead to observed memory issues and segfaults. + + Returns + --------- + tree : scipy.spatial.cKDTree + Contains mesh.vertices + """ + ... + @caching.cache_decorator + def convex_hull(self): # -> Trimesh: + """ + A convex hull of every point. + + Returns + ------------- + convex_hull : trimesh.Trimesh + A watertight mesh of the hull of the points + """ + ... + def scene(self): # -> Scene: + """ + A scene containing just the PointCloud + + Returns + ---------- + scene : trimesh.Scene + Scene object containing this PointCloud + """ + ... + def show(self, **kwargs): # -> None: + """ + Open a viewer window displaying the current PointCloud + """ + ... + def export(self, file_obj=..., file_type=..., **kwargs): # -> dict[Unknown, Unknown]: + """ + Export the current pointcloud to a file object. + If file_obj is a filename, file will be written there. + Supported formats are xyz + Parameters + ------------ + file_obj: open writeable file object + str, file name where to save the pointcloud + None, if you would like this function to return the export blob + file_type: str + Which file type to export as. + If file name is passed this is not required + """ + ... + def query(self, input_points, **kwargs): + """ + Find the the closest points and associated attributes from this PointCloud. + Parameters + ------------ + input_points : (n, 3) float + Input query points + kwargs : dict + Arguments for proximity.query_from_points + result : proximity.NearestQueryResult + Result of the query. + """ + ... + def __add__(self, other): # -> PointCloud: + ... diff --git a/typings/trimesh/poses.pyi b/typings/trimesh/poses.pyi new file mode 100644 index 00000000..3c04cfa5 --- /dev/null +++ b/typings/trimesh/poses.pyi @@ -0,0 +1,60 @@ +""" +This type stub file was generated by pyright. 
+""" + +""" +poses.py +----------- + +Find stable orientations of meshes. +""" + +def compute_stable_poses( + mesh, center_mass=..., sigma=..., n_samples=..., threshold=... +): # -> tuple[ndarray[Any, dtype[Unknown]], ndarray[Any, dtype[Unknown]]]: + """ + Computes stable orientations of a mesh and their quasi-static probabilities. + + This method samples the location of the center of mass from a multivariate + gaussian with the mean at the center of mass, and a covariance + equal to and identity matrix times sigma, over n_samples. + + For each sample, it computes the stable resting poses of the mesh on a + a planar workspace and evaluates the probabilities of landing in + each pose if the object is dropped onto the table randomly. + + This method returns the 4x4 homogeneous transform matrices that place + the shape against the planar surface with the z-axis pointing upwards + and a list of the probabilities for each pose. + + The transforms and probabilties that are returned are sorted, with the + most probable pose first. + + Parameters + ---------- + mesh : trimesh.Trimesh + The target mesh + com : (3,) float + Rhe object center of mass. If None, this method + assumes uniform density and watertightness and + computes a center of mass explicitly + sigma : float + Rhe covariance for the multivariate gaussian used + to sample center of mass locations + n_samples : int + The number of samples of the center of mass location + threshold : float + The probability value at which to threshold + returned stable poses + + Returns + ------- + transforms : (n, 4, 4) float + The homogeneous matrices that transform the + object to rest in a stable pose, with the + new z-axis pointing upwards from the table + and the object just touching the table. + probs : (n,) float + Probability in (0, 1) for each pose + """ + ... 
diff --git a/typings/trimesh/primitives.pyi b/typings/trimesh/primitives.pyi new file mode 100644 index 00000000..dc35f0c1 --- /dev/null +++ b/typings/trimesh/primitives.pyi @@ -0,0 +1,557 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +import numpy as np + +from . import caching +from .base import Trimesh + +""" +primitives.py +---------------- + +Subclasses of Trimesh objects that are parameterized as primitives. + +Useful because you can move boxes and spheres around +and then use trimesh operations on them at any point. +""" +_IDENTITY = np.eye(4) + +class Primitive(Trimesh): + """ + Geometric Primitives which are a subclass of Trimesh. + Mesh is generated lazily when vertices or faces are requested. + """ + + __copy__ = ... + __deepcopy__ = ... + def __init__(self) -> None: ... + def __repr__(self): # -> str: + ... + @property + def faces(self): # -> None: + ... + @faces.setter + def faces(self, values): # -> None: + ... + @property + def vertices(self): # -> None: + ... + @vertices.setter + def vertices(self, values): # -> None: + ... + @property + def face_normals(self): # -> NDArray[float64] | None: + ... + @face_normals.setter + def face_normals(self, values): # -> None: + ... + @property + def transform(self): + """ + The transform of the Primitive object. + + Returns + ------------- + transform : (4, 4) float + Homogeneous transformation matrix + """ + ... + @abc.abstractmethod + def to_dict(self): + """ + Should be implemented by each primitive. + """ + ... + def copy(self, **kwargs): # -> Self@Primitive: + """ + Return a copy of the Primitive object. + + Returns + ------------- + copied : object + Copy of current primitive + """ + ... + def to_mesh(self, **kwargs): # -> Trimesh: + """ + Return a copy of the Primitive object as a Trimesh. + + Parameters + ----------- + kwargs : dict + Passed to the Trimesh object constructor. + + Returns + ------------ + mesh : trimesh.Trimesh + Tessellated version of the primitive. 
+ """ + ... + def apply_transform(self, matrix): # -> Self@Primitive | Box | Cylinder | Capsule | Sphere: + """ + Apply a transform to the current primitive by + applying a new transform on top of existing + `self.primitive.transform`. If the matrix + contains scaling it will change parameters + like `radius` or `height` automatically. + + Parameters + ------------ + matrix: (4, 4) float + Homogeneous transformation + """ + ... + +class PrimitiveAttributes: + """ + Hold the mutable data which defines a primitive. + """ + + def __init__(self, parent, defaults, kwargs, mutable=...) -> None: + """ + Hold the attributes for a Primitive. + + Parameters + ------------ + parent : Primitive + Parent object reference. + defaults : dict + The default values for this primitive type. + kwargs : dict + User-passed values, i.e. {'radius': 10.0} + """ + ... + @property + def __doc__(self): # -> str: + ... + def __getattr__(self, key): ... + def __setattr__(self, key, value): # -> None: + ... + def __dir__(self): # -> list[str]: + ... + +class Cylinder(Primitive): + def __init__(self, radius=..., height=..., transform=..., sections=..., mutable=...) -> None: + """ + Create a Cylinder Primitive, a subclass of Trimesh. + + Parameters + ------------- + radius : float + Radius of cylinder + height : float + Height of cylinder + transform : (4, 4) float + Homogeneous transformation matrix + sections : int + Number of facets in circle. + mutable : bool + Are extents and transform mutable after creation. + """ + ... + @caching.cache_decorator + def volume(self): + """ + The analytic volume of the cylinder primitive. + + Returns + --------- + volume : float + Volume of the cylinder + """ + ... + @caching.cache_decorator + def moment_inertia(self): # -> Any | NDArray[floating[Any]]: + """ + The analytic inertia tensor of the cylinder primitive. + + Returns + ---------- + tensor: (3, 3) float + 3D inertia tensor + """ + ... 
+ @caching.cache_decorator + def direction(self): # -> Any: + """ + The direction of the cylinder's axis. + + Returns + -------- + axis: (3,) float, vector along the cylinder axis + """ + ... + @property + def segment(self): # -> Any: + """ + A line segment which if inflated by cylinder radius + would represent the cylinder primitive. + + Returns + ------------- + segment : (2, 3) float + Points representing a single line segment + """ + ... + def to_dict(self): # -> dict[str, Unknown]: + """ + Get a copy of the current Cylinder primitive as + a JSON-serializable dict that matches the schema + in `trimesh/resources/schema/cylinder.schema.json` + + Returns + ---------- + as_dict : dict + Serializable data for this primitive. + """ + ... + def buffer(self, distance): # -> Cylinder: + """ + Return a cylinder primitive which covers the source + cylinder by distance: radius is inflated by distance + height by twice the distance. + + Parameters + ------------ + distance : float + Distance to inflate cylinder radius and height + + Returns + ------------- + buffered : Cylinder + Cylinder primitive inflated by distance + """ + ... + +class Capsule(Primitive): + def __init__(self, radius=..., height=..., transform=..., sections=..., mutable=...) -> None: + """ + Create a Capsule Primitive, a subclass of Trimesh. + + Parameters + ---------- + radius : float + Radius of cylinder + height : float + Height of cylinder + transform : (4, 4) float + Transformation matrix + sections : int + Number of facets in circle + mutable : bool + Are extents and transform mutable after creation. + """ + ... + @property + def transform(self): ... + def to_dict(self): # -> dict[str, Unknown]: + """ + Get a copy of the current Capsule primitive as + a JSON-serializable dict that matches the schema + in `trimesh/resources/schema/capsule.schema.json` + + Returns + ---------- + as_dict : dict + Serializable data for this primitive. + """ + ... 
+ @caching.cache_decorator + def direction(self): # -> Any: + """ + The direction of the capsule's axis. + + Returns + -------- + axis : (3,) float + Vector along the cylinder axis + """ + ... + +class Sphere(Primitive): + def __init__(self, radius=..., center=..., transform=..., subdivisions=..., mutable=...) -> None: + """ + Create a Sphere Primitive, a subclass of Trimesh. + + Parameters + ---------- + radius : float + Radius of sphere + center : None or (3,) float + Center of sphere. + transform : None or (4, 4) float + Full homogeneous transform. Pass `center` OR `transform. + subdivisions : int + Number of subdivisions for icosphere. + mutable : bool + Are extents and transform mutable after creation. + """ + ... + @property + def center(self): ... + @center.setter + def center(self, value): # -> None: + ... + def to_dict(self): # -> dict[str, Unknown]: + """ + Get a copy of the current Sphere primitive as + a JSON-serializable dict that matches the schema + in `trimesh/resources/schema/sphere.schema.json` + + Returns + ---------- + as_dict : dict + Serializable data for this primitive. + """ + ... + @property + def bounds(self): # -> NDArray[Unknown]: + ... + @property + def bounding_box_oriented(self): # -> Any: + ... + @caching.cache_decorator + def area(self): + """ + Surface area of the current sphere primitive. + + Returns + -------- + area: float, surface area of the sphere Primitive + """ + ... + @caching.cache_decorator + def volume(self): + """ + Volume of the current sphere primitive. + + Returns + -------- + volume: float, volume of the sphere Primitive + """ + ... + @caching.cache_decorator + def moment_inertia(self): + """ + The analytic inertia tensor of the sphere primitive. + + Returns + ---------- + tensor: (3, 3) float + 3D inertia tensor. + """ + ... + +class Box(Primitive): + def __init__(self, extents=..., transform=..., bounds=..., mutable=...) 
-> None: + """ + Create a Box Primitive as a subclass of Trimesh + + Parameters + ---------- + extents : Optional[ndarray] (3,) float + Length of each side of the 3D box. + transform : Optional[ndarray] (4, 4) float + Homogeneous transformation matrix for box center. + bounds : Optional[ndarray] (2, 3) float + Axis aligned bounding box, if passed extents and + transform will be derived from this. + mutable : bool + Are extents and transform mutable after creation. + """ + ... + def to_dict(self): # -> dict[str, Unknown]: + """ + Get a copy of the current Box primitive as + a JSON-serializable dict that matches the schema + in `trimesh/resources/schema/box.schema.json` + + Returns + ---------- + as_dict : dict + Serializable data for this primitive. + """ + ... + @property + def transform(self): ... + def sample_volume(self, count): # -> NDArray[float64] | Any: + """ + Return random samples from inside the volume of the box. + + Parameters + ------------- + count : int + Number of samples to return + + Returns + ---------- + samples : (count, 3) float + Points inside the volume + """ + ... + def sample_grid(self, count=..., step=...): # -> NDArray[float64] | Any: + """ + Return a 3D grid which is contained by the box. + Samples are either 'step' distance apart, or there are + 'count' samples per box side. + + Parameters + ----------- + count : int or (3,) int + If specified samples are spaced with np.linspace + step : float or (3,) float + If specified samples are spaced with np.arange + + Returns + ----------- + grid : (n, 3) float + Points inside the box + """ + ... + @property + def is_oriented(self): # -> bool: + """ + Returns whether or not the current box is rotated at all. + """ + ... + @caching.cache_decorator + def volume(self): # -> float: + """ + Volume of the box Primitive. + + Returns + -------- + volume : float + Volume of box. + """ + ... + def as_outline(self): # -> Path | Scene: + """ + Return a Path3D containing the outline of the box. 
+ + Returns + ----------- + outline : trimesh.path.Path3D + Outline of box primitive + """ + ... + +class Extrusion(Primitive): + def __init__(self, polygon=..., transform=..., height=..., mutable=...) -> None: + """ + Create an Extrusion primitive, which + is a subclass of Trimesh. + + Parameters + ---------- + polygon : shapely.geometry.Polygon + Polygon to extrude + transform : (4, 4) float + Transform to apply after extrusion + height : float + Height to extrude polygon by + mutable : bool + Are extents and transform mutable after creation. + """ + ... + @caching.cache_decorator + def area(self): + """ + The surface area of the primitive extrusion. + + Calculated from polygon and height to avoid mesh creation. + + Returns + ---------- + area: float + Surface area of 3D extrusion + """ + ... + @caching.cache_decorator + def volume(self): + """ + The volume of the Extrusion primitive. + Calculated from polygon and height to avoid mesh creation. + + Returns + ---------- + volume : float + Volume of 3D extrusion + """ + ... + @caching.cache_decorator + def direction(self): # -> Any: + """ + Based on the extrudes transform what is the + vector along which the polygon will be extruded. + + Returns + --------- + direction : (3,) float + Unit direction vector + """ + ... + @property + def origin(self): + """ + Based on the extrude transform what is the + origin of the plane it is extruded from. + + Returns + ----------- + origin : (3,) float + Origin of extrusion plane + """ + ... + @property + def transform(self): ... + @caching.cache_decorator + def bounding_box_oriented(self): # -> Box: + ... + def slide(self, distance): # -> None: + """ + Alter the transform of the current extrusion to slide it + along its extrude_direction vector + + Parameters + ----------- + distance : float + Distance along self.extrude_direction to move + """ + ... 
+ def buffer(self, distance, distance_height=..., **kwargs): # -> Self@Extrusion: + """ + Return a new Extrusion object which is expanded in profile + and in height by a specified distance. + + Parameters + -------------- + distance : float + Distance to buffer polygon + distance_height : float + Distance to buffer above and below extrusion + kwargs : dict + Passed to Extrusion constructor + + Returns + ---------- + buffered : primitives.Extrusion + Extrusion object with new values + """ + ... + def to_dict(self): # -> dict[str, Unknown]: + """ + Get a copy of the current Extrusion primitive as + a JSON-serializable dict that matches the schema + in `trimesh/resources/schema/extrusion.schema.json` + + Returns + ---------- + as_dict : dict + Serializable data for this primitive. + """ + ... diff --git a/typings/trimesh/proximity.pyi b/typings/trimesh/proximity.pyi new file mode 100644 index 00000000..d471d3ff --- /dev/null +++ b/typings/trimesh/proximity.pyi @@ -0,0 +1,258 @@ +""" +This type stub file was generated by pyright. +""" + +from .constants import log_time + +""" +proximity.py +--------------- + +Query mesh- point proximity. +""" + +def nearby_faces(mesh, points): # -> list[list[Unknown]]: + """ + For each point find nearby faces relatively quickly. + + The closest point on the mesh to the queried point is guaranteed to be + on one of the faces listed. + + Does this by finding the nearest vertex on the mesh to each point, and + then returns all the faces that intersect the axis aligned bounding box + centered at the queried point and extending to the nearest vertex. + + Parameters + ---------- + mesh : trimesh.Trimesh + Mesh to query. + points : (n, 3) float + Points in space + + Returns + ----------- + candidates : (points,) int + Sequence of indexes for mesh.faces + """ + ... 
+ +def closest_point_naive(mesh, points): # -> tuple[NDArray[Any], NDArray[floating[Any]], NDArray[Any]]: + """ + Given a mesh and a list of points find the closest point + on any triangle. + + Does this by constructing a very large intermediate array and + comparing every point to every triangle. + + Parameters + ---------- + mesh : Trimesh + Takes mesh to have same interfaces as `closest_point` + points : (m, 3) float + Points in space + + Returns + ---------- + closest : (m, 3) float + Closest point on triangles for each point + distance : (m,) float + Distances between point and triangle + triangle_id : (m,) int + Index of triangle containing closest point + """ + ... + +def closest_point(mesh, points): # -> tuple[ndarray[Any, dtype[float64]], Any, Any]: + """ + Given a mesh and a list of points find the closest point + on any triangle. + + Parameters + ---------- + mesh : trimesh.Trimesh + Mesh to query + points : (m, 3) float + Points in space + + Returns + ---------- + closest : (m, 3) float + Closest point on triangles for each point + distance : (m,) float + Distance to mesh. + triangle_id : (m,) int + Index of triangle containing closest point + """ + ... + +def signed_distance(mesh, points): # -> Any: + """ + Find the signed distance from a mesh to a list of points. + + * Points OUTSIDE the mesh will have NEGATIVE distance + * Points within tol.merge of the surface will have POSITIVE distance + * Points INSIDE the mesh will have POSITIVE distance + + Parameters + ----------- + mesh : trimesh.Trimesh + Mesh to query. + points : (n, 3) float + Points in space + + Returns + ---------- + signed_distance : (n,) float + Signed distance from point to mesh + """ + ... + +class NearestQueryResult: + """ + Stores the nearest points and attributes for nearest points queries. + """ + + def __init__(self) -> None: ... + def has_normals(self): # -> bool: + ... + +class ProximityQuery: + """ + Proximity queries for the current mesh. 
+ """ + + def __init__(self, mesh) -> None: ... + @log_time + def on_surface(self, points): # -> tuple[ndarray[Any, dtype[float64]], Any, Any]: + """ + Given list of points, for each point find the closest point + on any triangle of the mesh. + + Parameters + ---------- + points : (m,3) float, points in space + + Returns + ---------- + closest : (m, 3) float + Closest point on triangles for each point + distance : (m,) float + Distance to surface + triangle_id : (m,) int + Index of closest triangle for each point. + """ + ... + def vertex(self, points): + """ + Given a set of points, return the closest vertex index to each point + + Parameters + ---------- + points : (n, 3) float + Points in space + + Returns + ---------- + distance : (n,) float + Distance from source point to vertex. + vertex_id : (n,) int + Index of mesh.vertices for closest vertex. + """ + ... + def signed_distance(self, points): # -> Any: + """ + Find the signed distance from a mesh to a list of points. + + * Points OUTSIDE the mesh will have NEGATIVE distance + * Points within tol.merge of the surface will have POSITIVE distance + * Points INSIDE the mesh will have POSITIVE distance + + Parameters + ----------- + points : (n, 3) float + Points in space + + Returns + ---------- + signed_distance : (n,) float + Signed distance from point to mesh. + """ + ... + +def longest_ray(mesh, points, directions): # -> ndarray[Any, dtype[Any]]: + """ + Find the lengths of the longest rays which do not intersect the mesh + cast from a list of points in the provided directions. + + Parameters + ----------- + points : (n, 3) float + Points in space. + directions : (n, 3) float + Directions of rays. + + Returns + ---------- + signed_distance : (n,) float + Length of rays. + """ + ... + +def max_tangent_sphere( + mesh, points, inwards=..., normals=..., threshold=..., max_iter=... 
+): # -> tuple[NDArray[floating[Any]] | Unknown, NDArray[floating[Any]]]: + """ + Find the center and radius of the sphere which is tangent to + the mesh at the given point and at least one more point with no + non-tangential intersections with the mesh. + + Masatomo Inui, Nobuyuki Umezu & Ryohei Shimane (2016) + Shrinking sphere: + A parallel algorithm for computing the thickness of 3D objects, + Computer-Aided Design and Applications, 13:2, 199-207, + DOI: 10.1080/16864360.2015.1084186 + + Parameters + ---------- + points : (n, 3) float + Points in space. + inwards : bool + Whether to have the sphere inside or outside the mesh. + normals : (n, 3) float or None + Normals of the mesh at the given points + if is None computed automatically. + + Returns + ---------- + centers : (n,3) float + Centers of spheres + radii : (n,) float + Radii of spheres + """ + ... + +def thickness( + mesh, points, exterior=..., normals=..., method=... +): # -> NDArray[floating[Any]] | ndarray[Any, dtype[Any]]: + """ + Find the thickness of the mesh at the given points. + + Parameters + ---------- + points : (n, 3) float + Points in space + exterior : bool + Whether to compute the exterior thickness + (a.k.a. reach) + normals : (n, 3) float + Normals of the mesh at the given points + If is None computed automatically. + method : string + One of 'max_sphere' or 'ray' + + Returns + ---------- + thickness : (n,) float + Thickness at given points. + """ + ... diff --git a/typings/trimesh/ray/__init__.pyi b/typings/trimesh/ray/__init__.pyi new file mode 100644 index 00000000..f2755fea --- /dev/null +++ b/typings/trimesh/ray/__init__.pyi @@ -0,0 +1,8 @@ +""" +This type stub file was generated by pyright. +""" + +from . import ray_pyembree, ray_triangle + +has_embree = ... 
+__all__ = ["ray_triangle", "ray_pyembree"] diff --git a/typings/trimesh/ray/ray_pyembree.pyi b/typings/trimesh/ray/ray_pyembree.pyi new file mode 100644 index 00000000..1ca69ee7 --- /dev/null +++ b/typings/trimesh/ray/ray_pyembree.pyi @@ -0,0 +1,150 @@ +""" +This type stub file was generated by pyright. +""" + +import numpy as np + +from ..constants import log_time + +""" +Ray queries using the embreex package with the +API wrapped to match our native raytracer. +""" +_ray_offset_factor = ... +_ray_offset_floor = ... +_embree_dtype = np.float32 + +class RayMeshIntersector: + def __init__(self, geometry, scale_to_box=...) -> None: + """ + Do ray- mesh queries. + + Parameters + ------------- + geometry : Trimesh object + Mesh to do ray tests on + scale_to_box : bool + If true, will scale mesh to approximate + unit cube to avoid problems with extreme + large or small meshes. + """ + ... + def intersects_location( + self, ray_origins, ray_directions, multiple_hits=... + ): # -> tuple[NDArray[Any] | NDArray[Unknown], NDArray[Unknown], NDArray[Unknown]]: + """ + Return the location of where a ray hits a surface. + + Parameters + ---------- + ray_origins : (n, 3) float + Origins of rays + ray_directions : (n, 3) float + Direction (vector) of rays + + Returns + --------- + locations : (m) sequence of (p, 3) float + Intersection points + index_ray : (m,) int + Indexes of ray + index_tri : (m,) int + Indexes of mesh.faces + """ + ... + @log_time + def intersects_id( + self, ray_origins, ray_directions, multiple_hits=..., max_hits=..., return_locations=... + ): # -> tuple[NDArray[Unknown], NDArray[Unknown], NDArray[Any] | NDArray[Unknown]] | tuple[NDArray[Unknown], NDArray[Unknown]]: + """ + Find the triangles hit by a list of rays, including + optionally multiple hits along a single ray. 
+ + + Parameters + ---------- + ray_origins : (n, 3) float + Origins of rays + ray_directions : (n, 3) float + Direction (vector) of rays + multiple_hits : bool + If True will return every hit along the ray + If False will only return first hit + max_hits : int + Maximum number of hits per ray + return_locations : bool + Should we return hit locations or not + + Returns + --------- + index_tri : (m,) int + Indexes of mesh.faces + index_ray : (m,) int + Indexes of ray + locations : (m) sequence of (p, 3) float + Intersection points, only returned if return_locations + """ + ... + @log_time + def intersects_first(self, ray_origins, ray_directions): # -> Any: + """ + Find the index of the first triangle a ray hits. + + + Parameters + ---------- + ray_origins : (n, 3) float + Origins of rays + ray_directions : (n, 3) float + Direction (vector) of rays + + Returns + ---------- + triangle_index : (n,) int + Index of triangle ray hit, or -1 if not hit + """ + ... + def intersects_any(self, ray_origins, ray_directions): # -> Any: + """ + Check if a list of rays hits the surface. + + + Parameters + ----------- + ray_origins : (n, 3) float + Origins of rays + ray_directions : (n, 3) float + Direction (vector) of rays + + Returns + ---------- + hit : (n,) bool + Did each ray hit the surface + """ + ... + def contains_points(self, points): # -> NDArray[Any]: + """ + Check if a mesh contains a list of points, using ray tests. + + If the point is on the surface of the mesh, behavior is undefined. + + Parameters + --------- + points: (n, 3) points in space + + Returns + --------- + contains: (n,) bool + Whether point is inside mesh or not + """ + ... + +class _EmbreeWrap: + """ + A light wrapper for Embreex scene objects which + allows queries to be scaled to help with precision + issues, as well as selecting the correct dtypes. + """ + + def __init__(self, vertices, faces, scale) -> None: ... + def run(self, origins, normals, **kwargs): ... 
diff --git a/typings/trimesh/ray/ray_triangle.pyi b/typings/trimesh/ray/ray_triangle.pyi new file mode 100644 index 00000000..0e4239c0 --- /dev/null +++ b/typings/trimesh/ray/ray_triangle.pyi @@ -0,0 +1,197 @@ +""" +This type stub file was generated by pyright. +""" + +""" +A basic slow implementation of ray- triangle queries. +""" + +class RayMeshIntersector: + """ + An object to query a mesh for ray intersections. + Precomputes an r-tree for each triangle on the mesh. + """ + + def __init__(self, mesh) -> None: ... + def intersects_id( + self, ray_origins, ray_directions, return_locations=..., multiple_hits=..., **kwargs + ): # -> tuple[NDArray[int64], NDArray[int64], NDArray[float64] | Any] | tuple[NDArray[int64], NDArray[int64]]: + """ + Find the intersections between the current mesh and an + array of rays. + + Parameters + ------------ + ray_origins : (m, 3) float + Ray origin points + ray_directions : (m, 3) float + Ray direction vectors + multiple_hits : bool + Consider multiple hits of each ray or not + return_locations : bool + Return hit locations or not + + Returns + ----------- + index_triangle : (h,) int + Index of triangles hit + index_ray : (h,) int + Index of ray that hit triangle + locations : (h, 3) float + [optional] Position of intersection in space + """ + ... + def intersects_location( + self, ray_origins, ray_directions, **kwargs + ): # -> tuple[NDArray[float64] | Any, NDArray[int64], NDArray[int64]]: + """ + Return unique cartesian locations where rays hit the mesh. + If you are counting the number of hits a ray had, this method + should be used as if only the triangle index is used on- edge hits + will be counted twice. 
+ + Parameters + ------------ + ray_origins : (m, 3) float + Ray origin points + ray_directions : (m, 3) float + Ray direction vectors + + Returns + --------- + locations : (n) sequence of (m,3) float + Intersection points + index_ray : (n,) int + Array of ray indexes + index_tri: (n,) int + Array of triangle (face) indexes + """ + ... + def intersects_first(self, ray_origins, ray_directions, **kwargs): # -> NDArray[signedinteger[Any]]: + """ + Find the index of the first triangle a ray hits. + + + Parameters + ---------- + ray_origins : (n, 3) float + Origins of rays + ray_directions : (n, 3) float + Direction (vector) of rays + + Returns + ---------- + triangle_index : (n,) int + Index of triangle ray hit, or -1 if not hit + """ + ... + def intersects_any(self, ray_origins, ray_directions, **kwargs): # -> NDArray[Any]: + """ + Find out if each ray hit any triangle on the mesh. + + Parameters + ------------ + ray_origins : (m, 3) float + Ray origin points + ray_directions : (m, 3) float + Ray direction vectors + + Returns + --------- + hit : (m,) bool + Whether any ray hit any triangle on the mesh + """ + ... + def contains_points(self, points): # -> NDArray[Any]: + """ + Check if a mesh contains a list of points, using ray tests. + + If the point is on the surface of the mesh the behavior + is undefined. + + Parameters + ------------ + points : (n, 3) float + Points in space + + Returns + --------- + contains : (n,) bool + Whether point is inside mesh or not + """ + ... + +def ray_triangle_id( + triangles, ray_origins, ray_directions, triangles_normal=..., tree=..., multiple_hits=... 
+): # -> tuple[NDArray[int64], NDArray[int64], NDArray[float64]] | tuple[ndarray[Any, dtype[int64]], ndarray[Any, dtype[int64]], Any]: + """ + Find the intersections between a group of triangles and rays + + Parameters + ------------- + triangles : (n, 3, 3) float + Triangles in space + ray_origins : (m, 3) float + Ray origin points + ray_directions : (m, 3) float + Ray direction vectors + triangles_normal : (n, 3) float + Normal vector of triangles, optional + tree : rtree.Index + Rtree object holding triangle bounds + + Returns + ----------- + index_triangle : (h,) int + Index of triangles hit + index_ray : (h,) int + Index of ray that hit triangle + locations : (h, 3) float + Position of intersection in space + """ + ... + +def ray_triangle_candidates(ray_origins, ray_directions, tree): # -> tuple[NDArray[int64], NDArray[int64]]: + """ + Do broad- phase search for triangles that the rays + may intersect. + + Does this by creating a bounding box for the ray as it + passes through the volume occupied by the tree + + Parameters + ------------ + ray_origins : (m, 3) float + Ray origin points. + ray_directions : (m, 3) float + Ray direction vectors + tree : rtree object + Ccontains AABB of each triangle + + Returns + ---------- + ray_candidates : (n,) int + Triangle indexes + ray_id : (n,) int + Corresponding ray index for a triangle candidate + """ + ... + +def ray_bounds(ray_origins, ray_directions, bounds, buffer_dist=...): # -> NDArray[floating[Any]]: + """ + Given a set of rays and a bounding box for the volume of interest + where the rays will be passing through, find the bounding boxes + of the rays as they pass through the volume. + + Parameters + ------------ + ray_origins: (m,3) float, ray origin points + ray_directions: (m,3) float, ray direction vectors + bounds: (2,3) bounding box (min, max) + buffer_dist: float, distance to pad zero width bounding boxes + + Returns + --------- + ray_bounding: (n) set of AABB of rays passing through volume + """ + ... 
diff --git a/typings/trimesh/ray/ray_util.pyi b/typings/trimesh/ray/ray_util.pyi new file mode 100644 index 00000000..4d69798f --- /dev/null +++ b/typings/trimesh/ray/ray_util.pyi @@ -0,0 +1,25 @@ +""" +This type stub file was generated by pyright. +""" + +from .. import constants + +@constants.log_time +def contains_points(intersector, points, check_direction=...): # -> NDArray[Any]: + """ + Check if a mesh contains a set of points, using ray tests. + + If the point is on the surface of the mesh, behavior is + undefined. + + Parameters + --------- + mesh: Trimesh object + points: (n,3) points in space + + Returns + --------- + contains : (n) bool + Whether point is inside mesh or not + """ + ... diff --git a/typings/trimesh/registration.pyi b/typings/trimesh/registration.pyi new file mode 100644 index 00000000..d14fbda2 --- /dev/null +++ b/typings/trimesh/registration.pyi @@ -0,0 +1,281 @@ +""" +This type stub file was generated by pyright. +""" + +""" +registration.py +--------------- + +Functions for registering (aligning) point clouds with meshes. +""" + +def mesh_other( + mesh, other, samples=..., scale=..., icp_first=..., icp_final=..., **kwargs +): # -> tuple[NDArray[floating[Any]] | Any | NDArray[float64], Any]: + """ + Align a mesh with another mesh or a PointCloud using + the principal axes of inertia as a starting point which + is refined by iterative closest point. 
+ + Parameters + ------------ + mesh : trimesh.Trimesh object + Mesh to align with other + other : trimesh.Trimesh or (n, 3) float + Mesh or points in space + samples : int + Number of samples from mesh surface to align + scale : bool + Allow scaling in transform + icp_first : int + How many ICP iterations for the 9 possible + combinations of sign flippage + icp_final : int + How many ICP iterations for the closest + candidate from the wider search + kwargs : dict + Passed through to `icp`, which passes through to `procrustes` + + Returns + ----------- + mesh_to_other : (4, 4) float + Transform to align mesh to the other object + cost : float + Average squared distance per point + """ + ... + +def procrustes( + a, b, weights=..., reflection=..., translation=..., scale=..., return_cost=... +): # -> tuple[NDArray[Any], NDArray[float64] | Any, Any] | NDArray[Any]: + """ + Perform Procrustes' analysis subject to constraints. Finds the + transformation T mapping a to b which minimizes the square sum + distances between Ta and b, also called the cost. Optionally + specify different weights for the points in a to minimize the + weighted square sum distances between Ta and b. This can + improve transformation robustness on noisy data if the points' + probability distribution is known. 
+ + Parameters + ---------- + a : (n,3) float + List of points in space + b : (n,3) float + List of points in space + weights : (n,) float + List of floats representing how much weight is assigned to each point of a + reflection : bool + If the transformation is allowed reflections + translation : bool + If the transformation is allowed translations + scale : bool + If the transformation is allowed scaling + return_cost : bool + Whether to return the cost and transformed a as well + + Returns + ---------- + matrix : (4,4) float + The transformation matrix sending a to b + transformed : (n,3) float + The image of a under the transformation + cost : float + The cost of the transformation + """ + ... + +def icp( + a, b, initial=..., threshold=..., max_iterations=..., **kwargs +): # -> tuple[Any | NDArray[float64] | Unknown, NDArray[float64] | Any | Unbound, Any | Unbound]: + """ + Apply the iterative closest point algorithm to align a point cloud with + another point cloud or mesh. Will only produce reasonable results if the + initial transformation is roughly correct. Initial transformation can be + found by applying Procrustes' analysis to a suitable set of landmark + points (often picked manually). + + Parameters + ---------- + a : (n,3) float + List of points in space. + b : (m,3) float or Trimesh + List of points in space or mesh. + initial : (4,4) float + Initial transformation. + threshold : float + Stop when change in cost is less than threshold + max_iterations : int + Maximum number of iterations + kwargs : dict + Args to pass to procrustes + + Returns + ---------- + matrix : (4,4) float + The transformation matrix sending a to b + transformed : (n,3) float + The image of a under the transformation + cost : float + The cost of the transformation + """ + ... 
+ +def nricp_amberg( + source_mesh, + target_geometry, + source_landmarks=..., + target_positions=..., + steps=..., + eps=..., + gamma=..., + distance_threshold=..., + return_records=..., + use_faces=..., + use_vertex_normals=..., + neighbors_count=..., +): # -> list[Unknown]: + """ + Non Rigid Iterative Closest Points + + Implementation of "Amberg et al. 2007: Optimal Step + Nonrigid ICP Algorithms for Surface Registration." + Allows to register non-rigidly a mesh on another or + on a point cloud. The core algorithm is explained + at the end of page 3 of the paper. + + Comparison between nricp_amberg and nricp_sumner: + * nricp_amberg fits to the target mesh in less steps + * nricp_amberg can generate sharp edges + * only vertices and their neighbors are considered + * nricp_sumner tend to preserve more the original shape + * nricp_sumner parameters are easier to tune + * nricp_sumner solves for triangle positions whereas + nricp_amberg solves for vertex transforms + * nricp_sumner is less optimized when wn > 0 + + Parameters + ---------- + source_mesh : Trimesh + Source mesh containing both vertices and faces. + target_geometry : Trimesh or PointCloud or (n, 3) float + Target geometry. It can contain no faces or be a PointCloud. + source_landmarks : (n,) int or ((n,) int, (n, 3) float) + n landmarks on the the source mesh. + Represented as vertex indices (n,) int. + It can also be represented as a tuple of triangle + indices and barycentric coordinates ((n,) int, (n, 3) float,). + target_positions : (n, 3) float + Target positions assigned to source landmarks + steps : Core parameters of the algorithm + Iterable of iterables (ws, wl, wn, max_iter,). + ws is smoothness term, wl weights landmark importance, wn normal importance + and max_iter is the maximum number of iterations per step. + eps : float + If the error decrease if inferior to this value, the current step ends. + gamma : float + Weight the translation part against the rotational/skew part. 
+ Recommended value : 1. + distance_threshold : float + Distance threshold to account for a vertex match or not. + return_records : bool + If True, also returns all the intermediate results. It can help debugging + and tune the parameters to match a specific case. + use_faces : bool + If True and if target geometry has faces, use proximity.closest_point to find + matching points. Else use scipy's cKDTree object. + use_vertex_normals : bool + If True and if target geometry has faces, interpolate the normals of the target + geometry matching points. + Else use face normals or estimated normals if target geometry has no faces. + neighbors_count : int + number of neighbors used for normal estimation. Only used if target geometry has + no faces or if use_faces is False. + + Returns + ---------- + result : (n, 3) float or List[(n, 3) float] + The vertices positions of source_mesh such that it is registered non-rigidly + onto the target geometry. + If return_records is True, it returns the list of the vertex positions at each + iteration. + """ + ... + +def nricp_sumner( + source_mesh, + target_geometry, + source_landmarks=..., + target_positions=..., + steps=..., + distance_threshold=..., + return_records=..., + use_faces=..., + use_vertex_normals=..., + neighbors_count=..., + face_pairs_type=..., +): # -> list[Unknown]: + """ + Non Rigid Iterative Closest Points + + Implementation of the correspondence computation part of + "Sumner and Popovic 2004: Deformation Transfer for Triangle Meshes" + Allows to register non-rigidly a mesh on another geometry. 
+ + Comparison between nricp_amberg and nricp_sumner: + * nricp_amberg fits to the target mesh in less steps + * nricp_amberg can generate sharp edges (only vertices and their + neighbors are considered) + * nricp_sumner tend to preserve more the original shape + * nricp_sumner parameters are easier to tune + * nricp_sumner solves for triangle positions whereas nricp_amberg solves for + vertex transforms + * nricp_sumner is less optimized when wn > 0 + + Parameters + ---------- + source_mesh : Trimesh + Source mesh containing both vertices and faces. + target_geometry : Trimesh or PointCloud or (n, 3) float + Target geometry. It can contain no faces or be a PointCloud. + source_landmarks : (n,) int or ((n,) int, (n, 3) float) + n landmarks on the the source mesh. + Represented as vertex indices (n,) int. + It can also be represented as a tuple of triangle indices and barycentric + coordinates ((n,) int, (n, 3) float,). + target_positions : (n, 3) float + Target positions assigned to source landmarks + steps : Core parameters of the algorithm + Iterable of iterables (wc, wi, ws, wl, wn). + wc is the correspondence term (strength of fitting), wi is the identity term + (recommended value : 0.001), ws is smoothness term, wl weights the landmark + importance and wn the normal importance. + distance_threshold : float + Distance threshold to account for a vertex match or not. + return_records : bool + If True, also returns all the intermediate results. It can help debugging + and tune the parameters to match a specific case. + use_faces : bool + If True and if target geometry has faces, use proximity.closest_point to find + matching points. Else use scipy's cKDTree object. + use_vertex_normals : bool + If True and if target geometry has faces, interpolate the normals of the target + geometry matching points. + Else use face normals or estimated normals if target geometry has no faces. + neighbors_count : int + number of neighbors used for normal estimation. 
Only used if target geometry has + no faces or if use_faces is False. + face_pairs_type : str 'vertex' or 'edge' + Method to determine face pairs used in the smoothness cost. 'vertex' yields + smoother results. + + + Returns + ---------- + result : (n, 3) float or List[(n, 3) float] + The vertices positions of source_mesh such that it is registered non-rigidly + onto the target geometry. + If return_records is True, it returns the list of the vertex positions at each + iteration. + """ + ... diff --git a/typings/trimesh/remesh.pyi b/typings/trimesh/remesh.pyi new file mode 100644 index 00000000..d4fff046 --- /dev/null +++ b/typings/trimesh/remesh.pyi @@ -0,0 +1,131 @@ +""" +This type stub file was generated by pyright. +""" + +""" +remesh.py +------------- + +Deal with re- triangulation of existing meshes. +""" + +def subdivide( + vertices, faces, face_index=..., vertex_attributes=..., return_index=... +): # -> tuple[NDArray[Unknown], NDArray[signedinteger[Any]], dict[Unknown, Unknown]] | tuple[NDArray[Unknown], NDArray[signedinteger[Any]], dict[Any, Any]] | tuple[NDArray[Unknown], NDArray[signedinteger[Any]]]: + """ + Subdivide a mesh into smaller triangles. + + Note that if `face_index` is passed, only those + faces will be subdivided and their neighbors won't + be modified making the mesh no longer "watertight." + + Parameters + ------------ + vertices : (n, 3) float + Vertices in space + faces : (m, 3) int + Indexes of vertices which make up triangular faces + face_index : faces to subdivide. + if None: all faces of mesh will be subdivided + if (n,) int array of indices: only specified faces + vertex_attributes : dict + Contains (n, d) attribute data + return_index : bool + If True, return index of original face for new faces + + Returns + ---------- + new_vertices : (q, 3) float + Vertices in space + new_faces : (p, 3) int + Remeshed faces + index_dict : dict + Only returned if `return_index`, {index of + original face : index of new faces}. + """ + ... 
+ +def subdivide_to_size( + vertices, faces, max_edge, max_iter=..., return_index=... +): # -> tuple[Unknown | NDArray[Any] | NDArray[Unknown], Unknown | NDArray[Any] | NDArray[Unknown], NDArray[Unknown]] | tuple[Unknown | NDArray[Any] | NDArray[Unknown], Unknown | NDArray[Any] | NDArray[Unknown]]: + """ + Subdivide a mesh until every edge is shorter than a + specified length. + + Will return a triangle soup, not a nicely structured mesh. + + Parameters + ------------ + vertices : (n, 3) float + Vertices in space + faces : (m, 3) int + Indices of vertices which make up triangles + max_edge : float + Maximum length of any edge in the result + max_iter : int + The maximum number of times to run subdivision + return_index : bool + If True, return index of original face for new faces + + Returns + ------------ + vertices : (j, 3) float + Vertices in space + faces : (q, 3) int + Indices of vertices + index : (q, 3) int + Only returned if `return_index`, index of + original face for each new face. + """ + ... + +def subdivide_loop( + vertices, faces, iterations=... +): # -> tuple[Unknown | NDArray[Unknown], Unknown | NDArray[Unknown] | ndarray[Any, dtype[signedinteger[Any]]]]: + """ + Subdivide a mesh by dividing each triangle into four triangles + and approximating their smoothed surface (loop subdivision). + This function is an array-based implementation of loop subdivision, + which avoids slow for loop and enables faster calculation. + + Overall process: + 1. Calculate odd vertices. + Assign a new odd vertex on each edge and + calculate the value for the boundary case and the interior case. + The value is calculated as follows. + v2 + / f0 \\ 0 + v0--e--v1 / \\ + \\f1 / v0--e--v1 + v3 + - interior case : 3:1 ratio of mean(v0,v1) and mean(v2,v3) + - boundary case : mean(v0,v1) + 2. Calculate even vertices. + The new even vertices are calculated with the existing + vertices and their adjacent vertices. 
+ 1---2 + / \\/ \\ 0---1 + 0---v---3 / \\/ \\ + \\ /\\/ b0---v---b1 + k...4 + - interior case : (1-kB):B ratio of v and k adjacencies + - boundary case : 3:1 ratio of v and mean(b0,b1) + 3. Compose new faces with new vertices. + + Parameters + ------------ + vertices : (n, 3) float + Vertices in space + faces : (m, 3) int + Indices of vertices which make up triangles + + Returns + ------------ + vertices : (j, 3) float + Vertices in space + faces : (q, 3) int + Indices of vertices + iterations : int + Number of iterations to run subdivision + """ + ... diff --git a/typings/trimesh/rendering.pyi b/typings/trimesh/rendering.pyi new file mode 100644 index 00000000..566d08b4 --- /dev/null +++ b/typings/trimesh/rendering.pyi @@ -0,0 +1,185 @@ +""" +This type stub file was generated by pyright. +""" + +""" +rendering.py +-------------- + +Functions to convert trimesh objects to pyglet/opengl objects. +""" + +def convert_to_vertexlist( + geometry, **kwargs +): # -> tuple[int, Literal[4], Unknown | None, Unknown | Any, tuple[Literal['v3f/static'], Unknown], tuple[Literal['n3f/static'], Unknown | Any], tuple[str, Unknown] | tuple[Literal['t2f/static'], Unknown]] | tuple[int, Literal[1], None, Any, tuple[Literal['v3f/static'], ndarray[Any, dtype[float64]] | ndarray[Any, dtype[Any]] | ndarray[Any, dtype[Unknown]] | Unknown], tuple[str, Unknown | Any]] | tuple[int, Literal[0], None, Any, tuple[Literal['v3f/static'], ndarray[Any, dtype[float64]]], tuple[str, Any | Unknown]]: + """ + Try to convert various geometry objects to the constructor + args for a pyglet indexed vertex list. + + Parameters + ------------ + obj : Trimesh, Path2D, Path3D, (n,2) float, (n,3) float + Object to render + + Returns + ------------ + args : tuple + Args to be passed to pyglet indexed vertex list + constructor. + """ + ... + +def mesh_to_vertexlist( + mesh, group=..., smooth=..., smooth_threshold=... 
+): # -> tuple[int, Literal[4], Unknown | None, Unknown | Any, tuple[Literal['v3f/static'], Unknown], tuple[Literal['n3f/static'], Unknown | Any], tuple[str, Unknown] | tuple[Literal['t2f/static'], Unknown]]: + """ + Convert a Trimesh object to arguments for an + indexed vertex list constructor. + + Parameters + ------------- + mesh : trimesh.Trimesh + Mesh to be rendered + group : str + Rendering group for the vertex list + smooth : bool + Should we try to smooth shade the mesh + smooth_threshold : int + Maximum number of faces to smooth shade + + Returns + -------------- + args : (7,) tuple + Args for vertex list constructor + + """ + ... + +def path_to_vertexlist( + path, group=..., **kwargs +): # -> tuple[int, Literal[1], Unknown | None, Any, tuple[Literal['v3f/static'], ndarray[Any, dtype[float64]] | ndarray[Any, dtype[Any]] | ndarray[Any, dtype[Unknown]] | Unknown], tuple[str, Unknown | Any]]: + """ + Convert a Path3D object to arguments for a + pyglet indexed vertex list constructor. + + Parameters + ------------- + path : trimesh.path.Path3D object + Mesh to be rendered + group : str + Rendering group for the vertex list + + Returns + -------------- + args : (7,) tuple + Args for vertex list constructor + """ + ... + +def points_to_vertexlist( + points, colors=..., group=..., **kwargs +): # -> tuple[int, Literal[0], Unknown | None, Any, tuple[Literal['v3f/static'], ndarray[Any, dtype[float64]]], tuple[str, Any | Unknown]]: + """ + Convert a numpy array of 3D points to args for + a vertex list constructor. + + Parameters + ------------- + points : (n, 3) float + Points to be rendered + colors : (n, 3) or (n, 4) float + Colors for each point + group : str + Rendering group for the vertex list + + Returns + -------------- + args : (7,) tuple + Args for vertex list constructor + """ + ... + +def colors_to_gl(colors, count): # -> tuple[str, Unknown]: + """ + Given a list of colors (or None) return a GL-acceptable + list of colors. 
+ + Parameters + ------------ + colors: (count, (3 or 4)) float + Input colors as an array + + Returns + --------- + colors_type : str + Color type + colors_gl : (count,) list + Colors to pass to pyglet + """ + ... + +def material_to_texture(material, upsize=...): # -> None: + """ + Convert a trimesh.visual.texture.Material object into + a pyglet-compatible texture object. + + Parameters + -------------- + material : trimesh.visual.texture.Material + Material to be converted + upsize: bool + If True, will upscale textures to their nearest power + of two resolution to avoid weirdness + + Returns + --------------- + texture : pyglet.image.Texture + Texture loaded into pyglet form + """ + ... + +def matrix_to_gl(matrix): + """ + Convert a numpy row-major homogeneous transformation matrix + to a flat column-major GLfloat transformation. + + Parameters + ------------- + matrix : (4,4) float + Row-major homogeneous transform + + Returns + ------------- + glmatrix : (16,) gl.GLfloat + Transform in pyglet format + """ + ... + +def vector_to_gl(array, *args): + """ + Convert an array and an optional set of args into a + flat vector of gl.GLfloat + """ + ... + +def light_to_gl(light, transform, lightN): # -> list[tuple[Unknown, Unknown, Unknown]]: + """ + Convert trimesh.scene.lighting.Light objects into + args for gl.glLightFv calls + + Parameters + -------------- + light : trimesh.scene.lighting.Light + Light object to be converted to GL + transform : (4, 4) float + Transformation matrix of light + lightN : int + Result of gl.GL_LIGHT0, gl.GL_LIGHT1, etc + + Returns + -------------- + multiarg : [tuple] + List of args to pass to gl.glLightFv eg: + [gl.glLightfb(*a) for a in multiarg] + """ + ... diff --git a/typings/trimesh/repair.pyi b/typings/trimesh/repair.pyi new file mode 100644 index 00000000..78649db1 --- /dev/null +++ b/typings/trimesh/repair.pyi @@ -0,0 +1,118 @@ +""" +This type stub file was generated by pyright. 
+""" + +""" +repair.py +------------- + +Fill holes and fix winding and normals of meshes. +""" + +def fix_winding(mesh): # -> None: + """ + Traverse and change mesh faces in-place to make sure + winding is correct with edges on adjacent faces in + opposite directions. + + Parameters + ------------- + mesh : Trimesh + Source geometry to alter in-place. + """ + ... + +def fix_inversion(mesh, multibody=...): # -> None: + """ + Check to see if a mesh has normals pointing "out." + + Parameters + ------------- + mesh : trimesh.Trimesh + Mesh to fix in-place. + multibody : bool + If True will try to fix normals on every body + """ + ... + +def fix_normals(mesh, multibody=...): # -> None: + """ + Fix the winding and direction of a mesh face and + face normals in-place. + + Really only meaningful on watertight meshes but will orient all + faces and winding in a uniform way for non-watertight face + patches as well. + + Parameters + ------------- + mesh : trimesh.Trimesh + Mesh to fix normals on + multibody : bool + if True try to correct normals direction + on every body rather than just one + + Notes + -------------- + mesh.faces : will flip columns on inverted faces + """ + ... + +def broken_faces(mesh, color=...): # -> NDArray[Unknown]: + """ + Return the index of faces in the mesh which break the + watertight status of the mesh. + + Parameters + -------------- + mesh : trimesh.Trimesh + Mesh to check broken faces on + color: (4,) uint8 or None + Will set broken faces to this color if not None + + Returns + --------------- + broken : (n, ) int + Indexes of mesh.faces + """ + ... + +def fill_holes(mesh): # -> bool: + """ + Fill single- triangle holes on triangular meshes by adding + new triangles to fill the holes. New triangles will have + proper winding and normals, and if face colors exist the color + of the last face will be assigned to the new triangles. + + Parameters + ------------ + mesh : trimesh.Trimesh + Mesh will be repaired in- place + """ + ... 
+ +def stitch( + mesh, faces=..., insert_vertices=... +): # -> tuple[NDArray[signedinteger[Any]], NDArray[Unknown] | Unbound] | NDArray[signedinteger[Any]]: + """ + Create a fan stitch over the boundary of the specified + faces. If the boundary is non-convex a triangle fan + is going to be extremely wonky. + + Parameters + ----------- + vertices : (n, 3) float + Vertices in space. + faces : (n,) int + Face indexes to stitch with triangle fans. + insert_vertices : bool + Allow stitching to insert new vertices? + + Returns + ---------- + fan : (m, 3) int + New triangles referencing mesh.vertices. + vertices : (p, 3) float + Inserted vertices (only returned `if insert_vertices`) + """ + ... diff --git a/typings/trimesh/resolvers.pyi b/typings/trimesh/resolvers.pyi new file mode 100644 index 00000000..1c150309 --- /dev/null +++ b/typings/trimesh/resolvers.pyi @@ -0,0 +1,308 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +from . import util + +""" +resolvers.py +--------------- + +Provides a common interface to load assets referenced by name +like MTL files, texture images, etc. Assets can be from ZIP +archives, web assets, or a local file path. +""" + +class Resolver(util.ABC): + """ + The base class for resolvers. + """ + + @abc.abstractmethod + def __init__(self, *args, **kwargs) -> None: ... + @abc.abstractmethod + def get(self, key): ... + @abc.abstractmethod + def write(self, name, data): ... + @abc.abstractmethod + def namespaced(self, namespace): ... + def __getitem__(self, key): ... + def __setitem__(self, key, value): ... + def __contains__(self, key): # -> bool: + ... + +class FilePathResolver(Resolver): + """ + Resolve files from a source path on the file system. + """ + + def __init__(self, source) -> None: + """ + Resolve files based on a source path. + + Parameters + ------------ + source : str + File path where mesh was loaded from + """ + ... 
+ def keys(self): # -> Generator[str, Any, None]: + """ + List all files available to be loaded. + + Yields + ----------- + name : str + Name of a file which can be accessed. + """ + ... + def namespaced(self, namespace): # -> FilePathResolver: + """ + Return a resolver which changes the root of the + resolver by an added namespace. + + Parameters + ------------- + namespace : str + Probably a subdirectory + + Returns + -------------- + resolver : FilePathResolver + Resolver with root directory changed. + """ + ... + def get(self, name): # -> bytes: + """ + Get an asset. + + Parameters + ------------- + name : str + Name of the asset + + Returns + ------------ + data : bytes + Loaded data from asset + """ + ... + def write(self, name, data): # -> None: + """ + Write an asset to a file path. + + Parameters + ----------- + name : str + Name of the file to write + data : str or bytes + Data to write to the file + """ + ... + +class ZipResolver(Resolver): + """ + Resolve files inside a ZIP archive. + """ + + def __init__(self, archive=..., namespace=...) -> None: + """ + Resolve files inside a ZIP archive as loaded by + trimesh.util.decompress + + Parameters + ------------- + archive : dict + Contains resources as file object + namespace : None or str + If passed will only show keys that start + with this value and this substring must be + removed for any get calls. + """ + ... + def keys(self): # -> list[Unknown] | dict_keys[Unknown, Unknown]: + """ + Get the available keys in the current archive. + + Returns + ----------- + keys : iterable + Keys in the current archive. + """ + ... + def write(self, key, value): # -> None: + """ + Store a value in the current archive. + + Parameters + ----------- + key : hashable + Key to store data under. + value : str, bytes, file-like + Value to store. + """ + ... + def get(self, name): # -> bytes | str | None: + """ + Get an asset from the ZIP archive. 
+ + Parameters + ------------- + name : str + Name of the asset + + Returns + ------------- + data : bytes + Loaded data from asset + """ + ... + def namespaced(self, namespace): # -> ZipResolver: + """ + Return a "sub-resolver" with a root namespace. + + Parameters + ------------- + namespace : str + The root of the key to clip off, i.e. if + this resolver has key `a/b/c` you can get + 'a/b/c' with resolver.namespaced('a/b').get('c') + + Returns + ----------- + resolver : Resolver + Namespaced resolver. + """ + ... + def export(self): # -> bytes: + """ + Export the contents of the current archive as + a ZIP file. + + Returns + ------------ + compressed : bytes + Compressed data in ZIP format. + """ + ... + +class WebResolver(Resolver): + """ + Resolve assets from a remote URL. + """ + + def __init__(self, url) -> None: + """ + Resolve assets from a base URL. + + Parameters + -------------- + url : str + Location where a mesh was stored or + directory where mesh was stored + """ + ... + def get(self, name): # -> bytes: + """ + Get a resource from the remote site. + + Parameters + ------------- + name : str + Asset name, i.e. 'quadknot.obj.mtl' + """ + ... + def namespaced(self, namespace): # -> WebResolver: + """ + Return a namespaced version of current resolver. + + Parameters + ------------- + namespace : str + URL fragment + + Returns + ----------- + resolver : WebResolver + With sub-url: `https://example.com/{namespace}` + """ + ... + def write(self, key, value): ... + +class GithubResolver(Resolver): + def __init__(self, repo, branch=..., commit=..., save=...) -> None: + """ + Get files from a remote Github repository by + downloading a zip file with the entire branch + or a specific commit. + + Parameters + ------------- + repo : str + In the format of `owner/repo` + branch : str + The remote branch you want to get files from. + commit : str + The full commit hash: pass either this OR branch. + save : None or str + A path if you want to save results locally. 
+ """ + ... + def keys(self): # -> list[Unknown] | dict_keys[Unknown, Unknown]: + """ + List the available files in the repository. + + Returns + ---------- + keys : iterable + Keys available to the resolved. + """ + ... + def write(self, name, data): ... + @property + def zipped(self): # -> ZipResolver: + """ + + - opened zip file + - locally saved zip file + - retrieve zip file and saved + """ + ... + def get(self, key): # -> bytes | str | None: + ... + def namespaced(self, namespace): # -> ZipResolver: + """ + Return a "sub-resolver" with a root namespace. + + Parameters + ------------- + namespace : str + The root of the key to clip off, i.e. if + this resolver has key `a/b/c` you can get + 'a/b/c' with resolver.namespaced('a/b').get('c') + + Returns + ----------- + resolver : Resolver + Namespaced resolver. + """ + ... + +def nearby_names(name, namespace=...): # -> Generator[Unknown | LiteralString, Any, None]: + """ + Try to find nearby variants of a specified name. + + Parameters + ------------ + name : str + Initial name. + + Yields + ----------- + nearby : str + Name that is a lightly permutated version + of the initial name. + """ + ... diff --git a/typings/trimesh/resources/__init__.pyi b/typings/trimesh/resources/__init__.pyi new file mode 100644 index 00000000..eeefb7df --- /dev/null +++ b/typings/trimesh/resources/__init__.pyi @@ -0,0 +1,49 @@ +""" +This type stub file was generated by pyright. +""" + +import json +import os + +from ..util import decode_text, wrap_as_stream + +_pwd = ... +_cache = ... + +def get(name, decode=..., decode_json=..., as_stream=...): # -> Any | StringIO | BytesIO | bytes | str: + """ + Get a resource from the `trimesh/resources` folder. + + Parameters + ------------- + name : str + File path relative to `trimesh/resources` + decode : bool + Whether or not to decode result as UTF-8 + decode_json : bool + Run `json.loads` on resource if True. 
+ as_stream : bool + Return as a file-like object + + Returns + ------------- + resource : str, bytes, or decoded JSON + File data + """ + ... + +def get_schema(name): # -> list[Unknown] | dict[Unknown, Unknown] | Any: + """ + Load a schema and evaluate the referenced files. + + Parameters + ------------ + name : str + Filename of schema. + + Returns + ---------- + schema : dict + Loaded and resolved schema. + """ + ... diff --git a/typings/trimesh/sample.pyi b/typings/trimesh/sample.pyi new file mode 100644 index 00000000..2aed4100 --- /dev/null +++ b/typings/trimesh/sample.pyi @@ -0,0 +1,145 @@ +""" +This type stub file was generated by pyright. +""" + +import numpy as np + +""" +sample.py +------------ + +Randomly sample surface and volume of meshes. +""" +if hasattr(np.random, "default_rng"): + default_rng = ... +else: + class default_rng(np.random.RandomState): + def random(self, *args, **kwargs): ... + +def sample_surface( + mesh, count, face_weight=..., sample_color=..., seed=... +): # -> tuple[Unknown, Unknown, Unknown | None] | tuple[Unknown, Unknown]: + """ + Sample the surface of a mesh, returning the specified + number of points + + For individual triangle sampling uses this method: + http://mathworld.wolfram.com/TrianglePointPicking.html + + Parameters + ----------- + mesh : trimesh.Trimesh + Geometry to sample the surface of + count : int + Number of points to return + face_weight : None or len(mesh.faces) float + Weight faces by a factor other than face area. + If None will be the same as face_weight=mesh.area + sample_color : bool + Option to calculate the color of the sampled points. + Default is False. + seed : None or int + If passed as an integer will provide deterministic results + otherwise pulls the seed from operating system entropy. 
+ + Returns + --------- + samples : (count, 3) float + Points in space on the surface of mesh + face_index : (count,) int + Indices of faces for each sampled point + colors : (count, 4) float + Colors of each sampled point + Returns only when the sample_color is True + """ + ... + +def volume_mesh(mesh, count): + """ + Use rejection sampling to produce points randomly + distributed in the volume of a mesh. + + + Parameters + ----------- + mesh : trimesh.Trimesh + Geometry to sample + count : int + Number of points to return + + Returns + --------- + samples : (n, 3) float + Points in the volume of the mesh where n <= count + """ + ... + +def volume_rectangular(extents, count, transform=...): # -> NDArray[float64] | Any: + """ + Return random samples inside a rectangular volume, + useful for sampling inside oriented bounding boxes. + + Parameters + ----------- + extents : (3,) float + Side lengths of rectangular solid + count : int + Number of points to return + transform : (4, 4) float + Homogeneous transformation matrix + + Returns + --------- + samples : (count, 3) float + Points in requested volume + """ + ... + +def sample_surface_even(mesh, count, radius=..., seed=...): # -> tuple[Unknown, Unknown]: + """ + Sample the surface of a mesh, returning samples which are + VERY approximately evenly spaced. This is accomplished by + sampling and then rejecting pairs that are too close together. + + Note that since it is using rejection sampling it may return + fewer points than requested (i.e. n < count). If this is the + case a log.warning will be emitted. + + Parameters + ----------- + mesh : trimesh.Trimesh + Geometry to sample the surface of + count : int + Number of points to return + radius : None or float + Removes samples below this radius + seed : None or int + Provides deterministic values + + Returns + --------- + samples : (n, 3) float + Points in space on the surface of mesh + face_index : (n,) int + Indices of faces for each sampled point + """ + ... 
+ +def sample_surface_sphere(count): # -> NDArray[Any]: + """ + Correctly pick random points on the surface of a unit sphere + + Uses this method: + http://mathworld.wolfram.com/SpherePointPicking.html + + Parameters + ----------- + count : int + Number of points to return + + Returns + ---------- + points : (count, 3) float + Random points on the surface of a unit sphere + """ + ... diff --git a/typings/trimesh/scene/__init__.pyi b/typings/trimesh/scene/__init__.pyi new file mode 100644 index 00000000..f47f2967 --- /dev/null +++ b/typings/trimesh/scene/__init__.pyi @@ -0,0 +1,8 @@ +""" +This type stub file was generated by pyright. +""" + +from .cameras import Camera +from .scene import Scene, split_scene + +__all__ = ["Camera", "Scene", "split_scene"] diff --git a/typings/trimesh/scene/cameras.pyi b/typings/trimesh/scene/cameras.pyi new file mode 100644 index 00000000..bf6dfff4 --- /dev/null +++ b/typings/trimesh/scene/cameras.pyi @@ -0,0 +1,240 @@ +""" +This type stub file was generated by pyright. +""" + +class Camera: + def __init__(self, name=..., resolution=..., focal=..., fov=..., z_near=..., z_far=...) -> None: + """ + Create a new Camera object that stores camera intrinsic + and extrinsic parameters. + + TODO: skew is not supported + TODO: cx and cy that are not half of width and height + + Parameters + ------------ + name : str or None + Name for camera to be used as node name + resolution : (2,) int + Pixel size in (height, width) + focal : (2,) float + Focal length in pixels. Either pass this OR FOV + but not both. focal = (K[0][0], K[1][1]) + fov : (2,) float + Field of view (fovx, fovy) in degrees + z_near : float + What is the closest + """ + ... + def copy(self): # -> Camera: + """ + Safely get a copy of the current camera. + """ + ... + @property + def resolution(self): # -> NDArray[int64]: + """ + Get the camera resolution in pixels. + + Returns + ------------ + resolution (2,) float + Camera resolution in pixels + """ + ... 
+ @resolution.setter + def resolution(self, values): # -> None: + """ + Set the camera resolution in pixels. + + Parameters + ------------ + resolution (2,) float + Camera resolution in pixels + """ + ... + @property + def focal(self): # -> NDArray[float64]: + """ + Get the focal length in pixels for the camera. + + Returns + ------------ + focal : (2,) float + Focal length in pixels + """ + ... + @focal.setter + def focal(self, values): # -> None: + """ + Set the focal length in pixels for the camera. + + Returns + ------------ + focal : (2,) float + Focal length in pixels. + """ + ... + @property + def K(self): # -> NDArray[float64]: + """ + Get the intrinsic matrix for the Camera object. + + Returns + ----------- + K : (3, 3) float + Intrinsic matrix for camera + """ + ... + @K.setter + def K(self, values): # -> None: + ... + @property + def fov(self): # -> NDArray[float64]: + """ + Get the field of view in degrees. + + Returns + ------------- + fov : (2,) float + XY field of view in degrees + """ + ... + @fov.setter + def fov(self, values): # -> None: + """ + Set the field of view in degrees. + + Parameters + ------------- + values : (2,) float + Size of FOV to set in degrees + """ + ... + def to_rays(self): # -> tuple[tuple[Unknown, Any] | Unknown, NDArray[int64]]: + """ + Calculate ray direction vectors. + + Will return one ray per pixel, as set in self.resolution. + + Returns + -------------- + vectors : (n, 3) float + Ray direction vectors in camera frame with z == -1 + """ + ... + def angles(self): # -> Any: + """ + Get ray spherical coordinates in radians. + + + Returns + -------------- + angles : (n, 2) float + Ray spherical coordinate angles in radians. + """ + ... + def look_at(self, points, **kwargs): # -> NDArray[float64]: + """ + Generate transform for a camera to keep a list + of points in the camera's field of view. 
+ + Parameters + ------------- + points : (n, 3) float + Points in space + rotation : None, or (4, 4) float + Rotation matrix for initial rotation + distance : None or float + Distance from camera to center + center : None, or (3,) float + Center of field of view. + + Returns + -------------- + transform : (4, 4) float + Transformation matrix from world to camera + """ + ... + def __repr__(self): # -> str: + ... + +def look_at(points, fov, rotation=..., distance=..., center=..., pad=...): # -> NDArray[float64]: + """ + Generate transform for a camera to keep a list + of points in the camera's field of view. + + Examples + ------------ + ```python + points = np.array([0, 0, 0], [1, 1, 1]) + scene.camera_transform = scene.camera.look_at(points) + ``` + + Parameters + ------------- + points : (n, 3) float + Points in space + fov : (2,) float + Field of view, in DEGREES + rotation : None, or (4, 4) float + Rotation matrix for initial rotation + distance : None or float + Distance from camera to center + center : None, or (3,) float + Center of field of view. + + Returns + -------------- + transform : (4, 4) float + Transformation matrix from world to camera + """ + ... + +def ray_pixel_coords(camera): # -> tuple[ndarray[Any, dtype[Any]], NDArray[int64]]: + """ + Get the x-y coordinates of rays in camera coordinates at + z == -1. + + One coordinate pair will be given for each pixel as defined in + camera.resolution. If reshaped, the returned array corresponds + to pixels of the rendered image. + + Examples + ------------ + ```python + xy = ray_pixel_coords(camera).reshape( + tuple(camera.coordinates) + (2,)) + top_left == xy[0, 0] + bottom_right == xy[-1, -1] + ``` + + Parameters + -------------- + camera : trimesh.scene.Camera + Camera object to generate rays from + + Returns + -------------- + xy : (n, 2) float + x-y coordinates of intersection of each camera ray + with the z == -1 frame + """ + ... 
+ +def camera_to_rays(camera): # -> tuple[tuple[Unknown, Any] | Unknown, NDArray[int64]]: + """ + Calculate the trimesh.scene.Camera object to direction vectors. + + Will return one ray per pixel, as set in camera.resolution. + + Parameters + -------------- + camera : trimesh.scene.Camera + + Returns + -------------- + vectors : (n, 3) float + Ray direction vectors in camera frame with z == -1 + """ + ... diff --git a/typings/trimesh/scene/lighting.pyi b/typings/trimesh/scene/lighting.pyi new file mode 100644 index 00000000..fd499789 --- /dev/null +++ b/typings/trimesh/scene/lighting.pyi @@ -0,0 +1,188 @@ +""" +This type stub file was generated by pyright. +""" + +import numpy as np + +from .. import util + +""" +lighting.py +-------------- + +Hold basic information about lights. + +Forked from the light model in `pyrender`: +https://github.com/mmatl/pyrender +""" +_DEFAULT_RGBA = np.array([60, 60, 60, 255], dtype=np.uint8) + +class Light(util.ABC): + """ + Base class for all light objects. + + Attributes + ---------- + name : str, optional + Name of the light. + color : (4,) uint8 + RGBA value for the light's color in linear space. + intensity : float + Brightness of light. The units that this is defined in depend + on the type of light: point and spot lights use luminous intensity + in candela (lm/sr) while directional lights use illuminance + in lux (lm/m2). + radius : float + Cutoff distance at which light's intensity may be considered to + have reached zero. Supported only for point and spot lights + Must be > 0.0 + If None, the radius is assumed to be infinite. + """ + + def __init__(self, name=..., color=..., intensity=..., radius=...) -> None: ... + @property + def color(self): # -> NDArray[uint8] | Any: + ... + @color.setter + def color(self, value): # -> None: + ... + @property + def intensity(self): # -> float: + ... + @intensity.setter + def intensity(self, value): # -> None: + ... + @property + def radius(self): # -> float: + ... 
+ @radius.setter + def radius(self, value): # -> None: + ... + +class DirectionalLight(Light): + """ + Directional lights are light sources that act as though they are + infinitely far away and emit light in the direction of the local -z axis. + This light type inherits the orientation of the node that it belongs to; + position and scale are ignored except for their effect on the inherited + node orientation. Because it is at an infinite distance, the light is + not attenuated. Its intensity is defined in lumens per metre squared, + or lux (lm/m2). + + Attributes + ---------- + name : str, optional + Name of the light. + color : (4,) unit8 + RGBA value for the light's color in linear space. + intensity : float + Brightness of light. The units that this is defined in depend + on the type of light. + point and spot lights use luminous intensity in candela (lm/sr), + while directional lights use illuminance in lux (lm/m2). + radius : float + Cutoff distance at which light's intensity may be considered to + have reached zero. Supported only for point and spot lights, must be > 0. + If None, the radius is assumed to be infinite. + """ + + def __init__(self, name=..., color=..., intensity=..., radius=...) -> None: ... + +class PointLight(Light): + """ + Point lights emit light in all directions from their position in space; + rotation and scale are ignored except for their effect on the inherited + node position. The brightness of the light attenuates in a physically + correct manner as distance increases from the light's position (i.e. + brightness goes like the inverse square of the distance). Point light + intensity is defined in candela, which is lumens per square radian (lm/sr). + + Attributes + ---------- + name : str, optional + Name of the light. + color : (4,) uint8 + RGBA value for the light's color in linear space. + intensity : float + Brightness of light. The units that this is defined in depend + on the type of light. 
+ point and spot lights use luminous intensity in candela (lm/sr), + while directional lights use illuminance in lux (lm/m2). + radius : float + Cutoff distance at which light's intensity may be considered to + have reached zero. Supported only for point and spot lights, must be > 0. + If None, the radius is assumed to be infinite. + """ + + def __init__(self, name=..., color=..., intensity=..., radius=...) -> None: ... + +class SpotLight(Light): + """ + Spot lights emit light in a cone in the direction of the local -z axis. + The angle and falloff of the cone is defined using two numbers, the + `innerConeAngle` and `outerConeAngle`. As with point lights, the brightness + also attenuates in a physically correct manner as distance increases from + the light's position (i.e. brightness goes like the inverse square of the + distance). Spot light intensity refers to the brightness inside the + `innerConeAngle` (and at the location of the light) and is defined in + candela, which is lumens per square radian (lm/sr). A spot light's position + and orientation are inherited from its node transform. Inherited scale does + not affect cone shape, and is ignored except for its effect on position + and orientation. + + Attributes + ---------- + name : str, optional + Name of the light. + color : (4,) uint8 + RGBA value for the light's color in linear space. + intensity : float + Brightness of light. The units that this is defined in depend + on the type of light. + point and spot lights use luminous intensity in candela (lm/sr), + while directional lights use illuminance in lux (lm/m2). + radius : float + Cutoff distance at which light's intensity may be considered to + have reached zero. Supported only for point and spot lights, must be > 0. + If None, the radius is assumed to be infinite. + innerConeAngle : float + Angle, in radians, from centre of spotlight where falloff begins. + Must be greater than or equal to `0` and less than `outerConeAngle`. 
+ outerConeAngle : float + Angle, in radians, from centre of spotlight where falloff ends. + Must be greater than `innerConeAngle` and less than or equal to `PI / 2.0`. + """ + + def __init__( + self, name=..., color=..., intensity=..., radius=..., innerConeAngle=..., outerConeAngle=... + ) -> None: ... + @property + def innerConeAngle(self): # -> float: + ... + @innerConeAngle.setter + def innerConeAngle(self, value): # -> None: + ... + @property + def outerConeAngle(self): # -> float: + ... + @outerConeAngle.setter + def outerConeAngle(self, value): # -> None: + ... + +def autolight(scene): # -> tuple[list[PointLight], list[NDArray[float64]]]: + """ + Generate a list of lights for a scene that looks decent. + + Parameters + -------------- + scene : trimesh.Scene + Scene with geometry + + Returns + -------------- + lights : [Light] + List of light objects + transforms : (len(lights), 4, 4) float + Transformation matrices for light positions. + """ + ... diff --git a/typings/trimesh/scene/scene.pyi b/typings/trimesh/scene/scene.pyi new file mode 100644 index 00000000..4fad7bc3 --- /dev/null +++ b/typings/trimesh/scene/scene.pyi @@ -0,0 +1,651 @@ +""" +This type stub file was generated by pyright. +""" + +from .. import caching, units +from ..parent import Geometry3D + +class Scene(Geometry3D): + """ + A simple scene graph which can be rendered directly via + pyglet/openGL or through other endpoints such as a + raytracer. Meshes are added by name, which can then be + moved by updating transform in the transform tree. + """ + + def __init__( + self, geometry=..., base_frame=..., metadata=..., graph=..., camera=..., lights=..., camera_transform=... + ) -> None: + """ + Create a new Scene object. 
+ + Parameters + ------------- + geometry : Trimesh, Path2D, Path3D PointCloud or list + Geometry to initially add to the scene + base_frame : str or hashable + Name of base frame + metadata : dict + Any metadata about the scene + graph : TransformForest or None + A passed transform graph to use + camera : Camera or None + A passed camera to use + lights : [trimesh.scene.lighting.Light] or None + A passed lights to use + camera_transform : (4, 4) float or None + Camera transform in the base frame + """ + ... + def apply_transform(self, transform): # -> Self@Scene: + """ + Apply a transform to all children of the base frame + without modifying any geometry. + + Parameters + -------------- + transform : (4, 4) + Homogeneous transformation matrix. + """ + ... + def add_geometry( + self, geometry, node_name=..., geom_name=..., parent_node_name=..., transform=..., metadata=... + ): # -> list[Unknown] | dict[Unknown, Unknown] | str | None: + """ + Add a geometry to the scene. + + If the mesh has multiple transforms defined in its + metadata, they will all be copied into the + TransformForest of the current scene automatically. + + Parameters + ---------- + geometry : Trimesh, Path2D, Path3D PointCloud or list + Geometry to initially add to the scene + node_name : None or str + Name of the added node. + geom_name : None or str + Name of the added geometry. + parent_node_name : None or str + Name of the parent node in the graph. + transform : None or (4, 4) float + Transform that applies to the added node. + metadata : None or dict + Optional metadata for the node. + + Returns + ---------- + node_name : str + Name of single node in self.graph (passed in) or None if + node was not added (eg. geometry was null or a Scene). + """ + ... + def delete_geometry(self, names): # -> None: + """ + Delete one more multiple geometries from the scene and also + remove any node in the transform graph which references it. 
+ + Parameters + -------------- + name : hashable + Name that references self.geometry + """ + ... + def strip_visuals(self): # -> None: + """ + Strip visuals from every Trimesh geometry + and set them to an empty `ColorVisuals`. + """ + ... + def __hash__(self) -> int: + """ + Return information about scene which is hashable. + + Returns + --------- + hashable : str + Data which can be hashed. + """ + ... + @property + def is_empty(self): # -> bool: + """ + Does the scene have anything in it. + + Returns + ---------- + is_empty: bool, True if nothing is in the scene + """ + ... + @property + def is_valid(self): # -> bool: + """ + Is every geometry connected to the root node. + + Returns + ----------- + is_valid : bool + Does every geometry have a transform + """ + ... + @caching.cache_decorator + def bounds_corners(self): # -> dict[Unknown, Unknown]: + """ + Get the post-transform AABB for each node + which has geometry defined. + + Returns + ----------- + corners : dict + Bounds for each node with vertices: + {node_name : (2, 3) float} + """ + ... + @caching.cache_decorator + def bounds(self): # -> NDArray[float64] | None: + """ + Return the overall bounding box of the scene. + + Returns + -------- + bounds : (2, 3) float or None + Position of [min, max] bounding box + Returns None if no valid bounds exist + """ + ... + @caching.cache_decorator + def extents(self): # -> ndarray[Any, dtype[Any]]: + """ + Return the axis aligned box size of the current scene. + + Returns + ---------- + extents : (3,) float + Bounding box sides length + """ + ... + @caching.cache_decorator + def scale(self): # -> Any: + """ + The approximate scale of the mesh + + Returns + ----------- + scale : float + The mean of the bounding box edge lengths + """ + ... + @caching.cache_decorator + def centroid(self): # -> Any: + """ + Return the center of the bounding box for the scene. + + Returns + -------- + centroid : (3) float + Point for center of bounding box + """ + ... 
+ @caching.cache_decorator + def center_mass(self): # -> Any: + """ + Find the center of mass for every instance in the scene. + + Returns + ------------ + center_mass : (3,) float + The center of mass of the scene + """ + ... + @caching.cache_decorator + def moment_inertia(self): # -> Any: + """ + Return the moment of inertia of the current scene with + respect to the center of mass of the current scene. + + Returns + ------------ + inertia : (3, 3) float + Inertia with respect to cartesian axis at `scene.center_mass` + """ + ... + def moment_inertia_frame(self, transform): # -> Any: + """ + Return the moment of inertia of the current scene relative + to a transform from the base frame. + + Parameters + transform : (4, 4) float + Homogeneous transformation matrix. + + Returns + ------------- + inertia : (3, 3) float + Inertia tensor at requested frame. + """ + ... + @caching.cache_decorator + def area(self): # -> float: + """ + What is the summed area of every geometry which + has area. + + Returns + ------------ + area : float + Summed area of every instanced geometry + """ + ... + @caching.cache_decorator + def volume(self): # -> float: + """ + What is the summed volume of every geometry which + has volume + + Returns + ------------ + volume : float + Summed area of every instanced geometry + """ + ... + @caching.cache_decorator + def triangles(self): # -> ndarray[Any, dtype[Unknown]]: + """ + Return a correctly transformed polygon soup of the + current scene. + + Returns + ---------- + triangles : (n, 3, 3) float + Triangles in space + """ + ... + @caching.cache_decorator + def triangles_node(self): # -> None: + """ + Which node of self.graph does each triangle come from. + + Returns + --------- + triangles_index : (len(self.triangles),) + Node name for each triangle + """ + ... 
+ @caching.cache_decorator + def geometry_identifiers(self): # -> dict[Unknown, Unknown]: + """ + Look up geometries by identifier hash + + Returns + --------- + identifiers : dict + {Identifier hash: key in self.geometry} + """ + ... + @caching.cache_decorator + def duplicate_nodes(self): # -> list[Unknown] | list[list[Unknown]]: + """ + Return a sequence of node keys of identical meshes. + + Will include meshes with different geometry but identical + spatial hashes as well as meshes repeated by self.nodes. + + Returns + ----------- + duplicates : (m) sequence + Keys of self.graph that represent identical geometry + """ + ... + def deduplicated(self): # -> Scene: + """ + Return a new scene where each unique geometry is only + included once and transforms are discarded. + + Returns + ------------- + dedupe : Scene + One copy of each unique geometry from scene + """ + ... + def set_camera(self, angles=..., distance=..., center=..., resolution=..., fov=...): # -> Camera: + """ + Create a camera object for self.camera, and add + a transform to self.graph for it. + + If arguments are not passed sane defaults will be figured + out which show the mesh roughly centered. + + Parameters + ----------- + angles : (3,) float + Initial euler angles in radians + distance : float + Distance from centroid + center : (3,) float + Point camera should be center on + camera : Camera object + Object that stores camera parameters + """ + ... + @property + def camera_transform(self): # -> NDArray[float64] | Any: + """ + Get camera transform in the base frame. + + Returns + ------- + camera_transform : (4, 4) float + Camera transform in the base frame + """ + ... + @camera_transform.setter + def camera_transform(self, matrix): # -> None: + """ + Set the camera transform in the base frame + + Parameters + ---------- + camera_transform : (4, 4) float + Camera transform in the base frame + """ + ... 
+ def camera_rays(self): # -> tuple[NDArray[floating[Any]] | Any, NDArray[float64] | Any, NDArray[int64] | Unknown]: + """ + Calculate the trimesh.scene.Camera origin and ray + direction vectors. Returns one ray per pixel as set + in camera.resolution + + Returns + -------------- + origin: (n, 3) float + Ray origins in space + vectors: (n, 3) float + Ray direction unit vectors in world coordinates + pixels : (n, 2) int + Which pixel does each ray correspond to in an image + """ + ... + @property + def camera(self): # -> Camera: + """ + Get the single camera for the scene. If not manually + set one will abe automatically generated. + + Returns + ---------- + camera : trimesh.scene.Camera + Camera object defined for the scene + """ + ... + @camera.setter + def camera(self, camera): # -> None: + """ + Set a camera object for the Scene. + + Parameters + ----------- + camera : trimesh.scene.Camera + Camera object for the scene + """ + ... + @property + def has_camera(self): # -> bool: + ... + @property + def lights(self): # -> list[PointLight]: + """ + Get a list of the lights in the scene. If nothing is + set it will generate some automatically. + + Returns + ------------- + lights : [trimesh.scene.lighting.Light] + Lights in the scene. + """ + ... + @lights.setter + def lights(self, lights): # -> None: + """ + Assign a list of light objects to the scene + + Parameters + -------------- + lights : [trimesh.scene.lighting.Light] + Lights in the scene. + """ + ... + def rezero(self): # -> None: + """ + Move the current scene so that the AABB of the whole + scene is centered at the origin. + + Does this by changing the base frame to a new, offset + base frame. + """ + ... + def dump(self, concatenate=...): # -> list[Unknown] | Any | NDArray[Unknown]: + """ + Append all meshes in scene freezing transforms. 
+ + Parameters + ------------ + concatenate : bool + If True, concatenate results into single mesh + + Returns + ---------- + dumped : (n,) Trimesh, Path2D, Path3D, PointCloud + Depending on what the scene contains. If `concatenate` + then some geometry may be dropped if it doesn't match. + """ + ... + def subscene(self, node): # -> Scene: + """ + Get part of a scene that succeeds a specified node. + + Parameters + ------------ + node : any + Hashable key in `scene.graph` + + Returns + ----------- + subscene : Scene + Partial scene generated from current. + """ + ... + @caching.cache_decorator + def convex_hull(self): # -> Trimesh: + """ + The convex hull of the whole scene + + Returns + --------- + hull: Trimesh object, convex hull of all meshes in scene + """ + ... + def export( + self, file_obj=..., file_type=..., **kwargs + ): # -> dict[Unknown, Unknown] | dict[str, Unknown] | bytes | bytearray | memoryview | LiteralString | str | Any: + """ + Export a snapshot of the current scene. + + Parameters + ---------- + file_obj : str, file-like, or None + File object to export to + file_type : str or None + What encoding to use for meshes + IE: dict, dict64, stl + + Returns + ---------- + export : bytes + Only returned if file_obj is None + """ + ... + def save_image(self, resolution=..., **kwargs): # -> bytes: + """ + Get a PNG image of a scene. + + Parameters + ----------- + resolution : (2,) int + Resolution to render image + **kwargs + Passed to SceneViewer constructor + + Returns + ----------- + png : bytes + Render of scene as a PNG + """ + ... + @property + def units(self) -> str: + """ + Get the units for every model in the scene, and + raise a ValueError if there are mixed units. + + Returns + ----------- + units : str + Units for every model in the scene + """ + ... + @units.setter + def units(self, value: str): # -> None: + """ + Set the units for every model in the scene without + converting any units just setting the tag. 
+ + Parameters + ------------ + value : str + Value to set every geometry unit value to + """ + ... + def convert_units(self, desired: str, guess: bool = ...) -> Scene: + """ + If geometry has units defined convert them to new units. + + Returns a new scene with geometries and transforms scaled. + + Parameters + ---------- + desired : str + Desired final unit system: 'inches', 'mm', etc. + guess : bool + Is the converter allowed to guess scale when models + don't have it specified in their metadata. + + Returns + ---------- + scaled : trimesh.Scene + Copy of scene with scaling applied and units set + for every model + """ + ... + def explode(self, vector=..., origin=...): # -> None: + """ + Explode a scene around a point and vector. + + Parameters + ----------- + vector : (3,) float or float + Explode radially around a direction vector or spherically + origin : (3,) float + Point to explode around + """ + ... + def scaled(self, scale): # -> Scene: + """ + Return a copy of the current scene, with meshes and scene + transforms scaled to the requested factor. + + Parameters + ----------- + scale : float or (3,) float + Factor to scale meshes and transforms + + Returns + ----------- + scaled : trimesh.Scene + A copy of the current scene but scaled + """ + ... + def copy(self): # -> Scene: + """ + Return a deep copy of the current scene + + Returns + ---------- + copied : trimesh.Scene + Copy of the current scene + """ + ... + def show(self, viewer=..., **kwargs): # -> SceneViewer: + """ + Display the current scene. + + Parameters + ----------- + viewer : Union[str, callable, None] + What kind of viewer to use, such as + 'gl' to open a pyglet window, 'notebook' + for a jupyter notebook or None + kwargs : dict + Includes `smooth`, which will turn + on or off automatic smooth shading + """ + ... + def __add__(self, other): # -> Scene: + """ + Concatenate the current scene with another scene or mesh. 
+ + Parameters + ------------ + other : trimesh.Scene, trimesh.Trimesh, trimesh.Path + Other object to append into the result scene + + Returns + ------------ + appended : trimesh.Scene + Scene with geometry from both scenes + """ + ... + +def split_scene(geometry, **kwargs): # -> Scene: + """ + Given a geometry, list of geometries, or a Scene + return them as a single Scene object. + + Parameters + ---------- + geometry : splittable + + Returns + --------- + scene: trimesh.Scene + """ + ... + +def append_scenes(iterable, common=..., base_frame=...): # -> Scene: + """ + Concatenate multiple scene objects into one scene. + + Parameters + ------------- + iterable : (n,) Trimesh or Scene + Geometries that should be appended + common : (n,) str + Nodes that shouldn't be remapped + base_frame : str + Base frame of the resulting scene + + Returns + ------------ + result : trimesh.Scene + Scene containing all geometry + """ + ... diff --git a/typings/trimesh/scene/transforms.pyi b/typings/trimesh/scene/transforms.pyi new file mode 100644 index 00000000..76d882cb --- /dev/null +++ b/typings/trimesh/scene/transforms.pyi @@ -0,0 +1,365 @@ +""" +This type stub file was generated by pyright. +""" + +import numpy as np + +from .. import caching + +_identity = np.eye(4) + +class SceneGraph: + """ + Hold data about positions and instances of geometry + in a scene. This includes a forest (i.e. multi-root tree) + of transforms and information on which node is the base + frame, and which geometries are affiliated with which + nodes. + """ + + def __init__(self, base_frame=..., repair_rigid=...) -> None: + """ + Create a scene graph, holding homogeneous transformation + matrices and instance information about geometry. + + Parameters + ----------- + base_frame : any + The root node transforms will be positioned from. 
+ repair_rigid : None or float + If a float will attempt to repair rotation matrices + where `M @ M.T` differs from an identity matrix by + more than floating point zero but less than this value. + This can happen in a deep tree with a lot of matrix + multiplies. + """ + ... + def update(self, frame_to, frame_from=..., **kwargs): # -> None: + """ + Update a transform in the tree. + + Parameters + ------------ + frame_from : hashable object + Usually a string (eg 'world'). + If left as None it will be set to self.base_frame + frame_to : hashable object + Usually a string (eg 'mesh_0') + matrix : (4,4) float + Homogeneous transformation matrix + quaternion : (4,) float + Quaternion ordered [w, x, y, z] + axis : (3,) float + Axis of rotation + angle : float + Angle of rotation, in radians + translation : (3,) float + Distance to translate + geometry : hashable + Geometry object name, e.g. 'mesh_0' + metadata: dictionary + Optional metadata attached to the new frame + (exports to glTF node 'extras'). + """ + ... + def get(self, frame_to, frame_from=...): # -> tuple[NDArray[float64] | Unknown | Any, Unknown | None] | None: + """ + Get the transform from one frame to another. + + Parameters + ------------ + frame_to : hashable + Node name, usually a string (eg 'mesh_0') + frame_from : hashable + Node name, usually a string (eg 'world'). + If None it will be set to self.base_frame + + Returns + ---------- + transform : (4, 4) float + Homogeneous transformation matrix + + Raises + ----------- + ValueError + If the frames aren't connected. + """ + ... + def __hash__(self) -> int: ... + def copy(self): # -> SceneGraph: + """ + Return a copy of the current TransformForest. + + Returns + ------------ + copied : TransformForest + Copy of current object. + """ + ... + def to_flattened(self): # -> dict[Unknown, Unknown]: + """ + Export the current transform graph with all + transforms baked into world->instance. 
+ + Returns + --------- + flat : dict + Keyed {node : {transform, geometry} + """ + ... + def to_gltf(self, scene, mesh_index=...): # -> dict[str, list[dict[str, str]]]: + """ + Export a transforms as the 'nodes' section of the + GLTF header dict. + + Parameters + ------------ + scene : trimesh.Scene + Scene with geometry. + mesh_index : dict or None + Mapping { key in scene.geometry : int } + + Returns + -------- + gltf : dict + With 'nodes' referencing a list of dicts + """ + ... + def to_edgelist(self): # -> list[Unknown]: + """ + Export the current transforms as a list of + edge tuples, with each tuple having the format: + (node_a, node_b, {metadata}) + + Returns + --------- + edgelist : (n,) list + Of edge tuples + """ + ... + def from_edgelist(self, edges, strict=...): # -> None: + """ + Load transform data from an edge list into the current + scene graph. + + Parameters + ------------- + edgelist : (n,) tuples + Keyed (node_a, node_b, {key: value}) + strict : bool + If True raise a ValueError when a + malformed edge is passed in a tuple. + """ + ... + def to_networkx(self): + """ + Return a `networkx` copy of this graph. + + Returns + ---------- + graph : networkx.DiGraph + Directed graph. + """ + ... + def show(self, **kwargs): # -> None: + """ + Plot the scene graph using `networkx.draw_networkx` + which uses matplotlib to display the graph. + + Parameters + ----------- + kwargs : dict + Passed to `networkx.draw_networkx` + """ + ... + def load(self, edgelist): # -> None: + """ + Load transform data from an edge list into the current + scene graph. + + Parameters + ------------- + edgelist : (n,) tuples + Structured (node_a, node_b, {key: value}) + """ + ... + @caching.cache_decorator + def nodes(self): # -> dict_keys[Unknown, dict[Unknown, Unknown]]: + """ + A list of every node in the graph. + + Returns + ------------- + nodes : (n,) array + All node names. + """ + ... 
+ @caching.cache_decorator + def nodes_geometry(self): # -> list[Unknown]: + """ + The nodes in the scene graph with geometry attached. + + Returns + ------------ + nodes_geometry : (m,) array + Node names which have geometry associated + """ + ... + @caching.cache_decorator + def geometry_nodes(self): # -> defaultdict[Unknown, list[Unknown]]: + """ + Which nodes have this geometry? Inverse + of `nodes_geometry`. + + Returns + ------------ + geometry_nodes : dict + Keyed {geometry_name : node name} + """ + ... + def remove_geometries(self, geometries): # -> None: + """ + Remove the reference for specified geometries + from nodes without deleting the node. + + Parameters + ------------ + geometries : list or str + Name of scene.geometry to dereference. + """ + ... + def __contains__(self, key): # -> bool: + ... + def __getitem__(self, key): # -> tuple[NDArray[float64] | Unknown | Any, Unknown | None] | None: + ... + def __setitem__(self, key, value): # -> None: + ... + def clear(self): # -> None: + ... + +class EnforcedForest: + """ + A simple forest graph data structure: every node + is allowed to have exactly one parent. This makes + traversal and implementation much simpler than a + full graph data type; by storing only one parent + reference, it enforces the structure for "free." + """ + + def __init__(self) -> None: ... + def add_edge(self, u, v, **kwargs): # -> bool: + """ + Add an edge to the forest cleanly. + + Parameters + ----------- + u : any + Hashable node key. + v : any + Hashable node key. + kwargs : dict + Stored as (u, v) edge data. + + Returns + -------- + changed : bool + Return if this operation changed anything. + """ + ... + def remove_node(self, u): # -> bool: + """ + Remove a node from the forest. + + Parameters + ----------- + u : any + Hashable node key. + + Returns + -------- + changed : bool + Return if this operation changed anything. + """ + ... 
+ def shortest_path(self, u, v): # -> list[Unknown]: + """ + Find the shortest path between `u` and `v`, returning + a path where the first element is always `u` and the + last element is always `v`, disregarding edge direction. + + Parameters + ----------- + u : any + Hashable node key. + v : any + Hashable node key. + + Returns + ----------- + path : (n,) + Path between `u` and `v` + """ + ... + @property + def nodes(self): # -> dict_keys[Unknown, dict[Unknown, Unknown]]: + """ + Get a set of every node. + + Returns + ----------- + nodes : set + Every node currently stored. + """ + ... + @property + def children(self): # -> dict[Unknown, list[Unknown]]: + """ + Get the children of each node. + + Returns + ---------- + children : dict + Keyed {node : [child, child, ...]} + """ + ... + def successors(self, node): # -> set[Unknown]: + """ + Get all nodes that are successors to specified node, + including the specified node. + + Parameters + ------------- + node : any + Hashable key for a node. + + Returns + ------------ + successors : set + Nodes that succeed specified node. + """ + ... + def __hash__(self) -> int: + """ + Actually hash all of the data, but use a "dirty" mechanism + in functions that modify the data, which MUST + # all invalidate the hash by setting `self._hash = None` + + This was optimized a bit, and is evaluating on an + older laptop on a scene with 77 nodes and 76 edges + 10,000 times in 0.7s which seems fast enough. + """ + ... + +def kwargs_to_matrix( + matrix=..., quaternion=..., translation=..., axis=..., angle=..., **kwargs +): # -> NDArray[float64] | Matrix: + """ + Take multiple keyword arguments and parse them + into a homogeneous transformation matrix. + + Returns + --------- + matrix : (4, 4) float + Homogeneous transformation matrix. + """ + ... 
diff --git a/typings/trimesh/schemas.pyi b/typings/trimesh/schemas.pyi new file mode 100644 index 00000000..89327afb --- /dev/null +++ b/typings/trimesh/schemas.pyi @@ -0,0 +1,30 @@ +""" +This type stub file was generated by pyright. +""" + +""" +schemas.py +------------- + +Tools for dealing with schemas, particularly JSONschema +""" + +def resolve(item, resolver): # -> list[Unknown] | dict[Unknown, Unknown]: + """ + Given a JSON Schema containing `$ref` keys recursively + evaluate to find and replace referenced files with their + actual values using trimesh.resolvers.Resolver objects. + + Parameters + --------------- + item : any + JSON schema including `$ref` to other files + resolver : trimesh.visual.resolver.Resolver + Resolver to fetch referenced assets + + Returns + ---------- + result : any + JSONSchema with references replaced + """ + ... diff --git a/typings/trimesh/smoothing.pyi b/typings/trimesh/smoothing.pyi new file mode 100644 index 00000000..d803e2e6 --- /dev/null +++ b/typings/trimesh/smoothing.pyi @@ -0,0 +1,164 @@ +""" +This type stub file was generated by pyright. +""" + +def filter_laplacian( + mesh, lamb=..., iterations=..., implicit_time_integration=..., volume_constraint=..., laplacian_operator=... +): + """ + Smooth a mesh in-place using laplacian smoothing. + Articles + 1 - "Improved Laplacian Smoothing of Noisy Surface Meshes" + J. Vollmer, R. Mencl, and H. Muller + 2 - "Implicit Fairing of Irregular Meshes using Diffusion + and Curvature Flow". M. Desbrun, M. Meyer, + P. Schroder, A.H.B. 
Caltech + Parameters + ------------ + mesh : trimesh.Trimesh + Mesh to be smoothed in place + lamb : float + Diffusion speed constant + If 0.0, no diffusion + If > 0.0, diffusion occurs + implicit_time_integration: boolean + if False: explicit time integration + -lamb <= 1.0 - Stability Limit (Article 1) + if True: implicit time integration + -lamb no limit (Article 2) + iterations : int + Number of passes to run filter + laplacian_operator : None or scipy.sparse.coo.coo_matrix + Sparse matrix laplacian operator + Will be autogenerated if None + """ + ... + +def filter_humphrey(mesh, alpha=..., beta=..., iterations=..., laplacian_operator=...): + """ + Smooth a mesh in-place using laplacian smoothing + and Humphrey filtering. + Articles + "Improved Laplacian Smoothing of Noisy Surface Meshes" + J. Vollmer, R. Mencl, and H. Muller + Parameters + ------------ + mesh : trimesh.Trimesh + Mesh to be smoothed in place + alpha : float + Controls shrinkage, range is 0.0 - 1.0 + If 0.0, not considered + If 1.0, no smoothing + beta : float + Controls how aggressive smoothing is + If 0.0, no smoothing + If 1.0, full aggressiveness + iterations : int + Number of passes to run filter + laplacian_operator : None or scipy.sparse.coo.coo_matrix + Sparse matrix laplacian operator + Will be autogenerated if None + """ + ... + +def filter_taubin(mesh, lamb=..., nu=..., iterations=..., laplacian_operator=...): + """ + Smooth a mesh in-place using laplacian smoothing + and taubin filtering. + Articles + "Improved Laplacian Smoothing of Noisy Surface Meshes" + J. Vollmer, R. Mencl, and H. Muller + Parameters + ------------ + mesh : trimesh.Trimesh + Mesh to be smoothed in place. 
+ lamb : float + Controls shrinkage, range is 0.0 - 1.0 + nu : float + Controls dilation, range is 0.0 - 1.0 + Nu shall be between 0.0 < 1.0/lambda - 1.0/nu < 0.1 + iterations : int + Number of passes to run the filter + laplacian_operator : None or scipy.sparse.coo.coo_matrix + Sparse matrix laplacian operator + Will be autogenerated if None + """ + ... + +def filter_mut_dif_laplacian(mesh, lamb=..., iterations=..., volume_constraint=..., laplacian_operator=...): + """ + Smooth a mesh in-place using laplacian smoothing using a + mutable difusion laplacian. + + Articles + Barroqueiro, B., Andrade-Campos, A., Dias-de-Oliveira, + J., and Valente, R. (January 21, 2021). + "Bridging between topology optimization and additive + manufacturing via Laplacian smoothing." ASME. J. Mech. Des. + + + Parameters + ------------ + mesh : trimesh.Trimesh + Mesh to be smoothed in place + lamb : float + Diffusion speed constant + If 0.0, no diffusion + If > 0.0, diffusion occurs + iterations : int + Number of passes to run filter + laplacian_operator : None or scipy.sparse.coo.coo_matrix + Sparse matrix laplacian operator + Will be autogenerated if None + """ + ... + +def laplacian_calculation(mesh, equal_weight=..., pinned_vertices=...): # -> coo_matrix: + """ + Calculate a sparse matrix for laplacian operations. + Parameters + ------------- + mesh : trimesh.Trimesh + Input geometry + equal_weight : bool + If True, all neighbors will be considered equally + If False, all neighbors will be weighted by inverse distance + Returns + ---------- + laplacian : scipy.sparse.coo.coo_matrix + Laplacian operator + """ + ... + +def get_vertices_normals(mesh): # -> tuple[Unknown, Any]: + """ + Compute Vertex normals using equal weighting of neighbors faces. + Parameters + ------------- + mesh : trimesh.Trimesh + Input geometry + Returns + ---------- + vertices_normals: array + Vertices normals + """ + ... 
+ +def dilate_slope(vertices, faces, normals, v, eps): + """ + Get de derivate of dilation scalar by the volume variation by finite differences + Thus, Vertices += vertex_normals*dilate_slope*(Initial_Volume - Srinked_Volume) + Parameters + ------------- + mesh : trimesh.Trimesh + Input geometry + vertices: mesh.vertices + faces: mesh.faces + normals: array + vertices normals + Returns + ---------- + dilate_slope: float + derivative + """ + ... diff --git a/typings/trimesh/transformations.pyi b/typings/trimesh/transformations.pyi new file mode 100644 index 00000000..f1091c3a --- /dev/null +++ b/typings/trimesh/transformations.pyi @@ -0,0 +1,1300 @@ +""" +This type stub file was generated by pyright. +""" + +import numpy as np + +"""Homogeneous Transformation Matrices and Quaternions. + +A library for calculating 4x4 matrices for translating, rotating, reflecting, +scaling, shearing, projecting, orthogonalizing, and superimposing arrays of +3D homogeneous coordinates as well as for converting between rotation matrices, +Euler angles, and quaternions. Also includes an Arcball control object and +functions to decompose transformation matrices. + +:Author: + `Christoph Gohlke `_ + +:Organization: + Laboratory for Fluorescence Dynamics, University of California, Irvine + +:Version: 2017.02.17 + +Requirements +------------ +* `CPython 2.7 or 3.4 `_ +* `numpy 1.9 `_ +* `Transformations.c 2015.03.19 `_ + (recommended for speedup of some functions) + +Notes +----- +The API is not stable yet and is expected to change between revisions. + +This Python code is not optimized for speed. Refer to the transformations.c +module for a faster implementation of some functions. + +Documentation in HTML format can be generated with epydoc. 
+ +Matrices (M) can be inverted using np.linalg.inv(M), be concatenated using +np.dot(M0, M1), or transform homogeneous coordinate arrays (v) using +np.dot(M, v) for shape (4, *) column vectors, respectively +np.dot(v, M.T) for shape (*, 4) row vectors ("array of points"). + +This module follows the "column vectors on the right" and "row major storage" +(C contiguous) conventions. The translation components are in the right column +of the transformation matrix, i.e. M[:3, 3]. +The transpose of the transformation matrices may have to be used to interface +with other graphics systems, e.g. with OpenGL's glMultMatrixd(). See also [16]. + +Calculations are carried out with np.float64 precision. + +Vector, point, quaternion, and matrix function arguments are expected to be +"array like", i.e. tuple, list, or numpy arrays. + +Return types are numpy arrays unless specified otherwise. + +Angles are in radians unless specified otherwise. + +Quaternions w+ix+jy+kz are represented as [w, x, y, z]. + +A triple of Euler angles can be applied/interpreted in 24 ways, which can +be specified using a 4 character string or encoded 4-tuple: + + *Axes 4-string*: e.g. 'sxyz' or 'ryxy' + + - first character : rotations are applied to 's'tatic or 'r'otating frame + - remaining characters : successive rotation axis 'x', 'y', or 'z' + + *Axes 4-tuple*: e.g. (0, 0, 0, 0) or (1, 1, 1, 1) + + - inner axis: code of axis ('x':0, 'y':1, 'z':2) of rightmost matrix. + - parity : even (0) if inner axis 'x' is followed by 'y', 'y' is followed + by 'z', or 'z' is followed by 'x'. Otherwise odd (1). + - repetition : first and last axis are same (1) or different (0). + - frame : rotations are applied to static (0) or rotating (1) frame. + +Other Python packages and modules for 3D transformations and quaternions: + +* `Transforms3d `_ + includes most code of this module. +* `Blender.mathutils `_ +* `numpy-dtypes `_ + +References +---------- +(1) Matrices and transformations. Ronald Goldman. 
+ In "Graphics Gems I", pp 472-475. Morgan Kaufmann, 1990. +(2) More matrices and transformations: shear and pseudo-perspective. + Ronald Goldman. In "Graphics Gems II", pp 320-323. Morgan Kaufmann, 1991. +(3) Decomposing a matrix into simple transformations. Spencer Thomas. + In "Graphics Gems II", pp 320-323. Morgan Kaufmann, 1991. +(4) Recovering the data from the transformation matrix. Ronald Goldman. + In "Graphics Gems II", pp 324-331. Morgan Kaufmann, 1991. +(5) Euler angle conversion. Ken Shoemake. + In "Graphics Gems IV", pp 222-229. Morgan Kaufmann, 1994. +(6) Arcball rotation control. Ken Shoemake. + In "Graphics Gems IV", pp 175-192. Morgan Kaufmann, 1994. +(7) Representing attitude: Euler angles, unit quaternions, and rotation + vectors. James Diebel. 2006. +(8) A discussion of the solution for the best rotation to relate two sets + of vectors. W Kabsch. Acta Cryst. 1978. A34, 827-828. +(9) Closed-form solution of absolute orientation using unit quaternions. + BKP Horn. J Opt Soc Am A. 1987. 4(4):629-642. +(10) Quaternions. Ken Shoemake. + http://www.sfu.ca/~jwa3/cmpt461/files/quatut.pdf +(11) From quaternion to matrix and back. JMP van Waveren. 2005. + http://www.intel.com/cd/ids/developer/asmo-na/eng/293748.htm +(12) Uniform random rotations. Ken Shoemake. + In "Graphics Gems III", pp 124-132. Morgan Kaufmann, 1992. +(13) Quaternion in molecular modeling. CFF Karney. + J Mol Graph Mod, 25(5):595-604 +(14) New method for extracting the quaternion from a rotation matrix. + Itzhack Y Bar-Itzhack, J Guid Contr Dynam. 2000. 23(6): 1085-1087. +(15) Multiple View Geometry in Computer Vision. Hartley and Zissermann. + Cambridge University Press; 2nd Ed. 2004. Chapter 4, Algorithm 4.7, p 130. +(16) Column Vectors vs. Row Vectors. 
+ http://steve.hollasch.net/cgindex/math/matrix/column-vec.html + +Examples +-------- +>>> alpha, beta, gamma = 0.123, -1.234, 2.345 +>>> origin, xaxis, yaxis, zaxis = [0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1] +>>> I = identity_matrix() +>>> Rx = rotation_matrix(alpha, xaxis) +>>> Ry = rotation_matrix(beta, yaxis) +>>> Rz = rotation_matrix(gamma, zaxis) +>>> R = concatenate_matrices(Rx, Ry, Rz) +>>> euler = euler_from_matrix(R, 'rxyz') +>>> np.allclose([alpha, beta, gamma], euler) +True +>>> Re = euler_matrix(alpha, beta, gamma, 'rxyz') +>>> is_same_transform(R, Re) +True +>>> al, be, ga = euler_from_matrix(Re, 'rxyz') +>>> is_same_transform(Re, euler_matrix(al, be, ga, 'rxyz')) +True +>>> qx = quaternion_about_axis(alpha, xaxis) +>>> qy = quaternion_about_axis(beta, yaxis) +>>> qz = quaternion_about_axis(gamma, zaxis) +>>> q = quaternion_multiply(qx, qy) +>>> q = quaternion_multiply(q, qz) +>>> Rq = quaternion_matrix(q) +>>> is_same_transform(R, Rq) +True +>>> S = scale_matrix(1.23, origin) +>>> T = translation_matrix([1, 2, 3]) +>>> Z = shear_matrix(beta, xaxis, origin, zaxis) +>>> R = random_rotation_matrix(np.random.rand(3)) +>>> M = concatenate_matrices(T, R, Z, S) +>>> scale, shear, angles, trans, persp = decompose_matrix(M) +>>> np.allclose(scale, 1.23) +True +>>> np.allclose(trans, [1, 2, 3]) +True +>>> np.allclose(shear, [0, np.tan(beta), 0]) +True +>>> is_same_transform(R, euler_matrix(axes='sxyz', *angles)) +True +>>> M1 = compose_matrix(scale, shear, angles, trans, persp) +>>> is_same_transform(M, M1) +True +>>> v0, v1 = random_vector(3), random_vector(3) +>>> M = rotation_matrix(angle_between_vectors(v0, v1), vector_product(v0, v1)) +>>> v2 = np.dot(v0, M[:3,:3].T) +>>> np.allclose(unit_vector(v1), unit_vector(v2)) +True + +""" +_IDENTITY = np.eye(4) + +def identity_matrix(): # -> NDArray[float64]: + """Return 4x4 identity/unit matrix. 
+ + >>> I = identity_matrix() + >>> np.allclose(I, np.dot(I, I)) + True + >>> np.sum(I), np.trace(I) + (4.0, 4.0) + >>> np.allclose(I, np.identity(4)) + True + + """ + ... + +def translation_matrix(direction): # -> NDArray[float64]: + """ + Return matrix to translate by direction vector. + + >>> v = np.random.random(3) - 0.5 + >>> np.allclose(v, translation_matrix(v)[:3, 3]) + True + + """ + ... + +def translation_from_matrix(matrix): # -> ndarray[Any, dtype[Unknown]]: + """Return translation vector from translation matrix. + + >>> v0 = np.random.random(3) - 0.5 + >>> v1 = translation_from_matrix(translation_matrix(v0)) + >>> np.allclose(v0, v1) + True + + """ + ... + +def reflection_matrix(point, normal): # -> NDArray[float64]: + """Return matrix to mirror at plane defined by point and normal vector. + + >>> v0 = np.random.random(4) - 0.5 + >>> v0[3] = 1. + >>> v1 = np.random.random(3) - 0.5 + >>> R = reflection_matrix(v0, v1) + >>> np.allclose(2, np.trace(R)) + True + >>> np.allclose(v0, np.dot(R, v0)) + True + >>> v2 = v0.copy() + >>> v2[:3] += v1 + >>> v3 = v0.copy() + >>> v2[:3] -= v1 + >>> np.allclose(v2, np.dot(R, v3)) + True + + """ + ... + +def reflection_from_matrix(matrix): # -> tuple[Any, ndarray[Any, _dtype]]: + """Return mirror plane point and normal vector from reflection matrix. + + >>> v0 = np.random.random(3) - 0.5 + >>> v1 = np.random.random(3) - 0.5 + >>> M0 = reflection_matrix(v0, v1) + >>> point, normal = reflection_from_matrix(M0) + >>> M1 = reflection_matrix(point, normal) + >>> is_same_transform(M0, M1) + True + + """ + ... + +def rotation_matrix(angle, direction, point=...): # -> Matrix: + """ + Return matrix to rotate about axis defined by point and + direction. 
+ + Parameters + ------------- + angle : float, or sympy.Symbol + Angle, in radians or symbolic angle + direction : (3,) float + Unit vector along rotation axis + point : (3, ) float, or None + Origin point of rotation axis + + Returns + ------------- + matrix : (4, 4) float, or (4, 4) sympy.Matrix + Homogeneous transformation matrix + + Examples + ------------- + >>> R = rotation_matrix(np.pi/2, [0, 0, 1], [1, 0, 0]) + >>> np.allclose(np.dot(R, [0, 0, 0, 1]), [1, -1, 0, 1]) + True + >>> angle = (random.random() - 0.5) * (2*np.pi) + >>> direc = np.random.random(3) - 0.5 + >>> point = np.random.random(3) - 0.5 + >>> R0 = rotation_matrix(angle, direc, point) + >>> R1 = rotation_matrix(angle-2*np.pi, direc, point) + >>> is_same_transform(R0, R1) + True + >>> R0 = rotation_matrix(angle, direc, point) + >>> R1 = rotation_matrix(-angle, -direc, point) + >>> is_same_transform(R0, R1) + True + >>> I = np.identity(4, np.float64) + >>> np.allclose(I, rotation_matrix(np.pi*2, direc)) + True + >>> np.allclose(2, np.trace(rotation_matrix(np.pi/2,direc,point))) + True + + """ + ... + +def rotation_from_matrix(matrix): # -> tuple[Any, ndarray[Any, _dtype], Any]: + """Return rotation angle and axis from rotation matrix. + + >>> angle = (random.random() - 0.5) * (2*np.pi) + >>> direc = np.random.random(3) - 0.5 + >>> point = np.random.random(3) - 0.5 + >>> R0 = rotation_matrix(angle, direc, point) + >>> angle, direc, point = rotation_from_matrix(R0) + >>> R1 = rotation_matrix(angle, direc, point) + >>> is_same_transform(R0, R1) + True + + """ + ... + +def scale_matrix(factor, origin=..., direction=...): # -> NDArray[Any] | NDArray[float64]: + """Return matrix to scale by factor around origin in direction. + + Use factor -1 for point symmetry. 
+ + >>> v = (np.random.rand(4, 5) - 0.5) * 20 + >>> v[3] = 1 + >>> S = scale_matrix(-1.234) + >>> np.allclose(np.dot(S, v)[:3], -1.234*v[:3]) + True + >>> factor = random.random() * 10 - 5 + >>> origin = np.random.random(3) - 0.5 + >>> direct = np.random.random(3) - 0.5 + >>> S = scale_matrix(factor, origin) + >>> S = scale_matrix(factor, origin, direct) + + """ + ... + +def scale_from_matrix(matrix): # -> tuple[Any, Any, Unknown | None]: + """Return scaling factor, origin and direction from scaling matrix. + + >>> factor = random.random() * 10 - 5 + >>> origin = np.random.random(3) - 0.5 + >>> direct = np.random.random(3) - 0.5 + >>> S0 = scale_matrix(factor, origin) + >>> factor, origin, direction = scale_from_matrix(S0) + >>> S1 = scale_matrix(factor, origin, direction) + >>> is_same_transform(S0, S1) + True + >>> S0 = scale_matrix(factor, origin, direct) + >>> factor, origin, direction = scale_from_matrix(S0) + >>> S1 = scale_matrix(factor, origin, direction) + >>> is_same_transform(S0, S1) + True + + """ + ... + +def projection_matrix(point, normal, direction=..., perspective=..., pseudo=...): # -> NDArray[float64]: + """Return matrix to project onto plane defined by point and normal. + + Using either perspective point, projection direction, or none of both. + + If pseudo is True, perspective projections will preserve relative depth + such that Perspective = dot(Orthogonal, PseudoPerspective). 
+ + >>> P = projection_matrix([0, 0, 0], [1, 0, 0]) + >>> np.allclose(P[1:, 1:], np.identity(4)[1:, 1:]) + True + >>> point = np.random.random(3) - 0.5 + >>> normal = np.random.random(3) - 0.5 + >>> direct = np.random.random(3) - 0.5 + >>> persp = np.random.random(3) - 0.5 + >>> P0 = projection_matrix(point, normal) + >>> P1 = projection_matrix(point, normal, direction=direct) + >>> P2 = projection_matrix(point, normal, perspective=persp) + >>> P3 = projection_matrix(point, normal, perspective=persp, pseudo=True) + >>> is_same_transform(P2, np.dot(P0, P3)) + True + >>> P = projection_matrix([3, 0, 0], [1, 1, 0], [1, 0, 0]) + >>> v0 = (np.random.rand(4, 5) - 0.5) * 20 + >>> v0[3] = 1 + >>> v1 = np.dot(P, v0) + >>> np.allclose(v1[1], v0[1]) + True + >>> np.allclose(v1[0], 3-v1[1]) + True + + """ + ... + +def projection_from_matrix( + matrix, pseudo=... +): # -> tuple[Any, Unknown, Unknown, None, Literal[False]] | tuple[Any, Unknown, None, None, Literal[False]] | tuple[Any, NDArray[float64], None, Any, bool]: + """Return projection plane and perspective point from projection matrix. + + Return values are same as arguments for projection_matrix function: + point, normal, direction, perspective, and pseudo. 
+ + >>> point = np.random.random(3) - 0.5 + >>> normal = np.random.random(3) - 0.5 + >>> direct = np.random.random(3) - 0.5 + >>> persp = np.random.random(3) - 0.5 + >>> P0 = projection_matrix(point, normal) + >>> result = projection_from_matrix(P0) + >>> P1 = projection_matrix(*result) + >>> is_same_transform(P0, P1) + True + >>> P0 = projection_matrix(point, normal, direct) + >>> result = projection_from_matrix(P0) + >>> P1 = projection_matrix(*result) + >>> is_same_transform(P0, P1) + True + >>> P0 = projection_matrix(point, normal, perspective=persp, pseudo=False) + >>> result = projection_from_matrix(P0, pseudo=False) + >>> P1 = projection_matrix(*result) + >>> is_same_transform(P0, P1) + True + >>> P0 = projection_matrix(point, normal, perspective=persp, pseudo=True) + >>> result = projection_from_matrix(P0, pseudo=True) + >>> P1 = projection_matrix(*result) + >>> is_same_transform(P0, P1) + True + + """ + ... + +def clip_matrix(left, right, bottom, top, near, far, perspective=...): # -> NDArray[Unknown]: + """Return matrix to obtain normalized device coordinates from frustum. + + The frustum bounds are axis-aligned along x (left, right), + y (bottom, top) and z (near, far). + + Normalized device coordinates are in range [-1, 1] if coordinates are + inside the frustum. + + If perspective is True the frustum is a truncated pyramid with the + perspective point at origin and direction along z axis, otherwise an + orthographic canonical view volume (a box). + + Homogeneous coordinates transformed by the perspective clip matrix + need to be dehomogenized (divided by w coordinate). 
+ + >>> frustum = np.random.rand(6) + >>> frustum[1] += frustum[0] + >>> frustum[3] += frustum[2] + >>> frustum[5] += frustum[4] + >>> M = clip_matrix(perspective=False, *frustum) + >>> a = np.dot(M, [frustum[0], frustum[2], frustum[4], 1]) + >>> np.allclose(a, [-1., -1., -1., 1.]) + True + >>> b = np.dot(M, [frustum[1], frustum[3], frustum[5], 1]) + >>> np.allclose(b, [ 1., 1., 1., 1.]) + True + >>> M = clip_matrix(perspective=True, *frustum) + >>> v = np.dot(M, [frustum[0], frustum[2], frustum[4], 1]) + >>> c = v / v[3] + >>> np.allclose(c, [-1., -1., -1., 1.]) + True + >>> v = np.dot(M, [frustum[1], frustum[3], frustum[4], 1]) + >>> d = v / v[3] + >>> np.allclose(d, [ 1., 1., -1., 1.]) + True + + """ + ... + +def shear_matrix(angle, direction, point, normal): # -> NDArray[float64]: + """Return matrix to shear by angle along direction vector on shear plane. + + The shear plane is defined by a point and normal vector. The direction + vector must be orthogonal to the plane's normal vector. + + A point P is transformed by the shear matrix into P" such that + the vector P-P" is parallel to the direction vector and its extent is + given by the angle of P-P'-P", where P' is the orthogonal projection + of P onto the shear plane. + + >>> angle = (random.random() - 0.5) * 4*np.pi + >>> direct = np.random.random(3) - 0.5 + >>> point = np.random.random(3) - 0.5 + >>> normal = np.cross(direct, np.random.random(3)) + >>> S = shear_matrix(angle, direct, point, normal) + >>> np.allclose(1, np.linalg.det(S)) + True + + """ + ... + +def shear_from_matrix(matrix): # -> tuple[Unknown, Any, Any, Unknown]: + """Return shear angle, direction and plane from shear matrix. 
+ + >>> angle = np.pi / 2.0 + >>> direct = [0.0, 1.0, 0.0] + >>> point = [0.0, 0.0, 0.0] + >>> normal = np.cross(direct, np.roll(direct,1)) + >>> S0 = shear_matrix(angle, direct, point, normal) + >>> angle, direct, point, normal = shear_from_matrix(S0) + >>> S1 = shear_matrix(angle, direct, point, normal) + >>> is_same_transform(S0, S1) + True + + """ + ... + +def decompose_matrix(matrix): # -> tuple[NDArray[float64], list[float], list[float], Any, Any | NDArray[Any]]: + """Return sequence of transformations from transformation matrix. + + matrix : array_like + Non-degenerative homogeneous transformation matrix + + Return tuple of: + scale : vector of 3 scaling factors + shear : list of shear factors for x-y, x-z, y-z axes + angles : list of Euler angles about static x, y, z axes + translate : translation vector along x, y, z axes + perspective : perspective partition of matrix + + Raise ValueError if matrix is of wrong type or degenerative. + + >>> T0 = translation_matrix([1, 2, 3]) + >>> scale, shear, angles, trans, persp = decompose_matrix(T0) + >>> T1 = translation_matrix(trans) + >>> np.allclose(T0, T1) + True + >>> S = scale_matrix(0.123) + >>> scale, shear, angles, trans, persp = decompose_matrix(S) + >>> scale[0] + 0.123 + >>> R0 = euler_matrix(1, 2, 3) + >>> scale, shear, angles, trans, persp = decompose_matrix(R0) + >>> R1 = euler_matrix(*angles) + >>> np.allclose(R0, R1) + True + + """ + ... + +def compose_matrix(scale=..., shear=..., angles=..., translate=..., perspective=...): # -> Any: + """Return transformation matrix from sequence of transformations. + + This is the inverse of the decompose_matrix function. 
+ + Sequence of transformations: + scale : vector of 3 scaling factors + shear : list of shear factors for x-y, x-z, y-z axes + angles : list of Euler angles about static x, y, z axes + translate : translation vector along x, y, z axes + perspective : perspective partition of matrix + + >>> scale = np.random.random(3) - 0.5 + >>> shear = np.random.random(3) - 0.5 + >>> angles = (np.random.random(3) - 0.5) * (2*np.pi) + >>> trans = np.random.random(3) - 0.5 + >>> persp = np.random.random(4) - 0.5 + >>> M0 = compose_matrix(scale, shear, angles, trans, persp) + >>> result = decompose_matrix(M0) + >>> M1 = compose_matrix(*result) + >>> is_same_transform(M0, M1) + True + + """ + ... + +def orthogonalization_matrix(lengths, angles): # -> NDArray[Any]: + """Return orthogonalization matrix for crystallographic cell coordinates. + + Angles are expected in degrees. + + The de-orthogonalization matrix is the inverse. + + >>> O = orthogonalization_matrix([10, 10, 10], [90, 90, 90]) + >>> np.allclose(O[:3, :3], np.identity(3, float) * 10) + True + >>> O = orthogonalization_matrix([9.8, 12.0, 15.5], [87.2, 80.7, 69.7]) + >>> np.allclose(np.sum(O), 43.063229) + True + + """ + ... + +def affine_matrix_from_points(v0, v1, shear=..., scale=..., usesvd=...): # -> Any: + """Return affine transform matrix to register two point sets. + + v0 and v1 are shape (ndims, *) arrays of at least ndims non-homogeneous + coordinates, where ndims is the dimensionality of the coordinate space. + + If shear is False, a similarity transformation matrix is returned. + If also scale is False, a rigid/Euclidean transformation matrix + is returned. + + By default the algorithm by Hartley and Zissermann [15] is used. + If usesvd is True, similarity and Euclidean transformation matrices + are calculated by minimizing the weighted sum of squared deviations + (RMSD) according to the algorithm by Kabsch [8]. 
+ Otherwise, and if ndims is 3, the quaternion based algorithm by Horn [9] + is used, which is slower when using this Python implementation. + + The returned matrix performs rotation, translation and uniform scaling + (if specified). + + >>> v0 = [[0, 1031, 1031, 0], [0, 0, 1600, 1600]] + >>> v1 = [[675, 826, 826, 677], [55, 52, 281, 277]] + >>> mat = affine_matrix_from_points(v0, v1) + >>> T = translation_matrix(np.random.random(3)-0.5) + >>> R = random_rotation_matrix(np.random.random(3)) + >>> S = scale_matrix(random.random()) + >>> M = concatenate_matrices(T, R, S) + >>> v0 = (np.random.rand(4, 100) - 0.5) * 20 + >>> v0[3] = 1 + >>> v1 = np.dot(M, v0) + >>> v0[:3] += np.random.normal(0, 1e-8, 300).reshape(3, -1) + >>> M = affine_matrix_from_points(v0[:3], v1[:3]) + >>> check = np.allclose(v1, np.dot(M, v0)) + + More examples in superimposition_matrix() + + """ + ... + +def superimposition_matrix(v0, v1, scale=..., usesvd=...): # -> Any: + """Return matrix to transform given 3D point set into second point set. + + v0 and v1 are shape (3, *) or (4, *) arrays of at least 3 points. + + The parameters scale and usesvd are explained in the more general + affine_matrix_from_points function. + + The returned matrix is a similarity or Euclidean transformation matrix. + This function has a fast C implementation in transformations.c. 
+ + >>> v0 = np.random.rand(3, 10) + >>> M = superimposition_matrix(v0, v0) + >>> np.allclose(M, np.identity(4)) + True + >>> R = random_rotation_matrix(np.random.random(3)) + >>> v0 = [[1,0,0], [0,1,0], [0,0,1], [1,1,1]] + >>> v1 = np.dot(R, v0) + >>> M = superimposition_matrix(v0, v1) + >>> np.allclose(v1, np.dot(M, v0)) + True + >>> v0 = (np.random.rand(4, 100) - 0.5) * 20 + >>> v0[3] = 1 + >>> v1 = np.dot(R, v0) + >>> M = superimposition_matrix(v0, v1) + >>> np.allclose(v1, np.dot(M, v0)) + True + >>> S = scale_matrix(random.random()) + >>> T = translation_matrix(np.random.random(3)-0.5) + >>> M = concatenate_matrices(T, R, S) + >>> v1 = np.dot(M, v0) + >>> v0[:3] += np.random.normal(0, 1e-9, 300).reshape(3, -1) + >>> M = superimposition_matrix(v0, v1, scale=True) + >>> np.allclose(v1, np.dot(M, v0)) + True + >>> M = superimposition_matrix(v0, v1, scale=True, usesvd=False) + >>> np.allclose(v1, np.dot(M, v0)) + True + >>> v = np.empty((4, 100, 3)) + >>> v[:, :, 0] = v0 + >>> M = superimposition_matrix(v0, v1, scale=True, usesvd=False) + >>> np.allclose(v1, np.dot(M, v[:, :, 0])) + True + + """ + ... + +def euler_matrix(ai, aj, ak, axes=...): # -> NDArray[float64]: + """Return homogeneous rotation matrix from Euler angles and axis sequence. + + ai, aj, ak : Euler's roll, pitch and yaw angles + axes : One of 24 axis sequences as string or encoded tuple + + >>> R = euler_matrix(1, 2, 3, 'syxz') + >>> np.allclose(np.sum(R[0]), -1.34786452) + True + >>> R = euler_matrix(1, 2, 3, (0, 1, 0, 1)) + >>> np.allclose(np.sum(R[0]), -0.383436184) + True + >>> ai, aj, ak = (4*np.pi) * (np.random.random(3) - 0.5) + >>> for axes in _AXES2TUPLE.keys(): + ... R = euler_matrix(ai, aj, ak, axes) + >>> for axes in _TUPLE2AXES.keys(): + ... R = euler_matrix(ai, aj, ak, axes) + + """ + ... + +def euler_from_matrix(matrix, axes=...): # -> tuple[Any | float | NDArray[Any], Any, Any | NDArray[Any] | float]: + """Return Euler angles from rotation matrix for specified axis sequence. 
+ + axes : One of 24 axis sequences as string or encoded tuple + + Note that many Euler angle triplets can describe one matrix. + + >>> R0 = euler_matrix(1, 2, 3, 'syxz') + >>> al, be, ga = euler_from_matrix(R0, 'syxz') + >>> R1 = euler_matrix(al, be, ga, 'syxz') + >>> np.allclose(R0, R1) + True + >>> angles = (4*np.pi) * (np.random.random(3) - 0.5) + >>> for axes in _AXES2TUPLE.keys(): + ... R0 = euler_matrix(axes=axes, *angles) + ... R1 = euler_matrix(axes=axes, *euler_from_matrix(R0, axes)) + ... if not np.allclose(R0, R1): print(axes, "failed") + + """ + ... + +def euler_from_quaternion( + quaternion, axes=... +): # -> tuple[Any | float | NDArray[Any], Any, Any | NDArray[Any] | float]: + """Return Euler angles from quaternion for specified axis sequence. + + >>> angles = euler_from_quaternion([0.99810947, 0.06146124, 0, 0]) + >>> np.allclose(angles, [0.123, 0, 0]) + True + + """ + ... + +def quaternion_from_euler(ai, aj, ak, axes=...): # -> NDArray[float64]: + """Return quaternion from Euler angles and axis sequence. + + ai, aj, ak : Euler's roll, pitch and yaw angles + axes : One of 24 axis sequences as string or encoded tuple + + >>> q = quaternion_from_euler(1, 2, 3, 'ryxz') + >>> np.allclose(q, [0.435953, 0.310622, -0.718287, 0.444435]) + True + + """ + ... + +def quaternion_about_axis(angle, axis): # -> Any | NDArray[Any]: + """Return quaternion for rotation about axis. + + >>> q = quaternion_about_axis(0.123, [1, 0, 0]) + >>> np.allclose(q, [0.99810947, 0.06146124, 0, 0]) + True + + """ + ... + +def quaternion_matrix(quaternion): # -> ndarray[Any, dtype[float64]]: + """ + Return a homogeneous rotation matrix from quaternion. 
+ + >>> M = quaternion_matrix([0.99810947, 0.06146124, 0, 0]) + >>> np.allclose(M, rotation_matrix(0.123, [1, 0, 0])) + True + >>> M = quaternion_matrix([1, 0, 0, 0]) + >>> np.allclose(M, np.identity(4)) + True + >>> M = quaternion_matrix([0, 1, 0, 0]) + >>> np.allclose(M, np.diag([1, -1, -1, 1])) + True + >>> M = quaternion_matrix([[1, 0, 0, 0],[0, 1, 0, 0]]) + >>> np.allclose(M, np.array([np.identity(4), np.diag([1, -1, -1, 1])])) + True + + + """ + ... + +def quaternion_from_matrix(matrix, isprecise=...): # -> Any: + """Return quaternion from rotation matrix. + + If isprecise is True, the input matrix is assumed to be a precise rotation + matrix and a faster algorithm is used. + + >>> q = quaternion_from_matrix(np.identity(4), True) + >>> np.allclose(q, [1, 0, 0, 0]) + True + >>> q = quaternion_from_matrix(np.diag([1, -1, -1, 1])) + >>> np.allclose(q, [0, 1, 0, 0]) or np.allclose(q, [0, -1, 0, 0]) + True + >>> R = rotation_matrix(0.123, (1, 2, 3)) + >>> q = quaternion_from_matrix(R, True) + >>> np.allclose(q, [0.9981095, 0.0164262, 0.0328524, 0.0492786]) + True + >>> R = [[-0.545, 0.797, 0.260, 0], [0.733, 0.603, -0.313, 0], + ... [-0.407, 0.021, -0.913, 0], [0, 0, 0, 1]] + >>> q = quaternion_from_matrix(R) + >>> np.allclose(q, [0.19069, 0.43736, 0.87485, -0.083611]) + True + >>> R = [[0.395, 0.362, 0.843, 0], [-0.626, 0.796, -0.056, 0], + ... [-0.677, -0.498, 0.529, 0], [0, 0, 0, 1]] + >>> q = quaternion_from_matrix(R) + >>> np.allclose(q, [0.82336615, -0.13610694, 0.46344705, -0.29792603]) + True + >>> R = random_rotation_matrix() + >>> q = quaternion_from_matrix(R) + >>> is_same_transform(R, quaternion_matrix(q)) + True + >>> is_same_quaternion(quaternion_from_matrix(R, isprecise=False), + ... quaternion_from_matrix(R, isprecise=True)) + True + >>> R = euler_matrix(0.0, 0.0, np.pi/2.0) + >>> is_same_quaternion(quaternion_from_matrix(R, isprecise=False), + ... quaternion_from_matrix(R, isprecise=True)) + True + + """ + ... 
+ +def quaternion_multiply(quaternion1, quaternion0): # -> NDArray[float64]: + """Return multiplication of two quaternions. + + >>> q = quaternion_multiply([4, 1, -2, 3], [8, -5, 6, 7]) + >>> np.allclose(q, [28, -44, -14, 48]) + True + + """ + ... + +def quaternion_conjugate(quaternion): # -> NDArray[float64]: + """Return conjugate of quaternion. + + >>> q0 = random_quaternion() + >>> q1 = quaternion_conjugate(q0) + >>> q1[0] == q0[0] and all(q1[1:] == -q0[1:]) + True + + """ + ... + +def quaternion_inverse(quaternion): # -> Any: + """Return inverse of quaternion. + + >>> q0 = random_quaternion() + >>> q1 = quaternion_inverse(q0) + >>> np.allclose(quaternion_multiply(q0, q1), [1, 0, 0, 0]) + True + + """ + ... + +def quaternion_real(quaternion): # -> float: + """Return real part of quaternion. + + >>> quaternion_real([3, 0, 1, 2]) + 3.0 + + """ + ... + +def quaternion_imag(quaternion): # -> NDArray[float64]: + """Return imaginary part of quaternion. + + >>> quaternion_imag([3, 0, 1, 2]) + array([0., 1., 2.]) + + """ + ... + +def quaternion_slerp(quat0, quat1, fraction, spin=..., shortestpath=...): # -> Any | NDArray[floating[_64Bit]] | None: + """Return spherical linear interpolation between two quaternions. + + >>> q0 = random_quaternion() + >>> q1 = random_quaternion() + >>> q = quaternion_slerp(q0, q1, 0) + >>> np.allclose(q, q0) + True + >>> q = quaternion_slerp(q0, q1, 1, 1) + >>> np.allclose(q, q1) + True + >>> q = quaternion_slerp(q0, q1, 0.5) + >>> angle = np.arccos(np.dot(q0, q)) + >>> np.allclose(2, np.arccos(np.dot(q0, q1)) / angle) or \ + np.allclose(2, np.arccos(-np.dot(q0, q1)) / angle) + True + + """ + ... + +def random_quaternion(rand=..., num=...): # -> ndarray[Any, dtype[Any]]: + """Return uniform random unit quaternion. + + rand: array like or None + Three independent random variables that are uniformly distributed + between 0 and 1. 
+ + >>> q = random_quaternion() + >>> np.allclose(1, vector_norm(q)) + True + >>> q = random_quaternion(num=10) + >>> np.allclose(1, vector_norm(q, axis=1)) + True + >>> q = random_quaternion(np.random.random(3)) + >>> len(q.shape), q.shape[0]==4 + (1, True) + + """ + ... + +def random_rotation_matrix(rand=..., num=..., translate=...): # -> ndarray[Any, dtype[float64]]: + """Return uniform random rotation matrix. + + rand: array like + Three independent random variables that are uniformly distributed + between 0 and 1 for each returned quaternion. + + >>> R = random_rotation_matrix() + >>> np.allclose(np.dot(R.T, R), np.identity(4)) + True + >>> R = random_rotation_matrix(num=10) + >>> np.allclose(np.einsum('...ji,...jk->...ik', R, R), np.identity(4)) + True + + """ + ... + +class Arcball: + """Virtual Trackball Control. + + >>> ball = Arcball() + >>> ball = Arcball(initial=np.identity(4)) + >>> ball.place([320, 320], 320) + >>> ball.down([500, 250]) + >>> ball.drag([475, 275]) + >>> R = ball.matrix() + >>> np.allclose(np.sum(R), 3.90583455) + True + >>> ball = Arcball(initial=[1, 0, 0, 0]) + >>> ball.place([320, 320], 320) + >>> ball.setaxes([1, 1, 0], [-1, 1, 0]) + >>> ball.constrain = True + >>> ball.down([400, 200]) + >>> ball.drag([200, 400]) + >>> R = ball.matrix() + >>> np.allclose(np.sum(R), 0.2055924) + True + >>> ball.next() + + """ + + def __init__(self, initial=...) -> None: + """Initialize virtual trackball control. + + initial : quaternion or rotation matrix + + """ + ... + def place(self, center, radius): # -> None: + """Place Arcball, e.g. when window size changes. + + center : sequence[2] + Window coordinates of trackball center. + radius : float + Radius of trackball in window coordinates. + + """ + ... + def setaxes(self, *axes): # -> None: + """Set axes to constrain rotations.""" + ... + @property + def constrain(self): # -> bool: + """Return state of constrain to axis mode.""" + ... 
+ @constrain.setter + def constrain(self, value): # -> None: + """Set state of constrain to axis mode.""" + ... + def down(self, point): # -> None: + """Set initial cursor window coordinates and pick constrain-axis.""" + ... + def drag(self, point): # -> None: + """Update current cursor window coordinates.""" + ... + def next(self, acceleration=...): # -> None: + """Continue rotation in direction of last drag.""" + ... + def matrix(self): # -> ndarray[Any, dtype[float64]]: + """Return homogeneous rotation matrix.""" + ... + +def arcball_map_to_sphere(point, center, radius): # -> NDArray[Any] | NDArray[Unknown | Any]: + """Return unit sphere coordinates from window coordinates.""" + ... + +def arcball_constrain_to_axis(point, axis): # -> Any | NDArray[Any] | NDArray[floating[_64Bit]] | None: + """Return sphere point perpendicular to axis.""" + ... + +def arcball_nearest_axis(point, axes): # -> None: + """Return axis, which arc is nearest to point.""" + ... + +_EPS = ... +_NEXT_AXIS = ... +_AXES2TUPLE = ... +_TUPLE2AXES = ... + +def vector_norm(data, axis=..., out=...): # -> Any | NDArray[floating[_64Bit]] | None: + """Return length, i.e. Euclidean norm, of ndarray along axis. + + >>> v = np.random.random(3) + >>> n = vector_norm(v) + >>> np.allclose(n, np.linalg.norm(v)) + True + >>> v = np.random.rand(6, 5, 3) + >>> n = vector_norm(v, axis=-1) + >>> np.allclose(n, np.sqrt(np.sum(v*v, axis=2))) + True + >>> n = vector_norm(v, axis=1) + >>> np.allclose(n, np.sqrt(np.sum(v*v, axis=1))) + True + >>> v = np.random.rand(5, 4, 3) + >>> n = np.empty((5, 3)) + >>> vector_norm(v, axis=1, out=n) + >>> np.allclose(n, np.sqrt(np.sum(v*v, axis=1))) + True + >>> vector_norm([]) + 0.0 + >>> vector_norm([1]) + 1.0 + + """ + ... + +def unit_vector(data, axis=..., out=...): # -> Any | NDArray[floating[_64Bit]] | None: + """Return ndarray normalized by length, i.e. Euclidean norm, along axis. 
+ + >>> v0 = np.random.random(3) + >>> v1 = unit_vector(v0) + >>> np.allclose(v1, v0 / np.linalg.norm(v0)) + True + >>> v0 = np.random.rand(5, 4, 3) + >>> v1 = unit_vector(v0, axis=-1) + >>> v2 = v0 / np.expand_dims(np.sqrt(np.sum(v0*v0, axis=2)), 2) + >>> np.allclose(v1, v2) + True + >>> v1 = unit_vector(v0, axis=1) + >>> v2 = v0 / np.expand_dims(np.sqrt(np.sum(v0*v0, axis=1)), 1) + >>> np.allclose(v1, v2) + True + >>> v1 = np.empty((5, 4, 3)) + >>> unit_vector(v0, axis=1, out=v1) + >>> np.allclose(v1, v2) + True + >>> list(unit_vector([])) + [] + >>> list(unit_vector([1])) + [1.0] + + """ + ... + +def random_vector(size): + """Return array of random doubles in the half-open interval [0.0, 1.0). + + >>> v = random_vector(10000) + >>> np.all(v >= 0) and np.all(v < 1) + True + >>> v0 = random_vector(10) + >>> v1 = random_vector(10) + >>> np.any(v0 == v1) + False + + """ + ... + +def vector_product(v0, v1, axis=...): # -> Any: + """Return vector perpendicular to vectors. + + >>> v = vector_product([2, 0, 0], [0, 3, 0]) + >>> np.allclose(v, [0, 0, 6]) + True + >>> v0 = [[2, 0, 0, 2], [0, 2, 0, 2], [0, 0, 2, 2]] + >>> v1 = [[3], [0], [0]] + >>> v = vector_product(v0, v1) + >>> np.allclose(v, [[0, 0, 0, 0], [0, 0, 6, 6], [0, -6, 0, -6]]) + True + >>> v0 = [[2, 0, 0], [2, 0, 0], [0, 2, 0], [2, 0, 0]] + >>> v1 = [[0, 3, 0], [0, 0, 3], [0, 0, 3], [3, 3, 3]] + >>> v = vector_product(v0, v1, axis=1) + >>> np.allclose(v, [[0, 0, 6], [0, -6, 0], [6, 0, 0], [0, -6, 6]]) + True + + """ + ... + +def angle_between_vectors(v0, v1, directed=..., axis=...): # -> Any: + """Return angle between vectors. + + If directed is False, the input vectors are interpreted as undirected axes, + i.e. the maximum angle is pi/2. 
+ + >>> a = angle_between_vectors([1, -2, 3], [-1, 2, -3]) + >>> np.allclose(a, np.pi) + True + >>> a = angle_between_vectors([1, -2, 3], [-1, 2, -3], directed=False) + >>> np.allclose(a, 0) + True + >>> v0 = [[2, 0, 0, 2], [0, 2, 0, 2], [0, 0, 2, 2]] + >>> v1 = [[3], [0], [0]] + >>> a = angle_between_vectors(v0, v1) + >>> np.allclose(a, [0, 1.5708, 1.5708, 0.95532]) + True + >>> v0 = [[2, 0, 0], [2, 0, 0], [0, 2, 0], [2, 0, 0]] + >>> v1 = [[0, 3, 0], [0, 0, 3], [0, 0, 3], [3, 3, 3]] + >>> a = angle_between_vectors(v0, v1, axis=1) + >>> np.allclose(a, [1.5708, 1.5708, 1.5708, 0.95532]) + True + + """ + ... + +def inverse_matrix(matrix): + """Return inverse of square transformation matrix. + + >>> M0 = random_rotation_matrix() + >>> M1 = inverse_matrix(M0.T) + >>> np.allclose(M1, np.linalg.inv(M0.T)) + True + >>> for size in range(1, 7): + ... M0 = np.random.rand(size, size) + ... M1 = inverse_matrix(M0) + ... if not np.allclose(M1, np.linalg.inv(M0)): print(size) + + """ + ... + +def concatenate_matrices(*matrices): # -> NDArray[float64] | Any: + """Return concatenation of series of transformation matrices. + + >>> M = np.random.rand(16).reshape((4, 4)) - 0.5 + >>> np.allclose(M, concatenate_matrices(M)) + True + >>> np.allclose(np.dot(M, M.T), concatenate_matrices(M, M.T)) + True + + """ + ... + +def is_same_transform(matrix0, matrix1): # -> bool: + """Return True if two matrices perform same transformation. + + >>> is_same_transform(np.identity(4), np.identity(4)) + True + >>> is_same_transform(np.identity(4), random_rotation_matrix()) + False + + """ + ... + +def is_same_quaternion(q0, q1): # -> bool: + """Return True if two quaternions are equal.""" + ... + +def transform_around(matrix, point): # -> Any: + """ + Given a transformation matrix, apply its rotation + around a point in space. 
+ + Parameters + ---------- + matrix: (4,4) or (3, 3) float, transformation matrix + point: (3,) or (2,) float, point in space + + Returns + --------- + result: (4,4) transformation matrix + """ + ... + +def planar_matrix(offset=..., theta=..., point=..., scale=...): # -> Any | NDArray[float64]: + """ + 2D homogeonous transformation matrix. + + Parameters + ---------- + offset : (2,) float + XY offset + theta : float + Rotation around Z in radians + point : (2, ) float + Point to rotate around + scale : (2,) float or None + Scale to apply + + Returns + ---------- + matrix : (3, 3) flat + Homogeneous 2D transformation matrix + """ + ... + +def planar_matrix_to_3D(matrix_2D): # -> NDArray[float64]: + """ + Given a 2D homogeneous rotation matrix convert it to a 3D rotation + matrix that is rotating around the Z axis + + Parameters + ---------- + matrix_2D: (3,3) float, homogeneous 2D rotation matrix + + Returns + ---------- + matrix_3D: (4,4) float, homogeneous 3D rotation matrix + """ + ... + +def spherical_matrix(theta, phi, axes=...): # -> NDArray[float64]: + """ + Give a spherical coordinate vector, find the rotation that will + transform a [0,0,1] vector to those coordinates + + Parameters + ----------- + theta: float, rotation angle in radians + phi: float, rotation angle in radians + + Returns + ---------- + matrix: (4,4) rotation matrix where the following will + be a cartesian vector in the direction of the + input spherical coordinates: + np.dot(matrix, [0,0,1,0]) + + """ + ... + +def transform_points(points, matrix, translate=...): # -> NDArray[float64] | Any: + """ + Returns points rotated by a homogeneous + transformation matrix. + + If points are (n, 2) matrix must be (3, 3) + If points are (n, 3) matrix must be (4, 4) + + Parameters + ---------- + points : (n, dim) float + Points where `dim` is 2 or 3. + matrix : (3, 3) or (4, 4) float + Homogeneous rotation matrix. + translate : bool + Apply translation from matrix or not. 
+ + Returns + ---------- + transformed : (n, dim) float + Transformed points. + """ + ... + +def fix_rigid(matrix, max_deviance=...): # -> NDArray[float64]: + """ + If a homogeneous transformation matrix is *almost* a rigid + transform but many matrix-multiplies have accumulated some + floating point error try to restore the matrix using SVD. + + Parameters + ----------- + matrix : (4, 4) or (3, 3) float + Homogeneous transformation matrix. + max_deviance : float + Do not alter the matrix if it is not rigid by more + than this amount. + + Returns + ---------- + repaired : (4, 4) or (3, 3) float + Repaired homogeneous transformation matrix + """ + ... + +def is_rigid(matrix, epsilon=...): # -> Any | Literal[False]: + """ + Check to make sure a homogeonous transformation + matrix is a rigid transform. + + Parameters + ----------- + matrix : (4, 4) float + A transformation matrix + + Returns + ----------- + check : bool + True if matrix is a a transform with + only translation, scale, and rotation + """ + ... + +def scale_and_translate(scale=..., translate=...): # -> NDArray[float64]: + """ + Optimized version of `compose_matrix` for just + scaling then translating. + + Scalar args are broadcast to arrays of shape (3,) + + Parameters + -------------- + scale : float or (3,) float + Scale factor + translate : float or (3,) float + Translation + """ + ... + +def flips_winding(matrix): # -> Any: + """ + Check to see if a matrix will invert triangles. + + Parameters + ------------- + matrix : (4, 4) float + Homogeneous transformation matrix + + Returns + -------------- + flip : bool + True if matrix will flip winding of triangles. + """ + ... diff --git a/typings/trimesh/triangles.pyi b/typings/trimesh/triangles.pyi new file mode 100644 index 00000000..c13bd3ee --- /dev/null +++ b/typings/trimesh/triangles.pyi @@ -0,0 +1,321 @@ +""" +This type stub file was generated by pyright. 
+""" + +from dataclasses import dataclass + +from .typed import NDArray, Optional, float64 + +""" +triangles.py +------------- + +Functions for dealing with triangle soups in (n, 3, 3) float form. +""" + +def cross(triangles): # -> NDArray[Any]: + """ + Returns the cross product of two edges from input triangles + + Parameters + -------------- + triangles: (n, 3, 3) float + Vertices of triangles + + Returns + -------------- + crosses : (n, 3) float + Cross product of two edge vectors + """ + ... + +def area(triangles=..., crosses=..., sum=...): # -> Any: + """ + Calculates the sum area of input triangles + + Parameters + ---------- + triangles : (n, 3, 3) float + Vertices of triangles + crosses : (n, 3) float or None + As a speedup don't re- compute cross products + sum : bool + Return summed area or individual triangle area + + Returns + ---------- + area : (n,) float or float + Individual or summed area depending on `sum` argument + """ + ... + +def normals(triangles=..., crosses=...): # -> tuple[Unknown, Any | Unknown]: + """ + Calculates the normals of input triangles + + Parameters + ------------ + triangles : (n, 3, 3) float + Vertex positions + crosses : (n, 3) float + Cross products of edge vectors + + Returns + ------------ + normals : (m, 3) float + Normal vectors + valid : (n,) bool + Was the face nonzero area or not + """ + ... + +def angles(triangles): # -> NDArray[float64]: + """ + Calculates the angles of input triangles. + + Parameters + ------------ + triangles : (n, 3, 3) float + Vertex positions + + Returns + ------------ + angles : (n, 3) float + Angles at vertex positions in radians + Degenerate angles will be returned as zero + """ + ... + +def all_coplanar(triangles): # -> bool_: + """ + Check to see if a list of triangles are all coplanar + + Parameters + ---------------- + triangles: (n, 3, 3) float + Vertices of triangles + + Returns + --------------- + all_coplanar : bool + True if all triangles are coplanar + """ + ... 
+ +def any_coplanar(triangles): # -> bool_: + """ + For a list of triangles if the FIRST triangle is coplanar + with ANY of the following triangles, return True. + Otherwise, return False. + """ + ... + +@dataclass +class MassProperties: + density: float + mass: float + volume: float + center_mass: NDArray[float64] + inertia: Optional[NDArray[float64]] = ... + def __getitem__(self, item): # -> Any: + ... + +def mass_properties(triangles, crosses=..., density=..., center_mass=..., skip_inertia=...) -> MassProperties: + """ + Calculate the mass properties of a group of triangles. + + Implemented from: + http://www.geometrictools.com/Documentation/PolyhedralMassProperties.pdf + + Parameters + ---------- + triangles : (n, 3, 3) float + Triangle vertices in space + crosses : (n,) float + Optional cross products of triangles + density : float + Optional override for density + center_mass : (3,) float + Optional override for center mass + skip_inertia : bool + if True will not return moments matrix + + Returns + --------- + info : dict + Mass properties + """ + ... + +def windings_aligned(triangles, normals_compare): # -> NDArray[Any]: + """ + Given a list of triangles and a list of normals determine if the + two are aligned + + Parameters + ---------- + triangles : (n, 3, 3) float + Vertex locations in space + normals_compare : (n, 3) float + List of normals to compare + + Returns + ---------- + aligned : (n,) bool + Are normals aligned with triangles + """ + ... + +def bounds_tree(triangles): + """ + Given a list of triangles, create an r-tree for broad- phase + collision detection + + Parameters + --------- + triangles : (n, 3, 3) float + Triangles in space + + Returns + --------- + tree : rtree.Rtree + One node per triangle + """ + ... + +def nondegenerate(triangles, areas=..., height=...): # -> Any: + """ + Find all triangles which have an oriented bounding box + where both of the two sides is larger than a specified height. 
+ + Degenerate triangles can be when: + 1) Two of the three vertices are colocated + 2) All three vertices are unique but colinear + + + Parameters + ---------- + triangles : (n, 3, 3) float + Triangles in space + height : float + Minimum edge length of a triangle to keep + + Returns + ---------- + nondegenerate : (n,) bool + True if a triangle meets required minimum height + """ + ... + +def extents(triangles, areas=...): # -> NDArray[float64]: + """ + Return the 2D bounding box size of each triangle. + + Parameters + ---------- + triangles : (n, 3, 3) float + Triangles in space + areas : (n,) float + Optional area of input triangles + + Returns + ---------- + box : (n, 2) float + The size of each triangle's 2D oriented bounding box + """ + ... + +def barycentric_to_points(triangles, barycentric): # -> Any: + """ + Convert a list of barycentric coordinates on a list of triangles + to cartesian points. + + Parameters + ------------ + triangles : (n, 3, 3) float + Triangles in space + barycentric : (n, 2) float + Barycentric coordinates + + Returns + ----------- + points : (m, 3) float + Points in space + """ + ... + +def points_to_barycentric(triangles, points, method=...): # -> NDArray[float64]: + """ + Find the barycentric coordinates of points relative to triangles. + + The Cramer's rule solution implements: + http://blackpawn.com/texts/pointinpoly + + The cross product solution implements: + https://www.cs.ubc.ca/~heidrich/Papers/JGT.05.pdf + + + Parameters + ----------- + triangles : (n, 3, 3) float + Triangles vertices in space + points : (n, 3) float + Point in space associated with a triangle + method : str + Which method to compute the barycentric coordinates with: + - 'cross': uses a method using cross products, roughly 2x slower but + different numerical robustness properties + - anything else: uses a cramer's rule solution + + Returns + ----------- + barycentric : (n, 3) float + Barycentric coordinates of each point + """ + ... 
+ +def closest_point(triangles, points): # -> NDArray[float64]: + """ + Return the closest point on the surface of each triangle for a + list of corresponding points. + + Implements the method from "Real Time Collision Detection" and + use the same variable names as "ClosestPtPointTriangle" to avoid + being any more confusing. + + + Parameters + ---------- + triangles : (n, 3, 3) float + Triangle vertices in space + points : (n, 3) float + Points in space + + Returns + ---------- + closest : (n, 3) float + Point on each triangle closest to each point + """ + ... + +def to_kwargs(triangles): # -> dict[str, Unknown]: + """ + Convert a list of triangles to the kwargs for the Trimesh + constructor. + + Parameters + --------- + triangles : (n, 3, 3) float + Triangles in space + + Returns + --------- + kwargs : dict + Keyword arguments for the trimesh.Trimesh constructor + Includes keys 'vertices' and 'faces' + + Examples + --------- + >>> mesh = trimesh.Trimesh(**trimesh.triangles.to_kwargs(triangles)) + """ + ... diff --git a/typings/trimesh/typed.pyi b/typings/trimesh/typed.pyi new file mode 100644 index 00000000..61b24584 --- /dev/null +++ b/typings/trimesh/typed.pyi @@ -0,0 +1,5 @@ +""" +This type stub file was generated by pyright. +""" + +__all__ = ["NDArray", "ArrayLike", "Optional", "List", "Dict", "Tuple", "float64", "int64"] diff --git a/typings/trimesh/units.pyi b/typings/trimesh/units.pyi new file mode 100644 index 00000000..841389a2 --- /dev/null +++ b/typings/trimesh/units.pyi @@ -0,0 +1,71 @@ +""" +This type stub file was generated by pyright. +""" + +from .parent import Geometry + +""" +units.py +-------------- +Deal with physical unit systems (i.e. inches, mm) + +Very basic conversions, and no requirement for +sympy.physics.units or pint. +""" +_lookup = ... + +def unit_conversion(current: str, desired: str) -> float: + """ + Calculate the conversion from one set of units to another. 
+ + Parameters + --------- + current : str + Unit system values are in now (eg 'millimeters') + desired : str + Unit system we'd like values in (eg 'inches') + + Returns + --------- + conversion : float + Number to multiply by to put values into desired units + """ + ... + +def to_inch(unit: str) -> float: + """ + Calculate the conversion to an arbitrary common unit. + + Parameters + ------------ + unit + Either a key in `units_to_inches.json` or in the simple + `{float} * {str}` form, i.e. "1.2 * meters". We don't + support arbitrary `eval` of any math string + + Returns + ---------- + conversion + Factor to multiply by to get to an `inch` system. + """ + ... + +def units_from_metadata(obj: Geometry, guess: bool = ...) -> str: + """ + Try to extract hints from metadata and if that fails + guess based on the object scale. + + + Parameters + ------------ + obj + A geometry object. + guess + If metadata doesn't have units make a "best guess" + + Returns + ------------ + units + A guess of what the units might be + """ + ... diff --git a/typings/trimesh/util.pyi b/typings/trimesh/util.pyi new file mode 100644 index 00000000..21a9da91 --- /dev/null +++ b/typings/trimesh/util.pyi @@ -0,0 +1,1317 @@ +""" +This type stub file was generated by pyright. +""" + +import abc +import json +from collections.abc import Mapping + +import numpy as np + +""" +util.py +----------- + +Standalone functions which require only imports from numpy and the +standard library. + +Other libraries may be imported must be wrapped in try/except blocks +or imported inside of a function +""" +log = ... +ABC = abc.ABC +now = ... +which = ... +TOL_ZERO = ... +TOL_MERGE = ... +_STRICT = ... +_IDENTITY = np.eye(4, dtype=np.float64) + +def has_module(name): # -> bool: + """ + Check to see if a module is installed by name without + actually importing the module. 
+ + Parameters + ------------ + name : str + The name of the module to check + + Returns + ------------ + installed : bool + True if module is installed + """ + ... + +def unitize(vectors, check_valid=..., threshold=...): # -> tuple[Unknown, Any | Unknown]: + """ + Unitize a vector or an array or row-vectors. + + Parameters + ------------ + vectors : (n,m) or (j) float + Vector or vectors to be unitized + check_valid : bool + If set, will return mask of nonzero vectors + threshold : float + Cutoff for a value to be considered zero. + + Returns + --------- + unit : (n,m) or (j) float + Input vectors but unitized + valid : (n,) bool or bool + Mask of nonzero vectors returned if `check_valid` + """ + ... + +def euclidean(a, b): # -> Any: + """ + Euclidean distance between vectors a and b. + + Parameters + ------------ + a : (n,) float + First vector + b : (n,) float + Second vector + + Returns + ------------ + distance : float + Euclidean distance between A and B + """ + ... + +def is_file(obj): # -> bool: + """ + Check if an object is file-like + + Parameters + ------------ + obj : object + Any object type to be checked + + Returns + ----------- + is_file : bool + True if object is a file + """ + ... + +def is_pathlib(obj): # -> Literal[False]: + """ + Check if the object is a `pathlib.Path` or subclass. + + Parameters + ------------ + obj : object + Object to be checked + + Returns + ------------ + is_pathlib : bool + Is the input object a pathlib path + """ + ... + +def is_string(obj): # -> bool: + """ + Check if an object is a string. + + Parameters + ------------ + obj : object + Any object type to be checked + + Returns + ------------ + is_string : bool + True if obj is a string + """ + ... + +def is_none(obj): # -> bool: + """ + Check to see if an object is None or not. + + Handles the case of np.array(None) as well. 
+ + Parameters + ------------- + obj : object + Any object type to be checked + + Returns + ------------- + is_none : bool + True if obj is None or numpy None-like + """ + ... + +def is_sequence(obj): # -> bool: + """ + Check if an object is a sequence or not. + + Parameters + ------------- + obj : object + Any object type to be checked + + Returns + ------------- + is_sequence : bool + True if object is sequence + """ + ... + +def is_shape(obj, shape, allow_zeros=...): # -> bool: + """ + Compare the shape of a numpy.ndarray to a target shape, + with any value less than zero being considered a wildcard + + Note that if a list-like object is passed that is not a numpy + array, this function will not convert it and will return False. + + Parameters + ------------ + obj : np.ndarray + Array to check the shape on + shape : list or tuple + Any negative term will be considered a wildcard + Any tuple term will be evaluated as an OR + allow_zeros: bool + if False, zeros do not match negatives in shape + + Returns + --------- + shape_ok : bool + True if shape of obj matches query shape + + Examples + ------------------------ + In [1]: a = np.random.random((100, 3)) + + In [2]: a.shape + Out[2]: (100, 3) + + In [3]: trimesh.util.is_shape(a, (-1, 3)) + Out[3]: True + + In [4]: trimesh.util.is_shape(a, (-1, 3, 5)) + Out[4]: False + + In [5]: trimesh.util.is_shape(a, (100, -1)) + Out[5]: True + + In [6]: trimesh.util.is_shape(a, (-1, (3, 4))) + Out[6]: True + + In [7]: trimesh.util.is_shape(a, (-1, (4, 5))) + Out[7]: False + """ + ... + +def make_sequence(obj): # -> NDArray[Unknown]: + """ + Given an object, if it is a sequence return, otherwise + add it to a length 1 sequence and return. + + Useful for wrapping functions which sometimes return single + objects and other times return lists of objects. + + Parameters + ------------- + obj : object + An object to be made a sequence + + Returns + -------------- + as_sequence : (n,) sequence + Contains input value + """ + ... 
+ +def vector_hemisphere( + vectors, return_sign=... +): # -> tuple[NDArray[floating[Any]], NDArray[float64]] | NDArray[floating[Any]]: + """ + For a set of 3D vectors alter the sign so they are all in the + upper hemisphere. + + If the vector lies on the plane all vectors with negative Y + will be reversed. + + If the vector has a zero Z and Y value vectors with a + negative X value will be reversed. + + Parameters + ------------ + vectors : (n, 3) float + Input vectors + return_sign : bool + Return the sign mask or not + + Returns + ---------- + oriented: (n, 3) float + Vectors with same magnitude as source + but possibly reversed to ensure all vectors + are in the same hemisphere. + sign : (n,) float + [OPTIONAL] sign of original vectors + """ + ... + +def vector_to_spherical(cartesian): # -> NDArray[float64]: + """ + Convert a set of cartesian points to (n, 2) spherical unit + vectors. + + Parameters + ------------ + cartesian : (n, 3) float + Points in space + + Returns + ------------ + spherical : (n, 2) float + Angles, in radians + """ + ... + +def spherical_to_vector(spherical): # -> NDArray[Any]: + """ + Convert a set of (n, 2) spherical vectors to (n, 3) vectors + + Parameters + ------------ + spherical : (n , 2) float + Angles, in radians + + Returns + ----------- + vectors : (n, 3) float + Unit vectors + """ + ... + +def pairwise(iterable): # -> ndarray[Any, dtype[Unknown]] | zip[tuple[Unknown, Unknown]]: + """ + For an iterable, group values into pairs. + + Parameters + ------------ + iterable : (m, ) list + A sequence of values + + Returns + ----------- + pairs: (n, 2) + Pairs of sequential values + + Example + ----------- + In [1]: data + Out[1]: [0, 1, 2, 3, 4, 5, 6] + + In [2]: list(trimesh.util.pairwise(data)) + Out[2]: [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6)] + + """ + ... + +multi_dot = ... + +def diagonal_dot(a, b): # -> Any: + """ + Dot product by row of a and b. 
+ + There are a lot of ways to do this though + performance varies very widely. This method + uses a dot product to sum the row and avoids + function calls if at all possible. + + Comparing performance of some equivalent versions: + ``` + In [1]: import numpy as np; import trimesh + + In [2]: a = np.random.random((10000, 3)) + + In [3]: b = np.random.random((10000, 3)) + + In [4]: %timeit (a * b).sum(axis=1) + 1000 loops, best of 3: 181 us per loop + + In [5]: %timeit np.einsum('ij,ij->i', a, b) + 10000 loops, best of 3: 62.7 us per loop + + In [6]: %timeit np.diag(np.dot(a, b.T)) + 1 loop, best of 3: 429 ms per loop + + In [7]: %timeit np.dot(a * b, np.ones(a.shape[1])) + 10000 loops, best of 3: 61.3 us per loop + + In [8]: %timeit trimesh.util.diagonal_dot(a, b) + 10000 loops, best of 3: 55.2 us per loop + ``` + + Parameters + ------------ + a : (m, d) float + First array + b : (m, d) float + Second array + + Returns + ------------- + result : (m,) float + Dot product of each row + """ + ... + +def row_norm(data): # -> Any: + """ + Compute the norm per-row of a numpy array. + + This is identical to np.linalg.norm(data, axis=1) but roughly + three times faster due to being less general. + + In [3]: %timeit trimesh.util.row_norm(a) + 76.3 us +/- 651 ns per loop + + In [4]: %timeit np.linalg.norm(a, axis=1) + 220 us +/- 5.41 us per loop + + Parameters + ------------- + data : (n, d) float + Input 2D data to calculate per-row norm of + + Returns + ------------- + norm : (n,) float + Norm of each row of input array + """ + ... + +def stack_3D(points, return_2D=...): # -> tuple[NDArray[float64], bool] | NDArray[float64]: + """ + For a list of (n, 2) or (n, 3) points return them + as (n, 3) 3D points, 2D points on the XY plane. + + Parameters + ------------ + points : (n, 2) or (n, 3) float + Points in either 2D or 3D space + return_2D : bool + Were the original points 2D? 
+ + Returns + ---------- + points : (n, 3) float + Points in space + is_2D : bool + [OPTIONAL] if source points were (n, 2) + """ + ... + +def grid_arange(bounds, step): # -> ndarray[Any, dtype[Any]]: + """ + Return a grid from an (2,dimension) bounds with samples step distance apart. + + Parameters + ------------ + bounds: (2,dimension) list of [[min x, min y, etc], [max x, max y, etc]] + step: float, or (dimension) floats, separation between points + + Returns + --------- + grid: (n, dimension), points inside the specified bounds + """ + ... + +def grid_linspace(bounds, count): # -> ndarray[Any, dtype[Any]]: + """ + Return a grid spaced inside a bounding box with edges spaced using np.linspace. + + Parameters + ------------ + bounds: (2,dimension) list of [[min x, min y, etc], [max x, max y, etc]] + count: int, or (dimension,) int, number of samples per side + + Returns + --------- + grid: (n, dimension) float, points in the specified bounds + """ + ... + +def multi_dict(pairs): # -> defaultdict[Unknown, list[Unknown]]: + """ + Given a set of key value pairs, create a dictionary. + If a key occurs multiple times, stack the values into an array. + + Can be called like the regular dict(pairs) constructor + + Parameters + ------------ + pairs: (n, 2) array of key, value pairs + + Returns + ---------- + result: dict, with all values stored (rather than last with regular dict) + + """ + ... + +def tolist(data): # -> Any: + """ + Ensure that any arrays or dicts passed containing + numpy arrays are properly converted to lists + + Parameters + ------------- + data : any + Usually a dict with some numpy arrays as values + + Returns + ---------- + result : any + JSON-serializable version of data + """ + ... + +def is_binary_file(file_obj): # -> bool: + """ + Returns True if file has non-ASCII characters (> 0x7F, or 127) + Should work in both Python 2 and 3 + """ + ... 
+ +def distance_to_end(file_obj): + """ + For an open file object how far is it to the end + + Parameters + ------------ + file_obj: open file-like object + + Returns + ---------- + distance: int, bytes to end of file + """ + ... + +def decimal_to_digits(decimal, min_digits=...): # -> Any | int: + """ + Return the number of digits to the first nonzero decimal. + + Parameters + ----------- + decimal: float + min_digits: int, minimum number of digits to return + + Returns + ----------- + + digits: int, number of digits to the first nonzero decimal + """ + ... + +def attach_to_log(level=..., handler=..., loggers=..., colors=..., capture_warnings=..., blacklist=...): # -> None: + """ + Attach a stream handler to all loggers. + + Parameters + ------------ + level : enum + Logging level, like logging.INFO + handler : None or logging.Handler + Handler to attach + loggers : None or (n,) logging.Logger + If None, will try to attach to all available + colors : bool + If True try to use colorlog formatter + blacklist : (n,) str + Names of loggers NOT to attach to + """ + ... + +def stack_lines(indices): # -> NDArray[Any] | ndarray[Any, dtype[Unknown]]: + """ + Stack a list of values that represent a polyline into + individual line segments with duplicated consecutive values. + + Parameters + ------------ + indices : (m,) any + List of items to be stacked + + Returns + --------- + stacked : (n, 2) any + Stacked items + + Examples + ---------- + In [1]: trimesh.util.stack_lines([0, 1, 2]) + Out[1]: + array([[0, 1], + [1, 2]]) + + In [2]: trimesh.util.stack_lines([0, 1, 2, 4, 5]) + Out[2]: + array([[0, 1], + [1, 2], + [2, 4], + [4, 5]]) + + In [3]: trimesh.util.stack_lines([[0, 0], [1, 1], [2, 2], [3, 3]]) + Out[3]: + array([[0, 0], + [1, 1], + [1, 1], + [2, 2], + [2, 2], + [3, 3]]) + + """ + ... 
+
+def append_faces(
+    vertices_seq, faces_seq
+): # -> tuple[Unknown | NDArray[Any] | NDArray[Unknown], Unknown | NDArray[Any] | NDArray[Unknown]]:
+    """
+    Given a sequence of zero-indexed faces and vertices
+    combine them into a single array of faces and
+    a single array of vertices.
+
+    Parameters
+    -----------
+    vertices_seq : (n, ) sequence of (m, d) float
+      Multiple arrays of vertices
+    faces_seq : (n, ) sequence of (p, j) int
+      Zero indexed faces for matching vertices
+
+    Returns
+    ----------
+    vertices : (i, d) float
+      Points in space
+    faces : (j, 3) int
+      Reference vertex indices
+    """
+    ...
+
+def array_to_string(array, col_delim=..., row_delim=..., digits=..., value_format=...): # -> str:
+    """
+    Convert a 1 or 2D array into a string with a specified number
+    of digits and delimiter. The reason this exists is that the
+    basic numpy array to string conversions are surprisingly bad.
+
+    Parameters
+    ------------
+    array : (n,) or (n, d) float or int
+      Data to be converted
+      If shape is (n,) only column delimiter will be used
+    col_delim : str
+      What string should separate values in a column
+    row_delim : str
+      What string should separate values in a row
+    digits : int
+      How many digits should floating point numbers include
+    value_format : str
+      Format string for each value or sequence of values
+      If multiple values per value_format it must divide
+      into array evenly.
+
+    Returns
+    ----------
+    formatted : str
+      String representation of original array
+    """
+    ...
+
+def structured_array_to_string(array, col_delim=..., row_delim=..., digits=..., value_format=...): # -> str:
+    """
+    Convert an unstructured array into a string with a specified
+    number of digits and delimiter. The reason this exists is
+    that the basic numpy array to string conversions are
+    surprisingly bad.
+ + Parameters + ------------ + array : (n,) or (n, d) float or int + Data to be converted + If shape is (n,) only column delimiter will be used + col_delim : str + What string should separate values in a column + row_delim : str + What string should separate values in a row + digits : int + How many digits should floating point numbers include + value_format : str + Format string for each value or sequence of values + If multiple values per value_format it must divide + into array evenly. + + Returns + ---------- + formatted : str + String representation of original array + """ + ... + +def array_to_encoded(array, dtype=..., encoding=...): # -> dict[str, Unknown]: + """ + Export a numpy array to a compact serializable dictionary. + + Parameters + ------------ + array : array + Any numpy array + dtype : str or None + Optional dtype to encode array + encoding : str + 'base64' or 'binary' + + Returns + --------- + encoded : dict + Has keys: + 'dtype': str, of dtype + 'shape': tuple of shape + 'base64': str, base64 encoded string + """ + ... + +def decode_keys(store, encoding=...): + """ + If a dictionary has keys that are bytes decode them to a str. + + Parameters + ------------ + store : dict + Dictionary with data + + Returns + --------- + result : dict + Values are untouched but keys that were bytes + are converted to ASCII strings. + + Example + ----------- + In [1]: d + Out[1]: {1020: 'nah', b'hi': 'stuff'} + + In [2]: trimesh.util.decode_keys(d) + Out[2]: {1020: 'nah', 'hi': 'stuff'} + """ + ... + +def comment_strip(text, starts_with=..., new_line=...): # -> str: + """ + Strip comments from a text block. + + Parameters + ----------- + text : str + Text to remove comments from + starts_with : str + Character or substring that starts a comment + new_line : str + Character or substring that ends a comment + + Returns + ----------- + stripped : str + Text with comments stripped + """ + ... 
+
+def encoded_to_array(encoded): # -> ndarray[Any, dtype[Unknown]]:
+    """
+    Turn a dictionary with base64 encoded strings back into a numpy array.
+
+    Parameters
+    ------------
+    encoded : dict
+      Has keys:
+      dtype: string of dtype
+      shape: int tuple of shape
+      base64: base64 encoded string of flat array
+      binary: decode result coming from numpy.tobytes
+
+    Returns
+    ----------
+    array: numpy array
+    """
+    ...
+
+def is_instance_named(obj, name): # -> bool:
+    """
+    Given an object, if it is a member of the class 'name',
+    or a subclass of 'name', return True.
+
+    Parameters
+    ------------
+    obj : instance
+      Some object of some class
+    name: str
+      The name of the class we want to check for
+
+    Returns
+    ---------
+    is_instance : bool
+      Whether the object is a member of the named class
+    """
+    ...
+
+def type_bases(obj, depth=...): # -> list[Any | Unknown]:
+    """
+    Return the bases of the object passed.
+    """
+    ...
+
+def type_named(obj, name): # -> Any:
+    """
+    Similar to the type() builtin, but looks in class bases
+    for named instance.
+
+    Parameters
+    ------------
+    obj : any
+      Object to look for class of
+    name : str
+      Name of class
+
+    Returns
+    ----------
+    class : Optional[Callable]
+      Named class, or None
+    """
+    ...
+
+def concatenate(a, b=...): # -> list[Unknown] | Any:
+    """
+    Concatenate two or more meshes.
+
+    Parameters
+    ------------
+    a : trimesh.Trimesh
+      Mesh or list of meshes to be concatenated
+      object, or list of such
+    b : trimesh.Trimesh
+      Mesh or list of meshes to be concatenated
+
+    Returns
+    ----------
+    result : trimesh.Trimesh
+      Concatenated mesh
+    """
+    ...
+
+def submesh(
+    mesh, faces_sequence, repair=..., only_watertight=..., min_faces=..., append=...
+): # -> list[Unknown] | NDArray[Any] | Any | ndarray[Any, dtype[Unknown | Any]]:
+    """
+    Return a subset of a mesh.
+ + Parameters + ------------ + mesh : Trimesh + Source mesh to take geometry from + faces_sequence : sequence (p,) int + Indexes of mesh.faces + repair : bool + Try to make submeshes watertight + only_watertight : bool + Only return submeshes which are watertight + append : bool + Return a single mesh which has the faces appended, + if this flag is set, only_watertight is ignored + + Returns + --------- + if append : Trimesh object + else list of Trimesh objects + """ + ... + +def zero_pad(data, count, right=...): # -> NDArray[float64]: + """ + Parameters + ------------ + data : (n,) + 1D array + count : int + Minimum length of result array + + Returns + --------- + padded : (m,) + 1D array where m >= count + """ + ... + +def jsonify(obj, **kwargs): # -> str: + """ + A version of json.dumps that can handle numpy arrays + by creating a custom encoder for numpy dtypes. + + Parameters + -------------- + obj : list, dict + A JSON-serializable blob + kwargs : dict + Passed to json.dumps + + Returns + -------------- + dumped : str + JSON dump of obj + """ + + class EdgeEncoder(json.JSONEncoder): ... + +def convert_like(item, like): + """ + Convert an item to have the dtype of another item + + Parameters + ------------ + item : any + Item to be converted + like : any + Object with target dtype + If None, item is returned unmodified + + Returns + ---------- + result: item, but in dtype of like + """ + ... + +def bounds_tree(bounds): + """ + Given a set of axis aligned bounds create an r-tree for + broad-phase collision detection. + + Parameters + ------------ + bounds : (n, 2D) or (n, 2, D) float + Non-interleaved bounds where D=dimension + E.G a 2D bounds tree: + [(minx, miny, maxx, maxy), ...] + + Returns + --------- + tree : Rtree + Tree containing bounds by index + """ + ... + +def wrap_as_stream(item): # -> StringIO | BytesIO: + """ + Wrap a string or bytes object as a file object. 
+ + Parameters + ------------ + item: str or bytes + Item to be wrapped + + Returns + --------- + wrapped : file-like object + Contains data from item + """ + ... + +def sigfig_round(values, sigfig=...): + """ + Round a single value to a specified number of significant figures. + + Parameters + ------------ + values : float + Value to be rounded + sigfig : int + Number of significant figures to reduce to + + Returns + ---------- + rounded : float + Value rounded to the specified number of significant figures + + + Examples + ---------- + In [1]: trimesh.util.round_sigfig(-232453.00014045456, 1) + Out[1]: -200000.0 + + In [2]: trimesh.util.round_sigfig(.00014045456, 1) + Out[2]: 0.0001 + + In [3]: trimesh.util.round_sigfig(.00014045456, 4) + Out[3]: 0.0001405 + """ + ... + +def sigfig_int(values, sigfig): # -> tuple[Unknown, NDArray[floating[Any]]]: + """ + Convert a set of floating point values into integers + with a specified number of significant figures and an + exponent. + + Parameters + ------------ + values : (n,) float or int + Array of values + sigfig : (n,) int + Number of significant figures to keep + + Returns + ------------ + as_int : (n,) int + Every value[i] has sigfig[i] digits + multiplier : (n, int) + Exponent, so as_int * 10 ** multiplier is + the same order of magnitude as the input + """ + ... + +def decompress(file_obj, file_type): # -> dict[str, StringIO | BytesIO] | dict[str, IO[bytes] | None]: + """ + Given an open file object and a file type, return all components + of the archive as open file objects in a dict. + + Parameters + ------------ + file_obj : file-like + Containing compressed data + file_type : str + File extension, 'zip', 'tar.gz', etc + + Returns + --------- + decompressed : dict + Data from archive in format {file name : file-like} + """ + ... + +def compress(info, **kwargs): # -> bytes: + """ + Compress data stored in a dict. 
+ + Parameters + ----------- + info : dict + Data to compress in form: + {file name in archive: bytes or file-like object} + kwargs : dict + Passed to zipfile.ZipFile + Returns + ----------- + compressed : bytes + Compressed file data + """ + ... + +def split_extension(file_name, special=...): # -> str: + """ + Find the file extension of a file name, including support for + special case multipart file extensions (like .tar.gz) + + Parameters + ------------ + file_name : str + File name + special : list of str + Multipart extensions + eg: ['tar.bz2', 'tar.gz'] + + Returns + ---------- + extension : str + Last characters after a period, or + a value from 'special' + """ + ... + +def triangle_strips_to_faces(strips): # -> ndarray[Any, dtype[int64]]: + """ + Convert a sequence of triangle strips to (n, 3) faces. + + Processes all strips at once using np.concatenate and is significantly + faster than loop-based methods. + + From the OpenGL programming guide describing a single triangle + strip [v0, v1, v2, v3, v4]: + + Draws a series of triangles (three-sided polygons) using vertices + v0, v1, v2, then v2, v1, v3 (note the order), then v2, v3, v4, + and so on. The ordering is to ensure that the triangles are all + drawn with the same orientation so that the strip can correctly form + part of a surface. + + Parameters + ------------ + strips: (n,) list of (m,) int + Vertex indices + + Returns + ------------ + faces : (m, 3) int + Vertex indices representing triangles + """ + ... + +def triangle_fans_to_faces(fans): # -> NDArray[Unknown]: + """ + Convert fans of m + 2 vertex indices in fan format to m triangles + + Parameters + ---------- + fans: (n,) list of (m + 2,) int + Vertex indices + + Returns + ------- + faces: (m, 3) int + Vertex indices representing triangles + """ + ... + +def vstack_empty(tup): # -> NDArray[Any] | NDArray[Unknown]: + """ + A thin wrapper for numpy.vstack that ignores empty lists. 
+ + Parameters + ------------ + tup : tuple or list of arrays + With the same number of columns + + Returns + ------------ + stacked : (n, d) array + With same number of columns as + constituent arrays. + """ + ... + +def write_encoded(file_obj, stuff, encoding=...): + """ + If a file is open in binary mode and a + string is passed, encode and write. + + If a file is open in text mode and bytes are + passed decode bytes to str and write. + + Assumes binary mode if file_obj does not have + a 'mode' attribute (e.g. io.BufferedRandom). + + Parameters + ----------- + file_obj : file object + With 'write' and 'mode' + stuff : str or bytes + Stuff to be written + encoding : str + Encoding of text + """ + ... + +def unique_id(length=...): # -> str: + """ + Generate a random alphanumeric unique identifier + using UUID logic. + + Parameters + ------------ + length : int + Length of desired identifier + + Returns + ------------ + unique : str + Unique alphanumeric identifier + """ + ... + +def generate_basis(z, epsilon=...): # -> NDArray[float64]: + """ + Generate an arbitrary basis (also known as a coordinate frame) + from a given z-axis vector. + + Parameters + ------------ + z : (3,) float + A vector along the positive z-axis. + epsilon : float + Numbers smaller than this considered zero. + + Returns + --------- + x : (3,) float + Vector along x axis. + y : (3,) float + Vector along y axis. + z : (3,) float + Vector along z axis. + """ + ... + +def isclose(a, b, atol): # -> Any: + """ + A replacement for np.isclose that does fewer checks + and validation and as a result is roughly 4x faster. + + Note that this is used in tight loops, and as such + a and b MUST be np.ndarray, not list or "array-like" + + Parameters + ------------ + a : np.ndarray + To be compared + b : np.ndarray + To be compared + atol : float + Acceptable distance between `a` and `b` to be "close" + + Returns + ----------- + close : np.ndarray, bool + Per-element closeness + """ + ... 
+ +def allclose(a, b, atol=...): # -> bool: + """ + A replacement for np.allclose that does few checks + and validation and as a result is faster. + + Parameters + ------------ + a : np.ndarray + To be compared + b : np.ndarray + To be compared + atol : float + Acceptable distance between `a` and `b` to be "close" + + Returns + ----------- + bool indicating if all elements are within `atol`. + """ + ... + +class FunctionRegistry(Mapping): + """ + Non-overwritable mapping of string keys to functions. + + This allows external packages to register additional implementations + of common functionality without risk of breaking implementations provided + by trimesh. + + See trimesh.voxel.morphology for example usage. + """ + + def __init__(self, **kwargs) -> None: ... + def __getitem__(self, key): ... + def __setitem__(self, key, value): # -> None: + ... + def __iter__(self): # -> Iterator[Unknown]: + ... + def __len__(self): # -> int: + ... + def __contains__(self, key): # -> bool: + ... + def __call__(self, key, *args, **kwargs): ... + +def decode_text(text, initial=...): + """ + Try to decode byte input as a string. + + Tries initial guess (UTF-8) then if that fails it + uses chardet to try another guess before failing. + + Parameters + ------------ + text : bytes + Data that might be a string + initial : str + Initial guess for text encoding. + + Returns + ------------ + decoded : str + Data as a string + """ + ... + +def to_ascii(text): # -> str: + """ + Force a string or other to ASCII text ignoring errors. + + Parameters + ----------- + text : any + Input to be converted to ASCII string + + Returns + ----------- + ascii : str + Input as an ASCII string + """ + ... + +def is_ccw(points, return_all=...): # -> Any | tuple[Any, Any, Any]: + """ + Check if connected 2D points are counterclockwise. + + Parameters + ----------- + points : (n, 2) float + Connected points on a plane + return_all : bool + Return polygon area and centroid or just counter-clockwise. 
+ + Returns + ---------- + ccw : bool + True if points are counter-clockwise + area : float + Only returned if `return_centroid` + centroid : (2,) float + Centroid of the polygon. + """ + ... + +def unique_name(start, contains, counts=...): # -> str: + """ + Deterministically generate a unique name not + contained in a dict, set or other grouping with + `__includes__` defined. Will create names of the + form "start_10" and increment accordingly. + + Parameters + ----------- + start : str + Initial guess for name. + contains : dict, set, or list + Bundle of existing names we can *not* use. + counts : None or dict + Maps name starts encountered before to increments in + order to speed up finding a unique name as otherwise + it potentially has to iterate through all of contains. + Should map to "how many times has this `start` + been attempted, i.e. `counts[start]: int`. + Note that this *will be mutated* in-place by this function! + + Returns + --------- + unique : str + A name that is not contained in `contains` + """ + ... diff --git a/typings/trimesh/version.pyi b/typings/trimesh/version.pyi new file mode 100644 index 00000000..99e59836 --- /dev/null +++ b/typings/trimesh/version.pyi @@ -0,0 +1,12 @@ +""" +This type stub file was generated by pyright. +""" + +""" +# version.py + +Get the current version from package metadata or pyproject.toml +if everything else fails. +""" +__version__ = ... +if __name__ == "__main__": ... diff --git a/typings/trimesh/viewer/__init__.pyi b/typings/trimesh/viewer/__init__.pyi new file mode 100644 index 00000000..14215193 --- /dev/null +++ b/typings/trimesh/viewer/__init__.pyi @@ -0,0 +1,16 @@ +""" +This type stub file was generated by pyright. +""" + +from .. import exceptions +from .notebook import in_notebook, scene_to_html, scene_to_notebook +from .widget import SceneWidget +from .windowed import SceneViewer, render_scene + +""" +viewer +------------- + +View meshes and scenes via pyglet or inline HTML. 
+""" +__all__ = ["SceneWidget", "SceneViewer", "render_scene", "in_notebook", "scene_to_notebook", "scene_to_html"] diff --git a/typings/trimesh/viewer/notebook.pyi b/typings/trimesh/viewer/notebook.pyi new file mode 100644 index 00000000..eba771a2 --- /dev/null +++ b/typings/trimesh/viewer/notebook.pyi @@ -0,0 +1,57 @@ +""" +This type stub file was generated by pyright. +""" + +""" +notebook.py +------------- + +Render trimesh.Scene objects in HTML +and jupyter notebooks using three.js +""" + +def scene_to_html(scene): # -> str: + """ + Return HTML that will render the scene using + GLTF/GLB encoded to base64 loaded by three.js + + Parameters + -------------- + scene : trimesh.Scene + Source geometry + + Returns + -------------- + html : str + HTML containing embedded geometry + """ + ... + +def scene_to_notebook(scene, height=..., **kwargs): + """ + Convert a scene to HTML containing embedded geometry + and a three.js viewer that will display nicely in + an IPython/Jupyter notebook. + + Parameters + ------------- + scene : trimesh.Scene + Source geometry + + Returns + ------------- + html : IPython.display.HTML + Object containing rendered scene + """ + ... + +def in_notebook(): # -> bool: + """ + Check to see if we are in an IPython or Jypyter notebook. + + Returns + ----------- + in_notebook : bool + Returns True if we are in a notebook + """ + ... diff --git a/typings/trimesh/viewer/trackball.pyi b/typings/trimesh/viewer/trackball.pyi new file mode 100644 index 00000000..d9a1874c --- /dev/null +++ b/typings/trimesh/viewer/trackball.pyi @@ -0,0 +1,99 @@ +""" +This type stub file was generated by pyright. +""" + +"""Trackball class for 3D manipulation of viewpoints. +""" + +class Trackball: + """A trackball class for creating camera transforms from mouse movements.""" + + STATE_ROTATE = ... + STATE_PAN = ... + STATE_ROLL = ... + STATE_ZOOM = ... + def __init__(self, pose, size, scale, target=...) 
-> None: + """Initialize a trackball with an initial camera-to-world pose + and the given parameters. + + Parameters + ---------- + pose : [4,4] + An initial camera-to-world pose for the trackball. + + size : (float, float) + The width and height of the camera image in pixels. + + scale : float + The diagonal of the scene's bounding box -- + used for ensuring translation motions are sufficiently + fast for differently-sized scenes. + + target : (3,) float + The center of the scene in world coordinates. + The trackball will revolve around this point. + """ + ... + @property + def pose(self): # -> Unknown | Any | ndarray[Unknown, Unknown]: + """autolab_core.RigidTransform : The current camera-to-world pose.""" + ... + def set_state(self, state): # -> None: + """Set the state of the trackball in order to change the effect of + dragging motions. + + Parameters + ---------- + state : int + One of Trackball.STATE_ROTATE, Trackball.STATE_PAN, + Trackball.STATE_ROLL, and Trackball.STATE_ZOOM. + """ + ... + def resize(self, size): # -> None: + """Resize the window. + + Parameters + ---------- + size : (float, float) + The new width and height of the camera image in pixels. + """ + ... + def down(self, point): # -> None: + """Record an initial mouse press at a given point. + + Parameters + ---------- + point : (2,) int + The x and y pixel coordinates of the mouse press. + """ + ... + def drag(self, point): # -> None: + """Update the tracball during a drag. + + Parameters + ---------- + point : (2,) int + The current x and y pixel coordinates of the mouse during a drag. + This will compute a movement for the trackball with the relative + motion between this point and the one marked by down(). + """ + ... + def scroll(self, clicks): # -> None: + """Zoom using a mouse scroll wheel motion. + + Parameters + ---------- + clicks : int + The number of clicks. Positive numbers indicate forward wheel + movement. + """ + ... 
+ def rotate(self, azimuth, axis=...): # -> None: + """Rotate the trackball about the "Up" axis by azimuth radians. + + Parameters + ---------- + azimuth : float + The number of radians to rotate. + """ + ... diff --git a/typings/trimesh/viewer/widget.pyi b/typings/trimesh/viewer/widget.pyi new file mode 100644 index 00000000..1fe20fe1 --- /dev/null +++ b/typings/trimesh/viewer/widget.pyi @@ -0,0 +1,53 @@ +""" +This type stub file was generated by pyright. +""" + +import glooey +import pyglet + +""" +widget.py +------------- + +A widget which can visualize trimesh.Scene objects in a glooey window. + +Check out an example in `examples/widget.py` +""" + +class SceneGroup(pyglet.graphics.Group): + def __init__(self, rect, scene, background=..., pixel_per_point=..., parent=...) -> None: ... + def set_state(self): # -> None: + ... + def unset_state(self): # -> None: + ... + +class MeshGroup(pyglet.graphics.Group): + def __init__(self, transform=..., texture=..., parent=...) -> None: ... + def set_state(self): # -> None: + ... + def unset_state(self): # -> None: + ... + +class SceneWidget(glooey.Widget): + def __init__(self, scene, **kwargs) -> None: ... + @property + def scene_group(self): # -> SceneGroup: + ... + def clear(self): # -> None: + ... + def reset_view(self): # -> None: + ... + def do_claim(self): # -> tuple[Literal[0], Literal[0]]: + ... + def do_regroup(self): # -> None: + ... + def do_draw(self): # -> None: + ... + def do_undraw(self): # -> None: + ... + def on_mouse_press(self, x, y, buttons, modifiers): # -> None: + ... + def on_mouse_drag(self, x, y, dx, dy, buttons, modifiers): # -> None: + ... + def on_mouse_scroll(self, x, y, dx, dy): # -> None: + ... diff --git a/typings/trimesh/viewer/windowed.pyi b/typings/trimesh/viewer/windowed.pyi new file mode 100644 index 00000000..5ece63e0 --- /dev/null +++ b/typings/trimesh/viewer/windowed.pyi @@ -0,0 +1,249 @@ +""" +This type stub file was generated by pyright. 
+""" + +import pyglet + +""" +windowed.py +--------------- + +Provides a pyglet- based windowed viewer to preview +Trimesh, Scene, PointCloud, and Path objects. + +Works on all major platforms: Windows, Linux, and OSX. +""" +if int(pyglet.version.split(".")[0]) >= 2: ... +_SMOOTH_MAX_FACES = ... + +class SceneViewer(pyglet.window.Window): + def __init__( + self, + scene, + smooth=..., + flags=..., + visible=..., + resolution=..., + start_loop=..., + callback=..., + callback_period=..., + caption=..., + fixed=..., + offset_lines=..., + line_settings=..., + background=..., + window_conf=..., + profile=..., + record=..., + **kwargs, + ) -> None: + """ + Create a window that will display a trimesh.Scene object + in an OpenGL context via pyglet. + + Parameters + --------------- + scene : trimesh.scene.Scene + Scene with geometry and transforms + smooth : bool + If True try to smooth shade things + flags : dict + If passed apply keys to self.view: + ['cull', 'wireframe', etc] + visible : bool + Display window or not + resolution : (2,) int + Initial resolution of window + start_loop : bool + Call pyglet.app.run() at the end of init + callback : function + A function which can be called periodically to + update things in the scene + callback_period : float + How often to call the callback, in seconds + fixed : None or iterable + List of keys in scene.geometry to skip view + transform on to keep fixed relative to camera + offset_lines : bool + If True, will offset lines slightly so if drawn + coplanar with mesh geometry they will be visible + background : None or (4,) uint8 + Color for background + window_conf : None, or gl.Config + Passed to window init + profile : bool + If set will run a `pyinstrument` profile for + every call to `on_draw` and print the output. 
+ record : bool + If True, will save a list of `png` bytes to + a list located in `scene.metadata['recording']` + kwargs : dict + Additional arguments to pass, including + 'background' for to set background color + """ + ... + def add_geometry(self, name, geometry, **kwargs): # -> None: + """ + Add a geometry to the viewer. + + Parameters + -------------- + name : hashable + Name that references geometry + geometry : Trimesh, Path2D, Path3D, PointCloud + Geometry to display in the viewer window + kwargs ** + Passed to rendering.convert_to_vertexlist + """ + ... + def cleanup_geometries(self): # -> None: + """ + Remove any stored vertex lists that no longer + exist in the scene. + """ + ... + def unhide_geometry(self, node): # -> None: + """ + If a node is hidden remove the flag and show the + geometry on the next draw. + + Parameters + ------------- + node : str + Node to display + """ + ... + def hide_geometry(self, node): # -> None: + """ + Don't display the geometry contained at a node on + the next draw. + + Parameters + ------------- + node : str + Node to not display + """ + ... + def reset_view(self, flags=...): # -> None: + """ + Set view to the default view. + + Parameters + -------------- + flags : None or dict + If any view key passed override the default + e.g. {'cull': False} + """ + ... + def init_gl(self): # -> None: + """ + Perform the magic incantations to create an + OpenGL scene using pyglet. + """ + ... + def toggle_culling(self): # -> None: + """ + Toggle back face culling. + + It is on by default but if you are dealing with + non- watertight meshes you probably want to be able + to see the back sides. + """ + ... + def toggle_wireframe(self): # -> None: + """ + Toggle wireframe mode + + Good for looking inside meshes, off by default. + """ + ... + def toggle_fullscreen(self): # -> None: + """ + Toggle between fullscreen and windowed mode. + """ + ... 
+ def toggle_axis(self): # -> None: + """ + Toggle a rendered XYZ/RGB axis marker: + off, world frame, every frame + """ + ... + def toggle_grid(self): # -> None: + """ + Toggle a rendered grid. + """ + ... + def update_flags(self): # -> None: + """ + Check the view flags, and call required GL functions. + """ + ... + def on_resize(self, width, height): # -> None: + """ + Handle resized windows. + """ + ... + def on_mouse_press(self, x, y, buttons, modifiers): # -> None: + """ + Set the start point of the drag. + """ + ... + def on_mouse_drag(self, x, y, dx, dy, buttons, modifiers): # -> None: + """ + Pan or rotate the view. + """ + ... + def on_mouse_scroll(self, x, y, dx, dy): # -> None: + """ + Zoom the view. + """ + ... + def on_key_press(self, symbol, modifiers): # -> None: + """ + Call appropriate functions given key presses. + """ + ... + def on_draw(self): # -> None: + """ + Run the actual draw calls. + """ + ... + def flip(self): # -> None: + ... + def save_image(self, file_obj): + """ + Save the current color buffer to a file object + in PNG format. + + Parameters + ------------- + file_obj: file name, or file- like object + """ + ... + +def render_scene(scene, resolution=..., visible=..., **kwargs): # -> bytes: + """ + Render a preview of a scene to a PNG. Note that + whether this works or not highly variable based on + platform and graphics driver. + + Parameters + ------------ + scene : trimesh.Scene + Geometry to be rendered + resolution : (2,) int or None + Resolution in pixels or set from scene.camera + visible : bool + Show a window during rendering. Note that MANY + platforms refuse to render with hidden windows + and will likely return a blank image; this is a + platform issue and cannot be fixed in Python. + kwargs : ** + Passed to SceneViewer + + Returns + --------- + render : bytes + Image in PNG format + """ + ... 
diff --git a/typings/trimesh/visual/__init__.pyi b/typings/trimesh/visual/__init__.pyi new file mode 100644 index 00000000..ea903a9f --- /dev/null +++ b/typings/trimesh/visual/__init__.pyi @@ -0,0 +1,40 @@ +""" +This type stub file was generated by pyright. +""" + +from .. import resolvers +from . import color, material, objects, texture +from .color import ( + DEFAULT_COLOR, + ColorVisuals, + interpolate, + linear_color_map, + random_color, + to_rgba, + uv_to_color, + uv_to_interpolated_color, +) +from .objects import concatenate, create_visual +from .texture import TextureVisuals + +""" +visual +------------- + +Handle visual properties for meshes, including color and texture +""" +__all__ = [ + "color", + "texture", + "resolvers", + "TextureVisuals", + "ColorVisuals", + "random_color", + "to_rgba", + "create_visual", + "DEFAULT_COLOR", + "interpolate", + "linear_color_map", + "uv_to_color", + "uv_to_interpolated_color", +] diff --git a/typings/trimesh/visual/base.pyi b/typings/trimesh/visual/base.pyi new file mode 100644 index 00000000..23a3c5ba --- /dev/null +++ b/typings/trimesh/visual/base.pyi @@ -0,0 +1,53 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +from ..util import ABC + +""" +base.py +------------- + +The base class for `Visual` objects +""" + +class Visuals(ABC): + """ + Parent of Visual classes. + """ + + @abc.abstractproperty + def kind(self): # -> None: + ... + @abc.abstractmethod + def update_vertices(self, mask): # -> None: + ... + @abc.abstractmethod + def update_faces(self, mask): # -> None: + ... + @abc.abstractmethod + def concatenate(self, other): # -> None: + ... + @abc.abstractmethod + def __hash__(self) -> int: ... + @abc.abstractmethod + def copy(self): # -> None: + ... + def __add__(self, other): # -> None: + """ + Concatenate two ColorVisuals objects into a single object. 
+ + Parameters + ----------- + other : Visuals + Other visual to concatenate + + Returns + ----------- + result : Visuals + Object containing information from current + object and other in the order (self, other) + """ + ... diff --git a/typings/trimesh/visual/color.pyi b/typings/trimesh/visual/color.pyi new file mode 100644 index 00000000..fc5e2f80 --- /dev/null +++ b/typings/trimesh/visual/color.pyi @@ -0,0 +1,474 @@ +""" +This type stub file was generated by pyright. +""" + +import numpy as np + +from .. import caching +from .base import Visuals + +""" +color.py +------------- + +Hold and deal with visual information about meshes. + +There are lots of ways to encode visual information, and the goal of this +architecture is to make it possible to define one, and then transparently +get the others. The two general categories are: + +1) colors, defined for a face, vertex, or material +2) textures, defined as an image and UV coordinates for each vertex + +This module only implements diffuse colors at the moment. + +Goals +---------- +1) If nothing is defined sane defaults should be returned +2) If a user alters or sets a value, that is considered user data + and should be saved and treated as such. +3) Only one 'mode' of visual (vertex or face) is allowed at a time + and setting or altering a value should automatically change the mode. +""" + +class ColorVisuals(Visuals): + """ + Store color information about a mesh. + """ + + def __init__(self, mesh=..., face_colors=..., vertex_colors=...) -> None: + """ + Store color information about a mesh. + + Parameters + ---------- + mesh : Trimesh + Object that these visual properties + are associated with + face_ colors : (n,3|4) or (3,) or (4,) uint8 + Colors per-face + vertex_colors : (n,3|4) or (3,) or (4,) uint8 + Colors per-vertex + """ + ... + @caching.cache_decorator + def transparency(self): # -> bool: + """ + Does the current object contain any transparency. 
+ + Returns + ---------- + transparency: bool, does the current visual contain transparency + """ + ... + @property + def defined(self): # -> bool: + """ + Are any colors defined for the current mesh. + + Returns + --------- + defined : bool + Are colors defined or not. + """ + ... + @property + def kind(self): # -> Literal['vertex', 'face'] | None: + """ + What color mode has been set. + + Returns + ---------- + mode : str or None + One of ('face', 'vertex', None) + """ + ... + def __hash__(self) -> int: ... + def copy(self): # -> ColorVisuals: + """ + Return a copy of the current ColorVisuals object. + + + Returns + ---------- + copied : ColorVisuals + Contains the same information as self + """ + ... + @property + def face_colors(self): # -> TrackedArray: + """ + Colors defined for each face of a mesh. + + If no colors are defined, defaults are returned. + + Returns + ---------- + colors : (len(mesh.faces), 4) uint8 + RGBA color for each face + """ + ... + @face_colors.setter + def face_colors(self, values): # -> None: + """ + Set the colors for each face of a mesh. + + This will apply these colors and delete any previously specified + color information. + + Parameters + ------------ + colors : (len(mesh.faces), 3), set each face to the specified color + (len(mesh.faces), 4), set each face to the specified color + (3,) int, set the whole mesh this color + (4,) int, set the whole mesh this color + """ + ... + @property + def vertex_colors(self): + """ + Return the colors for each vertex of a mesh + + Returns + ------------ + colors: (len(mesh.vertices), 4) uint8, color for each vertex + """ + ... + @vertex_colors.setter + def vertex_colors(self, values): # -> None: + """ + Set the colors for each vertex of a mesh + + This will apply these colors and delete any previously specified + color information. 
+ + Parameters + ------------ + colors : (len(mesh.vertices), 3), set each face to the color + (len(mesh.vertices), 4), set each face to the color + (3,) int, set the whole mesh this color + (4,) int, set the whole mesh this color + """ + ... + def update_vertices(self, mask): # -> None: + """ + Apply a mask to remove or duplicate vertex properties. + """ + ... + def update_faces(self, mask): # -> None: + """ + Apply a mask to remove or duplicate face properties + """ + ... + def face_subset(self, face_index): # -> ColorVisuals: + """ + Given a mask of face indices, return a sliced version. + + Parameters + ---------- + face_index: (n,) int, mask for faces + (n,) bool, mask for faces + + Returns + ---------- + visual: ColorVisuals object containing a subset of faces. + """ + ... + @property + def main_color(self): # -> NDArray[uint8] | ndarray[Any, Unknown]: + """ + What is the most commonly occurring color. + + Returns + ------------ + color: (4,) uint8, most common color + """ + ... + def to_texture(self): # -> TextureVisuals: + """ + Convert the current ColorVisuals object to a texture + with a `SimpleMaterial` defined. + + Returns + ------------ + visual : trimesh.visual.TextureVisuals + Copy of the current visuals as a texture. + """ + ... + def concatenate(self, other, *args): # -> TextureVisuals | ColorVisuals: + """ + Concatenate two or more ColorVisuals objects + into a single object. + + Parameters + ----------- + other : ColorVisuals + Object to append + *args: ColorVisuals objects + + Returns + ----------- + result : ColorVisuals + Containing information from current + object and others in the order it was passed. + """ + ... + +class VertexColor(Visuals): + """ + Create a simple visual object to hold just vertex colors + for objects such as PointClouds. + """ + + def __init__(self, colors=..., obj=...) -> None: + """ + Create a vertex color visual + """ + ... + @property + def kind(self): # -> Literal['vertex']: + ... 
+ def update_vertices(self, mask): # -> None: + ... + def update_faces(self, mask): # -> None: + ... + @property + def vertex_colors(self): # -> ndarray[Any, Unknown] | TrackedArray: + ... + @vertex_colors.setter + def vertex_colors(self, data): # -> None: + ... + def copy(self): # -> Self@VertexColor: + """ + Return a copy of the current visuals + """ + ... + def concatenate(self, other): # -> VertexColor: + """ + Concatenate this visual object with another + VertexVisuals. + + Parameters + ----------- + other : VertexColors or ColorVisuals + Other object to concatenate + + Returns + ------------ + concate : VertexColor + Object with both colors + """ + ... + def __hash__(self) -> int: ... + +def to_rgba(colors, dtype=...): # -> NDArray[uint8]: + """ + Convert a single or multiple RGB colors to RGBA colors. + + Parameters + ---------- + colors : (n, 3) or (n, 4) array + RGB or RGBA colors + + Returns + ---------- + colors : (n, 4) list of RGBA colors + (4,) single RGBA color + """ + ... + +def to_float(colors): + """ + Convert integer colors to 0.0 - 1.0 floating point colors + + Parameters + ------------- + colors : (n, d) int + Integer colors + + Returns + ------------- + as_float : (n, d) float + Float colors 0.0 - 1.0 + """ + ... + +def hex_to_rgba(color): # -> NDArray[uint8]: + """ + Turn a string hex color to a (4,) RGBA color. + + Parameters + ----------- + color: str, hex color + + Returns + ----------- + rgba: (4,) np.uint8, RGBA color + """ + ... + +def random_color(dtype=...): # -> NDArray[uint8]: + """ + Return a random RGB color using datatype specified. + + Parameters + ---------- + dtype: numpy dtype of result + + Returns + ---------- + color: (4,) dtype, random color that looks OK + """ + ... + +def vertex_to_face_color(vertex_colors, faces): # -> Any: + """ + Convert a list of vertex colors to face colors. 
+ + Parameters + ---------- + vertex_colors: (n,(3,4)), colors + faces: (m,3) int, face indexes + + Returns + ----------- + face_colors: (m,4) colors + """ + ... + +def face_to_vertex_color(mesh, face_colors, dtype=...): + """ + Convert face colors into vertex colors. + + Parameters + ----------- + mesh : trimesh.Trimesh object + face_colors: (n, (3,4)) int, face colors + dtype: data type of output + + Returns + ----------- + vertex_colors: (m,4) dtype, colors for each vertex + """ + ... + +def colors_to_materials( + colors, count=... +): # -> tuple[ndarray[Any, dtype[uint8]] | Unknown, NDArray[int64] | Unknown | NDArray[intp]]: + """ + Convert a list of colors into a list of unique materials + and material indexes. + + Parameters + ----------- + colors : (n, 3) or (n, 4) float + RGB or RGBA colors + count : int + Number of entities to apply color to + + Returns + ----------- + diffuse : (m, 4) int + Colors + index : (count,) int + Index of each color + """ + ... + +def linear_color_map(values, color_range=...): + """ + Linearly interpolate between two colors. + + If colors are not specified the function will + interpolate between 0.0 values as red and 1.0 as green. + + Parameters + -------------- + values : (n, ) float + Values to interpolate + color_range : None, or (2, 4) uint8 + What colors should extrema be set to + + Returns + --------------- + colors : (n, 4) uint8 + RGBA colors for interpolated values + """ + ... + +def interpolate(values, color_map=..., dtype=...): # -> NDArray[uint8]: + """ + Given a 1D list of values, return interpolated colors + for the range. + + Parameters + --------------- + values : (n, ) float + Values to be interpolated over + color_map : None, or str + Key to a colormap contained in: + matplotlib.pyplot.colormaps() + e.g: 'viridis' + + Returns + ------------- + interpolated : (n, 4) dtype + Interpolated RGBA colors + """ + ... + +def uv_to_color(uv, image): # -> None: + """ + Get the color in a texture image. 
+ + Parameters + ------------- + uv : (n, 2) float + UV coordinates on texture image + image : PIL.Image + Texture image + + Returns + ---------- + colors : (n, 4) uint4 + RGBA color at each of the UV coordinates + """ + ... + +def uv_to_interpolated_color(uv, image): # -> None: + """ + Get the color from texture image using bilinear sampling. + + Parameters + ------------- + uv : (n, 2) float + UV coordinates on texture image + image : PIL.Image + Texture image + + Returns + ---------- + colors : (n, 4) uint8 + RGBA color at each of the UV coordinates. + """ + ... + +def color_to_uv(vertex_colors): # -> tuple[SimpleMaterial, NDArray[floating[Any]]]: + """ + Pack vertex colors into UV coordinates and a simple image material + + Parameters + ------------ + vertex_colors : (n, 4) float + Array of vertex colors. + + Returns + ------------ + material : SimpleMaterial + Material containing color information. + uv : (n, 2) float + Normalized UV coordinates + """ + ... + +DEFAULT_COLOR = np.array([102, 102, 102, 255], dtype=np.uint8) diff --git a/typings/trimesh/visual/gloss.pyi b/typings/trimesh/visual/gloss.pyi new file mode 100644 index 00000000..e9486f59 --- /dev/null +++ b/typings/trimesh/visual/gloss.pyi @@ -0,0 +1,44 @@ +""" +This type stub file was generated by pyright. +""" + +def specular_to_pbr( + specularFactor=..., + glossinessFactor=..., + specularGlossinessTexture=..., + diffuseTexture=..., + diffuseFactor=..., + **kwargs, +): # -> dict[Unknown, Unknown]: + """ + Convert the KHR_materials_pbrSpecularGlossiness to a + metallicRoughness visual. + + Parameters + ----------- + specularFactor : list[float] + Specular color values. Ignored if specularGlossinessTexture + is present and defaults to [1.0, 1.0, 1.0]. + glossinessFactor : float + glossiness factor in range [0, 1], scaled + specularGlossinessTexture if present. + Defaults to 1.0. + specularGlossinessTexture : PIL.Image + Texture with 4 color channels. 
With [0,1,2] representing + specular RGB and 3 glossiness. + diffuseTexture : PIL.Image + Texture with 4 color channels. With [0,1,2] representing diffuse + RGB and 3 opacity. + diffuseFactor: float + Diffuse RGBA color. scales diffuseTexture if present. + Defaults to [1.0, 1.0, 1.0, 1.0]. + + Returns + ---------- + kwargs : dict + Constructor args for a PBRMaterial object. + Containing: + - either baseColorTexture or baseColorFactor + - either metallicRoughnessTexture or metallicFactor and roughnessFactor + """ + ... diff --git a/typings/trimesh/visual/material.pyi b/typings/trimesh/visual/material.pyi new file mode 100644 index 00000000..03f77088 --- /dev/null +++ b/typings/trimesh/visual/material.pyi @@ -0,0 +1,503 @@ +""" +This type stub file was generated by pyright. +""" + +import abc + +import numpy as np +from PIL import Image + +from .. import util +from ..typed import NDArray, Optional + +""" +material.py +------------- + +Store visual materials as objects. +""" +_eps = ... + +class Material(util.ABC): + def __init__(self, *args, **kwargs) -> None: ... + @abc.abstractmethod + def __hash__(self) -> int: ... + @abc.abstractproperty + def main_color(self): # -> None: + """ + The "average" color of this material. + + Returns + --------- + color : (4,) uint8 + Average color of this material. + """ + ... + @property + def name(self): # -> Literal['material_0']: + ... + @name.setter + def name(self, value): # -> None: + ... + def copy(self): # -> Self@Material: + ... + +class SimpleMaterial(Material): + """ + Hold a single image texture. + """ + + def __init__(self, image=..., diffuse=..., ambient=..., specular=..., glossiness=..., **kwargs) -> None: ... + def to_color(self, uv): # -> None: + ... + def to_obj(self, name=...): # -> tuple[dict[Unknown, Unknown], Unknown | Literal['material_0']]: + """ + Convert the current material to an OBJ format + material. 
+ + Parameters + ----------- + name : str or None + Name to apply to the material + + Returns + ----------- + tex_name : str + Name of material + mtl_name : str + Name of mtl file in files + files : dict + Data as {file name : bytes} + """ + ... + def __hash__(self) -> int: + """ + Provide a hash of the material so we can detect + duplicates. + + Returns + ------------ + hash : int + Hash of image and parameters + """ + ... + @property + def main_color(self): # -> NDArray[uint8]: + """ + Return the most prominent color. + """ + ... + @property + def glossiness(self): # -> float: + ... + @glossiness.setter + def glossiness(self, value): # -> None: + ... + def to_pbr(self): # -> PBRMaterial: + """ + Convert the current simple material to a + PBR material. + + Returns + ------------ + pbr : PBRMaterial + Contains material information in PBR format. + """ + ... + +class MultiMaterial(Material): + def __init__(self, materials=..., **kwargs) -> None: + """ + Wrapper for a list of Materials. + + Parameters + ---------- + materials : Optional[List[Material]] + List of materials with which the container to be initialized. + """ + ... + def to_pbr(self): # -> PBRMaterial: + """ + TODO : IMPLEMENT + """ + ... + def __hash__(self) -> int: + """ + Provide a hash of the multi material so we can detect + duplicates. + + Returns + ------------ + hash : int + Xor hash of the contained materials. + """ + ... + def __iter__(self): # -> Iterator[Unknown]: + ... + def __next__(self): ... + def __len__(self): # -> int: + ... + @property + def main_color(self): # -> None: + """ + The "average" color of this material. + + Returns + --------- + color : (4,) uint8 + Average color of this material. + """ + ... + def add(self, material): # -> None: + """ + Adds new material to the container. + + Parameters + ---------- + material : Material + The material to be added. + """ + ... + def get(self, idx): + """ + Get material by index. 
+ + Parameters + ---------- + idx : int + Index of the material to be retrieved. + + Returns + ------- + The material on the given index. + """ + ... + +class PBRMaterial(Material): + """ + Create a material for physically based rendering as + specified by GLTF 2.0: + https://git.io/fhkPZ + + Parameters with `Texture` in them must be PIL.Image objects + """ + + def __init__( + self, + name=..., + emissiveFactor=..., + emissiveTexture=..., + baseColorFactor=..., + metallicFactor=..., + roughnessFactor=..., + normalTexture=..., + occlusionTexture=..., + baseColorTexture=..., + metallicRoughnessTexture=..., + doubleSided=..., + alphaMode=..., + alphaCutoff=..., + **kwargs, + ) -> None: ... + @property + def emissiveFactor(self): # -> None: + """ + The factors for the emissive color of the material. + This value defines linear multipliers for the sampled + texels of the emissive texture. + + Returns + ----------- + emissiveFactor : (3,) float + Ech element in the array MUST be greater than + or equal to 0 and less than or equal to 1. + """ + ... + @emissiveFactor.setter + def emissiveFactor(self, value): # -> None: + ... + @property + def alphaMode(self): # -> None: + """ + The material alpha rendering mode enumeration + specifying the interpretation of the alpha value of + the base color. + + Returns + ----------- + alphaMode : str + One of 'OPAQUE', 'MASK', 'BLEND' + """ + ... + @alphaMode.setter + def alphaMode(self, value): # -> None: + ... + @property + def alphaCutoff(self): # -> None: + """ + Specifies the cutoff threshold when in MASK alpha mode. + If the alpha value is greater than or equal to this value + then it is rendered as fully opaque, otherwise, it is rendered + as fully transparent. A value greater than 1.0 will render + the entire material as fully transparent. This value MUST be + ignored for other alpha modes. When alphaMode is not defined, + this value MUST NOT be defined. + + Returns + ----------- + alphaCutoff : float + Value of cutoff. 
+ """ + ... + @alphaCutoff.setter + def alphaCutoff(self, value): # -> None: + ... + @property + def doubleSided(self): # -> None: + """ + Specifies whether the material is double sided. + + Returns + ----------- + doubleSided : bool + Specifies whether the material is double sided. + """ + ... + @doubleSided.setter + def doubleSided(self, value): # -> None: + ... + @property + def metallicFactor(self): # -> None: + """ + The factor for the metalness of the material. This value + defines a linear multiplier for the sampled metalness values + of the metallic-roughness texture. + + + Returns + ----------- + metallicFactor : float + How metally is the material + """ + ... + @metallicFactor.setter + def metallicFactor(self, value): # -> None: + ... + @property + def roughnessFactor(self): # -> None: + """ + The factor for the roughness of the material. This value + defines a linear multiplier for the sampled roughness values + of the metallic-roughness texture. + + Returns + ----------- + roughnessFactor : float + Roughness of material. + """ + ... + @roughnessFactor.setter + def roughnessFactor(self, value): # -> None: + ... + @property + def baseColorFactor(self): # -> None: + """ + The factors for the base color of the material. This + value defines linear multipliers for the sampled texels + of the base color texture. + + Returns + --------- + color : (4,) uint8 + RGBA color + """ + ... + @baseColorFactor.setter + def baseColorFactor(self, value): # -> None: + ... + @property + def normalTexture(self): # -> None: + """ + The normal map texture. + + Returns + ---------- + image : PIL.Image + Normal texture. + """ + ... + @normalTexture.setter + def normalTexture(self, value): # -> None: + ... + @property + def emissiveTexture(self): # -> None: + """ + The emissive texture. + + Returns + ---------- + image : PIL.Image + Emissive texture. + """ + ... + @emissiveTexture.setter + def emissiveTexture(self, value): # -> None: + ... 
+ @property + def occlusionTexture(self): # -> None: + """ + The occlusion texture. + + Returns + ---------- + image : PIL.Image + Occlusion texture. + """ + ... + @occlusionTexture.setter + def occlusionTexture(self, value): # -> None: + ... + @property + def baseColorTexture(self): # -> None: + """ + The base color texture image. + + Returns + ---------- + image : PIL.Image + Color texture. + """ + ... + @baseColorTexture.setter + def baseColorTexture(self, value): # -> None: + ... + @property + def metallicRoughnessTexture(self): # -> None: + """ + The metallic-roughness texture. + + Returns + ---------- + image : PIL.Image + Metallic-roughness texture. + """ + ... + @metallicRoughnessTexture.setter + def metallicRoughnessTexture(self, value): # -> None: + ... + @property + def name(self): # -> None: + ... + @name.setter + def name(self, value): # -> None: + ... + def copy(self): # -> PBRMaterial: + ... + def to_color(self, uv): # -> None: + """ + Get the rough color at a list of specified UV + coordinates. + + Parameters + ------------- + uv : (n, 2) float + UV coordinates on the material + + Returns + ------------- + colors + """ + ... + def to_simple(self): # -> SimpleMaterial: + """ + Get a copy of the current PBR material as + a simple material. + + Returns + ------------ + simple : SimpleMaterial + Contains material information in a simple manner + """ + ... + @property + def main_color(self): # -> NDArray[uint8]: + ... + def __hash__(self) -> int: + """ + Provide a hash of the material so we can detect + duplicate materials. + + Returns + ------------ + hash : int + Hash of image and parameters + """ + ... + +def empty_material(color: Optional[NDArray[np.uint8]] = ...) -> SimpleMaterial: + """ + Return an empty material set to a single color + + Parameters + ----------- + color : None or (3,) uint8 + RGB color + + Returns + ------------- + material : SimpleMaterial + Image is a a four pixel RGB + """ + ... 
+ +def color_image(color: Optional[NDArray[np.uint8]] = ...) -> Image: + """ + Generate an image with one color. + + Parameters + ---------- + color + Optional uint8 color + + Returns + ---------- + image + A (2, 2) RGBA image with the specified color. + """ + ... + +def pack( + materials, uvs, deduplicate=..., padding: int = ..., max_tex_size_individual=..., max_tex_size_fused=... +): # -> tuple[Unknown, NDArray[Unknown]] | tuple[PBRMaterial, NDArray[Unknown]] | tuple[SimpleMaterial, NDArray[Unknown]]: + """ + Pack multiple materials with texture into a single material. + + UV coordinates outside of the 0.0-1.0 range will be coerced + into this range using a "wrap" behavior (i.e. modulus). + + Alpha blending and backface culling settings are not supported! + Returns a material with alpha values set, but alpha blending disabled. + + Parameters + ----------- + materials : (n,) Material + List of multiple materials + uvs : (n, m, 2) float + Original UV coordinates + padding : int + Number of pixels to pad each image with. + max_tex_size_individual : int + Maximum size of each individual texture. + max_tex_size_fused : int | None + Maximum size of the combined texture. + Individual texture size will be reduced to fit. + Set to None to allow infinite size. + + Returns + ------------ + material : SimpleMaterial + Combined material. + uv : (p, 2) float + Combined UV coordinates in the 0.0-1.0 range. + """ + ... diff --git a/typings/trimesh/visual/objects.pyi b/typings/trimesh/visual/objects.pyi new file mode 100644 index 00000000..e373d9c3 --- /dev/null +++ b/typings/trimesh/visual/objects.pyi @@ -0,0 +1,49 @@ +""" +This type stub file was generated by pyright. +""" + +""" +objects.py +-------------- + +Deal with objects which hold visual properties, like +ColorVisuals and TextureVisuals. +""" + +def create_visual(**kwargs): # -> ColorVisuals: + """ + Create Visuals object from keyword arguments. 
+ + Parameters + ----------- + face_colors : (n, 3|4) uint8 + Face colors + vertex_colors : (n, 3|4) uint8 + Vertex colors + mesh : trimesh.Trimesh + Mesh object + + Returns + ---------- + visuals : ColorVisuals + Visual object created from arguments + """ + ... + +def concatenate(visuals, *args): # -> TextureVisuals | ColorVisuals: + """ + Concatenate multiple visual objects. + + Parameters + ---------- + visuals : ColorVisuals or list + Visuals to concatenate + *args : ColorVisuals or list + More visuals to concatenate + + Returns + ---------- + concat : Visuals + If all are color + """ + ... diff --git a/typings/trimesh/visual/texture.pyi b/typings/trimesh/visual/texture.pyi new file mode 100644 index 00000000..67a331fa --- /dev/null +++ b/typings/trimesh/visual/texture.pyi @@ -0,0 +1,189 @@ +""" +This type stub file was generated by pyright. +""" + +from .base import Visuals + +class TextureVisuals(Visuals): + def __init__(self, uv=..., material=..., image=..., face_materials=...) -> None: + """ + Store a single material and per-vertex UV coordinates + for a mesh. + + If passed UV coordinates and a single image it will + create a SimpleMaterial for the image. + + Parameters + -------------- + uv : (n, 2) float + UV coordinates for the mesh + material : Material + Store images and properties + image : PIL.Image + Can be passed to automatically create material + """ + ... + @property + def kind(self): # -> Literal['texture']: + """ + Return the type of visual data stored + + Returns + ---------- + kind : str + What type of visuals are defined + """ + ... + @property + def defined(self): # -> bool: + """ + Check if any data is stored + + Returns + ---------- + defined : bool + Are UV coordinates and images set? + """ + ... + def __hash__(self) -> int: + """ + Get a CRC of the stored data. + + Returns + -------------- + crc : int + Hash of items in self.vertex_attributes + """ + ... + @property + def uv(self): # -> None: + """ + Get the stored UV coordinates. 
+ + Returns + ------------ + uv : (n, 2) float or None + Pixel position per-vertex. + """ + ... + @uv.setter + def uv(self, values): # -> None: + """ + Set the UV coordinates. + + Parameters + -------------- + values : (n, 2) float or None + Pixel locations on a texture per- vertex + """ + ... + def copy(self, uv=...): # -> TextureVisuals: + """ + Return a copy of the current TextureVisuals object. + + Returns + ---------- + copied : TextureVisuals + Contains the same information in a new object + """ + ... + def to_color(self): # -> ColorVisuals: + """ + Convert textured visuals to a ColorVisuals with vertex + color calculated from texture. + + Returns + ----------- + vis : trimesh.visuals.ColorVisuals + Contains vertex color from texture + """ + ... + def face_subset(self, face_index): # -> TextureVisuals: + """ + Get a copy of + """ + ... + def update_vertices(self, mask): # -> None: + """ + Apply a mask to remove or duplicate vertex properties. + + Parameters + ------------ + mask : (len(vertices),) bool or (n,) int + Mask which can be used like: `vertex_attribute[mask]` + """ + ... + def update_faces(self, mask): # -> None: + """ + Apply a mask to remove or duplicate face properties, + not applicable to texture visuals. + """ + ... + def concatenate(self, others): # -> TextureVisuals | ColorVisuals: + """ + Concatenate this TextureVisuals object with others + and return the result without modifying this visual. + + Parameters + ----------- + others : (n,) Visuals + Other visual objects to concatenate + + Returns + ----------- + concatenated : TextureVisuals + Concatenated visual objects + """ + ... + +def unmerge_faces(faces, *args, **kwargs): # -> list[Unknown] | list[ndarray[Any, dtype[int64]]]: + """ + Textured meshes can come with faces referencing vertex + indices (`v`) and an array the same shape which references + vertex texture indices (`vt`) and sometimes even normal (`vn`). 
+ + Vertex locations with different values of any of these can't + be considered the "same" vertex, and for our simple data + model we need to not combine these vertices. + + Parameters + ------------- + faces : (n, d) int + References vertex indices + *args : (n, d) int + Various references of corresponding values + This is usually UV coordinates or normal indexes + maintain_faces : bool + Do not alter original faces and return no-op masks. + + Returns + ------------- + new_faces : (m, d) int + New faces for masked vertices + mask_v : (p,) int + A mask to apply to vertices + mask_* : (p,) int + A mask to apply to vt array to get matching UV coordinates + Returns as many of these as args were passed + """ + ... + +def power_resize(image, resample=..., square=...): + """ + Resize a PIL image so every dimension is a power of two. + + Parameters + ------------ + image : PIL.Image + Input image + resample : int + Passed to Image.resize + square : bool + If True, upsize to a square image + + Returns + ------------- + resized : PIL.Image + Input image resized + """ + ... diff --git a/typings/trimesh/voxel/__init__.pyi b/typings/trimesh/voxel/__init__.pyi new file mode 100644 index 00000000..c5bff389 --- /dev/null +++ b/typings/trimesh/voxel/__init__.pyi @@ -0,0 +1,7 @@ +""" +This type stub file was generated by pyright. +""" + +from .base import VoxelGrid + +__all__ = ["VoxelGrid"] diff --git a/typings/trimesh/voxel/base.pyi b/typings/trimesh/voxel/base.pyi new file mode 100644 index 00000000..4c8fb39f --- /dev/null +++ b/typings/trimesh/voxel/base.pyi @@ -0,0 +1,291 @@ +""" +This type stub file was generated by pyright. +""" + +from .. import caching +from ..parent import Geometry + +""" +voxel.py +----------- + +Convert meshes to a simple voxel data structure and back again. +""" + +class VoxelGrid(Geometry): + """ + Store 3D voxels. + """ + + def __init__(self, encoding, transform=..., metadata=...) -> None: ... 
+ def __hash__(self) -> int: + """ + Get the hash of the current transformation matrix. + + Returns + ------------ + hash : str + Hash of transformation matrix + """ + ... + @property + def encoding(self): + """ + `Encoding` object providing the occupancy grid. + + See `trimesh.voxel.encoding` for implementations. + """ + ... + @encoding.setter + def encoding(self, encoding): # -> None: + ... + @property + def transform(self): + """4x4 homogeneous transformation matrix.""" + ... + @transform.setter + def transform(self, matrix): # -> None: + """4x4 homogeneous transformation matrix.""" + ... + @property + def translation(self): + """Location of voxel at [0, 0, 0].""" + ... + @property + def scale(self): # -> Any: + """ + 3-element float representing per-axis scale. + + Raises a `RuntimeError` if `self.transform` has rotation or + shear components. + """ + ... + @property + def pitch(self): # -> Any: + """ + Uniform scaling factor representing the side length of + each voxel. + + Returns + ----------- + pitch : float + Pitch of the voxels. + + Raises + ------------ + `RuntimeError` + If `self.transformation` has rotation or shear + components of has non-uniform scaling. + """ + ... + @property + def element_volume(self): # -> Any: + ... + def apply_transform(self, matrix): # -> Self@VoxelGrid: + ... + def strip(self): # -> Self@VoxelGrid: + """ + Mutate self by stripping leading/trailing planes of zeros. + + Returns + -------- + self after mutation occurs in-place + """ + ... + @caching.cache_decorator + def bounds(self): # -> NDArray[Any]: + ... + @caching.cache_decorator + def extents(self): # -> Any: + ... + @caching.cache_decorator + def is_empty(self): ... + @property + def shape(self): + """3-tuple of ints denoting shape of occupancy grid.""" + ... + @caching.cache_decorator + def filled_count(self): + """int, number of occupied voxels in the grid.""" + ... 
+ def is_filled(self, point): # -> Any: + """ + Query points to see if the voxel cells they lie in are + filled or not. + + Parameters + ---------- + point : (n, 3) float + Points in space + + Returns + --------- + is_filled : (n,) bool + Is cell occupied or not for each point + """ + ... + def fill(self, method=..., **kwargs): # -> Self@VoxelGrid: + """ + Mutates self by filling in the encoding according + to `morphology.fill`. + + Parameters + ---------- + method : hashable + Implementation key, one of + `trimesh.voxel.morphology.fill.fillers` keys + **kwargs : dict + Additional kwargs passed through to + the keyed implementation. + + Returns + ---------- + self : VoxelGrid + After replacing encoding with a filled version. + """ + ... + def hollow(self): # -> Self@VoxelGrid: + """ + Mutates self by removing internal voxels + leaving only surface elements. + + Surviving elements are those in encoding that are + adjacent to an empty voxel where adjacency is + controlled by `structure`. + + Returns + ---------- + self : VoxelGrid + After replacing encoding with a surface version. + """ + ... + @caching.cache_decorator + def marching_cubes(self): # -> Trimesh: + """ + A marching cubes Trimesh representation of the voxels. + + No effort was made to clean or smooth the result in any way; + it is merely the result of applying the scikit-image + measure.marching_cubes function to self.encoding.dense. + + Returns + --------- + meshed : trimesh.Trimesh + Representing the current voxel + object as returned by marching cubes algorithm. + """ + ... + @property + def matrix(self): + """ + Return a DENSE matrix of the current voxel encoding. + + Returns + ------------- + dense : (a, b, c) bool + Numpy array of dense matrix + Shortcut to voxel.encoding.dense + """ + ... + @caching.cache_decorator + def volume(self): # -> Any: + """ + What is the volume of the filled cells in the current + voxel object. + + Returns + --------- + volume : float + Volume of filled cells. 
+ """ + ... + @caching.cache_decorator + def points(self): # -> ndarray[Any, dtype[float64]] | Any: + """ + The center of each filled cell as a list of points. + + Returns + ---------- + points : (self.filled, 3) float + Points in space. + """ + ... + @property + def sparse_indices(self): + """(n, 3) int array of sparse indices of occupied voxels.""" + ... + def as_boxes(self, colors=..., **kwargs): # -> Trimesh: + """ + A rough Trimesh representation of the voxels with a box + for each filled voxel. + + Parameters + ---------- + colors : None, (3,) or (4,) float or uint8 + (X, Y, Z, 3) or (X, Y, Z, 4) float or uint8 + Where matrix.shape == (X, Y, Z) + + Returns + --------- + mesh : trimesh.Trimesh + Mesh with one box per filled cell. + """ + ... + def points_to_indices(self, points): # -> NDArray[Any]: + """ + Convert points to indices in the matrix array. + + Parameters + ---------- + points: (n, 3) float, point in space + + Returns + --------- + indices: (n, 3) int array of indices into self.encoding + """ + ... + def indices_to_points(self, indices): # -> ndarray[Any, dtype[float64]] | Any: + ... + def show(self, *args, **kwargs): # -> SceneViewer: + """ + Convert the current set of voxels into a trimesh for visualization + and show that via its built- in preview method. + """ + ... + def copy(self): # -> VoxelGrid: + ... + def export(self, file_obj=..., file_type=..., **kwargs): + """ + Export the current VoxelGrid. + + Parameters + ------------ + file_obj : file-like or str + File or file-name to export to. + file_type : None or str + Only 'binvox' currently supported. + + Returns + --------- + export : bytes + Value of export. + """ + ... + def revoxelized(self, shape): # -> VoxelGrid: + """ + Create a new VoxelGrid without rotations, reflections + or shearing. + + Parameters + ---------- + shape : (3, int) + The shape of the returned VoxelGrid. 
+ + Returns + ---------- + vox : VoxelGrid + Of the given shape with possibly non-uniform + scale and translation transformation matrix. + """ + ... + def __add__(self, other): ... diff --git a/typings/trimesh/voxel/creation.pyi b/typings/trimesh/voxel/creation.pyi new file mode 100644 index 00000000..af917081 --- /dev/null +++ b/typings/trimesh/voxel/creation.pyi @@ -0,0 +1,130 @@ +""" +This type stub file was generated by pyright. +""" + +from ..constants import log_time + +@log_time +def voxelize_subdivide(mesh, pitch, max_iter=..., edge_factor=...): # -> VoxelGrid: + """ + Voxelize a surface by subdividing a mesh until every edge is + shorter than: (pitch / edge_factor) + + Parameters + ----------- + mesh : trimesh.Trimesh + Source mesh + pitch : float + Side length of a single voxel cube + max_iter : int + Cap maximum subdivisions or None for no limit. + edge_factor : float + Proportion of pitch maximum edge length. + + Returns + ----------- + VoxelGrid instance representing the voxelized mesh. + """ + ... + +def local_voxelize(mesh, point, pitch, radius, fill=..., **kwargs): # -> VoxelGrid | None: + """ + Voxelize a mesh in the region of a cube around a point. When fill=True, + uses proximity.contains to fill the resulting voxels so may be meaningless + for non-watertight meshes. Useful to reduce memory cost for small values of + pitch as opposed to global voxelization. + + Parameters + ----------- + mesh : trimesh.Trimesh + Source geometry + point : (3, ) float + Point in space to voxelize around + pitch : float + Side length of a single voxel cube + radius : int + Number of voxel cubes to return in each direction. + kwargs : parameters to pass to voxelize_subdivide + + Returns + ----------- + voxels : VoxelGrid instance with resolution (m, m, m) where m=2*radius+1 + or None if the volume is empty + """ + ... + +@log_time +def voxelize_ray(mesh, pitch, per_cell=...): # -> VoxelGrid: + """ + Voxelize a mesh using ray queries. 
+ + Parameters + ------------- + mesh : Trimesh object + Mesh to be voxelized + pitch : float + Length of voxel cube + per_cell : (2,) int + How many ray queries to make per cell + + Returns + ------------- + VoxelGrid instance representing the voxelized mesh. + """ + ... + +@log_time +def voxelize_binvox(mesh, pitch=..., dimension=..., bounds=..., **binvoxer_kwargs): # -> VoxelGrid: + """ + Voxelize via binvox tool. + + Parameters + -------------- + mesh : trimesh.Trimesh + Mesh to voxelize + pitch : float + Side length of each voxel. Ignored if dimension is provided + dimension: int + Number of voxels along each dimension. If not provided, this is + calculated based on pitch and bounds/mesh extents + bounds: (2, 3) float + min/max values of the returned `VoxelGrid` in each instance. Uses + `mesh.bounds` if not provided. + **binvoxer_kwargs: + Passed to `trimesh.exchange.binvox.Binvoxer`. + Should not contain `bounding_box` if bounds is not None. + + Returns + -------------- + `VoxelGrid` instance + + Raises + -------------- + `ValueError` if `bounds is not None and 'bounding_box' in binvoxer_kwargs`. + """ + ... + +voxelizers = ... + +def voxelize(mesh, pitch, method=..., **kwargs): + """ + Voxelize the given mesh using the specified implementation. + + See `voxelizers` for available implementations or to add your own, e.g. via + `voxelizers['custom_key'] = custom_fn`. + + `custom_fn` should have signature `(mesh, pitch, **kwargs) -> VoxelGrid` + and should not modify encoding. + + Parameters + -------------- + mesh: Trimesh object (left unchanged). + pitch: float, side length of each voxel. + method: implementation method. Must be in `fillers`. + **kwargs: additional kwargs passed to the specified implementation. + + Returns + -------------- + A VoxelGrid instance. + """ + ... 
diff --git a/typings/trimesh/voxel/encoding.pyi b/typings/trimesh/voxel/encoding.pyi
new file mode 100644
index 00000000..119d6755
--- /dev/null
+++ b/typings/trimesh/voxel/encoding.pyi
@@ -0,0 +1,484 @@
+"""
+This type stub file was generated by pyright.
+"""
+
+import abc
+
+from .. import caching
+from ..util import ABC
+
+"""OO interfaces to encodings for ND arrays with caching."""
+
+class Encoding(ABC):
+    """
+    Base class for objects that implement a specific subset of ndarray ops.
+
+    This presents a unified interface for various different ways of encoding
+    conceptually dense arrays and to interoperate between them.
+
+    Example implementations are ND sparse arrays, run length encoded arrays
+    and dense encodings (wrappers around np.ndarrays).
+    """
+
+    def __init__(self, data) -> None: ...
+    @abc.abstractproperty
+    def dtype(self):  # -> None:
+        ...
+    @abc.abstractproperty
+    def shape(self):  # -> None:
+        ...
+    @abc.abstractproperty
+    def sum(self):  # -> None:
+        ...
+    @abc.abstractproperty
+    def size(self):  # -> None:
+        ...
+    @abc.abstractproperty
+    def sparse_indices(self):  # -> None:
+        ...
+    @abc.abstractproperty
+    def sparse_values(self):  # -> None:
+        ...
+    @abc.abstractproperty
+    def dense(self):  # -> None:
+        ...
+    @abc.abstractmethod
+    def gather_nd(self, indices):  # -> None:
+        ...
+    @abc.abstractmethod
+    def mask(self, mask):  # -> None:
+        ...
+    @abc.abstractmethod
+    def get_value(self, index):  # -> None:
+        ...
+    @abc.abstractmethod
+    def copy(self):  # -> None:
+        ...
+    @property
+    def is_empty(self): ...
+    @caching.cache_decorator
+    def stripped(self):  # -> tuple[DenseEncoding, NDArray[Any]]:
+        """
+        Get encoding with all zeros stripped from the start and end
+        of each axis.
+
+        Returns
+        ------------
+        encoding: ?
+        padding : (n, 2) int
+            Padding at the start and end that was stripped
+        """
+        ...
+    def __hash__(self) -> int:
+        """
+        Get the hash of the current transformation matrix.
+ + Returns + ------------ + hash : str + Hash of transformation matrix + """ + ... + @property + def ndims(self): # -> int: + ... + def reshape(self, shape): # -> FlattenedEncoding | ShapedEncoding: + ... + @property + def flat(self): # -> FlattenedEncoding: + ... + def flip(self, axis=...): # -> Self@Encoding | FlippedEncoding: + ... + @property + def sparse_components(self): # -> tuple[None, None]: + ... + @property + def data(self): # -> Unknown: + ... + def run_length_data(self, dtype=...): # -> ndarray[Any, dtype[int64]]: + ... + def binary_run_length_data(self, dtype=...): # -> NDArray[int64] | NDArray[Unknown]: + ... + def transpose(self, perm): # -> Self@Encoding | TransposedEncoding: + ... + @property + def mutable(self): ... + @mutable.setter + def mutable(self, value): # -> None: + ... + +class DenseEncoding(Encoding): + """Simple `Encoding` implementation based on a numpy ndarray.""" + + def __init__(self, data) -> None: ... + @property + def dtype(self): ... + @property + def shape(self): ... + @caching.cache_decorator + def sum(self): ... + @caching.cache_decorator + def is_empty(self): # -> bool: + ... + @property + def size(self): ... + @property + def sparse_components(self): # -> tuple[Any, Unknown]: + ... + @caching.cache_decorator + def sparse_indices(self): # -> NDArray[intp]: + ... + @caching.cache_decorator + def sparse_values(self): ... + @property + def dense(self): # -> Unknown: + ... + def gather(self, indices): ... + def gather_nd(self, indices): ... + def mask(self, mask): ... + def get_value(self, index): ... + def reshape(self, shape): # -> DenseEncoding: + ... + @property + def flat(self): # -> DenseEncoding: + ... + def copy(self): # -> DenseEncoding: + ... + +class SparseEncoding(Encoding): + """ + `Encoding` implementation based on an ND sparse implementation. + + Since the scipy.sparse implementations are for 2D arrays only, this + implementation uses a single-column CSC matrix with index + raveling/unraveling. 
+ """ + + def __init__(self, indices, values, shape=...) -> None: + """ + Parameters + ------------ + indices: (m, n)-sized int array of indices + values: (m, n)-sized dtype array of values at the specified indices + shape: (n,) iterable of integers. If None, the maximum value of indices + + 1 is used. + """ + ... + @staticmethod + def from_dense(dense_data): # -> SparseEncoding: + ... + def copy(self): # -> SparseEncoding: + ... + @property + def sparse_indices(self): ... + @property + def sparse_values(self): ... + @property + def dtype(self): ... + @caching.cache_decorator + def sum(self): ... + @property + def ndims(self): ... + @property + def shape(self): # -> tuple[Unknown, ...]: + ... + @property + def size(self): # -> int_: + ... + @property + def sparse_components(self): # -> tuple[Unknown, Unknown]: + ... + @caching.cache_decorator + def dense(self): # -> NDArray[float64]: + ... + def gather_nd(self, indices): # -> ndarray[Any, dtype[Any]]: + ... + def mask(self, mask): ... + def get_value(self, index): ... + @caching.cache_decorator + def stripped(self): # -> tuple[DenseEncoding, NDArray[Any]] | tuple[SparseEncoding, NDArray[Any]]: + """ + Get encoding with all zeros stripped from the start/end of each axis. + + Returns: + encoding: SparseEncoding with same values but indices shifted down + by padding[:, 0] + padding: (n, 2) array of ints denoting padding at the start/end + that was stripped + """ + ... + +def SparseBinaryEncoding(indices, shape=...): # -> SparseEncoding: + """ + Convenient factory constructor for SparseEncodings with values all ones. + + Parameters + ------------ + indices: (m, n) sparse indices into conceptual rank-n array + shape: length n iterable or None. If None, maximum of indices along first + axis + 1 is used + + Returns + ------------ + rank n bool `SparseEncoding` with True values at each index. + """ + ... + +class RunLengthEncoding(Encoding): + """1D run length encoding. 
+ + See `trimesh.voxel.runlength` documentation for implementation details. + """ + + def __init__(self, data, dtype=...) -> None: + """ + Parameters + ------------ + data: run length encoded data. + dtype: dtype of encoded data. Each second value of data is cast will be + cast to this dtype if provided. + """ + ... + @caching.cache_decorator + def is_empty(self): # -> bool: + ... + @property + def ndims(self): # -> Literal[1]: + ... + @property + def shape(self): # -> tuple[Any]: + ... + @property + def dtype(self): ... + def __hash__(self) -> int: + """ + Get the hash of the current transformation matrix. + + Returns + ------------ + hash : str + Hash of transformation matrix + """ + ... + @staticmethod + def from_dense(dense_data, dtype=..., encoding_dtype=...): # -> RunLengthEncoding: + ... + @staticmethod + def from_rle(rle_data, dtype=...): # -> RunLengthEncoding: + ... + @staticmethod + def from_brle(brle_data, dtype=...): # -> RunLengthEncoding: + ... + @caching.cache_decorator + def stripped( + self, + ): # -> tuple[DenseEncoding, NDArray[Any]] | tuple[Self@RunLengthEncoding | RunLengthEncoding, NDArray[Any]]: + ... + @caching.cache_decorator + def sum(self): ... + @caching.cache_decorator + def size(self): ... + @caching.cache_decorator + def sparse_components(self): # -> tuple[list[Unknown], list[Unknown]] | tuple[NDArray[Unknown], NDArray[Unknown]]: + ... + @caching.cache_decorator + def sparse_indices(self): # -> Any: + ... + @caching.cache_decorator + def sparse_values(self): # -> Any: + ... + @caching.cache_decorator + def dense(self): # -> NDArray[int64]: + ... + def gather(self, indices): # -> NDArray[float64]: + ... + def gather_nd(self, indices): # -> NDArray[float64]: + ... + def sorted_gather(self, ordered_indices): # -> NDArray[Unknown]: + ... + def mask(self, mask): # -> NDArray[Unknown]: + ... + def get_value(self, index): # -> Any | None: + ... + def copy(self): # -> RunLengthEncoding: + ... 
+ def run_length_data(self, dtype=...): # -> ndarray[Any, dtype[int64]]: + ... + def binary_run_length_data(self, dtype=...): # -> NDArray[int64] | NDArray[Unknown] | NDArray[Any] | list[int]: + ... + +class BinaryRunLengthEncoding(RunLengthEncoding): + """1D binary run length encoding. + + See `trimesh.voxel.runlength` documentation for implementation details. + """ + + def __init__(self, data) -> None: + """ + Parameters + ------------ + data: binary run length encoded data. + """ + ... + @caching.cache_decorator + def is_empty(self): # -> bool: + ... + @staticmethod + def from_dense(dense_data, encoding_dtype=...): # -> BinaryRunLengthEncoding: + ... + @staticmethod + def from_rle(rle_data, dtype=...): # -> BinaryRunLengthEncoding: + ... + @staticmethod + def from_brle(brle_data, dtype=...): # -> BinaryRunLengthEncoding: + ... + @caching.cache_decorator + def stripped( + self, + ): # -> tuple[DenseEncoding, NDArray[Any]] | tuple[Self@BinaryRunLengthEncoding | BinaryRunLengthEncoding, NDArray[Any]]: + ... + @caching.cache_decorator + def sum(self): ... + @caching.cache_decorator + def size(self): ... + @property + def sparse_components(self): # -> tuple[Any, Any]: + ... + @caching.cache_decorator + def sparse_values(self): # -> NDArray[Any]: + ... + @caching.cache_decorator + def sparse_indices(self): # -> NDArray[int64]: + ... + @caching.cache_decorator + def dense(self): # -> ndarray[Any, dtype[Any]]: + ... + def gather(self, indices): # -> NDArray[float64]: + ... + def gather_nd(self, indices): # -> NDArray[float64]: + ... + def sorted_gather(self, ordered_indices): # -> NDArray[Any]: + ... + def mask(self, mask): # -> NDArray[Any]: + ... + def copy(self): # -> BinaryRunLengthEncoding: + ... + def run_length_data(self, dtype=...): # -> ndarray[Any, dtype[int64]]: + ... + def binary_run_length_data(self, dtype=...): # -> NDArray[int64] | NDArray[Unknown] | NDArray[Any]: + ... 
+ +class LazyIndexMap(Encoding): + """ + Abstract class for implementing lazy index mapping operations. + + Implementations include transpose, flatten/reshaping and flipping + + Derived classes must implement: + * _to_base_indices(indices) + * _from_base_indices(base_indices) + * shape + * dense + * mask(mask) + """ + + @property + def is_empty(self): ... + @property + def dtype(self): ... + @property + def sum(self): ... + @property + def size(self): ... + @property + def sparse_indices(self): # -> None: + ... + @property + def sparse_values(self): ... + def gather_nd(self, indices): ... + def get_value(self, index): ... + +class FlattenedEncoding(LazyIndexMap): + """ + Lazily flattened encoding. + + Dense equivalent is np.reshape(data, (-1,)) (np.flatten creates a copy). + """ + + @property + def shape(self): # -> tuple[Unknown]: + ... + @property + def dense(self): ... + def mask(self, mask): ... + @property + def flat(self): # -> Self@FlattenedEncoding: + ... + def copy(self): # -> FlattenedEncoding: + ... + +class ShapedEncoding(LazyIndexMap): + """ + Lazily reshaped encoding. + + Numpy equivalent is `np.reshape` + """ + + def __init__(self, encoding, shape) -> None: ... + @property + def flat(self): # -> Unknown: + ... + @property + def shape(self): # -> tuple[Unknown, ...]: + ... + @property + def dense(self): ... + def mask(self, mask): ... + def copy(self): # -> ShapedEncoding: + ... + +class TransposedEncoding(LazyIndexMap): + """ + Lazily transposed encoding + + Dense equivalent is `np.transpose` + """ + + def __init__(self, base_encoding, perm) -> None: ... + def transpose(self, perm): # -> Unknown: + ... + @property + def perm(self): # -> NDArray[int64]: + ... + @property + def shape(self): # -> tuple[Unknown, ...]: + ... + @property + def dense(self): ... + def gather(self, indices): ... + def mask(self, mask): ... + def get_value(self, index): ... + @property + def data(self): # -> Unknown: + ... + def copy(self): # -> TransposedEncoding: + ... 
+ +class FlippedEncoding(LazyIndexMap): + """ + Encoding with entries flipped along one or more axes. + + Dense equivalent is `np.flip` + """ + + def __init__(self, encoding, axes) -> None: ... + @property + def shape(self): ... + @property + def dense(self): # -> NDArray[Unknown]: + ... + def mask(self, mask): ... + def copy(self): # -> FlippedEncoding: + ... + def flip(self, axis=...): # -> Self@FlippedEncoding: + ... diff --git a/typings/trimesh/voxel/morphology.pyi b/typings/trimesh/voxel/morphology.pyi new file mode 100644 index 00000000..3b904bae --- /dev/null +++ b/typings/trimesh/voxel/morphology.pyi @@ -0,0 +1,117 @@ +""" +This type stub file was generated by pyright. +""" + +from ..constants import log_time + +"""Basic morphology operations that create new encodings.""" + +@log_time +def fill_base(encoding): # -> SparseEncoding: + """ + Given a sparse surface voxelization, fill in between columns. + + Parameters + -------------- + encoding: Encoding object or sparse array with shape (?, 3) + + Returns + -------------- + A new filled encoding object. + """ + ... + +@log_time +def fill_orthographic(encoding): # -> DenseEncoding: + """ + Fill the given encoding by orthographic projection method. + + Any voxel in the dense representation with no free ray along the x, y, z + axes in each direction is assigned filled. This is likely faster than fill + holes, and is more stable with regards to small holes. + + Parameters + -------------- + encoding: Encoding object or dense rank-3 array. + + Returns + -------------- + A new filled encoding object. + """ + ... + +@log_time +def fill_holes(encoding, **kwargs): # -> DenseEncoding: + """ + Encoding wrapper around scipy.ndimage.morphology.binary_fill_holes. + + https://docs.scipy.org/doc/scipy-0.15.1/reference/generated/scipy.ndimage.morphology.binary_fill_holes.html#scipy.ndimage.morphology.binary_fill_holes + + Parameters + -------------- + encoding: Encoding object or dense rank-3 array. 
+ **kwargs: see scipy.ndimage.morphology.binary_fill_holes. + + Returns + -------------- + A new filled in encoding object. + """ + ... + +fillers = ... + +def fill(encoding, method=..., **kwargs): + """ + Fill the given encoding using the specified implementation. + + See `fillers` for available implementations or to add your own, e.g. via + `fillers['custom_key'] = custom_fn`. + + `custom_fn` should have signature `(encoding, **kwargs) -> filled_encoding` + and should not modify encoding. + + Parameters + -------------- + encoding: Encoding object (left unchanged). + method: method present in `fillers`. + **kwargs: additional kwargs passed to the specified implementation. + + Returns + -------------- + A new filled Encoding object. + """ + ... + +def binary_dilation(encoding, **kwargs): # -> DenseEncoding: + """ + Encoding wrapper around scipy.ndimage.morphology.binary_dilation. + + https://docs.scipy.org/doc/scipy-0.15.1/reference/generated/scipy.ndimage.morphology.binary_dilation.html#scipy.ndimage.morphology.binary_dilation + """ + ... + +def binary_closing(encoding, **kwargs): # -> DenseEncoding: + """ + Encoding wrapper around scipy.ndimage.morphology.binary_closing. + + https://docs.scipy.org/doc/scipy-0.15.1/reference/generated/scipy.ndimage.morphology.binary_closing.html#scipy.ndimage.morphology.binary_closing + """ + ... + +def surface(encoding, structure=...): # -> DenseEncoding: + """ + Get elements on the surface of encoding. + + A surface element is any one in encoding that is adjacent to an empty + voxel. + + Parameters + -------------- + encoding: Encoding or dense rank-3 array + structure: adjacency structure. If None, square connectivity is used. + + Returns + -------------- + new surface Encoding. + """ + ... diff --git a/typings/trimesh/voxel/ops.pyi b/typings/trimesh/voxel/ops.pyi new file mode 100644 index 00000000..a8b68f30 --- /dev/null +++ b/typings/trimesh/voxel/ops.pyi @@ -0,0 +1,169 @@ +""" +This type stub file was generated by pyright. 
+""" + +def fill_orthographic(dense): # -> Any: + ... + +def fill_base(sparse_indices): # -> NDArray[signedinteger[_NBitIntP]]: + """ + Given a sparse surface voxelization, fill in between columns. + + Parameters + -------------- + sparse_indices: (n, 3) int, location of filled cells + + Returns + -------------- + filled: (m, 3) int, location of filled cells + """ + ... + +fill_voxelization = ... + +def matrix_to_marching_cubes(matrix, pitch=...): # -> Trimesh: + """ + Convert an (n, m, p) matrix into a mesh, using marching_cubes. + + Parameters + ----------- + matrix : (n, m, p) bool + Occupancy array + + Returns + ---------- + mesh : trimesh.Trimesh + Mesh generated by meshing voxels using + the marching cubes algorithm in skimage + """ + ... + +def sparse_to_matrix(sparse): # -> ndarray[Any, dtype[Any]]: + """ + Take a sparse (n,3) list of integer indexes of filled cells, + turn it into a dense (m,o,p) matrix. + + Parameters + ----------- + sparse : (n, 3) int + Index of filled cells + + Returns + ------------ + dense : (m, o, p) bool + Matrix of filled cells + """ + ... + +def points_to_marching_cubes(points, pitch=...): # -> Trimesh: + """ + Mesh points by assuming they fill a voxel box, and then + running marching cubes on them + + Parameters + ------------ + points : (n, 3) float + Points in 3D space + + Returns + ------------- + mesh : trimesh.Trimesh + Points meshed using marching cubes + """ + ... + +def multibox(centers, pitch=..., colors=...): # -> Trimesh: + """ + Return a Trimesh object with a box at every center. + + Doesn't do anything nice or fancy. + + Parameters + ----------- + centers : (n, 3) float + Center of boxes that are occupied + pitch : float + The edge length of a voxel + colors : (3,) or (4,) or (n,3) or (n, 4) float + Color of boxes + + Returns + --------- + rough : Trimesh + Mesh object representing inputs + """ + ... 
+ +def boolean_sparse(a, b, operation=...): + """ + Find common rows between two arrays very quickly + using 3D boolean sparse matrices. + + Parameters + ----------- + a: (n, d) int, coordinates in space + b: (m, d) int, coordinates in space + operation: numpy operation function, ie: + np.logical_and + np.logical_or + + Returns + ----------- + coords: (q, d) int, coordinates in space + """ + ... + +def strip_array(data): # -> tuple[Unknown, NDArray[Any]]: + ... + +def indices_to_points(indices, pitch=..., origin=...): # -> NDArray[floating[_64Bit]]: + """ + Convert indices of an (n,m,p) matrix into a set of voxel center points. + + Parameters + ---------- + indices: (q, 3) int, index of voxel matrix (n,m,p) + pitch: float, what pitch was the voxel matrix computed with + origin: (3,) float, what is the origin of the voxel matrix + + Returns + ---------- + points: (q, 3) float, list of points + """ + ... + +def matrix_to_points(matrix, pitch=..., origin=...): # -> NDArray[floating[_64Bit]]: + """ + Convert an (n,m,p) matrix into a set of points for each voxel center. + + Parameters + ----------- + matrix: (n,m,p) bool, voxel matrix + pitch: float, what pitch was the voxel matrix computed with + origin: (3,) float, what is the origin of the voxel matrix + + Returns + ---------- + points: (q, 3) list of points + """ + ... + +def points_to_indices(points, pitch=..., origin=...): # -> NDArray[Any]: + """ + Convert center points of an (n,m,p) matrix into its indices. + + Parameters + ---------- + points : (q, 3) float + Center points of voxel matrix (n,m,p) + pitch : float + What pitch was the voxel matrix computed with + origin : (3,) float + What is the origin of the voxel matrix + + Returns + ---------- + indices : (q, 3) int + List of indices + """ + ... 
diff --git a/typings/trimesh/voxel/runlength.pyi b/typings/trimesh/voxel/runlength.pyi
new file mode 100644
index 00000000..1c72d246
--- /dev/null
+++ b/typings/trimesh/voxel/runlength.pyi
@@ -0,0 +1,416 @@
+"""
+This type stub file was generated by pyright.
+"""
+
+import numpy as np
+
+"""
+Numpy encode/decode/utility implementations for run length encodings.
+
+# Run Length Encoded Features
+
+Encoding/decoding functions for run length encoded data.
+
+We include code for two variations:
+
+* run length encoding (RLE)
+* binary run length encoding (BRLE)
+
+RLE stores sequences of repeated values as the value followed by its count, e.g.
+
+```python
+dense_to_rle([5, 5, 3, 2, 2, 2, 2, 6]) == [5, 2, 3, 1, 2, 4, 6, 1]
+```
+
+i.e. the value `5` is repeated `2` times, then `3` is repeated `1` time, `2` is
+repeated `4` times and `6` is repeated `1` time.
+
+BRLE is an optimized form for when the stored values can only be `0` or `1`.
+This means we only need to save the counts, and assume the values alternate
+(starting at `0`).
+
+```python
+dense_to_brle([1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]) == \
+    [0, 2, 4, 7, 2]
+```
+
+i.e. the value zero occurs `0` times, followed by `2` ones, `4` zeros, `7` ones
+and `2` zeros.
+
+Sequences with counts exceeding the data type's maximum value have to be
+handled carefully. For example, the `uint8` encoding of 300 zeros
+(`uint8` has a max value of 255) is:
+
+* RLE: `[0, 255, 0, 45]` (`0` repeated `255` times + `0` repeated `45` times)
+* BRLE: `[255, 0, 45, 0]` (`255` zeros + `0` ones + `45` zeros + `0` ones)
+
+This module contains implementations of various RLE/BRLE operations.
+"""
+
+def brle_length(brle):
+    """Optimized implementation of `len(brle_to_dense(brle))`"""
+    ...
+
+def rle_length(rle):
+    """Optimized implementation of `len(rle_to_dense(rle_to_brle(rle)))`"""
+    ...
+
+def rle_to_brle(rle, dtype=...):  # -> NDArray[int64] | NDArray[Unknown] | NDArray[Any] | list[int]:
+    """
+    Convert run length encoded (RLE) value/counts to BRLE.
+
+    RLE data is stored in a rank 1 array with each pair giving:
+    (value, count)
+
+    e.g. the RLE encoding of [4, 4, 4, 1, 1, 6] is [4, 3, 1, 2, 6, 1].
+
+    Parameters
+    ----------
+    rle : (n,) int
+        Run length encoded data
+
+    Returns
+    ----------
+    equivalent binary run length encoding. a list if dtype is None,
+    otherwise brle_to_brle is called on that list before returning.
+
+    Raises
+    ----------
+    ValueError
+        If any of the even counts of `rle` are not zero or 1.
+    """
+    ...
+
+def brle_logical_not(brle):
+    """
+    Get the BRLE encoding of the `logical_not`ed dense form of `brle`.
+
+    Equivalent to `dense_to_brle(np.logical_not(brle_to_dense(brle)))` but
+    highly optimized - just pads brle with a 0 on each end (or strips if
+    existing endpoints are both zero).
+
+    Parameters
+    ----------
+    brle: rank 1 int array of binary run length encoded data
+
+    Returns
+    ----------
+    rank 1 int array of binary run length encoded data corresponding to
+    element-wise not of the input.
+    """
+    ...
+
+def merge_brle_lengths(lengths):  # -> list[Unknown] | list[int]:
+    """Inverse of split_long_brle_lengths."""
+    ...
+
+def split_long_brle_lengths(lengths, dtype=...):  # -> NDArray[int64] | NDArray[Unknown]:
+    """
+    Split lengths that exceed max dtype value.
+
+    Lengths `l` are converted into [max_val, 0] * l // max_val + [l % max_val]
+
+    e.g. for dtype=np.uint8 (max_value == 255)
+    ```
+    split_long_brle_lengths([600, 300, 2, 6], np.uint8) == \
+        [255, 0, 255, 0, 90, 255, 0, 45, 2, 6]
+    ```
+    """
+    ...
+
+def dense_to_brle(dense_data, dtype=...):  # -> NDArray[int64] | NDArray[Unknown]:
+    """
+    Get the binary run length encoding of `dense_data`.
+
+    Parameters
+    ----------
+    dense_data: rank 1 bool array of data to encode.
+    dtype: numpy int type.
+
+    Returns
+    ----------
+    Binary run length encoded rank 1 array of dtype `dtype`.
+
+    Raises
+    ----------
+    ValueError if dense_data is not a rank 1 bool array.
+    """
+    ...
+
+_ft = np.array([False, True], dtype=bool)
+
+def brle_to_dense(brle_data, vals=...):  # -> ndarray[Any, dtype[Any]]:
+    """Decode binary run length encoded data to dense.
+
+    Parameters
+    ----------
+    brle_data: BRLE counts of False/True values
+    vals: if not `None`, a length 2 array/list/tuple with False/True substitute
+        values, e.g. brle_to_dense([2, 3, 1, 0], [7, 9]) == [7, 7, 9, 9, 9, 7]
+
+    Returns
+    ----------
+    rank 1 dense data of dtype `bool if vals is None else vals.dtype`
+
+    Raises
+    ----------
+    ValueError if vals is not None and shape is not (2,)
+    """
+    ...
+
+def rle_to_dense(rle_data, dtype=...):  # -> NDArray[int64]:
+    """Get the dense decoding of the associated run length encoded data."""
+    ...
+
+def dense_to_rle(dense_data, dtype=...):  # -> ndarray[Any, dtype[int64]]:
+    """Get run length encoding of the provided dense data."""
+    ...
+
+def split_long_rle_lengths(
+    values, lengths, dtype=...
+):  # -> tuple[NDArray[Unknown] | Unknown, NDArray[int64] | NDArray[Unknown]]:
+    """
+    Split long lengths in the associated run length encoding.
+
+    e.g.
+    ```python
+    split_long_rle_lengths([5, 300, 2, 12], np.uint8) == [5, 255, 5, 45, 2, 12]
+    ```
+
+    Parameters
+    ----------
+    values: values column of run length encoding, or `rle[::2]`
+    lengths: counts in run length encoding, or `rle[1::2]`
+    dtype: numpy data type indicating the maximum value.
+
+    Returns
+    ----------
+    values, lengths associated with the appropriate splits. `lengths` will be
+    of type `dtype`, while `values` will be the same as the value passed in.
+    """
+    ...
+
+def merge_rle_lengths(values, lengths):  # -> tuple[list[Unknown], list[Unknown]]:
+    """Inverse of split_long_rle_lengths except returns normal python lists."""
+    ...
+
+def brle_to_rle(brle, dtype=...):  # -> ndarray[Any, dtype[int64]]:
+    ...
+ +def brle_to_brle(brle, dtype=...): # -> NDArray[int64] | NDArray[Unknown] | NDArray[Any]: + """ + Almost the identity function. + + Checks for possible merges and required splits. + """ + ... + +def rle_to_rle(rle, dtype=...): # -> ndarray[Any, dtype[int64]]: + """ + Almost the identity function. + + Checks for possible merges and required splits. + """ + ... + +def sorted_rle_gather_1d(rle_data, ordered_indices): # -> Generator[Unbound | Unknown, Any, None]: + """ + Gather brle_data at ordered_indices. + + This is equivalent to `rle_to_dense(brle_data)[ordered_indices]` but avoids + the decoding. + + Parameters + ---------- + brle_data: iterable of run-length-encoded data. + ordered_indices: iterable of ints in ascending order. + + Returns + ---------- + `brle_data` iterable of values at the dense indices, same length as + ordered indices. + """ + ... + +def rle_mask(rle_data, mask): # -> Generator[Unknown, Any, None]: + """ + Perform masking of the input run-length data. + + Parameters + ---------- + rle_data: iterable of run length encoded data + mask: iterable of bools corresponding to the dense mask. + + Returns + ---------- + iterable of dense values of rle_data wherever mask is True. + """ + ... + +def brle_mask(rle_data, mask): # -> Generator[bool, Any, None]: + """ + Perform masking of the input binary run-length data. + + Parameters + ---------- + brle_data: iterable of binary run length encoded data + mask: iterable of bools corresponding to the dense mask. + + Returns + ---------- + iterable dense values of brle_data wherever mask is True. + """ + ... + +def rle_gatherer_1d(indices): # -> (data: Unknown, dtype: Unknown | None = None) -> NDArray[float64]: + """ + Get a gather function at the given indices. + + Because gathering on RLE data requires sorting, for instances where + gathering at the same indices on different RLE data this can save the + sorting process. + + If only gathering on a single RLE iterable, use `rle_gather_1d`. 
+ + Parameters + ---------- + indices: iterable of integers + + Returns + ---------- + gather function, mapping `(rle_data, dtype=None) -> values`. + `values` will have the same length as `indices` and dtype provided, + or rle_data.dtype if no dtype is provided. + """ + ... + +def rle_gather_1d(rle_data, indices, dtype=...): # -> NDArray[float64]: + """ + Gather RLE data values at the provided dense indices. + + This is equivalent to `rle_to_dense(rle_data)[indices]` but the + implementation does not require the construction of the dense array. + + If indices is known to be in order, use `sorted_gather_1d`. + + Parameters + ---------- + rle_data: run length encoded data + indices: dense indices + dtype: numpy dtype. If not provided, uses rle_data.dtype + + Returns + ---------- + numpy array, dense data at indices, same length as indices and dtype as + rle_data + """ + ... + +def sorted_brle_gather_1d(brle_data, ordered_indices): # -> Generator[bool, Any, None]: + """ + Gather brle_data at ordered_indices. + + This is equivalent to `brle_to_dense(brle_data)[ordered_indices]` but + avoids the decoding. + + Parameters + ---------- + raw_data: iterable of run-length-encoded data. + ordered_indices: iterable of ints in ascending order. + + Returns + ---------- + `raw_data` iterable of values at the dense indices, same length as + ordered indices. + """ + ... + +def brle_gatherer_1d(indices): # -> partial[NDArray[float64]]: + """ + Get a gather function at the given indices. + + Because gathering on BRLE data requires sorting, for instances where + gathering at the same indices on different RLE data this can save the + sorting process. + + If only gathering on a single RLE iterable, use `brle_gather_1d`. + + Parameters + ---------- + indices: iterable of integers + + Returns + ---------- + gather function, mapping `(rle_data, dtype=None) -> values`. + `values` will have the same length as `indices` and dtype provided, + or rle_data.dtype if no dtype is provided. 
+ """ + ... + +def brle_gather_1d(brle_data, indices): # -> NDArray[float64]: + """ + Gather BRLE data values at the provided dense indices. + + This is equivalent to `rle_to_dense(rle_data)[indices]` but the + implementation does not require the construction of the dense array. + + If indices is known to be in order, use `sorted_brle_gather_1d`. + + Parameters + ---------- + rle_data: run length encoded data + indices: dense indices + + Returns + ---------- + numpy array, dense data at indices, same length as indices and dtype as + rle_data + """ + ... + +def brle_reverse(brle_data): + """Equivalent to dense_to_brle(brle_to_dense(brle_data)[-1::-1]).""" + ... + +def rle_reverse(rle_data): # -> NDArray[Unknown]: + """Get the rle encoding of the reversed dense array.""" + ... + +def rle_to_sparse(rle_data): # -> tuple[list[Unknown], list[Unknown]] | tuple[NDArray[Unknown], NDArray[Unknown]]: + """Get dense indices associated with non-zeros.""" + ... + +def brle_to_sparse(brle_data, dtype=...): # -> NDArray[int64]: + ... + +def rle_strip(rle_data): # -> tuple[ndarray[Any, dtype[Unknown]], tuple[Any | Literal[0], Any | Literal[0]]]: + """ + Remove leading and trailing zeros. + + Parameters + ---------- + rle_data: run length encoded data + + Returns + ---------- + (stripped_rle_data, padding) + stripped_rle_data: rle data without any leading or trailing zeros + padding: 2-element dense padding + """ + ... + +def brle_strip(brle_data): # -> tuple[Unknown, tuple[Unknown | Literal[0], Unknown | Literal[0]]]: + """ + Remove leading and trailing zeros. + + Parameters + ---------- + brle_data: binary run length encoded data. + + Returns + ---------- + (stripped_brle_data, padding) + stripped_brle_data: rle data without any leading or trailing zeros + padding: 2-element dense padding + """ + ... 
diff --git a/typings/trimesh/voxel/transforms.pyi b/typings/trimesh/voxel/transforms.pyi new file mode 100644 index 00000000..9d41c1dc --- /dev/null +++ b/typings/trimesh/voxel/transforms.pyi @@ -0,0 +1,127 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Optional + +from .. import caching + +class Transform: + """ + Class for caching metadata associated with 4x4 transformations. + + The transformation matrix is used to define relevant properties + for the voxels, including pitch and origin. + """ + + def __init__(self, matrix, datastore: Optional[caching.DataStore] = ...) -> None: + """ + Initialize with a transform. + + Parameters + ----------- + matrix : (4, 4) float + Homogeneous transformation matrix + datastore + If passed store the actual values in a reference to + another datastore. + """ + ... + def __hash__(self) -> int: + """ + Get the hash of the current transformation matrix. + + Returns + ------------ + hash : str + Hash of transformation matrix + """ + ... + @property + def translation(self): + """ + Get the translation component of the matrix + + Returns + ------------ + translation : (3,) float + Cartesian translation + """ + ... + @property + def matrix(self): + """ + Get the homogeneous transformation matrix. + + Returns + ------------- + matrix : (4, 4) float + Transformation matrix + """ + ... + @matrix.setter + def matrix(self, values): # -> None: + """ + Set the homogeneous transformation matrix. + + Parameters + ------------- + matrix : (4, 4) float + Transformation matrix + """ + ... + @caching.cache_decorator + def scale(self): # -> NDArray[Unknown]: + """ + Get the scale factor of the current transformation. + + Returns + ------------- + scale : (3,) float + Scale factor from the matrix + """ + ... + @caching.cache_decorator + def pitch(self): # -> Any: + ... + @caching.cache_decorator + def unit_volume(self): # -> Any: + """Volume of a transformed unit cube.""" + ... 
+
+ def apply_transform(self, matrix): # -> Self@Transform:
+ """Mutate the transform in-place and return self."""
+ ...
+ def apply_translation(self, translation): # -> Self@Transform:
+ """Mutate the transform in-place and return self."""
+ ...
+ def apply_scale(self, scale): # -> Self@Transform:
+ """Mutate the transform in-place and return self."""
+ ...
+ def transform_points(self, points): # -> ndarray[Any, dtype[float64]] | Any:
+ """
+ Apply the transformation to points (not in-place).
+
+ Parameters
+ ----------
+ points: (n, 3) float
+ Points in cartesian space
+
+ Returns
+ ----------
+ transformed : (n, 3) float
+ Points transformed by matrix
+ """
+ ...
+ def inverse_transform_points(self, points): # -> ndarray[Any, dtype[float64]] | Any:
+ """Apply the inverse transformation to points (not in-place)."""
+ ...
+ @caching.cache_decorator
+ def inverse_matrix(self): ...
+ def copy(self): # -> Transform:
+ ...
+ @caching.cache_decorator
+ def is_identity(self): # -> bool:
+ """
+ Flag indicating whether this transformation is sufficiently close to eye(4).
+ """
+ ...