6 Robot Arm Usage Scenario Cases
This chapter presents classic robotic arm use cases to demonstrate the product in representative scenarios. It covers typical applications of the robotic arm in different fields, highlighting the product's versatility and applicability. Through these cases, users can gain a deeper understanding of the flexibility and performance of the robotic arm in practice and use them as a reference for their own applications.
Mobile wooden block grasping case
The related dependency files can be downloaded from https://github.com/elephantrobotics/mercury_demo. The complete demo script is listed below.
```
import time
import typing as T

import cv2
import numpy as np
import stag
from numpy.typing import NDArray
from pymycobot import Mercury

from uvc_camera import UVCCamera
from marker_utils import *
# Gripper tool length (mm)
Tool_LEN = 175
# Distance from the camera center to the flange (mm)
Camera_LEN = 78
np.set_printoptions(suppress=True, formatter={'float_kind': '{:.2f}'.format})
# Camera calibration file (intrinsic matrix and distortion coefficients)
camera_params = np.load("src/camera_params.npz")
mtx, dist = camera_params["mtx"], camera_params["dist"]
# STag marker size (mm)
MARKER_SIZE = 32
# Serial port of the left arm
ml = Mercury("/dev/left_arm")
# Convert a rotation matrix to Euler angles
def CvtRotationMatrixToEulerAngle(pdtRotationMatrix):
    pdtEulerAngle = np.zeros(3)
    pdtEulerAngle[2] = np.arctan2(pdtRotationMatrix[1, 0], pdtRotationMatrix[0, 0])
    fCosRoll = np.cos(pdtEulerAngle[2])
    fSinRoll = np.sin(pdtEulerAngle[2])
    pdtEulerAngle[1] = np.arctan2(-pdtRotationMatrix[2, 0],
                                  (fCosRoll * pdtRotationMatrix[0, 0]) + (fSinRoll * pdtRotationMatrix[1, 0]))
    pdtEulerAngle[0] = np.arctan2((fSinRoll * pdtRotationMatrix[0, 2]) - (fCosRoll * pdtRotationMatrix[1, 2]),
                                  (-fSinRoll * pdtRotationMatrix[0, 1]) + (fCosRoll * pdtRotationMatrix[1, 1]))
    return pdtEulerAngle
# Convert Euler angles to a rotation matrix
def CvtEulerAngleToRotationMatrix(ptrEulerAngle):
    ptrSinAngle = np.sin(ptrEulerAngle)
    ptrCosAngle = np.cos(ptrEulerAngle)
    ptrRotationMatrix = np.zeros((3, 3))
    ptrRotationMatrix[0, 0] = ptrCosAngle[2] * ptrCosAngle[1]
    ptrRotationMatrix[0, 1] = ptrCosAngle[2] * ptrSinAngle[1] * ptrSinAngle[0] - ptrSinAngle[2] * ptrCosAngle[0]
    ptrRotationMatrix[0, 2] = ptrCosAngle[2] * ptrSinAngle[1] * ptrCosAngle[0] + ptrSinAngle[2] * ptrSinAngle[0]
    ptrRotationMatrix[1, 0] = ptrSinAngle[2] * ptrCosAngle[1]
    ptrRotationMatrix[1, 1] = ptrSinAngle[2] * ptrSinAngle[1] * ptrSinAngle[0] + ptrCosAngle[2] * ptrCosAngle[0]
    ptrRotationMatrix[1, 2] = ptrSinAngle[2] * ptrSinAngle[1] * ptrCosAngle[0] - ptrCosAngle[2] * ptrSinAngle[0]
    ptrRotationMatrix[2, 0] = -ptrSinAngle[1]
    ptrRotationMatrix[2, 1] = ptrCosAngle[1] * ptrSinAngle[0]
    ptrRotationMatrix[2, 2] = ptrCosAngle[1] * ptrCosAngle[0]
    return ptrRotationMatrix
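# Note (editor's addition): both helpers above use the ZYX convention, i.e.
# R = Rz(rz) @ Ry(ry) @ Rx(rx), so converting angles to a matrix and back is
# consistent away from the gimbal-lock singularity at ry = ±90°.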
# Convert a coordinate (x, y, z, rx, ry, rz), rotations in rad, into a homogeneous transformation matrix
def Transformation_matrix(coord):
    position_robot = coord[:3]
    pose_robot = coord[3:]
    # Convert the Euler angles to a rotation matrix
    RBT = CvtEulerAngleToRotationMatrix(pose_robot)
    PBT = np.array([[position_robot[0]],
                    [position_robot[1]],
                    [position_robot[2]]])
    temp = np.concatenate((RBT, PBT), axis=1)
    array_1x4 = np.array([[0, 0, 0, 1]])
    # Stack the rotation/translation block and the [0, 0, 0, 1] row into a 4x4 matrix
    matrix = np.concatenate((temp, array_1x4), axis=0)
    return matrix
def Eyes_in_hand_left(coord, camera):
    # Object position in the camera frame
    Position_Camera = np.transpose(camera[:3])
    # Homogeneous transform of the current arm pose (tool frame in the base frame)
    Matrix_BT = Transformation_matrix(coord)
    # Hand-eye matrix (camera frame expressed in the tool frame)
    Matrix_TC = np.array([[0, -1, 0, Camera_LEN],
                          [1, 0, 0, 0],
                          [0, 0, 1, -Tool_LEN],
                          [0, 0, 0, 1]])
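    # Editor's note: the rotation block of Matrix_TC is a fixed 90-degree rotation
    # about z between the camera and tool axes; the translation places the camera
    # Camera_LEN along the tool x-axis and Tool_LEN back along the tool z-axis.
    # Overall this function computes P_base = Matrix_BT @ Matrix_TC @ P_camera.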
    # Object coordinates in the camera frame, as a homogeneous vector
    Position_Camera = np.append(Position_Camera, 1)
    # Object coordinates in the base frame
    Position_B = Matrix_BT @ Matrix_TC @ Position_Camera
    return Position_B
# Wait for the robot arm to finish moving
def waitl():
    time.sleep(0.2)
    while ml.is_moving():
        time.sleep(0.03)
# Get the object coordinates in the camera frame
def calc_markers_base_position(corners: NDArray, ids: T.List, marker_size: int, mtx: NDArray, dist: NDArray) -> T.List:
    if len(corners) == 0:
        return []
    # Solve PnP on the marker corner points to get the rotation and translation vectors
    rvecs, tvecs = solve_marker_pnp(corners, marker_size, mtx, dist)
    for i, tvec, rvec in zip(ids, tvecs, rvecs):
        tvec = tvec.squeeze().tolist()
        rvec = rvec.squeeze().tolist()
        rotvector = np.array([[rvec[0], rvec[1], rvec[2]]])
        # Convert the rotation vector to a rotation matrix
        Rotation = cv2.Rodrigues(rotvector)[0]
        # Convert the rotation matrix to Euler angles
        Euler = CvtRotationMatrixToEulerAngle(Rotation)
        # Object coordinates in the camera frame
        target_coords = np.array([tvec[0], tvec[1], tvec[2], Euler[0], Euler[1], Euler[2]])
        return target_coords
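# Editor's note: calc_markers_base_position returns inside the loop above, so only
# the pose of the first detected marker is used by this demo.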
if __name__ == "__main__":
    # Create the camera object (device id 5)
    camera = UVCCamera(5, mtx, dist)
    # Open the camera
    camera.capture()
    # Observation pose (joint angles) of the left arm
    origin_anglesL = [-44.24, 15.56, 0.0, -102.59, 65.28, 52.06, 23.49]
    # Set the gripper movement mode
    ml.set_gripper_mode(0)
    # Set the tool coordinate system to the gripper tip
    ml.set_tool_reference([0, 0, Tool_LEN, 0, 0, 0])
    # Use the tool frame (instead of the flange) as the end for coordinate commands
    ml.set_end_type(1)
    # Movement speed
    sp = 40
    # Move to the observation point
    ml.send_angles(origin_anglesL, sp)
    # Wait for the robot arm to finish moving
    waitl()
    # Refresh the camera frame buffer
    camera.update_frame()
    # Get the current color frame
    frame = camera.color_frame()
    # Detect the STag markers in the frame and get their corner points and ids
    (corners, ids, rejected_corners) = stag.detectMarkers(frame, 11)
    # Get the object coordinates in the camera frame
    marker_pos_pack = calc_markers_base_position(corners, ids, MARKER_SIZE, mtx, dist)
    # Get the current base coordinates of the robot arm
    cur_coords = np.array(ml.get_base_coords())
    # Convert the rotation components from degrees to radians
    cur_bcl = cur_coords.copy()
    cur_bcl[-3:] *= (np.pi / 180)
    # Transform the object coordinates from the camera frame to the base frame
    fact_bcl = Eyes_in_hand_left(cur_bcl, marker_pos_pack)
    target_coords = cur_coords.copy()
    target_coords[0] = fact_bcl[0]
    target_coords[1] = fact_bcl[1]
    target_coords[2] = fact_bcl[2] + 50
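    # (Editor's note: the +50 offset keeps the target above the block so the
    # gripper approaches from overhead before descending.)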
    # Move the robot arm above the marker
    ml.send_base_coords(target_coords, 30)
    # Wait for the robot arm to finish moving
    waitl()
    # Open the gripper
    ml.set_gripper_value(100, 100)
    # Move the robot arm down along the z-axis to the grasping height
    ml.send_base_coord(3, fact_bcl[2], 10)
    # Wait for the robot arm to finish moving
    waitl()
    # Close the gripper
    ml.set_gripper_value(20, 100)
    # Wait for the gripper to close
    time.sleep(2)
    # Lift the block back up
    ml.send_base_coord(3, fact_bcl[2] + 50, 10)
```
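The script loads its camera intrinsics from src/camera_params.npz (arrays mtx and dist). If you need to regenerate this file for your own camera, a standard OpenCV chessboard calibration can produce it. The snippet below is only a rough sketch under assumed settings: the image folder, chessboard size, and square size are placeholders, not values from the mercury_demo repository.
```
import glob

import cv2
import numpy as np

BOARD = (9, 6)     # inner corners per chessboard row/column (assumed)
SQUARE = 20.0      # chessboard square size in mm (assumed)

# Template of the 3D corner positions on the board plane (z = 0)
objp = np.zeros((BOARD[0] * BOARD[1], 3), np.float32)
objp[:, :2] = np.mgrid[0:BOARD[0], 0:BOARD[1]].T.reshape(-1, 2) * SQUARE

obj_points, img_points = [], []
gray = None
for path in glob.glob("calib_images/*.jpg"):   # assumed image folder
    gray = cv2.cvtColor(cv2.imread(path), cv2.COLOR_BGR2GRAY)
    found, corners = cv2.findChessboardCorners(gray, BOARD)
    if found:
        obj_points.append(objp)
        img_points.append(corners)

# Calibrate and save the intrinsic matrix and distortion coefficients
ret, mtx, dist, _, _ = cv2.calibrateCamera(
    obj_points, img_points, gray.shape[::-1], None, None)
np.savez("src/camera_params.npz", mtx=mtx, dist=dist)
```
In practice you would also check ret (the RMS reprojection error) and capture enough views of the board at different angles and distances before trusting the result.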