forked from Roarou/DLAD
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtask4.py
More file actions
111 lines (87 loc) · 4.5 KB
/
task4.py
File metadata and controls
111 lines (87 loc) · 4.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
import matplotlib.pyplot as plt
import os
import numpy as np
import math
import data_utils as d
def main(image_number):
    """Project Velodyne lidar points onto the camera-2 image for a KITTI frame.

    Shows two matplotlib figures: the raw depth-colored projection, and the
    projection after compensating each point for the vehicle's ego-motion
    during the lidar sweep (motion "unwarping" from the OXTS velocity and
    angular-rate data).

    Parameters
    ----------
    image_number : int
        Frame index within the drive (expected 0-999); used to build the
        zero-padded KITTI file names.
    """
    # KITTI frame file names are 10-digit zero-padded indices; the paths
    # below hard-code a '0000000' prefix, so only 3 digits are formatted
    # here. '%03d' replaces the original if/elif chain, which left the
    # variable unbound (NameError) for inputs outside its branches.
    image_number_string = '%03d' % image_number

    # --- Load the lidar point cloud ------------------------------------
    data_path = os.path.join('data/problem_4/velodyne_points/data/0000000%s.bin' % image_number_string)
    lidar = d.load_from_bin(data_path)
    x_lidar = lidar[:, 0].flatten()
    y_lidar = lidar[:, 1].flatten()
    z_lidar = lidar[:, 2].flatten()
    lidar_length = x_lidar.shape[0]

    # --- Project lidar points into the camera frame --------------------
    R, T = d.calib_velo2cam('data/problem_4/calib_velo_to_cam.txt')
    cam_coord = np.matmul(R, lidar.T) + np.tile(T, (1, lidar.shape[0]))
    # From cam0 into the cam2 image plane, with homogeneous divide.
    P = d.calib_cam2cam('data/problem_4/calib_cam_to_cam.txt', '02')
    cam2 = np.matmul(P, np.vstack((cam_coord, np.ones(np.shape(cam_coord)[1]))))
    cam2 = cam2 / cam2[2, :].reshape(1, -1)
    # Only points in front of the vehicle (x > 0 in the lidar frame) are
    # visible to the camera; reuse this mask everywhere below.
    forward = lidar[:, 0] > 0
    x_cam = cam2.T[forward][:, 0]
    y_cam = cam2.T[forward][:, 1]

    # Color each point by its Euclidean distance from the sensor.
    dist = np.sqrt(x_lidar ** 2 + y_lidar ** 2 + z_lidar ** 2).reshape(-1, 1)
    color = d.depth_color(dist, dist[forward].min(), dist[forward].max())[forward].reshape(-1, 1)
    image = plt.imread('data/problem_4/image_02/data/0000000%s.png' % image_number_string)

    # --- Uncorrected plot ----------------------------------------------
    im_proj = d.print_projection_plt(np.vstack((x_cam, y_cam)), color, (image * 255).astype(np.uint8))
    plt.imshow(im_proj)
    plt.show()

    # --- Motion-distortion correction ----------------------------------
    velocity = d.load_oxts_velocity('data/problem_4/oxts/data/0000000%s.txt' % image_number_string)
    angular_rate_f, angular_rate_l, angular_rate_u = d.load_oxts_angular_rate(
        'data/problem_4/oxts/data/0000000%s.txt' % image_number_string)
    t0 = d.compute_timestamps('data/problem_4/velodyne_points/timestamps_start.txt', image_number)
    tf = d.compute_timestamps('data/problem_4/velodyne_points/timestamps_end.txt', image_number)

    # Bring the lidar points into the IMU frame (inverse of imu->velo).
    R_imu_to_velo, T_imu_to_velo = d.calib_velo2cam('data/problem_4/calib_imu_to_velo.txt')
    imu_coord = np.matmul(np.linalg.inv(R_imu_to_velo), lidar.T - np.tile(T_imu_to_velo, (1, lidar.shape[0])))
    imu_ext = np.vstack((imu_coord, np.ones(imu_coord.shape[1])))

    # A point's azimuth tells when during the sweep it was measured.
    # arctan2 yields the full (-pi, pi] range; the original arctan(-y/x)
    # collapsed it to (-pi/2, pi/2), mis-timing points behind the vehicle
    # (those points are filtered out of the plot, so the visible output
    # is unchanged).
    angle = np.arctan2(-y_lidar, x_lidar)
    T_tot = tf - t0
    dt = (angle / (2 * np.pi)) * T_tot

    # Undo ego-motion per point: yaw about the up axis by the angular
    # rate integrated over dt, translate by velocity * dt.
    coord_rect = np.zeros(np.shape(imu_ext))
    for i in range(lidar_length):
        teta = dt[i] * angular_rate_u
        dx = velocity[0] * dt[i]
        dy = velocity[1] * dt[i]
        dz = velocity[2] * dt[i]
        Def = np.array([[math.cos(teta), -math.sin(teta), 0, dx],
                        [math.sin(teta), math.cos(teta), 0, dy],
                        [0, 0, 1, dz],
                        [0, 0, 0, 1]])
        coord_rect[:, i] = np.matmul(Def, imu_ext[:, i])
    coord_rect = coord_rect.T

    # Rectified coordinates back to Cartesian (homogeneous divide) ...
    x_rect = coord_rect[:, 0] / coord_rect[:, 3]
    y_rect = coord_rect[:, 1] / coord_rect[:, 3]
    z_rect = coord_rect[:, 2] / coord_rect[:, 3]
    # ... and back into the velodyne frame.
    lidar_rect = np.vstack((x_rect, y_rect, z_rect))
    lidar_rect = np.matmul(R_imu_to_velo, lidar_rect) + np.tile(T_imu_to_velo, (1, lidar.shape[0]))
    x_rect = lidar_rect[0, :]
    y_rect = lidar_rect[1, :]
    z_rect = lidar_rect[2, :]
    depth_ = np.sqrt(x_rect ** 2 + y_rect ** 2 + z_rect ** 2)

    # Project the rectified points onto the cam2 image.
    cam_coord_rect = np.matmul(R, lidar_rect) + np.tile(T, (1, lidar_rect.shape[1]))
    cam2_rect = np.matmul(P, np.vstack((cam_coord_rect, np.ones(np.shape(cam_coord_rect)[1]))))
    cam2_rect = cam2_rect / cam2_rect[2, :].reshape(1, -1)
    x_cam_rect = cam2_rect.T[forward][:, 0]
    y_cam_rect = cam2_rect.T[forward][:, 1]
    # Color by the *corrected* depth. The original passed the uncorrected
    # `dist` here while taking min/max from the corrected `depth_`, which
    # was inconsistent.
    color_ = d.depth_color(depth_.reshape(-1, 1), depth_[forward].min(),
                           depth_[forward].max())[forward].reshape(-1, 1)

    # --- Corrected plot ------------------------------------------------
    im_proj_rect = d.print_projection_plt(np.vstack((x_cam_rect, y_cam_rect)), color_, (image * 255).astype(np.uint8))
    plt.imshow(im_proj_rect)
    plt.show()
# Script entry point: run the projection demo on frame 37 of the drive.
if __name__ == '__main__':
    main(37)