path: root/controllers/final/utilities.py
import numpy as np
from constants import ARM_OFFSET

# Convert radian measurements to degrees.
def deg(radians):
    return radians * 180 / np.pi

# Clamp a value to the range [-bound, +bound].
def bound_by(variable, bound):
    return max(-bound, min(bound, variable))

# Calculate the Euclidean distance between two points, using only the
# x and y coordinates.
def euclidean(u, v):
    return np.sqrt((u[0] - v[0])**2 + (u[1] - v[1])**2)

# Calculate the position to which the robot should move to grab the jar, given
# the position of the jar provided by the camera and the robot's current
# position.
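# Worked example (hypothetical numbers): if ARM_OFFSET were 0.5, a jar at
# (2.0, 0.0) and a robot at (0.0, 0.0) would give a grasp point of (1.5, 0.0):
# the robot stops ARM_OFFSET short of the jar along the line joining them.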
def grasp_point(jar_pos, robot_pos, jar_offset=0.):
    d_dist = jar_pos - robot_pos[:2] # dx,dy between the jar and the robot.
    distance = np.linalg.norm(d_dist) # Euclidean distance.
    d_dist *= (distance - ARM_OFFSET) / distance # Stop ARM_OFFSET short of the jar.
    d_dist[0] += jar_offset # Add the jar_offset to the x-axis.
    return robot_pos[:2] + d_dist

# Wrap an angle to the range [-pi, pi).
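# For example, fix_arctan(3 * np.pi / 2) returns -np.pi / 2, since both angles
# describe the same heading.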
def fix_arctan(radians):
    radians += np.pi
    radians %= 2 * np.pi
    return radians - np.pi

# Convert from robot coordinates to world coordinates.
def robot_to_world_coords(robot_coor, gps_pos, theta):
    c, s = np.cos(theta), np.sin(theta)
    matrix = np.array([[c, -s], [s, c]]) # Rotation matrix for theta.
    # Rotate by theta and translate by the robot's position to get world coords.
    return matrix @ robot_coor[:2] + gps_pos[:2]
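
# Minimal usage sketch (not part of the controller logic): the numbers below
# are made up purely to illustrate the helpers above, and the example assumes
# the robot's x-axis points forward in its own frame.
if __name__ == "__main__":
    # A point 1 m ahead of a robot at (2, 3) facing theta = pi/2 should land
    # at (2, 4) in world coordinates.
    print(robot_to_world_coords(np.array([1.0, 0.0]),
                                np.array([2.0, 3.0]),
                                np.pi / 2))      # -> approximately [2. 4.]
    print(deg(np.pi / 2))                        # -> 90.0
    print(bound_by(1.7, 1.0))                    # -> 1.0
    print(euclidean((0.0, 0.0), (3.0, 4.0)))     # -> 5.0
    print(fix_arctan(3 * np.pi / 2))             # -> approximately -1.5708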