import numpy as np
def calculate_time(x_values):
    """Calculates the total travel time given the entry points."""
    speeds = [9, 8, 7, 6, 5, 10]
    marsh_width = 10 * np.sqrt(2)  # East-West width of each marsh strip
    # Ensure x_values has at least 6 elements
    if len(x_values) < 6:
        raise ValueError("x_values should have at least 6 elements.")
    # Prepend the fixed starting point x0 = 0 (handles both lists and numpy arrays)
    x = np.concatenate(([0.0], np.asarray(x_values, dtype=float)))
    total_time = 0.0
    for i in range(len(speeds)):
        # East-West distance covered in this region
        dx = marsh_width if i < 5 else (50 + x[5]) - (5 * marsh_width)
        # North-South distance covered in this segment
        if i + 1 < len(x):
            dy = abs(x[i + 1] - x[i])
        else:
            dy = 0.0  # Guard against out-of-range indexing
        # Straight-line distance travelled in this segment
        distance = np.sqrt(dx**2 + dy**2)
        # Time taken for this segment
        total_time += distance / speeds[i]
    # Double the accumulated time, then subtract one copy of the last segment
    # so that it is only counted once.
    return total_time * 2 - calculate_time_last_segment(x_values)

def calculate_time_last_segment(x_values):
    """Calculates the travel time for the last segment on its own."""
    speeds = [10]
    # Ensure x_values has at least 6 elements
    if len(x_values) < 6:
        raise ValueError("x_values should have at least 6 elements.")
    # Prepend the fixed starting point x0 = 0 (handles both lists and numpy arrays)
    x = np.concatenate(([0.0], np.asarray(x_values, dtype=float)))
    total_time = 0.0
    i = 5
    # East-West distance covered in the last region
    dx = (50 + x[5]) - (5 * 10 * np.sqrt(2))
    # North-South distance covered in this segment
    if i + 1 < len(x):
        dy = abs(x[i + 1] - x[i])
    else:
        dy = 0.0  # Guard against out-of-range indexing
    # Straight-line distance travelled in this segment
    distance = np.sqrt(dx**2 + dy**2)
    total_time += distance / speeds[0]  # Only one speed applies in this helper
    return total_time

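# Added spot-check (not part of the original script; _demo_x is an illustrative
# helper name): evaluate the objective on the straight-line guess used further
# below, just to confirm the two time functions are wired together and return a
# finite, positive value.
_demo_x = [5 * np.sqrt(2) * i for i in range(1, 6)] + [0.0]
print(f"Spot-check, straight-line guess: {calculate_time(_demo_x):.6f} days")
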
def numerical_gradient(func, x_values, h=1e-6):
    """Calculates the numerical gradient of a function."""
    gradient = []
    for i in range(len(x_values)):
        x_plus_h = x_values.copy()
        x_minus_h = x_values.copy()
        x_plus_h[i] += h
        x_minus_h[i] -= h
        partial_derivative = (func(x_plus_h) - func(x_minus_h)) / (2 * h)
        gradient.append(partial_derivative)
    return np.array(gradient)

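# Added sanity check for the central-difference helper (illustrative only;
# _probe and _estimate are arbitrary names and values introduced here): for
# f(v) = sum(v_i^2) the analytic gradient is 2*v, so the numerical estimate
# should agree closely with it.
_probe = [1.0, -2.0, 0.5]
_estimate = numerical_gradient(lambda v: sum(vi**2 for vi in v), _probe)
assert np.allclose(_estimate, [2.0, -4.0, 1.0], atol=1e-4), _estimate
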
def gradient_descent(initial_x, learning_rate, num_iterations, h=1e-5):
    """Performs projected gradient descent on the travel-time objective."""
    x_values = np.array(initial_x, dtype=float)  # Work on a float numpy array
    for i in range(num_iterations):
        grad = numerical_gradient(calculate_time, x_values, h)
        x_values = x_values - learning_rate * grad
        x_values = np.maximum(0, x_values)  # Constraint: x_values must be non-negative
        # Learning-rate annealing with a lower bound
        learning_rate = max(learning_rate * 0.999, 1e-4)
        if i % 100 == 0:
            time = calculate_time(x_values)
            print(f"Iteration {i}: Time = {time:.10f}")
    return x_values

# Initial guess (straight line path)
initial_x = [5 * np.sqrt(2) * i for i in range(1, 6)]
initial_x.append(0.0)
# Gradient Descent parameters
learning_rate = 0.1 # Tunable parameter
num_iterations = 2000
final_x = gradient_descent(initial_x, learning_rate, num_iterations)
final_time = calculate_time(final_x)
print(f"Final x values: {final_x}")
print(f"Shortest time: {final_time:.10f} days")