Net Elevation Change: -17.965383836 m
Total Uphill Travel: 6337.844659619 m
Total Downhill Travel: -6355.810043455 m
We should also plot elevation when accounting for the curvature of the Earth.
Features Analysis
We count the number of stop lights, stop signs, and flashing yellows:
Code
# Tally route features by the condition that ends each segment.
def _count_end_condition(condition):
    # Number of route segments whose 'segment_end_condition' equals `condition`.
    return race_route[race_route['segment_end_condition'] == condition].shape[0]

num_stop_lights = _count_end_condition('traffic_light')
num_stop_signs = _count_end_condition('red_sign')
num_yield_signs = _count_end_condition('yield_sign_or_blinking_yellow')
print(f'Number of Stop Lights: {num_stop_lights}')
print(f'Number of Stop Signs: {num_stop_signs}')
print(f'Number of Flashing Yellows: {num_yield_signs}')
Number of Stop Lights: 39
Number of Stop Signs: 0
Number of Flashing Yellows: 1
Gravity Analysis
First, plot gravity over distance.
Code
# Plot local gravitational acceleration along the route:
# x-axis is cumulative distance (km), y-axis is g (m/s^2).
plt.plot(route['distance'].cumsum() / 1000, route['gravity'])
plt.xlabel('Distance Traveled (km)')
plt.ylabel('Acceleration due to Gravity (m/s/s)')
plt.title('Acceleration due to Gravity vs Distance')
plt.xlim([0, total_distance / 1000])
set_xticks_race_distance()
# Tight y-window: g only varies by ~0.016 m/s^2 over the route.
plt.ylim([9.782, 9.798])
plt.show()
Now, take weighted averages of gravity:
Code
def _weighted_mean_g(weights):
    # Weighted mean of per-segment gravity using the given per-segment weights.
    return (route['gravity'] * weights).sum() / weights.sum()

distance_weighted_avg_g = _weighted_mean_g(route['distance'])
# grade * distance over uphill segments only — proportional to elevation gained,
# and therefore to the work done against gravity on the way up
uphill_travel = route['grade'].clip(lower=0) * route['distance']
uphill_work_weighted_avg_g = _weighted_mean_g(uphill_travel)
# same quantity over downhill segments only (weights are negative)
downhill_travel = route['grade'].clip(upper=0) * route['distance']
downhill_work_weighted_avg_g = _weighted_mean_g(downhill_travel)
print(f'Distance-Weighted Average g: {distance_weighted_avg_g}')
print(f'Uphill Work-Weighted Average g: {uphill_work_weighted_avg_g}')
print(f'Downhill Work-Weighted Average g: {downhill_work_weighted_avg_g}')
Distance-Weighted Average g: 9.780757340905776
Uphill Work-Weighted Average g: 9.78097156500531
Downhill Work-Weighted Average g: 9.78147959325745
Hill-Climbing Analysis
Plot a distance-weighted distribution of speed limits:
# write speed_limit distribution to integersplt.hist((route['speed_limit'] *3.6).astype(int), bins=101, weights=route['distance'], density=True)plt.gca().yaxis.set_major_formatter(mpl.ticker.PercentFormatter(xmax=1))plt.xlabel('Speed Limit (kph)')plt.ylabel('Density')plt.title('Distance-Weighted Speed Limit Distribution')plt.xlim([20, 140])plt.ylim([0,0.6])plt.show()
Now, we calculate the minimum reasonable time to complete this, assuming a 0.3g acceleration and deceleration. We also stop at stop signs and lights.
Code
# Stop durations in seconds. "True" durations add real-world overhead
# (pulling in and out, restarting) on top of the rules-mandated dwell time.
CONTROL_STOP_RULES_DURATION = 30 * 60
CONTROL_STOP_TRUE_DURATION = CONTROL_STOP_RULES_DURATION + 90
DRIVER_SWAPS_RULES_DURATION = 5 * 60
DRIVER_SWAPS_TRUE_DURATION = DRIVER_SWAPS_RULES_DURATION + 15
RED_SIGN_DURATION = 15
TRAFFIC_LIGHT_DURATION = 60
YIELD_SIGN_DURATION = 10
ACCELERATION_TIME_RESOLUTION = 0.1  # NOTE(review): currently unused


def generate_speed_profile(route, target_speed_kph, acceleration_mpsps=0.5,
                           deceleration_mpsps=0.5, acceptable_speeding_kph=0.0,
                           hot_lap_delay=0.0):
    """Simulate driving the route at a target speed and return the speed profile.

    Behavior modeled:
      * never exceed the local speed limit plus ``acceptable_speeding_kph``
      * stop (and dwell) at red signs, traffic lights, flashing yellows,
        control stops, and driver swaps
      * stop at the end of each driving day (every 9 h of race time)
      * cross the finish line at 60 kph
      * start from rest

    Args:
        route: DataFrame with per-segment 'distance' (m), 'speed_limit' (m/s),
            and 'segment_end_condition' columns.
        target_speed_kph: cruising speed to aim for (kph).
        acceleration_mpsps: acceleration used when speeding up (m/s^2).
        deceleration_mpsps: accepted for interface compatibility; decelerations
            are currently modeled as instantaneous, so this value is unused.
        acceptable_speeding_kph: allowed overshoot above the speed limit (kph).
        hot_lap_delay: offset (hours) added to the simulation start time.

    Returns:
        (profile, speed_limit_profile, eod_locations): the simulated profile
        (distance [m], time [s], speed [m/s], target_speed, acceleration, and a
        'process' tag naming the branch that emitted the row), the piecewise
        speed-limit profile, and the distances of the end-of-day stops.

    Relies on the module-level ``race_distance`` (total race length, meters).
    """
    target_v = target_speed_kph / 3.6
    speeding = acceptable_speeding_kph / 3.6
    a_target = acceleration_mpsps

    # Rows are appended as the simulation advances; 'process' tags the code
    # path that emitted the row (c* cruise, a* accelerate, d* decelerate).
    profile = pd.DataFrame(columns=['distance', 'time', 'speed', 'target_speed', 'acceleration', 'process'])
    speed_limit_profile = pd.DataFrame(columns=['distance', 'speed_limit'])

    control_stops = []
    driver_swaps = []
    red_sign_stops = []
    traffic_light_stops = []
    flashing_yellow_stops = []
    # Driving halts after every 9 h of cumulative race time.
    eod_stop_times = 9 * 3600 * np.arange(1, 10)

    # Record the speed limit only at the distances where it changes.
    for i in range(1, len(route)):
        if route['speed_limit'][i] != route['speed_limit'][i - 1]:
            speed_limit_profile = pd.concat(
                [speed_limit_profile,
                 pd.DataFrame({'distance': route['distance'].cumsum()[i],
                               'speed_limit': route['speed_limit'][i]}, index=[0])],
                ignore_index=True)
    # Assume a 40 kph limit at the very start.
    speed_limit_profile = pd.concat(
        [pd.DataFrame({'distance': 0, 'speed_limit': 40 / 3.6}, index=[0]),
         speed_limit_profile], ignore_index=True)
    speed_limit_profile.sort_values('distance', inplace=True)

    # Collect the cumulative distance of every mandatory stop, by type.
    for i in range(len(route)):
        if route['segment_end_condition'][i] == 'control_stop':
            control_stops.append(route['distance'].cumsum()[i])
        elif route['segment_end_condition'][i] == 'driver_swap':
            driver_swaps.append(route['distance'].cumsum()[i])
        elif route['segment_end_condition'][i] == 'red_sign':
            red_sign_stops.append(route['distance'].cumsum()[i])
        elif route['segment_end_condition'][i] == 'traffic_light':
            traffic_light_stops.append(route['distance'].cumsum()[i])
        elif route['segment_end_condition'][i] == 'yield_sign_or_blinking_yellow':
            flashing_yellow_stops.append(route['distance'].cumsum()[i])

    d_traveled = 0            # meters
    v = 0                     # m/s
    t = hot_lap_delay * 3600  # seconds
    a = 0                     # current acceleration, m/s^2
    stop_type = None
    eod_locations = []
    while d_traveled < race_distance:
        # We are either accelerating or cruising; simulate until the next event:
        # reaching target speed, a speed-limit change, or a point where we stop.
        speed_limit = speed_limit_profile['speed_limit'][speed_limit_profile['distance'] <= d_traveled].iloc[-1]
        next_speed_limit_change_d = speed_limit_profile['distance'][speed_limit_profile['distance'] > d_traveled].iloc[0]
        next_control_stop_d = min([x for x in control_stops if x > d_traveled], default=int(1e15))
        next_driver_swap_d = min([x for x in driver_swaps if x > d_traveled], default=int(1e15))
        next_red_sign_d = min([x for x in red_sign_stops if x > d_traveled], default=int(1e15))
        next_traffic_light_d = min([x for x in traffic_light_stops if x > d_traveled], default=int(1e15))
        next_flashing_yellow_d = min([x for x in flashing_yellow_stops if x > d_traveled], default=int(1e15))

        # Nearest upcoming stop and its type.
        next_stop_d = min(next_control_stop_d, next_driver_swap_d, next_red_sign_d,
                          next_traffic_light_d, next_flashing_yellow_d)
        if next_stop_d == next_control_stop_d:
            stop_type = 'control_stop'
        elif next_stop_d == next_driver_swap_d:
            stop_type = 'driver_swap'
        elif next_stop_d == next_red_sign_d:
            stop_type = 'red_sign'
        elif next_stop_d == next_traffic_light_d:
            stop_type = 'traffic_light'
        elif next_stop_d == next_flashing_yellow_d:
            stop_type = 'flashing_yellow'
        next_eod_stop_time = min([x for x in eod_stop_times if x > t], default=int(1e15))

        actual_target_v = min(target_v, speed_limit + speeding)
        if abs(v - actual_target_v) < 0.1:
            # Cruise until the next stop, speed-limit change, end of day, or finish.
            profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': v, 'target_speed': actual_target_v, 'acceleration': 0, 'process': 'c1'}, index=[0])], ignore_index=True)
            time_to_next_stop = (next_stop_d - d_traveled) / v
            time_to_next_speed_limit_change = (next_speed_limit_change_d - d_traveled) / v
            time_to_end_of_day = next_eod_stop_time - t
            time_to_end = (race_distance - d_traveled) / v
            t_cruise = min(time_to_next_stop, time_to_next_speed_limit_change, time_to_end_of_day, time_to_end)
            next_reason = None
            if t_cruise == time_to_next_stop:
                next_reason = 'stop'
            elif t_cruise == time_to_next_speed_limit_change:
                next_reason = 'speed_limit_change'
            elif t_cruise == time_to_end_of_day:
                next_reason = 'end_of_day'
            elif t_cruise == time_to_end:
                next_reason = 'end'
            d_traveled += v * t_cruise
            t += t_cruise
            profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': v, 'target_speed': actual_target_v, 'acceleration': 0, 'process': 'c2'}, index=[0])], ignore_index=True)
            if next_reason == 'stop':
                v = 0
                # Dwell for the stop's duration.
                if stop_type == 'control_stop':
                    t += CONTROL_STOP_TRUE_DURATION
                elif stop_type == 'driver_swap':
                    t += DRIVER_SWAPS_TRUE_DURATION
                elif stop_type == 'red_sign':
                    t += RED_SIGN_DURATION
                elif stop_type == 'traffic_light':
                    t += TRAFFIC_LIGHT_DURATION
                elif stop_type == 'flashing_yellow':
                    t += YIELD_SIGN_DURATION
            if next_reason == 'end_of_day':
                v = 0
                eod_locations.append(d_traveled)
            if next_reason == 'end':
                # Finish-line crossing speed is 60 kph.
                v = 60 / 3.6
            profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': v, 'target_speed': actual_target_v, 'acceleration': a_target, 'process': 'c3'}, index=[0])], ignore_index=True)
        elif v < actual_target_v:
            # Accelerate toward the target speed.
            a = a_target
            t_to_target = (actual_target_v - v) / a
            d_acc = v * t_to_target + 0.5 * a * t_to_target ** 2
            acc_to = min(next_stop_d, next_speed_limit_change_d, race_distance)
            if d_traveled + d_acc > acc_to:
                # We would pass the next stop / limit change before reaching
                # target speed, so accelerate only up to that point.
                profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': v, 'target_speed': actual_target_v, 'acceleration': a, 'process': 'atp1'}, index=[0])], ignore_index=True)
                # Solve d = v*dt + 0.5*a*dt^2 for dt.
                # BUG FIX: the discriminant previously read v^2 + 2*(acc_to - d_traveled),
                # dropping the factor of `a`; correct kinematics require 2*a*d.
                delta_t = (-v + np.sqrt(v ** 2 + 2 * a * (acc_to - d_traveled))) / a
                t += delta_t
                v += a * delta_t
                d_traveled = acc_to
                profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': v, 'target_speed': actual_target_v, 'acceleration': 0, 'process': 'atp2'}, index=[0])], ignore_index=True)
            else:
                # Enough room to reach the target speed before the next event.
                profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': v, 'target_speed': actual_target_v, 'acceleration': a, 'process': 'af1'}, index=[0])], ignore_index=True)
                t += t_to_target
                v = actual_target_v
                a = 0
                d_traveled += d_acc
                stop_type = None
                profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': v, 'target_speed': actual_target_v, 'acceleration': 0, 'process': 'af2'}, index=[0])], ignore_index=True)
        elif v > actual_target_v:
            # Decelerate; modeled as an instantaneous drop to the target speed.
            profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': v, 'target_speed': actual_target_v, 'acceleration': 0, 'process': 'd1'}, index=[0])], ignore_index=True)
            profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': actual_target_v, 'target_speed': actual_target_v, 'acceleration': 0, 'process': 'd2'}, index=[0])], ignore_index=True)
            v = actual_target_v
    return profile, speed_limit_profile, eod_locations
Now we want to plot race time as a function of target speed.
Code
def get_true_race_time(route, target_speed_kph, acceleration_mpsps=1, deceleration_mpsps=0.5, acceptable_speeding_kph=0, hot_lap_delay=0):
    # Elapsed driving time in seconds, with the rules-credited stop time and the
    # hot-lap delay removed.
    # NOTE(review): the credit is 9 control stops x 30 min plus 2 x 10 min; the
    # 10-min figure does not match DRIVER_SWAPS_RULES_DURATION (5 min) — confirm.
    profile, speed_limit_profile, eod_locations = generate_speed_profile(route, target_speed_kph, acceleration_mpsps, deceleration_mpsps, acceptable_speeding_kph, hot_lap_delay)
    return profile['time'].iloc[-1] - (30 * 9 + 2 * 10) * 60 - hot_lap_delay * 3600


def get_official_race_time(route, target_speed_kph, acceleration_mpsps=1, deceleration_mpsps=0.5, acceptable_speeding_kph=0, hot_lap_delay=0, wasted_time=0):
    # Returns (official_time, true_time) in seconds: official adds the hot-lap
    # delay and any wasted time (both in hours) back onto the true time.
    true_race_time = get_true_race_time(route, target_speed_kph, acceleration_mpsps, deceleration_mpsps, acceptable_speeding_kph, hot_lap_delay)
    return true_race_time + (hot_lap_delay + wasted_time) * 3600, true_race_time


# plot race time vs target speed (60-130 kph in 1 kph steps)
speeds = np.linspace(60, 130, 71)
times = np.array([get_official_race_time(route, speed, 1.5, 0.5, 0, HOT_LAP_DELAY_TIME, WASTED_TIME) for speed in speeds])
official_race_times = times[:, 0]
true_race_times = times[:, 1]
# calculate what speed is needed to finish in 4 days = 31.33 hours, by
# interpolating speed as a function of official race time
f = interp1d(official_race_times, speeds)
target_speed = float(f(31 * 3600 + 19 * 60 + 59))
# and the race time at the 110 kph S. Australia speed limit
g = interp1d(speeds, official_race_times)
time_at_110 = float(g(110))
# plot the results
plt.xlabel('Target Speed [kph]')
plt.xlim([60, 130])
plt.ylabel('Official Race Time [h]')
plt.ylim([25 + 1 / 3, 49 + 1 / 3])
set_yticks_rt_days()
plt.plot([60, target_speed, target_speed], [31 + 1 / 3, 31 + 1 / 3, 0], '--', color=MICHIGAN_MAIZE)
plt.plot([110, 110], [0, time_at_110 / 3600], '--', color=ANGELL_HALL_ASH)
plt.plot(speeds, official_race_times / 3600, label='Official')
plt.plot(speeds, true_race_times / 3600, label='True', color=ROSS_ORANGE)
plt.title('Official Race Time vs. Target Speed')
add_footnote(f'Given hot lap delay plus wasted time of {round(60 * (HOT_LAP_DELAY_TIME + WASTED_TIME))} min.')
add_annotation(f'To finish in 4 days,\ntarget {round(target_speed, 1)} kph.', target_speed, 31 + 1 / 3, x_offset=0, y_offset=20, ha='left', va='bottom')
add_annotation(f'S. Australia Speed Limit', 110, time_at_110 / 3600, x_offset=0, y_offset=20, ha='left', va='bottom')
plt.legend()
fit_to_gslide()
# keep the sweep for reuse by later analyses
target_speed_df = pd.DataFrame({'target_speed': speeds, 'true_race_time': true_race_times, 'official_race_time': official_race_times})
# save at 600 dpi
plt.savefig('official_race_time_vs_target_speed', dpi=600)
We also want to calculate how much race time we could save by uniformly speeding:
Code
# Calculate benefits of speeding ... at target speed, how much time can we save by speedingspeeding = [0, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4, 4.5, 5]times = np.array([get_true_race_time(route, target_speed, 1.5, 0.5, speed, HOT_LAP_DELAY_TIME) for speed in speeding])times = times[0] - times# plot resultsplt.xlabel('Speeding [kph]')plt.ylabel('Race Time Saved [min]')plt.plot(speeding, times /60)# do a linear fit with intercept forcecd to be 0slope, intercept, r_value, p_value, std_err = linregress(speeding, times)plt.plot(speeding, [x * slope /60+ intercept /60for x in speeding], '--', color=MICHIGAN_MAIZE)plt.figtext(0.11, -0.03, f'*At target speed of {round(target_speed,2)} kph. Given hot lap delay plus wasted time of {round(HOT_LAP_DELAY_TIME + WASTED_TIME, 2)} h.', horizontalalignment='left', verticalalignment='bottom', fontsize=8)plt.title('Time Saved by Speeding')# plt.annotate(f'~{round(slope)} s/kph',# (2.5,4),# textcoords="offset points", xytext=(30, -30),# ha='left', va='bottom',# bbox=dict(fc="white", ec="black", lw=0.5),# arrowprops=dict(arrowstyle='->'),# fontsize=8)add_annotation(f'~{round(slope)} s/kph',2.5, times[-6] /60, x_offset=20, y_offset=-20, ha='left', va='bottom')plt.ylim([0, 10])plt.xlim([0, 5])
We also want to study decelerations:
Code
def characterize_decelerations(route, target_speed_kph, acceleration_mpsps=1, deceleration_mpsps=0.5, acceptable_speeding_kph=0, hot_lap_delay=0, wasted_time=0):
    # Summarize braking over the whole profile: the total of (v_prev^2 - v^2)
    # across decelerating steps (proportional to kinetic energy shed) and the
    # number of decelerating steps.
    # NOTE(review): num_decelerations starts at 1, not 0 — presumably to count
    # the final stop at the finish; confirm.
    speed_profile, _, _ = generate_speed_profile(route, target_speed_kph, acceleration_mpsps, deceleration_mpsps, acceptable_speeding_kph, hot_lap_delay)
    squared_speed_sum = 0
    num_decelerations = 1
    for i in range(1, len(speed_profile)):
        if speed_profile['speed'][i] < speed_profile['speed'][i - 1]:
            squared_speed_sum += (speed_profile['speed'][i - 1] ** 2 - speed_profile['speed'][i] ** 2)
            num_decelerations += 1
    return squared_speed_sum, num_decelerations


# get squared_speed_sums and num_decelerations across the target-speed sweep
# (assumes SPEED_AXIS matches the `speeds` used to build target_speed_df)
decelerations = np.array([characterize_decelerations(route, speed, 1.5, 0.5, 0, HOT_LAP_DELAY_TIME, WASTED_TIME) for speed in SPEED_AXIS])
squared_speed_sums = np.array([x[0] for x in decelerations])
num_decelerations = np.array([x[1] for x in decelerations])
# add square speed sums and num decelerations to target_speed_df
target_speed_df['squared_speed_sums'] = squared_speed_sums
target_speed_df['num_decelerations'] = num_decelerations
plt.plot(SPEED_AXIS, squared_speed_sums)
plt.xlabel('Target Speed [kph]')
plt.ylabel('Sum of Squared Speeds [(m/s)^2]')
plt.title('Deceleration vs. Target Speed')
add_footnote(f'Given hot lap delay plus wasted time of {round(HOT_LAP_DELAY_TIME + WASTED_TIME, 2)} h.')
# quadratic fit overlay.
# NOTE(review): Polynomial.fit returns coefficients in a scaled/shifted domain;
# p.coef[...] printed below are NOT raw-v coefficients — p.convert().coef would be.
p = Polynomial.fit(SPEED_AXIS, squared_speed_sums, 2)
plt.plot(SPEED_AXIS, p(SPEED_AXIS), '--', color=MICHIGAN_MAIZE)
add_annotation(f'{round(p.coef[2])}v^2 + {round(p.coef[1])}v + {round(p.coef[0])}', 100, 30000, x_offset=30, y_offset=-30, ha='left', va='bottom')
plt.xlim([60, 130])
plt.ylim([10000, 50000])
plt.show()
plt.plot(SPEED_AXIS, num_decelerations)
plt.xlabel('Target Speed [kph]')
plt.ylabel('Number of Decelerations')
plt.title('Number of Decelerations vs. Target Speed')
plt.figtext(0.01, -0.03, f'*Given hot lap delay plus wasted time of {round(HOT_LAP_DELAY_TIME + WASTED_TIME, 2)} h.', horizontalalignment='left', verticalalignment='bottom', fontsize=8)
plt.xlim([70, 130])
plt.ylim([60, 110])
plt.show()
# plot the above but vs true race time
x_axis = target_speed_df['true_race_time'] / 3600
plt.plot(x_axis, target_speed_df['squared_speed_sums'])
plt.xlim([25, 50])
plt.xlabel('True Race Time [h]')
plt.ylabel('Sum of Squared Speeds [(m/s)^2]')
plt.title('Sum of Squared Speeds vs. Race Time')
add_footnote(f'Given hot lap delay plus wasted time of {round(HOT_LAP_DELAY_TIME + WASTED_TIME, 2)} h.')
# fit a polynomial only to race times between 27 h and 40 h
target_speed_df_fit = target_speed_df[(target_speed_df['true_race_time'] / 3600 > 27) & (target_speed_df['true_race_time'] / 3600 < 40)]
# polyfit with numpy (np.polyfit returns highest-degree coefficient first)
p = np.polyfit(target_speed_df_fit['true_race_time'] / 3600, target_speed_df_fit['squared_speed_sums'], 2)
plt.plot(target_speed_df_fit['true_race_time'] / 3600, np.polyval(p, target_speed_df_fit['true_race_time'] / 3600), '--', color=MICHIGAN_MAIZE)
add_annotation(f'{round(p[0])}t^2 + {round(p[1])}t + {round(p[2])}', 34, np.polyval(p, 34), x_offset=30, y_offset=30, ha='left', va='bottom')
plt.show()
We also want to calculate the RMS velocity given a target speed:
Code
def get_rms_velocity(route, target_speed_kph, acceleration_mpsps=1, deceleration_mpsps=0.5, acceptable_speeding_kph=0, hot_lap_delay=0, wasted_time=0):
    """Distance-weighted RMS speed (m/s) of the profile driven at target_speed_kph."""
    prof, _, _ = generate_speed_profile(route, target_speed_kph, acceleration_mpsps, deceleration_mpsps, acceptable_speeding_kph, hot_lap_delay)
    # Distance covered between consecutive profile points; the final NaN entry
    # is ignored by the sums below.
    prof['distance_step'] = prof['distance'].shift(-1) - prof['distance']
    prof['squared_velocity'] = prof['speed'] ** 2
    # Weight each point's squared speed by the distance it covers.
    weighted_sq_sum = (prof['squared_velocity'] * prof['distance_step']).sum()
    return np.sqrt(weighted_sq_sum / prof['distance_step'].sum())


# plot RMS velocity (converted to kph) against the target-speed sweep
rms_velocities = np.array([get_rms_velocity(route, s, 1.5, 0.5, 0, HOT_LAP_DELAY_TIME, WASTED_TIME) for s in SPEED_AXIS])
plt.plot(SPEED_AXIS, rms_velocities * 3.6)
plt.xlabel('Target Speed [kph]')
plt.ylabel('RMS Velocity [kph]')
plt.title('RMS Velocity vs. Target Speed')
add_footnote(f'Given hot lap delay plus wasted time of {round(HOT_LAP_DELAY_TIME + WASTED_TIME, 2)} h.')
plt.xlim([60, 130])
plt.ylim([60, 130])
plt.show()
# add rms velocities to the sweep DataFrame
target_speed_df['rms_velocity'] = rms_velocities
Code
# fp, slp, eod_locations = generate_speed_profile(route, 105, 1, 0.5, 0)# slp2 = slp.copy()# slp2['distance'] = slp['distance'].shift(-1) - 0.001# slp = pd.concat([slp, slp2])# slp = slp.sort_values('distance')# plt.plot(fp['distance'] / 1000, fp['speed'] * 3.6)# plt.plot(slp['distance'] / 1000, slp['speed_limit'] * 3.6, alpha=0.5)# plt.scatter([x / 1000 for x in eod_locations], [0] * len(eod_locations), color='red', label='Stops')# # plot acceleration on other y axis# plt.ylim([0, 140])# ax2 = plt.gca().twinx()# plt.show()# def calculate_race_time(route, target_speed_kph, acceleration_mpsps=0.5, acceptable_speeding_kph=0.0):# target_speed = target_speed_kph / 3.6# acceptable_speeding = acceptable_speeding_kph / 3.6# a = acceleration_mpsps# dist_step = 100# num_steps = int(route['distance'].sum() / dist_step + 0.5)# distance_profile = np.linspace(0, route['distance'].sum(), num_steps)# speed_limit_profile = np.zeros(num_steps)# effective_speed_profile = np.zeros(num_steps)# # for each step go through, and calculate the effective speed limit# # this is the maximum speed that can be achieved at that point# v = 0# d = 0# for i in range(num_steps):# d = i * dist_step# if d < 10:# speed_limit = 40 / 3.6# else:# speed_limit = route['speed_limit'][route['distance'].cumsum() <= d].iloc[-1]# speed_limit_profile[i] = speed_limit# # if we're already going the right speed, don't accelerate# max_speed = min(target_speed, speed_limit + acceptable_speeding)# if abs(v - max_speed) < 0.01:# v = max_speed# effective_speed_profile[i] = max_speed# continue# # if we're going too slow, accelerate to the target speed# t = - v / a + np.sqrt(v ** 2 / a ** 2 + 2 * dist_step / a)# if v + a * t > max_speed:# # find t such that we reach this point# t_to_max_speed = (max_speed - v) / a# d_acc = v * t_to_max_speed + 0.5 * a * t_to_max_speed ** 2# t = t_to_max_speed + (dist_step - d_acc) / max_speed# v = max_speed# else:# v = v + a * t# effective_speed_profile[i] = v# # do same in reverse# v = 
speed_limit_profile[-1]# d = race_distance# for i in range(num_steps - 1, -1, -1):# speed_limit = speed_limit_profile[i]# d = race_distance - i * dist_step# # if we're already going the right speed, don't accelerate# max_speed = min(target_speed, speed_limit + acceptable_speeding)# # if segment end condition is 'control_stop' or 'driver_swap'# if route['segment_end_condition']# max_speed = 0# if v == max_speed:# effective_speed_profile[i] = min(effective_speed_profile[i], v)# continue# # if we're going too slow, accelerate to the target speed# t = - v / a + np.sqrt(v ** 2 / a ** 2 + 2 * dist_step / a)# if v + a * t > max_speed:# # find t such that we reach this point# t_to_max_speed = (max_speed - v) / a# d_acc = v * t_to_max_speed + 0.5 * a * t_to_max_speed ** 2# t = t_to_max_speed + (dist_step - d_acc) / max_speed# v = max_speed# else:# v = v + a * t# effective_speed_profile[i] = min(effective_speed_profile[i], v)# return distance_profile, effective_speed_profile, speed_limit_profile# dp, esp, slp = calculate_race_time(race_route, 110, 0.5, 0.0)# plt.plot(dp / 1000, slp)# plt.plot(dp / 1000, esp)# time = 0# speed = 0# distance = 0# segment_index = 0# route['distance_traveled'] = route['distance'].cumsum()# # TODO: account for stop signs, stop lights, and flashing yellows# # TODO: account for slowdowns for control stops, driver swaps, etc.# while(distance < route['distance'].sum()):# # get the current segment# while(route.iloc[segment_index]['distance_traveled'] < distance):# segment_index += 1# segment = route.iloc[segment_index]# segment_distance = segment['distance']# segment_distance_traveled = segment['distance_traveled']# limited_target_speed = min(target_speed, segment['speed_limit'] + acceptable_speeding)# # TODO: also have to take into account the need to decelerate for the next segment# # check if accelerating, decelerating, or cruising# if speed == limited_target_speed:# acceleration_time = 0# acceleration_distance = 0# time += 
(segment_distance_traveled - distance) / speed# distance = segment_distance_traveled + 0.001# elif speed < limited_target_speed:# acceleration_time = (limited_target_speed - speed) / acceleration# acceleration_distance = speed * acceleration_time + 0.5 * acceleration * acceleration_time ** 2# time += acceleration_time# distance += acceleration_distance# speed = limited_target_speed# # TODO: account for segment cutoffs# else:# acceleration_time = (speed - limited_target_speed) / acceleration# acceleration_distance = speed * acceleration_time - 0.5 * acceleration * acceleration_time ** 2# time += acceleration_time# distance += acceleration_distance# speed = limited_target_speed# # TODO: account for segment cutoffs# return time# print(calculate_race_time(race_route, 1000, 0.3, 0.0))
Weather Analysis
Drive Cycle Analysis
Possible Speed Profiles
True Race Time vs. Speed (given Acceleration) ==> Target Speed
True Race Time vs. Willingness to Speed @ Target Speed
Number of Accelerations
Number of Decelerations
Irradiance Map (vs. Total Driver Ingress Time)
Pointing Times
Wind Map, Yaw Distribution, Apparent Wind Speed Distribution
Temperature Map
Air Density Map
v^2 d Weighted Yaw Distribution
CdA and Array Power Level Curves
Stops: Start, BOD, EOD, Control Points, Lights, Signs, Flashing Yellows
Load Weather Data
First, we find the weather station for each segment of the route.
Code
# read in route# TODO(agoettsc): solarset should be a submodule or something like thatweather_stations = pd.read_csv("./data/australia_stations.csv")# determine the fractional weather station for each segment for interpolationseg_station_distances = pd.DataFrame(columns=[f"dist_station_{i}"for i in weather_stations["group"]], index=route.index)def haversine_distance(seg, station): seg_coords_rad = (np.deg2rad(seg["start_latitude"]), np.deg2rad(seg["start_longitude"])) station_coords_rad = (np.deg2rad(station["lat"]), np.deg2rad(station["lon"])) avg_lat = (seg_coords_rad[0] + station_coords_rad[0]) /2 r0 =6378137; r1 =6356752; a = (r0 **2) * np.cos(avg_lat); b = (r1 **2) * np.sin(avg_lat); c = r0 * np.cos(avg_lat); d = r1 * np.sin(avg_lat); R = np.sqrt((a **2+ b **2) / (c **2+ d **2)) haversine_output = np.sin((seg_coords_rad[0] - station_coords_rad[0]) /2) **2\+ np.cos(seg_coords_rad[0]) * np.cos(station_coords_rad[0]) \* np.sin((seg_coords_rad[1] - station_coords_rad[1]) /2) **2 arc_length =2* np.arcsin(np.sqrt(haversine_output))return R * arc_length /1000for _, weather_station in weather_stations.iterrows(): seg_station_distances[f"dist_station_{weather_station["group"]}"] = haversine_distance(route, weather_station)def get_seg_weather_station(seg): seg_distances = seg_station_distances.iloc[seg.name] closest_stations = np.array([seg_distances[f"dist_station_{weather_station["group"]}"] for _, weather_station in weather_stations.iterrows()]).argpartition(2)[:2] +1 weights = np.array([1/ (seg_distances[f"dist_station_{group_idx}"]) for group_idx in closest_stations]) station = np.sum(weights * closest_stations) / np.sum(weights)return stationroute["weather_station"] = route.apply(get_seg_weather_station, axis=1)
Then, we generate routes annotated with the weather the vehicle experiences for a given speed profile.
Then, we use a schedule and a speed profile to find the time that we are at each segment. Using that time, we look up weather for each segment.
Code
SCHEDULE_FILES = glob.glob("data/schedule/october/*.toml")
AEROBODY_COP_Z = 0.558      # aerobody center-of-pressure height [m]
SURFACE_ROUGHNESS = 1e-3    # log-law roughness length [m]
WIND_REFERENCE_HEIGHT = 10  # height of the reported wind data [m]
# Log-law scaling of the 10 m reference wind down to the aerobody's height.
wind_speed_multiplier = np.log(AEROBODY_COP_Z / SURFACE_ROUGHNESS) / np.log(WIND_REFERENCE_HEIGHT / SURFACE_ROUGHNESS)


def parse_schedule_file(schedule_file_path):
    """Parse a TOML race schedule into per-day start/end Timestamp lists."""
    with open(schedule_file_path, "rb") as f:
        schedule_toml = tomli.load(f)
    return {
        "day_starts": [pd.Timestamp(day_schedule["race_start"]) for day_schedule in schedule_toml["schedule"]],
        "day_ends": [pd.Timestamp(day_schedule["race_end"]) for day_schedule in schedule_toml["schedule"]],
    }


def get_time_for_seg_or_nan(seg, profile, schedule):
    """Local time at which `profile` reaches this segment, or NaN when the
    profile has no point at exactly this segment's cumulative distance.

    Retained for the profile-based timing approach noted in the TODO below.
    """
    matching_profile_segs = profile[seg["cum_distance"] == profile["distance"]]
    if len(matching_profile_segs) > 0:
        profile_seg_time = matching_profile_segs.iloc[0]["time"]
        # Race time accrues 9 h per day; map it onto the calendar schedule.
        profile_seg_day = int(np.floor(profile_seg_time / (9 * 3600)))
        return pd.Timedelta(seconds=profile_seg_time - (9 * 3600 * profile_seg_day)) + schedule["day_starts"][profile_seg_day]
    return float('nan')


def apply_weather_to_speed_profile(target_speed_kph, weather, schedule):
    """Annotate a copy of the route with localtime, weather, solar position and wind.

    Args:
        target_speed_kph: cruising speed; per-segment speed is the minimum of
            this and the segment speed limit (acceleration is not modeled).
        weather: dict of column name -> spline evaluated at
            [weather_station, localtime_epoch].
        schedule: dict with 'day_starts'/'day_ends' Timestamp lists.

    Returns:
        The annotated route DataFrame (marshaling segments removed).
    """
    res = route.copy()
    res = res[res["segment_type"] != "marshaling"]
    # Resolve the time at each segment. Speed comes straight from the limit /
    # target; cum_distance is the distance at the segment's *start*.
    res["cum_distance"] = res["distance"].cumsum().shift(1)
    res.loc[0, "cum_distance"] = 0
    # TODO(agoettsc): this is problematic, need to deal with accel — a
    # generate_speed_profile-based timing prototype lived here previously.
    res["speed"] = np.minimum(res["speed_limit"], target_speed_kph / 3.6)
    res["time_taken"] = res["distance"] / res["speed"]
    res.loc[0, "localtime"] = schedule["day_starts"][0] + pd.Timedelta(hours=HOT_LAP_DELAY_TIME)
    # Offset each segment's time from the previous one, rolling over day ends.
    for i, seg in res.iterrows():
        if i == 0:
            continue
        prev_t = res.loc[i - 1, "localtime"]
        next_t = prev_t + pd.Timedelta(seconds=res.loc[i - 1, "time_taken"])
        if res.loc[i - 1, "segment_end_condition"] == "control_stop":
            next_t += pd.Timedelta(minutes=30)
        for j, day_end in enumerate(schedule["day_ends"]):
            if prev_t < day_end and next_t > day_end:
                next_t = schedule["day_starts"][j + 1] + (next_t - day_end)
        res.loc[i, "localtime"] = next_t
    # Interpolate each weather variable at (station, time).
    for k, spline in weather.items():
        res[k] = res.apply(lambda seg: spline([seg["weather_station"], seg["localtime"].timestamp()])[0], axis=1)
    # Now that we have time, we get solar position.
    solpos_df = pvlib.solarposition.get_solarposition(res["localtime"], res["start_latitude"], res["start_longitude"], altitude=res["elevation"])
    res["solar_zenith"] = np.deg2rad(solpos_df["apparent_zenith"].to_numpy())
    res["solar_azimuth"] = -np.deg2rad(solpos_df["azimuth"].to_numpy())
    res["vehicle_relative_azimuth"] = (res["heading"] + res["solar_azimuth"]) % (2 * np.pi)
    # Resolve the wind vector, scaled down to the aerobody height.
    res["wind_speed"] = wind_speed_multiplier * np.sqrt(res["wind_velocity_10m_ns"] ** 2 + res["wind_velocity_10m_ew"] ** 2)
    # Heading coordinate system is north at 0 degrees, cw positive, so east at
    # 90 degrees; east-west wind is the y value and north-south wind the x value.
    # Negated because wind speed is positive when the wind is from west/south.
    res["wind_heading"] = np.arctan2(-res["wind_velocity_10m_ew"], -res["wind_velocity_10m_ns"])
    # Add the two polar vectors (vehicle motion and wind) to get the apparent wind.
    r1 = res["speed"]
    r2 = res["wind_speed"]
    theta1 = res["heading"]
    theta2 = res["wind_heading"]
    # BUG FIX: law of cosines for a polar vector sum is
    # sqrt(r1^2 + r2^2 + 2*r1*r2*cos(dtheta)); the r2 factor was missing.
    res["apparent_wind_speed"] = np.sqrt(r1 ** 2 + r2 ** 2 + 2 * r1 * r2 * np.cos(theta2 - theta1))
    # This would be theta_1 + arctan2(...), but we want vehicle-relative yaw,
    # so theta_1 = heading cancels out.
    res["yaw"] = np.arctan2(r2 * np.sin(theta2 - theta1), r1 + r2 * np.cos(theta2 - theta1))
    return res


def get_races_for_speed(speed):
    """Build an annotated route per schedule file, keyed by schedule year."""
    # speed_profile = generate_speed_profile(route, speed, hot_lap_delay=HOT_LAP_DELAY_TIME)[0]
    schedules = {re.match(r".*/Schedule(.*)\.toml", s)[1]: parse_schedule_file(s) for s in SCHEDULE_FILES}
    races = {}
    for k, schedule in schedules.items():
        # Pick the weather dataset whose time window covers this schedule's start.
        weather = next(w for min_t, max_t, w in weathers if min_t < pd.Timestamp.timestamp(schedule["day_starts"][0]) < max_t)
        races[k] = apply_weather_to_speed_profile(speed, weather, schedule)
    return races


races = get_races_for_speed(target_speed)
race_2022 = races["2022"]
Now that we have segment speeds, we plot speed and position over time
Code
# Vehicle speed (converted to kph) against wall-clock race time.
fig, ax = plt.subplots()
ax.plot(race_2022["localtime"], 3.6 * race_2022["speed"])
ax.set_xlabel("Time")
ax.set_ylabel("Speed (kph)")
ax.set_title("Speed over time")
fit_to_gslide()
Code
# Plot position over local race time for the 2022 schedule.
# NOTE(review): this plots the `weather_station` column, while the title and
# y-label claim cumulative distance in km — confirm whether `weather_station`
# is a distance-keyed station index or whether a cumulative-distance column
# was intended here.
fig, ax = plt.subplots()
ax.plot(race_2022["localtime"], race_2022["weather_station"])
ax.set_title("Cumulative distance over time")
ax.set_ylabel("Distance (km)")
ax.set_xlabel("Time")
fit_to_gslide()
Solar Position and Irradiance
First, we plot DNI, DHI, and GHI over time.
Code
# Plot the three irradiance components (direct normal, diffuse horizontal,
# global horizontal) over the 2022 race timeline.
# NOTE(review): the title says "August 2022" while sibling plots are labeled
# "October 2022" — confirm which month the 2022 schedule actually covers.
fig, ax = plt.subplots()
ax.plot(race_2022["localtime"], race_2022["dni"], label="dni")
ax.plot(race_2022["localtime"], race_2022["dhi"], label="dhi")
ax.plot(race_2022["localtime"], race_2022["ghi"], label="ghi")
ax.set_title("Irradiance during 4 days in August 2022")
ax.set_ylabel("Irradiance ($W/m^2$)")
ax.set_xlabel("Time")
fig.legend()
fit_to_gslide()
Then, we plot the position of the sun relative to the vehicle over a 4-day race.
Code
# Scatter the sun's position relative to the vehicle on a polar plot:
# angle = vehicle-relative azimuth (0 = straight ahead, since heading is
# folded in upstream), radius = solar zenith in degrees (0 = overhead).
fig, ax = plt.subplots(subplot_kw={'projection': 'polar'})
ax.scatter(race_2022["vehicle_relative_azimuth"], race_2022["solar_zenith"] * 180 / np.pi)
ax.set_theta_zero_location('N')
ax.set_rmax(90)
ax.set_rticks([0, 30, 60, 90], labels=['0', '30', '60', '90'])
ax.set_title("Vehicle relative solar position (October 2022)")
fit_to_gslide()
We also care about how often the sun is forward of horizontal:
Code
# Fraction of time-integrated DNI (J/m^2) that arrives while the sun sits in
# the vehicle's forward hemisphere, i.e. vehicle-relative azimuth within
# +/- 90 degrees of straight ahead.
forward_mask = (
    (race_2022["vehicle_relative_azimuth"] < (np.pi / 2))
    | (race_2022["vehicle_relative_azimuth"] > (3 * np.pi / 2))
)
# Per-segment DNI energy, counted only when the sun is forward of the vehicle.
forward_dni_energy_Jpersqm = (forward_mask * race_2022["dni"] * race_2022["time_taken"]).sum()
total_dni_energy_Jpersqm = (race_2022["dni"] * race_2022["time_taken"]).sum()
forward_dni_percent = (forward_dni_energy_Jpersqm / total_dni_energy_Jpersqm) * 100
print(f"{forward_dni_percent:.2f}% of irradiance is collected forwards of horizontal")
31.18% of irradiance is collected forwards of horizontal
We would like to determine the optimal orientation of solar cells. First, we generate 2000 sample normal vectors. Then, we calculate the irradiance on them at each route step and find specific power collected in \(W/m^2\) as a function of orientation.
Code
# Sampling resolution for candidate cell-normal directions and for the
# attitude heatmap rendered below.
SAMPLE_CELL_VECTORS = 2000
HEATMAP_RES_AZ = 1000
HEATMAP_RES_ZEN = 500
AZIMUTH_BOUNDS = (0, np.pi * 2)
ZENITH_BOUNDS = (0, np.pi / 2)


def get_cell_normal_spline(race_profile):
    """Compute time-averaged irradiance collected by ~2000 candidate cell
    orientations over `race_profile`, and return interpolators over the unit
    hemisphere for total / beam / sky-diffuse / ground-reflected irradiance,
    plus the best-performing sample orientation.

    `race_profile` must carry solar_zenith, vehicle_relative_azimuth, dni,
    dhi, ghi and time_taken columns (as produced upstream).
    """
    race_profile = race_profile.copy()

    def make_hemisphere_normals(num_points):
        # Build an approximately uniform set of unit normals on the upper
        # hemisphere: one zenith point, a ring on the equator, and the rest
        # from a Fibonacci spiral. Returned as (azimuth, zenith) pairs plus
        # slightly-inflated Cartesian coordinates.
        point_normals = [(0, 0)]
        num_equator_points = 2 * math.ceil(math.sqrt(num_points))
        point_normals += [
            (2 * math.pi * i / num_equator_points, math.pi / 2)
            for i in range(num_equator_points)
        ]
        num_fibonacci_points = num_points - num_equator_points - 1
        phi = math.pi * (math.sqrt(5) - 1)  # golden angle
        for i in range(num_fibonacci_points):
            z = 1 - (i / num_fibonacci_points)
            radius = math.sqrt(1 - z * z)
            theta = phi * i
            x = math.cos(theta) * radius
            y = math.sin(theta) * radius
            point_normals.append((math.atan2(y, x) % (2 * math.pi), math.acos(z)))
        df = pd.DataFrame(point_normals, columns=["static_cell_azimuth", "static_cell_zenith"])
        # Scale up rho for the cell normal vectors to produce a convex hull for
        # interpolation slightly larger than the unit sphere, so that all points
        # on the unit sphere fall inside the hull even where its facets are
        # triangular (flat) rather than curved.
        RHO_EPSILON = 0.001
        RHO = 1 + RHO_EPSILON
        df["x"] = RHO * np.sin(df["static_cell_zenith"]) * np.cos(df["static_cell_azimuth"])
        df["y"] = RHO * np.sin(df["static_cell_zenith"]) * np.sin(df["static_cell_azimuth"])
        df["z"] = RHO * np.cos(df["static_cell_zenith"])
        return df

    cell_normals = make_hemisphere_normals(SAMPLE_CELL_VECTORS)

    def get_cell_cos_incident_angle(cell):
        # Spherical-law-of-cosines angle between the sun vector and cell
        # normal `cell`; negative values (sun behind the cell) clamp to 0.
        cosine_incident_angle = (
            np.cos(race_profile["solar_zenith"]) * np.cos(cell_normals.loc[cell, "static_cell_zenith"])
            + np.sin(race_profile["solar_zenith"]) * np.sin(cell_normals.loc[cell, "static_cell_zenith"])
            * (np.cos(race_profile["vehicle_relative_azimuth"]) * np.cos(cell_normals.loc[cell, "static_cell_azimuth"])
               + np.sin(race_profile["vehicle_relative_azimuth"]) * np.sin(cell_normals.loc[cell, "static_cell_azimuth"]))
        )
        cosine_incident_angle[cosine_incident_angle < 0] = 0
        return cosine_incident_angle

    # Encapsulant transmission efficiency vs cos(incidence); the CSV is
    # reversed so the abscissa is increasing for CubicSpline.
    encapsulant_eff_raw = pd.read_csv("data/encapsulant/mito-skyiar-2025.csv").iloc[::-1]
    encapsulant_eff_interp = CubicSpline(
        encapsulant_eff_raw["cos_incident_angle"].to_numpy(),
        encapsulant_eff_raw["encapsulant_efficiency"],
    )

    def get_cell_beam_irradiance_seen(cell):
        # Direct-beam component: DNI projected onto the cell, derated by the
        # encapsulant's angle-dependent efficiency.
        cosine_incident_angle = get_cell_cos_incident_angle(cell)
        return cosine_incident_angle * race_profile["dni"] * encapsulant_eff_interp(cosine_incident_angle)

    def get_sky_diffuse_irradiance(cell):
        # Isotropic sky-diffuse term plus an empirical circumsolar/horizon
        # correction scaled by GHI and solar zenith.
        isotropic_diffuse_irradiance = (
            race_profile["dhi"] * (1 + np.cos(cell_normals.loc[cell, "static_cell_zenith"])) / 2
        )
        circumsolar_horizon_diffuse_irradiance = (
            race_profile["ghi"]
            * (0.012 * np.rad2deg(race_profile["solar_zenith"]) - 0.04)
            * (1 - np.cos(cell_normals.loc[cell, "static_cell_zenith"])) / 2
        )
        return isotropic_diffuse_irradiance + circumsolar_horizon_diffuse_irradiance

    def get_ground_reflected_irradiance(cell):
        # Ground reflection with a fixed albedo of 0.1.
        return race_profile["ghi"] * (1 - np.cos(cell_normals.loc[cell, "static_cell_zenith"])) / 2 * 0.1

    # Evaluate every candidate orientation over the whole profile and record
    # its time-weighted average collection, by component and in total.
    for cell_id in cell_normals.index:
        race_profile[f"cell-{cell_id}-beam-irradiance-collected"] = get_cell_beam_irradiance_seen(cell_id)
        race_profile[f"cell-{cell_id}-sky-diffuse-irradiance-collected"] = get_sky_diffuse_irradiance(cell_id)
        race_profile[f"cell-{cell_id}-ground-reflected-irradiance-collected"] = get_ground_reflected_irradiance(cell_id)
        race_profile[f"cell-{cell_id}-irradiance-collected"] = (
            race_profile[f"cell-{cell_id}-beam-irradiance-collected"]
            + race_profile[f"cell-{cell_id}-sky-diffuse-irradiance-collected"]
            + race_profile[f"cell-{cell_id}-ground-reflected-irradiance-collected"]
        )
        cell_normals.loc[cell_id, "avg_cell_beam_irradiance"] = np.average(
            race_profile[f"cell-{cell_id}-beam-irradiance-collected"], weights=race_profile["time_taken"])
        cell_normals.loc[cell_id, "avg_sky_diffuse_irradiance"] = np.average(
            race_profile[f"cell-{cell_id}-sky-diffuse-irradiance-collected"], weights=race_profile["time_taken"])
        cell_normals.loc[cell_id, "avg_ground_reflected_irradiance"] = np.average(
            race_profile[f"cell-{cell_id}-ground-reflected-irradiance-collected"], weights=race_profile["time_taken"])
        cell_normals.loc[cell_id, "avg_irradiance_collected"] = np.average(
            race_profile[f"cell-{cell_id}-irradiance-collected"], weights=race_profile["time_taken"])

    zenith_linspace = np.linspace(*ZENITH_BOUNDS, HEATMAP_RES_ZEN)
    sample_points_azimuth, sample_points_zenith = np.meshgrid(
        np.linspace(*AZIMUTH_BOUNDS, HEATMAP_RES_AZ), zenith_linspace)
    total_time_s = np.sum(race_profile["time_taken"])
    print(f"Total racetime is {total_time_s / 3600} hrs")
    # idxmax returns the index label of the best sample; cell_normals uses a
    # default RangeIndex, so label and position coincide here.
    best_cell = cell_normals.iloc[cell_normals["avg_irradiance_collected"].idxmax()]
    best_cell_over_6m2 = best_cell['avg_irradiance_collected'] * 6
    print(f"Best orientation collects {best_cell['avg_irradiance_collected']:.2f} W/m^2 ({best_cell_over_6m2:.2f} W if this was the entire array), azimuth: {np.rad2deg(best_cell['static_cell_azimuth']):.2f} deg, zenith: {np.rad2deg(best_cell['static_cell_zenith']):.2f} deg")
    # Sample 0 is the straight-up normal added first in make_hemisphere_normals.
    flat_cell = cell_normals.iloc[0]
    flat_cell_over_6m2 = flat_cell['avg_irradiance_collected'] * 6
    print(f"Flat cell collects {flat_cell['avg_irradiance_collected']:.2f} W/m^2 ({flat_cell_over_6m2:.2f} W if this was the entire array), azimuth: {np.rad2deg(flat_cell['static_cell_azimuth']):.2f} deg zenith: {np.rad2deg(flat_cell['static_cell_zenith']):.2f} deg")
    return (
        LinearNDInterpolator(cell_normals[["x", "y", "z"]], cell_normals["avg_irradiance_collected"]),
        LinearNDInterpolator(cell_normals[["x", "y", "z"]], cell_normals["avg_cell_beam_irradiance"]),
        LinearNDInterpolator(cell_normals[["x", "y", "z"]], cell_normals["avg_sky_diffuse_irradiance"]),
        LinearNDInterpolator(cell_normals[["x", "y", "z"]], cell_normals["avg_ground_reflected_irradiance"]),
        best_cell,
    )


# Fit the attitude interpolators over all schedules at once, then render the
# total-irradiance heatmap in polar coordinates with the best attitude marked.
specific_irradiance_spline, specific_beam_irradiance_spline, specific_sky_diffuse_spline, specific_ground_reflected_spline, best_cell = get_cell_normal_spline(pd.concat(races.values()))
zenith_linspace = np.linspace(*ZENITH_BOUNDS, HEATMAP_RES_ZEN)
sample_points_azimuth, sample_points_zenith = np.meshgrid(np.linspace(*AZIMUTH_BOUNDS, HEATMAP_RES_AZ), zenith_linspace)
sample_points_x = np.sin(sample_points_zenith) * np.cos(sample_points_azimuth)
sample_points_y = np.sin(sample_points_zenith) * np.sin(sample_points_azimuth)
sample_points_z = np.cos(sample_points_zenith)
irradiance_collected_sampled = specific_irradiance_spline(sample_points_x, sample_points_y, sample_points_z)
fig, ax = plt.subplots(subplot_kw={'projection': 'polar'})
pc = ax.pcolormesh(sample_points_azimuth, sample_points_zenith * 180 / np.pi, irradiance_collected_sampled, shading='auto')
fig.colorbar(pc)
ax.set_theta_zero_location('N')
ax.set_rmax(90)
ax.set_title(r"Average cell irradiance ($W/m^2$) collected at attitude (October 2022 w/ 2007-2022 weather)")
ax.scatter(best_cell["static_cell_azimuth"], best_cell["static_cell_zenith"] * 180 / np.pi, label="best attitude")
fig.legend(loc="lower left")
Total racetime is 1508.128394329492 hrs
Best orientation collects 694.74 W/m^2 (4168.42 W if this was the entire array), azimuth: 174.70 deg, zenith: 11.45 deg
Flat cell collects 687.50 W/m^2 (4125.02 W if this was the entire array), azimuth: 0.00 deg zenith: 0.00 deg
It is also of interest what the plot looks like along a single section: the slice corresponding to cells pitched fore-and-aft about the vehicle's transverse axis (azimuth fixed at 180 degrees, zenith swept from -90 to +90 degrees).
Code
# Slice the attitude heatmap along a fixed azimuth (180 deg) by sweeping
# zenith from -90 to +90 deg — i.e. cells pitched fore/aft — and plot each
# irradiance component along that section.
SECTION_AZIMUTH_DEG = 180
SECTION_AZIMUTH = np.deg2rad(SECTION_AZIMUTH_DEG)
section_zenith_linspace = np.linspace(-ZENITH_BOUNDS[1], ZENITH_BOUNDS[1], HEATMAP_RES_ZEN * 2)
section_x_samples = np.sin(section_zenith_linspace) * np.cos(SECTION_AZIMUTH)
# NOTE(review): the y samples are a constant sin(SECTION_AZIMUTH) with no
# sin(zenith) factor. At azimuth 180 deg this is ~0 everywhere so the section
# is correct, but for any other SECTION_AZIMUTH this would be wrong — confirm
# before reusing with a different azimuth.
section_y_samples = np.full(HEATMAP_RES_ZEN * 2, np.sin(SECTION_AZIMUTH))
section_z_samples = np.cos(section_zenith_linspace)
section_zenith_deg_plot_x = np.rad2deg(section_zenith_linspace)
section_irradiance = specific_irradiance_spline(section_x_samples, section_y_samples, section_z_samples)
section_max_index = np.argmax(section_irradiance)
section_max_irradiance = section_irradiance[section_max_index]
section_max_irradiance_over_6m2 = section_max_irradiance * 6
print(f"Best cell along azimuth {SECTION_AZIMUTH_DEG} deg collects {section_irradiance[section_max_index]:.2f} W/m^2 ({section_max_irradiance_over_6m2:.2f} W if this was the entire array), zenith: {section_zenith_deg_plot_x[section_max_index]:.2f} deg")
fig = plt.figure()
section_ax = fig.add_subplot()
section_ax.plot(section_zenith_deg_plot_x, section_irradiance, label="total irradiance collected")
section_ax.plot(section_zenith_deg_plot_x, specific_beam_irradiance_spline(section_x_samples, section_y_samples, section_z_samples), label="beam irradiance collected")
section_ax.plot(section_zenith_deg_plot_x, specific_sky_diffuse_spline(section_x_samples, section_y_samples, section_z_samples), label="sky diffuse irradiance collected")
section_ax.plot(section_zenith_deg_plot_x, specific_ground_reflected_spline(section_x_samples, section_y_samples, section_z_samples), label="ground reflected irradiance collected")
section_ax.set_title(f"Irradiance collected by cells oriented along the x-axis of the vehicle")
section_ax.set_xlabel("Pitch (deg)")
section_ax.set_ylabel(r"Irradiance ($W/m^2$)")
# Draw a dashed vertical reference line at zero pitch without letting it
# change the autoscaled y-limits.
ylim = section_ax.get_ylim()
section_ax.plot([0, 0], [-1000, 1000], '--', color=ANGELL_HALL_ASH)
section_ax.set_ylim(ylim)
fit_to_gslide()
Best cell along azimuth 180 deg collects 694.57 W/m^2 (4167.44 W if this was the entire array), zenith: 10.18 deg
Wind Analysis
First, we plot a histogram of yaw frequencies.
Code
# Histogram of apparent-wind yaw angles across all simulated race years,
# weighted by dynamic-pressure-like importance (apparent wind speed squared
# times segment distance), normalized so the bars sum to 1.
races_df = pd.concat(races.values())
races_df["yaw_weight"] = (races_df["apparent_wind_speed"] ** 2) * races_df["distance"]
# Distance-weighted RMS apparent wind speed over all segments.
rms_apparent_wind_speed = np.sqrt(np.sum(races_df["yaw_weight"]) / np.sum(races_df["distance"]))
weight_denom = np.sum(races_df["yaw_weight"])
# 401 bins of width 0.1 deg centered on integer tenths between -20 and +20.
counts, bins = np.histogram(
    np.rad2deg(races_df["yaw"]),
    weights=races_df["yaw_weight"] / weight_denom,
    bins=401,
    range=(-20.05, 20.05),
)
fig = plt.figure()
ax = fig.add_subplot()
ax.bar(bins[:-1], counts)
ax.set_title("Apparent yaw frequency over race days in October (2007-2022)")
ax.set_xlabel("Yaw angle (deg)")
ax.set_ylabel("Frequency")
Text(0, 0.5, 'Frequency')
Then, we compute the RMS apparent wind speed at different target speeds.
---title: "The Strategic Report on the 2025 World Solar Challenge"authors: The University of Michigan Solar Car Teamdate: todaydate-format: longtitle-block-banner: images/astrum_windfarm.jpgabstract: We provide a complete report of the race, route, weather, and vehicle.format: html: toc: true toc-title: Contents toc-depth: 5 code-fold: true code-overflow: wrap code-tools: true self-contained: trueexecute: warning: false---# IntroductionImport the relevant values:```{python}# Disable pandas warnings# TODO(agoettsc): come up with a good justificationfrom warnings import simplefiltersimplefilter(action="ignore", category=FutureWarning)import pandas as pdsimplefilter(action="ignore", category=pd.errors.PerformanceWarning)import numpy as npfrom numpy.polynomial.polynomial import Polynomialfrom tqdm import tqdmimport matplotlib as mplimport matplotlib.pyplot as pltimport matplotlib.font_manager as fmfrom mpl_toolkits.axes_grid1.inset_locator import inset_axesfrom mpl_toolkits.axes_grid1.inset_locator import mark_insetfrom scipy.stats import linregressfrom scipy.interpolate import interp1d, LinearNDInterpolator, CubicSpline, RegularGridInterpolatorimport seabornimport globimport tomliimport pvlibimport mathimport re```Set up our custom matplotlib theme:```{python}# make custom matplotlib themeMICHIGAN_BLUE ='#00274C'MICHIGAN_MAIZE ='#FFCB05'TAPPAN_RED ='#9A3324'ROSS_ORANGE ='#D86018'WAVE_FIELD_GREEN ='#A5A508'A2_AMETHYST ='#702082'TAUBMAN_TEAL ='#00B2A9'LAW_QUAD_STONE ='#655A52'ANGELL_HALL_ASH ='#989C97'PUMA_BLACK ='#131516'SOLID_WHITE ='#FFFFFF'# make black #131516 insteadmpl.rcParams['text.color'] = PUMA_BLACKmpl.rcParams['axes.labelcolor'] = PUMA_BLACKmpl.rcParams['xtick.color'] = PUMA_BLACKmpl.rcParams['ytick.color'] = PUMA_BLACKmpl.rcParams['axes.edgecolor'] = PUMA_BLACKmpl.rcParams['axes.facecolor'] = SOLID_WHITE# set text to Montserratfe = fm.FontEntry( fname='./Montserrat-Regular.ttf', name='Montserrat')fm.fontManager.ttflist.insert(0, fe) # or append is 
finempl.rcParams['font.family'] = fe.namempl.rcParams['mathtext.fontset'] ='custom'mpl.rcParams['mathtext.it'] = fe.name +':italic'mpl.rcParams['mathtext.bf'] = fe.name +':italic:bold'# make lines thickermpl.rcParams['lines.linewidth'] =2# makes axes text largermpl.rcParams['axes.labelsize'] =12mpl.rcParams['axes.titlesize'] =12# set color palettempl.rcParams['axes.prop_cycle'] = mpl.cycler(color=[MICHIGAN_BLUE, MICHIGAN_MAIZE, TAPPAN_RED, A2_AMETHYST, ROSS_ORANGE])# turn grid onmpl.rcParams['axes.grid'] =True# put it behindmpl.rcParams['axes.axisbelow'] =Truedef set_yticks_rt_days(): plt.yticks(1/3+ np.arange(25, 50, 3), ['3d +3h', '3d +6h', '4d', '4d +3h', '4d +6h', '5d', '5d + 3h', '5d + 6h', '6d'])def set_xticks_race_distance(remaining=False): RD =3024.7408# Should be confirmedifnot remaining: plt.xticks([0, 500, 1000, 1500, 2000, 2500, RD], ['Start', '500', '1000', '1500', '2000', '2500', 'End'])else: plt.gca().invert_xaxis() plt.xticks([0, 500, 1000, 1500, 2000, 2500, RD], ['End', '500', '1000', '1500', '2000', '2500', 'Start'])def add_footnote(text): plt.figtext(0.1, -0.03, '†'+ text, horizontalalignment='left', verticalalignment='bottom', fontsize=8)def add_annotation(text, x, y, x_offset=0, y_offset=0, ha='center', va='center'): plt.annotate(text, (x, y), textcoords="offset points", xytext=(x_offset, y_offset), ha=ha, va=va, bbox=dict(fc="white", ec="black", lw=0.5), arrowprops=dict(arrowstyle='->'), fontsize=8)def fit_to_gslide(blank=True, half=False): fig = plt.gcf() height =4.64if blank else3.69 width =9.32ifnot half else3.74 fig.set_size_inches(width, height)```Establish relevant constant values:```{python}SPEED_AXIS = np.linspace(60, 130, 71)HOT_LAP_DELAY_TIME =0.258WASTED_TIME =0.239```# Route SurveyWe start with a simple analysis of our route.```{python}route = pd.read_csv("./data/route_6544_0m.csv")race_route = route[route['segment_type'] =='race']```## Route Survey Summary| Parameter | Value || ----------------------------------------------------- 
| ----- || Race Distance | 3,024,740.80 m || Marshalling Distance | 3,357.33 m || Total Distance | 3,028,098.13 m|| Net Elevation Change | -32.646 m || Total Uphill Travel | 6359.825 m || Total Downhill Travel | 6392.471 m || Number of Control Stops | 9 || Number of Driver Swap Stops | 2 || Number of Stop Lights | 39 || Number of Stop Signs | 0 || Number of Flashing Yellows | 1 || Finish Line Speed Limit | 60 kph || Longitudinal Distance-Weighted Average $g$ | 9.7891 || Uphill Work-Weighted Average $g$ | 9.7885 || Downhill Work-Weighted Average $g$ | 9.7890 || Minimum Possible Legal Race Time ($a = \infty$) | 93200.26s = 25:53:20.26 || Minimum Reasonable Legal Race Time ($a = 0.3g$) | TBD || Minimum Reasonable Race Time ($a = 0.3g$, 2kph speed) | TBD |## Geographical AnalysisFirst, we analyze total distances:```{python}race_distance = route[route['segment_type'] =='race']['distance'].sum()marshalling_distance = route[route['segment_type'] =='marshaling']['distance'].sum()total_distance = race_distance + marshalling_distanceprint(f'Race Distance: {race_distance} m')print(f'Marshaling Distance: {marshalling_distance} m')print(f'Total Distance: {total_distance} m')```Next, we analyze elevation changes:```{python}# use grade data, not elevation data, since its more reliablerace_route['elevation_change'] = race_route['grade'] * race_route['distance']net_elevation_change = race_route['elevation_change'].sum()total_uphill_travel = race_route[race_route['grade'] >0]['elevation_change'].sum()total_downhill_travel = race_route[race_route['grade'] <0]['elevation_change'].sum()print(f'Net Elevation Change: {net_elevation_change} m')print(f'Total Uphill Travel: {total_uphill_travel} m')print(f'Total Downhill Travel: {total_downhill_travel} m')plt.plot(route['distance'].cumsum() /1000, route['elevation'])plt.xlabel('Distance Traveled (km)')plt.ylabel('Elevation (m)')plt.title('Elevation vs Distance')plt.xlim([0, total_distance /1000])plt.ylim([0, 800])# annotate peak 
elevationpeak_elevation = route['elevation'].max()peak_elevation_distance = route['distance'].cumsum()[route['elevation'].idxmax()] /1000add_annotation(f'Peak Elevation: {round(peak_elevation,1)} m\nat {round(peak_elevation_distance)} km', peak_elevation_distance, peak_elevation, x_offset=20, y_offset=0, ha='left', va='center')plt.show()```We should also plot elevation when accounting for the curvature of the Earth.## Features AnalysisWe count the number of stop lights, stop signs, and flashing yellows:```{python}# stop light if segment_end_condition is traffic_lightnum_stop_lights = race_route[race_route['segment_end_condition'] =='traffic_light'].shape[0]num_stop_signs = race_route[race_route['segment_end_condition'] =='red_sign'].shape[0]num_yield_signs = race_route[race_route['segment_end_condition'] =='yield_sign_or_blinking_yellow'].shape[0]print(f'Number of Stop Lights: {num_stop_lights}')print(f'Number of Stop Signs: {num_stop_signs}')print(f'Number of Flashing Yellows: {num_yield_signs}')```## Gravity AnalysisFirst, plot gravity over distance.```{python}plt.plot(route['distance'].cumsum() /1000, route['gravity'])plt.xlabel('Distance Traveled (km)')plt.ylabel('Acceleration due to Gravity (m/s/s)')plt.title('Acceleration due to Gravity vs Distance')plt.xlim([0, total_distance /1000])set_xticks_race_distance()plt.ylim([9.782, 9.798])plt.show()```Now, take weighted averages of gravity:```{python}distance_weighted_avg_g = (route['gravity'] * route['distance']).sum() / route['distance'].sum()# uphill travel is distance traveled uphill per segmentuphill_travel = route['grade'] * route['distance'] * (route['grade'] >0)uphill_work_weighted_avg_g = (route['gravity'] * uphill_travel).sum() / uphill_travel.sum()# downhill travel is distance traveled downhill per segmentdownhill_travel = route['grade'] * route['distance'] * (route['grade'] <0)downhill_work_weighted_avg_g = (route['gravity'] * downhill_travel).sum() / downhill_travel.sum()print(f'Distance-Weighted 
Average g: {distance_weighted_avg_g}')print(f'Uphill Work-Weighted Average g: {uphill_work_weighted_avg_g}')print(f'Downhill Work-Weighted Average g: {downhill_work_weighted_avg_g}')```## Hill-Climbing AnalysisPlot a distribution of grade weighted by distance:```{python}plt.hist(route['grade'] *100, bins=100, weights=route['distance'], density=True)plt.xlabel('Grade (%)')plt.ylabel('Density')plt.title('Distance-Weighted Grade Distribution')plt.xlim([-6,6])plt.show()```Plot a distribution of grade weighted by gravity times distance:```{python}plt.hist(route['grade'] *100, bins=100, weights=route['gravity'] * route['distance'], density=True)plt.xlabel('Grade (%)')plt.ylabel('Gravity * Distance')plt.title('Gravity-times-Distance-Weighted Grade Distribution')plt.xlim([-6,6])plt.show()```Determine the minimum and maximum grades:```{python}min_grade = route['grade'].min()max_grade = route['grade'].max()print(f'Min Grade: {min_grade}')print(f'Max Grade: {max_grade}')``````{python}largest_incline_left = []distance_left = []route['normalized_grade'] = route['grade'] * route['gravity'] / distance_weighted_avg_gend_of_race = route[route['segment_end_condition'] =='endOfRace'].index[0]for i inrange(len(route)):if i < end_of_race: largest_incline_left.append(max(max(route['grade'][i:end_of_race] *100),0)) distance_left.append(race_distance - route['distance'][:i].sum())else: largest_incline_left.append(max(max(route['grade'][i:] *100),0)) distance_left.append(race_distance - route['distance'][:i].sum())largest_incline_left_scatter = largest_incline_left.copy()distance_left_scatter = distance_left.copy()for i inrange(len(largest_incline_left) -1):if largest_incline_left_scatter[i] == largest_incline_left_scatter[i +1]: largest_incline_left_scatter[i] =None distance_left_scatter[i] =Nonedistance_left_other_scatter = []grade_other_scatter = []for i inrange(len(route)):if route['grade'][i] >0: distance_left_other_scatter.append(total_distance - route['distance'][:i].sum()) 
grade_other_scatter.append(route['grade'][i] *100)i =0while i <len(distance_left_other_scatter) -1:ifabs(distance_left_other_scatter[i +1] - distance_left_other_scatter[i]) <20000:# remove less steep pointif grade_other_scatter[i] < grade_other_scatter[i +1]:del distance_left_other_scatter[i]del grade_other_scatter[i]else:del distance_left_other_scatter[i +1]del grade_other_scatter[i +1] i -=1 i +=1# remove all points that overlap with the steepest hillsii = [i for i inrange(len(distance_left_other_scatter)) if distance_left_other_scatter[i] in distance_left_scatter]for i in ii: distance_left_other_scatter[i] =None grade_other_scatter[i] =Nonelargest_incline_left_scatter = [x for x in largest_incline_left_scatter if x isnotNone]distance_left_scatter = [x for x in distance_left_scatter if x isnotNone]grade_other_scatter = [x for x in grade_other_scatter if x isnotNone]distance_left_other_scatter = [x for x in distance_left_other_scatter if x isnotNone]kk = np.where(np.array(distance_left) >0)[0][-1]jj = np.where(np.array(distance_left_scatter) >0)[0][-1]plt.plot(np.array(distance_left[:kk]) /1000, largest_incline_left[:kk], color ='#00274C')plt.scatter(np.array(distance_left_other_scatter) /1000, grade_other_scatter, color='#989C97', label='Other Steep Hills', facecolors='none')plt.scatter(np.array(distance_left_scatter[:jj]) /1000, largest_incline_left_scatter[:jj], label='Steepest Hills', color ='#00274C')plt.xlim([0-100, total_distance /1000])set_xticks_race_distance(remaining=True)plt.xlabel('Distance Left [km]')plt.ylabel('Steepest (Normalized) Grade Remaining [%]')plt.ylim([0, 6])plt.figtext(0.11, 0.01, f'Normalized to g={round(distance_weighted_avg_g,4)}', horizontalalignment='left', verticalalignment='bottom', fontsize=8)plt.title('WSC\'s Steepest Hills')route_index = route.index[abs(distance_left_scatter[0] - (race_distance - route['distance'].cumsum())) <0.5]add_annotation(f'{round(largest_incline_left_scatter[0], 2)}% 
grade\n{round(distance_left_scatter[0] /1000)} km left\n{round(route["distance"][route_index].values[0], 1)} m long', distance_left_scatter[0] /1000, largest_incline_left_scatter[0], x_offset=30, y_offset=0, ha='left', va='center')route_index = route.index[abs(distance_left_scatter[1] - (race_distance - route['distance'].cumsum())) <0.5]add_annotation(f'{round(largest_incline_left_scatter[1], 2)}% grade\n{round(distance_left_scatter[1] /1000)} km left\n{round(route["distance"][route_index].values[0], 1)} m long', distance_left_scatter[1] /1000, largest_incline_left_scatter[1], x_offset=10, y_offset=30, ha='left', va='center')route_index = route.index[abs(distance_left_scatter[2] - (race_distance - route['distance'].cumsum())) <0.5]add_annotation(f'{round(largest_incline_left_scatter[2], 2)}% grade\n{round(distance_left_scatter[2] /1000)} km left\n{round(route["distance"][route_index].values[0], 1)} m long', distance_left_scatter[2] /1000, largest_incline_left_scatter[2], x_offset=20, y_offset=30, ha='left', va='center')route_index = route.index[abs(distance_left_scatter[3] - (race_distance - route['distance'].cumsum())) <0.5]add_annotation(f'{round(largest_incline_left_scatter[3], 2)}% grade\n{round(distance_left_scatter[3] /1000)} km left\n{round(route["distance"][route_index].values[0], 1)} m long', distance_left_scatter[3] /1000, largest_incline_left_scatter[3], x_offset=40, y_offset=0, ha='left', va='bottom')for i inrange(3): lat = route['start_latitude'][abs(distance_left_scatter[i] - (race_distance - route['distance'].cumsum())) <0.5].values[0] lon = route['start_longitude'][abs(distance_left_scatter[i] - (race_distance - route['distance'].cumsum())) <0.5].values[0]print(f'https://www.google.com/maps/?q={lat},{lon}')# do a mark inset for last 50 kmfinal_stretch =0kk = np.where(np.array(distance_left) < final_stretch)[0][0]ii = np.where(np.array(distance_left_scatter) < final_stretch)[0][0]inset_ax = plt.axes([1, 0.2, 0.25, 
0.25])plt.plot(np.array(distance_left[kk:]), largest_incline_left[kk:], color ='#00274C')plt.scatter(np.array(distance_left_scatter[ii:]), largest_incline_left_scatter[ii:], color ='#00274C')inset_ax.set_xticks([-marshalling_distance, 0], [f'Finish', f'Marshal Pt.\n({round(marshalling_distance)} m)'])plt.gca().invert_xaxis()route_index = route.index[abs(distance_left_scatter[-4] - (race_distance - route['distance'].cumsum())) <0.01]add_annotation(f'{round(largest_incline_left_scatter[-4], 2)}% grade\n{round(distance_left_scatter[-4] /1000)} km left\n{round(route["distance"][route_index].values[0], 1)} m long', distance_left_scatter[-4] /1000, largest_incline_left_scatter[-4], x_offset=-20, y_offset=-20, ha='left', va='center')# set y limitsplt.ylim([0, 6])plt.title('Hills in Marshalling')plt.show()```## Velocity AnalysisFirst, plot speed limit vs distance.```{python}plt.plot(route['distance'].cumsum() /1000, route['speed_limit'] *3.6)plt.xlabel('Distance Traveled (km)')plt.ylabel('Speed Limit (kph)')plt.title('Speed Limit vs Distance')plt.show()```Second, plot distance-weighted speed limit distribution:```{python}# write speed_limit distribution to integersplt.hist((route['speed_limit'] *3.6).astype(int), bins=101, weights=route['distance'], density=True)plt.gca().yaxis.set_major_formatter(mpl.ticker.PercentFormatter(xmax=1))plt.xlabel('Speed Limit (kph)')plt.ylabel('Density')plt.title('Distance-Weighted Speed Limit Distribution')plt.xlim([20, 140])plt.ylim([0,0.6])plt.show()```Now, we calculate the minimum reasonable time to complete this, assuming a 0.3g acceleration and deceleration. 
We also stop at stop signs and lights.```{python}CONTROL_STOP_RULES_DURATION =30*60CONTROL_STOP_TRUE_DURATION = CONTROL_STOP_RULES_DURATION +90DRIVER_SWAPS_RULES_DURATION =5*60DRIVER_SWAPS_TRUE_DURATION = DRIVER_SWAPS_RULES_DURATION +15RED_SIGN_DURATION =15TRAFFIC_LIGHT_DURATION =60YIELD_SIGN_DURATION =10ACCELERATION_TIME_RESOLUTION =0.1def generate_speed_profile(route, target_speed_kph, acceleration_mpsps=0.5, deceleration_mpsps=0.5, acceptable_speeding_kph=0.0, hot_lap_delay=0.0):# iterate through the route in both directions to determine what speed we should go# never exceed the speed limit + acceptable speeding# stop for all stop signs, stop lights, and flashing yellows# stop for control stops and driver swaps# stop at end of day (after 9, 18, 27, etc...)# finish end at 60 kph + acceptable speeding# start from 0 kph target_v = target_speed_kph /3.6 speeding = acceptable_speeding_kph /3.6 a_target = acceleration_mpsps da_target = deceleration_mpsps# create two profiles, one for driving forward, one for driving backward# each profile has distance, time, speed, and acceleration# we will sequentially add points to each profile profile = pd.DataFrame(columns=['distance', 'time', 'speed', 'target_speed', 'acceleration', 'process'])# we also create a profile for speed_limit speed_limit_profile = pd.DataFrame(columns=['distance', 'speed_limit'])# we also create a list of points where we have to stop control_stops = [] driver_swaps = [] red_sign_stops = [] traffic_light_stops = [] flashing_yellow_stops = [] eod_stop_times =9*3600* np.arange(1, 10)# populate speed_limit_profile with speed limits# make it only places where the speed limit changesfor i inrange(1,len(route)):if route['speed_limit'][i] != route['speed_limit'][i -1]: speed_limit_profile = pd.concat([speed_limit_profile, pd.DataFrame({'distance': route['distance'].cumsum()[i], 'speed_limit': route['speed_limit'][i]}, index=[0])], ignore_index=True)# add 40 to very start speed_limit_profile = 
pd.concat([pd.DataFrame({'distance': 0, 'speed_limit': 40/3.6}, index=[0]), speed_limit_profile], ignore_index=True) speed_limit_profile.sort_values('distance', inplace=True)# populate stop oointsfor i inrange(len(route)):if route['segment_end_condition'][i] =='control_stop': control_stops.append(route['distance'].cumsum()[i])elif route['segment_end_condition'][i] =='driver_swap': driver_swaps.append(route['distance'].cumsum()[i])elif route['segment_end_condition'][i] =='red_sign': red_sign_stops.append(route['distance'].cumsum()[i])elif route['segment_end_condition'][i] =='traffic_light': traffic_light_stops.append(route['distance'].cumsum()[i])elif route['segment_end_condition'][i] =='yield_sign_or_blinking_yellow': flashing_yellow_stops.append(route['distance'].cumsum()[i])# start from the beginning d_traveled =0# meters v =0# speed t = hot_lap_delay *3600# seconds a =0# acceleration at_stop =False stop_type =None eod_locations = []while d_traveled < race_distance:# we are either accelerating or cruising# we do calcs until next steps:# 1. we accelerate to the target speed / speed limit# 2. we reach a change in speed limit# 3. 
we reach a spot where we have to stop speed_limit = speed_limit_profile['speed_limit'][speed_limit_profile['distance'] <= d_traveled].iloc[-1] next_speed_limit_change_d = speed_limit_profile['distance'][speed_limit_profile['distance'] > d_traveled].iloc[0] next_control_stop_d =min([x for x in control_stops if x > d_traveled], default=int(1e15)) next_driver_swap_d =min([x for x in driver_swaps if x > d_traveled], default=int(1e15)) next_red_sign_d =min([x for x in red_sign_stops if x > d_traveled], default=int(1e15)) next_traffic_light_d =min([x for x in traffic_light_stops if x > d_traveled], default=int(1e15)) next_flashing_yellow_d =min([x for x in flashing_yellow_stops if x > d_traveled], default=int(1e15))# determine next stop and what type it is next_stop_d =min(next_control_stop_d, next_driver_swap_d, next_red_sign_d, next_traffic_light_d, next_flashing_yellow_d)if next_stop_d == next_control_stop_d: stop_type ='control_stop'elif next_stop_d == next_driver_swap_d: stop_type ='driver_swap'elif next_stop_d == next_red_sign_d: stop_type ='red_sign'elif next_stop_d == next_traffic_light_d: stop_type ='traffic_light'elif next_stop_d == next_flashing_yellow_d: stop_type ='flashing_yellow' next_eod_stop_time =min([x for x in eod_stop_times if x > t], default=int(1e15)) actual_target_v =min(target_v, speed_limit + speeding)ifabs(v - actual_target_v) <0.1:# keep cruising until next speed limit change, stop, or end profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': v, 'target_speed': actual_target_v, 'acceleration': 0, 'process': 'c1'}, index=[0])], ignore_index=True) time_to_next_stop = (next_stop_d - d_traveled) / v time_to_next_speed_limit_change = (next_speed_limit_change_d - d_traveled) / v time_to_end_of_day = next_eod_stop_time - t time_to_end = (race_distance - d_traveled) / v t_cruise =min(time_to_next_stop, time_to_next_speed_limit_change, time_to_end_of_day, time_to_end) next_reason =Noneif t_cruise == time_to_next_stop: 
next_reason ='stop'elif t_cruise == time_to_next_speed_limit_change: next_reason ='speed_limit_change'elif t_cruise == time_to_end_of_day: next_reason ='end_of_day'elif t_cruise == time_to_end: next_reason ='end' d_traveled += v * t_cruise t += t_cruise profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': v, 'target_speed': actual_target_v, 'acceleration': 0, 'process': 'c2'}, index=[0])], ignore_index=True)if next_reason =='stop': v =0# add corresponding timeif stop_type =='control_stop': t += CONTROL_STOP_TRUE_DURATIONelif stop_type =='driver_swap': t += DRIVER_SWAPS_TRUE_DURATIONelif stop_type =='red_sign': t += RED_SIGN_DURATIONelif stop_type =='traffic_light': t += TRAFFIC_LIGHT_DURATIONelif stop_type =='flashing_yellow': t += YIELD_SIGN_DURATIONif next_reason =='end_of_day': v =0 eod_locations.append(d_traveled)if next_reason =='end': v =60/3.6 profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': v, 'target_speed': actual_target_v, 'acceleration': a_target, 'process': 'c3'}, index=[0])], ignore_index=True)elif v < actual_target_v:# we need to accelerate, let's start by accelerating up to speed a = a_target t_to_target = (actual_target_v - v) / a d_acc = v * t_to_target +0.5* a * t_to_target **2 acc_to =min(next_stop_d, next_speed_limit_change_d, race_distance)if d_traveled + d_acc > acc_to: # accelerating past our next stop :( profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': v, 'target_speed': actual_target_v, 'acceleration': a, 'process': 'atp1'}, index=[0])], ignore_index=True)# we will reach the next point before we reach the target speed# find the time, speed at which we reach the next point while accelerating delta_t = (-v + np.sqrt(v **2+2* (acc_to - d_traveled))) / a t += delta_t v += a * delta_t d_traveled = acc_to at_stop =True profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': v, 'target_speed': 
actual_target_v, 'acceleration': 0, 'process': 'atp2'}, index=[0])], ignore_index=True)else:# we're good to reach the target speed profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': v, 'target_speed': actual_target_v, 'acceleration': a, 'process': 'af1'}, index=[0])], ignore_index=True) t += t_to_target v = actual_target_v a =0 d_traveled += d_acc stop_type =None profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': v, 'target_speed': actual_target_v, 'acceleration': 0, 'process': 'af2'}, index=[0])], ignore_index=True)elif v > actual_target_v: profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': v, 'target_speed': actual_target_v, 'acceleration': 0, 'process': 'd1'}, index=[0])], ignore_index=True) profile = pd.concat([profile, pd.DataFrame({'distance': d_traveled, 'time': t, 'speed': actual_target_v, 'target_speed': actual_target_v, 'acceleration': 0, 'process': 'd2'}, index=[0])], ignore_index=True) v = actual_target_v# print number of stops by category# print(f'Control Stops: {len(control_stops)}')# print(f'Driver Swaps: {len(driver_swaps)}')# print(f'Red Signs: {len(red_sign_stops)}')# print(f'Traffic Lights: {len(traffic_light_stops)}')# print(f'Flashing Yellows: {len(flashing_yellow_stops)}')# print(f'End of Days: {len(eod_locations)}')# convert speedreturn profile, speed_limit_profile, eod_locations```Now we want to plot race time as a function of target speed.```{python}def get_true_race_time(route, target_speed_kph, acceleration_mpsps=1, deceleration_mpsps=0.5, acceptable_speeding_kph=0, hot_lap_delay=0): profile, speed_limit_profile, eod_locations = generate_speed_profile(route, target_speed_kph, acceleration_mpsps, deceleration_mpsps, acceptable_speeding_kph, hot_lap_delay)return profile['time'].iloc[-1] - (30*9+2*10) *60- hot_lap_delay *3600def get_official_race_time(route, target_speed_kph, acceleration_mpsps=1, deceleration_mpsps=0.5, 
acceptable_speeding_kph=0, hot_lap_delay=0, wasted_time=0): true_race_time = get_true_race_time(route, target_speed_kph, acceleration_mpsps, deceleration_mpsps, acceptable_speeding_kph, hot_lap_delay)return true_race_time + (hot_lap_delay + wasted_time) *3600, true_race_time# plot true race time vs speedspeeds = np.linspace(60, 130, 71)times = np.array([get_official_race_time(route, speed, 1.5, 0.5, 0, HOT_LAP_DELAY_TIME, WASTED_TIME) for speed in speeds])official_race_times = times[:, 0]true_race_times = times[:, 1]# calculate what speed is needed to finish in 4 days = 31.33 hours# use interpolationf = interp1d(official_race_times, speeds)target_speed =float(f(31*3600+19*60+59))g = interp1d(speeds, official_race_times)time_at_110 =float(g(110))# plot the resultsplt.xlabel('Target Speed [kph]')plt.xlim([60, 130])plt.ylabel('Official Race Time [h]')plt.ylim([25+1/3, 49+1/3])set_yticks_rt_days()plt.plot([60, target_speed, target_speed], [31+1/3, 31+1/3, 0], '--', color=MICHIGAN_MAIZE)plt.plot([110, 110], [0, time_at_110 /3600], '--', color=ANGELL_HALL_ASH)plt.plot(speeds, official_race_times /3600, label='Official')plt.plot(speeds, true_race_times /3600, label='True', color=ROSS_ORANGE)plt.title('Official Race Time vs. Target Speed')add_footnote(f'Given hot lap delay plus wasted time of {round(60* (HOT_LAP_DELAY_TIME + WASTED_TIME))} min.')add_annotation(f'To finish in 4 days,\ntarget {round(target_speed, 1)} kph.', target_speed, 31+1/3, x_offset=0, y_offset=20, ha='left', va='bottom')add_annotation(f'S. Australia Speed Limit',110, time_at_110 /3600, x_offset=0, y_offset=20, ha='left', va='bottom')plt.legend()fit_to_gslide()target_speed_df = pd.DataFrame({'target_speed': speeds, 'true_race_time': true_race_times, 'official_race_time': official_race_times})# save with 600 dpiplt.savefig('official_race_time_vs_target_speed', dpi=600)```We also want to calculate how much race time we could save by uniformly speeding:```{python}# Calculate benefits of speeding ... 
at target speed, how much time can we save by speedingspeeding = [0, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4, 4.5, 5]times = np.array([get_true_race_time(route, target_speed, 1.5, 0.5, speed, HOT_LAP_DELAY_TIME) for speed in speeding])times = times[0] - times# plot resultsplt.xlabel('Speeding [kph]')plt.ylabel('Race Time Saved [min]')plt.plot(speeding, times /60)# do a linear fit (note: linregress fits a free intercept; it is not forced to 0)slope, intercept, r_value, p_value, std_err = linregress(speeding, times)plt.plot(speeding, [x * slope /60+ intercept /60for x in speeding], '--', color=MICHIGAN_MAIZE)plt.figtext(0.11, -0.03, f'*At target speed of {round(target_speed,2)} kph. Given hot lap delay plus wasted time of {round(HOT_LAP_DELAY_TIME + WASTED_TIME, 2)} h.', horizontalalignment='left', verticalalignment='bottom', fontsize=8)plt.title('Time Saved by Speeding')# plt.annotate(f'~{round(slope)} s/kph',# (2.5,4),# textcoords="offset points", xytext=(30, -30),# ha='left', va='bottom',# bbox=dict(fc="white", ec="black", lw=0.5),# arrowprops=dict(arrowstyle='->'),# fontsize=8)add_annotation(f'~{round(slope)} s/kph',2.5, times[-6] /60, x_offset=20, y_offset=-20, ha='left', va='bottom')plt.ylim([0, 10])plt.xlim([0, 5])```We also want to study decelerations:```{python}def characterize_decelerations(route, target_speed_kph, acceleration_mpsps=1, deceleration_mpsps=0.5, acceptable_speeding_kph=0, hot_lap_delay=0, wasted_time=0): speed_profile, _, _ = generate_speed_profile(route, target_speed_kph, acceleration_mpsps, deceleration_mpsps, acceptable_speeding_kph, hot_lap_delay) squared_speed_sum =0 num_decelerations =1for i inrange(1, len(speed_profile)):if speed_profile['speed'][i] < speed_profile['speed'][i -1]: squared_speed_sum += (speed_profile['speed'][i -1] **2- speed_profile['speed'][i] **2) num_decelerations +=1return squared_speed_sum, num_decelerations# get squared_speed_sums and num_decelerationsdecelerations = np.array([characterize_decelerations(route, speed, 1.5, 0.5, 0, HOT_LAP_DELAY_TIME, 
WASTED_TIME) for speed in SPEED_AXIS])squared_speed_sums = np.array([x[0] for x in decelerations])num_decelerations = np.array([x[1] for x in decelerations])# add square speed sums and num decelerations to target_speed_dftarget_speed_df['squared_speed_sums'] = squared_speed_sumstarget_speed_df['num_decelerations'] = num_decelerationsplt.plot(SPEED_AXIS, squared_speed_sums)plt.xlabel('Target Speed [kph]')plt.ylabel('Sum of Squared Speeds [(m/s)^2]')plt.title('Deceleration vs. Target Speed')add_footnote(f'Given hot lap delay plus wasted time of {round(HOT_LAP_DELAY_TIME + WASTED_TIME, 2)} h.')# plot polynomial regressionp = Polynomial.fit(SPEED_AXIS, squared_speed_sums, 2)plt.plot(SPEED_AXIS, p(SPEED_AXIS), '--', color=MICHIGAN_MAIZE)add_annotation(f'{round(p.coef[2])}v^2 + {round(p.coef[1])}v + {round(p.coef[0])}',100, 30000, x_offset=30, y_offset=-30, ha='left', va='bottom')plt.xlim([60, 130])plt.ylim([10000, 50000])plt.show()plt.plot(SPEED_AXIS, num_decelerations)plt.xlabel('Target Speed [kph]')plt.ylabel('Number of Decelerations')plt.title('Number of Decelerations vs. Target Speed')plt.figtext(0.01, -0.03, f'*Given hot lap delay plus wasted time of {round(HOT_LAP_DELAY_TIME + WASTED_TIME, 2)} h.', horizontalalignment='left', verticalalignment='bottom', fontsize=8)plt.xlim([70, 130])plt.ylim([60, 110])plt.show()# plot the above but vs true race timex_axis = target_speed_df['true_race_time'] /3600plt.plot(x_axis, target_speed_df['squared_speed_sums'])plt.xlim([25, 50])plt.xlabel('True Race Time [h]')plt.ylabel('Sum of Squared Speeds [(m/s)^2]')plt.title('Sum of Squared Speeds vs. 
Race Time')add_footnote(f'Given hot lap delay plus wasted time of {round(HOT_LAP_DELAY_TIME + WASTED_TIME, 2)} h.')# fit a polynomial only to race times between 27 h and 40 htarget_speed_df_fit = target_speed_df[(target_speed_df['true_race_time'] /3600>27) & (target_speed_df['true_race_time'] /3600<40)]# polyfit with numpyp = np.polyfit(target_speed_df_fit['true_race_time'] /3600, target_speed_df_fit['squared_speed_sums'], 2)plt.plot(target_speed_df_fit['true_race_time'] /3600, np.polyval(p, target_speed_df_fit['true_race_time'] /3600), '--', color=MICHIGAN_MAIZE)add_annotation(f'{round(p[0])}t^2 + {round(p[1])}t + {round(p[2])}',34, np.polyval(p,34), x_offset=30, y_offset=30, ha='left', va='bottom')plt.show()```We also want to calculate the RMS velocity given a target speed:```{python}def get_rms_velocity(route, target_speed_kph, acceleration_mpsps=1, deceleration_mpsps=0.5, acceptable_speeding_kph=0, hot_lap_delay=0, wasted_time=0): speed_profile, _, _ = generate_speed_profile(route, target_speed_kph, acceleration_mpsps, deceleration_mpsps, acceptable_speeding_kph, hot_lap_delay)# weight by distance, make sure to do the weighting correctly# first, calculate the distance between each point speed_profile['distance_step'] = speed_profile['distance'].shift(-1) - speed_profile['distance']# then, calculate the squared velocity between each point speed_profile['squared_velocity'] = speed_profile['speed'] **2# then, calculate the RMS velocity for the whole profile rms_velocity = np.sqrt((speed_profile['squared_velocity'] * speed_profile['distance_step']).sum() / speed_profile['distance_step'].sum())return rms_velocity# plot rms velocity vs target speedrms_velocities = np.array([get_rms_velocity(route, speed, 1.5, 0.5, 0, HOT_LAP_DELAY_TIME, WASTED_TIME) for speed in SPEED_AXIS])plt.plot(SPEED_AXIS, rms_velocities *3.6)plt.xlabel('Target Speed [kph]')plt.ylabel('RMS Velocity [kph]')plt.title('RMS Velocity vs. 
Target Speed')add_footnote(f'Given hot lap delay plus wasted time of {round(HOT_LAP_DELAY_TIME + WASTED_TIME, 2)} h.')plt.xlim([60, 130])plt.ylim([60, 130])plt.show()# add rms velocities to dftarget_speed_df['rms_velocity'] = rms_velocities``````{python}# fp, slp, eod_locations = generate_speed_profile(route, 105, 1, 0.5, 0)# slp2 = slp.copy()# slp2['distance'] = slp['distance'].shift(-1) - 0.001# slp = pd.concat([slp, slp2])# slp = slp.sort_values('distance')# plt.plot(fp['distance'] / 1000, fp['speed'] * 3.6)# plt.plot(slp['distance'] / 1000, slp['speed_limit'] * 3.6, alpha=0.5)# plt.scatter([x / 1000 for x in eod_locations], [0] * len(eod_locations), color='red', label='Stops')# # plot acceleration on other y axis# plt.ylim([0, 140])# ax2 = plt.gca().twinx()# plt.show()# def calculate_race_time(route, target_speed_kph, acceleration_mpsps=0.5, acceptable_speeding_kph=0.0):# target_speed = target_speed_kph / 3.6# acceptable_speeding = acceptable_speeding_kph / 3.6# a = acceleration_mpsps# dist_step = 100# num_steps = int(route['distance'].sum() / dist_step + 0.5)# distance_profile = np.linspace(0, route['distance'].sum(), num_steps)# speed_limit_profile = np.zeros(num_steps)# effective_speed_profile = np.zeros(num_steps)# # for each step go through, and calculate the effective speed limit# # this is the maximum speed that can be achieved at that point# v = 0# d = 0# for i in range(num_steps):# d = i * dist_step# if d < 10:# speed_limit = 40 / 3.6# else:# speed_limit = route['speed_limit'][route['distance'].cumsum() <= d].iloc[-1]# speed_limit_profile[i] = speed_limit# # if we're already going the right speed, don't accelerate# max_speed = min(target_speed, speed_limit + acceptable_speeding)# if abs(v - max_speed) < 0.01:# v = max_speed# effective_speed_profile[i] = max_speed# continue# # if we're going too slow, accelerate to the target speed# t = - v / a + np.sqrt(v ** 2 / a ** 2 + 2 * dist_step / a)# if v + a * t > max_speed:# # find t such that we reach this 
point# t_to_max_speed = (max_speed - v) / a# d_acc = v * t_to_max_speed + 0.5 * a * t_to_max_speed ** 2# t = t_to_max_speed + (dist_step - d_acc) / max_speed# v = max_speed# else:# v = v + a * t# effective_speed_profile[i] = v# # do same in reverse# v = speed_limit_profile[-1]# d = race_distance# for i in range(num_steps - 1, -1, -1):# speed_limit = speed_limit_profile[i]# d = race_distance - i * dist_step# # if we're already going the right speed, don't accelerate# max_speed = min(target_speed, speed_limit + acceptable_speeding)# # if segment end condition is 'control_stop' or 'driver_swap'# if route['segment_end_condition']# max_speed = 0# if v == max_speed:# effective_speed_profile[i] = min(effective_speed_profile[i], v)# continue# # if we're going too slow, accelerate to the target speed# t = - v / a + np.sqrt(v ** 2 / a ** 2 + 2 * dist_step / a)# if v + a * t > max_speed:# # find t such that we reach this point# t_to_max_speed = (max_speed - v) / a# d_acc = v * t_to_max_speed + 0.5 * a * t_to_max_speed ** 2# t = t_to_max_speed + (dist_step - d_acc) / max_speed# v = max_speed# else:# v = v + a * t# effective_speed_profile[i] = min(effective_speed_profile[i], v)# return distance_profile, effective_speed_profile, speed_limit_profile# dp, esp, slp = calculate_race_time(race_route, 110, 0.5, 0.0)# plt.plot(dp / 1000, slp)# plt.plot(dp / 1000, esp)# time = 0# speed = 0# distance = 0# segment_index = 0# route['distance_traveled'] = route['distance'].cumsum()# # TODO: account for stop signs, stop lights, and flashing yellows# # TODO: account for slowdowns for control stops, driver swaps, etc.# while(distance < route['distance'].sum()):# # get the current segment# while(route.iloc[segment_index]['distance_traveled'] < distance):# segment_index += 1# segment = route.iloc[segment_index]# segment_distance = segment['distance']# segment_distance_traveled = segment['distance_traveled']# limited_target_speed = min(target_speed, segment['speed_limit'] + acceptable_speeding)# 
# TODO: also have to take into account the need to decelerate for the next segment# # check if accelerating, decelerating, or cruising# if speed == limited_target_speed:# acceleration_time = 0# acceleration_distance = 0# time += (segment_distance_traveled - distance) / speed# distance = segment_distance_traveled + 0.001# elif speed < limited_target_speed:# acceleration_time = (limited_target_speed - speed) / acceleration# acceleration_distance = speed * acceleration_time + 0.5 * acceleration * acceleration_time ** 2# time += acceleration_time# distance += acceleration_distance# speed = limited_target_speed# # TODO: account for segment cutoffs# else:# acceleration_time = (speed - limited_target_speed) / acceleration# acceleration_distance = speed * acceleration_time - 0.5 * acceleration * acceleration_time ** 2# time += acceleration_time# distance += acceleration_distance# speed = limited_target_speed# # TODO: account for segment cutoffs# return time# print(calculate_race_time(race_route, 1000, 0.3, 0.0))```# Weather Analysis# Drive Cycle Analysis- Possible Speed Profiles- True Race Time vs. Speed (given Acceleration) ==> Target Speed- True Race Time vs. Willingness to Speed @ Target Speed- Number of Accelerations- Number of Decelerations- Irradiance Map (vs. 
Total Driver Ingress Time)- Pointing Times- Wind Map, Yaw Distribution, Apparent Wind Speed Distribution- Temperature Map- Air Density Map- v^2 d Weighted Yaw Distribution- CdA and Array Power Level CurvesStops: Start, BOD, EOD, Control Points, Lights, Signs, Flashing Yellows## Load Weather DataFirst, we find the weather station for each segment of the route.```{python}# read in route# TODO(agoettsc): solarset should be a submodule or something like thatweather_stations = pd.read_csv("./data/australia_stations.csv")# determine the fractional weather station for each segment for interpolationseg_station_distances = pd.DataFrame(columns=[f"dist_station_{i}"for i in weather_stations["group"]], index=route.index)def haversine_distance(seg, station): seg_coords_rad = (np.deg2rad(seg["start_latitude"]), np.deg2rad(seg["start_longitude"])) station_coords_rad = (np.deg2rad(station["lat"]), np.deg2rad(station["lon"])) avg_lat = (seg_coords_rad[0] + station_coords_rad[0]) /2 r0 =6378137; r1 =6356752; a = (r0 **2) * np.cos(avg_lat); b = (r1 **2) * np.sin(avg_lat); c = r0 * np.cos(avg_lat); d = r1 * np.sin(avg_lat); R = np.sqrt((a **2+ b **2) / (c **2+ d **2)) haversine_output = np.sin((seg_coords_rad[0] - station_coords_rad[0]) /2) **2\+ np.cos(seg_coords_rad[0]) * np.cos(station_coords_rad[0]) \* np.sin((seg_coords_rad[1] - station_coords_rad[1]) /2) **2 arc_length =2* np.arcsin(np.sqrt(haversine_output))return R * arc_length /1000for _, weather_station in weather_stations.iterrows(): seg_station_distances[f"dist_station_{weather_station["group"]}"] = haversine_distance(route, weather_station)def get_seg_weather_station(seg): seg_distances = seg_station_distances.iloc[seg.name] closest_stations = np.array([seg_distances[f"dist_station_{weather_station["group"]}"] for _, weather_station in weather_stations.iterrows()]).argpartition(2)[:2] +1 weights = np.array([1/ (seg_distances[f"dist_station_{group_idx}"]) for group_idx in closest_stations]) station = np.sum(weights * 
closest_stations) / np.sum(weights)return stationroute["weather_station"] = route.apply(get_seg_weather_station, axis=1)```Then, we generate routes annotated with the weather the vehicle experiences for a given speed profile.```{python}WEATHER_FILES = glob.glob("data/weather/august/*.csv")WEATHER_FILES_MEDIAN = glob.glob("data/weather/august-median/*.csv")def parse_weather_file(weather_file_path): weather = pd.read_csv(weather_file_path)# weather_xy = weather[["weather_group", "period_time_unix"]].to_numpy() weather = weather.sort_values(by=["weather_group", "period_time_unix"]) weather_groups = weather["weather_group"].unique() weather_times = weather["period_time_unix"].unique() weather_xy = [ weather_groups, weather_times, ]# return weather["period_time_unix"].min(), weather["period_time_unix"].max(), {# "dhi": LinearNDInterpolator(weather_xy, weather["dhi"]),# "dni": LinearNDInterpolator(weather_xy, weather["dni"]),# "ghi": LinearNDInterpolator(weather_xy, weather["ghi"]),# "wind_velocity_10m_ns": LinearNDInterpolator(weather_xy, weather["wind_velocity_10m_ns"]),# "wind_velocity_10m_ew": LinearNDInterpolator(weather_xy, weather["wind_velocity_10m_ew"]),# "air_temp_2m": LinearNDInterpolator(weather_xy, weather["air_temp_2m"]),# "surface_pressure": LinearNDInterpolator(weather_xy, weather["surface_pressure"]),# "air_density": LinearNDInterpolator(weather_xy, weather["air_density"]),# }return weather["period_time_unix"].min(), weather["period_time_unix"].max(), {"dhi": RegularGridInterpolator(weather_xy, weather["dhi"].ravel().reshape(len(weather_groups), len(weather_times), order="C")),"dni": RegularGridInterpolator(weather_xy, weather["dni"].ravel().reshape(len(weather_groups), len(weather_times), order="C")),"ghi": RegularGridInterpolator(weather_xy, weather["ghi"].ravel().reshape(len(weather_groups), len(weather_times), order="C")),"wind_velocity_10m_ns": RegularGridInterpolator(weather_xy, weather["wind_velocity_10m_ns"].ravel().reshape(len(weather_groups), 
len(weather_times), order="C")),"wind_velocity_10m_ew": RegularGridInterpolator(weather_xy, weather["wind_velocity_10m_ew"].ravel().reshape(len(weather_groups), len(weather_times), order="C")),"air_temp_2m": RegularGridInterpolator(weather_xy, weather["air_temp_2m"].ravel().reshape(len(weather_groups), len(weather_times), order="C")),"surface_pressure": RegularGridInterpolator(weather_xy, weather["surface_pressure"].ravel().reshape(len(weather_groups), len(weather_times), order="C")),"air_density": RegularGridInterpolator(weather_xy, weather["air_density"].ravel().reshape(len(weather_groups), len(weather_times), order="C")), }weathers = [parse_weather_file(f) for f in WEATHER_FILES]```Then, we use a schedule and a speed profile to find the time that we are at each segment. Using that time, we look up weather for each segment.```{python}SCHEDULE_FILES = glob.glob("data/schedule/august/*.toml")AEROBODY_COP_Z =0.558SURFACE_ROUGHNESS =1e-3WIND_REFERENCE_HEIGHT =10wind_speed_multiplier = np.log(AEROBODY_COP_Z / SURFACE_ROUGHNESS) / np.log(WIND_REFERENCE_HEIGHT / SURFACE_ROUGHNESS)def parse_schedule_file(schedule_file_path):withopen(schedule_file_path, "rb") as f: schedule_toml = tomli.load(f)return {"day_starts": [pd.Timestamp(day_schedule["race_start"]) for day_schedule in schedule_toml["schedule"]],"day_ends": [pd.Timestamp(day_schedule["race_end"]) for day_schedule in schedule_toml["schedule"]], }def get_time_for_seg_or_nan(seg, profile, schedule): matching_profile_segs = profile[seg["cum_distance"] == profile["distance"]]iflen(matching_profile_segs) >0: profile_seg_time = matching_profile_segs.iloc[0]["time"] profile_seg_day =int(np.floor(profile_seg_time / (9*3600)))return pd.Timedelta(seconds=profile_seg_time - (9*3600* profile_seg_day)) + schedule["day_starts"][profile_seg_day]returnfloat('nan')def apply_weather_to_speed_profile(target_speed_kph, weather, schedule): res = route.copy() res = res[res["segment_type"] !="marshaling"]# first, we need to resolve the 
time at each segment# we assign speed based on the segment of the profile and time based on if we used that exact segment in our profile res["cum_distance"] = res["distance"].cumsum().shift(1) res.loc[0, "cum_distance"] =0# TODO(agoettsc): this is problematic, need to deal with accel# res["speed"] = res.apply(lambda seg: profile[profile["distance"] >= seg["cum_distance"]].iloc[0]["speed"], axis=1)# res.loc[0, "speed"] = profile.loc[1, "speed"]# res["localtime"] = res.apply(lambda seg: get_time_for_seg_or_nan(seg, profile, schedule), axis=1)# res["time_taken"] = res["distance"] / res["speed"] # # offset times based on previous segments to fill gaps# for i, seg in res.iterrows():# if pd.isna(seg["localtime"]):# prev_t = res.loc[i - 1, "localtime"]# next_t = prev_t + pd.Timedelta(seconds=res.loc[i - 1, "time_taken"])# for j, day_end in enumerate(schedule["day_ends"]):# if prev_t < day_end and next_t > day_end:# next_t = schedule["day_starts"][j + 1] + (next_t - day_end)# res.loc[i, "localtime"] = next_t res["speed"] = np.minimum(res["speed_limit"], target_speed_kph /3.6) res["time_taken"] = res["distance"] / res["speed"] res.loc[0, "localtime"] = schedule["day_starts"][0] + pd.Timedelta(hours=HOT_LAP_DELAY_TIME)# offset times based on previous segments to fill gapsfor i, seg in res.iterrows():if i ==0:continue prev_t = res.loc[i -1, "localtime"] next_t = prev_t + pd.Timedelta(seconds=res.loc[i -1, "time_taken"])if res.loc[i -1, "segment_end_condition"] =="control_stop": next_t += pd.Timedelta(minutes=30)for j, day_end inenumerate(schedule["day_ends"]):if prev_t < day_end and next_t > day_end: next_t = schedule["day_starts"][j +1] + (next_t - day_end) res.loc[i, "localtime"] = next_tfor k, spline in weather.items(): res[k] = res.apply(lambda seg: spline([seg["weather_station"], seg["localtime"].timestamp()])[0], axis=1)# now that we have time, we get solar position solpos_df = pvlib.solarposition.get_solarposition(res["localtime"], res["start_latitude"], 
res["start_longitude"], altitude=res["elevation"]) res["solar_zenith"] = np.deg2rad(solpos_df["apparent_zenith"].to_numpy()) res["solar_azimuth"] =-np.deg2rad(solpos_df["azimuth"].to_numpy()) res["vehicle_relative_azimuth"] = (res["heading"] + res["solar_azimuth"]) % (2* np.pi)# now we resolve the wind vector res["wind_speed"] = wind_speed_multiplier * np.sqrt(res["wind_velocity_10m_ns"] **2+ res["wind_velocity_10m_ew"] **2)# heading coordinate system is north at 0 degrees, cw positive, so east at 90 degrees# so, to correctly resolve an angle, east-west wind is the y value and north-south wind is the x value# offset by 180 because wind speed is positive when wind is from west/south res["wind_heading"] = np.atan2(-res["wind_velocity_10m_ew"], -res["wind_velocity_10m_ns"])# now, we have two polar vectors that we can add to get the world relative wind experienced by the vehicle r1 = res["speed"] r2 = res["wind_speed"] theta1 = res["heading"] theta2 = res["wind_heading"] res["apparent_wind_speed"] = np.sqrt(r1 **2+ r2 **2+2* r1 * np.cos(theta2 - theta1))# this would be theta_1 + atan2(...) 
but we want vehicle relative yaw, so theta_1 = heading will cancel out res["yaw"] = np.atan2(r2 * np.sin(theta2 - theta1), r1 + r2 * np.cos(theta2 - theta1))return resdef get_races_for_speed(speed):# speed_profile = generate_speed_profile(route, speed, hot_lap_delay=HOT_LAP_DELAY_TIME)[0] schedules = {re.match(r".*/Schedule(.*)\.toml", s)[1]: parse_schedule_file(s) for s in SCHEDULE_FILES} races = {}for k, schedule in schedules.items(): weather =next(w for min_t, max_t, w in weathers if min_t < pd.Timestamp.timestamp(schedule["day_starts"][0]) < max_t) races[k] = apply_weather_to_speed_profile(speed, weather, schedule)return racesraces = get_races_for_speed(target_speed)race_2022 = races["2022"]```Now that we have segment speeds, we plot speed and position over time```{python}fig, ax = plt.subplots()ax.plot(race_2022["localtime"], race_2022["speed"] *3.6)ax.set_title("Speed over time")ax.set_ylabel("Speed (kph)")ax.set_xlabel("Time")fit_to_gslide()``````{python}fig, ax = plt.subplots()ax.plot(race_2022["localtime"], race_2022["weather_station"])ax.set_title("Cumulative distance over time")ax.set_ylabel("Distance (km)")ax.set_xlabel("Time")fit_to_gslide()```## Solar Position and IrradianceFirst, we plot DNI, DHI, and GHI over time.```{python}fig, ax = plt.subplots()ax.plot(race_2022["localtime"], race_2022["dni"], label="dni")ax.plot(race_2022["localtime"], race_2022["dhi"], label="dhi")ax.plot(race_2022["localtime"], race_2022["ghi"], label="ghi")ax.set_title("Irradiance during 4 days in August 2022")ax.set_ylabel("Irradiance ($W/m^2$)")ax.set_xlabel("Time")fig.legend()fit_to_gslide()```Then, we plot the position of the sun relative to the vehicle over a 4 day race.```{python}fig, ax = plt.subplots(subplot_kw={'projection': 'polar'})ax.scatter(race_2022["vehicle_relative_azimuth"], race_2022["solar_zenith"] *180/ np.pi)ax.set_theta_zero_location('N')ax.set_rmax(90)ax.set_rticks([0, 30, 60, 90], labels=['0', '30', '60', '90'])ax.set_title("Vehicle relative solar 
position (October 2022)")fit_to_gslide()```As a KDE plot:```{python}fig, ax = plt.subplots(subplot_kw={'projection': 'polar'})kde = seaborn.kdeplot(x=race_2022["vehicle_relative_azimuth"], y=race_2022["solar_zenith"] *180/ np.pi, weights=race_2022["dni"] * race_2022["time_taken"], fill=True, cbar=True)# fig.colorbar(kde)ax.set_theta_zero_location('N')ax.set_rmax(90)ax.set_rticks([0, 30, 60, 90], labels=['0', '30', '60', '90'])ax.set_title("Vehicle relative solar position (October 2022 w/ 2007-2022 weather)")ax.set_xlabel("")ax.set_ylabel("")fit_to_gslide()```We also care about how often the sun is forward of horizontal:```{python}sun_forwards_of_of_horizontal_Jpersqm = (((race_2022["vehicle_relative_azimuth"] < (np.pi /2)) | (race_2022["vehicle_relative_azimuth"] > (3* np.pi /2))) * race_2022["dni"] * race_2022["time_taken"]).sum()sun_forwards_of_of_horizontal_Jpersqm_percent = (sun_forwards_of_of_horizontal_Jpersqm / (race_2022["dni"] * race_2022["time_taken"]).sum()) *100print(f"{sun_forwards_of_of_horizontal_Jpersqm_percent:.2f}% of irradiance is collected forwards of horizontal")```We would like to determine the optimal orientation of solar cells. First, we generate 2000 sample normal vectors. 
Then, we calculate the irradiance on them at each route step and find specific power collected in $W/m^2$ as a function of orientation.```{python}SAMPLE_CELL_VECTORS =2000HEATMAP_RES_AZ =1000HEATMAP_RES_ZEN =500AZIMUTH_BOUNDS = (0, np.pi *2)ZENITH_BOUNDS = (0, np.pi /2)def get_cell_normal_spline(race_profile): race_profile = race_profile.copy()def make_hemisphere_normals(num_points): point_normals = [(0, 0)] num_equator_points =2* math.ceil(math.sqrt(num_points)) point_normals += [(2* math.pi * i / num_equator_points, math.pi /2) for i inrange(num_equator_points)] num_fibonacci_points = num_points - num_equator_points -1 phi = math.pi * (math.sqrt(5) -1)for i inrange(num_fibonacci_points): z =1- (i / num_fibonacci_points) radius = math.sqrt(1- z * z) theta = phi * i x = math.cos(theta) * radius y = math.sin(theta) * radius point_normals.append((math.atan2(y, x) % (2* math.pi), math.acos(z))) df = pd.DataFrame(point_normals, columns=["static_cell_azimuth", "static_cell_zenith"])# scale up rho for the cell normal vectors to produce a convex hull for interpolation slightly larger than the unit# sphere so that all points on the unit sphere are inside the convex hull, even when the facets of it are triangular RHO_EPSILON =0.001 RHO =1+ RHO_EPSILON df["x"] = RHO * np.sin(df["static_cell_zenith"]) * np.cos(df["static_cell_azimuth"]) df["y"] = RHO * np.sin(df["static_cell_zenith"]) * np.sin(df["static_cell_azimuth"]) df["z"] = RHO * np.cos(df["static_cell_zenith"])return df cell_normals = make_hemisphere_normals(SAMPLE_CELL_VECTORS)def get_cell_cos_incident_angle(cell): cosine_incident_angle = np.cos(race_profile["solar_zenith"]) * np.cos(cell_normals.loc[cell, "static_cell_zenith"]) +\ np.sin(race_profile["solar_zenith"]) * np.sin(cell_normals.loc[cell, "static_cell_zenith"]) *\ (np.cos(race_profile["vehicle_relative_azimuth"]) * np.cos(cell_normals.loc[cell, "static_cell_azimuth"])+ np.sin(race_profile["vehicle_relative_azimuth"]) * np.sin(cell_normals.loc[cell, 
"static_cell_azimuth"])) cosine_incident_angle[cosine_incident_angle <0] =0return cosine_incident_angle encapsulant_eff_raw = pd.read_csv("data/encapsulant/mito-skyiar-2025.csv").iloc[::-1] encapsulant_eff_interp = CubicSpline(encapsulant_eff_raw["cos_incident_angle"].to_numpy(), encapsulant_eff_raw["encapsulant_efficiency"])def get_cell_beam_irradiance_seen(cell): cosine_incident_angle = get_cell_cos_incident_angle(cell)return cosine_incident_angle * race_profile["dni"] * encapsulant_eff_interp(cosine_incident_angle)def get_sky_diffuse_irradiance(cell): isotropic_diffuse_irradiance = race_profile["dhi"] * (1+ np.cos(cell_normals.loc[cell, "static_cell_zenith"])) /2 circumsolar_horizon_diffuse_irradiance = race_profile["ghi"] * (0.012* np.rad2deg(race_profile["solar_zenith"]) -0.04) * (1- np.cos(cell_normals.loc[cell, "static_cell_zenith"])) /2return isotropic_diffuse_irradiance + circumsolar_horizon_diffuse_irradiancedef get_ground_reflected_irradiance(cell):return race_profile["ghi"] * (1- np.cos(cell_normals.loc[cell, "static_cell_zenith"])) /2*0.1for cell_id in cell_normals.index: race_profile[f"cell-{cell_id}-beam-irradiance-collected"] = get_cell_beam_irradiance_seen(cell_id) race_profile[f"cell-{cell_id}-sky-diffuse-irradiance-collected"] = get_sky_diffuse_irradiance(cell_id) race_profile[f"cell-{cell_id}-ground-reflected-irradiance-collected"] = get_ground_reflected_irradiance(cell_id) race_profile[f"cell-{cell_id}-irradiance-collected"] = race_profile[f"cell-{cell_id}-beam-irradiance-collected"] + race_profile[f"cell-{cell_id}-sky-diffuse-irradiance-collected"] + race_profile[f"cell-{cell_id}-ground-reflected-irradiance-collected"] cell_normals.loc[cell_id, "avg_cell_beam_irradiance"] = np.average(race_profile[f"cell-{cell_id}-beam-irradiance-collected"], weights=race_profile["time_taken"]) cell_normals.loc[cell_id, "avg_sky_diffuse_irradiance"] = np.average(race_profile[f"cell-{cell_id}-sky-diffuse-irradiance-collected"], 
weights=race_profile["time_taken"]) cell_normals.loc[cell_id, "avg_ground_reflected_irradiance"] = np.average(race_profile[f"cell-{cell_id}-ground-reflected-irradiance-collected"], weights=race_profile["time_taken"]) cell_normals.loc[cell_id, "avg_irradiance_collected"] = np.average(race_profile[f"cell-{cell_id}-irradiance-collected"], weights=race_profile["time_taken"]) zenith_linspace = np.linspace(*ZENITH_BOUNDS, HEATMAP_RES_ZEN) sample_points_azimuth, sample_points_zenith = np.meshgrid(np.linspace(*AZIMUTH_BOUNDS, HEATMAP_RES_AZ), zenith_linspace) total_time_s = np.sum(race_profile["time_taken"])print(f"Total racetime is {total_time_s /3600} hrs") best_cell = cell_normals.iloc[cell_normals["avg_irradiance_collected"].idxmax()] best_cell_over_6m2 = best_cell['avg_irradiance_collected'] *6print(f"Best orientation collects {best_cell['avg_irradiance_collected']:.2f} W/m^2 ({best_cell_over_6m2:.2f} W if this was the entire array), azimuth: {np.rad2deg(best_cell["static_cell_azimuth"]):.2f} deg, zenith: {np.rad2deg(best_cell["static_cell_zenith"]):.2f} deg") flat_cell = cell_normals.iloc[0] flat_cell_over_6m2 = flat_cell['avg_irradiance_collected'] *6print(f"Flat cell collects {flat_cell['avg_irradiance_collected']:.2f} W/m^2 ({flat_cell_over_6m2:.2f} W if this was the entire array), azimuth: {np.rad2deg(flat_cell["static_cell_azimuth"]):.2f} deg zenith: {np.rad2deg(flat_cell["static_cell_zenith"]):.2f} deg")return ( LinearNDInterpolator(cell_normals[["x", "y", "z"]], cell_normals["avg_irradiance_collected"]), LinearNDInterpolator(cell_normals[["x", "y", "z"]], cell_normals["avg_cell_beam_irradiance"]), LinearNDInterpolator(cell_normals[["x", "y", "z"]], cell_normals["avg_sky_diffuse_irradiance"]), LinearNDInterpolator(cell_normals[["x", "y", "z"]], cell_normals["avg_ground_reflected_irradiance"]), best_cell, )specific_irradiance_spline, specific_beam_irradiance_spline, specific_sky_diffuse_spline, specific_ground_reflected_spline, best_cell = 
get_cell_normal_spline(pd.concat(races.values()))zenith_linspace = np.linspace(*ZENITH_BOUNDS, HEATMAP_RES_ZEN)sample_points_azimuth, sample_points_zenith = np.meshgrid(np.linspace(*AZIMUTH_BOUNDS, HEATMAP_RES_AZ), zenith_linspace)sample_points_x = np.sin(sample_points_zenith) * np.cos(sample_points_azimuth)sample_points_y = np.sin(sample_points_zenith) * np.sin(sample_points_azimuth)sample_points_z = np.cos(sample_points_zenith)irradiance_collected_sampled = specific_irradiance_spline(sample_points_x, sample_points_y, sample_points_z)fig, ax = plt.subplots(subplot_kw={'projection': 'polar'})pc = ax.pcolormesh(sample_points_azimuth, sample_points_zenith *180/ np.pi, irradiance_collected_sampled, shading='auto')fig.colorbar(pc)# ax.scatter(best_cell["static_cell_azimuth"], best_cell["static_cell_zenith"] * 180 / np.pi, label="best attitude")ax.set_theta_zero_location('N')ax.set_rmax(90)# ax.set_rticks([0, 30, 60, 90], labels=['0', '30', '60', '90'])ax.set_title(r"Average cell irradiance ($W/m^2$) collected at attitude (October 2022 w/ 2007-2022 weather)")ax.scatter(best_cell["static_cell_azimuth"], best_cell["static_cell_zenith"] *180/ np.pi, label="best attitude")fig.legend(loc="lower left")# fit_to_gslide()```It is also of interest what the section of the plot representing a cell tilting along the X axis [TODO(agoettsc): this could be worded better] of the vehicle looks like.```{python}SECTION_AZIMUTH_DEG =180SECTION_AZIMUTH = np.deg2rad(SECTION_AZIMUTH_DEG)section_zenith_linspace = np.linspace(-ZENITH_BOUNDS[1], ZENITH_BOUNDS[1], HEATMAP_RES_ZEN *2)section_x_samples = np.sin(section_zenith_linspace) * np.cos(SECTION_AZIMUTH)section_y_samples = np.full(HEATMAP_RES_ZEN *2, np.sin(SECTION_AZIMUTH))section_z_samples = np.cos(section_zenith_linspace)section_zenith_deg_plot_x = np.rad2deg(section_zenith_linspace)section_irradiance = specific_irradiance_spline(section_x_samples, section_y_samples, section_z_samples)section_max_index = 
np.argmax(section_irradiance)section_max_irradiance = section_irradiance[section_max_index]section_max_irradiance_over_6m2 = section_max_irradiance *6print(f"Best cell along azimuth {SECTION_AZIMUTH_DEG} deg collects {section_irradiance[section_max_index]:.2f} W/m^2 ({section_max_irradiance_over_6m2:.2f} W if this was the entire array), zenith: {section_zenith_deg_plot_x[section_max_index]:.2f} deg")fig = plt.figure()section_ax = fig.add_subplot()section_ax.plot(section_zenith_deg_plot_x, section_irradiance, label="total irradiance collected")section_ax.plot(section_zenith_deg_plot_x, specific_beam_irradiance_spline(section_x_samples, section_y_samples, section_z_samples), label="beam irradiance collected")section_ax.plot(section_zenith_deg_plot_x, specific_sky_diffuse_spline(section_x_samples, section_y_samples, section_z_samples), label="sky diffuse irradiance collected")section_ax.plot(section_zenith_deg_plot_x, specific_ground_reflected_spline(section_x_samples, section_y_samples, section_z_samples), label="ground reflected irradiance collected")# section_ax.legend()section_ax.set_title(f"Irradiance collected by cells oriented along the x-axis of the vehicle")section_ax.set_xlabel("Pitch (deg)")section_ax.set_ylabel(r"Irradiance ($W/m^2$)")ylim = section_ax.get_ylim()section_ax.plot([0, 0], [-1000, 1000], '--', color=ANGELL_HALL_ASH)section_ax.set_ylim(ylim)fit_to_gslide()```## Wind AnalysisFirst, we plot a histogram of yaw frequencies.```{python}races_df = pd.concat(races.values())races_df["yaw_weight"] = (races_df["apparent_wind_speed"] **2) * races_df["distance"]rms_apparent_wind_speed = np.sqrt(np.sum(races_df["yaw_weight"]) / np.sum(races_df["distance"]))weight_denom = np.sum(races_df["yaw_weight"])counts, bins = np.histogram(np.rad2deg(races_df["yaw"]), weights=races_df["yaw_weight"] / weight_denom, bins=401, range=(-20.05, 20.05))fig = plt.figure()ax = fig.add_subplot()ax.bar(bins[:-1], counts)ax.set_title("Apparent yaw frequency over race days in August 
(2007-2022)")ax.set_xlabel("Yaw angle (deg)")ax.set_ylabel("Frequency")ax.set_ybound(0, 0.0118)bin_centers = (bins[1:] + bins[:-1]) /2output_df = pd.DataFrame({"yaw_deg": bin_centers,"density": counts,})output_df.to_csv("yaw_density_august.csv", index=False)``````{python}```Then, we get the RMS apparent wind speed at different speed values```{python}RMS_WIND_SPEED_SWEEP_RANGE_KPH = (80, 130)RMS_WIND_SAMPLES =10def get_rms_apparent_wind_speed(speed): races_at_speed = get_races_for_speed(speed) races_df = pd.concat(races_at_speed.values()) races_df["yaw_weight"] = (races_df["apparent_wind_speed"] **2) * races_df["distance"]return np.sqrt(np.sum(races_df["yaw_weight"]) / np.sum(races_df["distance"]))target_speeds_kph = np.linspace(*RMS_WIND_SPEED_SWEEP_RANGE_KPH, RMS_WIND_SAMPLES)rms_apparent_wind_speeds = np.fromiter((get_rms_apparent_wind_speed(s) for s in target_speeds_kph), float, count=-1)fig, ax = plt.subplots()ax.plot(target_speeds_kph, rms_apparent_wind_speeds *3.6)ax.set_title("RMS apparent wind speed vs. 
target speed")ax.set_xlabel("Target speed [kph]")ax.set_ylabel("RMS apparent wind speed [kph]")ax.set_xlim([80, 130])ax.set_ylim([80, 130])fit_to_gslide()```# Target Race Time Analysis# Concept Analysis```{python}# CdA on x-axis, using tex# Array Energy In on y-axis# Level curves at different racetimes [4d - 1h, 4d, 4d + 1h, 4d + 2h, etc]def get_array_cda_from_trt(official_race_time, cda_axis): true_race_time = official_race_time - (HOT_LAP_DELAY_TIME + WASTED_TIME) *3600# for a balanced energy budget OTHER_ENERGY_IN =2942+3672+9653 OTHER_ENERGY_OUT =30805-18691 NOMINAL_MPPT_EFFICIENCY =0.98 NOMINAL_AIR_DENSITY =1.159# calculate the array power and CdA required to achieve true_race_time# return the array power and CdA# now determine array power and CdA which balance this budget# fix CdA, find array power array_power_axis = []for cda in cda_axis: f = interp1d(target_speed_df['true_race_time'], target_speed_df['rms_velocity']) rms_velocity = f(true_race_time) rms_wind_velocity = rms_velocity +3 drag_energy =0.5* NOMINAL_AIR_DENSITY * cda /1000* rms_wind_velocity **2* race_distance /3600print(cda, drag_energy) needed_array_energy = drag_energy + OTHER_ENERGY_OUT - OTHER_ENERGY_IN# use solver to determine array power array_power = needed_array_energy / ((true_race_time + WASTED_TIME) /3600) / NOMINAL_MPPT_EFFICIENCY array_power_axis.append(array_power)return array_power_axisfig = plt.figure()plt.xlabel(r'$C_d A$ ($m^2$/1000)')plt.xlim([30, 100])plt.ylabel('Array Energy In [W]')# plt.ylim([24, 40])plt.title('Concept Selection Status')# plot level curvesrace_times = ['4d', '4d + 3h', '4d + 6h', '5d']official_race_times =1/3+ np.array([31, 34, 37, 40])cda_axis = np.linspace(30, 100, 71)for i inrange(len(race_times)): array_power_axis = get_array_cda_from_trt(official_race_times[i] *3600, cda_axis) p = plt.plot(cda_axis, array_power_axis, color=MICHIGAN_BLUE, alpha=0.75)# make it dashed if official_race_time != 31 + 1/3if official_race_times[i] !=31+1/3: 
p[0].set_linestyle('--')# add label at end of the line plt.text(cda_axis[-1], array_power_axis[-1], ' '+ race_times[i], color=MICHIGAN_BLUE, va='center', ha='left')# plot different models of car# scatter plots with specialized colorsdef plot_concept_car(cda, array_power, color, label=None): plt.scatter([cda], [array_power], color=color, label=label, s=70, edgecolors='black', linewidths=0.5, alpha=1if label isnotNoneelse0.5)plot_concept_car(70.93, 858.40, color=MICHIGAN_MAIZE, label='Monohull')plot_concept_car(73.45, 870.50, color=MICHIGAN_MAIZE) # Monohullplot_concept_car(90.90, 928.07, color=LAW_QUAD_STONE, label='Teardrop')SIZE=1# square shapeplt.scatter([71], [640], color=ANGELL_HALL_ASH, label='Astrum (Scaled)', s=SIZE, edgecolors='black', linewidths=0.5, marker='s')plt.scatter([45], [600], color=TAUBMAN_TEAL, label='Infinite (Scaled)', s=SIZE, edgecolors='black', linewidths=0.5, marker='s')# display legend only for labeled itemsplt.legend()fit_to_gslide()# set dpi to 600plt.savefig('concept_selection.png', dpi=600)plt.show()```