Hello. I am trying to optimize the MSE (mean squared error) between experimental data and theoretical data. I want to constrain my parameter values within their bounds and to the constraint function, calculated the optimal delay time that gives the minimal MSE. I tried an initial guess that matches another block of code I have (which is correct, the one following the #), but when trying the initial guess I have written out (which I can verify gives larger theoretical data and thus larger/worse MSE), it returns the actual theoretical value that I have from another block of code and just the initial guess parameters as the optimal ones. Any advice? Could the blocks of code be interfering with one another? Should I open a new notebook? Or is something fundamentally wrong?

```
import numpy as np
import scipy.optimize
from scipy.optimize import LinearConstraint
from scipy.optimize import Bounds
from scipy.optimize import minimize
lab_delayvalues_glu = {2: 2.58,
4: 3.84,
5: 5.68,
6: 4.43,
8: 3.60,
10: 3.10}
def function(z, N):
z = k_off, P_in, P_out
# Set initial conditions
y0 = [0, r_min+(lambda_0/kappa_t), 0, sigma_i]
# Define the time range
tMax=(N+1)+4+(30/lambda_0)
time=np.linspace(0,tMax,int(10*tMax))
growth_rates_list = []
# Solve the ODEs for each value of N
solution = solve_ivp(model_equations, [time[0], time[-1]], y0, method='LSODA', t_eval=time)
gr1_values = np.zeros_like(time)
gr2_values = np.zeros_like(time)
# Calculate gr1 and gr2 for all time points
for i, t in enumerate(time):
if 0 < t < N + 1:
gr1_values[i] = ((solution.y[1, i] - r_min) * kappa_t)
else:
gr2_values[i] = ((r_max - solution.y[1, i] - solution.y[2, i]) * kappa_n)
# Calculate the total growth rate by element-wise addition
growth_rate = gr1_values + gr2_values
# Append growth rate to the list
growth_rates_list.append(growth_rate)
ODs_and_x_intercepts = []
data_list = []
# Calculate OD and x-intercept data for each N
OD = np.cumsum(growth_rate) * (time[1] - time[0])
start_x = time[-1]
start_index = np.where(time == start_x)[0][0]
start_y = OD[start_index]
if abs(1-(OD[start_index]-OD[start_index - 1])/(time[1]-time[0])/lambda_0) > 0.01:
print(N, lambda_0)
desired_slope = lambda_0
x_intercept = start_x - (start_y / desired_slope)
return x_intercept
# Define the Mean Squared Error (MSE) objective function for a specific N
def mse_objective(params, N):
# Get the experimental delay time for the current N
exp_delay_time = lab_delayvalues_glu[N]
# Calculate delay time using current parameters
calc_delay_time = function(params, N)
# Calculate squared error and return MSE
mse = (exp_delay_time - calc_delay_time) ** 2
return mse
# Define the constraint function
def constraint_function(params):
# Calculate the value of the constraint (IC_50 - 1.75)
k_off, P_in, P_out = params
calc_IC_50 = (k_off/k_on)*(P_out/P_in)*(lambda_max/lambda_0)
return abs(calc_IC_50 - 1.75) # Ensure IC_50 is greater than or equal to 1.75
# Initial guess for the parameters
initial_guess = [513.0, 1360.0, 80.0] #[4000.0, 1540.8822857142854, 100.0]
# Define bounds for the parameters
param_bounds = [(513, 20520), (1360, 2040), (80, 120)] # Example bounds for k_off, P_in, P_out
# Define the constraint dictionary
constraints = {'type': 'ineq', 'fun': constraint_function}
# Dictionary to store experimental and optimized delay times for plotting
experimental_delay_times = []
optimized_delay_times = []
# Iterate over all N values in lab_delayvalues_glu and minimize MSE for each
for N in lab_delayvalues_glu:
# Experimental delay time for current N
experimental_delay_time = lab_delayvalues_glu[N]
experimental_delay_times.append(experimental_delay_time)
# Minimize MSE for current N
result = minimize(mse_objective, initial_guess, args=(N,), method='trust-constr', bounds=param_bounds, constraints=constraints)
# Optimized delay time for current N
optimized_delay_time = function(result.x, N)
optimized_delay_times.append(optimized_delay_time)
# Print the results for current N
print(f"N={N}:")
print("Experimental Delay Time:", experimental_delay_time)
print("Optimized Delay Time:", optimized_delay_time)
print("Optimized Parameters [k_off, P_in, P_out]:", result.x)
print("Mean Squared Error (MSE):", result.fun)
print()
# Plot experimental and calculated delay times for each N
plt.figure(figsize=(8, 6))
plt.plot(list(lab_delayvalues_glu.keys()), experimental_delay_times, marker='o', color='blue', label='Experimental')
plt.plot(list(lab_delayvalues_glu.keys()), optimized_delay_times, marker='o', color='red', label='Calculated')
plt.title('Experimental vs Calculated Delay Times')
plt.xlabel('N Value')
plt.ylabel('Delay Time')
plt.xticks(list(lab_delayvalues_glu.keys())) # Set x-axis ticks to N values
plt.legend()
plt.grid(True)
plt.show()
```