Hello,
I am working with the O’Regan parameter set to get temperature prediction for LGM50 to compare the predictions with the model that I have developed. I am using this code as the reference:
# Load the experimental drive-cycle data set (time, temperature, current, SOC).
data_file = loadmat('WLTP_2_CYCLES_FULL_SET.mat')
Crate, Temp = "2C", 0
t, T, T_inf, I, SOC_0 = load_data(Crate, Temp)

# Lumped (0-D) thermal model on an arbitrary cell geometry.
options = {"thermal": "lumped", "dimensionality": 0, "cell geometry": "arbitrary"}
model = pybamm.lithium_ion.DFN(options=options)

# O'Regan 2022 parameter set (LG M50 / LGM50 cell).
param = pybamm.ParameterValues("ORegan2022")
solver = pybamm.IDAKLUSolver()
param["Total heat transfer coefficient [W.m-2.K-1]"] = 14
param["Ambient temperature [K]"] = T_inf
param["Initial temperature [K]"] = T_inf
param["Current function [A]"] = I

# Mesh refinement: exponential particle submeshes clustered at the particle
# surface ("right" side), where concentration gradients are steepest at high
# C-rates / low temperatures — exactly the failing conditions.
var_pts = {"x_n": 30, "x_s": 30, "x_p": 30, "r_n": 40, "r_p": 40}
submesh_types = model.default_submesh_types
submesh_types["negative particle"] = pybamm.MeshGenerator(
    pybamm.Exponential1DSubMesh, submesh_params={"side": "right"}
)
submesh_types["positive particle"] = pybamm.MeshGenerator(
    pybamm.Exponential1DSubMesh, submesh_params={"side": "right"}
)

# BUG FIX: the original call was missing its closing parenthesis, and the
# custom mesh settings (var_pts, submesh_types) were built but never passed
# to Simulation, so the solver silently ran on the default mesh.
sim = pybamm.Simulation(
    model,
    parameter_values=param,
    solver=solver,
    var_pts=var_pts,
    submesh_types=submesh_types,
)
sol = sim.solve(t_eval=t, initial_soc=SOC_0)
When I run the simulation under the 2C 0°C, 2C 10°C, and WLTP 0°C conditions, I get the following error:
---------------------------------------------------------------------------
SolverError Traceback (most recent call last)
Cell In[106], line 1
----> 1 sol = sim.solve(t_eval=t, initial_soc=SOC_0)
File ~\AppData\Local\anaconda3\envs\battery\Lib\site-packages\pybamm\simulation.py:488, in Simulation.solve(self, t_eval, solver, save_at_cycles, calc_esoh, starting_solution, initial_soc, callbacks, showprogress, inputs, **kwargs)
474 if dt_eval_max > dt_data_min + sys.float_info.epsilon:
475 warnings.warn(
476 f"""
477 The largest timestep in t_eval ({dt_eval_max}) is larger than
(...)
485 stacklevel=2,
486 )
--> 488 self._solution = solver.solve(
489 self.built_model, t_eval, inputs=inputs, **kwargs
490 )
492 elif self.operating_mode == "with experiment":
493 callbacks.on_experiment_start(logs)
File ~\AppData\Local\anaconda3\envs\battery\Lib\site-packages\pybamm\solvers\base_solver.py:903, in BaseSolver.solve(self, model, t_eval, inputs, nproc, calculate_sensitivities)
901 ninputs = len(model_inputs_list)
902 if ninputs == 1:
--> 903 new_solution = self._integrate(
904 model,
905 t_eval[start_index:end_index],
906 model_inputs_list[0],
907 )
908 new_solutions = [new_solution]
909 else:
File ~\AppData\Local\anaconda3\envs\battery\Lib\site-packages\pybamm\solvers\idaklu_solver.py:679, in IDAKLUSolver._integrate(self, model, t_eval, inputs_dict)
677 return newsol
678 else:
--> 679 raise pybamm.SolverError("idaklu solver failed")
SolverError: idaklu solver failed
The simulation works fine under conditions at:
0.5C 0°C
0.5C 10°C
0.5C 25°C
1C 0°C
1C 10°C
1C 25°C
2C 25°C
WLTP 10°C
WLTP 25°C
Any insight into why the solver fails at 2C 0°C, 2C 10°C, and WLTP 0°C?
Any help would be much appreciated