While trying to convert a Keras model to ONNX, I get the following error:
Traceback (most recent call last):
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\tf2onnx\tf_loader.py", line 218, in from_trackable
    frozen_graph = from_function(concrete_func, inputs, outputs, large_model)
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\tf2onnx\tf_loader.py", line 277, in from_function
    raise e
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\tf2onnx\tf_loader.py", line 270, in from_function
    frozen_func = convert_variables_to_constants_v2(func, lower_control_flow=False, aggressive_inlining=True)
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\tensorflow\python\framework\convert_to_constants.py", line 1162, in convert_variables_to_constants_v2
    return _construct_concrete_function(func, output_graph_def,
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\tensorflow\python\framework\convert_to_constants.py", line 1087, in _construct_concrete_function
    new_func = wrap_function.function_from_graph_def(output_graph_def,
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\tensorflow\python\eager\wrap_function.py", line 655, in function_from_graph_def
    wrapped_import = wrap_function(_imports_graph_def, [])
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\tensorflow\python\eager\wrap_function.py", line 619, in wrap_function
    func_graph.func_graph_from_py_func(
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\tensorflow\python\framework\func_graph.py", line 1247, in func_graph_from_py_func
    func_outputs = python_func(*func_args, **func_kwargs)
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\tensorflow\python\eager\wrap_function.py", line 83, in __call__
    return self.call_with_variable_creator_scope(self._fn)(*args, **kwargs)
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\tensorflow\python\eager\wrap_function.py", line 89, in wrapped
    return fn(*args, **kwargs)
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\tensorflow\python\eager\wrap_function.py", line 649, in _imports_graph_def
    importer.import_graph_def(graph_def, name="")
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\tensorflow\python\util\deprecation.py", line 561, in new_func
    return func(*args, **kwargs)
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\tensorflow\python\framework\importer.py", line 403, in import_graph_def
    return _import_graph_def_internal(
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\tensorflow\python\framework\importer.py", line 497, in _import_graph_def_internal
    with c_api_util.tf_buffer(graph_def.SerializeToString()) as serialized:
ValueError: Message tensorflow.GraphDef exceeds maximum protobuf size of 2GB: 4797467584

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\ui\convert.py", line 8, in <module>
    onnx_model = onnxmltools.convert_keras(model)
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\onnxmltools\convert\main.py", line 84, in convert_keras
    model_proto, external_tensor_storage = tf2onnx.convert.from_keras(
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\tf2onnx\convert.py", line 497, in from_keras
    tf_loader.from_trackable(model, concrete_func, input_names, output_names, large_model)
  File "C:\Users\emers\OneDrive\Documents\Code\ml4\.conda\lib\site-packages\tf2onnx\tf_loader.py", line 221, in from_trackable
    raise ValueError(err_large_model)
ValueError: model exceeds maximum protobuf size of 2GB. Try setting large_model.
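For context, the conversion code at convert.py line 8 in the traceback boils down to the following (a minimal sketch: the model path is illustrative, and the actual Keras model is large enough that its frozen graph serializes to well over 2 GB):

```python
import onnxmltools
from tensorflow import keras

# Load the (large) Keras model; the path here is only illustrative.
model = keras.models.load_model("my_model.h5")

# This call raises the ValueError above once the frozen GraphDef
# exceeds protobuf's 2 GB serialization limit.
onnx_model = onnxmltools.convert_keras(model)
```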
However, it is not possible to set the large_model=True parameter suggested by the error: onnxmltools.convert_keras always passes it to tf2onnx as False.
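For comparison, tf2onnx itself does expose the flag when called directly, so a workaround roughly like the sketch below should be possible (hedged: the output path is illustrative, and my understanding is that with large_model=True tf2onnx writes a zip archive containing the graph plus external tensor data instead of a single protobuf):

```python
import tf2onnx

# Bypass onnxmltools and call tf2onnx directly, where large_model is a
# keyword argument of convert.from_keras. With large_model=True the
# weights are stored as external tensors, avoiding the 2 GB limit.
model_proto, external_tensor_storage = tf2onnx.convert.from_keras(
    model,                    # the same Keras model as above
    large_model=True,
    output_path="model.zip",  # illustrative; large_model output is a zip archive
)
```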