While converting a Keras model to ONNX with tf2onnx, I get the error "'tuple' object has no attribute 'graph'" from onnx.
My code is below, please help me.
import tf2onnx
import onnx
from tensorflow.python.keras import models
model = models.load_model("model.h5")
onnx_model = tf2onnx.convert.from_keras(model, opset=13)
onnx.save(onnx_model, 'test.onnx')
"C:\Users\test\PycharmProjects\OnnxTest\main.py", line 13, in
onnx.save(onnx_model, 'test.onnx')
File "C:\Users\bigpicture\anaconda3\lib\site-packages\onnx_init_.py", line 195, in save_model
proto = write_external_data_tensors(proto, basepath)
File "C:\Users\test\anaconda3\lib\site-packages\onnx\external_data_helper.py", line 276, in write_external_data_tensors
for tensor in _get_all_tensors(model):
File "C:\Users\test\anaconda3\lib\site-packages\onnx\external_data_helper.py", line 207, in _get_initializer_tensors
yield from _get_initializer_tensors_from_graph(onnx_model_proto.graph)
AttributeError: 'tuple' object has no attribute 'graph'
python : 3.9.13
tensorflow : 2.9.1
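A likely fix (a sketch, not verified against this exact model): tf2onnx.convert.from_keras returns a tuple of (model_proto, external_tensor_storage), so unpack the ONNX proto before passing it to onnx.save.
import onnx
import tf2onnx
from tensorflow.python.keras import models

model = models.load_model("model.h5")

# from_keras returns (model_proto, external_tensor_storage); keep only the proto
onnx_model, _ = tf2onnx.convert.from_keras(model, opset=13)
onnx.save(onnx_model, "test.onnx")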
I am building a custom dataset with the TensorFlow Object Detection API.
The saved_model.pb file was generated by training with FastRCNN.
I took this file and tried to use it in Nuclio (a serverless function framework), but it failed;
Nuclio appears to expect an inference graph file type instead.
I found the export utility script "export_inference_graph.py" in the models/research/object_detection directory,
but this script does not work.
This is the error message:
Traceback (most recent call last):
File "export_inference_graph.py", line 211, in <module>
tf.app.run()
File "/home/namu/.local/lib/python3.8/site-packages/tensorflow/python/platform/app.py", line 36, in run
_run(main=main, argv=argv, flags_parser=_parse_flags_tolerate_undef)
File "/home/namu/.local/lib/python3.8/site-packages/absl/app.py", line 308, in run
_run_main(main, args)
File "/home/namu/.local/lib/python3.8/site-packages/absl/app.py", line 254, in _run_main
sys.exit(main(argv))
File "export_inference_graph.py", line 199, in main
exporter.export_inference_graph(
File "/home/namu/myspace/data/models/research/object_detection/exporter.py", line 618, in export_inference_graph
_export_inference_graph(
File "/home/namu/myspace/data/models/research/object_detection/exporter.py", line 521, in _export_inference_graph
profile_inference_graph(tf.get_default_graph())
File "/home/namu/myspace/data/models/research/object_detection/exporter.py", line 649, in profile_inference_graph
contrib_tfprof.model_analyzer.TRAINABLE_VARS_PARAMS_STAT_OPTIONS)
NameError: name 'contrib_tfprof' is not defined
I learned from Google that this script does not work on TensorFlow 2.x.
https://medium.com/@sebastingarcaacosta/how-to-export-a-tensorflow-2-x-keras-model-to-a-frozen-and-optimized-graph-39740846d9eb
I am working on it by referring to the site above, but:
import tensorflow as tf
from tensorflow import keras
from tensorflow.python.framework.convert_to_constants import convert_variables_to_constants_v2
import numpy as np
#path of the directory where you want to save your model
frozen_out_path = "/home/namu/myspace/data/models/export_graph"
# name of the .pb file
frozen_model = "frozen_graph"
model = tf.keras.models.load_model('/home/namu/myspace/data/models/train_pb/saved_model') # tf_saved_model load
# model = tf.saved_model.load('/home/namu/myspace/data/models/train_pb/saved_model')
full_model = tf.function(lambda x: model(x))
full_model = full_model.get_concrete_function(tf.TensorSpec(model.inputs[0].shape, model.inputs[0].dtype))
When I execute this code, this error occurs:
ValueError: Unable to create a Keras model from SavedModel at /home/namu/myspace/data/models/train_pb/saved_model. This SavedModel was exported with `tf.saved_model.save`, and lacks the Keras metadata file. Please save your Keras model by calling `model.save` or `tf.keras.models.save_model`. Note that you can still load this SavedModel with `tf.saved_model.load`.
How can I create an inference graph .pb file?
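If it helps, here is a minimal sketch of freezing a SavedModel that was exported with tf.saved_model.save (so it lacks the Keras metadata file). It assumes the model exposes a 'serving_default' signature and has not been verified on this particular object detection export; note also that for TF 2.x the Object Detection API ships exporter_main_v2.py as the supported export script instead of export_inference_graph.py.
import tensorflow as tf
from tensorflow.python.framework.convert_to_constants import convert_variables_to_constants_v2

# Load the SavedModel directly; this works even without the Keras metadata file
loaded = tf.saved_model.load('/home/namu/myspace/data/models/train_pb/saved_model')
concrete_func = loaded.signatures['serving_default']

# Fold the variables into constants to get a frozen graph
frozen_func = convert_variables_to_constants_v2(concrete_func)
graph_def = frozen_func.graph.as_graph_def()

# Write the frozen graph to disk
tf.io.write_graph(graph_or_graph_def=graph_def,
                  logdir='/home/namu/myspace/data/models/export_graph',
                  name='frozen_graph.pb',
                  as_text=False)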
I have trained a model on Google Colab by adding a layer on top of ResNet. Here is the model:
import tensorflow as tf
import tensorflow_hub as hub  # Provides pretrained models
from tensorflow.keras.layers import Dense

resnet_url = "https://tfhub.dev/google/imagenet/resnet_v2_50/feature_vector/5"

# Download ResNet model and wrap it as a Keras layer
# trainable=False means we don't want to train it further
resnet_layer = hub.KerasLayer(resnet_url,
                              trainable=False,
                              input_shape=(256, 256, 3))

# Create model
resnet_model = tf.keras.Sequential([
    # Puts images through the downloaded model first
    resnet_layer,
    # Output layer: 2 classes (softmax)
    Dense(2, activation="softmax")
])
It works fine in the Colab notebook. But when I export it and try to deploy it in a Flask service using the code below:
import base64
import numpy as np
import io
import os
from PIL import Image
import keras
from keras import backend as K
from keras.models import Sequential
from keras.models import load_model
from keras.preprocessing.image import ImageDataGenerator
#from keras.preprocessing.image import img_to_array
import tensorflow as tf
import tensorflow_hub as hub
from tensorflow.keras.utils import img_to_array
from flask import request
from flask import jsonify
from flask import Flask
app = Flask(__name__)
def get_model():
    global model
    model = load_model('resnet_model_1.h5')
    print(" * Model loaded!")

def preprocess_image(image, target_size):
    if image.mode != "RGB":
        image = image.convert("RGB")
    image = image.resize(target_size)
    image = img_to_array(image)
    image = np.expand_dims(image, axis=0)
    return image

print(" * Loading Keras model...")
get_model()

@app.route("/predict", methods=["POST"])
def predict():
    message = request.get_json(force=True)
    encoded = message['image']
    decoded = base64.b64decode(encoded)
    image = Image.open(io.BytesIO(decoded))
    processed_image = preprocess_image(image, target_size=(256, 256))
    prediction = model.predict(processed_image).tolist()
    response = {
        'prediction': {
            'dog': prediction[0][0],
            'cat': prediction[0][1]
        }
    }
    return jsonify(response)
I get this nasty error:
Traceback (most recent call last):
File "/usr/local/bin/flask", line 8, in <module>
sys.exit(main())
File "/usr/local/lib/python3.8/dist-packages/flask/cli.py", line 988, in main
cli.main()
File "/usr/local/lib/python3.8/dist-packages/flask/cli.py", line 579, in main
return super().main(*args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/click/core.py", line 1055, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.8/dist-packages/click/core.py", line 1657, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.8/dist-packages/click/core.py", line 1404, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.8/dist-packages/click/core.py", line 760, in invoke
return __callback(*args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/click/decorators.py", line 84, in new_func
return ctx.invoke(f, obj, *args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/click/core.py", line 760, in invoke
return __callback(*args, **kwargs)
File "/usr/local/lib/python3.8/dist-packages/flask/cli.py", line 850, in run_command
app = DispatchingApp(info.load_app, use_eager_loading=eager_loading)
File "/usr/local/lib/python3.8/dist-packages/flask/cli.py", line 299, in __init__
self._load_unlocked()
File "/usr/local/lib/python3.8/dist-packages/flask/cli.py", line 333, in _load_unlocked
self._app = rv = self.loader()
File "/usr/local/lib/python3.8/dist-packages/flask/cli.py", line 389, in load_app
app = locate_app(import_name, name)
File "/usr/local/lib/python3.8/dist-packages/flask/cli.py", line 234, in locate_app
__import__(module_name)
File "/home/pc3/dev/flaskservice/predict_app.py", line 39, in <module>
get_model()
File "/home/pc3/dev/flaskservice/predict_app.py", line 26, in get_model
model = load_model('resnet_model_1.h5')
File "/usr/local/lib/python3.8/dist-packages/keras/utils/traceback_utils.py", line 67, in error_handler
raise e.with_traceback(filtered_tb) from None
File "/usr/local/lib/python3.8/dist-packages/keras/utils/generic_utils.py", line 562, in class_and_config_for_serialized_keras_object
raise ValueError(
ValueError: Unknown layer: KerasLayer. Please ensure this object is passed to the `custom_objects` argument. See https://www.tensorflow.org/guide/keras/save_and_serialize#registering_the_custom_object for details.
My TensorFlow version on Colab is 2.8.2, and on the local Ubuntu machine where I deploy the model it is 2.9.1.
I read that this error has been resolved by passing custom_objects={'KerasLayer': hub.KerasLayer} to load_model. But for me this results in all images being classified as cat, so clearly it messes something up in the model.
So I'm left clueless and would appreciate your hints.
Please ensure that these lines are added after calling your inference, as you need an argmax to pick the dominant category from the prediction.
list_of_categories = ["dog", "cat"]
pred = list_of_categories[np.argmax(prediction)]
response = pred
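In addition, here is a minimal loading-and-preprocessing sketch. It assumes the Colab training pipeline rescaled images to [0, 1] (e.g. via ImageDataGenerator(rescale=1./255)); if it did not, drop the division by 255. Passing hub.KerasLayer through custom_objects is still needed so the .h5 file can be deserialized:
import numpy as np
import tensorflow as tf
import tensorflow_hub as hub

# Register hub.KerasLayer so the saved model can be rebuilt from the .h5 file
model = tf.keras.models.load_model(
    'resnet_model_1.h5',
    custom_objects={'KerasLayer': hub.KerasLayer})

def preprocess_image(image, target_size=(256, 256)):
    if image.mode != "RGB":
        image = image.convert("RGB")
    image = image.resize(target_size)
    # Assumption: training used inputs scaled to [0, 1]; mismatched scaling at
    # inference time can push every prediction into a single class
    array = np.asarray(image, dtype=np.float32) / 255.0
    return np.expand_dims(array, axis=0)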
This is an error I am getting on Spyder 5.0.3:
File "C:\Users\HP\anaconda3\lib\site-packages\tensorflow\python\keras\layers\core.py", line 1057, in _parse_function_from_config
function = generic_utils.func_load(
File "C:\Users\HP\anaconda3\lib\site-packages\tensorflow\python\keras\utils\generic_utils.py", line 457, in func_load
code = marshal.loads(raw_code)
ValueError: bad marshal data (unknown type code)
library-versions:
I have tried downgrading my libraries, but nothing has changed.
import tensorflow as tf
from tensorflow import keras
model_path = 'facenet_keras.h5'
#model = load_model(model_path)
#model = tf.keras.models.load_model(model_path)
#model = keras.models.load_model(model_path)
tf.keras.models.load_model(
model_path, custom_objects=None, compile=True, options=None
)
tried all this!
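The "bad marshal data" error typically means the .h5 file contains Python bytecode (for example from a Lambda layer) marshalled under a different Python version than the one loading it. One hedged workaround, assuming you can rebuild the architecture in code and the layer names match the original (build_facenet below is a hypothetical placeholder for your own model definition), is to load only the weights, which skips the marshal step:
import tensorflow as tf

model = build_facenet()                 # hypothetical: recreate the FaceNet architecture in code
model.load_weights('facenet_keras.h5')  # weights-only loading avoids deserializing Lambda bytecode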
The line "tflite_model = converter.convert()" gives the AttributeError: 'str' object has no attribute 'call'.
CODE:
import tensorflow as tf
converter = tf.lite.TFLiteConverter.from_keras_model('///Users/theunskuhn/Desktop/Savedfile/basic_malaria_pos_neg_v3.h5')
converter.experimental_new_converter = True
tflite_model = converter.convert()
open("basic_malaria_pos_neg_v3.tflite", "wb").write(tflite_model)
ERROR:
AttributeError: 'str' object has no attribute 'call'
The error points to line 4: "tflite_model = converter.convert()".
If you're using the TFLiteConverter API in TensorFlow 2.0 or above, note that TFLiteConverter.from_keras_model takes a Keras Model object, not the path to the model (a str).
First, load the model using tf.keras.models.load_model() and then pass this model to the TFLiteConverter API.
import tensorflow as tf
model = tf.keras.models.load_model( '///Users/theunskuhn/Desktop/Savedfile/basic_malaria_pos_neg_v3.h5' )
converter = tf.lite.TFLiteConverter.from_keras_model( model )
tflite_model = converter.convert()
open("basic_malaria_pos_neg_v3.tflite", "wb").write(tflite_model)
The method TFLiteConverter.from_keras_model_file() was replaced by TFLiteConverter.from_keras_model() in TF 2.0. See the docs.
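As a quick sanity check on the converted file, here is a small sketch using tf.lite.Interpreter; the all-zeros input is only there to confirm the graph executes, and its shape and dtype are read from the model itself:
import numpy as np
import tensorflow as tf

interpreter = tf.lite.Interpreter(model_path="basic_malaria_pos_neg_v3.tflite")
interpreter.allocate_tensors()

input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()

# Run the model once on a dummy tensor of the expected shape and dtype
dummy = np.zeros(input_details[0]['shape'], dtype=input_details[0]['dtype'])
interpreter.set_tensor(input_details[0]['index'], dummy)
interpreter.invoke()
print(interpreter.get_tensor(output_details[0]['index']).shape)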
When trying to load a Keras model using tf.keras.models.load_model, I get the following error:
import tensorflow as tf
from tensorflow_addons.optimizers import RectifiedAdam
model = tf.keras.models.load_model('model', custom_objects = {'RectifiedAdam' : RectifiedAdam})
Error:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/mylib/python3.7/site-packages/tensorflow_core/python/keras/saving/save.py", line 150, in load_model
return saved_model_load.load(filepath, compile)
File "/mylib/python3.7/site-packages/tensorflow_core/python/keras/saving/saved_model/load.py", line 99, in load
training_config))
File "/mylib/python3.7/site-packages/tensorflow_core/python/keras/saving/saving_utils.py", line 229, in compile_args_from_training_config
optimizer_config, custom_objects=custom_objects)
File "/mylib/python3.7/site-packages/tensorflow_core/python/keras/optimizers.py", line 819, in deserialize
printable_module_name='optimizer')
File "/mylib/python3.7/site-packages/tensorflow_core/python/keras/utils/generic_utils.py", line 292, in deserialize_keras_object
config, module_objects, custom_objects, printable_module_name)
File "/mylib/python3.7/site-packages/tensorflow_core/python/keras/utils/generic_utils.py", line 250, in class_and_config_for_serialized_keras_object
raise ValueError('Unknown ' + printable_module_name + ': ' + class_name)
ValueError: Unknown optimizer: RectifiedAdam
I can load the model with compile set to False, tf.keras.models.load_model('model', compile=False), and then compile it again with the RectifiedAdam optimizer (as suggested here: https://stackoverflow.com/a/56565801); however, that is not ideal...
So any ideas on, what I'm doing wrong?
One quick hack around this is to manually assign RectifiedAdam to an attribute in the TensorFlow namespace:
import tensorflow as tf
from tensorflow_addons.optimizers import RectifiedAdam
tf.keras.optimizers.RectifiedAdam = RectifiedAdam
...
or do something like this:
models.load_model('myModel.h5', custom_objects={'MyOptimizer': MyOptimizer})
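Another option worth trying (a sketch, assuming the optimizer was serialized under the name 'RectifiedAdam', which is what the traceback suggests) is a custom object scope, so the mapping is in effect while the training config is deserialized:
import tensorflow as tf
from tensorflow_addons.optimizers import RectifiedAdam

# Make RectifiedAdam resolvable by name during deserialization
with tf.keras.utils.custom_object_scope({'RectifiedAdam': RectifiedAdam}):
    model = tf.keras.models.load_model('model')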
The model was trained on a TensorFlow (Keras) version higher than the one you are trying to load it with. Find a compatible version of TensorFlow (Keras).