"""Smoke-test an ONNX model: load it, inspect its I/O, and run one dummy inference."""

import numpy as np
import onnxruntime as ort

# Load the ONNX model from disk.
session = ort.InferenceSession('./saved-model/model.onnx')

# Get input and output tensor names from the model's graph.
input_name = session.get_inputs()[0].name
output_name = session.get_outputs()[0].name

# Print the declared input shape to verify it matches what we feed in.
input_shape = session.get_inputs()[0].shape
print(f"Expected input shape: {input_shape}")

# Create a dummy input with the expected shape [1, 128, 128, 3].
# float32 is required: ONNX Runtime rejects the float64 that randn returns by default.
dummy_input = np.random.randn(1, 128, 128, 3).astype(np.float32)

# Run inference; passing [output_name] requests only that output.
outputs = session.run([output_name], {input_name: dummy_input})
print(f"Inference outputs: {outputs}")