import numpy as np
import torch
import ncnn


def test_inference_no_inputshape():
    torch.manual_seed(0)
    # fixed seed so the input matches the one used in test_inference
    in0 = torch.rand(1, 3, 224, 224, dtype=torch.float)
    # in0 = torch.rand(dtype=null)
    out = []
    with ncnn.Net() as net:
        net.load_param("D:\\myProgram\\ideaJava\\yiZhiXiangMuZu\\RWKV\\faster-rwkv\\models\\mobilenet_v2\\win_Optional\\mobilenet_v2.ncnn.param")
        net.load_model("D:\\myProgram\\ideaJava\\yiZhiXiangMuZu\\RWKV\\faster-rwkv\\models\\mobilenet_v2\\win_Optional\\mobilenet_v2.ncnn.bin")
        with net.create_extractor() as ex:
            # drop the batch dimension; ncnn.Mat takes the CHW numpy array
            ex.input("in0", ncnn.Mat(in0.squeeze(0).numpy()).clone())
            _, out0 = ex.extract("out0")
            out.append(torch.from_numpy(np.array(out0)).unsqueeze(0))
    print(len(out))
    if len(out) == 1:
        return out[0]
    else:
        return tuple(out)


result = torch.eq(test_inference(), test_inference_no_inputshape())
print(result)
test_inference is identical, only the model path is different (I obtained that model a different way). What puzzles me is: why is it exactly 1000 Trues? Thanks.
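For reference, a minimal sketch of how one could check where the count comes from, assuming test_inference and test_inference_no_inputshape are the two functions above:

a = test_inference()
b = test_inference_no_inputshape()
print(a.shape, b.shape)         # an ImageNet classifier head like MobileNetV2 outputs 1000 class scores, i.e. [1, 1000]
eq = torch.eq(a, b)             # element-wise comparison -> bool tensor of the same shape
print(eq.shape, int(eq.sum()))  # how many of the elements match
print(torch.equal(a, b))        # True only if every element is exactly equal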