Unity-Technologies / barracuda-release


Barracuda produces different output; is there an unsupported operation? (LSTM) #291

Closed · Jamesika closed this issue 2 years ago

Jamesika commented 2 years ago
import torch
import torch.nn as nn

# Custom bidirectional LSTM, because Unity Barracuda doesn't support
# nn.LSTM with bidirectional=True. Two unidirectional LSTMs are run
# instead: one over the original sequence and one over the time-reversed
# sequence, and their final hidden states are concatenated.
class BILSTM(nn.Module):
    def __init__(self, inputSize, hiddenSize, numLayers, dropOut):
        super(BILSTM, self).__init__()
        self.biLayer1 = nn.LSTM(input_size=inputSize, hidden_size=hiddenSize, num_layers=numLayers, batch_first=True, dropout=dropOut).cuda()
        self.biLayer2 = nn.LSTM(input_size=inputSize, hidden_size=hiddenSize, num_layers=numLayers, batch_first=True, dropout=dropOut).cuda()

    def forward(self, x):
        # Forward direction over the original sequence.
        out1, (hidden1, _) = self.biLayer1(x)
        # Backward direction: flip along the time axis (dim 1 with
        # batch_first=True), run the LSTM, then flip the outputs back.
        out2, (hidden2, _) = self.biLayer2(torch.flip(x, dims=[1]))
        out2 = torch.flip(out2, dims=[1])
        # Stack the per-layer final hidden states of both directions.
        hidden = torch.cat([hidden1, hidden2], dim=0)
        # The cell state is unused downstream, so a placeholder is returned.
        return (out1, out2), (hidden, 0)

class SimNN(nn.Module):
    def __init__(self, inputSize, hiddenSize, numLayers):
        super(SimNN, self).__init__()
        self.BILSTM = BILSTM(inputSize * 2, hiddenSize, numLayers, 0.5)
        self.classifyLayer = nn.Linear(hiddenSize, 2)
        self.dropOut = nn.Dropout(p=0.2)

    def forward(self, x):
        # Split the two input channels and concatenate them feature-wise,
        # giving a (batch, seqLen, inputSize*2) tensor.
        xL = x[:, 0, :, :]
        xR = x[:, 1, :, :]
        x = torch.cat([xL, xR], dim=2)
        _, (h_n, c_n) = self.BILSTM(x)
        # With numLayers == 2, index 3 is the final hidden state of the
        # last layer of the backward LSTM.
        out = h_n[3]
        out = self.classifyLayer(out)
        out = self.dropOut(out)  # no-op in eval()/inference mode
        return out
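
To narrow down where the mismatch first appears, it can help to compare PyTorch against ONNX Runtime before involving Barracuda at all: if the outputs already differ here, the problem is in the export; if they match, the difference is introduced on the Barracuda side. A minimal sketch, assuming the SimNN definition above plus illustrative sizes (inputSize=16, hiddenSize=32, numLayers=2, seqLen=10) and the file name TestONNX.onnx, none of which come from the original post:

import numpy as np
import onnxruntime as ort
import torch

model = SimNN(inputSize=16, hiddenSize=32, numLayers=2).cuda().eval()
dummy = torch.randn(1, 2, 10, 16, device="cuda")  # (batch, channel, seqLen, inputSize)

# torch.flip is only exportable from opset 10 onward (it lowers to a
# Slice with negative steps), hence opset_version=11 here.
torch.onnx.export(model, dummy, "TestONNX.onnx",
                  input_names=["input"], output_names=["output"],
                  opset_version=11)

with torch.no_grad():
    ref = model(dummy).cpu().numpy()

sess = ort.InferenceSession("TestONNX.onnx", providers=["CPUExecutionProvider"])
got = sess.run(None, {"input": dummy.cpu().numpy()})[0]
print("max abs diff:", np.abs(ref - got).max())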

This is the exported ONNX model file: TestONNX.zip
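
If the unsupported operation turns out to be torch.flip rather than the LSTM itself, one possible workaround (an assumption, not a confirmed fix) is to reverse the time axis with index_select, which exports as a plain Gather instead of a negative-step Slice; reverse_time below is a hypothetical helper, not part of the original model:

def reverse_time(x):
    # x: (batch, seqLen, features), i.e. batch_first layout.
    # For a fixed sequence length the index tensor traces as a constant.
    idx = torch.arange(x.size(1) - 1, -1, -1, device=x.device)
    return x.index_select(1, idx)

Inside BILSTM.forward, the two torch.flip calls would then become reverse_time(x) and reverse_time(out2).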