% Assemble the layer graph: a shared 1-D convolutional front end with global pooling
lgraph = layerGraph();
tempLayers = [
sequenceInputLayer(4,"Name","input")
convolution1dLayer(4,32,"Name","conv1d","Padding","same")
globalAveragePooling1dLayer("Name","gapool1d")];
lgraph = addLayers(lgraph,tempLayers);
% Two parallel LSTM branches that each receive the pooled features
tempLayers = lstmLayer(25,"Name","lstm");
lgraph = addLayers(lgraph,tempLayers);
tempLayers = lstmLayer(25,"Name","lstm_1");
lgraph = addLayers(lgraph,tempLayers);
% Merge the two branches, then map to a single sigmoid output
tempLayers = [
concatenationLayer(1,2,"Name","concat")
lstmLayer(55,"Name","lstm_2")
dropoutLayer(0.5,"Name","drop")
fullyConnectedLayer(1,"Name","fc")
sigmoidLayer("Name","sigmoid")];
lgraph = addLayers(lgraph,tempLayers);
lgraph = connectLayers(lgraph,"gapool1d","lstm");
lgraph = connectLayers(lgraph,"gapool1d","lstm_1");
lgraph = connectLayers(lgraph,"lstm","concat/in1");
lgraph = connectLayers(lgraph,"lstm_1","concat/in2");
options = trainingOptions("adam", ...
SequencePaddingDirection="left", ...
Shuffle="every-epoch", ...
InitialLearnRate=InitialLR, ...
LearnRateSchedule="piecewise", ...
LearnRateDropPeriod=LRDropPeriod, ...
LearnRateDropFactor=LRDropFactor, ...
MiniBatchSize=miniBatchSize, ...
Plots="training-progress", ...
ExecutionEnvironment="parallel");
CNN_LTSM = trainnet(trainDataX, trainDataY, dlnetwork(lgraph),"mse",options);