Need an example of how to use fitDataset and how to make an iterator. Any help?
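Here is a minimal, self-contained sketch of both pieces: a generator function acts as the iterator, tf.data.generator wraps it into a Dataset, and fitDataset consumes it. The one-unit dense model and the synthetic data below are just placeholders, not anything from the original post:

import * as tf from '@tensorflow/tfjs'   // or use the tf global in a browser/notebook

// A generator function is the iterator: each yield produces one example
// in the {xs, ys} form that fitDataset expects.
function* dataGenerator() {
  for (let i = 0; i < 100; i++) {
    const x = Math.random()
    yield { xs: tf.tensor1d([x]), ys: tf.tensor1d([2 * x]) }
  }
}

// Wrap the generator in a Dataset and group examples into batches.
const dataset = tf.data.generator(dataGenerator).batch(8)

// Placeholder model: a single dense unit fitting y = 2x.
const model = tf.sequential({
  layers: [tf.layers.dense({ units: 1, inputShape: [1] })]
})
model.compile({ loss: 'meanSquaredError', optimizer: 'sgd' })

// fitDataset pulls batches from the dataset on each epoch.
await model.fitDataset(dataset, {
  epochs: 5,
  callbacks: {
    onEpochEnd: (epoch, logs) => console.log(`epoch ${epoch}: loss = ${logs.loss}`)
  }
})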
Array(3) [
  0: Array(5797) [
    0: Array(6) [
      0: Array(5) [0.38893081667012425, 0.4813, 0.42895509502144097, 0.9022654831238969, 0.16803849563125353]
      1: Array(5) [0.39946565520304406, 0.43979999999999997, 0.4418508092635008, 0.9021257329204674, 0.3310539867460218]
      2: Array(5) [0.4342401952015181, 0.4142, 0.4523099466109793, 0.9020507750885401, 0.47325988772107735]
      3: Array(5) [0.36607121037768436, 0.4354, 0.4681546129280907, 0.9021357055907799, 0.7571139354403309]
      5: Array(5) [0.44003019602788285, 0.3106, 0.48640805489529576, 0.9019725064073448, 0.5841830879700972]
    ]
    11: Array(6) [
      0: Array(5) [0.40641968534085765, 0.4593, 0.4700194121374823, 0.9020672350678558, 0.6080878105141543]
      1: Array(5) [0.4289789628259297, 0.4474, 0.47865869756581514, 0.9020140176177028, 0.4631629240324896]
      2: Array(5) [0.40354603276477696, 0.45539999999999997, 0.4770856611052942, 0.9020485494746725, 0.477611196891967]
      3: Array(5) [0.3513714671444261, 0.43979999999999997, 0.4722175697857155, 0.9020126407951335, 0.4263950025040185]
      4: Array(5) [0.4300218421130165, 0.3839, 0.48760130595463286, 0.9019809646461395, 0.4434787453340014]
      5: Array(5) [0.33031286133759746, 0.3942, 0.49133517029003415, 0.9020036533194778, 0.639283525544598]
    ]
    … more
  ]
]

Error: The feature data generated by the dataset lacks the required input key 'conv1d_Conv1D33_input'.
flattenDataset = (features, labels, split = 0.35) => {
  const slice = Math.floor(features.length * split)
  const featuresTrain = features.slice(0, slice)
  const featuresVal = features.slice(slice)
  const labelsTrain = labels.slice(0, slice)   // was features.slice(...): slice the labels, not the features
  const labelsVal = labels.slice(slice)
  const data = {
    train: tf.data.array(featuresTrain.map((c, i) => [c, labelsTrain[i]])),
    validation: tf.data.array(featuresVal.map((c, i) => [c, labelsVal[i]]))
  }
  return data
}

const [trainX, trainY] = await bigData
const model = await cnnLSTM // gru performing well
const BATCH_SIZE = 32
const dataSet = flattenDataset(trainX, trainY)

model.compile({
  loss: 'categoricalCrossentropy',
  optimizer: tf.train.adam(0.001),
  metrics: ['accuracy']
})

const lossValues = []       // used in onEpochEnd below, so they must be declared
const accuracyValues = []

await model.fitDataset(dataSet.train.batch(BATCH_SIZE), {
  epochs: C.trainSteps,
  validationData: dataSet.validation,
  callbacks: {
    onBatchEnd: async (batch, logs) => (await tf.nextFrame()),
    onEpochEnd: (epoch, logs) => {
      const i = epoch + 1
      lossValues.push({ epoch: i, loss: logs.loss, val_loss: logs.val_loss, set: 'train' })
      accuracyValues.push({ epoch: i, accuracy: logs.acc, val_accuracy: logs.val_acc, set: 'train' })
      // await md `${await plotLosses(train.lossValues)} ${await plotAccuracy(train.accuracyValues)}`
    }
  }
})
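The "lacks the required input key" error usually means fitDataset cannot match the dataset elements to the model's named input ('conv1d_Conv1D33_input'): it expects each element to be an object of the form {xs, ys}, with xs and ys as tensors (or as objects keyed by input/output names), whereas the code above yields plain [features, label] arrays. One possible reshaping is sketched below; the [6, 5] window shape and one-hot labels are assumptions read off the console output above, so adjust them to the real data:

// Sketch only: yield {xs, ys} tensor pairs so fitDataset can match the
// features to the model input. The shapes ([6, 5] windows, one-hot label
// vectors) are assumptions based on the console output above.
flattenDataset = (features, labels, split = 0.35) => {
  const slice = Math.floor(features.length * split)
  const toDataset = (f, l) =>
    tf.data.array(f.map((window, i) => ({
      xs: tf.tensor2d(window),   // one [timesteps, channels] window, e.g. [6, 5]
      ys: tf.tensor1d(l[i])      // one one-hot label vector
    })))
  return {
    train: toDataset(features.slice(0, slice), labels.slice(0, slice)),
    validation: toDataset(features.slice(slice), labels.slice(slice))
  }
}

const dataSet = flattenDataset(trainX, trainY)

await model.fitDataset(dataSet.train.batch(BATCH_SIZE), {
  epochs: C.trainSteps,
  // the validation dataset is iterated the same way, so batch it as well
  validationData: dataSet.validation.batch(BATCH_SIZE)
})

If holding the whole set as eager tensors is too heavy, the same {xs, ys} elements can also be produced lazily with tf.data.generator, as in the first sketch above.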