// Greedy autoregressive decoding: encode the English sentence, then predict the
// Spanish translation one token at a time, feeding each predicted token back into
// the decoder input until the end token is emitted or MAX_TOKENS is reached.
// Assumes `tf` (TensorFlow.js) and the project's `sdk` tokenizer helpers are in scope.
function translate(sentence, tokenizers, transformer, MAX_TOKENS) {
  // Tokenize the source sentence into a fixed-length array of integer token IDs.
  const encoderInput = sdk.wordsToIntTokens(sentence, tokenizers.English, MAX_TOKENS);
  // Special token IDs: 1 marks the start of a sentence, 2 marks the end.
  const [start, end] = [1, 2];
  // The decoder input begins with the start token and is zero-padded to MAX_TOKENS.
  const outputArray = [start].concat(Array(MAX_TOKENS - 1).fill(0));
  // Stop one position early so that outputArray[i + 1] stays within MAX_TOKENS.
  for (let i = 0; i < MAX_TOKENS - 1; i++) {
    const encoderInputTensor = tf.tensor([encoderInput]);
    const outputTensor = tf.tensor([outputArray]);
    // predictions has shape [1, MAX_TOKENS, vocabSize].
    const predictions = transformer.predict([encoderInputTensor, outputTensor]);
    // Take the logits at the current position and pick the most likely token ID.
    const lastPrediction = predictions.slice([0, i, 0], [1, 1, predictions.shape[2]]);
    const idTensor = lastPrediction.argMax(-1);
    const predictedId = idTensor.dataSync()[0];
    console.log(predictedId);
    // Feed the prediction back in as the next decoder input token.
    outputArray[i + 1] = predictedId;
    // Free the intermediate tensors before the next iteration.
    tf.dispose([encoderInputTensor, outputTensor, predictions, lastPrediction, idTensor]);
    // Stop as soon as the model emits the end token.
    if (predictedId === end) {
      break;
    }
  }
  // Convert the generated token IDs back into a Spanish sentence.
  return sdk.detokenizeSentence(outputArray, tokenizers.Spanish);
}
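
// A minimal usage sketch, not part of the original code: it assumes the trained
// transformer is a TensorFlow.js LayersModel saved at './model/model.json' and that
// a hypothetical sdk.loadTokenizers() helper returns { English, Spanish } tokenizers;
// substitute however your project actually loads the model and tokenizers.
async function demo() {
  const transformer = await tf.loadLayersModel('./model/model.json'); // assumed path
  const tokenizers = await sdk.loadTokenizers();                      // hypothetical helper
  const MAX_TOKENS = 128;                                             // assumed sequence length
  const spanish = translate('how are you today?', tokenizers, transformer, MAX_TOKENS);
  console.log(spanish);
}
demo();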