# This is my attempt at setting up and training a tensor2tensor Transformer model in eager mode.
# --- Environment & imports ---
import collections
import os

# Hide all GPUs from TensorFlow (force CPU execution). This must be set
# BEFORE TensorFlow is imported so CUDA never sees the devices; the original
# placement (after `import tensorflow`) only works because TF initializes
# its CUDA context lazily.
os.environ['CUDA_VISIBLE_DEVICES'] = ''

import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf

from tensor2tensor import models
from tensor2tensor import problems
from tensor2tensor.layers import common_layers
from tensor2tensor.utils import trainer_lib
from tensor2tensor.utils import t2t_model
from tensor2tensor.utils import registry
from tensor2tensor.utils import metrics

# Enable TF Eager execution. Must run before any other TF API call creates
# graph state, so it stays at the end of this setup section.
from tensorflow.contrib.eager.python import tfe
tfe.enable_eager_execution()
# --- Other setup ---
# Short alias for the estimator mode constants (TRAIN / EVAL / PREDICT).
Modes = tf.estimator.ModeKeys

# Local working directories: generated data, raw downloads, training
# output, and checkpoints.
data_dir = os.path.expanduser("~/t2t/data")
tmp_dir = os.path.expanduser("~/t2t/tmp")
train_dir = os.path.expanduser("~/t2t/train")
checkpoint_dir = os.path.expanduser("~/t2t/checkpoints")

# Create each directory up front (no-op if it already exists).
for _required_dir in (data_dir, tmp_dir, train_dir, checkpoint_dir):
    tf.gfile.MakeDirs(_required_dir)

# Public GCS buckets with pre-generated datasets and pretrained checkpoints.
gs_data_dir = "gs://tensor2tensor-data"
gs_ckpt_dir = "gs://tensor2tensor-checkpoints/"
# --- Problem & training data ---
# NOTE(review): despite the `ende_` prefix, this is the English->French
# WMT problem with a small (~8k) subword vocabulary; the variable name is
# kept so downstream code in this file keeps working.
ende_problem = problems.problem("translate_enfr_wmt_small8k")

# Download the raw corpora into tmp_dir and write TFRecord training files
# into data_dir (no-op if the data was already generated).
ende_problem.generate_data(data_dir, tmp_dir)

# Build the training input pipeline: repeat indefinitely, then batch.
# (Removed a leftover `problems.available()` call whose return value was
# discarded — it only made sense as a notebook cell displaying its output.)
BATCH_SIZE = 128
ende_problem_train_dataset = ende_problem.dataset(Modes.TRAIN, data_dir)
ende_problem_train_dataset = ende_problem_train_dataset.repeat(None).batch(BATCH_SIZE)
from tensor2tensor.models import transformer

# --- Model setup ---
# Registered model / hyperparameter-set names used by tensor2tensor.
model_name = "transformer"
hparams_set = "transformer_base"
# Hparams resolved against the en-fr small-8k problem's data in data_dir.
hparams = trainer_lib.create_hparams(hparams_set, data_dir=data_dir, problem_name="translate_enfr_wmt_small8k")
# presumably the subword vocabulary size of the small-8k problem — TODO confirm
VOCAB_SIZE=8374
from tensor2tensor.data_generators import problem_hparams
# Synthetic problem hparams with identical input/target vocab sizes
# (a test helper from tensor2tensor's data_generators).
p_hparams = problem_hparams.test_problem_hparams(VOCAB_SIZE, VOCAB_SIZE)
# NOTE(review): `hparams.problems` is the legacy attribute name; newer
# tensor2tensor versions expect `hparams.problem_hparams` — verify against
# the installed t2t version before relying on this.
hparams.problems = [p_hparams]
# Instantiate the Transformer directly in TRAIN mode with the problem hparams.
model=transformer.Transformer(hparams, Modes.TRAIN, p_hparams)