# -*- coding: utf-8 -*-
"""BERT-TorchAutomatically generated by Colaboratory.Original file is located at
https://colab.research.google.com/drive/1LVhb99B-YQJ1bGnaWIX-2bgANy78zAAt
"""'''
Code by Tae Hwan Jung (Jeff Jung) @graykode, modified by wmathor
Reference : https://github.com/jadore801120/attention-is-all-you-need-pytorch
https://github.com/JayParks/transformer, https://github.com/dhlee347/pytorchic-bert
https://github.com/wmathor/nlp-tutorial/tree/master/5-2.BERT
'''
import re
import math
import torch
import numpy as np
from random imp