The last layer returns a wrong embedding dimension
#30 opened by macleginn

For facebook/opt-350m, the hidden state of the last layer comes back with a different dimensionality (512) than all the other layers (1024). In the snippet below, s is an ordinary input sentence defined earlier in the session:
In [18]: from transformers import AutoModel, AutoTokenizer
    ...: model_name = 'facebook/opt-350m'
    ...: tok1 = AutoTokenizer.from_pretrained(model_name, use_fast=False)
    ...: model1 = AutoModel.from_pretrained(model_name)
    ...: inputs1 = tok1(s, return_tensors='pt')
    ...: outputs = model1(**inputs1, output_hidden_states=True)
    ...: n_layers = len(outputs.hidden_states)
    ...: for i in range(n_layers-5, n_layers):
    ...:     print(outputs.hidden_states[i][0, -1].size())
    ...:
torch.Size([1024])
torch.Size([1024])
torch.Size([1024])
torch.Size([1024])
torch.Size([512])
Compare this with facebook/opt-1.3b, where the last layer has the same dimensionality as all the others:
In [19]: model_name = 'facebook/opt-1.3b'
    ...: tok1 = AutoTokenizer.from_pretrained(model_name, use_fast=False)
    ...: model1 = AutoModel.from_pretrained(model_name)
    ...: inputs1 = tok1(s, return_tensors='pt')
    ...: outputs = model1(**inputs1, output_hidden_states=True)
    ...: n_layers = len(outputs.hidden_states)
    ...: for i in range(n_layers-5, n_layers):
    ...:     print(outputs.hidden_states[i][0, -1].size())
    ...:
torch.Size([2048])
torch.Size([2048])
torch.Size([2048])
torch.Size([2048])
torch.Size([2048])
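
A minimal way to check whether the two checkpoints are simply configured with different output embedding widths (a sketch only, not verified here; it assumes the OPT config exposes a word_embed_proj_dim field alongside hidden_size, and that the model projects the final hidden state down to word_embed_proj_dim when the two differ):

from transformers import AutoConfig

# Compare the transformer width with the (assumed) embedding projection
# width for both checkpoints.
for name in ('facebook/opt-350m', 'facebook/opt-1.3b'):
    cfg = AutoConfig.from_pretrained(name)
    # getattr with a default, since word_embed_proj_dim is an assumption
    # about the OPT config rather than something checked above.
    print(name, cfg.hidden_size, getattr(cfg, 'word_embed_proj_dim', None))

If the two values differ for opt-350m but match for opt-1.3b, that would line up with the 512 vs. 1024 sizes printed above.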