switched from mbert uncased to cased version
parent 6995854e3d
commit 8354d76513
@@ -100,7 +100,7 @@ class TextualTransformerGen(ViewGen, TransformerGen):
         if "bert" == model_name:
             return "bert-base-uncased"
         elif "mbert" == model_name:
-            return "bert-base-multilingual-uncased"
+            return "bert-base-multilingual-cased"
         elif "xlm-roberta" == model_name:
             return "xlm-roberta-base"
         elif "mt5" == model_name:
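For context, the hunk above touches the alias-to-checkpoint mapping: after this commit the "mbert" alias resolves to the cased multilingual BERT checkpoint instead of the uncased one. A minimal sketch of the resulting mapping (the dict form and helper name below are illustrative, not the class's actual if/elif code):

# Illustrative only: TextualTransformerGen uses an if/elif chain; this dict and
# helper name are assumptions made for the sketch.
HF_CHECKPOINTS = {
    "bert": "bert-base-uncased",
    "mbert": "bert-base-multilingual-cased",  # was "bert-base-multilingual-uncased" before this commit
    "xlm-roberta": "xlm-roberta-base",
}

def resolve_checkpoint(alias: str) -> str:
    return HF_CHECKPOINTS[alias]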
@@ -114,12 +114,14 @@ class TextualTransformerGen(ViewGen, TransformerGen):
                 model_name, num_labels=num_labels, output_hidden_states=True
             )
         else:
-            model_name = "models/vgfs/trained_transformer/mbert-sentiment/checkpoint-8500"  # TODO hardcoded to pre-trained mbert
+            # model_name = "models/vgfs/trained_transformer/mbert-sentiment/checkpoint-8500"  # TODO hardcoded to pre-trained mbert
+            model_name = "mbert-rai-multi-2000/checkpoint-1500"  # TODO hardcoded to pre-trained mbert
             return AutoModelForSequenceClassification.from_pretrained(
                 model_name, num_labels=num_labels, output_hidden_states=True
             )
 
     def load_tokenizer(self, model_name):
+        # model_name = "mbert-rai-multi-2000/checkpoint-1500"  # TODO hardcoded to pre-trained mbert
         return AutoTokenizer.from_pretrained(model_name)
 
     def init_model(self, model_name, num_labels):
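The second hunk only swaps the hardcoded fallback checkpoint (the old mbert-sentiment path stays commented out) and leaves the override in load_tokenizer commented out as well, so the tokenizer is still loaded from whatever model_name the caller passes. A minimal sketch of loading such a local checkpoint with the standard transformers calls (the label count is a placeholder, and the tokenizer name reflects my reading of the diff, not a value confirmed by the repo):

from transformers import AutoModelForSequenceClassification, AutoTokenizer

checkpoint = "mbert-rai-multi-2000/checkpoint-1500"  # local path hardcoded in the diff

model = AutoModelForSequenceClassification.from_pretrained(
    checkpoint, num_labels=3, output_hidden_states=True  # num_labels=3 is illustrative
)
# load_tokenizer still receives the base model name, since its override is commented out.
tokenizer = AutoTokenizer.from_pretrained("bert-base-multilingual-cased")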
@@ -202,7 +204,8 @@ class TextualTransformerGen(ViewGen, TransformerGen):
         # )
 
         if self.probabilistic:
-            self.feature2posterior_projector.fit(self.transform(lX), lY)
+            transformed = self.transform(lX)
+            self.feature2posterior_projector.fit(transformed, lY)
 
         self.fitted = True
 
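The last hunk is a behaviour-preserving refactor: the embedding step and the posterior-projector fit are now two statements, which makes the intermediate representation available for inspection or logging. A self-contained toy sketch of the same two-step pattern (the class and transform function below are stand-ins, not the project's code):

import numpy as np

class ToyProjector:  # stand-in for feature2posterior_projector
    def fit(self, X, y):
        self.classes_ = np.unique(y)
        return self

def transform(lX):  # stand-in for self.transform: pretend document embeddings
    return np.ones((len(lX), 4))

lX, lY = ["doc a", "doc b"], np.array([0, 1])
transformed = transform(lX)            # step 1: materialise the transformed view once
ToyProjector().fit(transformed, lY)    # step 2: fit the projector on the named intermediate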