Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/bitextor/bicleaner-ai.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
Diffstat (limited to 'bicleaner_ai/models.py')
-rw-r--r--bicleaner_ai/models.py14
1 file changed, 7 insertions, 7 deletions
diff --git a/bicleaner_ai/models.py b/bicleaner_ai/models.py
index cdb403e..ab087c0 100644
--- a/bicleaner_ai/models.py
+++ b/bicleaner_ai/models.py
@@ -439,8 +439,8 @@ class Transformer(BaseModel):
decay_steps=self.settings["steps_per_epoch"]//4,
decay_rate=0.2)
self.settings["scheduler"] = scheduler
- self.settings["optimizer"] = Adam(learning_rate=settings["scheduler"],
- clipnorm=settings["clipnorm"])
+ self.settings["optimizer"] = Adam(learning_rate=self.settings["scheduler"],
+ clipnorm=self.settings["clipnorm"])
def get_generator(self, batch_size, shuffle):
return ConcatSentenceGenerator(
@@ -452,8 +452,10 @@ class Transformer(BaseModel):
def build_model(self, compile=True):
settings = self.settings
inputs = layers.Input(shape=(settings["maxlen"],), dtype='int32')
- embedding = TokenAndPositionEmbedding(self.wv,
+ embedding = TokenAndPositionEmbedding(settings['vocab_size'],
+ settings['emb_dim'],
settings["maxlen"],
+ self.wv,
trainable=True)
transformer_block = TransformerBlock(
settings["emb_dim"],
@@ -631,10 +633,8 @@ class BCXLMRoberta(BaseModel):
batch_size=self.settings["batch_size"],
callbacks=[earlystop],
verbose=verbose)
- self.model.save_pretrained(self.dir + '/'
- + self.settings["model_file"])
- self.tokenizer.save_pretrained(self.dir + '/'
- + self.settings["vocab_file"])
+ self.model.save_pretrained(self.dir)
+ self.tokenizer.save_pretrained(self.dir)
y_true = dev_generator.y
with redirect_stdout(sys.stderr):