Merge remote-tracking branch 'origin/feat/add-pytorch-model-support' into feat/add-pytorch-model-support

commit 1c91b4427b
Author: Yinon Polak
Date:   2023-03-18 14:14:38 +02:00

3 changed files with 4 additions and 4 deletions

@@ -446,7 +446,7 @@ class FreqaiDataDrawer:
             dump(model, save_path / f"{dk.model_filename}_model.joblib")
         elif self.model_type == 'keras':
             model.save(save_path / f"{dk.model_filename}_model.h5")
-        elif self.model_type in ["stable_baselines", "sb3_contrib", "pytorch"]:
+        elif self.model_type in ["stable_baselines3", "sb3_contrib", "pytorch"]:
             model.save(save_path / f"{dk.model_filename}_model.zip")
 
         if dk.svm_model is not None:
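
The hunk above renames the reinforcement-learning identifier from "stable_baselines" to "stable_baselines3" in the branch that persists models as .zip archives. Below is a minimal sketch of that model_type-to-extension dispatch; the MODEL_FILE_EXTENSIONS map and the model_save_path helper are illustrative names, not the project's API.

from pathlib import Path

# Illustrative mapping from model_type to on-disk format; "stable_baselines3",
# "sb3_contrib" and "pytorch" all share the .zip branch touched in the hunk above.
MODEL_FILE_EXTENSIONS = {
    "joblib": ".joblib",
    "keras": ".h5",
    "stable_baselines3": ".zip",
    "sb3_contrib": ".zip",
    "pytorch": ".zip",
}

def model_save_path(save_path: Path, model_filename: str, model_type: str) -> Path:
    """Build the path a trained model would be written to for a given model_type."""
    extension = MODEL_FILE_EXTENSIONS.get(model_type, ".joblib")
    return save_path / f"{model_filename}_model{extension}"

# e.g. model_save_path(Path("user_data/models"), "cb_BTC_1", "pytorch")
# -> user_data/models/cb_BTC_1_model.zip
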
@@ -496,7 +496,7 @@ class FreqaiDataDrawer:
         dk.training_features_list = dk.data["training_features_list"]
         dk.label_list = dk.data["label_list"]
 
-    def load_data(self, coin: str, dk: FreqaiDataKitchen) -> Any:
+    def load_data(self, coin: str, dk: FreqaiDataKitchen) -> Any:  # noqa: C901
         """
         loads all data required to make a prediction on a sub-train time range
         :returns:

@@ -563,7 +563,7 @@ class IFreqaiModel(ABC):
             file_type = ".joblib"
         elif self.dd.model_type == 'keras':
             file_type = ".h5"
-        elif self.dd.model_type in ["stable_baselines", "sb3_contrib", "pytorch"]:
+        elif self.dd.model_type in ["stable_baselines3", "sb3_contrib", "pytorch"]:
             file_type = ".zip"
 
         path_to_modelfile = Path(dk.data_path / f"{dk.model_filename}_model{file_type}")
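
The second occurrence of the rename lives in IFreqaiModel, where the same extension is used to check whether a trained model already exists on disk. A self-contained sketch of that check follows; trained_model_exists is a hypothetical helper name, not a method of the interface.

from pathlib import Path

def trained_model_exists(data_path: Path, model_filename: str, model_type: str) -> bool:
    """Return True if a model file with the expected extension is already on disk."""
    if model_type == 'keras':
        file_type = ".h5"
    elif model_type in ["stable_baselines3", "sb3_contrib", "pytorch"]:
        file_type = ".zip"
    else:
        file_type = ".joblib"
    path_to_modelfile = Path(data_path / f"{model_filename}_model{file_type}")
    return path_to_modelfile.exists()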

@@ -41,7 +41,7 @@ class PyTorchClassifierMultiTarget(BasePyTorchModel):
         self.max_n_eval_batches: Optional[int] = model_training_params.get(
             "max_n_eval_batches", None
         )
-        self.model_kwargs: Dict = model_training_params.get("model_kwargs", {})
+        self.model_kwargs: Dict[str, any] = model_training_params.get("model_kwargs", {})
         self.class_name_to_index = None
         self.index_to_class_name = None
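
The model_training_params dict read in this constructor comes from the strategy's FreqAI configuration. A hedged sketch of how such a block might look and how the two keys above are pulled out of it; the concrete values and the hidden_dim kwarg are made up for the example.

from typing import Any, Dict, Optional

# Illustrative model_training_parameters block; "max_n_eval_batches" and
# "model_kwargs" are the keys read in the constructor above.
model_training_params: Dict[str, Any] = {
    "max_n_eval_batches": None,           # evaluate on the full eval set
    "model_kwargs": {"hidden_dim": 512},  # forwarded to the torch model constructor
}

max_n_eval_batches: Optional[int] = model_training_params.get("max_n_eval_batches", None)
model_kwargs: Dict[str, Any] = model_training_params.get("model_kwargs", {})

(The sketch annotates model_kwargs with typing.Any; the lowercase any in the hunk refers to the built-in function, which type checkers usually reject in an annotation.)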