Mirror of https://github.com/freqtrade/freqtrade.git, synced 2024-11-12 19:23:55 +00:00
chore: make DOCS_LINK in constants.py, ensure datasieve is added to setup.py
This commit is contained in:
parent ad8a4897ce
commit 75ec19062c
freqtrade/constants.py
@@ -8,6 +8,7 @@ from typing import Any, Dict, List, Literal, Tuple
 from freqtrade.enums import CandleType, PriceType, RPCMessageType
 
 
+DOCS_LINK = "https://www.freqtrade.io/en/stable"
 DEFAULT_CONFIG = 'config.json'
 DEFAULT_EXCHANGE = 'bittrex'
 PROCESS_THROTTLE_SECS = 5  # sec
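The new constant centralises the documentation base URL that was previously hard-coded (as "https://www.freqtrade.io/en/latest") at individual call sites. A minimal sketch, not part of this commit, of how a call site can build a docs link from it, mirroring the f-string usage further down in this diff:

# Hedged sketch: build a documentation URL from the new constant instead of
# hard-coding the base address at each call site.
from freqtrade.constants import DOCS_LINK

migration_guide = f"{DOCS_LINK}/strategy_migration/#freqai-new-data-pipeline"
# -> "https://www.freqtrade.io/en/stable/strategy_migration/#freqai-new-data-pipeline"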
freqtrade/freqai/RL/BaseReinforcementLearningModel.py
@@ -119,7 +119,6 @@ class BaseReinforcementLearningModel(IFreqaiModel):
         prices_train, prices_test = self.build_ohlc_price_dataframes(dk.data_dictionary, pair, dk)
 
         dk.feature_pipeline = self.define_data_pipeline(threads=dk.thread_count)
-        dk.label_pipeline = self.define_label_pipeline(threads=dk.thread_count)
 
         (dd["train_features"],
          dd["train_labels"],
freqtrade/freqai/data_kitchen.py
@@ -12,7 +12,6 @@ import numpy.typing as npt
 import pandas as pd
 import psutil
 from datasieve.pipeline import Pipeline
-from datasieve.transforms import SKLearnWrapper
 from pandas import DataFrame
 from sklearn.model_selection import train_test_split
 
@@ -966,35 +965,7 @@ class FreqaiDataKitchen:
             "in a future version.\n"
             "This version does not include any outlier configurations")
 
-        import datasieve.transforms as ds
-        from sklearn.preprocessing import MinMaxScaler
-        dd = data_dictionary
-
-        self.feature_pipeline = Pipeline([
-            ('variance_threshold', ds.VarianceThreshold()),
-            ('scaler', SKLearnWrapper(MinMaxScaler(feature_range=(-1, 1))))
-        ])
-
-        (dd["train_features"],
-         dd["train_labels"],
-         dd["train_weights"]) = self.feature_pipeline.fit_transform(dd["train_features"],
-                                                                    dd["train_labels"],
-                                                                    dd["train_weights"])
-
-        (dd["test_features"],
-         dd["test_labels"],
-         dd["test_weights"]) = self.feature_pipeline.transform(dd["test_features"],
-                                                               dd["test_labels"],
-                                                               dd["test_weights"])
-
-        self.label_pipeline = Pipeline([
-            ('scaler', SKLearnWrapper(MinMaxScaler(feature_range=(-1, 1))))
-        ])
-
-        dd["train_labels"], _, _ = self.label_pipeline.fit_transform(dd["train_labels"])
-        dd["test_labels"], _, _ = self.label_pipeline.transform(dd["test_labels"])
-
-        return dd
+        return data_dictionary
 
     def denormalize_labels_from_metadata(self, df: DataFrame) -> DataFrame:
         """
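The block removed above built the feature and label pipelines directly inside FreqaiDataKitchen; after this commit the kitchen simply returns the data dictionary and the pipelines are expected to come from the model's define_data_pipeline()/define_label_pipeline() hooks. A standalone, hedged sketch of the same datasieve steps the removed code applied; the toy DataFrames and their shapes are illustrative assumptions, not freqtrade data:

# Hedged sketch reproducing the removed normalisation steps with datasieve.
import numpy as np
import pandas as pd
import datasieve.transforms as ds
from datasieve.pipeline import Pipeline
from datasieve.transforms import SKLearnWrapper
from sklearn.preprocessing import MinMaxScaler

feature_pipeline = Pipeline([
    ("variance_threshold", ds.VarianceThreshold()),   # drop near-constant features
    ("scaler", SKLearnWrapper(MinMaxScaler(feature_range=(-1, 1)))),
])

train_features = pd.DataFrame(np.random.rand(100, 5))   # toy data
train_labels = pd.DataFrame(np.random.rand(100, 1))
train_weights = np.ones(100)

# fit on the training split; fit_transform returns the (features, labels, weights) triple
train_features, train_labels, train_weights = feature_pipeline.fit_transform(
    train_features, train_labels, train_weights)

# the test split is only transformed with the already-fitted pipeline:
# test_features, test_labels, test_weights = feature_pipeline.transform(...)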
freqtrade/freqai/freqai_interface.py
@@ -18,7 +18,7 @@ from pandas import DataFrame
 from sklearn.preprocessing import MinMaxScaler
 
 from freqtrade.configuration import TimeRange
-from freqtrade.constants import Config
+from freqtrade.constants import DOCS_LINK, Config
 from freqtrade.data.dataprovider import DataProvider
 from freqtrade.enums import RunMode
 from freqtrade.exceptions import OperationalException
@@ -974,20 +974,20 @@ class IFreqaiModel(ABC):
         """
         throw deprecation warning if this function is called
         """
-        ft = "https://www.freqtrade.io/en/latest"
         logger.warning(f"Your model {self.__class__.__name__} relies on the deprecated"
                        " data pipeline. Please update your model to use the new data pipeline."
                        " This can be achieved by following the migration guide at "
-                       f"{ft}/strategy_migration/#freqai-new-data-pipeline")
+                       f"{DOCS_LINK}/strategy_migration/#freqai-new-data-pipeline")
+        dk.feature_pipeline = self.define_data_pipeline(threads=dk.thread_count)
         return
 
     def data_cleaning_predict(self, dk: FreqaiDataKitchen, pair: str):
         """
         throw deprecation warning if this function is called
         """
-        ft = "https://www.freqtrade.io/en/latest"
         logger.warning(f"Your model {self.__class__.__name__} relies on the deprecated"
                        " data pipeline. Please update your model to use the new data pipeline."
                        " This can be achieved by following the migration guide at "
-                       f"{ft}/strategy_migration/#freqai-new-data-pipeline")
+                       f"{DOCS_LINK}/strategy_migration/#freqai-new-data-pipeline")
+        dk.label_pipeline = self.define_data_pipeline(threads=dk.thread_count)
         return
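Both deprecation shims now point at the migration guide via DOCS_LINK and assign a default pipeline before returning, so legacy models keep running while the warning is shown. A hedged sketch of the migration target, a model that overrides the pipeline hooks itself; the threads keyword is assumed from the call sites in this diff, and this is not the canonical freqtrade implementation:

# Hedged sketch of a migrated model that defines its own pipelines instead of
# relying on the deprecated data_cleaning_train()/data_cleaning_predict() path.
import datasieve.transforms as ds
from datasieve.pipeline import Pipeline
from datasieve.transforms import SKLearnWrapper
from sklearn.preprocessing import MinMaxScaler


class MyMigratedModel:  # stand-in for an IFreqaiModel subclass
    def define_data_pipeline(self, threads: int = -1) -> Pipeline:
        # same steps the removed FreqaiDataKitchen block applied to features
        return Pipeline([
            ("variance_threshold", ds.VarianceThreshold()),
            ("scaler", SKLearnWrapper(MinMaxScaler(feature_range=(-1, 1)))),
        ])

    def define_label_pipeline(self, threads: int = -1) -> Pipeline:
        # labels only need the scaler
        return Pipeline([
            ("scaler", SKLearnWrapper(MinMaxScaler(feature_range=(-1, 1)))),
        ])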