This page explains some advanced Hyperopt topics that may require higher coding skills and Python knowledge than the creation of an ordinary hyperoptimization class.
To use a custom loss function class, make sure that the function `hyperopt_loss_function` is defined in your custom hyperopt loss class.
For the sample below, you then need to add the command line parameter `--hyperopt-loss SuperDuperHyperOptLoss` to your hyperopt call so this function is being used.
A sample of this can be found below, which is identical to the Default Hyperopt loss implementation. A full sample can be found in `user_data/hyperopts`.
``` python
from datetime import datetime
from math import exp
from typing import Any, Dict

from pandas import DataFrame

from freqtrade.optimize.hyperopt import IHyperOptLoss

TARGET_TRADES = 600
EXPECTED_MAX_PROFIT = 3.0
MAX_ACCEPTED_TRADE_DURATION = 300


class SuperDuperHyperOptLoss(IHyperOptLoss):
    """
    Defines the default loss function for hyperopt
    """

    @staticmethod
    def hyperopt_loss_function(results: DataFrame, trade_count: int,
                               min_date: datetime, max_date: datetime,
                               config: Dict, processed: Dict[str, DataFrame],
                               backtest_stats: Dict[str, Any],
                               *args, **kwargs) -> float:
        """
        Objective function, returns smaller number for better results
        This is the legacy algorithm (used until now in freqtrade).
        Weights are distributed as follows:
        * 0.4 to trade duration
        * 0.25: Avoiding trade loss
        * 1.0 to total profit, compared to the expected value (`EXPECTED_MAX_PROFIT`) defined above
        """
        total_profit = results['profit_ratio'].sum()
        trade_duration = results['trade_duration'].mean()

        trade_loss = 1 - 0.25 * exp(-(trade_count - TARGET_TRADES) ** 2 / 10 ** 5.8)
        profit_loss = max(0, 1 - total_profit / EXPECTED_MAX_PROFIT)
        duration_loss = 0.4 * min(trade_duration / MAX_ACCEPTED_TRADE_DURATION, 1)
        result = trade_loss + profit_loss + duration_loss
        return result
```
Currently, the arguments are:

* `results`: DataFrame containing the resulting trades.
    The following columns are available in results (corresponds to the output-file of backtesting when used with `--export trades`):
    `pair, profit_ratio, profit_abs, open_date, open_rate, fee_open, close_date, close_rate, fee_close, amount, trade_duration, is_open, sell_reason, stake_amount, min_rate, max_rate, stop_loss_ratio, stop_loss_abs`
* `trade_count`: Amount of trades (identical to `len(results)`)
* `min_date`: Start date of the timerange used
* `max_date`: End date of the timerange used
* `config`: Config object used (Note: Not all strategy-related parameters will be updated here if they are part of a hyperopt space).
* `processed`: Dict of Dataframes with the pair as keys containing the data used for backtesting.
* `backtest_stats`: Backtesting statistics using the same format as the backtesting file "strategy" substructure. Available fields can be seen in `generate_strategy_stats()` in `optimize_reports.py`.

This function needs to return a floating point number (`float`). Smaller numbers will be interpreted as better results. The parameters and balancing for this are up to you.
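As an illustration of how these arguments can be combined, the class below is a minimal, hypothetical sketch (not part of freqtrade) of a loss that only rewards total profit; it would be selected with `--hyperopt-loss ProfitOnlyHyperOptLoss`:

``` python
from datetime import datetime
from typing import Any, Dict

from pandas import DataFrame

from freqtrade.optimize.hyperopt import IHyperOptLoss


class ProfitOnlyHyperOptLoss(IHyperOptLoss):
    """
    Hypothetical example: rank epochs purely by total profit.
    """

    @staticmethod
    def hyperopt_loss_function(results: DataFrame, trade_count: int,
                               min_date: datetime, max_date: datetime,
                               config: Dict, processed: Dict[str, DataFrame],
                               backtest_stats: Dict[str, Any],
                               *args, **kwargs) -> float:
        # Smaller is better, so negate the summed profit ratio.
        return -results['profit_ratio'].sum()
```

Such a single-criterion loss tends to favour few, large trades; in practice you will usually want to balance profit against trade count and duration as the default implementation above does.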
Note
This function is called once per epoch - so please make sure it is as optimized as possible, to not slow hyperopt down unnecessarily.
`*args` and `**kwargs`

Please keep the arguments `*args` and `**kwargs` in the interface to allow us to extend this interface in the future.
To override a pre-defined space (`roi_space`, `generate_roi_table`, `stoploss_space`, `trailing_space`), define a nested class called Hyperopt and define the required spaces as follows:
```python
from typing import List

from freqtrade.optimize.space import Dimension, Integer, SKDecimal
from freqtrade.strategy import IStrategy


class MyAwesomeStrategy(IStrategy):
    class HyperOpt:
        # Define a custom stoploss space.
        def stoploss_space():
            return [SKDecimal(-0.05, -0.01, decimals=3, name='stoploss')]

        # Define custom ROI space
        def roi_space() -> List[Dimension]:
            return [
                Integer(10, 120, name='roi_t1'),
                Integer(10, 60, name='roi_t2'),
                Integer(10, 40, name='roi_t3'),
                SKDecimal(0.01, 0.04, decimals=3, name='roi_p1'),
                SKDecimal(0.01, 0.07, decimals=3, name='roi_p2'),
                SKDecimal(0.01, 0.20, decimals=3, name='roi_p3'),
            ]
```
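If you change the number or names of the `roi_t*` / `roi_p*` dimensions, you would also override `generate_roi_table()` so the sampled values can be assembled into a ROI table. The following is only a sketch, assuming the default parameter names used above and the default 4-step table layout:

```python
from typing import Dict


class MyAwesomeStrategy(IStrategy):
    class HyperOpt:
        # Sketch: assemble the sampled roi_t*/roi_p* values into a ROI table.
        # Mirrors the default 4-step layout; adapt it if you rename or add
        # dimensions in roi_space().
        def generate_roi_table(params: Dict) -> Dict[int, float]:
            roi_table = {}
            roi_table[0] = params['roi_p1'] + params['roi_p2'] + params['roi_p3']
            roi_table[params['roi_t3']] = params['roi_p1'] + params['roi_p2']
            roi_table[params['roi_t3'] + params['roi_t2']] = params['roi_p1']
            roi_table[params['roi_t3'] + params['roi_t2'] + params['roi_t1']] = 0
            return roi_table
```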
Note

All overrides are optional and can be mixed/matched as necessary.
You can define your own estimator for Hyperopt by implementing `generate_estimator()` in the Hyperopt subclass.
```python
class MyAwesomeStrategy(IStrategy):
    class HyperOpt:
        def generate_estimator(dimensions: List['Dimension'], **kwargs):
            return "RF"
```
Possible values are either one of "GP", "RF", "ET", "GBRT" (Details can be found in the scikit-optimize documentation), or "an instance of a class that inherits from `RegressorMixin` (from sklearn) and where the `predict` method has an optional `return_std` argument, which returns `std(Y | x)` along with `E[Y | x]`".
Some research will be necessary to find additional Regressors.
Example for `ExtraTreesRegressor` ("ET") with additional parameters:
```python
class MyAwesomeStrategy(IStrategy):
    class HyperOpt:
        def generate_estimator(dimensions: List['Dimension'], **kwargs):
            from skopt.learning import ExtraTreesRegressor
            # Corresponds to "ET" - but allows additional parameters.
            return ExtraTreesRegressor(n_estimators=100)
```
The `dimensions` parameter is the list of `skopt.space.Dimension` objects corresponding to the parameters to be optimized. It can be used to create isotropic kernels for the `skopt.learning.GaussianProcessRegressor` estimator. Here's an example:
```python
import numpy as np


class MyAwesomeStrategy(IStrategy):
    class HyperOpt:
        def generate_estimator(dimensions: List['Dimension'], **kwargs):
            from skopt.utils import cook_estimator
            from skopt.learning.gaussian_process.kernels import (Matern, ConstantKernel)
            kernel_bounds = (0.0001, 10000)
            kernel = (
                ConstantKernel(1.0, kernel_bounds) *
                Matern(length_scale=np.ones(len(dimensions)), length_scale_bounds=[kernel_bounds for d in dimensions], nu=2.5)
            )
            kernel += (
                ConstantKernel(1.0, kernel_bounds) *
                Matern(length_scale=np.ones(len(dimensions)), length_scale_bounds=[kernel_bounds for d in dimensions], nu=1.5)
            )
            return cook_estimator("GP", space=dimensions, kernel=kernel, n_restarts_optimizer=2)
```
Note

While custom estimators can be provided, it's up to you as the user to do research on possible parameters and analyze / understand which ones should be used.
If you're unsure about this, best use one of the defaults (`"ET"` has proven to be the most versatile) without further parameters.
For the additional spaces, scikit-optimize (in combination with Freqtrade) provides the following space types:
* `Categorical` - Pick from a list of categories (e.g. `Categorical(['a', 'b', 'c'], name="cat")`)
* `Integer` - Pick from a range of whole numbers (e.g. `Integer(1, 10, name='rsi')`)
* `SKDecimal` - Pick from a range of decimal numbers with limited precision (e.g. `SKDecimal(0.1, 0.5, decimals=3, name='adx')`). Available only with freqtrade.
* `Real` - Pick from a range of decimal numbers with full precision (e.g. `Real(0.1, 0.5, name='adx')`)
You can import all of these from `freqtrade.optimize.space`, although `Categorical`, `Integer` and `Real` are only aliases for their corresponding scikit-optimize Spaces. `SKDecimal` is provided by freqtrade for faster optimizations.
```python
from freqtrade.optimize.space import Categorical, Dimension, Integer, SKDecimal, Real  # noqa
```
SKDecimal vs. Real

We recommend to use `SKDecimal` instead of the `Real` space in almost all cases. While the Real space provides full accuracy (up to ~16 decimal places), this precision is rarely needed and leads to unnecessarily long hyperopt times.
Assuming the definition of a rather small space (`SKDecimal(0.10, 0.15, decimals=2, name='xxx')`), SKDecimal will have 6 possibilities (`[0.10, 0.11, 0.12, 0.13, 0.14, 0.15]`).
A corresponding real space `Real(0.10, 0.15, name='xxx')` on the other hand has an almost unlimited number of possibilities (`[0.10, 0.100000000001, 0.100000000002, ... 0.149999999999, 0.15]`).
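To make the difference concrete, here is a small sketch of the same stoploss range defined both ways, building on the `HyperOpt` nested-class override shown earlier (the commented-out variant is the full-precision alternative):

```python
from freqtrade.optimize.space import Real, SKDecimal


class MyAwesomeStrategy(IStrategy):
    class HyperOpt:
        def stoploss_space():
            # SKDecimal with 3 decimals: 41 candidates (-0.050, -0.049, ..., -0.010).
            return [SKDecimal(-0.05, -0.01, decimals=3, name='stoploss')]

        # The same range with full precision - practically unlimited candidates,
        # which usually just makes hyperopt slower without improving results:
        # def stoploss_space():
        #     return [Real(-0.05, -0.01, name='stoploss')]
```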
This page explains some advanced tasks and configuration options that can be performed after the bot installation and may be useful in some environments.
If you do not know what the things mentioned here mean, you probably do not need them.
This section will show you how to run multiple bots at the same time, on the same machine.
In order to keep track of your trades, profits, etc., freqtrade uses a SQLite database where it stores various types of information such as the trades you performed in the past and the current position(s) you are holding at any time. This allows you to keep track of your profits, but most importantly, to keep track of ongoing activity if the bot process is restarted or terminated unexpectedly.
Freqtrade will, by default, use separate database files for dry-run and live bots (this assumes that no database URL is given either in the configuration or via command line argument).
For live trading mode, the default database will be `tradesv3.sqlite` and for dry-run it will be `tradesv3.dryrun.sqlite`.
The optional argument to the trade command used to specify the path of these files is `--db-url`, which requires a valid SQLAlchemy url.
So when you are starting a bot with only the config and strategy arguments in dry-run mode, the following 2 commands would have the same outcome.
``` bash
freqtrade trade -c MyConfig.json -s MyStrategy

freqtrade trade -c MyConfig.json -s MyStrategy --db-url sqlite:///tradesv3.dryrun.sqlite
```
This means that if you are running the trade command in two different terminals, for example to test your strategy in one instance with trades in USDT and in another instance with trades in BTC, you will have to run them with different databases.
If you specify the URL of a database which does not exist, freqtrade will create one with the name you specified. So to test your custom strategy with BTC and USDT stake currencies, you could use the following commands (in 2 separate terminals):
``` bash
freqtrade trade -c MyConfigBTC.json -s MyCustomStrategy --db-url sqlite:///user_data/tradesBTC.dryrun.sqlite
freqtrade trade -c MyConfigUSDT.json -s MyCustomStrategy --db-url sqlite:///user_data/tradesUSDT.dryrun.sqlite
```
Conversely, if you wish to do the same thing in production mode, you will also have to create at least one new database (in addition to the default one) and specify the path to the "live" databases, for example:
``` bash
freqtrade trade -c MyConfigBTC.json -s MyCustomStrategy --db-url sqlite:///user_data/tradesBTC.live.sqlite
freqtrade trade -c MyConfigUSDT.json -s MyCustomStrategy --db-url sqlite:///user_data/tradesUSDT.live.sqlite
```
For more information regarding usage of the sqlite databases, for example to manually enter or remove trades, please refer to the SQL Cheatsheet.
To run multiple instances of freqtrade using docker you will need to edit the docker-compose.yml file and add all the instances you want as separate services. Remember, you can separate your configuration into multiple files, so it's a good idea to think about making them modular; then, if you need to edit something common to all bots, you can do that in a single config file.

``` yml
version: '3'
services:
  freqtrade1:
    image: freqtradeorg/freqtrade:stable
    # image: freqtradeorg/freqtrade:develop
    # Use plotting image
    # image: freqtradeorg/freqtrade:develop_plot
    # Build step - only needed when additional dependencies are needed
    # build:
    #   context: .
    #   dockerfile: "./docker/Dockerfile.custom"
    restart: always
    container_name: freqtrade1
    volumes:
      - "./user_data:/freqtrade/user_data"
    # Expose api on port 8080 (localhost only)
    # Please read the https://www.freqtrade.io/en/latest/rest-api/ documentation
    # before enabling this.
    ports:
      - "127.0.0.1:8080:8080"
    # Default command used when running docker compose up
    command: >
      trade
      --logfile /freqtrade/user_data/logs/freqtrade1.log
      --db-url sqlite:////freqtrade/user_data/tradesv3_freqtrade1.sqlite
      --config /freqtrade/user_data/config.json
      --config /freqtrade/user_data/config.freqtrade1.json
      --strategy SampleStrategy

  freqtrade2:
    image: freqtradeorg/freqtrade:stable
    # image: freqtradeorg/freqtrade:develop
    # Use plotting image
    # image: freqtradeorg/freqtrade:develop_plot
    # Build step - only needed when additional dependencies are needed
    # build:
    #   context: .
    #   dockerfile: "./docker/Dockerfile.custom"
    restart: always
    container_name: freqtrade2
    volumes:
      - "./user_data:/freqtrade/user_data"
    # Expose api on port 8080 (localhost only)
    # Please read the https://www.freqtrade.io/en/latest/rest-api/ documentation
    # before enabling this.
    ports:
      - "127.0.0.1:8081:8080"
    # Default command used when running docker compose up
    command: >
      trade
      --logfile /freqtrade/user_data/logs/freqtrade2.log
      --db-url sqlite:////freqtrade/user_data/tradesv3_freqtrade2.sqlite
      --config /freqtrade/user_data/config.json
      --config /freqtrade/user_data/config.freqtrade2.json
      --strategy SampleStrategy
```

You can use whatever naming convention you want; freqtrade1 and 2 are arbitrary. Note that you will need to use different database files, port mappings and telegram configurations for each instance, as mentioned above.
Copy the `freqtrade.service` file to your systemd user directory (usually `~/.config/systemd/user`) and update `WorkingDirectory` and `ExecStart` to match your setup.
Note

Certain systems (like Raspbian) don't load service unit files from the user directory. In this case, copy `freqtrade.service` into `/etc/systemd/user/` (requires superuser permissions).
After that you can start the daemon with:
``` bash
systemctl --user start freqtrade
```
For this to be persistent (run when user is logged out) you'll need to enable `linger` for your freqtrade user.

``` bash
sudo loginctl enable-linger "$USER"
```
If you run the bot as a service, you can use systemd service manager as a software watchdog monitoring freqtrade bot state and restarting it in the case of failures. If the `internals.sd_notify` parameter is set to true in the configuration or the `--sd-notify` command line option is used, the bot will send keep-alive ping messages to systemd using the sd_notify (systemd notifications) protocol and will also tell systemd its current state (Running or Stopped) when it changes.
The `freqtrade.service.watchdog` file contains an example of the service unit configuration file which uses `systemd` as the watchdog.
Note
The sd_notify communication between the bot and the systemd service manager will not work if the bot runs in a Docker container.
On many Linux systems the bot can be configured to send its log messages to `syslog` or `journald` system services. Logging to a remote `syslog` server is also available on Windows. The special values for the `--logfile` command line option can be used for this.
To send Freqtrade log messages to a local or remote `syslog` service use the `--logfile` command line option with the value in the following format:
* `--logfile syslog:<syslog_address>` -- send log messages to `syslog` service using the `<syslog_address>` as the syslog address.

The syslog address can be either a Unix domain socket (socket filename) or a UDP socket specification, consisting of IP address and UDP port, separated by the `:` character.
So, the following are examples of possible usages:

* `--logfile syslog:/dev/log` -- log to syslog (rsyslog) using the `/dev/log` socket, suitable for most systems.
* `--logfile syslog` -- same as above, the shortcut for `/dev/log`.
* `--logfile syslog:/var/run/syslog` -- log to syslog (rsyslog) using the `/var/run/syslog` socket. Use this on MacOS.
* `--logfile syslog:localhost:514` -- log to local syslog using UDP socket, if it listens on port 514.
* `--logfile syslog:<ip>:514` -- log to remote syslog at IP address and port 514. This may be used on Windows for remote logging to an external syslog server.

Log messages are sent to `syslog` with the `user` facility. So you can see them with the following commands:

* `tail -f /var/log/user`, or
* install a comprehensive graphical viewer (for instance, 'Log File Viewer' for Ubuntu).

On many systems `syslog` (`rsyslog`) fetches data from `journald` (and vice versa), so both `--logfile syslog` or `--logfile journald` can be used and the messages viewed with both `journalctl` and a syslog viewer utility. You can combine this in any way which suits you better.
For `rsyslog` the messages from the bot can be redirected into a separate dedicated log file. To achieve this, add

```
if $programname startswith "freqtrade" then -/var/log/freqtrade.log
```

to one of the rsyslog configuration files, for example at the end of the `/etc/rsyslog.d/50-default.conf`.
For `syslog` (`rsyslog`), the reduction mode can be switched on. This will reduce the number of repeating messages. For instance, multiple bot Heartbeat messages will be reduced to a single message when nothing else happens with the bot. To achieve this, set in `/etc/rsyslog.conf`:

```
$RepeatedMsgReduction on
```
This needs the `systemd` python package installed as a dependency, which is not available on Windows. Hence, the whole journald logging functionality is not available for a bot running on Windows.
To send Freqtrade log messages to the `journald` system service use the `--logfile` command line option with the value in the following format:

* `--logfile journald` -- send log messages to `journald`.

Log messages are sent to `journald` with the `user` facility. So you can see them with the following commands:

* `journalctl -f` -- shows Freqtrade log messages sent to `journald` along with other log messages fetched by `journald`.
* `journalctl -f -u freqtrade.service` -- this command can be used when the bot is run as a `systemd` service.

There are many other options in the `journalctl` utility to filter the messages, see manual pages for this utility.
On many systems `syslog` (`rsyslog`) fetches data from `journald` (and vice versa), so both `--logfile syslog` or `--logfile journald` can be used and the messages viewed with both `journalctl` and a syslog viewer utility. You can combine this in any way which suits you better.