Mirror of https://github.com/freqtrade/freqtrade.git (synced 2024-11-10 18:23:55 +00:00)
Commit eb40fc698d
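The diff below replaces pytest's legacy tmpdir fixture with tmp_path throughout the test suite. tmpdir yields a py.path.local object, whereas tmp_path yields a standard pathlib.Path, so wrappers such as Path(tmpdir) and helper variables like tmpdir1 = Path(tmpdir) become unnecessary, and "from pathlib import Path" imports can be dropped where nothing else uses them. A minimal sketch of the difference (illustrative only, not part of the commit; the test names and file contents are invented):

    from pathlib import Path


    def test_with_legacy_tmpdir(tmpdir):
        # tmpdir is a py.path.local object; an explicit Path() wrap is
        # needed before pathlib-style operations.
        folder = Path(tmpdir) / "data"
        folder.mkdir()
        (folder / "sample.txt").write_text("hello")
        assert (folder / "sample.txt").read_text() == "hello"


    def test_with_tmp_path(tmp_path):
        # tmp_path is already a pathlib.Path, so no conversion is needed.
        folder = tmp_path / "data"
        folder.mkdir()
        (folder / "sample.txt").write_text("hello")
        assert (folder / "sample.txt").read_text() == "hello"
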
@@ -550,7 +550,7 @@ def test_start_install_ui(mocker):
     assert download_mock.call_count == 0


-def test_clean_ui_subdir(mocker, tmpdir, caplog):
+def test_clean_ui_subdir(mocker, tmp_path, caplog):
     mocker.patch("freqtrade.commands.deploy_commands.Path.is_dir",
                  side_effect=[True, True])
     mocker.patch("freqtrade.commands.deploy_commands.Path.is_file",
@@ -560,14 +560,14 @@ def test_clean_ui_subdir(mocker, tmpdir, caplog):
     mocker.patch("freqtrade.commands.deploy_commands.Path.glob",
                  return_value=[Path('test1'), Path('test2'), Path('.gitkeep')])

-    folder = Path(tmpdir) / "uitests"
+    folder = tmp_path / "uitests"
     clean_ui_subdir(folder)
     assert log_has("Removing UI directory content.", caplog)
     assert rd_mock.call_count == 1
     assert ul_mock.call_count == 1


-def test_download_and_install_ui(mocker, tmpdir):
+def test_download_and_install_ui(mocker, tmp_path):
     # Create zipfile
     requests_mock = MagicMock()
     file_like_object = BytesIO()
@@ -583,7 +583,7 @@ def test_download_and_install_ui(mocker, tmpdir):
                  side_effect=[True, False])
     wb_mock = mocker.patch("freqtrade.commands.deploy_commands.Path.write_bytes")

-    folder = Path(tmpdir) / "uitests_dl"
+    folder = tmp_path / "uitests_dl"
     folder.mkdir(exist_ok=True)

     assert read_ui_version(folder) is None
@@ -1010,8 +1010,8 @@ def test_start_test_pairlist(mocker, caplog, tickers, default_conf, capsys):
         pytest.fail(f'Expected well formed JSON, but failed to parse: {captured.out}')


-def test_hyperopt_list(mocker, capsys, caplog, saved_hyperopt_results, tmpdir):
-    csv_file = Path(tmpdir) / "test.csv"
+def test_hyperopt_list(mocker, capsys, caplog, saved_hyperopt_results, tmp_path):
+    csv_file = tmp_path / "test.csv"
     mocker.patch(
         'freqtrade.optimize.hyperopt_tools.HyperoptTools._test_hyperopt_results_exist',
         return_value=True
@@ -1512,10 +1512,10 @@ def test_backtesting_show(mocker, testdatadir, capsys):
     assert "Pairs for Strategy" in out


-def test_start_convert_db(mocker, fee, tmpdir, caplog):
-    db_src_file = Path(f"{tmpdir}/db.sqlite")
+def test_start_convert_db(fee, tmp_path):
+    db_src_file = tmp_path / "db.sqlite"
     db_from = f"sqlite:///{db_src_file}"
-    db_target_file = Path(f"{tmpdir}/db_target.sqlite")
+    db_target_file = tmp_path / "db_target.sqlite"
     db_to = f"sqlite:///{db_target_file}"
     args = [
         "convert-db",
@@ -1542,13 +1542,13 @@ def test_start_convert_db(mocker, fee, tmpdir, caplog):
     assert db_target_file.is_file()


-def test_start_strategy_updater(mocker, tmpdir):
+def test_start_strategy_updater(mocker, tmp_path):
     sc_mock = mocker.patch('freqtrade.commands.strategy_utils_commands.start_conversion')
     teststrats = Path(__file__).parent.parent / 'strategy/strats'
     args = [
         "strategy-updater",
         "--userdir",
-        str(tmpdir),
+        str(tmp_path),
         "--strategy-path",
         str(teststrats),
     ]
@@ -1562,7 +1562,7 @@ def test_start_strategy_updater(mocker, tmpdir):
     args = [
         "strategy-updater",
         "--userdir",
-        str(tmpdir),
+        str(tmp_path),
         "--strategy-path",
         str(teststrats),
         "--strategy-list",

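Note on the argument lists above: CLI-style argument vectors remain lists of strings, so a tmp_path is still converted explicitly with str(), just as str(tmpdir) was before. A small illustration (hypothetical helper, not part of the commit):

    from pathlib import Path


    def build_updater_args(userdir: Path) -> list:
        # Argument vectors are plain strings; Path objects are converted
        # explicitly, mirroring the str(tmp_path) usage in the diff above.
        return ["strategy-updater", "--userdir", str(userdir)]


    assert build_updater_args(Path("/tmp/user_data")) == [
        "strategy-updater", "--userdir", "/tmp/user_data",
    ]
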
@@ -413,8 +413,8 @@ def patch_gc(mocker) -> None:


 @pytest.fixture(autouse=True)
-def user_dir(mocker, tmpdir) -> Path:
-    user_dir = Path(tmpdir) / "user_data"
+def user_dir(mocker, tmp_path) -> Path:
+    user_dir = tmp_path / "user_data"
     mocker.patch('freqtrade.configuration.configuration.create_userdata_dir',
                  return_value=user_dir)
     return user_dir

@@ -1,6 +1,5 @@
 # pragma pylint: disable=missing-docstring, C0103
 import logging
-from pathlib import Path
 from shutil import copyfile

 import numpy as np
@@ -323,18 +322,17 @@ def test_trades_dict_to_list(fetch_trades_result):
     assert t[6] == fetch_trades_result[i]['cost']


-def test_convert_trades_format(default_conf, testdatadir, tmpdir):
-    tmpdir1 = Path(tmpdir)
-    files = [{'old': tmpdir1 / "XRP_ETH-trades.json.gz",
-              'new': tmpdir1 / "XRP_ETH-trades.json"},
-             {'old': tmpdir1 / "XRP_OLD-trades.json.gz",
-              'new': tmpdir1 / "XRP_OLD-trades.json"},
+def test_convert_trades_format(default_conf, testdatadir, tmp_path):
+    files = [{'old': tmp_path / "XRP_ETH-trades.json.gz",
+              'new': tmp_path / "XRP_ETH-trades.json"},
+             {'old': tmp_path / "XRP_OLD-trades.json.gz",
+              'new': tmp_path / "XRP_OLD-trades.json"},
              ]
     for file in files:
         copyfile(testdatadir / file['old'].name, file['old'])
         assert not file['new'].exists()

-    default_conf['datadir'] = tmpdir1
+    default_conf['datadir'] = tmp_path

     convert_trades_format(default_conf, convert_from='jsongz',
                           convert_to='json', erase=False)
@@ -362,16 +360,15 @@ def test_convert_trades_format(default_conf, testdatadir, tmpdir):
     (['UNITTEST_USDT_USDT-1h-mark', 'XRP_USDT_USDT-1h-mark'], CandleType.MARK),
     (['XRP_USDT_USDT-1h-futures'], CandleType.FUTURES),
 ])
-def test_convert_ohlcv_format(default_conf, testdatadir, tmpdir, file_base, candletype):
-    tmpdir1 = Path(tmpdir)
+def test_convert_ohlcv_format(default_conf, testdatadir, tmp_path, file_base, candletype):
     prependix = '' if candletype == CandleType.SPOT else 'futures/'
     files_orig = []
     files_temp = []
     files_new = []
     for file in file_base:
         file_orig = testdatadir / f"{prependix}{file}.feather"
-        file_temp = tmpdir1 / f"{prependix}{file}.feather"
-        file_new = tmpdir1 / f"{prependix}{file}.json.gz"
+        file_temp = tmp_path / f"{prependix}{file}.feather"
+        file_new = tmp_path / f"{prependix}{file}.json.gz"
         IDataHandler.create_dir_if_needed(file_temp)
         copyfile(file_orig, file_temp)

@@ -379,7 +376,7 @@ def test_convert_ohlcv_format(default_conf, testdatadir, tmpdir, file_base, cand
         files_temp.append(file_temp)
         files_new.append(file_new)

-    default_conf['datadir'] = tmpdir1
+    default_conf['datadir'] = tmp_path
     default_conf['candle_types'] = [candletype]

     if candletype == CandleType.SPOT:
@@ -445,30 +442,29 @@ def test_reduce_dataframe_footprint():
     assert df2['close_copy'].dtype == np.float32


-def test_convert_trades_to_ohlcv(testdatadir, tmpdir, caplog):
-    tmpdir1 = Path(tmpdir)
+def test_convert_trades_to_ohlcv(testdatadir, tmp_path, caplog):
     pair = 'XRP/ETH'
-    file1 = tmpdir1 / 'XRP_ETH-1m.feather'
-    file5 = tmpdir1 / 'XRP_ETH-5m.feather'
-    filetrades = tmpdir1 / 'XRP_ETH-trades.json.gz'
+    file1 = tmp_path / 'XRP_ETH-1m.feather'
+    file5 = tmp_path / 'XRP_ETH-5m.feather'
+    filetrades = tmp_path / 'XRP_ETH-trades.json.gz'
     copyfile(testdatadir / file1.name, file1)
     copyfile(testdatadir / file5.name, file5)
     copyfile(testdatadir / filetrades.name, filetrades)

     # Compare downloaded dataset with converted dataset
-    dfbak_1m = load_pair_history(datadir=tmpdir1, timeframe="1m", pair=pair)
-    dfbak_5m = load_pair_history(datadir=tmpdir1, timeframe="5m", pair=pair)
+    dfbak_1m = load_pair_history(datadir=tmp_path, timeframe="1m", pair=pair)
+    dfbak_5m = load_pair_history(datadir=tmp_path, timeframe="5m", pair=pair)

     tr = TimeRange.parse_timerange('20191011-20191012')

     convert_trades_to_ohlcv([pair], timeframes=['1m', '5m'],
                             data_format_trades='jsongz',
-                            datadir=tmpdir1, timerange=tr, erase=True)
+                            datadir=tmp_path, timerange=tr, erase=True)

     assert log_has("Deleting existing data for pair XRP/ETH, interval 1m.", caplog)
     # Load new data
-    df_1m = load_pair_history(datadir=tmpdir1, timeframe="1m", pair=pair)
-    df_5m = load_pair_history(datadir=tmpdir1, timeframe="5m", pair=pair)
+    df_1m = load_pair_history(datadir=tmp_path, timeframe="1m", pair=pair)
+    df_5m = load_pair_history(datadir=tmp_path, timeframe="5m", pair=pair)

     assert_frame_equal(dfbak_1m, df_1m, check_exact=True)
     assert_frame_equal(dfbak_5m, df_5m, check_exact=True)
@@ -477,5 +473,5 @@ def test_convert_trades_to_ohlcv(testdatadir, tmpdir, caplog):

     convert_trades_to_ohlcv(['NoDatapair'], timeframes=['1m', '5m'],
                             data_format_trades='jsongz',
-                            datadir=tmpdir1, timerange=tr, erase=True)
+                            datadir=tmp_path, timerange=tr, erase=True)
     assert log_has(msg, caplog)

@@ -328,17 +328,16 @@ def test_hdf5datahandler_trades_load(testdatadir):
 ])
 def test_hdf5datahandler_ohlcv_load_and_resave(
     testdatadir,
-    tmpdir,
+    tmp_path,
     pair,
     timeframe,
     candle_type,
     candle_append,
     startdt, enddt
 ):
-    tmpdir1 = Path(tmpdir)
-    tmpdir2 = tmpdir1
+    tmpdir2 = tmp_path
     if candle_type not in ('', 'spot'):
-        tmpdir2 = tmpdir1 / 'futures'
+        tmpdir2 = tmp_path / 'futures'
         tmpdir2.mkdir()
     dh = get_datahandler(testdatadir, 'hdf5')
     ohlcv = dh._ohlcv_load(pair, timeframe, None, candle_type=candle_type)
@@ -348,7 +347,7 @@ def test_hdf5datahandler_ohlcv_load_and_resave(
     file = tmpdir2 / f"UNITTEST_NEW-{timeframe}{candle_append}.h5"
     assert not file.is_file()

-    dh1 = get_datahandler(tmpdir1, 'hdf5')
+    dh1 = get_datahandler(tmp_path, 'hdf5')
     dh1.ohlcv_store('UNITTEST/NEW', timeframe, ohlcv, candle_type=candle_type)
     assert file.is_file()

@@ -379,17 +378,16 @@ def test_hdf5datahandler_ohlcv_load_and_resave(
 def test_generic_datahandler_ohlcv_load_and_resave(
     datahandler,
     testdatadir,
-    tmpdir,
+    tmp_path,
     pair,
     timeframe,
     candle_type,
     candle_append,
     startdt, enddt
 ):
-    tmpdir1 = Path(tmpdir)
-    tmpdir2 = tmpdir1
+    tmpdir2 = tmp_path
     if candle_type not in ('', 'spot'):
-        tmpdir2 = tmpdir1 / 'futures'
+        tmpdir2 = tmp_path / 'futures'
         tmpdir2.mkdir()
     # Load data from one common file
     dhbase = get_datahandler(testdatadir, 'feather')
@@ -403,7 +401,7 @@ def test_generic_datahandler_ohlcv_load_and_resave(
     file = tmpdir2 / f"UNITTEST_NEW-{timeframe}{candle_append}.{dh._get_file_extension()}"
     assert not file.is_file()

-    dh1 = get_datahandler(tmpdir1, datahandler)
+    dh1 = get_datahandler(tmp_path, datahandler)
     dh1.ohlcv_store('UNITTEST/NEW', timeframe, ohlcv, candle_type=candle_type)
     assert file.is_file()

@@ -459,15 +457,14 @@ def test_datahandler_trades_load(testdatadir, datahandler):


 @pytest.mark.parametrize('datahandler', ['jsongz', 'hdf5', 'feather', 'parquet'])
-def test_datahandler_trades_store(testdatadir, tmpdir, datahandler):
-    tmpdir1 = Path(tmpdir)
+def test_datahandler_trades_store(testdatadir, tmp_path, datahandler):
     dh = get_datahandler(testdatadir, datahandler)
     trades = dh.trades_load('XRP/ETH')

-    dh1 = get_datahandler(tmpdir1, datahandler)
+    dh1 = get_datahandler(tmp_path, datahandler)
     dh1.trades_store('XRP/NEW', trades)

-    file = tmpdir1 / f'XRP_NEW-trades.{dh1._get_file_extension()}'
+    file = tmp_path / f'XRP_NEW-trades.{dh1._get_file_extension()}'
     assert file.is_file()
     # Load trades back
     trades_new = dh1.trades_load('XRP/NEW')

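As the datahandler hunks above show, tmp_path starts out as an empty per-test directory, so any required subdirectory (for example the 'futures' folder) has to be created explicitly before files are written into it. A minimal sketch (illustrative only; the file name is invented):

    def test_futures_subdirectory(tmp_path):
        # tmp_path is a fresh, empty pathlib.Path for every test; nested
        # directories must be created before use.
        futures_dir = tmp_path / "futures"
        futures_dir.mkdir()
        (futures_dir / "sample.feather").touch()
        assert (futures_dir / "sample.feather").is_file()
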
@@ -106,17 +106,16 @@ def test_load_data_startup_candles(mocker, testdatadir) -> None:

 @pytest.mark.parametrize('candle_type', ['mark', ''])
 def test_load_data_with_new_pair_1min(ohlcv_history_list, mocker, caplog,
-                                      default_conf, tmpdir, candle_type) -> None:
+                                      default_conf, tmp_path, candle_type) -> None:
     """
     Test load_pair_history() with 1 min timeframe
     """
-    tmpdir1 = Path(tmpdir)
     mocker.patch(f'{EXMS}.get_historic_ohlcv', return_value=ohlcv_history_list)
     exchange = get_patched_exchange(mocker, default_conf)
-    file = tmpdir1 / 'MEME_BTC-1m.feather'
+    file = tmp_path / 'MEME_BTC-1m.feather'

     # do not download a new pair if refresh_pairs isn't set
-    load_pair_history(datadir=tmpdir1, timeframe='1m', pair='MEME/BTC', candle_type=candle_type)
+    load_pair_history(datadir=tmp_path, timeframe='1m', pair='MEME/BTC', candle_type=candle_type)
     assert not file.is_file()
     assert log_has(
         f"No history for MEME/BTC, {candle_type}, 1m found. "
@@ -124,10 +123,10 @@ def test_load_data_with_new_pair_1min(ohlcv_history_list, mocker, caplog,
     )

     # download a new pair if refresh_pairs is set
-    refresh_data(datadir=tmpdir1, timeframe='1m', pairs=['MEME/BTC'],
+    refresh_data(datadir=tmp_path, timeframe='1m', pairs=['MEME/BTC'],
                  exchange=exchange, candle_type=CandleType.SPOT
                  )
-    load_pair_history(datadir=tmpdir1, timeframe='1m', pair='MEME/BTC', candle_type=candle_type)
+    load_pair_history(datadir=tmp_path, timeframe='1m', pair='MEME/BTC', candle_type=candle_type)
     assert file.is_file()
     assert log_has_re(
         r'\(0/1\) - Download history data for "MEME/BTC", 1m, '
@@ -273,27 +272,26 @@ def test_download_pair_history(
     ohlcv_history_list,
     mocker,
     default_conf,
-    tmpdir,
+    tmp_path,
     candle_type,
     subdir,
     file_tail
 ) -> None:
     mocker.patch(f'{EXMS}.get_historic_ohlcv', return_value=ohlcv_history_list)
     exchange = get_patched_exchange(mocker, default_conf)
-    tmpdir1 = Path(tmpdir)
-    file1_1 = tmpdir1 / f'{subdir}MEME_BTC-1m{file_tail}.feather'
-    file1_5 = tmpdir1 / f'{subdir}MEME_BTC-5m{file_tail}.feather'
-    file2_1 = tmpdir1 / f'{subdir}CFI_BTC-1m{file_tail}.feather'
-    file2_5 = tmpdir1 / f'{subdir}CFI_BTC-5m{file_tail}.feather'
+    file1_1 = tmp_path / f'{subdir}MEME_BTC-1m{file_tail}.feather'
+    file1_5 = tmp_path / f'{subdir}MEME_BTC-5m{file_tail}.feather'
+    file2_1 = tmp_path / f'{subdir}CFI_BTC-1m{file_tail}.feather'
+    file2_5 = tmp_path / f'{subdir}CFI_BTC-5m{file_tail}.feather'

     assert not file1_1.is_file()
     assert not file2_1.is_file()

-    assert _download_pair_history(datadir=tmpdir1, exchange=exchange,
+    assert _download_pair_history(datadir=tmp_path, exchange=exchange,
                                   pair='MEME/BTC',
                                   timeframe='1m',
                                   candle_type=candle_type)
-    assert _download_pair_history(datadir=tmpdir1, exchange=exchange,
+    assert _download_pair_history(datadir=tmp_path, exchange=exchange,
                                   pair='CFI/BTC',
                                   timeframe='1m',
                                   candle_type=candle_type)
@@ -308,11 +306,11 @@ def test_download_pair_history(
     assert not file1_5.is_file()
     assert not file2_5.is_file()

-    assert _download_pair_history(datadir=tmpdir1, exchange=exchange,
+    assert _download_pair_history(datadir=tmp_path, exchange=exchange,
                                   pair='MEME/BTC',
                                   timeframe='5m',
                                   candle_type=candle_type)
-    assert _download_pair_history(datadir=tmpdir1, exchange=exchange,
+    assert _download_pair_history(datadir=tmp_path, exchange=exchange,
                                   pair='CFI/BTC',
                                   timeframe='5m',
                                   candle_type=candle_type)
@@ -340,13 +338,12 @@ def test_download_pair_history2(mocker, default_conf, testdatadir) -> None:
     assert json_dump_mock.call_count == 3


-def test_download_backtesting_data_exception(mocker, caplog, default_conf, tmpdir) -> None:
+def test_download_backtesting_data_exception(mocker, caplog, default_conf, tmp_path) -> None:
     mocker.patch(f'{EXMS}.get_historic_ohlcv',
                  side_effect=Exception('File Error'))
-    tmpdir1 = Path(tmpdir)
     exchange = get_patched_exchange(mocker, default_conf)

-    assert not _download_pair_history(datadir=tmpdir1, exchange=exchange,
+    assert not _download_pair_history(datadir=tmp_path, exchange=exchange,
                                       pair='MEME/BTC',
                                       timeframe='1m', candle_type='spot')
     assert log_has('Failed to download history data for pair: "MEME/BTC", timeframe: 1m.', caplog)
@@ -570,16 +567,15 @@ def test_refresh_backtest_trades_data(mocker, default_conf, markets, caplog, tes


 def test_download_trades_history(trades_history, mocker, default_conf, testdatadir, caplog,
-                                 tmpdir, time_machine) -> None:
+                                 tmp_path, time_machine) -> None:
     start_dt = dt_utc(2023, 1, 1)
     time_machine.move_to(start_dt, tick=False)

-    tmpdir1 = Path(tmpdir)
     ght_mock = MagicMock(side_effect=lambda pair, *args, **kwargs: (pair, trades_history))
     mocker.patch(f'{EXMS}.get_historic_trades', ght_mock)
     exchange = get_patched_exchange(mocker, default_conf)
-    file1 = tmpdir1 / 'ETH_BTC-trades.json.gz'
-    data_handler = get_datahandler(tmpdir1, data_format='jsongz')
+    file1 = tmp_path / 'ETH_BTC-trades.json.gz'
+    data_handler = get_datahandler(tmp_path, data_format='jsongz')

     assert not file1.is_file()

@@ -614,7 +610,7 @@ def test_download_trades_history(trades_history, mocker, default_conf, testdatad
                                  pair='ETH/BTC')
     assert log_has_re('Failed to download historic trades for pair: "ETH/BTC".*', caplog)

-    file2 = tmpdir1 / 'XRP_ETH-trades.json.gz'
+    file2 = tmp_path / 'XRP_ETH-trades.json.gz'
     copyfile(testdatadir / file2.name, file2)

     ght_mock.reset_mock()

@@ -1,5 +1,4 @@
 from datetime import datetime, timezone
-from pathlib import Path
 from shutil import copytree
 from unittest.mock import PropertyMock

@@ -11,7 +10,7 @@ from freqtrade.exceptions import OperationalException
 from tests.conftest import EXMS, log_has, log_has_re, patch_exchange


-def test_import_kraken_trades_from_csv(testdatadir, tmpdir, caplog, default_conf_usdt, mocker):
+def test_import_kraken_trades_from_csv(testdatadir, tmp_path, caplog, default_conf_usdt, mocker):
     with pytest.raises(OperationalException, match="This function is only for the kraken exchange"):
         import_kraken_trades_from_csv(default_conf_usdt, 'feather')

@@ -21,10 +20,9 @@ def test_import_kraken_trades_from_csv(testdatadir, tmpdir, caplog, default_conf
     mocker.patch(f'{EXMS}.markets', PropertyMock(return_value={
         'BCH/EUR': {'symbol': 'BCH/EUR', 'id': 'BCHEUR', 'altname': 'BCHEUR'},
     }))
-    tmpdir1 = Path(tmpdir)
-    dstfile = tmpdir1 / 'BCH_EUR-trades.feather'
+    dstfile = tmp_path / 'BCH_EUR-trades.feather'
     assert not dstfile.is_file()
-    default_conf_usdt['datadir'] = tmpdir1
+    default_conf_usdt['datadir'] = tmp_path
     # There's 2 files in this tree, containing a total of 2 days.
     # tests/testdata/kraken/
     # └── trades_csv
@@ -32,7 +30,7 @@ def test_import_kraken_trades_from_csv(testdatadir, tmpdir, caplog, default_conf
     #     └── incremental_q2
     #         └── BCHEUR.csv  <-- 2023-01-02

-    copytree(testdatadir / 'kraken/trades_csv', tmpdir1 / 'trades_csv')
+    copytree(testdatadir / 'kraken/trades_csv', tmp_path / 'trades_csv')

     import_kraken_trades_from_csv(default_conf_usdt, 'feather')
     assert log_has("Found csv files for BCHEUR.", caplog)
@@ -40,7 +38,7 @@ def test_import_kraken_trades_from_csv(testdatadir, tmpdir, caplog, default_conf

     assert dstfile.is_file()

-    dh = get_datahandler(tmpdir1, 'feather')
+    dh = get_datahandler(tmp_path, 'feather')
     trades = dh.trades_load('BCH_EUR')
     assert len(trades) == 340

@@ -1,5 +1,4 @@
 from datetime import datetime, timedelta, timezone
-from pathlib import Path
 from unittest.mock import AsyncMock, MagicMock, PropertyMock

 import ccxt
@@ -269,9 +268,9 @@ def test_additional_exchange_init_okx(default_conf, mocker):
                            "additional_exchange_init", "fetch_accounts")


-def test_load_leverage_tiers_okx(default_conf, mocker, markets, tmpdir, caplog, time_machine):
+def test_load_leverage_tiers_okx(default_conf, mocker, markets, tmp_path, caplog, time_machine):

-    default_conf['datadir'] = Path(tmpdir)
+    default_conf['datadir'] = tmp_path
     # fd_mock = mocker.patch('freqtrade.exchange.exchange.file_dump_json')
     api_mock = MagicMock()
     type(api_mock).has = PropertyMock(return_value={

@@ -21,13 +21,13 @@ def is_mac() -> bool:


 @pytest.fixture(scope="function")
-def freqai_conf(default_conf, tmpdir):
+def freqai_conf(default_conf, tmp_path):
     freqaiconf = deepcopy(default_conf)
     freqaiconf.update(
         {
             "datadir": Path(default_conf["datadir"]),
             "strategy": "freqai_test_strat",
-            "user_data_dir": Path(tmpdir),
+            "user_data_dir": tmp_path,
             "strategy-path": "freqtrade/tests/strategy/strats",
             "freqaimodel": "LightGBMRegressor",
             "freqaimodel_path": "freqai/prediction_models",

@@ -500,14 +500,14 @@ def test_get_required_data_timerange(mocker, freqai_conf):
     assert (time_range.stopts - time_range.startts) == 177300


-def test_download_all_data_for_training(mocker, freqai_conf, caplog, tmpdir):
+def test_download_all_data_for_training(mocker, freqai_conf, caplog, tmp_path):
     caplog.set_level(logging.DEBUG)
     strategy = get_patched_freqai_strategy(mocker, freqai_conf)
     exchange = get_patched_exchange(mocker, freqai_conf)
     pairlist = PairListManager(exchange, freqai_conf)
     strategy.dp = DataProvider(freqai_conf, exchange, pairlist)
     freqai_conf['pairs'] = freqai_conf['exchange']['pair_whitelist']
-    freqai_conf['datadir'] = Path(tmpdir)
+    freqai_conf['datadir'] = tmp_path
     download_all_data_for_training(strategy.dp, freqai_conf)

     assert log_has_re(

@@ -193,8 +193,8 @@ def test_start_no_hyperopt_allowed(mocker, hyperopt_conf, caplog) -> None:
         start_hyperopt(pargs)


-def test_start_no_data(mocker, hyperopt_conf, tmpdir) -> None:
-    hyperopt_conf['user_data_dir'] = Path(tmpdir)
+def test_start_no_data(mocker, hyperopt_conf, tmp_path) -> None:
+    hyperopt_conf['user_data_dir'] = tmp_path
     patched_configuration_load_config_file(mocker, hyperopt_conf)
     mocker.patch('freqtrade.data.history.load_pair_history', MagicMock(return_value=pd.DataFrame))
     mocker.patch(
@@ -857,14 +857,14 @@ def test_simplified_interface_failed(mocker, hyperopt_conf, space) -> None:
         hyperopt.start()


-def test_in_strategy_auto_hyperopt(mocker, hyperopt_conf, tmpdir, fee) -> None:
+def test_in_strategy_auto_hyperopt(mocker, hyperopt_conf, tmp_path, fee) -> None:
     patch_exchange(mocker)
     mocker.patch(f'{EXMS}.get_fee', fee)
-    (Path(tmpdir) / 'hyperopt_results').mkdir(parents=True)
+    (tmp_path / 'hyperopt_results').mkdir(parents=True)
     # No hyperopt needed
     hyperopt_conf.update({
         'strategy': 'HyperoptableStrategy',
-        'user_data_dir': Path(tmpdir),
+        'user_data_dir': tmp_path,
         'hyperopt_random_state': 42,
         'spaces': ['all'],
     })
@@ -897,17 +897,17 @@ def test_in_strategy_auto_hyperopt(mocker, hyperopt_conf, tmpdir, fee) -> None:
         hyperopt.get_optimizer([], 2)


-def test_in_strategy_auto_hyperopt_with_parallel(mocker, hyperopt_conf, tmpdir, fee) -> None:
+def test_in_strategy_auto_hyperopt_with_parallel(mocker, hyperopt_conf, tmp_path, fee) -> None:
     mocker.patch(f'{EXMS}.validate_config', MagicMock())
     mocker.patch(f'{EXMS}.get_fee', fee)
     mocker.patch(f'{EXMS}._load_markets')
     mocker.patch(f'{EXMS}.markets',
                  PropertyMock(return_value=get_markets()))
-    (Path(tmpdir) / 'hyperopt_results').mkdir(parents=True)
+    (tmp_path / 'hyperopt_results').mkdir(parents=True)
     # No hyperopt needed
     hyperopt_conf.update({
         'strategy': 'HyperoptableStrategy',
-        'user_data_dir': Path(tmpdir),
+        'user_data_dir': tmp_path,
         'hyperopt_random_state': 42,
         'spaces': ['all'],
         # Enforce parallelity
@@ -938,14 +938,14 @@ def test_in_strategy_auto_hyperopt_with_parallel(mocker, hyperopt_conf, tmpdir,
     hyperopt.start()


-def test_in_strategy_auto_hyperopt_per_epoch(mocker, hyperopt_conf, tmpdir, fee) -> None:
+def test_in_strategy_auto_hyperopt_per_epoch(mocker, hyperopt_conf, tmp_path, fee) -> None:
     patch_exchange(mocker)
     mocker.patch(f'{EXMS}.get_fee', fee)
-    (Path(tmpdir) / 'hyperopt_results').mkdir(parents=True)
+    (tmp_path / 'hyperopt_results').mkdir(parents=True)

     hyperopt_conf.update({
         'strategy': 'HyperoptableStrategy',
-        'user_data_dir': Path(tmpdir),
+        'user_data_dir': tmp_path,
         'hyperopt_random_state': 42,
         'spaces': ['all'],
         'epochs': 3,
@@ -995,15 +995,15 @@ def test_SKDecimal():
     assert space.transform([1.5, 1.6]) == [150, 160]


-def test_stake_amount_unlimited_max_open_trades(mocker, hyperopt_conf, tmpdir, fee) -> None:
+def test_stake_amount_unlimited_max_open_trades(mocker, hyperopt_conf, tmp_path, fee) -> None:
     # This test is to ensure that unlimited max_open_trades are ignored for the backtesting
     # if we have an unlimited stake amount
     patch_exchange(mocker)
     mocker.patch(f'{EXMS}.get_fee', fee)
-    (Path(tmpdir) / 'hyperopt_results').mkdir(parents=True)
+    (tmp_path / 'hyperopt_results').mkdir(parents=True)
     hyperopt_conf.update({
         'strategy': 'HyperoptableStrategy',
-        'user_data_dir': Path(tmpdir),
+        'user_data_dir': tmp_path,
         'hyperopt_random_state': 42,
         'spaces': ['trades'],
         'stake_amount': 'unlimited'
@@ -1023,15 +1023,15 @@ def test_stake_amount_unlimited_max_open_trades(mocker, hyperopt_conf, tmpdir, f
     assert hyperopt.backtesting.strategy.max_open_trades == 1


-def test_max_open_trades_dump(mocker, hyperopt_conf, tmpdir, fee, capsys) -> None:
+def test_max_open_trades_dump(mocker, hyperopt_conf, tmp_path, fee, capsys) -> None:
     # This test is to ensure that after hyperopting, max_open_trades is never
     # saved as inf in the output json params
     patch_exchange(mocker)
     mocker.patch(f'{EXMS}.get_fee', fee)
-    (Path(tmpdir) / 'hyperopt_results').mkdir(parents=True)
+    (tmp_path / 'hyperopt_results').mkdir(parents=True)
     hyperopt_conf.update({
         'strategy': 'HyperoptableStrategy',
-        'user_data_dir': Path(tmpdir),
+        'user_data_dir': tmp_path,
         'hyperopt_random_state': 42,
         'spaces': ['trades'],
     })
@@ -1069,16 +1069,16 @@ def test_max_open_trades_dump(mocker, hyperopt_conf, tmpdir, fee, capsys) -> Non
     assert '"max_open_trades":-1' in out


-def test_max_open_trades_consistency(mocker, hyperopt_conf, tmpdir, fee) -> None:
+def test_max_open_trades_consistency(mocker, hyperopt_conf, tmp_path, fee) -> None:
     # This test is to ensure that max_open_trades is the same across all functions needing it
     # after it has been changed from the hyperopt
     patch_exchange(mocker)
     mocker.patch(f'{EXMS}.get_fee', return_value=0)

-    (Path(tmpdir) / 'hyperopt_results').mkdir(parents=True)
+    (tmp_path / 'hyperopt_results').mkdir(parents=True)
     hyperopt_conf.update({
         'strategy': 'HyperoptableStrategy',
-        'user_data_dir': Path(tmpdir),
+        'user_data_dir': tmp_path,
         'hyperopt_random_state': 42,
         'spaces': ['trades'],
         'stake_amount': 'unlimited',

@@ -19,9 +19,9 @@ def create_results() -> List[Dict]:
     return [{'loss': 1, 'result': 'foo', 'params': {}, 'is_best': True}]


-def test_save_results_saves_epochs(hyperopt, tmpdir, caplog) -> None:
+def test_save_results_saves_epochs(hyperopt, tmp_path, caplog) -> None:

-    hyperopt.results_file = Path(tmpdir / 'ut_results.fthypt')
+    hyperopt.results_file = tmp_path / 'ut_results.fthypt'

     hyperopt_epochs = HyperoptTools.load_filtered_results(hyperopt.results_file, {})
     assert log_has_re("Hyperopt file .* not found.", caplog)
@@ -182,9 +182,9 @@ def test_get_strategy_filename(default_conf):
     assert x is None


-def test_export_params(tmpdir):
+def test_export_params(tmp_path):

-    filename = Path(tmpdir) / f"{CURRENT_TEST_STRATEGY}.json"
+    filename = tmp_path / f"{CURRENT_TEST_STRATEGY}.json"
     assert not filename.is_file()
     params = {
         "params_details": {
@@ -231,11 +231,11 @@ def test_export_params(tmpdir):
     assert "max_open_trades" in content["params"]


-def test_try_export_params(default_conf, tmpdir, caplog, mocker):
+def test_try_export_params(default_conf, tmp_path, caplog, mocker):
     default_conf['disableparamexport'] = False
     export_mock = mocker.patch("freqtrade.optimize.hyperopt_tools.HyperoptTools.export_params")

-    filename = Path(tmpdir) / f"{CURRENT_TEST_STRATEGY}.json"
+    filename = tmp_path / f"{CURRENT_TEST_STRATEGY}.json"
     assert not filename.is_file()
     params = {
         "params_details": {

@@ -74,7 +74,7 @@ def test_text_table_bt_results():
     assert text_table_bt_results(pair_results, stake_currency='BTC') == result_str


-def test_generate_backtest_stats(default_conf, testdatadir, tmpdir):
+def test_generate_backtest_stats(default_conf, testdatadir, tmp_path):
     default_conf.update({'strategy': CURRENT_TEST_STRATEGY})
     StrategyResolver.load_strategy(default_conf)

@@ -185,8 +185,8 @@ def test_generate_backtest_stats(default_conf, testdatadir, tmpdir):
     assert strat_stats['pairlist'] == ['UNITTEST/BTC']

     # Test storing stats
-    filename = Path(tmpdir / 'btresult.json')
-    filename_last = Path(tmpdir / LAST_BT_RESULT_FN)
+    filename = tmp_path / 'btresult.json'
+    filename_last = tmp_path / LAST_BT_RESULT_FN
     _backup_file(filename_last, copy_file=True)
     assert not filename.is_file()

@@ -196,7 +196,7 @@ def test_generate_backtest_stats(default_conf, testdatadir, tmpdir):
     last_fn = get_latest_backtest_filename(filename_last.parent)
     assert re.match(r"btresult-.*\.json", last_fn)

-    filename1 = Path(tmpdir / last_fn)
+    filename1 = tmp_path / last_fn
     assert filename1.is_file()
     content = filename1.read_text()
     assert 'max_drawdown_account' in content
@@ -254,14 +254,14 @@ def test_store_backtest_candles(testdatadir, mocker):
     dump_mock.reset_mock()


-def test_write_read_backtest_candles(tmpdir):
+def test_write_read_backtest_candles(tmp_path):

     candle_dict = {'DefStrat': {'UNITTEST/BTC': pd.DataFrame()}}

     # test directory exporting
     sample_date = '2022_01_01_15_05_13'
-    store_backtest_analysis_results(Path(tmpdir), candle_dict, {}, sample_date)
-    stored_file = Path(tmpdir / f'backtest-result-{sample_date}_signals.pkl')
+    store_backtest_analysis_results(tmp_path, candle_dict, {}, sample_date)
+    stored_file = tmp_path / f'backtest-result-{sample_date}_signals.pkl'
     with stored_file.open("rb") as scp:
         pickled_signal_candles = joblib.load(scp)

@@ -273,9 +273,9 @@ def test_write_read_backtest_candles(tmpdir):
     _clean_test_file(stored_file)

     # test file exporting
-    filename = Path(tmpdir / 'testresult')
+    filename = tmp_path / 'testresult'
     store_backtest_analysis_results(filename, candle_dict, {}, sample_date)
-    stored_file = Path(tmpdir / f'testresult-{sample_date}_signals.pkl')
+    stored_file = tmp_path / f'testresult-{sample_date}_signals.pkl'
     with stored_file.open("rb") as scp:
         pickled_signal_candles = joblib.load(scp)

@@ -29,15 +29,15 @@ def test_init_create_session(default_conf):
     assert 'scoped_session' in type(Trade.session).__name__


-def test_init_custom_db_url(default_conf, tmpdir):
+def test_init_custom_db_url(default_conf, tmp_path):
     # Update path to a value other than default, but still in-memory
-    filename = f"{tmpdir}/freqtrade2_test.sqlite"
-    assert not Path(filename).is_file()
+    filename = tmp_path / "freqtrade2_test.sqlite"
+    assert not filename.is_file()

     default_conf.update({'db_url': f'sqlite:///{filename}'})

     init_db(default_conf['db_url'])
-    assert Path(filename).is_file()
+    assert filename.is_file()
     r = Trade.session.execute(text("PRAGMA journal_mode"))
     assert r.first() == ('wal',)

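The persistence hunk above builds a sqlite URL by interpolating the path into an f-string. That pattern is unchanged by the migration because formatting a pathlib.Path uses str(path), which yields the plain filesystem path. A small sketch (illustrative only; the file name is invented):

    import tempfile
    from pathlib import Path

    # Interpolating a Path into an f-string produces its string form, so a
    # sqlite URL can be built from tmp_path exactly as it was from a string.
    db_file = Path(tempfile.gettempdir()) / "freqtrade2_demo.sqlite"
    db_url = f"sqlite:///{db_file}"
    assert db_url.endswith("freqtrade2_demo.sqlite")
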
@@ -1616,9 +1616,9 @@ def test_api_plot_config(botclient, mocker):
     assert_response(rc)


-def test_api_strategies(botclient, tmpdir):
+def test_api_strategies(botclient, tmp_path):
     ftbot, client = botclient
-    ftbot.config['user_data_dir'] = Path(tmpdir)
+    ftbot.config['user_data_dir'] = tmp_path

     rc = client_get(client, f"{BASE_URI}/strategies")

@@ -1701,9 +1701,9 @@ def test_api_exchanges(botclient):
     }


-def test_api_freqaimodels(botclient, tmpdir, mocker):
+def test_api_freqaimodels(botclient, tmp_path, mocker):
     ftbot, client = botclient
-    ftbot.config['user_data_dir'] = Path(tmpdir)
+    ftbot.config['user_data_dir'] = tmp_path
     mocker.patch(
         "freqtrade.resolvers.freqaimodel_resolver.FreqaiModelResolver.search_all_objects",
         return_value=[
@@ -1739,9 +1739,9 @@ def test_api_freqaimodels(botclient, tmpdir, mocker):
     ]}


-def test_api_pairlists_available(botclient, tmpdir):
+def test_api_pairlists_available(botclient, tmp_path):
     ftbot, client = botclient
-    ftbot.config['user_data_dir'] = Path(tmpdir)
+    ftbot.config['user_data_dir'] = tmp_path

     rc = client_get(client, f"{BASE_URI}/pairlists/available")

@@ -1768,9 +1768,9 @@ def test_api_pairlists_available(botclient, tmpdir):
     assert len(volumepl['params']) > 2


-def test_api_pairlists_evaluate(botclient, tmpdir, mocker):
+def test_api_pairlists_evaluate(botclient, tmp_path, mocker):
     ftbot, client = botclient
-    ftbot.config['user_data_dir'] = Path(tmpdir)
+    ftbot.config['user_data_dir'] = tmp_path

     rc = client_get(client, f"{BASE_URI}/pairlists/evaluate/randomJob")

@@ -1905,7 +1905,7 @@ def test_sysinfo(botclient):
     assert 'ram_pct' in result


-def test_api_backtesting(botclient, mocker, fee, caplog, tmpdir):
+def test_api_backtesting(botclient, mocker, fee, caplog, tmp_path):
     try:
         ftbot, client = botclient
         mocker.patch(f'{EXMS}.get_fee', fee)
@@ -1935,8 +1935,8 @@ def test_api_backtesting(botclient, mocker, fee, caplog, tmpdir):
         assert result['status_msg'] == 'Backtest reset'
         ftbot.config['export'] = 'trades'
         ftbot.config['backtest_cache'] = 'day'
-        ftbot.config['user_data_dir'] = Path(tmpdir)
-        ftbot.config['exportfilename'] = Path(tmpdir) / "backtest_results"
+        ftbot.config['user_data_dir'] = tmp_path
+        ftbot.config['exportfilename'] = tmp_path / "backtest_results"
         ftbot.config['exportfilename'].mkdir()

         # start backtesting

@@ -1,7 +1,6 @@


 import shutil
-from pathlib import Path

 import pytest

@@ -10,7 +9,7 @@ from freqtrade.util.binance_mig import migrate_binance_futures_data, migrate_bin
 from tests.conftest import create_mock_trades_usdt, log_has


-def test_binance_mig_data_conversion(default_conf_usdt, tmpdir, testdatadir):
+def test_binance_mig_data_conversion(default_conf_usdt, tmp_path, testdatadir):

     # call doing nothing (spot mode)
     migrate_binance_futures_data(default_conf_usdt)
@@ -18,7 +17,7 @@ def test_binance_mig_data_conversion(default_conf_usdt, tmpdir, testdatadir):
     pair_old = 'XRP_USDT'
     pair_unified = 'XRP_USDT_USDT'
     futures_src = testdatadir / 'futures'
-    futures_dst = tmpdir / 'futures'
+    futures_dst = tmp_path / 'futures'
     futures_dst.mkdir()
     files = [
         '-1h-mark.feather',
@@ -32,7 +31,7 @@ def test_binance_mig_data_conversion(default_conf_usdt, tmpdir, testdatadir):
         fn_after = futures_dst / f'{pair_old}{file}'
         shutil.copy(futures_src / f'{pair_unified}{file}', fn_after)

-    default_conf_usdt['datadir'] = Path(tmpdir)
+    default_conf_usdt['datadir'] = tmp_path
     # Migrate files to unified namings
     migrate_binance_futures_data(default_conf_usdt)

@@ -104,8 +104,8 @@ def test_load_config_file_error_range(default_conf, mocker, caplog) -> None:
     assert x == ''


-def test_load_file_error(tmpdir):
-    testpath = Path(tmpdir) / 'config.json'
+def test_load_file_error(tmp_path):
+    testpath = tmp_path / 'config.json'
     with pytest.raises(OperationalException, match=r"File .* not found!"):
         load_file(testpath)

@@ -601,9 +601,9 @@ def test_cli_verbose_with_params(default_conf, mocker, caplog) -> None:
     assert log_has('Verbosity set to 3', caplog)


-def test_set_logfile(default_conf, mocker, tmpdir):
+def test_set_logfile(default_conf, mocker, tmp_path):
     patched_configuration_load_config_file(mocker, default_conf)
-    f = Path(tmpdir / "test_file.log")
+    f = tmp_path / "test_file.log"
     assert not f.is_file()
     arglist = [
         'trade', '--logfile', str(f),
@@ -1145,7 +1145,7 @@ def test_pairlist_resolving_with_config_pl_not_exists(mocker, default_conf):
         configuration.get_config()


-def test_pairlist_resolving_fallback(mocker, tmpdir):
+def test_pairlist_resolving_fallback(mocker, tmp_path):
     mocker.patch.object(Path, "exists", MagicMock(return_value=True))
     mocker.patch.object(Path, "open", MagicMock(return_value=MagicMock()))
     mocker.patch("freqtrade.configuration.configuration.load_file",
@@ -1164,7 +1164,7 @@ def test_pairlist_resolving_fallback(mocker, tmpdir):

     assert config['pairs'] == ['ETH/BTC', 'XRP/BTC']
     assert config['exchange']['name'] == 'binance'
-    assert config['datadir'] == Path(tmpdir) / "user_data/data/binance"
+    assert config['datadir'] == tmp_path / "user_data/data/binance"


 @pytest.mark.parametrize("setting", [

@@ -32,9 +32,9 @@ def test_create_userdata_dir(mocker, default_conf, caplog) -> None:
     assert str(x) == str(Path("/tmp/bar"))


-def test_create_userdata_dir_and_chown(mocker, tmpdir, caplog) -> None:
+def test_create_userdata_dir_and_chown(mocker, tmp_path, caplog) -> None:
     sp_mock = mocker.patch('subprocess.check_output')
-    path = Path(tmpdir / 'bar')
+    path = tmp_path / 'bar'
     assert not path.is_dir()

     x = create_userdata_dir(str(path), create_dir=True)

@@ -1,6 +1,5 @@
 import logging
 import sys
-from pathlib import Path

 import pytest

@@ -75,11 +74,11 @@ def test_set_loggers_syslog():


 @pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-def test_set_loggers_Filehandler(tmpdir):
+def test_set_loggers_Filehandler(tmp_path):
     logger = logging.getLogger()
     orig_handlers = logger.handlers
     logger.handlers = []
-    logfile = Path(tmpdir) / 'ft_logfile.log'
+    logfile = tmp_path / 'ft_logfile.log'
     config = {'verbosity': 2,
               'logfile': str(logfile),
               }

@@ -40,7 +40,7 @@ def test_strategy_updater_start(user_dir, capsys) -> None:
     # Backup file exists
     assert Path(user_dir / "strategies_orig_updater" / 'strategy_test_v2.py').exists()
     # updated file exists
-    new_file = Path(tmpdirp / 'strategy_test_v2.py')
+    new_file = tmpdirp / 'strategy_test_v2.py'
     assert new_file.exists()
     new_code = new_file.read_text()
     assert 'INTERFACE_VERSION = 3' in new_code