adjust rate limit for backtest data syncing

c9s 2021-05-02 17:46:08 +08:00
parent 499f34179b
commit 8fea2022e5
3 changed files with 6 additions and 13 deletions

@@ -72,19 +72,12 @@ func (e KLineBatchQuery) Query(ctx context.Context, symbol string, interval type
 	errC = make(chan error, 1)
 	go func() {
-		limiter := rate.NewLimiter(rate.Every(5*time.Second), 2) // from binance (original 1200, use 1000 for safety)
 		defer close(c)
 		defer close(errC)
 		for startTime.Before(endTime) {
-			if err := limiter.Wait(ctx); err != nil {
-				logrus.WithError(err).Error("rate limit error")
-			}
 			kLines, err := e.QueryKLines(ctx, symbol, interval, types.KLineQueryOptions{
 				StartTime: &startTime,
 				Limit:     1000,
 			})
 			if err != nil {

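For context on the hunk above: golang.org/x/time/rate implements a token-bucket limiter, and the sync loop calls limiter.Wait before every paginated kline request so the fetcher never outruns the exchange's request budget. The following is a minimal, runnable sketch of that pacing pattern; fetchPage and the one-hour stepping are hypothetical placeholders for illustration, not bbgo's actual QueryKLines API.

package main

import (
	"context"
	"fmt"
	"time"

	"golang.org/x/time/rate"
)

// fetchPage is a hypothetical stand-in for an exchange call such as QueryKLines:
// it returns one page of records plus the start time for the next page.
func fetchPage(ctx context.Context, start time.Time) ([]int, time.Time, error) {
	return []int{1, 2, 3}, start.Add(time.Hour), nil
}

func main() {
	ctx := context.Background()

	// Same parameters as the limiter in the hunk above: a burst of 2 requests,
	// then one new token every 5 seconds.
	limiter := rate.NewLimiter(rate.Every(5*time.Second), 2)

	startTime := time.Now().Add(-6 * time.Hour)
	endTime := time.Now()

	for startTime.Before(endTime) {
		// Wait blocks until a token is available or the context is canceled.
		if err := limiter.Wait(ctx); err != nil {
			fmt.Println("rate limit error:", err)
			return
		}

		page, next, err := fetchPage(ctx, startTime)
		if err != nil {
			fmt.Println("query error:", err)
			return
		}

		fmt.Printf("fetched %d records starting at %s\n", len(page), startTime.Format(time.RFC3339))
		startTime = next
	}
}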
@@ -714,9 +714,9 @@ func (e *Exchange) SubmitOrders(ctx context.Context, orders ...types.SubmitOrder
 // QueryKLines queries the Kline/candlestick bars for a symbol. Klines are uniquely identified by their open time.
 func (e *Exchange) QueryKLines(ctx context.Context, symbol string, interval types.Interval, options types.KLineQueryOptions) ([]types.KLine, error) {
-	var limit = 500
+	var limit = 1000
 	if options.Limit > 0 {
-		// default limit == 500
+		// default limit == 1000
 		limit = options.Limit
 	}

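The hunk above only changes the fallback: QueryKLines honors an explicit options.Limit and otherwise uses the default, now raised from 500 to 1000. A small self-contained sketch of that default-resolution idiom, with a trimmed-down options struct standing in for bbgo's types.KLineQueryOptions:

package main

import "fmt"

// KLineQueryOptions is a trimmed-down stand-in for bbgo's types.KLineQueryOptions,
// keeping only the field that matters for this hunk.
type KLineQueryOptions struct {
	Limit int
}

// resolveLimit is a hypothetical helper mirroring the logic above: a zero Limit
// means "use the default", which this commit raises from 500 to 1000.
func resolveLimit(options KLineQueryOptions) int {
	var limit = 1000
	if options.Limit > 0 {
		limit = options.Limit
	}
	return limit
}

func main() {
	fmt.Println(resolveLimit(KLineQueryOptions{}))           // 1000
	fmt.Println(resolveLimit(KLineQueryOptions{Limit: 200})) // 200
}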
@@ -21,10 +21,10 @@ import (
 	"github.com/c9s/bbgo/pkg/util"
 )
-var closedOrderQueryLimiter = rate.NewLimiter(rate.Every(6*time.Second), 1)
-var tradeQueryLimiter = rate.NewLimiter(rate.Every(4*time.Second), 1)
-var accountQueryLimiter = rate.NewLimiter(rate.Every(5*time.Second), 1)
-var marketDataLimiter = rate.NewLimiter(rate.Every(5*time.Second), 1)
+var closedOrderQueryLimiter = rate.NewLimiter(rate.Every(5*time.Second), 1)
+var tradeQueryLimiter = rate.NewLimiter(rate.Every(3*time.Second), 1)
+var accountQueryLimiter = rate.NewLimiter(rate.Every(3*time.Second), 1)
+var marketDataLimiter = rate.NewLimiter(rate.Every(2*time.Second), 10)
 var log = logrus.WithField("exchange", "max")
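As a reference for reading these values: rate.Every(d) converts an interval into an events-per-second rate, and the second argument to rate.NewLimiter is the bucket's burst size. The adjusted marketDataLimiter therefore admits a burst of 10 requests and then refills one token every 2 seconds, where the previous setting allowed only a single request every 5 seconds. A short runnable sketch of that token-bucket behavior:

package main

import (
	"fmt"
	"time"

	"golang.org/x/time/rate"
)

func main() {
	// Same parameters as the new marketDataLimiter above.
	limiter := rate.NewLimiter(rate.Every(2*time.Second), 10)

	// The first 10 calls drain the burst and pass immediately; the next ones
	// are rejected until tokens refill.
	for i := 1; i <= 12; i++ {
		fmt.Printf("request %2d allowed: %v\n", i, limiter.Allow())
	}

	// After about 2 seconds one token has refilled, so a request passes again.
	time.Sleep(2 * time.Second)
	fmt.Println("after 2s, allowed:", limiter.Allow())
}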