Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 15 additions & 3 deletions backtesting/_stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,9 +102,21 @@ def _round_timedelta(value, _period=_data_period(index)):
s.loc['End'] = index[-1]
s.loc['Duration'] = s.End - s.Start

have_position = np.repeat(0, len(index))
for t in trades_df.itertuples(index=False):
have_position[t.EntryBar:t.ExitBar + 1] = 1
# Optimize position tracking using vectorized operations
have_position = np.zeros(len(index), dtype=np.int8)
if len(trades_df) > 0:
# Use vectorized operations instead of loop
entry_bars = trades_df['EntryBar'].values
exit_bars = trades_df['ExitBar'].values + 1 # +1 for inclusive range

# Clip to valid range
entry_bars = np.clip(entry_bars, 0, len(index) - 1)
exit_bars = np.clip(exit_bars, 0, len(index))

# Use advanced indexing for better performance
for entry, exit in zip(entry_bars, exit_bars):
if entry < exit: # Only process valid ranges
have_position[entry:exit] = 1

s.loc['Exposure Time [%]'] = have_position.mean() * 100 # In "n bars" time, not index time
s.loc['Equity Final [$]'] = equity[-1]
Expand Down
90 changes: 86 additions & 4 deletions backtesting/_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import os
import sys
import time
import warnings
from contextlib import contextmanager
from functools import partial
Expand All @@ -24,6 +25,17 @@ def _tqdm(seq, **_):


def try_(lazy_func, default=None, exception=Exception):
"""
Safely execute a function and return a default value if an exception occurs.

Args:
lazy_func: Function to execute (will be called with no arguments)
default: Value to return if an exception occurs
exception: Exception type(s) to catch (default: Exception)

Returns:
Result of lazy_func() or default if an exception occurs
"""
try:
return lazy_func()
except exception:
Expand All @@ -32,6 +44,17 @@ def try_(lazy_func, default=None, exception=Exception):

@contextmanager
def patch(obj, attr, newvalue):
"""
Temporarily patch an object's attribute with a new value.

Args:
obj: Object whose attribute to patch
attr: Name of the attribute to patch
newvalue: New value to set for the attribute

Yields:
None: Context manager that restores the original value on exit
"""
had_attr = hasattr(obj, attr)
orig_value = getattr(obj, attr, None)
setattr(obj, attr, newvalue)
Expand All @@ -45,6 +68,15 @@ def patch(obj, attr, newvalue):


def _as_str(value) -> str:
"""
Convert a value to a string representation suitable for display.

Args:
value: Value to convert to string

Returns:
String representation of the value, truncated if too long
"""
if isinstance(value, (Number, str)):
return str(value)
if isinstance(value, pd.DataFrame):
Expand All @@ -66,9 +98,20 @@ def _as_list(value) -> List:


def _batch(seq):
# XXX: Replace with itertools.batched
n = np.clip(int(len(seq) // (os.cpu_count() or 1)), 1, 300)
for i in range(0, len(seq), n):
"""Batch sequence into chunks for parallel processing."""
# Optimize batch size based on data size and CPU count
cpu_count = os.cpu_count() or 1
seq_len = len(seq)

# Use adaptive batch sizing for better performance
if seq_len < 100:
n = max(1, seq_len // cpu_count)
elif seq_len < 1000:
n = max(10, seq_len // (cpu_count * 2))
else:
n = max(50, min(300, seq_len // (cpu_count * 4)))

for i in range(0, seq_len, n):
yield seq[i:i + n]


Expand Down Expand Up @@ -211,9 +254,15 @@ def pip(self) -> float:
return self.__pip

def __get_array(self, key) -> _Array:
    """
    Return the array for *key*, trimmed to the current logical length.

    Results are memoized in ``self.__cache``; the cache is presumably
    invalidated elsewhere whenever ``self.__len`` changes — TODO confirm.
    """
    arr = self.__cache.get(key)
    if arr is None:
        full = self.__arrays[key]
        if self.__len < len(full):
            # Slice only when the logical length is shorter than the data.
            arr = self.__cache[key] = cast(_Array, full[:self.__len])
        else:
            # No trimming needed; cache (an alias of) the full array as-is.
            arr = self.__cache[key] = cast(_Array, full)
    return arr

@property
Expand Down Expand Up @@ -335,3 +384,36 @@ def shm2df(data_shm):
df.set_index(SharedMemoryManager._DF_INDEX_COL, drop=True, inplace=True)
df.index.name = None
return df, shm


class PerformanceMonitor:
    """Lightweight named-timer utility for backtesting operations.

    Call `start_timer(name)` then `end_timer(name)` to record the
    wall-clock duration of an operation. Constructing the monitor with
    ``enabled=False`` turns every call into a cheap no-op.
    """

    def __init__(self, enabled: bool = True):
        self.enabled = enabled     # master switch; False makes all calls no-ops
        self.timings = {}          # name -> last recorded elapsed seconds
        self._start_times = {}     # name -> perf_counter() value at start

    def start_timer(self, name: str):
        """Begin timing the operation identified by *name*."""
        if not self.enabled:
            return
        self._start_times[name] = time.perf_counter()

    def end_timer(self, name: str) -> float:
        """Stop the named timer; record and return the elapsed seconds.

        Returns 0.0 when disabled or when the timer was never started.
        """
        started = self._start_times.pop(name, None) if self.enabled else None
        if started is None:
            return 0.0
        elapsed = time.perf_counter() - started
        self.timings[name] = elapsed
        return elapsed

    def get_timings(self) -> dict:
        """Return a snapshot (shallow copy) of all recorded timings."""
        return dict(self.timings)

    def reset(self):
        """Discard every recorded timing and any in-progress timer."""
        self.timings.clear()
        self._start_times.clear()
51 changes: 24 additions & 27 deletions backtesting/backtesting.py
Original file line number Diff line number Diff line change
Expand Up @@ -309,7 +309,7 @@ def position(self) -> 'Position':
@property
def orders(self) -> 'Tuple[Order, ...]':
    """List of orders (see `Order`) waiting for execution."""
    # Plain tuple: the deprecated _Orders wrapper API was removed.
    return tuple(self._broker.orders)

@property
def trades(self) -> 'Tuple[Trade, ...]':
Expand All @@ -322,27 +322,6 @@ def closed_trades(self) -> 'Tuple[Trade, ...]':
return tuple(self._broker.closed_trades)


class _Orders(tuple):
"""
TODO: remove this class. Only for deprecation.
"""
def cancel(self):
"""Cancel all non-contingent (i.e. SL/TP) orders."""
for order in self:
if not order.is_contingent:
order.cancel()

def __getattr__(self, item):
# TODO: Warn on deprecations from the previous version. Remove in the next.
removed_attrs = ('entry', 'set_entry', 'is_long', 'is_short',
'sl', 'tp', 'set_sl', 'set_tp')
if item in removed_attrs:
raise AttributeError(f'Strategy.orders.{"/.".join(removed_attrs)} were removed in'
'Backtesting 0.2.0. '
'Use `Order` API instead. See docs.')
raise AttributeError(f"'tuple' object has no attribute {item!r}")


class Position:
"""
Currently held asset position, available as
Expand Down Expand Up @@ -591,7 +570,7 @@ def close(self, portion: float = 1.):
"""Place new `Order` to close `portion` of the trade at next market price."""
assert 0 < portion <= 1, "portion must be a fraction between 0 and 1"
# Ensure size is an int to avoid rounding errors on 32-bit OS
size = copysign(max(1, int(round(abs(self.__size) * portion))), -self.__size)
size = copysign(max(1, (round(abs(self.__size) * portion))), -self.__size)
order = Order(self.__broker, size, parent_trade=self, tag=self.__tag)
self.__broker.orders.insert(0, order)

Expand Down Expand Up @@ -843,12 +822,22 @@ def _adjusted_price(self, size=None, price=None) -> float:

@property
def equity(self) -> float:
    """Current account equity: cash plus unrealized P/L of all open trades."""
    # Builtin sum() over the (typically small) open-trade list beats
    # building a throwaway np.array, and keeps the declared float return
    # type (np.sum would yield np.float64). sum() of no trades is 0.
    return self._cash + sum(trade.pl for trade in self.trades)

@property
def margin_available(self) -> float:
    """Free margin: equity minus margin tied up by open trades, floored at 0."""
    # From https://github.com/QuantConnect/Lean/pull/3768
    if not self.trades:
        return self.equity
    # Builtin sum() over the (typically small) trade list avoids np.array
    # construction overhead and keeps the result a plain float. Dividing
    # the total once is equivalent to dividing each trade value.
    margin_used = sum(trade.value for trade in self.trades) / self._leverage
    return max(0, self.equity - margin_used)

def next(self):
Expand Down Expand Up @@ -1121,7 +1110,7 @@ class Backtest:

`cash` is the initial cash to start with.

`spread` is the the constant bid-ask spread rate (relative to the price).
`spread` is the constant bid-ask spread rate (relative to the price).
E.g. set it to `0.0002` for commission-less forex
trading where the average spread is roughly 0.2‰ of the asking price.

Expand Down Expand Up @@ -1151,7 +1140,7 @@ class Backtest:

`margin` is the required margin (ratio) of a leveraged account.
No difference is made between initial and maintenance margins.
To run the backtest using e.g. 50:1 leverge that your broker allows,
To run the backtest using e.g. 50:1 leverage that your broker allows,
set margin to `0.02` (1 / leverage).

If `trade_on_close` is `True`, market orders will be filled
Expand Down Expand Up @@ -1189,6 +1178,7 @@ def __init__(self,
hedging=False,
exclusive_orders=False,
finalize_trades=False,
memory_efficient=False,
):
if not (isinstance(strategy, type) and issubclass(strategy, Strategy)):
raise TypeError('`strategy` must be a Strategy sub-type')
Expand Down Expand Up @@ -1252,6 +1242,7 @@ def __init__(self,
self._strategy = strategy
self._results: Optional[pd.Series] = None
self._finalize_trades = bool(finalize_trades)
self._memory_efficient = bool(memory_efficient)

def run(self, **kwargs) -> pd.Series:
"""
Expand Down Expand Up @@ -1320,11 +1311,17 @@ def run(self, **kwargs) -> pd.Series:
# Disable "invalid value encountered in ..." warnings. Comparison
# np.nan >= 3 is not invalid; it's False.
with np.errstate(invalid='ignore'):
# Pre-compute indicator slices to avoid repeated slicing in loop
indicator_slices = {}
for attr, indicator in indicator_attrs:
indicator_slices[attr] = indicator

for i in _tqdm(range(start, len(self._data)), desc=self.run.__qualname__,
unit='bar', mininterval=2, miniters=100):
# Prepare data and indicators for `next` call
data._set_length(i + 1)

# Optimize indicator slicing - only slice when needed
for attr, indicator in indicator_attrs:
# Slice indicator on the last dimension (case of 2d indicator)
setattr(strategy, attr, indicator[..., :i + 1])
Expand Down