Compare commits
lg_branch_ ... master
13 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 7d137a1a0e |  |
|  | 0423a7d34f |  |
|  | 7ab09669b4 |  |
|  | 73f36ddcea |  |
|  | 80c3e8d54b |  |
|  | 8e6ac39674 |  |
|  | 0af334bdf9 |  |
|  | b474752959 |  |
|  | 1b6b5e5735 |  |
|  | 1d73ce8070 |  |
|  | c1c72f46a6 |  |
|  | 566dd9bbdc |  |
|  | ed0c0fecb2 |  |
.gitignore (vendored): 2 changes
@@ -1,10 +1,10 @@
# SpecStory explanation file
__pycache__/
__OLD__/
.specstory/
.history/
.cursorindexingignore
data
####.vscode/
cvttpy
# SpecStory explanation file
.specstory/.what-is-this.md
.vscode/.env (vendored, new file): 1 addition
@@ -0,0 +1 @@
PYTHONPATH=/home/oleg/develop
.vscode/extensions.json (vendored, new file): 9 additions
@@ -0,0 +1,9 @@
{
    "recommendations": [
        "ms-python.python",
        "ms-python.pylance",
        "ms-python.black-formatter",
        "ms-python.mypy-type-checker",
        "ms-python.isort"
    ]
}
.vscode/launch.json (vendored, new file): 271 additions
@@ -0,0 +1,271 @@
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [

        {
            "name": "Python Debugger: Current File",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "${file}",
            "console": "integratedTerminal",
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib:${workspaceFolder}/.."
            },
        },
        {
            "name": "-------- Live Pair Trading --------",
        },
        {
            "name": "PAIRS TRADER",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "${workspaceFolder}/bin/pairs_trader.py",
            "console": "integratedTerminal",
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib:${workspaceFolder}/.."
            },
            "args": [
                "--config=${workspaceFolder}/configuration/pairs_trader.cfg",
                "--pair=PAIR-ADA-USDT:BNBSPOT,PAIR-SOL-USDT:BNBSPOT",
            ],
        },
        {
            "name": "-------- OLS --------",
        },
        {
            "name": "CRYPTO OLS (rolling)",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "${workspaceFolder}/research/backtest.py",
            "args": [
                "--config=${workspaceFolder}/configuration/ols.cfg",
                "--instruments=ADA-USDT:CRYPTO:BNBSPOT,SOL-USDT:CRYPTO:BNBSPOT",
                "--date_pattern=20250605",
                "--result_db=${workspaceFolder}/research/results/crypto/%T.ols.ADA-SOL.20250605.crypto_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "CRYPTO OLS (optimized)",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "${workspaceFolder}/research/backtest.py",
            "args": [
                "--config=${workspaceFolder}/configuration/ols-opt.cfg",
                "--instruments=ADA-USDT:CRYPTO:BNBSPOT,SOL-USDT:CRYPTO:BNBSPOT",
                "--date_pattern=20250605",
                "--result_db=${workspaceFolder}/research/results/crypto/%T.ols-opt.ADA-SOL.20250605.crypto_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        // {
        //     "name": "CRYPTO OLS (expanding)",
        //     "type": "debugpy",
        //     "request": "launch",
        //     "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
        //     "program": "${workspaceFolder}/research/backtest.py",
        //     "args": [
        //         "--config=${workspaceFolder}/configuration/ols-exp.cfg",
        //         "--instruments=ADA-USDT:CRYPTO:BNBSPOT,SOL-USDT:CRYPTO:BNBSPOT",
        //         "--date_pattern=20250605",
        //         "--result_db=${workspaceFolder}/research/results/crypto/%T.ols-exp.ADA-SOL.20250605.crypto_results.db",
        //     ],
        //     "env": {
        //         "PYTHONPATH": "${workspaceFolder}/lib"
        //     },
        //     "console": "integratedTerminal"
        // },
        {
            "name": "EQUITY OLS (rolling)",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "${workspaceFolder}/research/backtest.py",
            "args": [
                "--config=${workspaceFolder}/configuration/ols.cfg",
                "--instruments=COIN:EQUITY:ALPACA,MSTR:EQUITY:ALPACA",
                "--date_pattern=20250605",
                "--result_db=${workspaceFolder}/research/results/equity/%T.ols.COIN-MSTR.20250605.equity_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "EQUITY-CRYPTO OLS (rolling)",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "${workspaceFolder}/research/backtest.py",
            "args": [
                "--config=${workspaceFolder}/configuration/ols.cfg",
                "--instruments=COIN:EQUITY:ALPACA,BTC-USDT:CRYPTO:BNBSPOT",
                "--date_pattern=20250605",
                "--result_db=${workspaceFolder}/research/results/intermarket/%T.ols.COIN-BTC.20250605.equity_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "-------- VECM --------",
        },
        {
            "name": "CRYPTO VECM (rolling)",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "${workspaceFolder}/research/backtest.py",
            "args": [
                "--config=${workspaceFolder}/configuration/vecm.cfg",
                "--instruments=ADA-USDT:CRYPTO:BNBSPOT,SOL-USDT:CRYPTO:BNBSPOT",
                "--date_pattern=20250605",
                "--result_db=${workspaceFolder}/research/results/crypto/%T.vecm.ADA-SOL.20250605.crypto_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "CRYPTO VECM (optimized)",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "${workspaceFolder}/research/backtest.py",
            "args": [
                "--config=${workspaceFolder}/configuration/vecm-opt.cfg",
                "--instruments=ADA-USDT:CRYPTO:BNBSPOT,SOL-USDT:CRYPTO:BNBSPOT",
                "--date_pattern=20250605",
                "--result_db=${workspaceFolder}/research/results/crypto/%T.vecm-opt.ADA-SOL.20250605.crypto_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        // {
        //     "name": "CRYPTO VECM (expanding)",
        //     "type": "debugpy",
        //     "request": "launch",
        //     "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
        //     "program": "${workspaceFolder}/research/backtest.py",
        //     "args": [
        //         "--config=${workspaceFolder}/configuration/vecm-exp.cfg",
        //         "--instruments=ADA-USDT:CRYPTO:BNBSPOT,SOL-USDT:CRYPTO:BNBSPOT",
        //         "--date_pattern=20250605",
        //         "--result_db=${workspaceFolder}/research/results/crypto/%T.vecm-exp.ADA-SOL.20250605.crypto_results.db",
        //     ],
        //     "env": {
        //         "PYTHONPATH": "${workspaceFolder}/lib"
        //     },
        //     "console": "integratedTerminal"
        // },
        {
            "name": "EQUITY VECM (rolling)",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "${workspaceFolder}/research/backtest.py",
            "args": [
                "--config=${workspaceFolder}/configuration/vecm.cfg",
                "--instruments=COIN:EQUITY:ALPACA,MSTR:EQUITY:ALPACA",
                "--date_pattern=20250605",
                "--result_db=${workspaceFolder}/research/results/equity/%T.vecm.COIN-MSTR.20250605.equity_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "EQUITY-CRYPTO VECM (rolling)",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "${workspaceFolder}/research/backtest.py",
            "args": [
                "--config=${workspaceFolder}/configuration/vecm.cfg",
                "--instruments=COIN:EQUITY:ALPACA,BTC-USDT:CRYPTO:BNBSPOT",
                "--date_pattern=20250605",
                "--result_db=${workspaceFolder}/research/results/intermarket/%T.vecm.COIN-BTC.20250601.equity_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "-------- B a t c h e s --------",
        },
        {
            "name": "CRYPTO OLS Batch (rolling)",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "${workspaceFolder}/research/backtest.py",
            "args": [
                "--config=${workspaceFolder}/configuration/ols.cfg",
                "--instruments=ADA-USDT:CRYPTO:BNBSPOT,SOL-USDT:CRYPTO:BNBSPOT",
                "--date_pattern=2025060*",
                "--result_db=${workspaceFolder}/research/results/crypto/%T.ols.ADA-SOL.2025060-.crypto_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "CRYPTO VECM Batch (rolling)",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "${workspaceFolder}/research/backtest.py",
            "args": [
                "--config=${workspaceFolder}/configuration/vecm.cfg",
                "--instruments=ADA-USDT:CRYPTO:BNBSPOT,SOL-USDT:CRYPTO:BNBSPOT",
                "--date_pattern=2025060*",
                "--result_db=${workspaceFolder}/research/results/crypto/%T.vecm.ADA-SOL.2025060-.crypto_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "-------- Viz Test --------",
        },
        {
            "name": "Viz Test",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "${workspaceFolder}/tests/viz_test.py",
            "args": [
                "--config=${workspaceFolder}/configuration/ols.cfg",
                "--instruments=ADA-USDT:CRYPTO:BNBSPOT,SOL-USDT:CRYPTO:BNBSPOT",
                "--date_pattern=20250605",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        }
    ]
}
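The "CRYPTO OLS (rolling)" entry above corresponds to a plain command-line run of research/backtest.py. Below is a minimal Python sketch of that invocation via subprocess; the workspace path is a placeholder assumption, not a value taken from the repository:

import os
import subprocess

workspace = "/home/oleg/develop/pairs_trading"  # placeholder workspace root
env = dict(os.environ, PYTHONPATH=f"{workspace}/lib")  # mirrors the launch config's "env"
subprocess.run(
    [
        "/home/oleg/.pyenv/python3.12-venv/bin/python",
        f"{workspace}/research/backtest.py",
        f"--config={workspace}/configuration/ols.cfg",
        "--instruments=ADA-USDT:CRYPTO:BNBSPOT,SOL-USDT:CRYPTO:BNBSPOT",
        "--date_pattern=20250605",
        f"--result_db={workspace}/research/results/crypto/%T.ols.ADA-SOL.20250605.crypto_results.db",
    ],
    env=env,
    check=True,
)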
.vscode/pairs_trading.code-workspace (vendored, new file): 8 additions
@@ -0,0 +1,8 @@
{
    "folders": [
        {
            "path": ".."
        }
    ],
    "settings": {}
}
.vscode/settings.json (vendored, new file): 112 additions
@@ -0,0 +1,112 @@
{
    "PythonVersion": "3.12",
    "[python]": {
        "editor.defaultFormatter": "ms-python.black-formatter"
    },
    // ===========================================================
    "workbench.activityBar.orientation": "vertical",
    // ===========================================================

    // "markdown.styles": [
    //     "/home/oleg/develop/cvtt2/.vscode/light-theme.css"
    // ],
    "markdown.preview.background": "#ffffff",
    "markdown.preview.textEditorTheme": "light",
    "markdown-pdf.styles": [
        "/home/oleg/develop/cvtt2/.vscode/light-theme.css"
    ],
    "editor.detectIndentation": false,
    // Configure editor settings to be overridden for [yaml] language.
    "[yaml]": {
        "editor.insertSpaces": true,
        "editor.tabSize": 4,
    },
    "pylint.args": [
        "--disable=missing-docstring"
        , "--disable=invalid-name"
        , "--disable=too-few-public-methods"
        , "--disable=broad-exception-raised"
        , "--disable=broad-exception-caught"
        , "--disable=pointless-string-statement"
        , "--disable=unused-argument"
        , "--disable=line-too-long"
        , "--disable=import-outside-toplevel"
        , "--disable=fixme"
        , "--disable=protected-access"
        , "--disable=logging-fstring-interpolation"
    ],

    // ===== TESTING CONFIGURATION =====
    "python.testing.unittestEnabled": false,
    "python.testing.pytestEnabled": true,
    "python.testing.pytestArgs": [
        "-v",
        "--tb=short",
        "--disable-warnings"
    ],
    "python.testing.envVars": {
        "PYTHONPATH": "${workspaceFolder}/lib:${workspaceFolder}/.."
    },
    "python.testing.cwd": "${workspaceFolder}",
    "python.testing.autoTestDiscoverOnSaveEnabled": true,
    "python.testing.pytestPath": "/home/oleg/.pyenv/python3.12-venv/bin/pytest",
    "python.testing.promptToConfigure": false,
    "python.testing.pytest.enabled": true,

    // Python interpreter settings
    "python.defaultInterpreterPath": "/home/oleg/.pyenv/python3.12-venv/bin/python3.12",

    // Environment variables for Python execution
    "python.envFile": "${workspaceFolder}/.vscode/.env",
    "python.terminal.activateEnvironment": false,
    "python.terminal.activateEnvInCurrentTerminal": false,

    // Global environment variables for VS Code Python extension
    "terminal.integrated.env.linux": {
        "PYTHONPATH": "/home/oleg/develop/:${env:PYTHONPATH}"
    },

    "pylint.enabled": true,
    "github.copilot.enable": false,
    "markdown.extension.print.theme": "dark",
    "python.analysis.extraPaths": [
        "${workspaceFolder}/..",
        "${workspaceFolder}/lib"
    ],

    // Try enabling regular Python language server alongside CursorPyright
    "python.languageServer": "None",
    "python.analysis.diagnosticMode": "workspace",
    "workbench.colorTheme": "Atom One Dark",
    "cursorpyright.analysis.enable": false,
    "cursorpyright.analysis.extraPaths": [
        "${workspaceFolder}/..",
        "${workspaceFolder}/lib"
    ],

    // Enable quick fixes for unused imports
    "python.analysis.autoImportCompletions": true,
    "python.analysis.fixAll": ["source.unusedImports"],
    "python.analysis.typeCheckingMode": "basic",

    // Enable code actions for CursorPyright
    "cursorpyright.analysis.autoImportCompletions": true,
    "cursorpyright.analysis.typeCheckingMode": "off",
    "cursorpyright.reportUnusedImport": "warning",
    "cursorpyright.reportUnusedVariable": "warning",
    "cursorpyright.analysis.diagnosticMode": "workspace",

    // Force enable code actions
    "editor.lightBulb.enabled": true,
    "editor.codeActionsOnSave": {
        "source.organizeImports": "explicit",
        "source.fixAll": "explicit",
        "source.unusedImports": "explicit"
    },

    // Enable Python-specific code actions
    "python.analysis.completeFunctionParens": true,
    "python.analysis.addImport.exactMatchOnly": false,
    "workbench.tree.indent": 24,
}
__DELETE__/.vscode/launch.json (vendored, new file): 181 additions
@@ -0,0 +1,181 @@
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [

        {
            "name": "Python Debugger: Current File",
            "type": "debugpy",
            "request": "launch",
            "program": "${file}",
            "console": "integratedTerminal"
        },
        {
            "name": "-------- Z-Score (OLS) --------",
        },
        {
            "name": "CRYPTO z-score",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "research/pt_backtest.py",
            "args": [
                "--config=${workspaceFolder}/configuration/zscore.cfg",
                "--instruments=ADA-USDT:CRYPTO:BNBSPOT,SOL-USDT:CRYPTO:BNBSPOT",
                "--date_pattern=20250605",
                "--result_db=${workspaceFolder}/research/results/crypto/%T.z-score.ADA-SOL.20250602.crypto_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "EQUITY z-score",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "research/pt_backtest.py",
            "args": [
                "--config=${workspaceFolder}/configuration/zscore.cfg",
                "--instruments=COIN:EQUITY:ALPACA,MSTR:EQUITY:ALPACA",
                "--date_pattern=2025060*",
                "--result_db=${workspaceFolder}/research/results/equity/%T.z-score.COIN-MSTR.20250602.equity_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "EQUITY-CRYPTO z-score",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "research/pt_backtest.py",
            "args": [
                "--config=${workspaceFolder}/configuration/zscore.cfg",
                "--instruments=COIN:EQUITY:ALPACA,BTC-USDT:CRYPTO:BNBSPOT",
                "--date_pattern=2025060*",
                "--result_db=${workspaceFolder}/research/results/intermarket/%T.z-score.COIN-BTC.20250601.equity_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "-------- VECM --------",
        },
        {
            "name": "CRYPTO vecm",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "research/pt_backtest.py",
            "args": [
                "--config=${workspaceFolder}/configuration/vecm.cfg",
                "--instruments=ADA-USDT:CRYPTO:BNBSPOT,SOL-USDT:CRYPTO:BNBSPOT",
                "--date_pattern=2025060*",
                "--result_db=${workspaceFolder}/research/results/crypto/%T.vecm.ADA-SOL.20250602.crypto_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "EQUITY vecm",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "research/pt_backtest.py",
            "args": [
                "--config=${workspaceFolder}/configuration/vecm.cfg",
                "--instruments=COIN:EQUITY:ALPACA,MSTR:EQUITY:ALPACA",
                "--date_pattern=2025060*",
                "--result_db=${workspaceFolder}/research/results/equity/%T.vecm.COIN-MSTR.20250602.equity_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "EQUITY-CRYPTO vecm",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "research/pt_backtest.py",
            "args": [
                "--config=${workspaceFolder}/configuration/vecm.cfg",
                "--instruments=COIN:EQUITY:ALPACA,BTC-USDT:CRYPTO:BNBSPOT",
                "--date_pattern=2025060*",
                "--result_db=${workspaceFolder}/research/results/intermarket/%T.vecm.COIN-BTC.20250601.equity_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "-------- New ZSCORE --------",
        },
        {
            "name": "New CRYPTO z-score",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "${workspaceFolder}/research/backtest_new.py",
            "args": [
                "--config=${workspaceFolder}/configuration/new_zscore.cfg",
                "--instruments=ADA-USDT:CRYPTO:BNBSPOT,SOL-USDT:CRYPTO:BNBSPOT",
                "--date_pattern=2025060*",
                "--result_db=${workspaceFolder}/research/results/crypto/%T.new_zscore.ADA-SOL.2025060-.crypto_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "New CRYPTO vecm",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "${workspaceFolder}/research/backtest_new.py",
            "args": [
                "--config=${workspaceFolder}/configuration/new_vecm.cfg",
                "--instruments=ADA-USDT:CRYPTO:BNBSPOT,SOL-USDT:CRYPTO:BNBSPOT",
                "--date_pattern=20250605",
                "--result_db=${workspaceFolder}/research/results/crypto/%T.vecm.ADA-SOL.20250605.crypto_results.db",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        },
        {
            "name": "-------- Viz Test --------",
        },
        {
            "name": "Viz Test",
            "type": "debugpy",
            "request": "launch",
            "python": "/home/oleg/.pyenv/python3.12-venv/bin/python",
            "program": "${workspaceFolder}/research/viz_test.py",
            "args": [
                "--config=${workspaceFolder}/configuration/new_zscore.cfg",
                "--instruments=ADA-USDT:CRYPTO:BNBSPOT,SOL-USDT:CRYPTO:BNBSPOT",
                "--date_pattern=20250605",
            ],
            "env": {
                "PYTHONPATH": "${workspaceFolder}/lib"
            },
            "console": "integratedTerminal"
        }
    ]
}
__DELETE__/configuration/vecm.cfg (new file): 44 additions
@@ -0,0 +1,44 @@
{
    "market_data_loading": {
        "CRYPTO": {
            "data_directory": "./data/crypto",
            "db_table_name": "md_1min_bars",
            "instrument_id_pfx": "PAIR-",
        },
        "EQUITY": {
            "data_directory": "./data/equity",
            "db_table_name": "md_1min_bars",
            "instrument_id_pfx": "STOCK-",
        }
    },

    # ====== Funding ======
    "funding_per_pair": 2000.0,

    # ====== Trading Parameters ======
    "stat_model_price": "close", # "vwap"
    "execution_price": {
        "column": "vwap",
        "shift": 1,
    },
    "dis-equilibrium_open_trshld": 2.0,
    "dis-equilibrium_close_trshld": 1.0,
    "training_minutes": 120, # TODO Remove this
    "training_size": 120,
    "fit_method_class": "pt_trading.vecm_rolling_fit.VECMRollingFit",

    # ====== Stop Conditions ======
    "stop_close_conditions": {
        "profit": 2.0,
        "loss": -0.5
    }

    # ====== End of Session Closeout ======
    "close_outstanding_positions": true,
    # "close_outstanding_positions": false,
    "trading_hours": {
        "timezone": "America/New_York",
        "begin_session": "7:30:00",
        "end_session": "18:30:00",
    }
}
@@ -16,13 +16,14 @@
    "funding_per_pair": 2000.0,
    # ====== Trading Parameters ======
    "stat_model_price": "close",
    "execution_price": {
        "column": "vwap",
        "shift": 1,
    },
    # "execution_price": {
    #     "column": "vwap",
    #     "shift": 1,
    # },
    "dis-equilibrium_open_trshld": 2.0,
    "dis-equilibrium_close_trshld": 0.5,
    "training_minutes": 120,
    "training_minutes": 120, # TODO Remove this
    "training_size": 120,
    "fit_method_class": "pt_trading.z-score_rolling_fit.ZScoreRollingFit",

    # ====== Stop Conditions ======
@@ -36,7 +37,7 @@
    # "close_outstanding_positions": false,
    "trading_hours": {
        "timezone": "America/New_York",
        "begin_session": "9:30:00",
        "begin_session": "7:30:00",
        "end_session": "18:30:00",
    }
}
__DELETE__/configuration/zscore_expanding.cfg (new file): 43 additions
@@ -0,0 +1,43 @@
{
    "market_data_loading": {
        "CRYPTO": {
            "data_directory": "./data/crypto",
            "db_table_name": "md_1min_bars",
            "instrument_id_pfx": "PAIR-",
        },
        "EQUITY": {
            "data_directory": "./data/equity",
            "db_table_name": "md_1min_bars",
            "instrument_id_pfx": "STOCK-",
        }
    },

    # ====== Funding ======
    "funding_per_pair": 2000.0,
    # ====== Trading Parameters ======
    "stat_model_price": "close",
    "execution_price": {
        "column": "vwap",
        "shift": 1,
    },
    "dis-equilibrium_open_trshld": 2.0,
    "dis-equilibrium_close_trshld": 0.5,
    "training_minutes": 120, # TODO Remove this
    "training_size": 120,
    "fit_method_class": "pt_trading.z-score_rolling_fit.ZScoreRollingFit",

    # ====== Stop Conditions ======
    "stop_close_conditions": {
        "profit": 2.0,
        "loss": -0.5
    }

    # ====== End of Session Closeout ======
    "close_outstanding_positions": true,
    # "close_outstanding_positions": false,
    "trading_hours": {
        "timezone": "America/New_York",
        "begin_session": "9:30:00",
        "end_session": "18:30:00",
    }
}
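These .cfg files use # comments and trailing commas, so they are not strict JSON. The repository imports hjson (see __DELETE__/strategy/pair_strategy.py below), and hjson accepts exactly this dialect; here is a minimal loading sketch, noting that the repository's own loader appears to be tools.config.load_config and the file path is illustrative:

import hjson

with open("configuration/zscore_expanding.cfg") as fh:
    config = hjson.load(fh)  # tolerates '#' comments and trailing commas

print(config["fit_method_class"])  # pt_trading.z-score_rolling_fit.ZScoreRollingFit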
__DELETE__/lib/pt_trading/expanding_window_fit.py (new file): 304 additions
@@ -0,0 +1,304 @@
from abc import ABC, abstractmethod
from enum import Enum
from typing import Any, Dict, Optional, cast

import pandas as pd  # type: ignore[import]
from pt_trading.fit_method import PairsTradingFitMethod
from pt_trading.results import BacktestResult
from pt_trading.trading_pair import PairState, TradingPair

NanoPerMin = 1e9


class ExpandingWindowFit(PairsTradingFitMethod):
    """
    N O T E:
    =========
    - This class remains to be abstract
    - The following methods are to be implemented in the subclass:
        - create_trading_pair()
    =========
    """

    def __init__(self) -> None:
        super().__init__()

    def run_pair(
        self, pair: TradingPair, bt_result: BacktestResult
    ) -> Optional[pd.DataFrame]:
        print(f"***{pair}*** STARTING....")
        config = pair.config_

        start_idx = pair.get_begin_index()
        end_index = pair.get_end_index()

        pair.user_data_["state"] = PairState.INITIAL
        # Initialize trades DataFrame with proper dtypes to avoid concatenation warnings
        pair.user_data_["trades"] = pd.DataFrame(columns=self.TRADES_COLUMNS).astype(
            {
                "time": "datetime64[ns]",
                "symbol": "string",
                "side": "string",
                "action": "string",
                "price": "float64",
                "disequilibrium": "float64",
                "scaled_disequilibrium": "float64",
                "pair": "object",
            }
        )

        training_minutes = config["training_minutes"]
        while training_minutes + 1 < end_index:

            pair.get_datasets(
                training_minutes=training_minutes,
                training_start_index=start_idx,
                testing_size=1,
            )

            # ================================ PREDICTION ================================
            try:
                self.pair_predict_result_ = pair.predict()
            except Exception as e:
                raise RuntimeError(
                    f"{pair}: TrainingPrediction failed: {str(e)}"
                ) from e

            training_minutes += 1

        self._create_trading_signals(pair, config, bt_result)
        print(f"***{pair}*** FINISHED *** Num Trades:{len(pair.user_data_['trades'])}")

        return pair.get_trades()

    def _create_trading_signals(
        self, pair: TradingPair, config: Dict, bt_result: BacktestResult
    ) -> None:

        predicted_df = self.pair_predict_result_
        assert predicted_df is not None

        open_threshold = config["dis-equilibrium_open_trshld"]
        close_threshold = config["dis-equilibrium_close_trshld"]
        for curr_predicted_row_idx in range(len(predicted_df)):
            pred_row = predicted_df.iloc[curr_predicted_row_idx]
            scaled_disequilibrium = pred_row["scaled_disequilibrium"]

            if pair.user_data_["state"] in [
                PairState.INITIAL,
                PairState.CLOSE,
                PairState.CLOSE_POSITION,
                PairState.CLOSE_STOP_LOSS,
                PairState.CLOSE_STOP_PROFIT,
            ]:
                if scaled_disequilibrium >= open_threshold:
                    open_trades = self._get_open_trades(
                        pair, row=pred_row, open_threshold=open_threshold
                    )
                    if open_trades is not None:
                        open_trades["status"] = PairState.OPEN.name
                        print(f"OPEN TRADES:\n{open_trades}")
                        pair.add_trades(open_trades)
                        pair.user_data_["state"] = PairState.OPEN
                        pair.on_open_trades(open_trades)

            elif pair.user_data_["state"] == PairState.OPEN:
                if scaled_disequilibrium <= close_threshold:
                    close_trades = self._get_close_trades(
                        pair, row=pred_row, close_threshold=close_threshold
                    )
                    if close_trades is not None:
                        close_trades["status"] = PairState.CLOSE.name
                        print(f"CLOSE TRADES:\n{close_trades}")
                        pair.add_trades(close_trades)
                        pair.user_data_["state"] = PairState.CLOSE
                        pair.on_close_trades(close_trades)
                elif pair.to_stop_close_conditions(predicted_row=pred_row):
                    close_trades = self._get_close_trades(
                        pair, row=pred_row, close_threshold=close_threshold
                    )
                    if close_trades is not None:
                        close_trades["status"] = pair.user_data_[
                            "stop_close_state"
                        ].name
                        print(f"STOP CLOSE TRADES:\n{close_trades}")
                        pair.add_trades(close_trades)
                        pair.user_data_["state"] = pair.user_data_["stop_close_state"]
                        pair.on_close_trades(close_trades)

        # Outstanding positions
        if pair.user_data_["state"] == PairState.OPEN:
            print(f"{pair}: *** Position is NOT CLOSED. ***")
            # outstanding positions
            if config["close_outstanding_positions"]:
                close_position_row = pd.Series(pair.market_data_.iloc[-2])
                close_position_row["disequilibrium"] = 0.0
                close_position_row["scaled_disequilibrium"] = 0.0
                close_position_row["signed_scaled_disequilibrium"] = 0.0

                close_position_trades = self._get_close_trades(
                    pair=pair, row=close_position_row, close_threshold=close_threshold
                )
                if close_position_trades is not None:
                    close_position_trades["status"] = PairState.CLOSE_POSITION.name
                    print(f"CLOSE_POSITION TRADES:\n{close_position_trades}")
                    pair.add_trades(close_position_trades)
                    pair.user_data_["state"] = PairState.CLOSE_POSITION
                    pair.on_close_trades(close_position_trades)
            else:
                if predicted_df is not None:
                    bt_result.handle_outstanding_position(
                        pair=pair,
                        pair_result_df=predicted_df,
                        last_row_index=0,
                        open_side_a=pair.user_data_["open_side_a"],
                        open_side_b=pair.user_data_["open_side_b"],
                        open_px_a=pair.user_data_["open_px_a"],
                        open_px_b=pair.user_data_["open_px_b"],
                        open_tstamp=pair.user_data_["open_tstamp"],
                    )

    def _get_open_trades(
        self, pair: TradingPair, row: pd.Series, open_threshold: float
    ) -> Optional[pd.DataFrame]:
        colname_a, colname_b = pair.exec_prices_colnames()

        open_row = row

        open_tstamp = open_row["tstamp"]
        open_disequilibrium = open_row["disequilibrium"]
        open_scaled_disequilibrium = open_row["scaled_disequilibrium"]
        signed_scaled_disequilibrium = open_row["signed_scaled_disequilibrium"]
        open_px_a = open_row[f"{colname_a}"]
        open_px_b = open_row[f"{colname_b}"]

        # creating the trades
        print(f"OPEN_TRADES: {row["tstamp"]} {open_scaled_disequilibrium=}")
        if open_disequilibrium > 0:
            open_side_a = "SELL"
            open_side_b = "BUY"
            close_side_a = "BUY"
            close_side_b = "SELL"
        else:
            open_side_a = "BUY"
            open_side_b = "SELL"
            close_side_a = "SELL"
            close_side_b = "BUY"

        # save closing sides
        pair.user_data_["open_side_a"] = open_side_a
        pair.user_data_["open_side_b"] = open_side_b
        pair.user_data_["open_px_a"] = open_px_a
        pair.user_data_["open_px_b"] = open_px_b

        pair.user_data_["open_tstamp"] = open_tstamp

        pair.user_data_["close_side_a"] = close_side_a
        pair.user_data_["close_side_b"] = close_side_b

        # create opening trades
        trd_signal_tuples = [
            (
                open_tstamp,
                pair.symbol_a_,
                open_side_a,
                "OPEN",
                open_px_a,
                open_disequilibrium,
                open_scaled_disequilibrium,
                signed_scaled_disequilibrium,
                pair,
            ),
            (
                open_tstamp,
                pair.symbol_b_,
                open_side_b,
                "OPEN",
                open_px_b,
                open_disequilibrium,
                open_scaled_disequilibrium,
                signed_scaled_disequilibrium,
                pair,
            ),
        ]
        # Create DataFrame with explicit dtypes to avoid concatenation warnings
        df = pd.DataFrame(
            trd_signal_tuples,
            columns=self.TRADES_COLUMNS,
        )
        # Ensure consistent dtypes
        return df.astype(
            {
                "time": "datetime64[ns]",
                "action": "string",
                "symbol": "string",
                "price": "float64",
                "disequilibrium": "float64",
                "scaled_disequilibrium": "float64",
                "signed_scaled_disequilibrium": "float64",
                "pair": "object",
            }
        )

    def _get_close_trades(
        self, pair: TradingPair, row: pd.Series, close_threshold: float
    ) -> Optional[pd.DataFrame]:
        colname_a, colname_b = pair.exec_prices_colnames()

        close_row = row
        close_tstamp = close_row["tstamp"]
        close_disequilibrium = close_row["disequilibrium"]
        close_scaled_disequilibrium = close_row["scaled_disequilibrium"]
        signed_scaled_disequilibrium = close_row["signed_scaled_disequilibrium"]
        close_px_a = close_row[f"{colname_a}"]
        close_px_b = close_row[f"{colname_b}"]

        close_side_a = pair.user_data_["close_side_a"]
        close_side_b = pair.user_data_["close_side_b"]

        trd_signal_tuples = [
            (
                close_tstamp,
                pair.symbol_a_,
                close_side_a,
                "CLOSE",
                close_px_a,
                close_disequilibrium,
                close_scaled_disequilibrium,
                signed_scaled_disequilibrium,
                pair,
            ),
            (
                close_tstamp,
                pair.symbol_b_,
                close_side_b,
                "CLOSE",
                close_px_b,
                close_disequilibrium,
                close_scaled_disequilibrium,
                signed_scaled_disequilibrium,
                pair,
            ),
        ]

        # Add tuples to data frame with explicit dtypes to avoid concatenation warnings
        df = pd.DataFrame(
            trd_signal_tuples,
            columns=self.TRADES_COLUMNS,
        )
        # Ensure consistent dtypes
        return df.astype(
            {
                "time": "datetime64[ns]",
                "action": "string",
                "symbol": "string",
                "price": "float64",
                "disequilibrium": "float64",
                "scaled_disequilibrium": "float64",
                "signed_scaled_disequilibrium": "float64",
                "pair": "object",
            }
        )

    def reset(self) -> None:
        pass
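The run_pair() loop above implements an expanding training window: each iteration grows the training set by one bar and predicts a one-bar test set. A minimal standalone illustration of that windowing scheme, with names that are illustrative rather than the repository's API:

import pandas as pd

prices = pd.Series(range(10), dtype=float)  # stand-in for one symbol's 1-minute bars
training_minutes = 3                        # plays the role of config["training_minutes"]
while training_minutes + 1 < len(prices):
    train = prices.iloc[:training_minutes]                     # expanding training set
    test = prices.iloc[training_minutes:training_minutes + 1]  # one-bar test set
    # ... fit on `train` and predict `test` (pair.predict() in run_pair above) ...
    training_minutes += 1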
@@ -195,6 +195,16 @@ def convert_timestamp(timestamp: Any) -> Optional[datetime]:
    raise ValueError(f"Unsupported timestamp type: {type(timestamp)}")


class PairResarchResult:
    pair_: TradingPair
    trades_: Dict[str, Dict[str, Any]]
    outstanding_positions_: List[Dict[str, Any]]

    def __init__(self, config: Dict[str, Any], pair: TradingPair, trades: Dict[str, Dict[str, Any]], outstanding_positions: List[Dict[str, Any]]):
        self.config = config
        self.pair_ = pair
        self.trades_ = trades
        self.outstanding_positions_ = outstanding_positions


class BacktestResult:
    """
@@ -206,7 +216,7 @@ class BacktestResult:
        self.trades: Dict[str, Dict[str, Any]] = {}
        self.total_realized_pnl = 0.0
        self.outstanding_positions: List[Dict[str, Any]] = []
        self.pairs_trades_: Dict[str, List[Dict[str, Any]]] = {}
        self.symbol_roundtrip_trades_: Dict[str, List[Dict[str, Any]]] = {}

    def add_trade(
        self,
@@ -334,7 +344,7 @@ class BacktestResult:
        for filename, data in all_results.items():
            pairs = list(data["trades"].keys())
            for pair in pairs:
                self.pairs_trades_[pair] = []
                self.symbol_roundtrip_trades_[pair] = []
                trades_dict = data["trades"][pair]
                for symbol in trades_dict.keys():
                    trades.extend(trades_dict[symbol])
@@ -369,7 +379,7 @@ class BacktestResult:

                pair_return = symbol_a_return + symbol_b_return

                self.pairs_trades_[pair].append(
                self.symbol_roundtrip_trades_[pair].append(
                    {
                        "symbol": symbol_a,
                        "open_side": trade_a_1["side"],
@@ -391,7 +401,7 @@ class BacktestResult:
                        "pair_return": pair_return
                    }
                )
                self.pairs_trades_[pair].append(
                self.symbol_roundtrip_trades_[pair].append(
                    {
                        "symbol": symbol_b,
                        "open_side": trade_b_1["side"],
@@ -417,26 +427,24 @@ class BacktestResult:

        # Print pair returns with disequilibrium information
        day_return = 0.0
        if pair in self.pairs_trades_:
        if pair in self.symbol_roundtrip_trades_:

            print(f"{pair}:")
            pair_return = 0.0
            for trd in self.pairs_trades_[pair]:
            for trd in self.symbol_roundtrip_trades_[pair]:
                disequil_info = ""
                if (
                    trd["open_scaled_disequilibrium"] is not None
                    and trd["open_scaled_disequilibrium"] is not None
                ):
                    disequil_info = (
                        f' | Open Dis-eq: {trd["open_scaled_disequilibrium"]:.2f},'
                        f' Close Dis-eq: {trd["close_scaled_disequilibrium"]:.2f}'
                    )
                    disequil_info = f" | Open Dis-eq: {trd['open_scaled_disequilibrium']:.2f},"
                    f" Close Dis-eq: {trd['open_scaled_disequilibrium']:.2f}"

                print(
                    f' {trd["open_time"].time()}-{trd["close_time"].time()} {trd["symbol"]}: '
                    f' {trd["open_side"]} @ ${trd["open_price"]:.2f},'
                    f' {trd["close_side"]} @ ${trd["close_price"]:.2f},'
                    f' Return: {trd["symbol_return"]:.2f}%{disequil_info}'
                    f" {trd['open_time'].time()}-{trd['close_time'].time()} {trd['symbol']}: "
                    f" {trd['open_side']} @ ${trd['open_price']:.2f},"
                    f" {trd["close_side"]} @ ${trd["close_price"]:.2f},"
                    f" Return: {trd['symbol_return']:.2f}%{disequil_info}"
                )
                pair_return += trd["symbol_return"]

@@ -643,7 +651,7 @@ class BacktestResult:
        for pair_name, _ in trades.items():

            # Second pass: insert completed trade records into database
            for trade_pair in sorted(self.pairs_trades_[pair_name], key=lambda x: x["open_time"]):
            for trade_pair in sorted(self.symbol_roundtrip_trades_[pair_name], key=lambda x: x["open_time"]):
                # Only store completed trades in pt_bt_results table
                cursor.execute(
                    """
@@ -188,8 +188,7 @@ class RollingFit(PairsTradingFitMethod):
        open_px_b = open_row[f"{colname_b}"]

        # creating the trades
        # use outer single quotes so we can reference DataFrame keys with double quotes inside
        print(f'OPEN_TRADES: {open_tstamp} open_scaled_disequilibrium={open_scaled_disequilibrium}')
        print(f"OPEN_TRADES: {row["tstamp"]} {open_scaled_disequilibrium=}")
        if open_disequilibrium > 0:
            open_side_a = "SELL"
            open_side_b = "BUY"
@@ -238,7 +237,10 @@ class RollingFit(PairsTradingFitMethod):
            ),
        ]
        # Create DataFrame with explicit dtypes to avoid concatenation warnings
        df = pd.DataFrame(trd_signal_tuples, columns=self.TRADES_COLUMNS)
        df = pd.DataFrame(
            trd_signal_tuples,
            columns=self.TRADES_COLUMNS,
        )
        # Ensure consistent dtypes
        return df.astype(
            {
@@ -314,4 +316,4 @@ class RollingFit(PairsTradingFitMethod):
        )

    def reset(self) -> None:
        curr_training_start_idx = 0
        pass
@@ -119,8 +119,8 @@ class TradingPair(ABC):
            return
        execution_price_column = self.config_["execution_price"]["column"]
        execution_price_shift = self.config_["execution_price"]["shift"]
        self.market_data_[f"exec_price_{self.symbol_a_}"] = self.market_data_[f"{self.stat_model_price_}_{self.symbol_a_}"].shift(-execution_price_shift)
        self.market_data_[f"exec_price_{self.symbol_b_}"] = self.market_data_[f"{self.stat_model_price_}_{self.symbol_b_}"].shift(-execution_price_shift)
        self.market_data_[f"exec_price_{self.symbol_a_}"] = self.market_data_[f"{execution_price_column}_{self.symbol_a_}"].shift(-execution_price_shift)
        self.market_data_[f"exec_price_{self.symbol_b_}"] = self.market_data_[f"{execution_price_column}_{self.symbol_b_}"].shift(-execution_price_shift)
        self.market_data_ = self.market_data_.dropna().reset_index(drop=True)
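The TradingPair hunk above switches the execution-price source from the statistical-model price column to the configured column, shifted one bar forward so fills use the next bar's price. A small sketch of that shift on a toy frame (column names are illustrative):

import pandas as pd

md = pd.DataFrame({"vwap_ADA-USDT": [1.00, 1.10, 1.20, 1.30]})
execution_price_column = "vwap"   # plays the role of config_["execution_price"]["column"]
execution_price_shift = 1         # plays the role of config_["execution_price"]["shift"]
md["exec_price_ADA-USDT"] = md[f"{execution_price_column}_ADA-USDT"].shift(-execution_price_shift)
md = md.dropna().reset_index(drop=True)  # the last bar has no next-bar execution price
print(md)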
__DELETE__/lib/pt_trading/vecm_rolling_fit.py (new file): 122 additions
@@ -0,0 +1,122 @@
from typing import Any, Dict, Optional, cast

import pandas as pd
from pt_trading.results import BacktestResult
from pt_trading.rolling_window_fit import RollingFit
from pt_trading.trading_pair import TradingPair
from statsmodels.tsa.vector_ar.vecm import VECM, VECMResults

NanoPerMin = 1e9


class VECMTradingPair(TradingPair):
    vecm_fit_: Optional[VECMResults]
    pair_predict_result_: Optional[pd.DataFrame]

    def __init__(
        self,
        config: Dict[str, Any],
        market_data: pd.DataFrame,
        symbol_a: str,
        symbol_b: str,
    ):
        super().__init__(config, market_data, symbol_a, symbol_b)
        self.vecm_fit_ = None
        self.pair_predict_result_ = None

    def _train_pair(self) -> None:
        self._fit_VECM()
        assert self.vecm_fit_ is not None
        diseq_series = self.training_df_[self.colnames()] @ self.vecm_fit_.beta
        # print(diseq_series.shape)
        self.training_mu_ = float(diseq_series[0].mean())
        self.training_std_ = float(diseq_series[0].std())

        self.training_df_["dis-equilibrium"] = (
            self.training_df_[self.colnames()] @ self.vecm_fit_.beta
        )
        # Normalize the dis-equilibrium
        self.training_df_["scaled_dis-equilibrium"] = (
            diseq_series - self.training_mu_
        ) / self.training_std_

    def _fit_VECM(self) -> None:
        assert self.training_df_ is not None
        vecm_df = self.training_df_[self.colnames()].reset_index(drop=True)
        vecm_model = VECM(vecm_df, coint_rank=1)
        vecm_fit = vecm_model.fit()

        assert vecm_fit is not None

        # URGENT check beta and alpha

        # Check if the model converged properly
        if not hasattr(vecm_fit, "beta") or vecm_fit.beta is None:
            print(f"{self}: VECM model failed to converge properly")

        self.vecm_fit_ = vecm_fit
        pass

    def predict(self) -> pd.DataFrame:
        self._train_pair()

        assert self.testing_df_ is not None
        assert self.vecm_fit_ is not None
        predicted_prices = self.vecm_fit_.predict(steps=len(self.testing_df_))

        # Convert prediction to a DataFrame for readability
        predicted_df = pd.DataFrame(
            predicted_prices, columns=pd.Index(self.colnames()), dtype=float
        )

        predicted_df = pd.merge(
            self.testing_df_.reset_index(drop=True),
            pd.DataFrame(
                predicted_prices, columns=pd.Index(self.colnames()), dtype=float
            ),
            left_index=True,
            right_index=True,
            suffixes=("", "_pred"),
        ).dropna()

        predicted_df["disequilibrium"] = (
            predicted_df[self.colnames()] @ self.vecm_fit_.beta
        )

        predicted_df["signed_scaled_disequilibrium"] = (
            predicted_df["disequilibrium"] - self.training_mu_
        ) / self.training_std_

        predicted_df["scaled_disequilibrium"] = abs(
            predicted_df["signed_scaled_disequilibrium"]
        )

        predicted_df = predicted_df.reset_index(drop=True)
        if self.pair_predict_result_ is None:
            self.pair_predict_result_ = predicted_df
        else:
            self.pair_predict_result_ = pd.concat(
                [self.pair_predict_result_, predicted_df], ignore_index=True
            )
        # Reset index to ensure proper indexing
        self.pair_predict_result_ = self.pair_predict_result_.reset_index(drop=True)
        return self.pair_predict_result_


class VECMRollingFit(RollingFit):
    def __init__(self) -> None:
        super().__init__()

    def create_trading_pair(
        self,
        config: Dict,
        market_data: pd.DataFrame,
        symbol_a: str,
        symbol_b: str,
    ) -> TradingPair:
        return VECMTradingPair(
            config=config,
            market_data=market_data,
            symbol_a=symbol_a,
            symbol_b=symbol_b,
        )
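For context, a minimal standalone sketch of the disequilibrium computation VECMTradingPair performs, using statsmodels' VECM on synthetic cointegrated series; the data and sizes are illustrative, not repository fixtures:

import numpy as np
import pandas as pd
from statsmodels.tsa.vector_ar.vecm import VECM

# Two synthetic price series sharing a common stochastic trend (so they cointegrate).
rng = np.random.default_rng(0)
common = np.cumsum(rng.normal(size=500))
prices = pd.DataFrame({
    "px_a": 100 + common + rng.normal(scale=0.5, size=500),
    "px_b": 50 + 0.5 * common + rng.normal(scale=0.5, size=500),
})

fit = VECM(prices, coint_rank=1).fit()
# beta is the cointegrating vector; prices @ beta yields the disequilibrium series.
diseq = prices @ fit.beta
scaled = (diseq - diseq[0].mean()) / diseq[0].std()  # the "scaled_dis-equilibrium" idea
print(scaled.tail())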
__DELETE__/lib/pt_trading/z-score_rolling_fit.py (new file): 85 additions
@@ -0,0 +1,85 @@
from typing import Any, Dict, Optional, cast

import pandas as pd
from pt_trading.results import BacktestResult
from pt_trading.rolling_window_fit import RollingFit
from pt_trading.trading_pair import TradingPair
import statsmodels.api as sm

NanoPerMin = 1e9


class ZScoreTradingPair(TradingPair):
    zscore_model_: Optional[sm.regression.linear_model.RegressionResultsWrapper]
    pair_predict_result_: Optional[pd.DataFrame]
    zscore_df_: Optional[pd.DataFrame]

    def __init__(
        self,
        config: Dict[str, Any],
        market_data: pd.DataFrame,
        symbol_a: str,
        symbol_b: str,
    ):
        super().__init__(config, market_data, symbol_a, symbol_b)
        self.zscore_model_ = None
        self.pair_predict_result_ = None
        self.zscore_df_ = None

    def _fit_zscore(self) -> None:
        assert self.training_df_ is not None
        symbol_a_px_series = self.training_df_[self.colnames()].iloc[:, 0]
        symbol_b_px_series = self.training_df_[self.colnames()].iloc[:, 1]

        symbol_a_px_series, symbol_b_px_series = symbol_a_px_series.align(
            symbol_b_px_series, axis=0
        )

        X = sm.add_constant(symbol_b_px_series)
        self.zscore_model_ = sm.OLS(symbol_a_px_series, X).fit()
        assert self.zscore_model_ is not None
        hedge_ratio = self.zscore_model_.params.iloc[1]

        # Calculate spread and Z-score
        spread = symbol_a_px_series - hedge_ratio * symbol_b_px_series
        self.zscore_df_ = (spread - spread.mean()) / spread.std()

    def predict(self) -> pd.DataFrame:
        self._fit_zscore()
        assert self.zscore_df_ is not None
        self.training_df_["dis-equilibrium"] = self.zscore_df_
        self.training_df_["scaled_dis-equilibrium"] = abs(self.zscore_df_)

        assert self.testing_df_ is not None
        assert self.zscore_df_ is not None
        predicted_df = self.testing_df_

        predicted_df["disequilibrium"] = self.zscore_df_
        predicted_df["signed_scaled_disequilibrium"] = self.zscore_df_
        predicted_df["scaled_disequilibrium"] = abs(self.zscore_df_)

        predicted_df = predicted_df.reset_index(drop=True)
        if self.pair_predict_result_ is None:
            self.pair_predict_result_ = predicted_df
        else:
            self.pair_predict_result_ = pd.concat(
                [self.pair_predict_result_, predicted_df], ignore_index=True
            )
        # Reset index to ensure proper indexing
        self.pair_predict_result_ = self.pair_predict_result_.reset_index(drop=True)
        return self.pair_predict_result_.dropna()


class ZScoreRollingFit(RollingFit):
    def __init__(self) -> None:
        super().__init__()

    def create_trading_pair(
        self, config: Dict, market_data: pd.DataFrame, symbol_a: str, symbol_b: str
    ) -> TradingPair:
        return ZScoreTradingPair(
            config=config,
            market_data=market_data,
            symbol_a=symbol_a,
            symbol_b=symbol_b,
        )
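A minimal standalone sketch of the hedge-ratio and z-score computation in _fit_zscore() above, on synthetic series (illustrative data only):

import numpy as np
import pandas as pd
import statsmodels.api as sm

# Illustrative series; the repository derives these from its market-data frame.
rng = np.random.default_rng(1)
px_b = pd.Series(50 + np.cumsum(rng.normal(size=300)))
px_a = 2.0 * px_b + rng.normal(scale=0.8, size=300)

X = sm.add_constant(px_b)
ols = sm.OLS(px_a, X).fit()
hedge_ratio = ols.params.iloc[1]  # slope of a regressed on b

spread = px_a - hedge_ratio * px_b
zscore = (spread - spread.mean()) / spread.std()  # the class's zscore_df_
print(zscore.describe())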
@@ -9,19 +9,18 @@ import pandas as pd

from tools.config import expand_filename, load_config
from tools.data_loader import get_available_instruments_from_db

from pt_trading.results import (
    BacktestResult,
    create_result_database,
    store_config_in_database,
    store_results_in_database,
)

from pt_trading.fit_method import PairsTradingFitMethod
from pt_trading.trading_pair import TradingPair

from research.research_tools import create_pairs, resolve_datafiles


def main() -> None:
    parser = argparse.ArgumentParser(description="Run pairs trading backtest.")
    parser.add_argument(
@@ -37,7 +36,7 @@ def main() -> None:
        "--instruments",
        type=str,
        required=False,
        help = "Comma-separated list of instrument symbols (e.g., COIN,GBTC). If not provided, auto-detects from database.",
        help="Comma-separated list of instrument symbols (e.g., COIN,GBTC). If not provided, auto-detects from database.",
    )
    args = parser.parse_args()

__DELETE__/research/notebooks/single_pair_test.ipynb (new file): 7656 additions
File diff suppressed because one or more lines are too long
__DELETE__/strategy/pair_strategy.py (new file): 101 additions
@@ -0,0 +1,101 @@
import argparse
import asyncio
import glob
import importlib
import os
from datetime import date, datetime
from typing import Any, Dict, List, Optional

import hjson
import pandas as pd

from tools.data_loader import get_available_instruments_from_db, load_market_data
from pt_trading.results import (
    BacktestResult,
    create_result_database,
    store_config_in_database,
    store_results_in_database,
)
from pt_trading.fit_methods import PairsTradingFitMethod
from pt_trading.trading_pair import TradingPair


def run_strategy(
    config: Dict,
    datafile: str,
    fit_method: PairsTradingFitMethod,
    instruments: List[str],
) -> BacktestResult:
    """
    Run backtest for all pairs using the specified instruments.
    """
    bt_result: BacktestResult = BacktestResult(config=config)

    def _create_pairs(config: Dict, instruments: List[str]) -> List[TradingPair]:
        nonlocal datafile
        all_indexes = range(len(instruments))
        unique_index_pairs = [(i, j) for i in all_indexes for j in all_indexes if i < j]
        pairs = []

        # Update config to use the specified instruments
        config_copy = config.copy()
        config_copy["instruments"] = instruments

        market_data_df = load_market_data(
            datafile=datafile,
            exchange_id=config_copy["exchange_id"],
            instruments=config_copy["instruments"],
            instrument_id_pfx=config_copy["instrument_id_pfx"],
            db_table_name=config_copy["db_table_name"],
            trading_hours=config_copy["trading_hours"],
        )

        for a_index, b_index in unique_index_pairs:
            pair = fit_method.create_trading_pair(
                market_data=market_data_df,
                symbol_a=instruments[a_index],
                symbol_b=instruments[b_index],
            )
            pairs.append(pair)
        return pairs

    pairs_trades = []
    for pair in _create_pairs(config, instruments):
        single_pair_trades = fit_method.run_pair(
            pair=pair, config=config, bt_result=bt_result
        )
        if single_pair_trades is not None and len(single_pair_trades) > 0:
            pairs_trades.append(single_pair_trades)

    # Check if result_list has any data before concatenating
    if len(pairs_trades) == 0:
        print("No trading signals found for any pairs")
        return bt_result

    result = pd.concat(pairs_trades, ignore_index=True)
    result["time"] = pd.to_datetime(result["time"])
    result = result.set_index("time").sort_index()

    bt_result.collect_single_day_results(result)
    return bt_result


def main() -> None:
    # Load config
    # Subscribe to CVTT market data
    # On snapshot (with historical data) - create trading strategy with market data dateframe

    async def on_message(message_type: MessageTypeT, subscr_id: SubscriptionIdT, message: Dict, instrument_id: str) -> None:
        print(f"{message_type=} {subscr_id=} {instrument_id}")
        if message_type == "md_aggregate":
            aggr = message.get("md_aggregate", [])
            print(f"[{aggr['tstamp'][:19]}] *** RLTM *** {message}")
        elif message_type == "historical_md_aggregate":
            for aggr in message.get("historical_data", []):
                print(f"[{aggr['tstamp'][:19]}] *** HIST *** {aggr}")
        else:
            print(f"Unknown message type: {message_type}")


if __name__ == "__main__":
    asyncio.run(main())
bin/pairs_trader.py (new file): 105 additions
@@ -0,0 +1,105 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import partial
|
||||
from typing import Dict, List
|
||||
|
||||
from cvttpy_tools.settings.cvtt_types import JsonDictT
|
||||
from cvttpy_tools.tools.app import App
|
||||
from cvttpy_tools.tools.base import NamedObject
|
||||
from cvttpy_tools.tools.config import CvttAppConfig
|
||||
from cvttpy_tools.tools.logger import Log
|
||||
from pt_strategy.live.live_strategy import PtLiveStrategy
|
||||
from pt_strategy.live.pricer_md_client import PtMktDataClient
|
||||
from pt_strategy.live.ti_sender import TradingInstructionsSender
|
||||
|
||||
# import sys
|
||||
# print("PYTHONPATH directories:")
|
||||
# for path in sys.path:
|
||||
# print(path)
|
||||
|
||||
|
||||
|
||||
# from cvtt_client.mkt_data import (CvttPricerWebSockClient,
|
||||
# CvttPricesSubscription, MessageTypeT,
|
||||
# SubscriptionIdT)
|
||||
|
||||
class PairTradingRunner(NamedObject):
|
||||
config_: CvttAppConfig
|
||||
instruments_: List[JsonDictT]
|
||||
|
||||
live_strategy_: PtLiveStrategy
|
||||
pricer_client_: PtMktDataClient
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.instruments_ = []
|
||||
|
||||
App.instance().add_cmdline_arg(
|
||||
"--pair",
|
||||
type=str,
|
||||
required=True,
|
||||
help=(
|
||||
"Comma-separated pair of instrument symbols"
|
||||
" with exchange config name"
|
||||
" (e.g., PAIR-BTC-USD:BNBSPOT,PAIR-ETH-USD:BNBSPOT)"
|
||||
),
|
||||
)
|
||||
|
||||
App.instance().add_call(App.Stage.Config, self._on_config())
|
||||
App.instance().add_call(App.Stage.Run, self.run())
|
||||
|
||||
async def _on_config(self) -> None:
|
||||
self.config_ = CvttAppConfig.instance()
|
||||
|
||||
# ------- PARSE INSTRUMENTS -------
|
||||
instr_str = App.instance().get_argument("pair", "")
|
||||
if not instr_str:
|
||||
raise ValueError("Pair is required")
|
||||
instr_list = instr_str.split(",")
|
||||
for instr in instr_list:
|
||||
instr_parts = instr.split(":")
|
||||
if len(instr_parts) != 2:
|
||||
raise ValueError(f"Invalid pair format: {instr}")
|
||||
instrument_id = instr_parts[0]
|
||||
exchange_config_name = instr_parts[1]
|
||||
self.instruments_.append({
|
||||
"exchange_config_name": exchange_config_name,
|
||||
"instrument_id": instrument_id
|
||||
})
|
||||
|
||||
assert len(self.instruments_) == 2, "Only two instruments are supported"
|
||||
Log.info(f"{self.fname()} Instruments: {self.instruments_}")
|
||||
|
||||
# ------- CREATE TI (trading instructions) CLIENT -------
|
||||
ti_config = self.config_.get_subconfig("ti_config", {})
|
||||
self.ti_sender_ = TradingInstructionsSender(config=ti_config)
|
||||
Log.info(f"{self.fname()} TI client created: {self.ti_sender_}")
|
||||
|
||||
# ------- CREATE STRATEGY -------
|
||||
strategy_config = self.config_.get_value("strategy_config", {})
|
||||
self.live_strategy_ = PtLiveStrategy(
|
||||
config=strategy_config,
|
||||
instruments=self.instruments_,
|
||||
ti_sender=self.ti_sender_
|
||||
)
|
||||
Log.info(f"{self.fname()} Strategy created: {self.live_strategy_}")
|
||||
|
||||
# ------- CREATE PRICER CLIENT -------
|
||||
pricer_config = self.config_.get_subconfig("pricer_config", {})
|
||||
self.pricer_client_ = PtMktDataClient(
|
||||
live_strategy=self.live_strategy_,
|
||||
pricer_config=pricer_config
|
||||
)
|
||||
Log.info(f"{self.fname()} CVTT Pricer client created: {self.pricer_client_}")
|
||||
|
||||
async def run(self) -> None:
|
||||
Log.info(f"{self.fname()} ...")
|
||||
pass
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
App()
|
||||
CvttAppConfig()
|
||||
PairTradingRunner()
|
||||
App.instance().run()
|
||||
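For reference, the --pair parsing above turns the string from launch.json into two instrument dicts; a quick standalone illustration:

pair_arg = "PAIR-ADA-USDT:BNBSPOT,PAIR-SOL-USDT:BNBSPOT"
instruments = []
for instr in pair_arg.split(","):
    instrument_id, exchange_config_name = instr.split(":")
    instruments.append({
        "exchange_config_name": exchange_config_name,
        "instrument_id": instrument_id,
    })
# -> [{'exchange_config_name': 'BNBSPOT', 'instrument_id': 'PAIR-ADA-USDT'},
#     {'exchange_config_name': 'BNBSPOT', 'instrument_id': 'PAIR-SOL-USDT'}]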
@ -1,27 +0,0 @@
{
    "security_type": "EQUITY",
    "data_directory": "./data/equity",
    "datafiles": [
        "20250618.mktdata.ohlcv.db",
    ],
    "db_table_name": "md_1min_bars",
    "exchange_id": "ALPACA",
    "instrument_id_pfx": "STOCK-",
    "trading_hours": {
        "begin_session": "9:30:00",
        "end_session": "16:00:00",
        "timezone": "America/New_York"
    },
    "price_column": "close",
    "min_required_points": 30,
    "zero_threshold": 1e-10,
    "dis-equilibrium_open_trshld": 2.0,
    "dis-equilibrium_close_trshld": 1.0,
    "training_minutes": 120,
    "funding_per_pair": 2000.0,
    # "fit_method_class": "pt_trading.sliding_fit.SlidingFit",
    "fit_method_class": "pt_trading.static_fit.StaticFit",
    "exclude_instruments": ["CAN"],
    "close_outstanding_positions": false
}
@ -1,26 +0,0 @@
{
    "security_type": "EQUITY",
    "data_directory": "./data/equity",
    "datafiles": [
        "20250602.mktdata.ohlcv.db",
    ],
    "db_table_name": "md_1min_bars",
    "exchange_id": "ALPACA",
    "instrument_id_pfx": "STOCK-",
    "trading_hours": {
        "begin_session": "9:30:00",
        "end_session": "16:00:00",
        "timezone": "America/New_York"
    },
    "price_column": "close",
    "min_required_points": 30,
    "zero_threshold": 1e-10,
    "dis-equilibrium_open_trshld": 2.0,
    "dis-equilibrium_close_trshld": 1.0,
    "training_minutes": 120,
    "funding_per_pair": 2000.0,
    "fit_method_class": "pt_trading.fit_methods.StaticFit",
    "exclude_instruments": ["CAN"]
}
# "fit_method_class": "pt_trading.fit_methods.SlidingFit",
# "fit_method_class": "pt_trading.fit_methods.StaticFit",
43
configuration/ols-exp.cfg
Normal file
@ -0,0 +1,43 @@
{
    "market_data_loading": {
        "CRYPTO": {
            "data_directory": "./data/crypto",
            "db_table_name": "md_1min_bars",
            "instrument_id_pfx": "PAIR-",
        },
        "EQUITY": {
            "data_directory": "./data/equity",
            "db_table_name": "md_1min_bars",
            "instrument_id_pfx": "STOCK-",
        }
    },

    # ====== Funding ======
    "funding_per_pair": 2000.0,
    # ====== Trading Parameters ======
    "stat_model_price": "close",
    "execution_price": {
        "column": "vwap",
        "shift": 1,
    },
    "dis-equilibrium_open_trshld": 2.0,
    "dis-equilibrium_close_trshld": 0.5,
    "training_size": 120,
    "model_class": "pt_strategy.models.OLSModel",
    "model_data_policy_class": "pt_strategy.model_data_policy.ExpandingWindowDataPolicy",

    # ====== Stop Conditions ======
    "stop_close_conditions": {
        "profit": 2.0,
        "loss": -0.5
    }

    # ====== End of Session Closeout ======
    "close_outstanding_positions": true,
    # "close_outstanding_positions": false,
    "trading_hours": {
        "timezone": "America/New_York",
        "begin_session": "7:30:00",
        "end_session": "18:30:00",
    }
}
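The execution_price block separates the price used to fit the model (close) from the price used to fill trades (vwap), shifted by one bar. A hedged pandas sketch of that idea; the exact shift direction applied by the backtest is an assumption here:

import pandas as pd

bars = pd.DataFrame({
    "tstamp": pd.date_range("2025-06-05 09:30", periods=4, freq="1min"),
    "close": [1.00, 1.01, 1.02, 1.03],
    "vwap":  [1.005, 1.012, 1.019, 1.028],
})
shift = 1
# signal computed on this bar's close; fill assumed at the vwap `shift` bars later
bars["exec_price"] = bars["vwap"].shift(-shift)
print(bars)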
47
configuration/ols-opt.cfg
Normal file
@ -0,0 +1,47 @@
{
    "market_data_loading": {
        "CRYPTO": {
            "data_directory": "./data/crypto",
            "db_table_name": "md_1min_bars",
            "instrument_id_pfx": "PAIR-",
        },
        "EQUITY": {
            "data_directory": "./data/equity",
            "db_table_name": "md_1min_bars",
            "instrument_id_pfx": "STOCK-",
        }
    },

    # ====== Funding ======
    "funding_per_pair": 2000.0,
    # ====== Trading Parameters ======
    "stat_model_price": "close",
    "execution_price": {
        "column": "vwap",
        "shift": 1,
    },
    "dis-equilibrium_open_trshld": 1.75,
    "dis-equilibrium_close_trshld": 0.9,
    "model_class": "pt_strategy.models.OLSModel",

    # "model_data_policy_class": "pt_strategy.model_data_policy.EGOptimizedWndDataPolicy",
    # "model_data_policy_class": "pt_strategy.model_data_policy.ADFOptimizedWndDataPolicy",
    "model_data_policy_class": "pt_strategy.model_data_policy.JohansenOptdWndDataPolicy",
    "min_training_size": 60,
    "max_training_size": 150,

    # ====== Stop Conditions ======
    "stop_close_conditions": {
        "profit": 2.0,
        "loss": -0.5
    }

    # ====== End of Session Closeout ======
    "close_outstanding_positions": true,
    # "close_outstanding_positions": false,
    "trading_hours": {
        "timezone": "America/New_York",
        "begin_session": "7:30:00",
        "end_session": "18:30:00",
    }
}
47
configuration/ols.cfg
Normal file
@ -0,0 +1,47 @@
{
    "market_data_loading": {
        "CRYPTO": {
            "data_directory": "./data/crypto",
            "db_table_name": "md_1min_bars",
            "instrument_id_pfx": "PAIR-",
        },
        "EQUITY": {
            "data_directory": "./data/equity",
            "db_table_name": "md_1min_bars",
            "instrument_id_pfx": "STOCK-",
        }
    },

    # ====== Funding ======
    "funding_per_pair": 2000.0,
    # ====== Trading Parameters ======
    "stat_model_price": "close",
    "execution_price": {
        "column": "vwap",
        "shift": 1,
    },
    "dis-equilibrium_open_trshld": 1.75,
    "dis-equilibrium_close_trshld": 0.9,
    "model_class": "pt_strategy.models.OLSModel",

    "training_size": 120,
    "model_data_policy_class": "pt_strategy.model_data_policy.RollingWindowDataPolicy",
    # "model_data_policy_class": "pt_strategy.model_data_policy.OptimizedWindowDataPolicy",
    # "min_training_size": 60,
    # "max_training_size": 150,

    # ====== Stop Conditions ======
    "stop_close_conditions": {
        "profit": 2.0,
        "loss": -0.5
    }

    # ====== End of Session Closeout ======
    "close_outstanding_positions": true,
    # "close_outstanding_positions": false,
    "trading_hours": {
        "timezone": "America/New_York",
        "begin_session": "7:30:00",
        "end_session": "18:30:00",
    }
}
21
configuration/pairs_trader.cfg
Normal file
@ -0,0 +1,21 @@
{
    "strategy_config": @inc=file:///home/oleg/develop/pairs_trading/configuration/ols.cfg
    "pricer_config": {
        "pricer_url": "ws://localhost:12346/ws",
        "history_depth_sec": 86400,  # "60*60*24", # use simpleeval
        "interval_sec": 60
    },
    "ti_config": {
        "cvtt_base_url": "http://localhost:23456",
        "book_id": "XXXXXXXXX",
        "strategy_id": "XXXXXXXXX",
        "ti_endpoint": {
            "method": "POST",
            "url": "/trading_instructions"
        },
        "health_check_endpoint": {
            "method": "GET",
            "url": "/ping"
        }
    }
}
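The `# use simpleeval` comment suggests letting history_depth_sec be written as an arithmetic expression such as "60*60*24" instead of the literal 86400. A sketch with the simpleeval package, assuming that is the library the comment refers to:

from simpleeval import simple_eval  # pip install simpleeval

history_depth_sec = simple_eval("60*60*24")  # safely evaluates the expression
assert history_depth_sec == 86400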
49
configuration/vecm-opt.cfg
Normal file
@ -0,0 +1,49 @@
{
    "market_data_loading": {
        "CRYPTO": {
            "data_directory": "./data/crypto",
            "db_table_name": "md_1min_bars",
            "instrument_id_pfx": "PAIR-",
        },
        "EQUITY": {
            "data_directory": "./data/equity",
            "db_table_name": "md_1min_bars",
            "instrument_id_pfx": "STOCK-",
        }
    },

    # ====== Funding ======
    "funding_per_pair": 2000.0,

    # ====== Trading Parameters ======
    "stat_model_price": "close",  # "vwap"
    "execution_price": {
        "column": "vwap",
        "shift": 1,
    },
    "dis-equilibrium_open_trshld": 1.75,
    "dis-equilibrium_close_trshld": 1.0,

    "model_class": "pt_strategy.models.VECMModel",

    # "training_size": 120,
    # "model_data_policy_class": "pt_strategy.model_data_policy.RollingWindowDataPolicy",
    "model_data_policy_class": "pt_strategy.model_data_policy.ADFOptimizedWndDataPolicy",
    "min_training_size": 60,
    "max_training_size": 150,

    # ====== Stop Conditions ======
    "stop_close_conditions": {
        "profit": 2.0,
        "loss": -0.5
    }

    # ====== End of Session Closeout ======
    "close_outstanding_positions": true,
    # "close_outstanding_positions": false,
    "trading_hours": {
        "timezone": "America/New_York",
        "begin_session": "7:30:00",
        "end_session": "18:30:00",
    }
}
@ -21,10 +21,15 @@
         "column": "vwap",
         "shift": 1,
     },
-    "dis-equilibrium_open_trshld": 2.0,
+    "dis-equilibrium_open_trshld": 1.75,
     "dis-equilibrium_close_trshld": 1.0,
-    "training_minutes": 120,
-    "fit_method_class": "pt_trading.vecm_rolling_fit.VECMRollingFit",
+    "model_class": "pt_strategy.models.VECMModel",
+
+    "training_size": 120,
+    "model_data_policy_class": "pt_strategy.model_data_policy.RollingWindowDataPolicy",
+    # "model_data_policy_class": "pt_strategy.model_data_policy.OptimizedWindowDataPolicy",
+    # "min_training_size": 60,
+    # "max_training_size": 150,
 
     # ====== Stop Conditions ======
     "stop_close_conditions": {
@ -37,7 +42,7 @@
     # "close_outstanding_positions": false,
     "trading_hours": {
         "timezone": "America/New_York",
-        "begin_session": "9:30:00",
+        "begin_session": "7:30:00",
         "end_session": "18:30:00",
     }
 }
115
lg_notes.md
@ -1,115 +0,0 @@
07.11.2025
pairs_trading/configuration <---- directory for config
equity_lg.cfg <-------- copy of equity.cfg
How to run a program: TRIANGLEsquare ----> triangle EQUITY backtest
Results are in > results (timestamped table for all runs)
table "...timestamp... .pt_backtest_results.equity.db"
Open the table using sqlite:
> sqlite3 '/home/coder/results/20250721_175750.pt_backtest_results.equity.db'

sqlite> .databases
main: /home/coder/results/20250717_180122.pt_backtest_results.equity.db r/w
sqlite> .tables
config  outstanding_positions  pt_bt_results

sqlite> PRAGMA table_info('pt_bt_results');
0|date|DATE|0||0
1|pair|TEXT|0||0
2|symbol|TEXT|0||0
3|open_time|DATETIME|0||0
4|open_side|TEXT|0||0
5|open_price|REAL|0||0
6|open_quantity|INTEGER|0||0
7|open_disequilibrium|REAL|0||0
8|close_time|DATETIME|0||0
9|close_side|TEXT|0||0
10|close_price|REAL|0||0
11|close_quantity|INTEGER|0||0
12|close_disequilibrium|REAL|0||0
13|symbol_return|REAL|0||0
14|pair_return|REAL|0||0

select count(*) as cnt from pt_bt_results;
8

select * from pt_bt_results;

select
    date, close_time, pair, symbol, symbol_return, pair_return
from pt_bt_results;

select date, sum(symbol_return) as daily_return
from pt_bt_results where date = '2025-06-18' group by date;

.quit

sqlite3 '/home/coder/results/20250717_172435.pt_backtest_results.equity.db'

sqlite> select date, sum(symbol_return) as daily_return
from pt_bt_results group by date;

2025-06-02|1.29845390060828
...
2025-06-18|-43.5084977104115 <========== ????? ==========>
2025-06-20|11.8605547517183

select
    date, close_time, pair, symbol, symbol_return, pair_return
from pt_bt_results;

select date, close_time, pair, symbol, symbol_return, pair_return
from pt_bt_results where date = '2025-06-18';

./scripts/load_equity_pair_intraday.sh -A NVDA -B QQQ -d 20250701 -T ./intraday_md

To inspect exactly which sources, formats, and processing steps are used, open the script with:
head -n 50 ./scripts/load_equity_pair_intraday.sh

✓ Data file found: /home/coder/pairs_trading/data/crypto/20250605.mktdata.ohlcv.db

sqlite3 '/home/coder/results/20250722_201930.pt_backtest_results.crypto.db'

sqlite3 '/home/coder/results/xxxxxxxx_yyyyyy.pt_backtest_results.pseudo.db'

=== At your terminal, run these commands:
sqlite3 '/home/coder/results/20250722_201930.pt_backtest_results.crypto.db'
=== Then inside the SQLite prompt:
.mode csv
.headers on
.output results_20250722.csv
SELECT * FROM pt_bt_results;
.output stdout
.quit

cd /home/coder/

# === .mode csv formats output as CSV
# === .headers on includes column names
# === .output my_table.csv directs output to that file
# === Run your SELECT query, then revert output with .output stdout
# === Open my_table.csv in Excel directly

# ======== Using scp (Secure Copy)
# === On your local machine, open a terminal and run:
scp cvtt@953f6e8df266:/home/coder/results_20250722.csv ~/Downloads/

# ===== convert CSV to a pandas DataFrame =====
import pandas as pd
# Replace with the actual path to your CSV file
file_path = '/home/coder/results_20250722.csv'
# Read the CSV file into a DataFrame
df = pd.read_csv(file_path)
# Show the first few rows
print(df.head())
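Instead of the CSV round-trip above, the results table can also be read straight from the SQLite file into pandas; a minimal sketch using the same table and column names:

import sqlite3
import pandas as pd

conn = sqlite3.connect('/home/coder/results/20250722_201930.pt_backtest_results.crypto.db')
df = pd.read_sql_query(
    "select date, close_time, pair, symbol, symbol_return, pair_return from pt_bt_results",
    conn,
)
print(df.groupby("date")["symbol_return"].sum())  # daily returns, as in the queries above
conn.close()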
@ -1,17 +1,15 @@
 #!/usr/bin/env python3
 
-import argparse
-from ast import Sub
 import asyncio
-from functools import partial
 import json
-import logging
 import uuid
 from dataclasses import dataclass
-from typing import Callable, Coroutine, Dict, List, Optional
+from functools import partial
+from typing import Callable, Coroutine, Dict, Optional
 
-from numpy.strings import str_len
 import websockets
+from cvttpy_tools.settings.cvtt_types import JsonDictT
+from cvttpy_tools.tools.logger import Log
 from websockets.asyncio.client import ClientConnection
 
 MessageTypeT = str
@ -48,22 +46,56 @@ class CvttPricesSubscription:
         self.is_subscribed_ = False
         self.is_historical_ = history_depth_sec > 0
 
 
-class CvttPricerWebSockClient:
-    # Class members with type hints
+class CvttWebSockClient:
     ws_url_: UrlT
     websocket_: Optional[ClientConnection]
-    subscriptions_: Dict[SubscriptionIdT, CvttPricesSubscription]
     is_connected_: bool
-    logger_: logging.Logger
 
     def __init__(self, url: str):
        self.ws_url_ = url
        self.websocket_ = None
        self.is_connected_ = False
+
+    async def connect(self) -> None:
+        self.websocket_ = await websockets.connect(self.ws_url_)
+        self.is_connected_ = True
+
+    async def close(self) -> None:
+        if self.websocket_ is not None:
+            await self.websocket_.close()
+        self.is_connected_ = False
+
+    async def receive_message(self) -> JsonDictT:
+        assert self.websocket_ is not None
+        assert self.is_connected_
+        message = await self.websocket_.recv()
+        message_str = (
+            message.decode("utf-8")
+            if isinstance(message, bytes)
+            else message
+        )
+        res = json.loads(message_str)
+        assert res is not None
+        assert isinstance(res, dict)
+        return res
+
+    @classmethod
+    async def check_connection(cls, url: str) -> bool:
+        try:
+            async with websockets.connect(url) as websocket:
+                result = True
+        except Exception as e:
+            Log.error(f"Unable to connect to {url}: {str(e)}")
+            result = False
+        return result
+
+
+class CvttPricerWebSockClient(CvttWebSockClient):
+    # Class members with type hints
+    subscriptions_: Dict[SubscriptionIdT, CvttPricesSubscription]
+
+    def __init__(self, url: str):
+        super().__init__(url)
         self.subscriptions_ = {}
-        self.logger_ = logging.getLogger(__name__)
-        logging.basicConfig(level=logging.INFO)
 
     async def subscribe(
         self, subscription: CvttPricesSubscription
@ -71,11 +103,10 @@ class CvttPricerWebSockClient:
 
         if not self.is_connected_:
             try:
-                self.logger_.info(f"Connecting to {self.ws_url_}")
-                self.websocket_ = await websockets.connect(self.ws_url_)
-                self.is_connected_ = True
+                Log.info(f"Connecting to {self.ws_url_}")
+                await self.connect()
             except Exception as e:
-                self.logger_.error(f"Unable to connect to {self.ws_url_}: {str(e)}")
+                Log.error(f"Unable to connect to {self.ws_url_}: {str(e)}")
                 raise e
 
         subscr_msg = {
@ -109,10 +140,10 @@ class CvttPricerWebSockClient:
             return False
 
         if response.get("status") == "success":
-            self.logger_.info(f"Subscription successful: {json.dumps(response)}")
+            Log.info(f"Subscription successful: {json.dumps(response)}")
             return True
         elif response.get("status") == "error":
-            self.logger_.error(f"Subscription failed: {response.get('reason')}")
+            Log.error(f"Subscription failed: {response.get('reason')}")
             return False
         return False
 
@ -121,19 +152,20 @@ class CvttPricerWebSockClient:
         try:
             while self.is_connected_:
                 try:
-                    message = await self.websocket_.recv()
-                    message_str = (
-                        message.decode("utf-8")
-                        if isinstance(message, bytes)
-                        else message
-                    )
-                    await self.process_message(json.loads(message_str))
+                    msg_dict: JsonDictT = await self.receive_message()
                 except websockets.ConnectionClosed:
-                    self.logger_.warning("Connection closed")
+                    Log.warning("Connection closed")
                     self.is_connected_ = False
                     break
+                except Exception as e:
+                    Log.error(f"Error occurred: {str(e)}")
+                    self.is_connected_ = False
+                    await asyncio.sleep(5)  # Wait before reconnecting
+
+                await self.process_message(msg_dict)
 
         except Exception as e:
-            self.logger_.error(f"Error occurred: {str(e)}")
+            Log.error(f"Error occurred: {str(e)}")
             self.is_connected_ = False
             await asyncio.sleep(5)  # Wait before reconnecting
@ -142,13 +174,13 @@ class CvttPricerWebSockClient:
         if message_type in ["md_aggregate", "historical_md_aggregate"]:
             subscription_id = message.get("subscr_id")
             if subscription_id not in self.subscriptions_:
-                self.logger_.warning(f"Unknown subscription id: {subscription_id}")
+                Log.warning(f"Unknown subscription id: {subscription_id}")
                 return
 
             subscription = self.subscriptions_[subscription_id]
             await subscription.callback_(message_type, subscription_id, message)
         else:
-            self.logger_.warning(f"Unknown message type: {message.get('type')}")
+            Log.warning(f"Unknown message type: {message.get('type')}")
 
 
 async def main() -> None:
@ -156,10 +188,10 @@ async def main() -> None:
     print(f"{message_type=} {subscr_id=} {instrument_id}")
     if message_type == "md_aggregate":
         aggr = message.get("md_aggregate", [])
-        print(f"[{aggr['tstmp'][:19]}] *** RLTM *** {message}")
+        print(f"[{aggr['tstamp'][:19]}] *** RLTM *** {message}")
     elif message_type == "historical_md_aggregate":
         for aggr in message.get("historical_data", []):
-            print(f"[{aggr['tstmp'][:19]}] *** HIST *** {aggr}")
+            print(f"[{aggr['tstamp'][:19]}] *** HIST *** {aggr}")
     else:
         print(f"Unknown message type: {message_type}")
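The new CvttWebSockClient.check_connection classmethod gives callers a cheap pre-flight test before subscribing; used roughly like this (the URL is a placeholder):

import asyncio

# returns True if a websocket handshake to the pricer succeeds, False otherwise
ok = asyncio.run(CvttPricerWebSockClient.check_connection("ws://localhost:12346/ws"))
print("pricer reachable:", ok)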
346
lib/pt_strategy/live/live_strategy.py
Normal file
@ -0,0 +1,346 @@
from __future__ import annotations

from dataclasses import dataclass
from enum import Enum  # required by TradingInstructionType; missing from the original imports
from typing import Any, Dict, List, Optional

import pandas as pd
from cvttpy_tools.settings.cvtt_types import JsonDictT
from cvttpy_tools.tools.base import NamedObject
from cvttpy_tools.tools.logger import Log
from pt_strategy.live.ti_sender import TradingInstructionsSender
from pt_strategy.model_data_policy import ModelDataPolicy
from pt_strategy.pt_market_data import RealTimeMarketData
from pt_strategy.pt_model import Prediction
from pt_strategy.trading_pair import PairState, TradingPair

"""
--config=pair.cfg
--pair=PAIR-BTC-USDT:COINBASE_AT,PAIR-ETH-USDT:COINBASE_AT
"""


class TradingInstructionType(Enum):
    TARGET_POSITION = "TARGET_POSITION"


@dataclass
class TradingInstruction(NamedObject):
    type_: TradingInstructionType
    exch_instr_: "ExchangeInstrument"  # ExchangeInstrument import is not shown in this diff
    specifics_: Dict[str, Any]


class PtLiveStrategy(NamedObject):
    config_: Dict[str, Any]
    trading_pair_: TradingPair
    model_data_policy_: ModelDataPolicy
    pt_mkt_data_: RealTimeMarketData
    ti_sender_: TradingInstructionsSender

    # for presentation: history of prediction values and trading signals
    predictions_: pd.DataFrame
    trading_signals_: pd.DataFrame

    def __init__(
        self,
        config: Dict[str, Any],
        instruments: List[Dict[str, str]],
        ti_sender: TradingInstructionsSender,
    ):
        self.config_ = config
        self.trading_pair_ = TradingPair(config=config, instruments=instruments)
        self.predictions_ = pd.DataFrame()
        self.trading_signals_ = pd.DataFrame()
        self.ti_sender_ = ti_sender

        import copy

        # modified config must be passed to PtMarketData
        config_copy = copy.deepcopy(config)
        config_copy["instruments"] = instruments
        self.pt_mkt_data_ = RealTimeMarketData(config=config_copy)
        self.model_data_policy_ = ModelDataPolicy.create(
            config, is_real_time=True, pair=self.trading_pair_
        )
        self.open_threshold_ = self.config_.get("dis-equilibrium_open_trshld", 0.0)
        assert self.open_threshold_ > 0, "open_threshold must be greater than 0"
        self.close_threshold_ = self.config_.get("dis-equilibrium_close_trshld", 0.0)
        assert self.close_threshold_ > 0, "close_threshold must be greater than 0"

    def __repr__(self) -> str:
        return f"{self.classname()}: trading_pair={self.trading_pair_}, mdp={self.model_data_policy_.__class__.__name__}"

    async def on_mkt_data_hist_snapshot(self, aggr: JsonDictT) -> None:
        Log.info(f"on_mkt_data_hist_snapshot: {aggr}")
        await self.pt_mkt_data_.on_mkt_data_hist_snapshot(snapshot=aggr)

    async def on_mkt_data_update(self, aggr: JsonDictT) -> None:
        market_data_df = await self.pt_mkt_data_.on_mkt_data_update(update=aggr)
        if market_data_df is not None:
            self.trading_pair_.market_data_ = market_data_df
            # advance the data policy once and reuse its window parameters
            # (the original called advance() twice, skipping a window)
            data_params = self.model_data_policy_.advance()
            prediction = self.trading_pair_.run(market_data_df, data_params)
            self.predictions_ = pd.concat(
                [self.predictions_, prediction.to_df()], ignore_index=True
            )

            trading_instructions: List[TradingInstruction] = (
                self._create_trading_instructions(
                    prediction=prediction, last_row=market_data_df.iloc[-1]
                )
            )
            if len(trading_instructions) > 0:
                await self._send_trading_instructions(trading_instructions)
            # trades = self._create_trades(prediction=prediction, last_row=market_data_df.iloc[-1])
            # URGENT implement this

    async def _send_trading_instructions(
        self, trading_instructions: List[TradingInstruction]
    ) -> None:
        pass

    def _create_trading_instructions(
        self, prediction: Prediction, last_row: pd.Series
    ) -> List[TradingInstruction]:
        pair = self.trading_pair_
        trd_instructions: List[TradingInstruction] = []

        scaled_disequilibrium = prediction.scaled_disequilibrium_
        abs_scaled_disequilibrium = abs(scaled_disequilibrium)

        if pair.is_closed():
            if abs_scaled_disequilibrium >= self.open_threshold_:
                trd_instructions = self._create_open_trade_instructions(
                    pair, row=last_row, prediction=prediction
                )
        elif pair.is_open():
            if abs_scaled_disequilibrium <= self.close_threshold_:
                trd_instructions = self._create_close_trade_instructions(
                    pair, row=last_row, prediction=prediction
                )
            elif pair.to_stop_close_conditions(predicted_row=last_row):
                trd_instructions = self._create_close_trade_instructions(
                    pair, row=last_row
                )

        return trd_instructions

    def _create_open_trade_instructions(
        self, pair: TradingPair, row: pd.Series, prediction: Prediction
    ) -> List[TradingInstruction]:
        # NOTE: completed to mirror _create_open_trades below; the original body
        # mixed in a copy of the trades-DataFrame code and referenced undefined
        # names. get_instrument_b() is assumed to exist alongside get_instrument_a().
        scaled_disequilibrium = prediction.scaled_disequilibrium_

        # positive disequilibrium -> sell A / buy B, otherwise the reverse
        if scaled_disequilibrium > 0:
            side_a, side_b = "SELL", "BUY"
        else:
            side_a, side_b = "BUY", "SELL"

        # save opening/closing sides (used in outstanding positions and closing trades)
        pair.user_data_["open_side_a"] = side_a
        pair.user_data_["open_side_b"] = side_b
        pair.user_data_["close_side_a"] = side_b
        pair.user_data_["close_side_b"] = side_a

        return [
            TradingInstruction(
                type_=TradingInstructionType.TARGET_POSITION,
                exch_instr_=pair.get_instrument_a(),
                specifics_={"side": side_a, "strength": -1 if side_a == "SELL" else 1},
            ),
            TradingInstruction(
                type_=TradingInstructionType.TARGET_POSITION,
                exch_instr_=pair.get_instrument_b(),  # assumed counterpart of get_instrument_a()
                specifics_={"side": side_b, "strength": -1 if side_b == "SELL" else 1},
            ),
        ]

    def _handle_outstanding_positions(self) -> Optional[pd.DataFrame]:
        trades = None
        pair = self.trading_pair_

        # Outstanding positions
        if pair.user_data_["state"] == PairState.OPEN:
            print(f"{pair}: *** Position is NOT CLOSED. ***")
            # outstanding positions
            if self.config_["close_outstanding_positions"]:
                close_position_row = pd.Series(pair.market_data_.iloc[-2])
                # close_position_row["disequilibrium"] = 0.0
                # close_position_row["scaled_disequilibrium"] = 0.0
                # close_position_row["signed_scaled_disequilibrium"] = 0.0

                trades = self._create_close_trades(
                    pair=pair, row=close_position_row, prediction=None
                )
                if trades is not None:
                    trades["status"] = PairState.CLOSE_POSITION.name
                    print(f"CLOSE_POSITION TRADES:\n{trades}")
                    pair.user_data_["state"] = PairState.CLOSE_POSITION
                    pair.on_close_trades(trades)
            else:
                pair.add_outstanding_position(
                    symbol=pair.symbol_a_,
                    open_side=pair.user_data_["open_side_a"],
                    open_px=pair.user_data_["open_px_a"],
                    open_tstamp=pair.user_data_["open_tstamp"],
                    last_mkt_data_row=pair.market_data_.iloc[-1],
                )
                pair.add_outstanding_position(
                    symbol=pair.symbol_b_,
                    open_side=pair.user_data_["open_side_b"],
                    open_px=pair.user_data_["open_px_b"],
                    open_tstamp=pair.user_data_["open_tstamp"],
                    last_mkt_data_row=pair.market_data_.iloc[-1],
                )
        return trades

    def _trades_df(self) -> pd.DataFrame:
        types = {
            "time": "datetime64[ns]",
            "action": "string",
            "symbol": "string",
            "side": "string",
            "price": "float64",
            "disequilibrium": "float64",
            "scaled_disequilibrium": "float64",
            "signed_scaled_disequilibrium": "float64",
            # "pair": "object",
        }
        columns = list(types.keys())
        return pd.DataFrame(columns=columns).astype(types)

    def _create_open_trades(
        self, pair: TradingPair, row: pd.Series, prediction: Prediction
    ) -> Optional[pd.DataFrame]:
        colname_a, colname_b = pair.exec_prices_colnames()

        tstamp = row["tstamp"]
        diseqlbrm = prediction.disequilibrium_
        scaled_disequilibrium = prediction.scaled_disequilibrium_
        px_a = row[f"{colname_a}"]
        px_b = row[f"{colname_b}"]

        # creating the trades
        df = self._trades_df()

        print(f"OPEN_TRADES: {row['tstamp']} {scaled_disequilibrium=}")
        if diseqlbrm > 0:
            side_a = "SELL"
            side_b = "BUY"
        else:
            side_a = "BUY"
            side_b = "SELL"

        # save closing sides
        pair.user_data_["open_side_a"] = side_a  # used in outstanding positions
        pair.user_data_["open_side_b"] = side_b
        pair.user_data_["open_px_a"] = px_a
        pair.user_data_["open_px_b"] = px_b
        pair.user_data_["open_tstamp"] = tstamp

        pair.user_data_["close_side_a"] = side_b  # used for closing trades
        pair.user_data_["close_side_b"] = side_a

        # create opening trades
        df.loc[len(df)] = {
            "time": tstamp,
            "symbol": pair.symbol_a_,
            "side": side_a,
            "action": "OPEN",
            "price": px_a,
            "disequilibrium": diseqlbrm,
            "signed_scaled_disequilibrium": scaled_disequilibrium,
            "scaled_disequilibrium": abs(scaled_disequilibrium),
            # "pair": pair,
        }
        df.loc[len(df)] = {
            "time": tstamp,
            "symbol": pair.symbol_b_,
            "side": side_b,
            "action": "OPEN",
            "price": px_b,
            "disequilibrium": diseqlbrm,
            "scaled_disequilibrium": abs(scaled_disequilibrium),
            "signed_scaled_disequilibrium": scaled_disequilibrium,
            # "pair": pair,
        }
        return df

    def _create_close_trades(
        self, pair: TradingPair, row: pd.Series, prediction: Optional[Prediction] = None
    ) -> Optional[pd.DataFrame]:
        colname_a, colname_b = pair.exec_prices_colnames()

        tstamp = row["tstamp"]
        if prediction is not None:
            diseqlbrm = prediction.disequilibrium_
            signed_scaled_disequilibrium = prediction.scaled_disequilibrium_
            scaled_disequilibrium = abs(prediction.scaled_disequilibrium_)
        else:
            diseqlbrm = 0.0
            signed_scaled_disequilibrium = 0.0
            scaled_disequilibrium = 0.0
        px_a = row[f"{colname_a}"]
        px_b = row[f"{colname_b}"]

        # creating the trades
        df = self._trades_df()

        # create closing trades
        df.loc[len(df)] = {
            "time": tstamp,
            "symbol": pair.symbol_a_,
            "side": pair.user_data_["close_side_a"],
            "action": "CLOSE",
            "price": px_a,
            "disequilibrium": diseqlbrm,
            "scaled_disequilibrium": scaled_disequilibrium,
            "signed_scaled_disequilibrium": signed_scaled_disequilibrium,
            # "pair": pair,
        }
        df.loc[len(df)] = {
            "time": tstamp,
            "symbol": pair.symbol_b_,
            "side": pair.user_data_["close_side_b"],
            "action": "CLOSE",
            "price": px_b,
            "disequilibrium": diseqlbrm,
            "scaled_disequilibrium": scaled_disequilibrium,
            "signed_scaled_disequilibrium": signed_scaled_disequilibrium,
            # "pair": pair,
        }
        del pair.user_data_["close_side_a"]
        del pair.user_data_["close_side_b"]

        del pair.user_data_["open_tstamp"]
        del pair.user_data_["open_px_a"]
        del pair.user_data_["open_px_b"]
        del pair.user_data_["open_side_a"]
        del pair.user_data_["open_side_b"]
        return df
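The open/close logic in _create_trading_instructions is a simple hysteresis on the absolute scaled disequilibrium: open above one threshold, close below a lower one. A self-contained sketch of just that decision rule, using the threshold values from the configs above:

def pair_signal(abs_scaled_diseq: float, is_open: bool,
                open_trshld: float = 1.75, close_trshld: float = 0.9) -> str:
    # thresholds mirror "dis-equilibrium_open_trshld" / "dis-equilibrium_close_trshld"
    if not is_open and abs_scaled_diseq >= open_trshld:
        return "OPEN"
    if is_open and abs_scaled_diseq <= close_trshld:
        return "CLOSE"
    return "HOLD"

assert pair_signal(2.1, is_open=False) == "OPEN"
assert pair_signal(0.5, is_open=True) == "CLOSE"
assert pair_signal(1.2, is_open=True) == "HOLD"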
85
lib/pt_strategy/live/pricer_md_client.py
Normal file
@ -0,0 +1,85 @@
from __future__ import annotations

from functools import partial
from typing import Dict, List

from cvtt_client.mkt_data import (CvttPricerWebSockClient,
                                  CvttPricesSubscription, MessageTypeT,
                                  SubscriptionIdT)
from cvttpy_tools.settings.cvtt_types import JsonDictT
from cvttpy_tools.tools.app import App
from cvttpy_tools.tools.base import NamedObject
from cvttpy_tools.tools.config import Config
from cvttpy_tools.tools.logger import Log
from pt_strategy.live.live_strategy import PtLiveStrategy
from pt_strategy.trading_pair import TradingPair

"""
--config=pair.cfg
--pair=PAIR-BTC-USDT:COINBASE_AT,PAIR-ETH-USDT:COINBASE_AT
"""


class PtMktDataClient(NamedObject):
    config_: Config
    live_strategy_: PtLiveStrategy
    pricer_client_: CvttPricerWebSockClient
    subscriptions_: List[CvttPricesSubscription]

    def __init__(self, live_strategy: PtLiveStrategy, pricer_config: Config):
        self.config_ = pricer_config
        self.live_strategy_ = live_strategy

        App.instance().add_call(App.Stage.Start, self._on_start())
        App.instance().add_call(App.Stage.Run, self.run())

    async def _on_start(self) -> None:
        pricer_url = self.config_.get_value("pricer_url")
        assert pricer_url is not None, "pricer_url is not found in config"
        self.pricer_client_ = CvttPricerWebSockClient(url=pricer_url)

    async def _subscribe(self) -> None:
        history_depth_sec = self.config_.get_value("history_depth_sec", 86400)
        interval_sec = self.config_.get_value("interval_sec", 60)

        pair: TradingPair = self.live_strategy_.trading_pair_
        subscriptions = [CvttPricesSubscription(
            exchange_config_name=instrument["exchange_config_name"],
            instrument_id=instrument["instrument_id"],
            interval_sec=interval_sec,
            history_depth_sec=history_depth_sec,
            callback=partial(
                self.on_message, instrument_id=instrument["instrument_id"]
            ),
        ) for instrument in pair.instruments_]

        for subscription in subscriptions:
            Log.info(f"{self.fname()} Subscribing to {subscription}")
            await self.pricer_client_.subscribe(subscription)

    async def on_message(
        self,
        message_type: MessageTypeT,
        subscr_id: SubscriptionIdT,
        message: Dict,
        instrument_id: str,
    ) -> None:
        Log.info(f"{self.fname()}: {message_type=} {subscr_id=} {instrument_id}")
        aggr: JsonDictT
        if message_type == "md_aggregate":
            aggr = message.get("md_aggregate", {})
            await self.live_strategy_.on_mkt_data_update(aggr)
        elif message_type == "historical_md_aggregate":
            aggr = message.get("historical_data", {})
            await self.live_strategy_.on_mkt_data_hist_snapshot(aggr)
        else:
            Log.info(f"Unknown message type: {message_type}")

    async def run(self) -> None:
        if not await CvttPricerWebSockClient.check_connection(self.pricer_client_.ws_url_):
            Log.error(f"Unable to connect to {self.pricer_client_.ws_url_}")
            raise Exception(f"Unable to connect to {self.pricer_client_.ws_url_}")
        await self._subscribe()

        await self.pricer_client_.run()
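The callback wiring above relies on functools.partial to bind instrument_id at subscription time while leaving the message arguments open; a standalone illustration of that pattern:

from functools import partial

async def on_message(message_type, subscr_id, message, instrument_id):
    print(instrument_id, message_type)

# binds instrument_id now; message_type/subscr_id/message are supplied on dispatch
cb = partial(on_message, instrument_id="PAIR-ADA-USDT")
# later, inside the client: await cb("md_aggregate", "sub-1", {"md_aggregate": {}})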
86
lib/pt_strategy/live/ti_sender.py
Normal file
@ -0,0 +1,86 @@
import time
from enum import Enum
from typing import Tuple

# import aiohttp
from cvttpy_tools.tools.app import App
from cvttpy_tools.tools.base import NamedObject
from cvttpy_tools.tools.config import Config
from cvttpy_tools.tools.logger import Log
from cvttpy_tools.tools.timer import Timer
from cvttpy_tools.tools.timeutils import NanoPerSec
from cvttpy_tools.tools.web.rest_client import REST_RequestProcessor


class TradingInstructionsSender(NamedObject):

    class TradingInstType(str, Enum):
        TARGET_POSITION = "TARGET_POSITION"
        DIRECT_ORDER = "DIRECT_ORDER"
        MARKET_MAKING = "MARKET_MAKING"
        NONE = "NONE"

    config_: Config
    ti_method_: str
    ti_url_: str
    health_check_method_: str
    health_check_url_: str

    def __init__(self, config: Config):
        self.config_ = config
        # key name matches the ti_config block in configuration/pairs_trader.cfg
        # (the original read "url" with a ws:// default, which no config defines)
        base_url = config.get_value("cvtt_base_url", "http://localhost:23456")

        self.book_id_ = config.get_value("book_id", "")
        assert self.book_id_, "book_id is required"

        self.strategy_id_ = config.get_value("strategy_id", "")
        assert self.strategy_id_, "strategy_id is required"

        endpoint_uri = config.get_value("ti_endpoint/url", "/trading_instructions")
        endpoint_method = config.get_value("ti_endpoint/method", "POST")

        health_check_uri = config.get_value("health_check_endpoint/url", "/ping")
        health_check_method = config.get_value("health_check_endpoint/method", "GET")

        self.ti_method_ = endpoint_method
        self.ti_url_ = f"{base_url}{endpoint_uri}"

        self.health_check_method_ = health_check_method
        self.health_check_url_ = f"{base_url}{health_check_uri}"

        App.instance().add_call(App.Stage.Start, self._set_health_check_timer(), can_run_now=True)

    async def _set_health_check_timer(self) -> None:
        # TODO: configurable interval
        self.health_check_timer_ = Timer(is_periodic=True, period_interval=15, start_in_sec=0, func=self._health_check)
        Log.info(f"{self.fname()} Health check timer set to 15 seconds")

    async def _health_check(self) -> None:
        rqst = REST_RequestProcessor(method=self.health_check_method_, url=self.health_check_url_)
        async with rqst as (status, msg, headers):
            if status != 200:
                Log.error(f"{self.fname()} CVTT Service is not responding")

    async def send_tgt_positions(self, strength: float, base_asset: str, quote_asset: str) -> Tuple[int, str]:
        instr = {
            "type": self.TradingInstType.TARGET_POSITION.value,
            "book_id": self.book_id_,
            "strategy_id": self.strategy_id_,
            "issued_ts_ns": int(time.time() * NanoPerSec),
            "data": {
                "strength": strength,
                "base_asset": base_asset,
                "quote_asset": quote_asset,
                "user_data": {},
            },
        }

        rqst = REST_RequestProcessor(method=self.ti_method_, url=self.ti_url_, params=instr)
        async with rqst as (status, msg, headers):
            if status != 200:
                raise ConnectionError(f"Failed to send trading instructions: {msg}")
        return (status, msg)
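For reference, the TARGET_POSITION payload built in send_tgt_positions serializes to JSON roughly as below; the book/strategy ids are the placeholders from the config, and the NanoPerSec value is an assumption about cvttpy_tools.tools.timeutils:

import json
import time

NanoPerSec = 1_000_000_000  # assumed value of cvttpy_tools.tools.timeutils.NanoPerSec
instr = {
    "type": "TARGET_POSITION",
    "book_id": "XXXXXXXXX",
    "strategy_id": "XXXXXXXXX",
    "issued_ts_ns": int(time.time() * NanoPerSec),
    "data": {"strength": -1.0, "base_asset": "ADA", "quote_asset": "USDT", "user_data": {}},
}
print(json.dumps(instr, indent=2))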
257
lib/pt_strategy/model_data_policy.py
Normal file
@ -0,0 +1,257 @@
from __future__ import annotations

import copy
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Any, Dict, Optional, cast

import numpy as np
import pandas as pd


@dataclass
class DataWindowParams:
    training_size: int
    training_start_index: int


class ModelDataPolicy(ABC):
    config_: Dict[str, Any]
    current_data_params_: DataWindowParams
    count_: int
    is_real_time_: bool

    def __init__(self, config: Dict[str, Any], *args: Any, **kwargs: Any):
        self.config_ = config
        # real-time runs always start from a fixed 120-bar window;
        # backtests take the window size from the config
        if kwargs.get("is_real_time", False):
            training_size = 120
        else:
            training_size = config.get("training_size", 120)
        self.current_data_params_ = DataWindowParams(
            training_size=training_size,
            training_start_index=0,
        )
        self.count_ = 0
        self.is_real_time_ = kwargs.get("is_real_time", False)

    @abstractmethod
    def advance(self, mkt_data_df: Optional[pd.DataFrame] = None) -> DataWindowParams:
        self.count_ += 1
        print(self.count_, end="\r")
        return self.current_data_params_

    @staticmethod
    def create(config: Dict[str, Any], *args: Any, **kwargs: Any) -> ModelDataPolicy:
        import importlib

        model_data_policy_class_name = config.get("model_data_policy_class", None)
        assert model_data_policy_class_name is not None
        module_name, class_name = model_data_policy_class_name.rsplit(".", 1)
        module = importlib.import_module(module_name)
        model_training_data_policy_object = getattr(module, class_name)(
            config=config, *args, **kwargs
        )
        return cast(ModelDataPolicy, model_training_data_policy_object)


class RollingWindowDataPolicy(ModelDataPolicy):
    def __init__(self, config: Dict[str, Any], *args: Any, **kwargs: Any):
        super().__init__(config, *args, **kwargs)
        self.count_ = 1

    def advance(self, mkt_data_df: Optional[pd.DataFrame] = None) -> DataWindowParams:
        super().advance(mkt_data_df)
        if self.is_real_time_:
            self.current_data_params_.training_start_index = -self.current_data_params_.training_size
        else:
            self.current_data_params_.training_start_index += 1
        return self.current_data_params_


class OptimizedWndDataPolicy(ModelDataPolicy, ABC):
    mkt_data_df_: pd.DataFrame
    pair_: TradingPair  # type: ignore  # imported lazily below to avoid a circular import
    min_training_size_: int
    max_training_size_: int
    end_index_: int
    prices_a_: np.ndarray
    prices_b_: np.ndarray

    def __init__(self, config: Dict[str, Any], *args: Any, **kwargs: Any):
        super().__init__(config, *args, **kwargs)
        assert kwargs.get("pair") is not None, "pair must be provided"
        assert (
            "min_training_size" in config and "max_training_size" in config
        ), "min_training_size and max_training_size must be provided"
        self.min_training_size_ = cast(int, config.get("min_training_size"))
        self.max_training_size_ = cast(int, config.get("max_training_size"))
        assert self.min_training_size_ < self.max_training_size_

        from pt_strategy.trading_pair import TradingPair
        self.pair_ = cast(TradingPair, kwargs.get("pair"))

        if "mkt_data" in kwargs:
            self.mkt_data_df_ = cast(pd.DataFrame, kwargs.get("mkt_data"))
            col_a, col_b = self.pair_.colnames()
            self.prices_a_ = np.array(self.mkt_data_df_[col_a])
            self.prices_b_ = np.array(self.mkt_data_df_[col_b])

    def advance(self, mkt_data_df: Optional[pd.DataFrame] = None) -> DataWindowParams:
        super().advance(mkt_data_df)
        if mkt_data_df is not None:
            self.mkt_data_df_ = mkt_data_df

        if self.is_real_time_:
            self.end_index_ = len(self.mkt_data_df_) - 1
        else:
            self.end_index_ = self.current_data_params_.training_start_index + self.max_training_size_
            if self.end_index_ > len(self.mkt_data_df_) - 1:
                self.end_index_ = len(self.mkt_data_df_) - 1
            self.current_data_params_.training_start_index = self.end_index_ - self.max_training_size_
            if self.current_data_params_.training_start_index < 0:
                self.current_data_params_.training_start_index = 0

        col_a, col_b = self.pair_.colnames()
        self.prices_a_ = np.array(self.mkt_data_df_[col_a])
        self.prices_b_ = np.array(self.mkt_data_df_[col_b])

        self.current_data_params_ = self.optimize_window_size()
        return self.current_data_params_

    @abstractmethod
    def optimize_window_size(self) -> DataWindowParams:
        ...


class EGOptimizedWndDataPolicy(OptimizedWndDataPolicy):
    '''
    # Engle-Granger cointegration test
    *** VERY SLOW ***
    '''
    def __init__(self, config: Dict[str, Any], *args: Any, **kwargs: Any):
        super().__init__(config, *args, **kwargs)

    def optimize_window_size(self) -> DataWindowParams:
        from statsmodels.tsa.stattools import coint  # type: ignore

        # Run the Engle-Granger cointegration test over candidate window sizes
        last_pvalue = 1.0
        result = copy.copy(self.current_data_params_)
        for trn_size in range(self.min_training_size_, self.max_training_size_):
            if self.end_index_ - trn_size < 0:
                break
            start_index = self.end_index_ - trn_size
            series_a = self.prices_a_[start_index : self.end_index_]
            series_b = self.prices_b_[start_index : self.end_index_]
            eg_pvalue = float(coint(series_a, series_b)[1])
            if eg_pvalue < last_pvalue:
                last_pvalue = eg_pvalue
                result.training_size = trn_size
                result.training_start_index = start_index

        # print(f"*** DEBUG *** end_index={self.end_index_}, best_trn_size={self.current_data_params_.training_size}, {last_pvalue=}")
        return result


class ADFOptimizedWndDataPolicy(OptimizedWndDataPolicy):
    # Augmented Dickey-Fuller test
    def __init__(self, config: Dict[str, Any], *args: Any, **kwargs: Any):
        super().__init__(config, *args, **kwargs)

    def optimize_window_size(self) -> DataWindowParams:
        from statsmodels.regression.linear_model import OLS
        from statsmodels.tools.tools import add_constant
        from statsmodels.tsa.stattools import adfuller

        last_pvalue = 1.0
        result = copy.copy(self.current_data_params_)
        for trn_size in range(self.min_training_size_, self.max_training_size_):
            if self.end_index_ - trn_size < 0:
                break
            start_index = self.end_index_ - trn_size
            y = self.prices_a_[start_index : self.end_index_]
            x = self.prices_b_[start_index : self.end_index_]

            # Add constant to x for intercept
            x_with_const = add_constant(x)

            # OLS regression: y = a + b*x + e
            model = OLS(y, x_with_const).fit()
            residuals = y - model.predict(x_with_const)

            # ADF test on residuals
            try:
                adf_result = adfuller(residuals, maxlag=1, regression="c")
                adf_pvalue = float(adf_result[1])
            except Exception:
                # Handle edge cases (e.g., constant series)
                adf_pvalue = 1.0

            if adf_pvalue < last_pvalue:
                last_pvalue = adf_pvalue
                result.training_size = trn_size
                result.training_start_index = start_index

        # print(f"*** DEBUG *** end_index={self.end_index_}, best_trn_size={self.current_data_params_.training_size}, {last_pvalue=}")
        return result


class JohansenOptdWndDataPolicy(OptimizedWndDataPolicy):
    # Johansen test
    def __init__(self, config: Dict[str, Any], *args: Any, **kwargs: Any):
        super().__init__(config, *args, **kwargs)

    def optimize_window_size(self) -> DataWindowParams:
        from statsmodels.tsa.vector_ar.vecm import coint_johansen

        best_stat = -np.inf
        best_trn_size = 0
        best_start_index = -1

        result = copy.copy(self.current_data_params_)
        for trn_size in range(self.min_training_size_, self.max_training_size_):
            if self.end_index_ - trn_size < 0:
                break
            start_index = self.end_index_ - trn_size
            series_a = self.prices_a_[start_index:self.end_index_]
            series_b = self.prices_b_[start_index:self.end_index_]

            # Combine into a 2D matrix for the Johansen test
            try:
                data = np.column_stack([series_a, series_b])

                # Johansen test: det_order=0 (no deterministic trend), k_ar_diff=1 (lag)
                res = coint_johansen(data, det_order=0, k_ar_diff=1)

                # Trace statistic for cointegration rank 1
                trace_stat = res.lr1[0]  # test stat for rank=0 vs >=1
                critical_value = res.cvt[0, 1]  # 5% critical value (currently unused)

                if trace_stat > best_stat:
                    best_stat = trace_stat
                    best_trn_size = trn_size
                    best_start_index = start_index
            except Exception:
                continue

        if best_trn_size > 0:
            result.training_size = best_trn_size
            result.training_start_index = best_start_index
        else:
            print("*** WARNING: No valid cointegration window found.")

        # print(f"*** DEBUG *** end_index={self.end_index_}, best_trn_size={best_trn_size}, trace_stat={best_stat}")
        return result
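ModelDataPolicy.create uses the dotted-path plug-in pattern: rsplit the class path, import the module, getattr the class. A standalone illustration with a stdlib class so it runs anywhere:

import importlib

# stands in for e.g. "pt_strategy.model_data_policy.RollingWindowDataPolicy"
class_path = "collections.OrderedDict"
module_name, class_name = class_path.rsplit(".", 1)
cls = getattr(importlib.import_module(module_name), class_name)
obj = cls()
print(type(obj).__name__)  # -> OrderedDict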
104
lib/pt_strategy/models.py
Normal file
@ -0,0 +1,104 @@
from __future__ import annotations

from typing import Optional

import pandas as pd
import statsmodels.api as sm

from pt_strategy.pt_model import PairsTradingModel, Prediction
from pt_strategy.trading_pair import TradingPair


class OLSModel(PairsTradingModel):
    model_: Optional[sm.regression.linear_model.RegressionResultsWrapper]
    pair_predict_result_: Optional[pd.DataFrame]
    zscore_df_: Optional[pd.DataFrame]

    def predict(self, pair: TradingPair) -> Prediction:
        self.training_df_ = pair.market_data_.copy()

        zscore_df = self._fit_zscore(pair=pair)
        assert zscore_df is not None

        # the z-score serves as both disequilibrium and scaled_disequilibrium
        self.training_df_["dis-equilibrium"] = zscore_df[0]
        self.training_df_["scaled_dis-equilibrium"] = zscore_df[0]

        return Prediction(
            tstamp=pair.market_data_.iloc[-1]["tstamp"],
            disequilibrium=self.training_df_["dis-equilibrium"].iloc[-1],
            scaled_disequilibrium=self.training_df_["scaled_dis-equilibrium"].iloc[-1],
        )

    def _fit_zscore(self, pair: TradingPair) -> pd.DataFrame:
        assert self.training_df_ is not None
        symbol_a_px_series = self.training_df_[pair.colnames()].iloc[:, 0]
        symbol_b_px_series = self.training_df_[pair.colnames()].iloc[:, 1]

        symbol_a_px_series, symbol_b_px_series = symbol_a_px_series.align(
            symbol_b_px_series, axis=0
        )

        X = sm.add_constant(symbol_b_px_series)
        self.model_ = sm.OLS(symbol_a_px_series, X).fit()
        assert self.model_ is not None

        # an alternate way would be to compute the spread explicitly
        # (gives identical results):
        # alpha, beta = self.model_.params
        # spread = symbol_a_px_series - (alpha + beta * symbol_b_px_series)
        spread = self.model_.resid
        return pd.DataFrame((spread - spread.mean()) / spread.std())


class VECMModel(PairsTradingModel):
    def predict(self, pair: TradingPair) -> Prediction:
        self.training_df_ = pair.market_data_.copy()
        assert self.training_df_ is not None
        vecm_fit = self._fit_VECM(pair=pair)

        assert vecm_fit is not None
        predicted_prices = vecm_fit.predict(steps=1)

        # Convert prediction to a DataFrame for readability
        predicted_df = pd.DataFrame(
            predicted_prices, columns=pd.Index(pair.colnames()), dtype=float
        )

        disequilibrium = (predicted_df[pair.colnames()] @ vecm_fit.beta)[0][0]
        scaled_disequilibrium = (disequilibrium - self.training_mu_) / self.training_std_
        return Prediction(
            tstamp=pair.market_data_.iloc[-1]["tstamp"],
            disequilibrium=disequilibrium,
            scaled_disequilibrium=scaled_disequilibrium,
        )

    def _fit_VECM(self, pair: TradingPair) -> "VECMResults":  # type: ignore
        from statsmodels.tsa.vector_ar.vecm import VECM, VECMResults

        vecm_df = self.training_df_[pair.colnames()].reset_index(drop=True)
        vecm_model = VECM(vecm_df, coint_rank=1)
        vecm_fit = vecm_model.fit()

        assert vecm_fit is not None

        # Check if the model converged properly
        if not hasattr(vecm_fit, "beta") or vecm_fit.beta is None:
            print(f"{self}: VECM model failed to converge properly")

        diseq_series = self.training_df_[pair.colnames()] @ vecm_fit.beta
        self.training_mu_ = float(diseq_series[0].mean())
        self.training_std_ = float(diseq_series[0].std())

        self.training_df_["dis-equilibrium"] = diseq_series
        # Normalize the dis-equilibrium
        self.training_df_["scaled_dis-equilibrium"] = (
            diseq_series - self.training_mu_
        ) / self.training_std_

        return vecm_fit
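The z-scored OLS residual in _fit_zscore is the standard pairs-trading spread; a self-contained version of the same computation on synthetic data:

import numpy as np
import pandas as pd
import statsmodels.api as sm

rng = np.random.default_rng(0)
px_b = pd.Series(100 + np.cumsum(rng.normal(0, 0.1, 300)))
px_a = 1.5 * px_b + rng.normal(0, 0.2, 300)  # cointegrated by construction

model = sm.OLS(px_a, sm.add_constant(px_b)).fit()
spread = model.resid
zscore = (spread - spread.mean()) / spread.std()  # disequilibrium in std-dev units
print(zscore.iloc[-1])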
28
lib/pt_strategy/prediction.py
Normal file
@ -0,0 +1,28 @@
from __future__ import annotations

from typing import Any, Dict

import pandas as pd


class Prediction:
    tstamp_: pd.Timestamp
    disequilibrium_: float
    scaled_disequilibrium_: float

    def __init__(self, tstamp: pd.Timestamp, disequilibrium: float, scaled_disequilibrium: float):
        self.tstamp_ = tstamp
        self.disequilibrium_ = disequilibrium
        self.scaled_disequilibrium_ = scaled_disequilibrium

    def to_dict(self) -> Dict[str, Any]:
        return {
            "tstamp": self.tstamp_,
            "disequilibrium": self.disequilibrium_,
            "signed_scaled_disequilibrium": self.scaled_disequilibrium_,
            "scaled_disequilibrium": abs(self.scaled_disequilibrium_),
            # "pair": self.pair_,
        }

    def to_df(self) -> pd.DataFrame:
        return pd.DataFrame([self.to_dict()])
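Usage is straightforward: one Prediction per bar, appended to the presentation DataFrame via to_df(); a small sketch, assuming pt_strategy is on PYTHONPATH:

import pandas as pd
from pt_strategy.prediction import Prediction

p = Prediction(
    tstamp=pd.Timestamp("2025-06-05 10:00"),
    disequilibrium=0.42,
    scaled_disequilibrium=-1.8,
)
print(p.to_df())  # one-row frame with signed and absolute scaled disequilibrium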
229
lib/pt_strategy/pt_market_data.py
Normal file
229
lib/pt_strategy/pt_market_data.py
Normal file
@ -0,0 +1,229 @@
|
||||
from __future__ import annotations

from typing import Any, Dict, List, Optional

import pandas as pd
from cvttpy_tools.settings.cvtt_types import JsonDictT
from tools.data_loader import load_market_data


class PtMarketData:
    config_: Dict[str, Any]
    origin_mkt_data_df_: pd.DataFrame
    market_data_df_: pd.DataFrame

    def __init__(self, config: Dict[str, Any]):
        self.config_ = config
        self.origin_mkt_data_df_ = pd.DataFrame()
        self.market_data_df_ = pd.DataFrame()


class ResearchMarketData(PtMarketData):
    current_index_: int
    is_execution_price_: bool

    def __init__(self, config: Dict[str, Any]):
        super().__init__(config)
        self.current_index_ = 0
        self.is_execution_price_ = "execution_price" in self.config_
        if self.is_execution_price_:
            self.execution_price_column_ = self.config_["execution_price"]["column"]
            self.execution_price_shift_ = self.config_["execution_price"]["shift"]
        else:
            self.execution_price_column_ = None
            self.execution_price_shift_ = 0

    def has_next(self) -> bool:
        return self.current_index_ < len(self.market_data_df_)

    def get_next(self) -> pd.Series:
        result = self.market_data_df_.iloc[self.current_index_]
        self.current_index_ += 1
        return result

    def load(self) -> None:
        datafiles: List[str] = self.config_.get("datafiles", [])
        instruments: List[Dict[str, str]] = self.config_.get("instruments", [])
        assert len(instruments) > 0, "No instruments found in config"
        assert len(datafiles) > 0, "No datafiles found in config"
        self.symbol_a_ = instruments[0]["symbol"]
        self.symbol_b_ = instruments[1]["symbol"]
        self.stat_model_price_ = self.config_["stat_model_price"]

        extra_minutes: int = self.execution_price_shift_

        for datafile in datafiles:
            md_df = load_market_data(
                datafile=datafile,
                instruments=instruments,
                db_table_name=self.config_["market_data_loading"][instruments[0]["instrument_type"]]["db_table_name"],
                trading_hours=self.config_["trading_hours"],
                extra_minutes=extra_minutes,
            )
            self.origin_mkt_data_df_ = pd.concat([self.origin_mkt_data_df_, md_df])

        self.origin_mkt_data_df_ = self.origin_mkt_data_df_.sort_values(by="tstamp")
        self.origin_mkt_data_df_ = self.origin_mkt_data_df_.dropna().reset_index(drop=True)
        self._set_market_data()

    def _set_market_data(self) -> None:
        if self.is_execution_price_:
            self.market_data_df_ = pd.DataFrame(
                self._transform_dataframe(self.origin_mkt_data_df_)[
                    ["tstamp"] + self.colnames() + self.orig_exec_prices_colnames()
                ]
            )
        else:
            self.market_data_df_ = pd.DataFrame(
                self._transform_dataframe(self.origin_mkt_data_df_)[["tstamp"] + self.colnames()]
            )

        self.market_data_df_ = self.market_data_df_.dropna().reset_index(drop=True)
        self.market_data_df_["tstamp"] = pd.to_datetime(self.market_data_df_["tstamp"])
        self.market_data_df_ = self.market_data_df_.sort_values("tstamp")
        self._set_execution_price_data()

    def _transform_dataframe(self, df: pd.DataFrame) -> pd.DataFrame:
        df_selected: pd.DataFrame
        if self.is_execution_price_:
            execution_price_column = self.config_["execution_price"]["column"]

            df_selected = pd.DataFrame(
                df[["tstamp", "symbol", self.stat_model_price_, execution_price_column]]
            )
        else:
            df_selected = pd.DataFrame(
                df[["tstamp", "symbol", self.stat_model_price_]]
            )

        result_df = pd.DataFrame(df_selected["tstamp"]).drop_duplicates().reset_index(drop=True)

        # For each unique symbol, add a corresponding stat_model_price column
        symbols = df_selected["symbol"].unique()

        for symbol in symbols:
            # Filter rows for this symbol
            df_symbol = df_selected[df_selected["symbol"] == symbol].reset_index(
                drop=True
            )

            # Create column name like "close_COIN"
            new_price_column = f"{self.stat_model_price_}_{symbol}"
            if self.is_execution_price_:
                new_execution_price_column = f"{self.execution_price_column_}_{symbol}"

                # Create temporary dataframe with timestamp and price
                temp_df = pd.DataFrame(
                    {
                        "tstamp": df_symbol["tstamp"],
                        new_price_column: df_symbol[self.stat_model_price_],
                        new_execution_price_column: df_symbol[execution_price_column],
                    }
                )
            else:
                temp_df = pd.DataFrame(
                    {
                        "tstamp": df_symbol["tstamp"],
                        new_price_column: df_symbol[self.stat_model_price_],
                    }
                )

            # Join with our result dataframe
            result_df = pd.merge(result_df, temp_df, on="tstamp", how="left")
            result_df = result_df.reset_index(
                drop=True
            )  # do not dropna() inside the loop: an irrelevant symbol would shrink the dataset

        return result_df.dropna()

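    # Illustration only (not executed): on clean long-format input the
    # merge-based loop above is roughly a pandas pivot plus a column rename,
    # e.g. assuming stat_model_price == "close" and symbols ADA-USDT / SOL-USDT:
    #
    #   wide = df.pivot(index="tstamp", columns="symbol", values="close")
    #   wide.columns = [f"close_{s}" for s in wide.columns]
    #   wide = wide.reset_index().dropna()
    #
    # The loop additionally carries the configured execution-price column
    # for each symbol when execution prices are enabled.
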
    def _set_execution_price_data(self) -> None:
        if "execution_price" not in self.config_:
            self.market_data_df_[f"exec_price_{self.symbol_a_}"] = self.market_data_df_[f"{self.stat_model_price_}_{self.symbol_a_}"]
            self.market_data_df_[f"exec_price_{self.symbol_b_}"] = self.market_data_df_[f"{self.stat_model_price_}_{self.symbol_b_}"]
            return
        execution_price_column = self.config_["execution_price"]["column"]
        execution_price_shift = self.config_["execution_price"]["shift"]
        self.market_data_df_[f"exec_price_{self.symbol_a_}"] = self.market_data_df_[f"{execution_price_column}_{self.symbol_a_}"].shift(-execution_price_shift)
        self.market_data_df_[f"exec_price_{self.symbol_b_}"] = self.market_data_df_[f"{execution_price_column}_{self.symbol_b_}"].shift(-execution_price_shift)
        self.market_data_df_ = self.market_data_df_.dropna().reset_index(drop=True)

    def colnames(self) -> List[str]:
        return [
            f"{self.stat_model_price_}_{self.symbol_a_}",
            f"{self.stat_model_price_}_{self.symbol_b_}",
        ]

    def orig_exec_prices_colnames(self) -> List[str]:
        return [
            f"{self.execution_price_column_}_{self.symbol_a_}",
            f"{self.execution_price_column_}_{self.symbol_b_}",
        ]

    def exec_prices_colnames(self) -> List[str]:
        return [
            f"exec_price_{self.symbol_a_}",
            f"exec_price_{self.symbol_b_}",
        ]

class RealTimeMarketData(PtMarketData):

    def __init__(self, config: Dict[str, Any], *args: Any, **kwargs: Any):
        # PtMarketData.__init__ accepts only config; extra args are ignored here.
        super().__init__(config)

    async def on_mkt_data_hist_snapshot(self, snapshot: JsonDictT) -> None:
        # URGENT
        # create origin_mkt_data_df_ from snapshot
        # verify that the data for both instruments are present

        # transform it to market_data_df_ tstamp, close_symbolA, close_symbolB
        '''
        # from cvttpy/exchanges/binance/spot/mkt_data.py
        values = {
            "time_ns": time_ns,
            "tstamp": format_nanos_utc(time_ns),
            "exchange_id": exch_inst.exchange_id_,
            "instrument_id": exch_inst.instrument_id(),
            "interval_ns": interval_sec * 1_000_000_000,
            "open": float(kline[1]),
            "high": float(kline[2]),
            "low": float(kline[3]),
            "close": float(kline[4]),
            "volume": float(kline[5]),
            "num_trades": kline[8],
            "vwap": float(kline[7]) / float(kline[5]) if float(kline[5]) > 0 else 0.0  # Calculate VWAP
        }
        '''

        pass

    async def on_mkt_data_update(self, update: JsonDictT) -> Optional[pd.DataFrame]:
        # URGENT
        # make sure update has both instruments
        # create DataFrame tmp1 from update
        # transform tmp1 into temp. dataframe tmp2
        # add tmp1 to origin_mkt_data_df_
        # add tmp2 to market_data_df_
        # return market_data_df_
        '''
        class MdTradesAggregate(NamedObject):
            def to_dict(self) -> Dict[str, Any]:
                return {
                    "time_ns": self.time_ns_,
                    "tstamp": format_nanos_utc(self.time_ns_),
                    "exchange_id": self.exch_inst_.exchange_id_,
                    "instrument_id": self.exch_inst_.instrument_id(),
                    "interval_ns": self.interval_ns_,
                    "open": self.exch_inst_.get_price(self.open_),
                    "high": self.exch_inst_.get_price(self.high_),
                    "low": self.exch_inst_.get_price(self.low_),
                    "close": self.exch_inst_.get_price(self.close_),
                    "volume": self.exch_inst_.get_quantity(self.volume_),
                    "vwap": self.exch_inst_.get_price(self.vwap_),
                    "num_trades": self.exch_inst_.get_quantity(self.num_trades_),
                }
        '''

        return pd.DataFrame()
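One possible shape for the URGENT stubs above — purely a sketch, not the author's implementation. It assumes the update dict carries a "bars" list with one kline dict per instrument containing at least "tstamp", "symbol", and "close" (the commented cvttpy snippets suggest similar fields); the function name and keys are hypothetical:

import pandas as pd
from typing import Any, Dict, Tuple

def append_update(update: Dict[str, Any], origin_df: pd.DataFrame,
                  market_df: pd.DataFrame, price_col: str = "close") -> Tuple[pd.DataFrame, pd.DataFrame]:
    bars = pd.DataFrame(update["bars"])                  # long format: one row per instrument
    assert bars["symbol"].nunique() == 2, "update must carry both instruments"
    origin_df = pd.concat([origin_df, bars], ignore_index=True)

    # Wide format: tstamp plus one <price_col>_<symbol> column per instrument
    wide: Dict[str, Any] = {"tstamp": bars["tstamp"].iloc[0]}
    for _, bar in bars.iterrows():
        wide[f"{price_col}_{bar['symbol']}"] = bar[price_col]
    market_df = pd.concat([market_df, pd.DataFrame([wide])], ignore_index=True)
    return origin_df, market_df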
27
lib/pt_strategy/pt_model.py
Normal file
@@ -0,0 +1,27 @@
from __future__ import annotations

from abc import ABC, abstractmethod
from typing import Any, Dict, cast

from pt_strategy.prediction import Prediction


class PairsTradingModel(ABC):

    @abstractmethod
    def predict(self, pair: TradingPair) -> Prediction:  # type: ignore[assignment]
        ...

    @staticmethod
    def create(config: Dict[str, Any]) -> PairsTradingModel:
        import importlib

        model_class_name = config.get("model_class", None)
        assert model_class_name is not None
        module_name, class_name = model_class_name.rsplit(".", 1)
        module = importlib.import_module(module_name)
        model_object = getattr(module, class_name)()
        return cast(PairsTradingModel, model_object)
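A hedged usage sketch for the importlib factory above; the dotted module path is hypothetical — any importable class with a no-argument constructor and a predict(pair=...) method satisfies create():

from pt_strategy.pt_model import PairsTradingModel

# "pt_strategy.vecm_model.VECMModel" is a hypothetical path, not a module
# shipped in this change; substitute a real model class.
config = {"model_class": "pt_strategy.vecm_model.VECMModel"}
model = PairsTradingModel.create(config)  # rsplit -> import_module -> getattr -> instantiate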
303
lib/pt_strategy/research_strategy.py
Normal file
@@ -0,0 +1,303 @@
from __future__ import annotations

import copy
from typing import Any, Dict, List, Optional

import pandas as pd
from pt_strategy.model_data_policy import ModelDataPolicy
from pt_strategy.pt_market_data import ResearchMarketData
from pt_strategy.pt_model import Prediction
from pt_strategy.trading_pair import PairState, TradingPair


class PtResearchStrategy:
    config_: Dict[str, Any]
    trading_pair_: TradingPair
    model_data_policy_: ModelDataPolicy
    pt_mkt_data_: ResearchMarketData

    trades_: List[pd.DataFrame]
    predictions_: pd.DataFrame

    def __init__(
        self,
        config: Dict[str, Any],
        datafiles: List[str],
        instruments: List[Dict[str, str]],
    ):
        self.config_ = config
        self.trades_ = []
        self.trading_pair_ = TradingPair(config=config, instruments=instruments)
        self.predictions_ = pd.DataFrame()

        # A modified copy of the config must be passed to PtMarketData
        config_copy = copy.deepcopy(config)
        config_copy["instruments"] = instruments
        config_copy["datafiles"] = datafiles
        self.pt_mkt_data_ = ResearchMarketData(config=config_copy)
        self.pt_mkt_data_.load()
        self.model_data_policy_ = ModelDataPolicy.create(
            config, mkt_data=self.pt_mkt_data_.market_data_df_, pair=self.trading_pair_
        )

    def outstanding_positions(self) -> List[Dict[str, Any]]:
        return list(self.trading_pair_.user_data_.get("outstanding_positions", []))

    def run(self) -> None:
        training_minutes = self.config_.get("training_minutes", 120)
        market_data_series: pd.Series
        market_data_df = pd.DataFrame()

        # Accumulate the initial training window
        idx = 0
        while self.pt_mkt_data_.has_next():
            market_data_series = self.pt_mkt_data_.get_next()
            new_row = pd.DataFrame([market_data_series])
            market_data_df = pd.concat([market_data_df, new_row], ignore_index=True)
            if idx >= training_minutes:
                break
            idx += 1

        assert idx >= training_minutes, "Not enough training data"

        # Walk forward one bar at a time: predict, then trade on thresholds
        while self.pt_mkt_data_.has_next():
            market_data_series = self.pt_mkt_data_.get_next()
            new_row = pd.DataFrame([market_data_series])
            market_data_df = pd.concat([market_data_df, new_row], ignore_index=True)

            prediction = self.trading_pair_.run(
                market_data_df, self.model_data_policy_.advance(mkt_data_df=market_data_df)
            )
            assert prediction is not None
            self.predictions_ = pd.concat(
                [self.predictions_, prediction.to_df()], ignore_index=True
            )

            trades = self._create_trades(
                prediction=prediction, last_row=market_data_df.iloc[-1]
            )
            if trades is not None:
                self.trades_.append(trades)

        trades = self._handle_outstanding_positions()
        if trades is not None:
            self.trades_.append(trades)

    def _create_trades(
        self, prediction: Prediction, last_row: pd.Series
    ) -> Optional[pd.DataFrame]:
        pair = self.trading_pair_
        trades = None

        open_threshold = self.config_["dis-equilibrium_open_trshld"]
        close_threshold = self.config_["dis-equilibrium_close_trshld"]
        scaled_disequilibrium = prediction.scaled_disequilibrium_
        abs_scaled_disequilibrium = abs(scaled_disequilibrium)

        if pair.user_data_["state"] in [
            PairState.INITIAL,
            PairState.CLOSE,
            PairState.CLOSE_POSITION,
            PairState.CLOSE_STOP_LOSS,
            PairState.CLOSE_STOP_PROFIT,
        ]:
            if abs_scaled_disequilibrium >= open_threshold:
                trades = self._create_open_trades(
                    pair, row=last_row, prediction=prediction
                )
                if trades is not None:
                    trades["status"] = PairState.OPEN.name
                    print(f"OPEN TRADES:\n{trades}")
                    pair.user_data_["state"] = PairState.OPEN
                    pair.on_open_trades(trades)

        elif pair.user_data_["state"] == PairState.OPEN:
            if abs_scaled_disequilibrium <= close_threshold:
                trades = self._create_close_trades(
                    pair, row=last_row, prediction=prediction
                )
                if trades is not None:
                    trades["status"] = PairState.CLOSE.name
                    print(f"CLOSE TRADES:\n{trades}")
                    pair.user_data_["state"] = PairState.CLOSE
                    pair.on_close_trades(trades)
            elif pair.to_stop_close_conditions(predicted_row=last_row):
                trades = self._create_close_trades(pair, row=last_row)
                if trades is not None:
                    trades["status"] = pair.user_data_["stop_close_state"].name
                    print(f"STOP CLOSE TRADES:\n{trades}")
                    pair.user_data_["state"] = pair.user_data_["stop_close_state"]
                    pair.on_close_trades(trades)

        return trades

    def _handle_outstanding_positions(self) -> Optional[pd.DataFrame]:
        trades = None
        pair = self.trading_pair_

        # Outstanding positions
        if pair.user_data_["state"] == PairState.OPEN:
            print(f"{pair}: *** Position is NOT CLOSED. ***")
            if self.config_["close_outstanding_positions"]:
                close_position_row = pd.Series(pair.market_data_.iloc[-2])
                # close_position_row["disequilibrium"] = 0.0
                # close_position_row["scaled_disequilibrium"] = 0.0
                # close_position_row["signed_scaled_disequilibrium"] = 0.0

                trades = self._create_close_trades(
                    pair=pair, row=close_position_row, prediction=None
                )
                if trades is not None:
                    trades["status"] = PairState.CLOSE_POSITION.name
                    print(f"CLOSE_POSITION TRADES:\n{trades}")
                    pair.user_data_["state"] = PairState.CLOSE_POSITION
                    pair.on_close_trades(trades)
            else:
                pair.add_outstanding_position(
                    symbol=pair.symbol_a_,
                    open_side=pair.user_data_["open_side_a"],
                    open_px=pair.user_data_["open_px_a"],
                    open_tstamp=pair.user_data_["open_tstamp"],
                    last_mkt_data_row=pair.market_data_.iloc[-1],
                )
                pair.add_outstanding_position(
                    symbol=pair.symbol_b_,
                    open_side=pair.user_data_["open_side_b"],
                    open_px=pair.user_data_["open_px_b"],
                    open_tstamp=pair.user_data_["open_tstamp"],
                    last_mkt_data_row=pair.market_data_.iloc[-1],
                )
        return trades

    def _trades_df(self) -> pd.DataFrame:
        types = {
            "time": "datetime64[ns]",
            "action": "string",
            "symbol": "string",
            "side": "string",
            "price": "float64",
            "disequilibrium": "float64",
            "scaled_disequilibrium": "float64",
            "signed_scaled_disequilibrium": "float64",
            # "pair": "object",
        }
        columns = list(types.keys())
        return pd.DataFrame(columns=columns).astype(types)

    def _create_open_trades(
        self, pair: TradingPair, row: pd.Series, prediction: Prediction
    ) -> Optional[pd.DataFrame]:
        colname_a, colname_b = pair.exec_prices_colnames()

        tstamp = row["tstamp"]
        diseqlbrm = prediction.disequilibrium_
        scaled_disequilibrium = prediction.scaled_disequilibrium_
        px_a = row[colname_a]
        px_b = row[colname_b]

        # creating the trades
        df = self._trades_df()

        print(f"OPEN_TRADES: {row['tstamp']} {scaled_disequilibrium=}")
        if diseqlbrm > 0:
            side_a = "SELL"
            side_b = "BUY"
        else:
            side_a = "BUY"
            side_b = "SELL"

        # save opening sides and prices
        pair.user_data_["open_side_a"] = side_a  # used in outstanding positions
        pair.user_data_["open_side_b"] = side_b
        pair.user_data_["open_px_a"] = px_a
        pair.user_data_["open_px_b"] = px_b
        pair.user_data_["open_tstamp"] = tstamp

        pair.user_data_["close_side_a"] = side_b  # used for closing trades
        pair.user_data_["close_side_b"] = side_a

        # create opening trades
        df.loc[len(df)] = {
            "time": tstamp,
            "symbol": pair.symbol_a_,
            "side": side_a,
            "action": "OPEN",
            "price": px_a,
            "disequilibrium": diseqlbrm,
            "signed_scaled_disequilibrium": scaled_disequilibrium,
            "scaled_disequilibrium": abs(scaled_disequilibrium),
            # "pair": pair,
        }
        df.loc[len(df)] = {
            "time": tstamp,
            "symbol": pair.symbol_b_,
            "side": side_b,
            "action": "OPEN",
            "price": px_b,
            "disequilibrium": diseqlbrm,
            "signed_scaled_disequilibrium": scaled_disequilibrium,
            "scaled_disequilibrium": abs(scaled_disequilibrium),
            # "pair": pair,
        }
        return df

    def _create_close_trades(
        self, pair: TradingPair, row: pd.Series, prediction: Optional[Prediction] = None
    ) -> Optional[pd.DataFrame]:
        colname_a, colname_b = pair.exec_prices_colnames()

        tstamp = row["tstamp"]
        if prediction is not None:
            diseqlbrm = prediction.disequilibrium_
            signed_scaled_disequilibrium = prediction.scaled_disequilibrium_
            scaled_disequilibrium = abs(prediction.scaled_disequilibrium_)
        else:
            diseqlbrm = 0.0
            signed_scaled_disequilibrium = 0.0
            scaled_disequilibrium = 0.0
        px_a = row[colname_a]
        px_b = row[colname_b]

        # creating the trades
        df = self._trades_df()

        # create closing trades
        df.loc[len(df)] = {
            "time": tstamp,
            "symbol": pair.symbol_a_,
            "side": pair.user_data_["close_side_a"],
            "action": "CLOSE",
            "price": px_a,
            "disequilibrium": diseqlbrm,
            "scaled_disequilibrium": scaled_disequilibrium,
            "signed_scaled_disequilibrium": signed_scaled_disequilibrium,
            # "pair": pair,
        }
        df.loc[len(df)] = {
            "time": tstamp,
            "symbol": pair.symbol_b_,
            "side": pair.user_data_["close_side_b"],
            "action": "CLOSE",
            "price": px_b,
            "disequilibrium": diseqlbrm,
            "scaled_disequilibrium": scaled_disequilibrium,
            "signed_scaled_disequilibrium": signed_scaled_disequilibrium,
            # "pair": pair,
        }
        del pair.user_data_["close_side_a"]
        del pair.user_data_["close_side_b"]

        del pair.user_data_["open_tstamp"]
        del pair.user_data_["open_px_a"]
        del pair.user_data_["open_px_b"]
        del pair.user_data_["open_side_a"]
        del pair.user_data_["open_side_b"]
        return df

    def day_trades(self) -> pd.DataFrame:
        # Guard against days with no trades; pd.concat([]) raises ValueError.
        if not self.trades_:
            return self._trades_df()
        return pd.concat(self.trades_, ignore_index=True)
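For context, a hedged end-to-end driver sketch. The config is abridged to keys this class, ResearchMarketData, and TradingPair actually read; the model path and datafile name are hypothetical:

from pt_strategy.research_strategy import PtResearchStrategy

config = {
    "model_class": "pt_strategy.vecm_model.VECMModel",  # hypothetical module path
    "stat_model_price": "close",
    "training_minutes": 120,
    "dis-equilibrium_open_trshld": 2.0,
    "dis-equilibrium_close_trshld": 0.5,
    "close_outstanding_positions": True,
    "funding_per_pair": 10000,
    # plus the market_data_loading / trading_hours entries that
    # ResearchMarketData.load() consumes
}
instruments = [
    {"symbol": "ADA-USDT", "instrument_type": "CRYPTO", "exchange_id": "BNBSPOT"},
    {"symbol": "SOL-USDT", "instrument_type": "CRYPTO", "exchange_id": "BNBSPOT"},
]
strategy = PtResearchStrategy(
    config=config,
    datafiles=["20250605.mktdata.ohlcv.db"],  # hypothetical file
    instruments=instruments,
)
strategy.run()
print(strategy.day_trades())
print(strategy.outstanding_positions())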
532
lib/pt_strategy/results.py
Normal file
@@ -0,0 +1,532 @@
import os
import sqlite3
from datetime import date, datetime
from typing import Any, Dict, List, Optional, Tuple

import pandas as pd
from pt_strategy.trading_pair import TradingPair


# Recommended replacement adapters and converters for Python 3.12+
# From: https://docs.python.org/3/library/sqlite3.html#sqlite3-adapter-converter-recipes
def adapt_date_iso(val: date) -> str:
    """Adapt datetime.date to ISO 8601 date."""
    return val.isoformat()


def adapt_datetime_iso(val: datetime) -> str:
    """Adapt datetime.datetime to timezone-naive ISO 8601 date."""
    return val.isoformat()


def convert_date(val: bytes) -> date:
    """Convert ISO 8601 date to datetime.date object."""
    return datetime.fromisoformat(val.decode()).date()


def convert_datetime(val: bytes) -> datetime:
    """Convert ISO 8601 datetime to datetime.datetime object."""
    return datetime.fromisoformat(val.decode())


# Register the adapters and converters
sqlite3.register_adapter(date, adapt_date_iso)
sqlite3.register_adapter(datetime, adapt_datetime_iso)
sqlite3.register_converter("date", convert_date)
sqlite3.register_converter("datetime", convert_datetime)

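# Note: the converters registered above only fire when a connection is opened
# with detect_types, e.g. (illustrative; the connections below do not use it):
#
#   conn = sqlite3.connect(db_path, detect_types=sqlite3.PARSE_DECLTYPES)
#   conn.execute("SELECT open_time FROM pt_bt_results")  # rows decode to datetime
#
# Plain sqlite3.connect(db_path) still applies the adapters on write but
# returns TEXT on read.
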
def create_result_database(db_path: str) -> None:
    """
    Create the SQLite database and required tables if they don't exist.
    """
    try:
        # Create directory if it doesn't exist
        db_dir = os.path.dirname(db_path)
        if db_dir and not os.path.exists(db_dir):
            os.makedirs(db_dir, exist_ok=True)
            print(f"Created directory: {db_dir}")

        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()

        # Create the pt_bt_results table for completed trades
        cursor.execute(
            """
            CREATE TABLE IF NOT EXISTS pt_bt_results (
                date DATE,
                pair TEXT,
                symbol TEXT,
                open_time DATETIME,
                open_side TEXT,
                open_price REAL,
                open_quantity INTEGER,
                open_disequilibrium REAL,
                close_time DATETIME,
                close_side TEXT,
                close_price REAL,
                close_quantity INTEGER,
                close_disequilibrium REAL,
                symbol_return REAL,
                pair_return REAL,
                close_condition TEXT
            )
            """
        )
        cursor.execute("DELETE FROM pt_bt_results;")

        # Create the outstanding_positions table for open positions
        cursor.execute(
            """
            CREATE TABLE IF NOT EXISTS outstanding_positions (
                date DATE,
                pair TEXT,
                symbol TEXT,
                position_quantity REAL,
                last_price REAL,
                unrealized_return REAL,
                open_price REAL,
                open_side TEXT
            )
            """
        )
        cursor.execute("DELETE FROM outstanding_positions;")

        # Create the config table for storing configuration JSON for reference
        cursor.execute(
            """
            CREATE TABLE IF NOT EXISTS config (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                run_timestamp DATETIME,
                config_file_path TEXT,
                config_json TEXT,
                datafiles TEXT,
                instruments TEXT
            )
            """
        )
        cursor.execute("DELETE FROM config;")

        conn.commit()
        conn.close()

    except Exception as e:
        print(f"Error creating result database: {str(e)}")
        raise


def store_config_in_database(
    db_path: str,
    config_file_path: str,
    config: Dict,
    datafiles: List[Tuple[str, str]],
    instruments: List[Dict[str, str]],
) -> None:
    """
    Store configuration information in the database for reference.
    """
    import json

    if db_path.upper() == "NONE":
        return

    try:
        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()

        # Convert config to JSON string
        config_json = json.dumps(config, indent=2, default=str)

        # Convert lists to comma-separated strings for storage
        datafiles_str = ", ".join(f"{datafile}" for _, datafile in datafiles)
        instruments_str = ", ".join(
            f"{inst['symbol']}:{inst['instrument_type']}:{inst['exchange_id']}"
            for inst in instruments
        )

        # Insert configuration record
        cursor.execute(
            """
            INSERT INTO config (
                run_timestamp, config_file_path, config_json, datafiles, instruments
            ) VALUES (?, ?, ?, ?, ?)
            """,
            (
                datetime.now(),
                config_file_path,
                config_json,
                datafiles_str,
                instruments_str,
            ),
        )

        conn.commit()
        conn.close()

        print("Configuration stored in database")

    except Exception as e:
        print(f"Error storing configuration in database: {str(e)}")
        import traceback

        traceback.print_exc()

def convert_timestamp(timestamp: Any) -> Optional[datetime]:
    """Convert pandas Timestamp to Python datetime object for SQLite compatibility."""
    if timestamp is None:
        return None
    if isinstance(timestamp, pd.Timestamp):
        return timestamp.to_pydatetime()
    elif isinstance(timestamp, datetime):
        return timestamp
    elif isinstance(timestamp, date):
        return datetime.combine(timestamp, datetime.min.time())
    elif isinstance(timestamp, str):
        return datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S")
    elif isinstance(timestamp, int):
        return datetime.fromtimestamp(timestamp)
    else:
        raise ValueError(f"Unsupported timestamp type: {type(timestamp)}")


DayT = str
TradeT = Dict[str, Any]
OutstandingPositionT = Dict[str, Any]


class PairResearchResult:
    """
    Class to handle pair research results for a single pair across multiple days.
    Simplified version of BacktestResult focused on single pair analysis.
    """
    config_: Dict[str, Any]
    trades_: Dict[DayT, pd.DataFrame]
    outstanding_positions_: Dict[DayT, List[OutstandingPositionT]]
    symbol_roundtrip_trades_: Dict[DayT, List[Dict[str, Any]]]

    def __init__(self, config: Dict[str, Any]) -> None:
        self.config_ = config
        self.trades_ = {}
        self.outstanding_positions_ = {}
        self.total_realized_pnl = 0.0
        self.symbol_roundtrip_trades_ = {}

    def add_day_results(self, day: DayT, trades: pd.DataFrame, outstanding_positions: List[Dict[str, Any]]) -> None:
        assert isinstance(trades, pd.DataFrame)
        self.trades_[day] = trades
        self.outstanding_positions_[day] = outstanding_positions

    # def all_trades(self) -> List[TradeT]:
    #     """Get all trades across all days as a flat list."""
    #     all_trades_list: List[TradeT] = []
    #     for day_trades in self.trades_.values():
    #         all_trades_list.extend(day_trades.to_dict(orient="records"))
    #     return all_trades_list

    def outstanding_positions(self) -> List[OutstandingPositionT]:
        """Get all outstanding positions across all days as a flat list."""
        res: List[Dict[str, Any]] = []
        for day in self.outstanding_positions_.keys():
            res.extend(self.outstanding_positions_[day])
        return res

    def calculate_returns(self) -> None:
        """Calculate and store total returns for the single pair across all days."""
        self.extract_roundtrip_trades()

        self.total_realized_pnl = 0.0

        for day_trades in self.symbol_roundtrip_trades_.values():
            for trade in day_trades:
                self.total_realized_pnl += trade['symbol_return']

    def extract_roundtrip_trades(self) -> None:
        """
        Extract round-trip trades by day, grouping open/close pairs for each symbol.
        Populates symbol_roundtrip_trades_, keyed by day, with the list of
        completed round-trip trades for that day.
        """
        def _symbol_return(trade1_side: str, trade1_px: float, trade2_side: str, trade2_px: float) -> float:
            if trade1_side == "BUY" and trade2_side == "SELL":
                return (trade2_px - trade1_px) / trade1_px * 100
            elif trade1_side == "SELL" and trade2_side == "BUY":
                return (trade1_px - trade2_px) / trade1_px * 100
            else:
                return 0.0

        # Process each day separately
        for day, day_trades in self.trades_.items():

            # Trades are already stored in timestamp order for the day
            sorted_trades = day_trades

            day_roundtrips = []

            # Process trades in groups of 4 (open A, open B, close A, close B)
            for idx in range(0, len(sorted_trades), 4):
                if idx + 3 >= len(sorted_trades):
                    break

                trade_a_1 = sorted_trades.iloc[idx]      # Open A
                trade_b_1 = sorted_trades.iloc[idx + 1]  # Open B
                trade_a_2 = sorted_trades.iloc[idx + 2]  # Close A
                trade_b_2 = sorted_trades.iloc[idx + 3]  # Close B

                # Validate trade sequence
                if not (trade_a_1["action"] == "OPEN" and trade_a_2["action"] == "CLOSE"):
                    continue
                if not (trade_b_1["action"] == "OPEN" and trade_b_2["action"] == "CLOSE"):
                    continue

                # Calculate individual symbol returns
                symbol_a_return = _symbol_return(
                    trade_a_1["side"], trade_a_1["price"],
                    trade_a_2["side"], trade_a_2["price"]
                )
                symbol_b_return = _symbol_return(
                    trade_b_1["side"], trade_b_1["price"],
                    trade_b_2["side"], trade_b_2["price"]
                )

                pair_return = symbol_a_return + symbol_b_return

                # Create round-trip records for both symbols
                funding_per_position = self.config_.get("funding_per_pair", 10000) / 2

                # Symbol A round-trip
                day_roundtrips.append({
                    "symbol": trade_a_1["symbol"],
                    "open_side": trade_a_1["side"],
                    "open_price": trade_a_1["price"],
                    "open_time": trade_a_1["time"],
                    "close_side": trade_a_2["side"],
                    "close_price": trade_a_2["price"],
                    "close_time": trade_a_2["time"],
                    "symbol_return": symbol_a_return,
                    "pair_return": pair_return,
                    "shares": funding_per_position / trade_a_1["price"],
                    "close_condition": trade_a_2.get("status", "UNKNOWN"),
                    "open_disequilibrium": trade_a_1.get("disequilibrium"),
                    "close_disequilibrium": trade_a_2.get("disequilibrium"),
                })

                # Symbol B round-trip
                day_roundtrips.append({
                    "symbol": trade_b_1["symbol"],
                    "open_side": trade_b_1["side"],
                    "open_price": trade_b_1["price"],
                    "open_time": trade_b_1["time"],
                    "close_side": trade_b_2["side"],
                    "close_price": trade_b_2["price"],
                    "close_time": trade_b_2["time"],
                    "symbol_return": symbol_b_return,
                    "pair_return": pair_return,
                    "shares": funding_per_position / trade_b_1["price"],
                    "close_condition": trade_b_2.get("status", "UNKNOWN"),
                    "open_disequilibrium": trade_b_1.get("disequilibrium"),
                    "close_disequilibrium": trade_b_2.get("disequilibrium"),
                })

            if day_roundtrips:
                self.symbol_roundtrip_trades_[day] = day_roundtrips

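    # Worked example of the _symbol_return convention above: a long round trip
    # BUY @ 100 -> SELL @ 105 yields (105 - 100) / 100 * 100 = +5.00%, and a
    # short round trip SELL @ 100 -> BUY @ 95 yields (100 - 95) / 100 * 100 =
    # +5.00%; both legs are measured against the opening price.
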
    def print_returns_by_day(self) -> None:
        """
        Print detailed return information for each day, grouped by day.
        Shows individual symbol round-trips and daily totals.
        """

        print("\n====== PAIR RESEARCH RETURNS BY DAY ======")

        total_return_all_days = 0.0

        for day, day_trades in sorted(self.symbol_roundtrip_trades_.items()):

            print(f"\n--- {day} ---")

            day_total_return = 0.0
            pair_returns = []

            # Group trades by pair (every 2 trades form a pair)
            for idx in range(0, len(day_trades), 2):
                if idx + 1 < len(day_trades):
                    trade_a = day_trades[idx]
                    trade_b = day_trades[idx + 1]

                    # Print individual symbol results
                    print(f"  {trade_a['open_time'].time()}-{trade_a['close_time'].time()}")
                    print(f"  {trade_a['symbol']}: {trade_a['open_side']} @ ${trade_a['open_price']:.2f} → "
                          f"{trade_a['close_side']} @ ${trade_a['close_price']:.2f} | "
                          f"Return: {trade_a['symbol_return']:+.2f}% | Shares: {trade_a['shares']:.2f}")

                    print(f"  {trade_b['symbol']}: {trade_b['open_side']} @ ${trade_b['open_price']:.2f} → "
                          f"{trade_b['close_side']} @ ${trade_b['close_price']:.2f} | "
                          f"Return: {trade_b['symbol_return']:+.2f}% | Shares: {trade_b['shares']:.2f}")

                    # Show disequilibrium info if available
                    if trade_a.get('open_disequilibrium') is not None:
                        print(f"  Disequilibrium: Open: {trade_a['open_disequilibrium']:.4f}, "
                              f"Close: {trade_a['close_disequilibrium']:.4f}")

                    pair_return = trade_a['pair_return']
                    print(f"  Pair Return: {pair_return:+.2f}% | Close Condition: {trade_a['close_condition']}")
                    print()

                    # Accumulate inside the guard so an unpaired trailing trade
                    # cannot reference an undefined pair_return
                    pair_returns.append(pair_return)
                    day_total_return += pair_return

            print(f"  Day Total Return: {day_total_return:+.2f}% ({len(pair_returns)} pairs)")
            total_return_all_days += day_total_return

        print("\n====== TOTAL RETURN ACROSS ALL DAYS ======")
        print(f"Total Return: {total_return_all_days:+.2f}%")
        print(f"Total Days: {len(self.symbol_roundtrip_trades_)}")
        if len(self.symbol_roundtrip_trades_) > 0:
            print(f"Average Daily Return: {total_return_all_days / len(self.symbol_roundtrip_trades_):+.2f}%")

    def get_return_summary(self) -> Dict[str, Any]:
        """
        Get a summary of returns across all days.
        Returns a dictionary with key metrics.
        """
        if len(self.symbol_roundtrip_trades_) == 0:
            return {
                "total_return": 0.0,
                "total_days": 0,
                "total_pairs": 0,
                "average_daily_return": 0.0,
                "best_day": None,
                "worst_day": None,
                "daily_returns": {}
            }

        daily_returns = {}
        total_return = 0.0
        total_pairs = 0

        for day, day_trades in self.symbol_roundtrip_trades_.items():
            day_return = 0.0
            day_pairs = len(day_trades) // 2  # Each pair has 2 symbol trades

            for trade in day_trades:
                day_return += trade['symbol_return']

            daily_returns[day] = {
                "return": day_return,
                "pairs": day_pairs
            }
            total_return += day_return
            total_pairs += day_pairs

        best_day = max(daily_returns.items(), key=lambda x: x[1]["return"]) if daily_returns else None
        worst_day = min(daily_returns.items(), key=lambda x: x[1]["return"]) if daily_returns else None

        return {
            "total_return": total_return,
            "total_days": len(self.symbol_roundtrip_trades_),
            "total_pairs": total_pairs,
            "average_daily_return": total_return / len(self.symbol_roundtrip_trades_) if self.symbol_roundtrip_trades_ else 0.0,
            "best_day": best_day,
            "worst_day": worst_day,
            "daily_returns": daily_returns
        }

    def print_grand_totals(self) -> None:
        """Print grand totals for the single pair analysis."""
        summary = self.get_return_summary()

        print("\n====== PAIR RESEARCH GRAND TOTALS ======")
        print('---')
        print(f"Total Return: {summary['total_return']:+.2f}%")
        print('---')
        print(f"Total Days Traded: {summary['total_days']}")
        print(f"Total Open-Close Actions: {summary['total_pairs']}")
        print(f"Total Trades: 4 * {summary['total_pairs']} = {4 * summary['total_pairs']}")

        if summary['total_days'] > 0:
            print(f"Average Daily Return: {summary['average_daily_return']:+.2f}%")

        if summary['best_day']:
            best_day, best_data = summary['best_day']
            print(f"Best Day: {best_day} ({best_data['return']:+.2f}%)")

        if summary['worst_day']:
            worst_day, worst_data = summary['worst_day']
            print(f"Worst Day: {worst_day} ({worst_data['return']:+.2f}%)")

        # Update the total_realized_pnl for backward compatibility
        self.total_realized_pnl = summary['total_return']

    def analyze_pair_performance(self) -> None:
        """
        Main method to perform comprehensive pair research analysis.
        Extracts round-trip trades, calculates returns, groups by day, and prints results.
        """
        print(f"\n{'='*60}")
        print("PAIR RESEARCH PERFORMANCE ANALYSIS")
        print(f"{'='*60}")

        self.calculate_returns()
        self.print_returns_by_day()
        self.print_outstanding_positions()
        self._print_additional_metrics()
        self.print_grand_totals()

    def _print_additional_metrics(self) -> None:
        """Print additional performance metrics."""
        summary = self.get_return_summary()

        if summary['total_days'] == 0:
            return

        print("\n====== ADDITIONAL METRICS ======")

        # Calculate win rate
        winning_days = sum(1 for day_data in summary['daily_returns'].values() if day_data['return'] > 0)
        win_rate = (winning_days / summary['total_days']) * 100
        print(f"Winning Days: {winning_days}/{summary['total_days']} ({win_rate:.1f}%)")

        # Calculate average trade return
        if summary['total_pairs'] > 0:
            # Each pair has 2 symbol trades, so total symbol trades = total_pairs * 2
            total_symbol_trades = summary['total_pairs'] * 2
            avg_symbol_return = summary['total_return'] / total_symbol_trades
            print(f"Average Symbol Return: {avg_symbol_return:+.2f}%")

            avg_pair_return = summary['total_return'] / summary['total_pairs'] / 2  # Divide by 2 since we sum both symbols
            print(f"Average Pair Return: {avg_pair_return:+.2f}%")

        # Show daily return distribution
        daily_returns_list = [data['return'] for data in summary['daily_returns'].values()]
        if daily_returns_list:
            print(f"Daily Return Range: {min(daily_returns_list):+.2f}% to {max(daily_returns_list):+.2f}%")

    def print_outstanding_positions(self) -> None:
        """Print outstanding positions for the single pair."""
        all_positions: List[OutstandingPositionT] = self.outstanding_positions()
        if not all_positions:
            print("\n====== NO OUTSTANDING POSITIONS ======")
            return

        print("\n====== OUTSTANDING POSITIONS ======")
        print(f"{'Symbol':<10} {'Side':<4} {'Shares':<10} {'Open $':<8} {'Current $':<10} {'Value $':<12}")
        print("-" * 70)

        total_value = 0.0
        for pos in all_positions:
            current_value = pos.get("last_value", 0.0)
            print(f"{pos['symbol']:<10} {pos['open_side']:<4} {pos['shares']:<10.2f} "
                  f"{pos['open_px']:<8.2f} {pos['last_px']:<10.2f} {current_value:<12.2f}")
            total_value += current_value

        print("-" * 70)
        print(f"{'TOTAL VALUE':<60} ${total_value:<12.2f}")

    def get_total_realized_pnl(self) -> float:
        """Get total realized PnL."""
        return self.total_realized_pnl
199
lib/pt_strategy/trading_pair.py
Normal file
@@ -0,0 +1,199 @@
from __future__ import annotations

from datetime import datetime
from enum import Enum
from typing import Any, Dict, List

import pandas as pd

from pt_strategy.model_data_policy import DataWindowParams
from pt_strategy.prediction import Prediction


class PairState(Enum):
    INITIAL = 1
    OPEN = 2
    CLOSE = 3
    CLOSE_POSITION = 4
    CLOSE_STOP_LOSS = 5
    CLOSE_STOP_PROFIT = 6


def get_symbol(instrument: Dict[str, str]) -> str:
    if "symbol" in instrument:
        return instrument["symbol"]
    elif "instrument_id" in instrument:
        instrument_id = instrument["instrument_id"]
        instrument_pfx = instrument_id[:instrument_id.find("-") + 1]
        symbol = instrument_id[len(instrument_pfx):]
        instrument["symbol"] = symbol
        instrument["instrument_id_pfx"] = instrument_pfx
        return symbol
    else:
        raise ValueError(f"Invalid instrument: {instrument}, missing symbol or instrument_id")

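# Example of the instrument_id parsing above: {"instrument_id": "PAIR-ADA-USDT"}
# splits at the first "-" into instrument_id_pfx "PAIR-" and symbol "ADA-USDT";
# an instrument that already carries {"symbol": ...} is returned unchanged.
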
class TradingPair:
    config_: Dict[str, Any]
    market_data_: pd.DataFrame
    instruments_: List[Dict[str, str]]
    symbol_a_: str
    symbol_b_: str

    stat_model_price_: str
    model_: PairsTradingModel  # type: ignore[assignment]

    user_data_: Dict[str, Any]

    def __init__(
        self,
        config: Dict[str, Any],
        instruments: List[Dict[str, str]],
    ):
        from pt_strategy.pt_model import PairsTradingModel

        assert len(instruments) == 2, "Trading pair must have exactly 2 instruments"

        self.config_ = config
        self.instruments_ = instruments
        self.symbol_a_ = get_symbol(instruments[0])
        self.symbol_b_ = get_symbol(instruments[1])
        self.model_ = PairsTradingModel.create(config)
        self.stat_model_price_ = config["stat_model_price"]
        self.user_data_ = {
            "state": PairState.INITIAL,
        }

    def __repr__(self) -> str:
        return (
            f"{self.__class__.__name__}:"
            f" symbol_a={self.symbol_a_},"
            f" symbol_b={self.symbol_b_},"
            f" model={self.model_.__class__.__name__}"
        )

    def is_closed(self) -> bool:
        return self.user_data_["state"] in [
            PairState.CLOSE,
            PairState.CLOSE_POSITION,
            PairState.CLOSE_STOP_LOSS,
            PairState.CLOSE_STOP_PROFIT,
        ]

    def is_open(self) -> bool:
        return self.user_data_["state"] == PairState.OPEN

    def colnames(self) -> List[str]:
        return [
            f"{self.stat_model_price_}_{self.symbol_a_}",
            f"{self.stat_model_price_}_{self.symbol_b_}",
        ]

    def exec_prices_colnames(self) -> List[str]:
        return [
            f"exec_price_{self.symbol_a_}",
            f"exec_price_{self.symbol_b_}",
        ]

    def to_stop_close_conditions(self, predicted_row: pd.Series) -> bool:
        config = self.config_
        if (
            "stop_close_conditions" not in config
            or config["stop_close_conditions"] is None
        ):
            return False
        # Compute once, up front: both the profit and the loss checks need it.
        current_return = self._current_return(predicted_row)
        if "profit" in config["stop_close_conditions"]:
            if current_return >= config["stop_close_conditions"]["profit"]:
                print(f"STOP PROFIT: {current_return}")
                self.user_data_["stop_close_state"] = PairState.CLOSE_STOP_PROFIT
                return True
        if "loss" in config["stop_close_conditions"]:
            if current_return <= config["stop_close_conditions"]["loss"]:
                print(f"STOP LOSS: {current_return}")
                self.user_data_["stop_close_state"] = PairState.CLOSE_STOP_LOSS
                return True
        return False

    def _current_return(self, predicted_row: pd.Series) -> float:
        if "open_trades" in self.user_data_:
            open_trades = self.user_data_["open_trades"]
            if len(open_trades) == 0:
                return 0.0

            def _single_instrument_return(symbol: str) -> float:
                instrument_open_trades = open_trades[open_trades["symbol"] == symbol]
                instrument_open_price = instrument_open_trades["price"].iloc[0]

                sign = -1 if instrument_open_trades["side"].iloc[0] == "SELL" else 1
                instrument_price = predicted_row[f"{self.stat_model_price_}_{symbol}"]
                instrument_return = (
                    sign
                    * (instrument_price - instrument_open_price)
                    / instrument_open_price
                )
                return float(instrument_return) * 100.0

            instrument_a_return = _single_instrument_return(self.symbol_a_)
            instrument_b_return = _single_instrument_return(self.symbol_b_)
            return instrument_a_return + instrument_b_return
        return 0.0

    def on_open_trades(self, trades: pd.DataFrame) -> None:
        self.user_data_.pop("close_trades", None)
        self.user_data_["open_trades"] = trades

    def on_close_trades(self, trades: pd.DataFrame) -> None:
        # pop() tolerates a missing key (e.g. a close arriving without an open)
        self.user_data_.pop("open_trades", None)
        self.user_data_["close_trades"] = trades

    def add_outstanding_position(
        self,
        symbol: str,
        open_side: str,
        open_px: float,
        open_tstamp: datetime,
        last_mkt_data_row: pd.Series,
    ) -> None:
        assert symbol in [self.symbol_a_, self.symbol_b_], "Symbol must be one of the pair's symbols"
        assert open_side in ["BUY", "SELL"], "Open side must be either BUY or SELL"
        assert open_px > 0, "Open price must be greater than 0"
        assert open_tstamp is not None, "Open timestamp must be provided"
        assert last_mkt_data_row is not None, "Last market data row must be provided"

        exec_prices_col_a, exec_prices_col_b = self.exec_prices_colnames()
        if symbol == self.symbol_a_:
            last_px = last_mkt_data_row[exec_prices_col_a]
        else:
            last_px = last_mkt_data_row[exec_prices_col_b]

        funding_per_position = self.config_["funding_per_pair"] / 2
        shares = funding_per_position / open_px
        if open_side == "SELL":
            shares = -shares

        if "outstanding_positions" not in self.user_data_:
            self.user_data_["outstanding_positions"] = []

        self.user_data_["outstanding_positions"].append({
            "symbol": symbol,
            "open_side": open_side,
            "open_px": open_px,
            "shares": shares,
            "open_tstamp": open_tstamp,
            "last_px": last_px,
            "last_tstamp": last_mkt_data_row["tstamp"],
            "last_value": last_px * shares,
        })

    def run(self, market_data: pd.DataFrame, data_params: DataWindowParams) -> Prediction:  # type: ignore[assignment]
        self.market_data_ = market_data[
            data_params.training_start_index : data_params.training_start_index + data_params.training_size
        ]
        return self.model_.predict(pair=self)
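run() slices the rolling training window with two DataWindowParams fields; a minimal illustration of the slice semantics, where the stand-in dataclass mirrors only the fields run() actually touches:

from dataclasses import dataclass

import pandas as pd


@dataclass
class _Window:  # stand-in for DataWindowParams; the real class lives in model_data_policy
    training_start_index: int
    training_size: int


df = pd.DataFrame({"tstamp": range(10)})
w = _Window(training_start_index=3, training_size=5)
window = df[w.training_start_index : w.training_start_index + w.training_size]
print(window)  # rows 3..7 — the slice handed to the model as market_data_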
@@ -1,193 +0,0 @@
# original script moved to vecm_rolling_fit_01.py

# 09.09.25 Added GARCH model - predicting volatility

# Rule of thumb for GARCH(1,1) parameters:
# alpha + beta ≈ 1  → strong volatility clustering / persistence.
# If much lower     → volatility mean-reverts quickly.
# If > 1            → model is unstable / non-stationary (bad).

# Two signals are produced: the VECM disequilibrium (mean-reversion signal) and
# the GARCH volatility forecast (risk measure). Combining them — e.g. only
# entering trades when:
#   - the VECM disequilibrium z-score exceeds the threshold, and
#   - the GARCH-forecasted volatility is not too high (avoiding noise-driven signals)
# gives a volatility-adjusted pairs trading strategy, more robust than plain VECM.

# Regime flag:
# high_volatility = 1 → persistence > 0.95 or volatility forecast > 2
#                       (rule of thumb: unstable / risky regime).
# high_volatility = 0 → stable regime.

# The pair_predict_result_ DataFrame now includes:
# disequilibrium, scaled_disequilibrium, z-scores, garch_alpha, garch_beta,
# garch_persistence (α+β rule of thumb), garch_vol_forecast (1-step volatility
# forecast), and the high_volatility warning flag for detecting unstable regimes.

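# A minimal sketch of that gating rule (column names follow the list above;
# the threshold is a placeholder):
#
#   enter = (df["scaled_disequilibrium"] > open_threshold) & (df["high_volatility"] == 0)
#   candidates = df[enter]
#
# i.e. trade only when the z-scored VECM disequilibrium clears the entry
# threshold while the GARCH regime flag reports stable volatility.
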
# VECM/GARCH
# vecm_rolling_fit.py:
from typing import Any, Dict, Optional, cast

import numpy as np
import pandas as pd
from pt_trading.results import BacktestResult
from pt_trading.rolling_window_fit import RollingFit
from pt_trading.trading_pair import TradingPair
from statsmodels.tsa.vector_ar.vecm import VECM, VECMResults
from arch import arch_model

NanoPerMin = 1e9


class VECMTradingPair(TradingPair):
    vecm_fit_: Optional[VECMResults]
    pair_predict_result_: Optional[pd.DataFrame]

    def __init__(
        self,
        config: Dict[str, Any],
        market_data: pd.DataFrame,
        symbol_a: str,
        symbol_b: str,
    ):
        super().__init__(config, market_data, symbol_a, symbol_b)
        self.vecm_fit_ = None
        self.pair_predict_result_ = None
        self.garch_fit_ = None
        self.sigma_spread_forecast_ = None
        self.garch_alpha_ = None
        self.garch_beta_ = None
        self.garch_persistence_ = None
        self.high_volatility_flag_ = None

    def _train_pair(self) -> None:
        self._fit_VECM()
        assert self.vecm_fit_ is not None

        diseq_series = self.training_df_[self.colnames()] @ self.vecm_fit_.beta
        self.training_mu_ = float(diseq_series[0].mean())
        self.training_std_ = float(diseq_series[0].std())

        self.training_df_["disequilibrium"] = diseq_series
        self.training_df_["scaled_disequilibrium"] = (
            diseq_series - self.training_mu_
        ) / self.training_std_

    def _fit_VECM(self) -> None:
        assert self.training_df_ is not None
        vecm_df = self.training_df_[self.colnames()].reset_index(drop=True)
        vecm_model = VECM(vecm_df, coint_rank=1)
        vecm_fit = vecm_model.fit()
        self.vecm_fit_ = vecm_fit

        # Error Correction Term (spread)
        ect_series = (vecm_df @ vecm_fit.beta).iloc[:, 0]

        # Difference the spread for stationarity
        dz = ect_series.diff().dropna()

        if len(dz) < 30:
            print("Not enough data for GARCH fitting.")
            return

        # Rescale if variance too small
        if dz.std() < 0.1:
            dz = dz * 1000
            # print("Scale check:", dz.std())

        try:
            garch = arch_model(dz, vol="GARCH", p=1, q=1, mean="Zero", dist="normal")
            garch_fit = garch.fit(disp="off")
            self.garch_fit_ = garch_fit

            # Extract parameters
            params = garch_fit.params
            self.garch_alpha_ = params.get("alpha[1]", np.nan)
            self.garch_beta_ = params.get("beta[1]", np.nan)
            self.garch_persistence_ = self.garch_alpha_ + self.garch_beta_

            # print(f"GARCH α: {self.garch_alpha_:.4f}, β: {self.garch_beta_:.4f}, "
            #       f"α+β (persistence): {self.garch_persistence_:.4f}")

            # One-step-ahead volatility forecast
            forecast = garch_fit.forecast(horizon=1)
            sigma_next = np.sqrt(forecast.variance.iloc[-1, 0])
            self.sigma_spread_forecast_ = float(sigma_next)
            # print("GARCH sigma forecast:", self.sigma_spread_forecast_)

            # Rule of thumb: persistence close to 1 or large volatility forecast
            self.high_volatility_flag_ = int(
                (self.garch_persistence_ is not None and self.garch_persistence_ > 0.95)
                or (self.sigma_spread_forecast_ is not None and self.sigma_spread_forecast_ > 2)
            )

        except Exception as e:
            print(f"GARCH fit failed: {e}")
            self.garch_fit_ = None
            self.sigma_spread_forecast_ = None
            self.high_volatility_flag_ = None

    def predict(self) -> pd.DataFrame:
        self._train_pair()
        assert self.testing_df_ is not None
        assert self.vecm_fit_ is not None

        # VECM predictions
        predicted_prices = self.vecm_fit_.predict(steps=len(self.testing_df_))
        predicted_df = pd.merge(
            self.testing_df_.reset_index(drop=True),
            pd.DataFrame(predicted_prices, columns=pd.Index(self.colnames()), dtype=float),
            left_index=True,
            right_index=True,
            suffixes=("", "_pred"),
        ).dropna()

        # Disequilibrium and z-scores
        predicted_df["disequilibrium"] = (
            predicted_df[self.colnames()] @ self.vecm_fit_.beta
        )
        predicted_df["signed_scaled_disequilibrium"] = (
            predicted_df["disequilibrium"] - self.training_mu_
        ) / self.training_std_
        predicted_df["scaled_disequilibrium"] = abs(
            predicted_df["signed_scaled_disequilibrium"]
        )

        # Add GARCH parameters + volatility forecast
        predicted_df["garch_alpha"] = self.garch_alpha_
        predicted_df["garch_beta"] = self.garch_beta_
        predicted_df["garch_persistence"] = self.garch_persistence_
        predicted_df["garch_vol_forecast"] = self.sigma_spread_forecast_
        predicted_df["high_volatility"] = self.high_volatility_flag_

        # Save results
        if self.pair_predict_result_ is None:
            self.pair_predict_result_ = predicted_df
        else:
            self.pair_predict_result_ = pd.concat(
                [self.pair_predict_result_, predicted_df], ignore_index=True
            )

        return self.pair_predict_result_


class VECMRollingFit(RollingFit):
    def __init__(self) -> None:
        super().__init__()

    def create_trading_pair(
        self,
        config: Dict,
        market_data: pd.DataFrame,
        symbol_a: str,
        symbol_b: str,
    ) -> TradingPair:
        return VECMTradingPair(
            config=config,
            market_data=market_data,
            symbol_a=symbol_a,
            symbol_b=symbol_b,
        )
@@ -1,124 +0,0 @@
from typing import Any, Dict, Optional
|
||||
|
||||
import pandas as pd
|
||||
import statsmodels.api as sm
|
||||
|
||||
from pt_trading.rolling_window_fit import RollingFit
|
||||
from pt_trading.trading_pair import TradingPair
|
||||
|
||||
NanoPerMin = 1e9
|
||||
|
||||
|
||||
class ZScoreTradingPair(TradingPair):
|
||||
"""TradingPair implementation that fits a hedge ratio with OLS and
|
||||
computes a standardized spread (z-score).
|
||||
|
||||
The class stores training spread mean/std and hedge ratio so the model
|
||||
can be applied to testing data consistently.
|
||||
"""
|
||||
|
||||
zscore_model_: Optional[sm.regression.linear_model.RegressionResultsWrapper]
|
||||
pair_predict_result_: Optional[pd.DataFrame]
|
||||
zscore_df_: Optional[pd.Series]
|
||||
hedge_ratio_: Optional[float]
|
||||
spread_mean_: Optional[float]
|
||||
spread_std_: Optional[float]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
config: Dict[str, Any],
|
||||
market_data: pd.DataFrame,
|
||||
symbol_a: str,
|
||||
symbol_b: str,
|
||||
):
|
||||
super().__init__(config, market_data, symbol_a, symbol_b)
|
||||
self.zscore_model_ = None
|
||||
self.pair_predict_result_ = None
|
||||
self.zscore_df_ = None
|
||||
self.hedge_ratio_ = None
|
||||
self.spread_mean_ = None
|
||||
self.spread_std_ = None
|
||||
|
||||
def _fit_zscore(self) -> None:
|
||||
"""Fit OLS on the training window and compute training z-score."""
|
||||
assert self.training_df_ is not None
|
||||
|
||||
# Extract price series for the two symbols from the training frame.
|
||||
px_df = self.training_df_[self.colnames()]
|
||||
symbol_a_px = px_df.iloc[:, 0]
|
||||
symbol_b_px = px_df.iloc[:, 1]
|
||||
|
||||
# Align indexes and fit OLS: symbol_a ~ const + symbol_b
|
||||
symbol_a_px, symbol_b_px = symbol_a_px.align(symbol_b_px, join="inner")
|
||||
X = sm.add_constant(symbol_b_px)
|
||||
self.zscore_model_ = sm.OLS(symbol_a_px, X).fit()
|
||||
|
||||
# Hedge ratio is the slope on symbol_b
|
||||
params = self.zscore_model_.params
|
||||
self.hedge_ratio_ = float(params.iloc[1]) if len(params) > 1 else 0.0
|
||||
|
||||
# Training spread and its standardized z-score
|
||||
spread = symbol_a_px - self.hedge_ratio_ * symbol_b_px
|
||||
self.spread_mean_ = float(spread.mean())
|
||||
self.spread_std_ = float(spread.std(ddof=0)) if spread.std(ddof=0) != 0 else 1.0
|
||||
self.zscore_df_ = (spread - self.spread_mean_) / self.spread_std_
|
||||
|
||||
def predict(self) -> pd.DataFrame:
|
||||
"""Apply fitted hedge ratio to the testing frame and return a
|
||||
dataframe with canonical columns:
|
||||
- disequilibrium: signed z-score
|
||||
- scaled_disequilibrium: absolute z-score
|
||||
- signed_scaled_disequilibrium: same as disequilibrium (keeps sign)
|
||||
"""
|
||||
# Fit on training window
|
||||
self._fit_zscore()
|
||||
assert self.zscore_df_ is not None
|
||||
assert self.hedge_ratio_ is not None
|
||||
assert self.spread_mean_ is not None and self.spread_std_ is not None
|
||||
|
||||
# Keep training columns for inspection
|
||||
self.training_df_["disequilibrium"] = self.zscore_df_
|
||||
self.training_df_["scaled_disequilibrium"] = self.zscore_df_.abs()
|
||||
|
||||
# Apply model to testing frame
|
||||
assert self.testing_df_ is not None
|
||||
test_df = self.testing_df_.copy()
|
||||
px_test = test_df[self.colnames()]
|
||||
a_test = px_test.iloc[:, 0]
|
||||
b_test = px_test.iloc[:, 1]
|
||||
a_test, b_test = a_test.align(b_test, join="inner")
|
||||
|
||||
# Compute test spread and standardize using training mean/std
|
||||
test_spread = a_test - self.hedge_ratio_ * b_test
|
||||
test_zscore = (test_spread - self.spread_mean_) / self.spread_std_
|
||||
|
||||
# Attach canonical columns
|
||||
# Align back to test_df index if needed
|
||||
test_zscore = test_zscore.reindex(test_df.index)
|
||||
test_df["disequilibrium"] = test_zscore
|
||||
test_df["signed_scaled_disequilibrium"] = test_zscore
|
||||
test_df["scaled_disequilibrium"] = test_zscore.abs()
|
||||
|
||||
# Reset index and accumulate results across windows
|
||||
test_df = test_df.reset_index(drop=True)
|
||||
if self.pair_predict_result_ is None:
|
||||
self.pair_predict_result_ = test_df
|
||||
else:
|
||||
self.pair_predict_result_ = pd.concat(
|
||||
[self.pair_predict_result_, test_df], ignore_index=True
|
||||
)
|
||||
|
||||
self.pair_predict_result_ = self.pair_predict_result_.reset_index(drop=True)
|
||||
return self.pair_predict_result_.dropna()
|
||||
|
||||
|
||||
class ZScoreRollingFit(RollingFit):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
|
||||
def create_trading_pair(
|
||||
self, config: Dict, market_data: pd.DataFrame, symbol_a: str, symbol_b: str
|
||||
) -> TradingPair:
|
||||
return ZScoreTradingPair(
|
||||
config=config, market_data=market_data, symbol_a=symbol_a, symbol_b=symbol_b
|
||||
)
|
||||
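For reference, a standalone sketch of the z-score construction this class implements, on synthetic data; the series, seed, and names here are illustrative, not repository data:

import numpy as np
import pandas as pd
import statsmodels.api as sm

rng = np.random.default_rng(0)
b = pd.Series(100 + np.cumsum(rng.normal(0, 0.1, 500)), name="px_b")
a = pd.Series(0.8 * b + rng.normal(0, 0.2, 500), name="px_a")  # pair with a known hedge ratio

# OLS of a on const + b; the slope is the hedge ratio
model = sm.OLS(a, sm.add_constant(b)).fit()
hedge_ratio = float(model.params.iloc[1])

# Spread and its standardized z-score, as in _fit_zscore above
spread = a - hedge_ratio * b
zscore = (spread - spread.mean()) / spread.std(ddof=0)
print(f"hedge_ratio={hedge_ratio:.3f}")
print(zscore.describe())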
33 lib/tools/filetools.py Normal file
@ -0,0 +1,33 @@
import os
import glob
import re
from typing import Dict, List, Tuple

DayT = str
DataFileNameT = str


def resolve_datafiles(
    config: Dict, date_pattern: str, instruments: List[Dict[str, str]]
) -> List[Tuple[DayT, DataFileNameT]]:
    resolved_files: List[Tuple[DayT, DataFileNameT]] = []
    for inst in instruments:
        pattern = date_pattern
        inst_type = inst["instrument_type"]
        data_dir = config["market_data_loading"][inst_type]["data_directory"]
        if "*" in pattern or "?" in pattern:
            # Handle wildcards
            if not os.path.isabs(pattern):
                pattern = os.path.join(data_dir, f"{pattern}.mktdata.ohlcv.db")
            matched_files = glob.glob(pattern)
            for matched_file in matched_files:
                match = re.search(r"(\d{8})\.mktdata\.ohlcv\.db$", matched_file)
                assert match is not None
                day = match.group(1)
                resolved_files.append((day, matched_file))
        else:
            # Handle an explicit file path
            if not os.path.isabs(pattern):
                pattern = os.path.join(data_dir, f"{pattern}.mktdata.ohlcv.db")
            resolved_files.append((date_pattern, pattern))
    return sorted(set(resolved_files))  # Remove duplicates and sort
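A minimal usage sketch of resolve_datafiles, assuming the config layout the function indexes into; the directory and date pattern are made up:

# Hypothetical usage; /data/crypto and the date pattern are illustrative.
from tools.filetools import resolve_datafiles

config = {"market_data_loading": {"CRYPTO": {"data_directory": "/data/crypto"}}}
instruments = [{"instrument_type": "CRYPTO"}]

# A wildcard pattern expands to /data/crypto/202401??.mktdata.ohlcv.db
for day, path in resolve_datafiles(config, "202401??", instruments):
    print(day, path)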
21 lib/tools/instruments.py Normal file
@ -0,0 +1,21 @@
import argparse
from typing import Dict, List


def get_instruments(args: argparse.Namespace, config: Dict) -> List[Dict[str, str]]:
    instruments = [
        {
            "symbol": inst.split(":")[0],
            "instrument_type": inst.split(":")[1],
            "exchange_id": inst.split(":")[2],
            "instrument_id_pfx": config["market_data_loading"][inst.split(":")[1]][
                "instrument_id_pfx"
            ],
            "db_table_name": config["market_data_loading"][inst.split(":")[1]][
                "db_table_name"
            ],
        }
        for inst in args.instruments.split(",")
    ]
    return instruments
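A minimal usage sketch of get_instruments. Note that it expects a SYMBOL:TYPE:EXCHANGE triple per instrument (it indexes the third field for exchange_id); the exchange id and config values below are placeholders, not values from this repository:

# Hypothetical example; EXCH and the config values are assumed placeholders.
import argparse

from tools.instruments import get_instruments

args = argparse.Namespace(instruments="GBTC:CRYPTO:EXCH")
config = {
    "market_data_loading": {
        "CRYPTO": {"instrument_id_pfx": "ID-", "db_table_name": "md_bars"}  # assumed
    }
}
print(get_instruments(args, config))
# -> [{'symbol': 'GBTC', 'instrument_type': 'CRYPTO', 'exchange_id': 'EXCH',
#      'instrument_id_pfx': 'ID-', 'db_table_name': 'md_bars'}]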
79 lib/tools/viz/viz_prices.py Normal file
@ -0,0 +1,79 @@
from pt_strategy.research_strategy import PtResearchStrategy


def visualize_prices(strategy: PtResearchStrategy, trading_date: str) -> None:
    # Plot raw price data
    import matplotlib.pyplot as plt
    # Set the plotting style
    import seaborn as sns

    pair = strategy.trading_pair_
    SYMBOL_A = pair.symbol_a_
    SYMBOL_B = pair.symbol_b_
    TRD_DATE = f"{trading_date[0:4]}-{trading_date[4:6]}-{trading_date[6:8]}"

    plt.style.use('seaborn-v0_8')
    sns.set_palette("husl")
    plt.rcParams['figure.figsize'] = (15, 10)

    # Get column names for the trading pair
    colname_a, colname_b = pair.colnames()
    price_data = strategy.pt_mkt_data_.market_data_df_.copy()

    # Create separate subplots for better visibility
    fig_price, price_axes = plt.subplots(2, 1, figsize=(18, 10))

    # Plot SYMBOL_A
    price_axes[0].plot(price_data['tstamp'], price_data[colname_a], alpha=0.7,
                       label=f'{SYMBOL_A}', linewidth=1, color='blue')
    price_axes[0].set_title(f'{SYMBOL_A} Price Data ({TRD_DATE})')
    price_axes[0].set_ylabel(f'{SYMBOL_A} Price')
    price_axes[0].legend()
    price_axes[0].grid(True)

    # Plot SYMBOL_B
    price_axes[1].plot(price_data['tstamp'], price_data[colname_b], alpha=0.7,
                       label=f'{SYMBOL_B}', linewidth=1, color='red')
    price_axes[1].set_title(f'{SYMBOL_B} Price Data ({TRD_DATE})')
    price_axes[1].set_ylabel(f'{SYMBOL_B} Price')
    price_axes[1].set_xlabel('Time')
    price_axes[1].legend()
    price_axes[1].grid(True)

    plt.tight_layout()
    plt.show()

    # Plot normalized prices and the price ratio
    fig, axes = plt.subplots(2, 1, figsize=(18, 12))

    # Normalized prices for comparison
    norm_a = price_data[colname_a] / price_data[colname_a].iloc[0]
    norm_b = price_data[colname_b] / price_data[colname_b].iloc[0]

    axes[0].plot(price_data['tstamp'], norm_a, label=f'{SYMBOL_A} (normalized)', alpha=0.8, linewidth=1)
    axes[0].plot(price_data['tstamp'], norm_b, label=f'{SYMBOL_B} (normalized)', alpha=0.8, linewidth=1)
    axes[0].set_title(f'Normalized Price Comparison (Base = 1.0) ({TRD_DATE})')
    axes[0].set_ylabel('Normalized Price')
    axes[0].legend()
    axes[0].grid(True)

    # Price ratio
    price_ratio = price_data[colname_a] / price_data[colname_b]
    axes[1].plot(price_data['tstamp'], price_ratio, label=f'{SYMBOL_A}/{SYMBOL_B} Ratio', color='green', alpha=0.8, linewidth=1)
    axes[1].set_title(f'Price Ratio Px({SYMBOL_A})/Px({SYMBOL_B}) ({TRD_DATE})')
    axes[1].set_ylabel('Ratio')
    axes[1].set_xlabel('Time')
    axes[1].legend()
    axes[1].grid(True)

    plt.tight_layout()
    plt.show()

    # Print basic statistics
    print("\nPrice Statistics:")
    print(f"  {SYMBOL_A}: Mean=${price_data[colname_a].mean():.2f}, Std=${price_data[colname_a].std():.2f}")
    print(f"  {SYMBOL_B}: Mean=${price_data[colname_b].mean():.2f}, Std=${price_data[colname_b].std():.2f}")
    print(f"  Price Ratio: Mean={price_ratio.mean():.2f}, Std={price_ratio.std():.2f}")
    print(f"  Correlation: {price_data[colname_a].corr(price_data[colname_b]):.4f}")
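A minimal invocation sketch, assuming a PtResearchStrategy instance that has already been run; the date string is illustrative:

# Hypothetical call; `strategy` is assumed to hold loaded market data.
from tools.viz.viz_prices import visualize_prices

visualize_prices(strategy, "20240102")  # date is a placeholder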
507 lib/tools/viz/viz_trades.py Normal file
@ -0,0 +1,507 @@
from __future__ import annotations

import os
from typing import Any, Dict

from pt_strategy.results import (PairResearchResult, create_result_database,
                                 store_config_in_database)
from pt_strategy.research_strategy import PtResearchStrategy
from tools.filetools import resolve_datafiles
from tools.instruments import get_instruments


def visualize_trades(strategy: PtResearchStrategy, results: PairResearchResult, trading_date: str) -> None:
    import pandas as pd
    import plotly.express as px
    import plotly.graph_objects as go
    import plotly.offline as pyo
    from IPython.display import HTML
    from plotly.subplots import make_subplots

    pair = strategy.trading_pair_
    trades = results.trades_[trading_date].copy()
    origin_mkt_data_df = strategy.pt_mkt_data_.origin_mkt_data_df_
    mkt_data_df = strategy.pt_mkt_data_.market_data_df_
    TRD_DATE = f"{trading_date[0:4]}-{trading_date[4:6]}-{trading_date[6:8]}"
    SYMBOL_A = pair.symbol_a_
    SYMBOL_B = pair.symbol_b_

    print(f"\nCreated trading pair: {pair}")
    print(f"Market data shape: {pair.market_data_.shape}")
    print(f"Column names: {pair.colnames()}")

    # Configure plotly for offline mode
    pyo.init_notebook_mode(connected=True)

    # Strategy-specific interactive visualization
    assert strategy.config_ is not None

    print("=== SLIDING FIT INTERACTIVE VISUALIZATION ===")
    print("Note: Rolling Fit strategy visualization with interactive plotly charts")

    # Create a consistent timeline: the superset of timestamps from both dataframes
    all_timestamps = sorted(set(mkt_data_df['tstamp']))

    # Create a unified timeline dataframe for consistent plotting
    timeline_df = pd.DataFrame({'tstamp': all_timestamps})

    # Merge with the predicted data to get dis-equilibrium values
    timeline_df = timeline_df.merge(strategy.predictions_[['tstamp', 'disequilibrium', 'scaled_disequilibrium', 'signed_scaled_disequilibrium']],
                                    on='tstamp', how='left')

    # Get Symbol_A and Symbol_B market data
    colname_a, colname_b = pair.colnames()
    symbol_a_data = mkt_data_df[['tstamp', colname_a]].copy()
    symbol_b_data = mkt_data_df[['tstamp', colname_b]].copy()

    norm_a = symbol_a_data[colname_a] / symbol_a_data[colname_a].iloc[0]
    norm_b = symbol_b_data[colname_b] / symbol_b_data[colname_b].iloc[0]

    print(f"Using consistent timeline with {len(timeline_df)} timestamps")
    print(f"Timeline range: {timeline_df['tstamp'].min()} to {timeline_df['tstamp'].max()}")

    # Create subplots with price charts at the bottom
    fig = make_subplots(
        rows=4, cols=1,
        row_heights=[0.3, 0.4, 0.15, 0.15],
        subplot_titles=[
            f'Dis-equilibrium with Trading Thresholds ({TRD_DATE})',
            f'Normalized Price Comparison with BUY/SELL Signals - {SYMBOL_A}&{SYMBOL_B} ({TRD_DATE})',
            f'{SYMBOL_A} Market Data with Trading Signals ({TRD_DATE})',
            f'{SYMBOL_B} Market Data with Trading Signals ({TRD_DATE})',
        ],
        vertical_spacing=0.06,
        specs=[[{"secondary_y": False}],
               [{"secondary_y": False}],
               [{"secondary_y": False}],
               [{"secondary_y": False}]]
    )

    # 1. Scaled dis-equilibrium with thresholds, using the consistent timeline
    fig.add_trace(
        go.Scatter(
            x=timeline_df['tstamp'],
            y=timeline_df['scaled_disequilibrium'],
            name='Absolute Scaled Dis-equilibrium',
            line=dict(color='green', width=2),
            opacity=0.8
        ),
        row=1, col=1
    )

    fig.add_trace(
        go.Scatter(
            x=timeline_df['tstamp'],
            y=timeline_df['signed_scaled_disequilibrium'],
            name='Scaled Dis-equilibrium',
            line=dict(color='darkmagenta', width=2),
            opacity=0.8
        ),
        row=1, col=1
    )

    # Add threshold lines to the first subplot
    fig.add_shape(
        type="line",
        x0=timeline_df['tstamp'].min(),
        x1=timeline_df['tstamp'].max(),
        y0=strategy.config_['dis-equilibrium_open_trshld'],
        y1=strategy.config_['dis-equilibrium_open_trshld'],
        line=dict(color="purple", width=2, dash="dot"),
        opacity=0.7,
        row=1, col=1
    )

    fig.add_shape(
        type="line",
        x0=timeline_df['tstamp'].min(),
        x1=timeline_df['tstamp'].max(),
        y0=-strategy.config_['dis-equilibrium_open_trshld'],
        y1=-strategy.config_['dis-equilibrium_open_trshld'],
        line=dict(color="purple", width=2, dash="dot"),
        opacity=0.7,
        row=1, col=1
    )

    fig.add_shape(
        type="line",
        x0=timeline_df['tstamp'].min(),
        x1=timeline_df['tstamp'].max(),
        y0=strategy.config_['dis-equilibrium_close_trshld'],
        y1=strategy.config_['dis-equilibrium_close_trshld'],
        line=dict(color="brown", width=2, dash="dot"),
        opacity=0.7,
        row=1, col=1
    )

    fig.add_shape(
        type="line",
        x0=timeline_df['tstamp'].min(),
        x1=timeline_df['tstamp'].max(),
        y0=-strategy.config_['dis-equilibrium_close_trshld'],
        y1=-strategy.config_['dis-equilibrium_close_trshld'],
        line=dict(color="brown", width=2, dash="dot"),
        opacity=0.7,
        row=1, col=1
    )

    fig.add_shape(
        type="line",
        x0=timeline_df['tstamp'].min(),
        x1=timeline_df['tstamp'].max(),
        y0=0,
        y1=0,
        line=dict(color="black", width=1, dash="solid"),
        opacity=0.5,
        row=1, col=1
    )

    # Add normalized price lines
    fig.add_trace(
        go.Scatter(
            x=mkt_data_df['tstamp'],
            y=norm_a,
            name=f'{SYMBOL_A} (Normalized)',
            line=dict(color='blue', width=2),
            opacity=0.8
        ),
        row=2, col=1
    )

    fig.add_trace(
        go.Scatter(
            x=mkt_data_df['tstamp'],
            y=norm_b,
            name=f'{SYMBOL_B} (Normalized)',
            line=dict(color='orange', width=2),
            opacity=0.8,
        ),
        row=2, col=1
    )

    # Add BUY and SELL signals if available
    if trades is not None and len(trades) > 0:
        # Define signal groups to avoid legend repetition
        signal_groups = {}

        # Process all trades and group by signal type (ignore OPEN/CLOSE status)
        for _, trade in trades.iterrows():
            symbol = trade['symbol']
            side = trade['side']
            action = trade['action']

            # Create the signal group key (without status, to combine OPEN/CLOSE)
            signal_key = f"{symbol} {side} {action}"

            # Find the normalized price for this trade
            trade_time = trade['time']
            if symbol == SYMBOL_A:
                closest_idx = mkt_data_df['tstamp'].searchsorted(trade_time)
                if closest_idx < len(norm_a):
                    norm_price = norm_a.iloc[closest_idx]
                else:
                    norm_price = norm_a.iloc[-1]
            else:  # SYMBOL_B
                closest_idx = mkt_data_df['tstamp'].searchsorted(trade_time)
                if closest_idx < len(norm_b):
                    norm_price = norm_b.iloc[closest_idx]
                else:
                    norm_price = norm_b.iloc[-1]

            # Initialize the group if it does not exist yet
            if signal_key not in signal_groups:
                signal_groups[signal_key] = {
                    'times': [],
                    'prices': [],
                    'actual_prices': [],
                    'symbol': symbol,
                    'side': side,
                    'action': trade['action']
                }

            # Add to the group
            signal_groups[signal_key]['times'].append(trade_time)
            signal_groups[signal_key]['prices'].append(norm_price)
            signal_groups[signal_key]['actual_prices'].append(trade['price'])

        # Add each signal group as a single trace
        for signal_key, group_data in signal_groups.items():
            symbol = group_data['symbol']
            side = group_data['side']

            # Determine marker properties (same for all OPEN/CLOSE of the same side)
            is_close: bool = (group_data['action'] == "CLOSE")

            if 'BUY' in side:
                marker_color = 'green'
                marker_symbol = 'triangle-up'
                marker_size = 14
            else:  # SELL
                marker_color = 'red'
                marker_symbol = 'triangle-down'
                marker_size = 14

            # Create hover text for each point in the group
            hover_texts = []
            for time, norm_price, actual_price in zip(group_data['times'],
                                                      group_data['prices'],
                                                      group_data['actual_prices']):
                # Find the corresponding trade to get the action for the hover text
                trade_info = trades[(trades['time'] == time) &
                                    (trades['symbol'] == symbol) &
                                    (trades['side'] == side)]
                if len(trade_info) > 0:
                    action = trade_info.iloc[0]['action']
                    hover_texts.append(f'<b>{signal_key} {action}</b><br>' +
                                       f'Time: {time}<br>' +
                                       f'Normalized Price: {norm_price:.4f}<br>' +
                                       f'Actual Price: ${actual_price:.2f}')
                else:
                    hover_texts.append(f'<b>{signal_key}</b><br>' +
                                       f'Time: {time}<br>' +
                                       f'Normalized Price: {norm_price:.4f}<br>' +
                                       f'Actual Price: ${actual_price:.2f}')

            fig.add_trace(
                go.Scatter(
                    x=group_data['times'],
                    y=group_data['prices'],
                    mode='markers',
                    name=signal_key,
                    marker=dict(
                        color=marker_color,
                        size=marker_size,
                        symbol=marker_symbol,
                        line=dict(width=2, color='black') if is_close else None
                    ),
                    showlegend=True,
                    hovertemplate='%{text}<extra></extra>',
                    text=hover_texts
                ),
                row=2, col=1
            )

        # -----------------------------

        # 3. Symbol_A Market Data with Trading Signals
        fig.add_trace(
            go.Scatter(
                x=symbol_a_data['tstamp'],
                y=symbol_a_data[colname_a],
                name=f'{SYMBOL_A} Price',
                line=dict(color='blue', width=2),
                opacity=0.8
            ),
            row=3, col=1
        )

        # Filter trades for Symbol_A
        symbol_a_trades = trades[trades['symbol'] == SYMBOL_A]
        print(f"\nSymbol_A trades:\n{symbol_a_trades}")

        if len(symbol_a_trades) > 0:
            # Separate trades by side and action for different markers
            buy_open_trades = symbol_a_trades[(symbol_a_trades['side'].str.contains('BUY', na=False)) &
                                              (symbol_a_trades['action'].str.contains('OPEN', na=False))]
            buy_close_trades = symbol_a_trades[(symbol_a_trades['side'].str.contains('BUY', na=False)) &
                                               (symbol_a_trades['action'].str.contains('CLOSE', na=False))]

            sell_open_trades = symbol_a_trades[(symbol_a_trades['side'].str.contains('SELL', na=False)) &
                                               (symbol_a_trades['action'].str.contains('OPEN', na=False))]
            sell_close_trades = symbol_a_trades[(symbol_a_trades['side'].str.contains('SELL', na=False)) &
                                                (symbol_a_trades['action'].str.contains('CLOSE', na=False))]

            # Add BUY OPEN signals
            if len(buy_open_trades) > 0:
                fig.add_trace(
                    go.Scatter(
                        x=buy_open_trades['time'],
                        y=buy_open_trades['price'],
                        mode='markers',
                        name=f'{SYMBOL_A} BUY OPEN',
                        marker=dict(color='green', size=12, symbol='triangle-up'),
                        showlegend=True
                    ),
                    row=3, col=1
                )

            # Add BUY CLOSE signals (a black outline marks a closing trade)
            if len(buy_close_trades) > 0:
                fig.add_trace(
                    go.Scatter(
                        x=buy_close_trades['time'],
                        y=buy_close_trades['price'],
                        mode='markers',
                        name=f'{SYMBOL_A} BUY CLOSE',
                        marker=dict(color='green', size=12, symbol='triangle-up',
                                    line=dict(width=2, color='black')),
                        showlegend=True
                    ),
                    row=3, col=1
                )

            # Add SELL OPEN signals
            if len(sell_open_trades) > 0:
                fig.add_trace(
                    go.Scatter(
                        x=sell_open_trades['time'],
                        y=sell_open_trades['price'],
                        mode='markers',
                        name=f'{SYMBOL_A} SELL OPEN',
                        marker=dict(color='red', size=12, symbol='triangle-down'),
                        showlegend=True
                    ),
                    row=3, col=1
                )

            # Add SELL CLOSE signals
            if len(sell_close_trades) > 0:
                fig.add_trace(
                    go.Scatter(
                        x=sell_close_trades['time'],
                        y=sell_close_trades['price'],
                        mode='markers',
                        name=f'{SYMBOL_A} SELL CLOSE',
                        marker=dict(color='red', size=12, symbol='triangle-down',
                                    line=dict(width=2, color='black')),
                        showlegend=True
                    ),
                    row=3, col=1
                )

        # 4. Symbol_B Market Data with Trading Signals
        fig.add_trace(
            go.Scatter(
                x=symbol_b_data['tstamp'],
                y=symbol_b_data[colname_b],
                name=f'{SYMBOL_B} Price',
                line=dict(color='orange', width=2),
                opacity=0.8
            ),
            row=4, col=1
        )

        # Add trading signals for Symbol_B if available
        symbol_b_trades = trades[trades['symbol'] == SYMBOL_B]
        print(f"\nSymbol_B trades:\n{symbol_b_trades}")

        if len(symbol_b_trades) > 0:
            # Separate trades by side and action for different markers
            buy_open_trades = symbol_b_trades[(symbol_b_trades['side'].str.contains('BUY', na=False)) &
                                              (symbol_b_trades['action'].str.startswith('OPEN', na=False))]
            buy_close_trades = symbol_b_trades[(symbol_b_trades['side'].str.contains('BUY', na=False)) &
                                               (symbol_b_trades['action'].str.startswith('CLOSE', na=False))]

            sell_open_trades = symbol_b_trades[(symbol_b_trades['side'].str.contains('SELL', na=False)) &
                                               (symbol_b_trades['action'].str.contains('OPEN', na=False))]
            sell_close_trades = symbol_b_trades[(symbol_b_trades['side'].str.contains('SELL', na=False)) &
                                                (symbol_b_trades['action'].str.contains('CLOSE', na=False))]

            # Add BUY OPEN signals
            if len(buy_open_trades) > 0:
                fig.add_trace(
                    go.Scatter(
                        x=buy_open_trades['time'],
                        y=buy_open_trades['price'],
                        mode='markers',
                        name=f'{SYMBOL_B} BUY OPEN',
                        marker=dict(color='darkgreen', size=12, symbol='triangle-up'),
                        showlegend=True
                    ),
                    row=4, col=1
                )

            # Add BUY CLOSE signals
            if len(buy_close_trades) > 0:
                fig.add_trace(
                    go.Scatter(
                        x=buy_close_trades['time'],
                        y=buy_close_trades['price'],
                        mode='markers',
                        name=f'{SYMBOL_B} BUY CLOSE',
                        marker=dict(color='green', size=12, symbol='triangle-up',
                                    line=dict(width=2, color='black')),
                        showlegend=True
                    ),
                    row=4, col=1
                )

            # Add SELL OPEN signals
            if len(sell_open_trades) > 0:
                fig.add_trace(
                    go.Scatter(
                        x=sell_open_trades['time'],
                        y=sell_open_trades['price'],
                        mode='markers',
                        name=f'{SYMBOL_B} SELL OPEN',
                        marker=dict(color='red', size=12, symbol='triangle-down'),
                        showlegend=True
                    ),
                    row=4, col=1
                )

            # Add SELL CLOSE signals
            if len(sell_close_trades) > 0:
                fig.add_trace(
                    go.Scatter(
                        x=sell_close_trades['time'],
                        y=sell_close_trades['price'],
                        mode='markers',
                        name=f'{SYMBOL_B} SELL CLOSE',
                        marker=dict(color='red', size=12, symbol='triangle-down',
                                    line=dict(width=2, color='black')),
                        showlegend=True
                    ),
                    row=4, col=1
                )

        # Update the layout
        fig.update_layout(
            height=1600,
            title_text=f"Strategy Analysis - {SYMBOL_A} & {SYMBOL_B} ({TRD_DATE})",
            showlegend=True,
            template="plotly_white",
            plot_bgcolor='lightgray',
        )

        # Update y-axis labels (rows match the subplot order above)
        fig.update_yaxes(title_text="Scaled Dis-equilibrium", row=1, col=1)
        fig.update_yaxes(title_text="Normalized Price (Base = 1.0)", row=2, col=1)
        fig.update_yaxes(title_text=f"{SYMBOL_A} Price ($)", row=3, col=1)
        fig.update_yaxes(title_text=f"{SYMBOL_B} Price ($)", row=4, col=1)

        # Update x-axis labels and ensure a consistent time range
        time_range = [timeline_df['tstamp'].min(), timeline_df['tstamp'].max()]
        fig.update_xaxes(range=time_range, row=1, col=1)
        fig.update_xaxes(range=time_range, row=2, col=1)
        fig.update_xaxes(range=time_range, row=3, col=1)
        fig.update_xaxes(title_text="Time", range=time_range, row=4, col=1)

        # Display using plotly offline mode
        # pyo.iplot(fig)
        fig.show()

    else:
        print("No interactive visualization data available - strategy may not have run successfully")

    print("\nChart shows:")
    print(f"- {SYMBOL_A} and {SYMBOL_B} prices normalized to start at 1.0")
    print("- BUY signals shown as green triangles pointing up")
    print("- SELL signals shown as red triangles pointing down")
    print("- All BUY signals per symbol grouped together, all SELL signals per symbol grouped together")
    print("- Hover over markers to see individual trade details (OPEN/CLOSE status)")

    if trades is not None and len(trades) > 0:
        print(f"- Total signals displayed: {len(trades)}")
        print(f"- {SYMBOL_A} signals: {len(trades[trades['symbol'] == SYMBOL_A])}")
        print(f"- {SYMBOL_B} signals: {len(trades[trades['symbol'] == SYMBOL_B])}")
    else:
        print("- No trading signals to display")
@ -1,169 +0,0 @@
#!/usr/bin/env python3
"""
Database inspector utility for the pairs trading results database.
Provides functionality to view all tables and their contents.
"""

import sqlite3
import sys
import json
import os
from typing import List, Dict, Any


def list_tables(db_path: str) -> List[str]:
    """List all tables in the database."""
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()

    cursor.execute("""
        SELECT name FROM sqlite_master
        WHERE type='table'
        ORDER BY name
    """)

    tables = [row[0] for row in cursor.fetchall()]
    conn.close()
    return tables


def view_table_schema(db_path: str, table_name: str) -> None:
    """View the schema of a specific table."""
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()

    cursor.execute(f"PRAGMA table_info({table_name})")
    columns = cursor.fetchall()

    print(f"\nTable: {table_name}")
    print("-" * 50)
    print("Column Name".ljust(20) + "Type".ljust(15) + "Not Null".ljust(10) + "Default")
    print("-" * 50)

    for col in columns:
        cid, name, type_, not_null, default_value, pk = col
        print(f"{name}".ljust(20) + f"{type_}".ljust(15) + f"{bool(not_null)}".ljust(10) + f"{default_value or ''}")

    conn.close()


def view_config_table(db_path: str, limit: int = 10) -> None:
    """View entries from the config table."""
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()

    cursor.execute(f"""
        SELECT id, run_timestamp, config_file_path, fit_method_class,
               datafiles, instruments, config_json
        FROM config
        ORDER BY run_timestamp DESC
        LIMIT {limit}
    """)

    rows = cursor.fetchall()

    if not rows:
        print("No configuration entries found.")
        return

    print(f"\nMost recent {len(rows)} configuration entries:")
    print("=" * 80)

    for row in rows:
        id, run_timestamp, config_file_path, fit_method_class, datafiles, instruments, config_json = row

        print(f"ID: {id} | {run_timestamp}")
        print(f"Config: {config_file_path} | Strategy: {fit_method_class}")
        print(f"Files: {datafiles}")
        print(f"Instruments: {instruments}")
        print("-" * 80)

    conn.close()


def view_results_summary(db_path: str) -> None:
    """View a summary of trading results."""
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()

    # Get the results summary
    cursor.execute("""
        SELECT date, COUNT(*) as trade_count,
               ROUND(SUM(symbol_return), 2) as total_return
        FROM pt_bt_results
        GROUP BY date
        ORDER BY date DESC
    """)

    results = cursor.fetchall()

    if not results:
        print("No trading results found.")
        return

    print("\nTrading Results Summary:")
    print("-" * 50)
    print("Date".ljust(15) + "Trades".ljust(10) + "Total Return %")
    print("-" * 50)

    for date, trade_count, total_return in results:
        print(f"{date}".ljust(15) + f"{trade_count}".ljust(10) + f"{total_return}")

    # Get a summary of outstanding positions
    cursor.execute("""
        SELECT COUNT(*) as position_count,
               ROUND(SUM(unrealized_return), 2) as total_unrealized
        FROM outstanding_positions
    """)

    outstanding = cursor.fetchone()
    if outstanding and outstanding[0] > 0:
        print(f"\nOutstanding Positions: {outstanding[0]} positions")
        print(f"Total Unrealized Return: {outstanding[1]}%")

    conn.close()


def main() -> None:
    if len(sys.argv) < 2:
        print("Usage: python db_inspector.py <database_path> [command]")
        print("Commands:")
        print("  tables  - List all tables")
        print("  schema  - Show schema for all tables")
        print("  config  - View configuration entries")
        print("  results - View trading results summary")
        print("  all     - Show everything (default)")
        print("\nExample: python db_inspector.py results/equity.db config")
        sys.exit(1)

    db_path = sys.argv[1]
    command = sys.argv[2] if len(sys.argv) > 2 else "all"

    if not os.path.exists(db_path):
        print(f"Database file not found: {db_path}")
        sys.exit(1)

    try:
        if command in ["tables", "all"]:
            tables = list_tables(db_path)
            print(f"Tables in database: {', '.join(tables)}")

        if command in ["schema", "all"]:
            tables = list_tables(db_path)
            for table in tables:
                view_table_schema(db_path, table)

        if command in ["config", "all"]:
            if "config" in list_tables(db_path):
                view_config_table(db_path)
            else:
                print("Config table not found.")

        if command in ["results", "all"]:
            if "pt_bt_results" in list_tables(db_path):
                view_results_summary(db_path)
            else:
                print("Results table not found.")

    except Exception as e:
        print(f"Error inspecting database: {str(e)}")
        import traceback
        traceback.print_exc()


if __name__ == "__main__":
    main()
@ -16,7 +16,8 @@
    "autoImportCompletions": true,
    "autoSearchPaths": true,
    "extraPaths": [
        "lib"
        "lib",
        ".."
    ],
    "stubPath": "./typings",
    "venvPath": ".",
213 requirements.txt
@ -61,7 +61,7 @@ protobuf>=3.12.4
psutil>=5.9.0
ptyprocess>=0.7.0
pycurl>=7.44.1
# pyelftools>=0.27
pyelftools>=0.27
Pygments>=2.11.2
pyparsing>=2.4.7
pyrsistent>=0.18.1
@ -69,7 +69,7 @@ python-debian>=0.1.43 #+ubuntu1.1
python-dotenv>=0.19.2
python-magic>=0.4.24
python-xlib>=0.29
# pyxdg>=0.27
pyxdg>=0.27
PyYAML>=6.0
reportlab>=3.6.8
requests>=2.25.1
@ -78,117 +78,118 @@ scipy<1.13.0
seaborn>=0.13.2
SecretStorage>=3.3.1
setproctitle>=1.2.2
simpleeval>=1.0.3
six>=1.16.0
soupsieve>=2.3.1
ssh-import-id>=5.11
statsmodels>=0.14.4
# texttable>=1.6.4
texttable>=1.6.4
tldextract>=3.1.2
tomli>=1.2.2
######## typed-ast>=1.4.3
# types-aiofiles>=0.1
# types-annoy>=1.17
# types-appdirs>=1.4
# types-atomicwrites>=1.4
# types-aws-xray-sdk>=2.8
# types-babel>=2.9
# types-backports-abc>=0.5
# types-backports.ssl-match-hostname>=3.7
# types-beautifulsoup4>=4.10
# types-bleach>=4.1
# types-boto>=2.49
# types-braintree>=4.11
# types-cachetools>=4.2
# types-caldav>=0.8
# types-certifi>=2020.4
# types-characteristic>=14.3
# types-chardet>=4.0
# types-click>=7.1
# types-click-spinner>=0.1
# types-colorama>=0.4
# types-commonmark>=0.9
# types-contextvars>=0.1
# types-croniter>=1.0
# types-cryptography>=3.3
# types-dataclasses>=0.1
# types-dateparser>=1.0
# types-DateTimeRange>=0.1
# types-decorator>=0.1
# types-Deprecated>=1.2
# types-docopt>=0.6
# types-docutils>=0.17
# types-editdistance>=0.5
# types-emoji>=1.2
# types-entrypoints>=0.3
# types-enum34>=1.1
# types-filelock>=3.2
# types-first>=2.0
# types-Flask>=1.1
# types-freezegun>=1.1
# types-frozendict>=0.1
# types-futures>=3.3
# types-html5lib>=1.1
# types-httplib2>=0.19
# types-humanfriendly>=9.2
# types-ipaddress>=1.0
# types-itsdangerous>=1.1
# types-JACK-Client>=0.1
# types-Jinja2>=2.11
# types-jmespath>=0.10
# types-jsonschema>=3.2
# types-Markdown>=3.3
# types-MarkupSafe>=1.1
# types-mock>=4.0
# types-mypy-extensions>=0.4
# types-mysqlclient>=2.0
# types-oauthlib>=3.1
# types-orjson>=3.6
# types-paramiko>=2.7
# types-Pillow>=8.3
# types-polib>=1.1
# types-prettytable>=2.1
# types-protobuf>=3.17
# types-psutil>=5.8
# types-psycopg2>=2.9
# types-pyaudio>=0.2
# types-pycurl>=0.1
# types-pyfarmhash>=0.2
# types-Pygments>=2.9
# types-PyMySQL>=1.0
# types-pyOpenSSL>=20.0
# types-pyRFC3339>=0.1
# types-pysftp>=0.2
# types-pytest-lazy-fixture>=0.6
# types-python-dateutil>=2.8
# types-python-gflags>=3.1
# types-python-nmap>=0.6
# types-python-slugify>=5.0
# types-pytz>=2021.1
# types-pyvmomi>=7.0
# types-PyYAML>=5.4
# types-redis>=3.5
# types-requests>=2.25
# types-retry>=0.9
# types-selenium>=3.141
# types-Send2Trash>=1.8
# types-setuptools>=57.4
# types-simplejson>=3.17
# types-singledispatch>=3.7
# types-six>=1.16
# types-slumber>=0.7
# types-stripe>=2.59
# types-tabulate>=0.8
# types-termcolor>=1.1
# types-toml>=0.10
# types-toposort>=1.6
# types-ttkthemes>=3.2
# types-typed-ast>=1.4
# types-tzlocal>=0.1
# types-ujson>=0.1
# types-vobject>=0.9
# types-waitress>=0.1
#types-Werkzeug>=1.0
#types-xxhash>=2.0
types-aiofiles>=0.1
types-annoy>=1.17
types-appdirs>=1.4
types-atomicwrites>=1.4
types-aws-xray-sdk>=2.8
types-babel>=2.9
types-backports-abc>=0.5
types-backports.ssl-match-hostname>=3.7
types-beautifulsoup4>=4.10
types-bleach>=4.1
types-boto>=2.49
types-braintree>=4.11
types-cachetools>=4.2
types-caldav>=0.8
types-certifi>=2020.4
types-characteristic>=14.3
types-chardet>=4.0
types-click>=7.1
types-click-spinner>=0.1
types-colorama>=0.4
types-commonmark>=0.9
types-contextvars>=0.1
types-croniter>=1.0
types-cryptography>=3.3
types-dataclasses>=0.1
types-dateparser>=1.0
types-DateTimeRange>=0.1
types-decorator>=0.1
types-Deprecated>=1.2
types-docopt>=0.6
types-docutils>=0.17
types-editdistance>=0.5
types-emoji>=1.2
types-entrypoints>=0.3
types-enum34>=1.1
types-filelock>=3.2
types-first>=2.0
types-Flask>=1.1
types-freezegun>=1.1
types-frozendict>=0.1
types-futures>=3.3
types-html5lib>=1.1
types-httplib2>=0.19
types-humanfriendly>=9.2
types-ipaddress>=1.0
types-itsdangerous>=1.1
types-JACK-Client>=0.1
types-Jinja2>=2.11
types-jmespath>=0.10
types-jsonschema>=3.2
types-Markdown>=3.3
types-MarkupSafe>=1.1
types-mock>=4.0
types-mypy-extensions>=0.4
types-mysqlclient>=2.0
types-oauthlib>=3.1
types-orjson>=3.6
types-paramiko>=2.7
types-Pillow>=8.3
types-polib>=1.1
types-prettytable>=2.1
types-protobuf>=3.17
types-psutil>=5.8
types-psycopg2>=2.9
types-pyaudio>=0.2
types-pycurl>=0.1
types-pyfarmhash>=0.2
types-Pygments>=2.9
types-PyMySQL>=1.0
types-pyOpenSSL>=20.0
types-pyRFC3339>=0.1
types-pysftp>=0.2
types-pytest-lazy-fixture>=0.6
types-python-dateutil>=2.8
types-python-gflags>=3.1
types-python-nmap>=0.6
types-python-slugify>=5.0
types-pytz>=2021.1
types-pyvmomi>=7.0
types-PyYAML>=5.4
types-redis>=3.5
types-requests>=2.25
types-retry>=0.9
types-selenium>=3.141
types-Send2Trash>=1.8
types-setuptools>=57.4
types-simplejson>=3.17
types-singledispatch>=3.7
types-six>=1.16
types-slumber>=0.7
types-stripe>=2.59
types-tabulate>=0.8
types-termcolor>=1.1
types-toml>=0.10
types-toposort>=1.6
types-ttkthemes>=3.2
types-typed-ast>=1.4
types-tzlocal>=0.1
types-ujson>=0.1
types-vobject>=0.9
types-waitress>=0.1
types-Werkzeug>=1.0
types-xxhash>=2.0
typing-extensions>=3.10.0.2
Unidecode>=1.3.3
urllib3>=1.26.5
106 research/backtest.py Normal file
@ -0,0 +1,106 @@
from __future__ import annotations

import os
from typing import Any, Dict

from pt_strategy.results import (
    PairResearchResult,
    create_result_database,
    store_config_in_database,
)
from pt_strategy.research_strategy import PtResearchStrategy
from tools.filetools import resolve_datafiles
from tools.instruments import get_instruments


def main() -> None:
    import argparse

    from tools.config import expand_filename, load_config

    parser = argparse.ArgumentParser(description="Run pairs trading backtest.")
    parser.add_argument(
        "--config", type=str, required=True, help="Path to the configuration file."
    )
    parser.add_argument(
        "--date_pattern",
        type=str,
        required=True,
        help="Date YYYYMMDD, allows * and ? wildcards",
    )
    parser.add_argument(
        "--instruments",
        type=str,
        required=True,
        help="Comma-separated list of instrument symbols (e.g., COIN:EQUITY,GBTC:CRYPTO)",
    )
    parser.add_argument(
        "--result_db",
        type=str,
        required=True,
        help="Path to SQLite database for storing results. Use 'NONE' to disable database output.",
    )

    args = parser.parse_args()

    config: Dict = load_config(args.config)

    # Resolve data files (CLI takes priority over config)
    instruments = get_instruments(args, config)
    datafiles = resolve_datafiles(config, args.date_pattern, instruments)

    days = list(set([day for day, _ in datafiles]))
    print(f"Found {len(datafiles)} data files to process:")
    for df in datafiles:
        print(f"  - {df}")

    # Create the result database if needed
    if args.result_db.upper() != "NONE":
        args.result_db = expand_filename(args.result_db)
        create_result_database(args.result_db)

    # Initialize a dictionary to store all trade results
    all_results: Dict[str, Dict[str, Any]] = {}
    is_config_stored = False

    # Process each day's data files
    results = PairResearchResult(config=config)
    for day in sorted(days):
        md_datafiles = [datafile for md_day, datafile in datafiles if md_day == day]
        if not all([os.path.exists(datafile) for datafile in md_datafiles]):
            print(f"WARNING: insufficient data files: {md_datafiles}")
            continue
        print(f"\n====== Processing {day} ======")

        if not is_config_stored:
            store_config_in_database(
                db_path=args.result_db,
                config_file_path=args.config,
                config=config,
                datafiles=datafiles,
                instruments=instruments,
            )
            is_config_stored = True

        pt_strategy = PtResearchStrategy(
            config=config, datafiles=md_datafiles, instruments=instruments
        )
        pt_strategy.run()
        results.add_day_results(
            day=day,
            trades=pt_strategy.day_trades(),
            outstanding_positions=pt_strategy.outstanding_positions(),
        )

    results.analyze_pair_performance()

    if args.result_db.upper() != "NONE":
        print(f"\nResults stored in database: {args.result_db}")
    else:
        print("No results to display.")


if __name__ == "__main__":
    main()
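For reference, a minimal sketch of driving this entry point programmatically, for example from a test harness. The flag values are illustrative, and note that get_instruments parses each instrument as a SYMBOL:TYPE:EXCHANGE triple; EXCH and the config path below are placeholders:

# Illustrative only: invoke the backtest entry point with synthetic argv.
import sys

import research.backtest as backtest  # assumed import path

sys.argv = [
    "backtest.py",
    "--config=configuration/example.cfg",  # hypothetical config file
    "--date_pattern=202401??",             # wildcard day pattern
    "--instruments=COIN:EQUITY:EXCH,GBTC:CRYPTO:EXCH",
    "--result_db=NONE",                    # disable database output
]
backtest.main()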
6416 research/notebooks/pair_trading_test.ipynb Normal file
File diff suppressed because one or more lines are too long
@ -1,16 +0,0 @@
{
  "cells": [],
  "metadata": {
    "kernelspec": {
      "display_name": "Python 3",
      "language": "python",
      "name": "python3"
    },
    "language_info": {
      "name": "python",
      "version": "3.12.5"
    }
  },
  "nbformat": 4,
  "nbformat_minor": 2
}
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -5,6 +5,7 @@ from typing import Dict, List, Optional
import pandas as pd
from pt_trading.fit_method import PairsTradingFitMethod


def resolve_datafiles(config: Dict, cli_datafiles: Optional[str] = None) -> List[str]:
    """
    Resolve the list of data files to process.
@ -68,9 +69,9 @@ def create_pairs(

    for datafile in datafiles:
        md_df = load_market_data(
            datafile = datafile,
            instruments = instruments,
            db_table_name = config_copy["market_data_loading"][instruments[0]["instrument_type"]]["db_table_name"],
            datafile=datafile,
            instruments=instruments,
            db_table_name=config_copy["market_data_loading"][instruments[0]["instrument_type"]]["db_table_name"],
            trading_hours=config_copy["trading_hours"],
            extra_minutes=extra_minutes,
        )
@ -1,221 +0,0 @@
import argparse
import asyncio
import glob
import importlib
import os
from datetime import date, datetime
from typing import Any, Dict, List, Optional

import hjson
import pandas as pd

from tools.data_loader import get_available_instruments_from_db, load_market_data
from pt_trading.results import (
    BacktestResult,
    create_result_database,
    store_config_in_database,
    store_results_in_database,
)
from pt_trading.fit_methods import PairsTradingFitMethod
from pt_trading.trading_pair import TradingPair


def run_strategy(
    config: Dict,
    datafile: str,
    fit_method: PairsTradingFitMethod,
    instruments: List[str],
) -> BacktestResult:
    """
    Run the backtest for all pairs using the specified instruments.
    """
    bt_result: BacktestResult = BacktestResult(config=config)

    def _create_pairs(config: Dict, instruments: List[str]) -> List[TradingPair]:
        nonlocal datafile
        all_indexes = range(len(instruments))
        unique_index_pairs = [(i, j) for i in all_indexes for j in all_indexes if i < j]
        pairs = []

        # Update the config to use the specified instruments
        config_copy = config.copy()
        config_copy["instruments"] = instruments

        market_data_df = load_market_data(
            datafile=datafile,
            exchange_id=config_copy["exchange_id"],
            instruments=config_copy["instruments"],
            instrument_id_pfx=config_copy["instrument_id_pfx"],
            db_table_name=config_copy["db_table_name"],
            trading_hours=config_copy["trading_hours"],
        )

        for a_index, b_index in unique_index_pairs:
            pair = fit_method.create_trading_pair(
                market_data=market_data_df,
                symbol_a=instruments[a_index],
                symbol_b=instruments[b_index],
            )
            pairs.append(pair)
        return pairs

    pairs_trades = []
    for pair in _create_pairs(config, instruments):
        single_pair_trades = fit_method.run_pair(
            pair=pair, config=config, bt_result=bt_result
        )
        if single_pair_trades is not None and len(single_pair_trades) > 0:
            pairs_trades.append(single_pair_trades)

    # Check whether the result list has any data before concatenating
    if len(pairs_trades) == 0:
        print("No trading signals found for any pairs")
        return bt_result

    result = pd.concat(pairs_trades, ignore_index=True)
    result["time"] = pd.to_datetime(result["time"])
    result = result.set_index("time").sort_index()

    bt_result.collect_single_day_results(result)
    return bt_result


def main() -> None:
    parser = argparse.ArgumentParser(description="Run pairs trading backtest.")
    parser.add_argument(
        "--config", type=str, required=True, help="Path to the configuration file."
    )
    parser.add_argument(
        "--datafiles",
        type=str,
        required=False,
        help="Comma-separated list of data files (overrides config). No wildcards supported.",
    )
    parser.add_argument(
        "--instruments",
        type=str,
        required=False,
        help="Comma-separated list of instrument symbols (e.g., COIN,GBTC). If not provided, auto-detects from database.",
    )
    parser.add_argument(
        "--result_db",
        type=str,
        required=True,
        help="Path to SQLite database for storing results. Use 'NONE' to disable database output.",
    )

    args = parser.parse_args()

    config: Dict = load_config(args.config)

    # Dynamically instantiate the fit method class
    fit_method_class_name = config.get("fit_method_class", None)
    assert fit_method_class_name is not None
    module_name, class_name = fit_method_class_name.rsplit(".", 1)
    module = importlib.import_module(module_name)
    fit_method = getattr(module, class_name)()

    # Resolve data files (CLI takes priority over config)
    datafiles = resolve_datafiles(config, args.datafiles)

    if not datafiles:
        print("No data files found to process.")
        return

    print(f"Found {len(datafiles)} data files to process:")
    for df in datafiles:
        print(f"  - {df}")

    # Create the result database if needed
    if args.result_db.upper() != "NONE":
        create_result_database(args.result_db)

    # Initialize a dictionary to store all trade results
    all_results: Dict[str, Dict[str, Any]] = {}

    # Store the configuration in the database for reference
    if args.result_db.upper() != "NONE":
        # Get a list of all instruments for storage
        all_instruments = []
        for datafile in datafiles:
            if args.instruments:
                file_instruments = [
                    inst.strip() for inst in args.instruments.split(",")
                ]
            else:
                file_instruments = get_available_instruments_from_db(datafile, config)
            all_instruments.extend(file_instruments)

        # Remove duplicates while preserving order
        unique_instruments = list(dict.fromkeys(all_instruments))

        store_config_in_database(
            db_path=args.result_db,
            config_file_path=args.config,
            config=config,
            fit_method_class=fit_method_class_name,
            datafiles=datafiles,
            instruments=unique_instruments,
        )

    # Process each data file
    for datafile in datafiles:
        print(f"\n====== Processing {os.path.basename(datafile)} ======")

        # Determine the instruments to use
        if args.instruments:
            # Use CLI-specified instruments
            instruments = [inst.strip() for inst in args.instruments.split(",")]
            print(f"Using CLI-specified instruments: {instruments}")
        else:
            # Auto-detect instruments from the database
            instruments = get_available_instruments_from_db(datafile, config)
            print(f"Auto-detected instruments: {instruments}")

        if not instruments:
            print(f"No instruments found for {datafile}, skipping...")
            continue

        # Process data for this file
        try:
            fit_method.reset()

            bt_results = run_strategy(
                config=config,
                datafile=datafile,
                fit_method=fit_method,
                instruments=instruments,
            )

            # Store results with the file name as the key
            filename = os.path.basename(datafile)
            all_results[filename] = {"trades": bt_results.trades.copy()}

            # Store results in the database
            if args.result_db.upper() != "NONE":
                store_results_in_database(args.result_db, datafile, bt_results)

            print(f"Successfully processed {filename}")

        except Exception as err:
            print(f"Error processing {datafile}: {str(err)}")
            import traceback

            traceback.print_exc()

    # Calculate and print results using a new BacktestResult instance for aggregation
    if all_results:
        aggregate_bt_results = BacktestResult(config=config)
        aggregate_bt_results.calculate_returns(all_results)
        aggregate_bt_results.print_grand_totals()
        aggregate_bt_results.print_outstanding_positions()

        if args.result_db.upper() != "NONE":
            print(f"\nResults stored in database: {args.result_db}")
    else:
        print("No results to display.")


if __name__ == "__main__":
    asyncio.run(main())
111 tests/viz_test.py Normal file
@ -0,0 +1,111 @@
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import Any, Dict
|
||||
|
||||
from pt_strategy.results import (PairResearchResult, create_result_database,
|
||||
store_config_in_database)
|
||||
from pt_strategy.research_strategy import PtResearchStrategy
|
||||
from tools.filetools import resolve_datafiles
|
||||
from tools.instruments import get_instruments
|
||||
from tools.viz.viz_trades import visualize_trades
|
||||
|
||||
|
||||
def main() -> None:
|
||||
import argparse
|
||||
|
||||
from tools.config import expand_filename, load_config
|
||||
|
||||
parser = argparse.ArgumentParser(description="Run pairs trading backtest.")
|
||||
parser.add_argument(
|
||||
"--config", type=str, required=True, help="Path to the configuration file."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--date_pattern",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Date YYYYMMDD, allows * and ? wildcards",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--instruments",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Comma-separated list of instrument symbols (e.g., COIN:EQUITY,GBTC:CRYPTO)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--result_db",
|
||||
type=str,
|
||||
required=False,
|
||||
default="NONE",
|
||||
help="Path to SQLite database for storing results. Use 'NONE' to disable database output.",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
config: Dict = load_config(args.config)
|
||||
|
||||
# Resolve data files (CLI takes priority over config)
|
||||
instruments = get_instruments(args, config)
|
||||
datafiles = resolve_datafiles(config, args.date_pattern, instruments)
|
||||
|
||||
days = list(set([day for day, _ in datafiles]))
|
||||
print(f"Found {len(datafiles)} data files to process:")
|
||||
for df in datafiles:
|
||||
print(f" - {df}")
|
||||
|
||||
# Create result database if needed
|
||||
if args.result_db.upper() != "NONE":
|
||||
args.result_db = expand_filename(args.result_db)
|
||||
create_result_database(args.result_db)
|
||||
|
||||
# Initialize a dictionary to store all trade results
|
||||
all_results: Dict[str, Dict[str, Any]] = {}
|
||||
is_config_stored = False
|
||||
# Process each data file
|
||||
|
||||
results = PairResearchResult(config=config)
|
||||
for day in sorted(days):
|
||||
md_datafiles = [datafile for md_day, datafile in datafiles if md_day == day]
|
||||
if not all([os.path.exists(datafile) for datafile in md_datafiles]):
|
||||
print(f"WARNING: insufficient data files: {md_datafiles}")
|
||||
continue
|
||||
print(f"\n====== Processing {day} ======")
|
||||
|
||||
if not is_config_stored:
|
||||
store_config_in_database(
|
||||
db_path=args.result_db,
|
||||
config_file_path=args.config,
|
||||
config=config,
|
||||
datafiles=datafiles,
|
||||
instruments=instruments,
|
||||
)
|
||||
is_config_stored = True
|
||||
|
||||
pt_strategy = PtResearchStrategy(
|
||||
config=config, datafiles=md_datafiles, instruments=instruments
|
||||
)
|
||||
pt_strategy.run()
|
||||
results.add_day_results(
|
||||
day=day,
|
||||
trades=pt_strategy.day_trades(),
|
||||
outstanding_positions=pt_strategy.outstanding_positions(),
|
||||
)
|
||||
|
||||
|
||||
results.analyze_pair_performance()
|
||||
|
||||
|
||||
visualize_trades(pt_strategy, results, day)
|
||||
|
||||
|
||||
if args.result_db.upper() != "NONE":
|
||||
print(f"\nResults stored in database: {args.result_db}")
|
||||
else:
|
||||
print("No results to display.")
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||