Compare commits

..

45 Commits

Author SHA1 Message Date
b288e997b6 moved daily_sim to cvtt_apps 2026-01-06 15:32:46 +00:00
00addf2027 fix 2026-01-06 14:28:05 +00:00
02f74a6f3f fix 2026-01-06 14:27:16 +00:00
Cryptoval2
231bfd079f progress 2025-10-19 15:41:05 +00:00
Cryptoval2
2512023079 smarter crypto simdata - using DB_HOST_BACKUP 2025-10-19 15:13:50 +00:00
Cryptoval2
7f05a919d3 added --mkpath to rsync 2025-09-13 21:10:51 +00:00
Cryptoval2
fda425c680 eqt sim data to dated target directory - fix 2025-09-13 20:48:04 +00:00
Cryptoval2
52ce7d3f55 eqt sim data to dated target directory 2025-09-13 20:40:40 +00:00
Cryptoval2
64d9408506 fix 2025-09-13 19:39:02 +00:00
Cryptoval2
d427e22d41 crypto sim -> dated target directory 2025-09-12 23:01:51 +00:00
0647e9d9b7 fix 2025-05-18 21:43:53 -04:00
66d2840a74 eqt 2025-05-18 21:38:09 -04:00
b4dfecf12a test 2025-05-18 21:25:34 -04:00
7f47905579 testing 2025-05-18 21:17:55 -04:00
08636b2206 fix 2025-05-18 20:56:40 -04:00
3b1a1fc9ba eqt initial 2025-05-18 20:46:58 -04:00
19c7313326 progress 2025-05-18 19:40:08 -04:00
e23ddd5e44 progress 2025-05-18 19:35:51 -04:00
9bb868845f fix 2025-05-18 19:11:19 -04:00
aced5b23e0 progress 2025-05-18 19:05:25 -04:00
9eb4f8e117 daily sim for docker.initial 2025-05-18 18:42:08 -04:00
3e49b5bf27 Merge branch 'master' of cloud21.cvtt.vpn:/opt/store/git/cvtt2/ops 2025-05-17 23:29:01 -04:00
0876776d60 progress 2025-05-17 19:30:00 -04:00
Cryptoval2
fb685ca91d cleaning 2025-05-16 16:00:44 +00:00
Cryptoval2
f1a83c8e33 . 2025-05-16 15:58:26 +00:00
Cryptoval2
26540a5294 cleaning 2025-05-16 15:57:12 +00:00
c37d62637a Merge branch 'master' of cloud21.cvtt.vpn:/opt/store/git/cvtt2/ops 2025-05-16 11:36:33 -04:00
bcb257add4 deprecated 2025-05-16 11:36:12 -04:00
Cryptoval2
ccc45b06ea utils upgrade 2025-05-16 15:16:01 +00:00
Cryptoval2
ebcf3ac20b Merge branch 'master' of cloud21.cvtt.vpn:/opt/store/git/cvtt2/ops 2025-05-16 13:28:39 +00:00
Cryptoval2
d66bf5f48b 2.1.3,remove cvttdata 2025-05-16 13:28:00 +00:00
5c3fd357ab Merge branch 'master' of cloud21.cvtt.vpn:/opt/store/git/cvtt2/ops 2025-05-15 18:28:08 -04:00
Cryptoval2
1432794197 2.1.2,fix 2025-05-15 19:40:31 +00:00
Cryptoval2
52dd9997ca 2.1.1,archive_logs interface change 2025-05-15 19:30:09 +00:00
066bdbdb93 new aggregate features are added to research 2025-05-05 15:31:56 -04:00
Cryptoval2
f59c729c6f flexible target directory for loading sim data 2025-04-16 17:15:03 +00:00
Cryptoval2
df04d764c8 prepare crypto sim enhanced 2025-03-24 17:25:57 +00:00
Cryptoval2
06610de992 OPS-10 2025-03-21 17:04:23 +00:00
9aaf356048 OPS-6 2025-03-18 13:47:51 -04:00
f2bb798fff fix 2025-03-07 23:16:48 -05:00
b0a0080a4a replacing "latest" with real docker image tag 2025-03-07 23:12:38 -05:00
e35b15f024 replacing "latest" with real docker image tag 2025-03-07 23:09:39 -05:00
3bd31b7d8b crypto_md_day - specific docker tag 2025-03-07 23:02:06 -05:00
085308ff9c using specific version for md_recorder 2025-03-07 22:42:46 -05:00
9f789738e9 using HA trading-calendar and config service 2025-02-03 15:31:34 -05:00
33 changed files with 730 additions and 515 deletions

1
.gitignore vendored Normal file
View File

@ -0,0 +1 @@
.history/

View File

@ -1 +1 @@
1.9.9.F2,docker build imagages script fix
2.3.2

View File

@ -17,7 +17,7 @@ is_container_running() {
fi
}
export CalendarURL=http://cloud23.cvtt.vpn:8000/api/v1/markets/hours?mic=XNYS
export CalendarURL=https://trading-calendar.cvtt.net/api/v1/markets/hours?mic=XNYS
is_business_day() {
dt=${1}

View File

@ -3,7 +3,7 @@
ValidJobs=('BNBFUT_CLD21' 'BNBSPOT_CLD21' 'COINBASE_CLD21' 'BNBFUT_CVTTDATA' 'BNBSPOT_CVTTDATA' 'COINBASE_CVTTDATA')
# runs on host to start container
usage() {
echo "Usage: $0 <job_name; one of (${ValidJobs[@]})>"
echo "Usage: $0 <job_name; one of (${ValidJobs[@]})> [image_tag]"
exit 1
}
@ -35,6 +35,12 @@ if ! is_valid "${job}"; then
usage
fi
ImageTag=${2}
if [ "${ImageTag}" == "" ] ; then
ImageTag="1.6.9"
fi
DockerImage=cloud21.cvtt.vpn:5500/md_recorder:${ImageTag}
ContainerName="md_recorder.${job}"
if is_container_running "$ContainerName"; then
@ -47,13 +53,13 @@ Cmd="docker run"
Cmd+=" -d"
Cmd+=" --rm"
Cmd+=" --network=host"
Cmd+=" --pull=always"
# Cmd+=" --pull=always"
Cmd+=" --name=${ContainerName}"
Cmd+=" -v /home/cvtt/.creds:/.creds"
Cmd+=" -v /home/cvtt/prod/data:/app/data"
Cmd+=" -v /home/cvtt/prod/logs:/logs"
Cmd+=" -e CONFIG_SERVICE=cloud23.cvtt.vpn:6789"
Cmd+=" cloud21.cvtt.vpn:5500/md_recorder:latest"
Cmd+=" -e CONFIG_SERVICE=cloud16.cvtt.vpn:6789"
Cmd+=" ${DockerImage}"
Cmd+=" ${job}"
echo ${Cmd}

View File

@ -52,7 +52,7 @@ Cmd+=" --pull=always"
Cmd+=" --name=${ContainerName}"
Cmd+=" -v /home/cvtt/.creds:/.creds"
Cmd+=" -v /home/cvtt/prod/logs:/logs"
Cmd+=" -e CONFIG_SERVICE=cloud23.cvtt.vpn:6789"
Cmd+=" -e CONFIG_SERVICE=cloud16.cvtt.vpn:6789"
Cmd+=" cloud21.cvtt.vpn:5500/md_recorder_monitor:latest"
Cmd+=" ${Instance} ${AdminPort}"

View File

@ -1,99 +1,94 @@
#!/bin/bash
# SQLite DDL for simulation
# =========================
# -- md_quotes
# -- md_1min_bars
usage() {
echo "Usage: $0 -d YYYMMDD Date> [-O <output dir (./) >]"
exit 1
}
# --------------------- Settings
SourceHost=cloud21.cvtt.vpn
SourceRootDir=/opt/store/cvtt/md_archive/crypto
DbSource=cloud21
# SOURCE_HOST=cloud21.cvtt.vpn
# SOURCE_ROOT_DIR=/opt/store/cvtt/md_archive/crypto
# DB_SOURCES=cloud28,cloud29
# OUTPUT_DIR=/tmp
# DATE=20250516
# RSYNC_TARGETS="cvtt@hs01.cvtt.vpn:/works/cvtt/md_archive/crypto/sim/ cvtt@cloud21.cvtt.vpn:/opt/store/cvtt/md_archive/crypto/sim/"
# --------------------- Settings
while getopts ":d:O:" opt; do
case ${opt} in
d )
Date=$OPTARG
;;
O )
OutputDir=$OPTARG
;;
\? )
echo "Invalid option: -$OPTARG" >&2
usage
;;
: )
echo "Option -$OPTARG requires an argument." >&2
usage
;;
esac
if [ -z ${DATE} ] ; then
DATE=$(date -d 'yesterday' +'%Y%m%d')
fi
if [ -z ${OUTPUT_DIR} ] ; then
OUTPUT_DIR=.
fi
echo "DATE=${DATE} SOURCE_HOST=${SOURCE_HOST}"
mkdir -p ${OUTPUT_DIR}
year=$(date -d ${DATE} +"%Y")
month=$(date -d ${DATE} +"%m")
if [ -z "${DB_SOURCES}" ]; then
echo "DB_SOURCES is empty"
exit
fi
IFS=',' read -r -a db_source_hosts <<< "${DB_SOURCES}"
SourceFile="${DATE}.mktdata.db.gz"
SelectedSourceHost=""
SelectedSourceFilePath=""
SelectedSourceSize=0
for db_source_host in "${db_source_hosts[@]}"; do
SourceDir="${SOURCE_ROOT_DIR}/${db_source_host}/${year}/${month}"
CandidatePath="${SourceDir}/${SourceFile}"
remote_stat_cmd="if [ -f '${CandidatePath}' ]; then stat -c %s '${CandidatePath}'; else exit 1; fi"
CandidateSize=$(ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ${SOURCE_HOST} "${remote_stat_cmd}" 2>/dev/null)
if [ $? -eq 0 ] && [ -n "${CandidateSize}" ]; then
echo "Found ${SOURCE_HOST}:${CandidatePath} (${CandidateSize} bytes)"
if [ -z "${SelectedSourceHost}" ] || [ "${CandidateSize}" -gt "${SelectedSourceSize}" ]; then
SelectedSourceHost=${db_source_host}
SelectedSourceFilePath=${CandidatePath}
SelectedSourceSize=${CandidateSize}
fi
fi
done
if [ -z ${Date} ] ; then
echo "date is not specified"
usage
if [ -z "${SelectedSourceHost}" ]; then
echo "File ${SourceFile} NOT FOUND on any DB_SOURCES host"
exit
fi
if [ -z ${OutputDir} ] ; then
OutputDir=.
fi
mkdir -p ${OutputDir}
# --- Binance
Instruments=( PAIR-ADA-USDT )
Instruments+=( PAIR-BCH-USDT )
Instruments+=( PAIR-BTC-USDT )
Instruments+=( PAIR-DOT-USDT )
Instruments+=( PAIR-ETH-USDT )
Instruments+=( PAIR-LTC-USDT )
Instruments+=( PAIR-SOL-USDT )
Instruments+=( PAIR-USDC-USDT )
Instruments+=( PAIR-XRP-USDT )
echo "Using source ${SelectedSourceHost} with ${SelectedSourceFilePath} (${SelectedSourceSize} bytes)"
# --- Coinbase
Instruments+=( PAIR-ADA-USD )
Instruments+=( PAIR-BCH-USD )
Instruments+=( PAIR-BTC-USD )
Instruments+=( PAIR-DOT-USD )
Instruments+=( PAIR-ETH-USD )
Instruments+=( PAIR-LTC-USD )
Instruments+=( PAIR-SOL-USD )
Instruments+=( PAIR-XRP-USD )
echo "Date=${Date} Instruments=${Instruments[@]} OutputDir=${OutputDir}"
echo Getting data from ${DataHost} ...
year=$(date -d ${Date} +"%Y")
month=$(date -d ${Date} +"%m")
SourceDir="${SourceRootDir}/${DbSource}/${year}/${month}"
SourceFile="${SourceDir}/${Date}.mktdata.db.gz"
Cmd="rsync -ahv"
Cmd+=" ${SourceHost}:${SourceFile}"
Cmd+=" $OutputDir/"
Cmd="/usr/bin/rsync -ahv"
Cmd+=" --mkpath"
Cmd+=" -e 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'"
Cmd+=" ${SOURCE_HOST}:${SelectedSourceFilePath}"
Cmd+=" $OUTPUT_DIR/"
echo ${Cmd}
eval ${Cmd}
Cmd="(cd ${OutputDir} && gunzip *.db.gz)"
if [ ! -f ${OUTPUT_DIR}/${SourceFile} ] ; then
echo "File ${OUTPUT_DIR}/${SourceFile} NOT FOUND"
exit
fi
Cmd="(cd ${OUTPUT_DIR} && gunzip -f *.db.gz)"
echo ${Cmd}
eval ${Cmd}
SourceDbFile="${OutputDir}/${Date}.mktdata.db"
ResultDbFile="${OutputDir}/${Date}.crypto_sim_md.db"
SourceDbFile="${OUTPUT_DIR}/${DATE}.mktdata.db"
ResultDbFile="${OUTPUT_DIR}/${DATE}.crypto_sim_md.db"
echo "SourceDbFile=${SourceDbFile}"
echo "Creating Result Database File ${ResultDbFile}"
cleanup() {
rm ${SourceDbFile}
}
trap cleanup EXIT
echo "Creating table md_trades ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
CREATE TABLE IF NOT EXISTS md_trades (
tstamp text,
tstamp_ns integer,
@ -106,12 +101,14 @@ CREATE TABLE IF NOT EXISTS md_trades (
condition text,
tape text
);
CREATE UNIQUE INDEX IF NOT EXISTS md_trades_uidx
ON md_trades(tstamp_ns, exchange_id, instrument_id);
EOF
echo "Creating table md_quotes ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
CREATE TABLE IF NOT EXISTS md_quotes (
tstamp text,
tstamp_ns integer,
@ -130,6 +127,7 @@ EOF
echo "Creating table md_1min_bars ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
CREATE TABLE IF NOT EXISTS md_1min_bars (
tstamp text,
tstamp_ns integer,
@ -149,6 +147,7 @@ EOF
echo "Loading md_trades ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
ATTACH '${SourceDbFile}' AS source_db;
BEGIN;
INSERT OR IGNORE INTO md_trades SELECT
@ -186,6 +185,7 @@ EOF
echo "Loading md_quotes ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
ATTACH '${SourceDbFile}' AS source_db;
BEGIN;
INSERT OR IGNORE INTO md_quotes SELECT
@ -224,6 +224,7 @@ EOF
echo "Loading md_1min_bars ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
ATTACH '${SourceDbFile}' AS source_db;
BEGIN;
INSERT OR IGNORE INTO md_1min_bars SELECT
@ -260,25 +261,18 @@ COMMIT;
DETACH source_db;
EOF
Cmd="rm ${SourceDbFile}"
echo ${Cmd}
eval ${Cmd}
Cmd="gzip ${ResultDbFile}"
echo ${Cmd}
eval ${Cmd}
Cmd="rsync -ahvv ${ResultDbFile}.gz cvtt@hs01.cvtt.vpn:/works/cvtt/md_archive/crypto/sim/"
echo ${Cmd}
eval ${Cmd}
Cmd="rsync -ahvv ${ResultDbFile}.gz cvtt@cloud21.cvtt.vpn:/opt/store/cvtt/md_archive/crypto/sim/"
echo ${Cmd}
eval ${Cmd}
Cmd="rm ${ResultDbFile}.gz"
for tgt in ${RSYNC_TARGETS} ; do
tgt="${tgt}/${year}/${month}/"
Cmd="/usr/bin/rsync -ahv"
Cmd+=" --mkpath"
Cmd+=" -e 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'"
Cmd+=" ${ResultDbFile}.gz ${tgt}"
echo ${Cmd}
eval ${Cmd}
done
echo Done $0 ${*}

View File

@ -1,23 +1,27 @@
#!/bin/bash
usage() {
echo "Usage: $0 -S <symbols> -d <YYYYMMDD Date> [-O <output dir (./) >]"
exit 1
}
# --------------------- Settings
SourceHost=cloud21.cvtt.vpn
SourceRootDir=/opt/store/cvtt/md_archive/equity/alpaca_md
# SOURCE_HOST=cloud21.cvtt.vpn
# SOURCE_ROOT_DIR=/opt/store/cvtt/md_archive/equity/alpaca_md
# SYMBOL_LIST=CAN #,COIN,GBTC,HOOD,MSTR,PYPL,XYZ
# OUTPUT_DIR=/tmp
# DATE=20250514
# RSYNC_TARGETS="cvtt@hs01.cvtt.vpn:/works/cvtt/md_archive/equity/test/alpaca_md/sim/"
# RSYNC_TARGETS="cvtt@hs01.cvtt.vpn:/works/cvtt/md_archive/equity/alpaca_md/sim/
#cvtt@cloud21.cvtt.vpn:/opt/store/cvtt/md_archive/equity/alpaca_md/sim/"
# --------------------- Settings
is_business_day() {
dt=${1}
date=$(date -d "${dt}" +"%Y-%m-%d")
CalendarURL=http://cloud23.cvtt.vpn:8000/api/v1/markets/hours?mic=XNYS
CalendarURL=https://trading-calendar.cvtt.net/api/v1/markets/hours?mic=XNYS
URL="${CalendarURL}&start=${date}&end=${date}"
echo "URL=${URL}"
# curl $URL
open_time=$(curl -s "${URL}" | jq '.[] | .open_time')
echo "open_time=${open_time}"
if [ -n "${open_time}" ]; then
return 0
else
@ -27,83 +31,59 @@ is_business_day() {
}
export -f is_business_day
while getopts ":d:S:O:" opt; do
case ${opt} in
d )
Date=$OPTARG
;;
S )
SymList=$OPTARG
;;
O )
OutputDir=$OPTARG
;;
\? )
echo "Invalid option: -$OPTARG" >&2
usage
;;
: )
echo "Option -$OPTARG requires an argument." >&2
usage
;;
esac
done
if [ -z ${SymList} ] ; then
if [ -z ${SYMBOL_LIST} ] ; then
echo "symbols are not specified"
usage
exit
fi
if [ -z ${Date} ] ; then
echo "date is not specified"
usage
if [ -z ${DATE} ] ; then
DATE=$(date -d 'yesterday' +'%Y%m%d')
fi
if [ -z ${OutputDir} ] ; then
OutputDir=.
fi
mkdir -p ${OutputDir}
if ! is_business_day ${Date}; then
echo "${Date} is not business day"
usage
mkdir -p ${OUTPUT_DIR}
if ! is_business_day ${DATE}; then
echo "${DATE} is not business day"
exit
fi
OLD_IFS=${IFS}
IFS=","
read -ra Symbols <<< "${SymList}"
read -ra Symbols <<< "${SYMBOL_LIST}"
IFS=${OLD_IFS}
echo "Date=${Date} Symbols=${Symbols[@]} OutputDir=${OutputDir}"
echo "DATE=${DATE} Symbols=${Symbols[@]} OUTPUT_DIR=${OUTPUT_DIR}"
echo Getting data from ${DataHost} ...
year=$(date -d ${Date} +"%Y")
year=$(date -d ${DATE} +"%Y")
month=$(date -d ${DATE} +"%m")
for sym in ${Symbols[@]}; do
inst_id="STOCK-${sym}"
capital=${sym:0:1}
SourceDir="${SourceRootDir}/${year}/${capital}/${sym}"
SourceHbarFile="${SourceDir}/${Date}.${sym}.alpaca_1m_bars.db.gz"
SourceQatFile="${SourceDir}/${Date}.${sym}.alpaca_qat.db.gz"
SourceDir="${SOURCE_ROOT_DIR}/${year}/${capital}/${sym}"
SourceHbarFile="${SourceDir}/${DATE}.${sym}.alpaca_1m_bars.db.gz"
SourceQatFile="${SourceDir}/${DATE}.${sym}.alpaca_qat.db.gz"
for src_file in ${SourceHbarFile} ${SourceQatFile}; do
Cmd="rsync -ahv"
Cmd+=" ${SourceHost}:${src_file}"
Cmd+=" $OutputDir/"
Cmd+=" -e 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'"
Cmd+=" ${SOURCE_HOST}:${src_file}"
Cmd+=" $OUTPUT_DIR/"
echo ${Cmd}
eval ${Cmd}
done
done
Cmd="(cd ${OutputDir} && gunzip *.db.gz)"
Cmd="(cd ${OUTPUT_DIR} && gunzip *.db.gz)"
echo ${Cmd}
eval ${Cmd}
ResultDbFile="${OutputDir}/${Date}.alpaca_sim_md.db"
echo "Creating Result Database File ${ResultDbFile}"
ResultDbFile="${OUTPUT_DIR}/${DATE}.alpaca_sim_md.db"
echo "Creating Result Database File ${ResultDbFile}"
echo "Creating table md_trades ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
CREATE TABLE IF NOT EXISTS md_trades (
tstamp text,
tstamp_ns integer,
@ -122,6 +102,7 @@ EOF
echo "Creating table md_quotes ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
CREATE TABLE IF NOT EXISTS md_quotes (
tstamp text,
tstamp_ns integer,
@ -161,11 +142,12 @@ EOF
# set -f # not to expand *
for sym in ${Symbols[@]}; do
src_hbar_db=${OutputDir}/${Date}.${sym}.alpaca_1m_bars.db
src_qat_db=${OutputDir}/${Date}.${sym}.alpaca_qat.db
src_hbar_db=${OUTPUT_DIR}/${DATE}.${sym}.alpaca_1m_bars.db
src_qat_db=${OUTPUT_DIR}/${DATE}.${sym}.alpaca_qat.db
echo "Loading md_trades and md_quotes from ${src_qat_db} ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
ATTACH '${src_qat_db}' AS source_db;
BEGIN;
INSERT OR IGNORE INTO md_trades SELECT
@ -200,6 +182,7 @@ EOF
echo "Loading md_1min_bars from ${src_hbar_db} ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
ATTACH '${src_hbar_db}' AS source_db;
BEGIN;
INSERT OR IGNORE INTO md_1min_bars SELECT
@ -229,16 +212,13 @@ Cmd="gzip ${ResultDbFile}"
echo ${Cmd}
eval ${Cmd}
Cmd="rsync -ahvv ${ResultDbFile}.gz cvtt@hs01.cvtt.vpn:/works/cvtt/md_archive/equity/alpaca_md/sim/"
echo ${Cmd}
eval ${Cmd}
Cmd="rsync -ahvv ${ResultDbFile}.gz cvtt@cloud21.cvtt.vpn:/opt/store/cvtt/md_archive/equity/alpaca_md/sim/"
echo ${Cmd}
eval ${Cmd}
Cmd="rm ${ResultDbFile}.gz"
for tgt in ${RSYNC_TARGETS} ; do
Cmd="/usr/bin/rsync -ahv"
Cmd+=" --mkpath"
Cmd+=" -e 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'"
Cmd+=" ${ResultDbFile}.gz ${tgt}/${year}/${month}/"
echo ${Cmd}
eval ${Cmd}
done
echo Done $0 ${*}

View File

@ -19,7 +19,7 @@ fi
RegistryService=cloud21.cvtt.vpn:5500
RegistryProtocol=http
ConfigUrl=http://cloud23.cvtt.vpn:6789/admin/docker_images
ConfigUrl=http://cloud16.cvtt.vpn:6789/admin/docker_images
ReleaseHost="cloud21.cvtt.vpn"

View File

@ -10,7 +10,7 @@ if [ "" == "${prj}" ] ; then
usage
fi
Cmd="pushd /home/oleg/develop/cvtt2"
Cmd="pushd /home/oleg/develop/cvtt2-ops"
Cmd="${Cmd} && (cd ${prj}"
Cmd="${Cmd} && git pushall)"
Cmd="${Cmd} && ./build_release.sh -p ${prj}"

183
build_ops.sh Executable file
View File

@ -0,0 +1,183 @@
#!/usr/bin/env bash
# Build and distribute a release of the "ops" project.
#
# Flow: clone the repo into a temp dir, check out the requested branch,
# read "<version>,<whats-new>" from VERSION, tag the commit, push the tag,
# strip .git and rsync the tree to every host in dist_locations.
#
# Usage: build_ops.sh [-b <branch (master)>] [-i (interactive)]

# ---------------- Settings
repo=git@cloud21.cvtt.vpn:/works/git/cvtt2/ops.git
dist_root=/home/cvttdist/software/cvtt2
dist_user=cvttdist
dist_host="cloud21.cvtt.vpn"
dist_ssh_port="22"
# space-separated "host:port" pairs that receive the release tree
dist_locations="cloud21.cvtt.vpn:22 hs01.cvtt.vpn:22"
version_file="VERSION"
prj=ops
brnch=master
interactive=N
# ---------------- Settings

# ---------------- cmdline
usage() {
  # fixed: the option-list bracket was previously left unclosed
  echo "Usage: $0 [-b <branch (master)>] [-i (interactive)]"
  exit 1
}
while getopts "b:i" opt; do
  case ${opt} in
    b )
      brnch=$OPTARG
      ;;
    i )
      interactive=Y
      ;;
    \? )
      echo "Invalid option: -$OPTARG" >&2
      usage
      ;;
    : )
      echo "Option -$OPTARG requires an argument." >&2
      usage
      ;;
  esac
done
# ---------------- cmdline

# Pause for an <Enter> keypress, but only in interactive mode.
confirm() {
  if [ "${interactive}" == "Y" ]; then
    echo "--------------------------------"
    echo -n "Press <Enter> to continue" && read -r
  fi
}

# Resolve project name (prompt in interactive mode, default otherwise).
if [ "${interactive}" == "Y" ]; then
  echo -n "Enter project [${prj}]: "
  read -r project
  if [ "${project}" == "" ]; then
    project=${prj}
  fi
else
  project=${prj}
fi

# repo=${git_repo_arr[${project}]}
if [ -z "${repo}" ]; then
  echo "ERROR: Project repository for ${project} not found" >&2
  exit 1
fi
echo "Project repo: ${repo}"

# Resolve branch to release from.
if [ "${interactive}" == "Y" ]; then
  echo -n "Enter branch to build release from [${brnch}]: "
  read -r branch
  if [ "${branch}" == "" ]; then
    branch=${brnch}
  fi
else
  branch=${brnch}
fi

tmp_dir=$(mktemp -d) || exit 1
cleanup() {
  cd "${HOME}"
  rm -rf -- "${tmp_dir:?}"
}
# temp clone is removed on any exit path
trap cleanup EXIT

prj_dir="${tmp_dir}/${prj}"

# Stage 1: clone and enter the working copy.
cmd_arr=()
Cmd="git clone ${repo} ${prj_dir}"
cmd_arr+=("${Cmd}")
Cmd="cd ${prj_dir}"
cmd_arr+=("${Cmd}")
if [ "${interactive}" == "Y" ]; then
  echo "------------------------------------"
  echo "The following commands will execute:"
  echo "------------------------------------"
  for cmd in "${cmd_arr[@]}"; do
    echo "${cmd}"
  done
fi
confirm
for cmd in "${cmd_arr[@]}"; do
  echo "${cmd}" && eval ${cmd}
done

Cmd="git checkout ${branch}"
echo "${Cmd}" && eval ${Cmd}
if [ "${?}" != "0" ]; then
  echo "ERROR: Branch ${branch} is not found" >&2
  exit 1   # cleanup trap removes ${tmp_dir}
fi

# VERSION format: "<release_version>,<whats_new>"
release_version=$(awk -F',' '{print $1}' "${version_file}")
whats_new=$(awk -F',' '{print $2}' "${version_file}")
echo "--------------------------------"
echo "Version file: ${version_file}"
echo "Release version: ${release_version}"
confirm

version_tag="v${release_version}"
version_comment="'${version_tag} ${project} ${branch} $(date +%Y-%m-%d)\n${whats_new}'"

# Stage 2: tag, push tags, strip .git, then rsync to every dist location.
cmd_arr=()
Cmd="git tag -a ${version_tag} -m ${version_comment}"
cmd_arr+=("${Cmd}")
Cmd="git push origin --tags"
cmd_arr+=("${Cmd}")
Cmd="rm -rf .git"
cmd_arr+=("${Cmd}")
SourceLoc=../${project}
dist_path="${dist_root}/${project}/${release_version}"
for dist_loc in ${dist_locations}; do
  dhp=(${dist_loc//:/ })
  dist_host=${dhp[0]}
  dist_port=${dhp[1]:-${dist_ssh_port}}
  Cmd="rsync -avzh"
  # create the versioned target directory on the remote before copying
  Cmd="${Cmd} --rsync-path=\"mkdir -p ${dist_path}"
  # fixed: use the port parsed from dist_locations (was silently ignored)
  Cmd="${Cmd} && rsync\" -e \"ssh -p ${dist_port}\""
  Cmd="${Cmd} $SourceLoc ${dist_user}@${dist_host}:${dist_path}/"
  cmd_arr+=("${Cmd}")
done
if [ "${interactive}" == "Y" ]; then
  echo "------------------------------------"
  echo "The following commands will execute:"
  echo "------------------------------------"
  for cmd in "${cmd_arr[@]}"; do
    echo "${cmd}"
  done
fi
confirm
for cmd in "${cmd_arr[@]}"; do
  pwd && echo "${cmd}" && eval ${cmd}
done
echo "$0 Done ${project} ${release_version}"

View File

@ -9,7 +9,7 @@ get_user_hosts() {
local User=${1}
local Domain=${2}
Cmd="curl -s http://cloud23.cvtt.vpn:6789/admin/cvtt_hosts"
Cmd="curl -s http://cloud16.cvtt.vpn:6789/admin/cvtt_hosts"
Cmd="${Cmd} | ${HOME}/bin/hjson -j"
Cmd="${Cmd} | jq -r"
Cmd="${Cmd} --arg domain \"${Domain}\""

View File

@ -1,27 +0,0 @@
#!/bin/bash
# Run the crypto_md_day docker image for one host and one date.
#
# Usage: $0 <host> [YYYYMMDD]
#   host - data source host label (required; used in container name and -h)
#   date - date to process; defaults to yesterday
host=${1}
date=${2}
DockerRegistry=cloud21.cvtt.vpn:5500
DockerImage=${DockerRegistry}/crypto_md_day
# fixed: require the host argument; previously an empty host produced a
# malformed container name and an empty -h flag for the container entrypoint
if [ -z "${host}" ] ; then
  echo "Usage: $0 <host> [YYYYMMDD]" >&2
  exit 1
fi
if [ -z "${date}" ] ; then
  date=$(date -d "yesterday" +%Y%m%d)
fi
Cmd="docker run"
Cmd+=" --pull=always"
Cmd+=" --network=host"
Cmd+=" --name=crypto_md_day.${host}.${date}"
Cmd+=" --rm"
Cmd+=" --volume=${HOME}/.creds:/.creds"
Cmd+=" -e CONFIG_SERVICE=cloud23.cvtt.vpn:6789"
Cmd+=" ${DockerImage}"
Cmd+=" -h ${host}"
Cmd+=" -d ${date}"
# Cmd+=" -s coinbase,bnbspot,bnbfut"
Cmd+=" -s coinbase,bnbspot"
echo "${Cmd}"
eval ${Cmd}

View File

@ -1,107 +0,0 @@
#!/bin/bash
# Run the alpaca_hbar docker image to load 1-minute historical bars for one
# business day. Refuses to start if an alpaca_hbar container is already up.
usage() {
echo "Usage: $0 -N <num_symbols> [-L <LogDir>] [-d <YYYYMMDD Date>]"
exit 1
}
# Return 0 iff a container with exactly this name is in "running" state.
is_container_running() {
local container_name=$1
if [ "$(docker ps --filter "name=^/${container_name}$" --filter "status=running" -q)" ]; then
return 0 # true
else
return 1 # false
fi
}
export CalendarURL=http://cloud23.cvtt.vpn:8000/api/v1/markets/hours?mic=XNYS
# Return 0 iff the trading-calendar service reports an open_time for the date.
# NOTE(review): a calendar-service outage makes every day look non-business.
is_business_day() {
dt=${1}
open_time=$(curl -s "${CalendarURL}&start=${dt}&end=${dt}" | jq '.[] | .open_time')
if [ -n "${open_time}" ]; then
return 0
else
return 1
fi
}
export -f is_business_day
# Walk backwards day by day from $1 until a business day is found; echo it.
get_prev_business_day() {
Start=${1}
while true; do
if is_business_day ${Start}; then
break
fi
echo "${Start} is not business day in US" >&2
Start=$(date -d "${Start} - 1 day" "+%Y-%m-%d")
done
echo ${Start}
}
export -f get_prev_business_day
# ----- Settings
DockerRegistry=cloud21.cvtt.vpn:5500
DockerImage=${DockerRegistry}/alpaca_hbar #:latest
ContainerName=alpaca_hbar
LogDir=/home/cvtt/prod/logs
# ----- Settings
while getopts ":d:N:L:" opt; do
case ${opt} in
d )
date_to_load=$OPTARG
;;
N )
NumSymbols=$OPTARG
;;
L )
LogDir=$OPTARG
;;
\? )
echo "Invalid option: -$OPTARG" >&2
usage
;;
: )
echo "Option -$OPTARG requires an argument." >&2
usage
;;
esac
done
# Guard against overlapping runs (exit code 3 distinguishes this case).
if is_container_running "$ContainerName"; then
echo "Container ${ContainerName} is already running."
exit 3
fi
# Default date: most recent business day on or before yesterday.
if [ -z "${date_to_load}" ]; then
echo "date_to_load is empty"
date_to_load=$(get_prev_business_day $(date -d "yesterday" '+%Y-%m-%d'))
echo "Historical Data for ${date_to_load}"
fi
echo "date_to_load=${date_to_load}"
# Assemble and run the docker command; creds and log dir are bind-mounted.
Cmd="docker run"
Cmd+=" --pull=always"
Cmd+=" --network=host"
Cmd+=" --name=${ContainerName}"
Cmd+=" --rm"
Cmd+=" --volume=${LogDir}:/logs"
Cmd+=" --volume=${HOME}/.creds:/.creds"
Cmd+=" -e CONFIG_SERVICE=cloud23.cvtt.vpn:6789"
Cmd+=" ${DockerImage}"
Cmd+=" -d ${date_to_load}"
if [ -n "${NumSymbols}" ]; then
Cmd+=" -N ${NumSymbols}"
fi
echo $Cmd
eval $Cmd
if [ "$?" != "0" ] ; then
exit 1 # if killed we do not save last day
fi

View File

@ -1,110 +0,0 @@
#!/bin/bash
# Run the alpaca_qat docker image to load quotes-and-trades data for the
# given symbols for one business day. Companion to the alpaca_hbar runner.
usage() {
echo "Usage: $0 -S <symbols> [-L <LogDir>] [-d <YYYYMMDD Date>]"
exit 1
}
# Return 0 iff a container with exactly this name is in "running" state.
is_container_running() {
local container_name=$1
if [ "$(docker ps --filter "name=^/${container_name}$" --filter "status=running" -q)" ]; then
return 0 # true
else
return 1 # false
fi
}
export CalendarURL=http://cloud23.cvtt.vpn:8000/api/v1/markets/hours?mic=XNYS
# Return 0 iff the trading-calendar service reports an open_time for the date.
# NOTE(review): a calendar-service outage makes every day look non-business.
is_business_day() {
dt=${1}
open_time=$(curl -s "${CalendarURL}&start=${dt}&end=${dt}" | jq '.[] | .open_time')
if [ -n "${open_time}" ]; then
return 0
else
return 1
fi
}
export -f is_business_day
# Walk backwards day by day from $1 until a business day is found; echo it.
get_prev_business_day() {
Start=${1}
while true; do
if is_business_day ${Start}; then
break
fi
echo "${Start} is not business day in US" >&2
Start=$(date -d "${Start} - 1 day" "+%Y-%m-%d")
done
echo ${Start}
}
export -f get_prev_business_day
# ----- Settings
DockerRegistry=cloud21.cvtt.vpn:5500
DockerImage=${DockerRegistry}/alpaca_qat #:latest
ContainerName=alpaca_qat
LogDir=/home/cvtt/prod/logs
# ----- Settings
while getopts ":d:S:L:" opt; do
case ${opt} in
d )
date_to_load=$OPTARG
;;
S )
Symbols=$OPTARG
;;
L )
LogDir=$OPTARG
;;
\? )
echo "Invalid option: -$OPTARG" >&2
usage
;;
: )
echo "Option -$OPTARG requires an argument." >&2
usage
;;
esac
done
# -S is mandatory; there is no sensible default symbol list.
if [ -z ${Symbols} ] ; then
echo "symbols are not specified"
usage
fi
# Guard against overlapping runs (exit code 3 distinguishes this case).
if is_container_running "$ContainerName"; then
echo "Container ${ContainerName} is already running."
exit 3
fi
# Default date: most recent business day on or before yesterday.
if [ -z "${date_to_load}" ]; then
echo "date_to_load is empty"
date_to_load=$(get_prev_business_day $(date -d "yesterday" '+%Y-%m-%d'))
echo "Historical Data for ${date_to_load}"
fi
echo "date_to_load=${date_to_load}"
# Assemble and run the docker command; creds and log dir are bind-mounted.
Cmd="docker run"
Cmd+=" --pull=always"
Cmd+=" --network=host"
Cmd+=" --name=${ContainerName}"
Cmd+=" --rm"
Cmd+=" --volume=${LogDir}:/logs"
Cmd+=" --volume=${HOME}/.creds:/.creds"
Cmd+=" -e CONFIG_SERVICE=cloud23.cvtt.vpn:6789"
Cmd+=" ${DockerImage}"
Cmd+=" -d ${date_to_load}"
Cmd+=" -S ${Symbols}"
echo $Cmd
eval $Cmd
if [ "$?" != "0" ] ; then
exit 1 # if killed we do not save last day
fi

View File

@ -8,7 +8,7 @@ Cmd+=" --network=host"
Cmd+=" --name=crypto_exch_stats"
Cmd+=" --volume=${HOME}/prod/data:/app/data"
Cmd+=" --volume=${HOME}/prod/logs:/logs"
Cmd+=" -e CONFIG_SERVICE=cloud23.cvtt.vpn:6789"
Cmd+=" -e CONFIG_SERVICE=cloud16.cvtt.vpn:6789"
Cmd+=" cloud21.cvtt.vpn:5500/crypto_exch_stats:latest"
echo ${Cmd}

275
research/aggregate_features.sh Executable file
View File

@ -0,0 +1,275 @@
#!/bin/bash
# Build per-minute feature tables from a raw market-data SQLite database.
#
# Usage: $0 <source_database_file> <features_database_file>
#   source db must contain: md_trades, md_quotes, md_1min_bars
#   dest db gets: md_1min_trade_features, md_1min_quote_features,
#                 md_1min_bars (copy), and the md_1min_features_view join view.
# Existing feature tables in the destination are dropped and rebuilt.
if [ $# -ne 2 ]; then
echo "Usage: $0 <source_database_file> <features_database_file>"
exit 1
fi
SRC_DB=$1
DEST_DB=$2
if [ ! -f "$SRC_DB" ]; then
echo "Error: Source database file $SRC_DB does not exist"
exit 1
fi
echo "Creating feature tables in $DEST_DB using data from $SRC_DB..."
# Create md_1min_trade_features table
echo "Creating md_1min_trade_features table..."
sqlite3 "$DEST_DB" "
DROP TABLE IF EXISTS md_1min_trade_features;
CREATE TABLE IF NOT EXISTS md_1min_trade_features (
bin_tstamp TEXT,
tstamp_ns INTEGER,
exchange_id TEXT,
instrument_id TEXT,
price_mean REAL,
price_median REAL,
volume REAL,
vwap REAL,
signed_volume REAL,
order_flow_imbalance REAL,
num_trades INTEGER,
avg_trade_size REAL,
PRIMARY KEY (bin_tstamp, exchange_id, instrument_id)
);"
# Create index for md_1min_trade_features
echo "Creating index for md_1min_trade_features..."
sqlite3 "$DEST_DB" "
CREATE UNIQUE INDEX IF NOT EXISTS md_1min_trade_features_uidx
ON md_1min_trade_features(bin_tstamp, exchange_id, instrument_id);"
# Populate md_1min_trade_features using source database
# Trades are bucketed into 1-minute bins; condition B/S signs the quantity.
# NOTE(review): the price_median expression averages ranks in
# [FLOOR(cnt/2.0), CEIL(cnt/2.0)] — for an even count both bounds equal
# cnt/2, so a single middle element is used rather than the mean of the two
# central values; confirm this is the intended median definition.
echo "Populating md_1min_trade_features..."
sqlite3 "$SRC_DB" "ATTACH DATABASE '$DEST_DB' AS dest;
WITH trade_metrics AS (
SELECT
tstamp,
strftime('%Y-%m-%d %H:%M:00', tstamp) as bin_tstamp,
exchange_id,
instrument_id,
px as price,
qty,
CASE
WHEN condition = 'B' THEN qty
WHEN condition = 'S' THEN -qty
ELSE 0
END as signed_qty
FROM md_trades
),
trade_metrics_agg AS (
SELECT
bin_tstamp,
exchange_id,
instrument_id,
COUNT(*) as cnt,
MIN(tstamp) as min_tstamp,
MAX(tstamp) as max_tstamp
FROM trade_metrics
GROUP BY bin_tstamp, exchange_id, instrument_id
)
INSERT INTO dest.md_1min_trade_features
SELECT
tm.bin_tstamp,
CAST(strftime('%s', tm.bin_tstamp) * 1000000000 AS INTEGER) as tstamp_ns,
tm.exchange_id,
tm.instrument_id,
AVG(price) as price_mean,
AVG(CASE WHEN rank_num >= FLOOR(cnt/2.0) AND rank_num <= CEIL(cnt/2.0) THEN price ELSE NULL END) as price_median,
SUM(qty) as volume,
SUM(price * qty) / SUM(qty) as vwap,
SUM(signed_qty) as signed_volume,
SUM(CASE WHEN signed_qty > 0 THEN signed_qty ELSE 0 END) -
SUM(CASE WHEN signed_qty < 0 THEN ABS(signed_qty) ELSE 0 END) as order_flow_imbalance,
COUNT(*) as num_trades,
AVG(qty) as avg_trade_size
FROM (
SELECT
tm.*,
tma.cnt,
ROW_NUMBER() OVER (PARTITION BY tm.bin_tstamp, tm.exchange_id, tm.instrument_id ORDER BY price) as rank_num
FROM trade_metrics tm
JOIN trade_metrics_agg tma
ON tm.bin_tstamp = tma.bin_tstamp
AND tm.exchange_id = tma.exchange_id
AND tm.instrument_id = tma.instrument_id
) tm
GROUP BY tm.bin_tstamp, tm.exchange_id, tm.instrument_id;"
# Create md_1min_quote_features table in destination database
echo "Creating md_1min_quote_features table..."
sqlite3 "$DEST_DB" "
DROP TABLE IF EXISTS md_1min_quote_features;
CREATE TABLE IF NOT EXISTS md_1min_quote_features (
bin_tstamp TEXT,
tstamp_ns INTEGER,
exchange_id TEXT,
instrument_id TEXT,
mid_price_open REAL,
mid_price_high REAL,
mid_price_low REAL,
mid_price_close REAL,
mid_price_mean REAL,
rel_spread_mean REAL,
rel_spread_min REAL,
rel_spread_max REAL,
rel_spread_first REAL,
rel_spread_last REAL,
l1_imbalance_mean REAL,
l1_imbalance_min REAL,
l1_imbalance_max REAL,
l1_imbalance_first REAL,
l1_imbalance_last REAL,
micro_price_mean REAL,
micro_price_min REAL,
micro_price_max REAL,
micro_price_first REAL,
micro_price_last REAL,
weighted_mid_mean REAL,
weighted_mid_min REAL,
weighted_mid_max REAL,
weighted_mid_first REAL,
weighted_mid_last REAL,
PRIMARY KEY (bin_tstamp, exchange_id, instrument_id)
);"
# Create index for md_1min_quote_features
echo "Creating index for md_1min_quote_features..."
sqlite3 "$DEST_DB" "
CREATE UNIQUE INDEX IF NOT EXISTS md_1min_quote_features_uidx
ON md_1min_quote_features(bin_tstamp, exchange_id, instrument_id);"
# Populate md_1min_quote_features using source database
# Mixes GROUP BY aggregates (AVG/MIN/MAX per minute bin) with window-function
# first/last values over window w, which spans the same minute bin ordered by
# tstamp. SQLite evaluates window functions over the post-GROUP BY rows;
# NOTE(review): verify first/last semantics against a known day before relying
# on the *_first/*_last/open/close columns.
echo "Populating md_1min_quote_features..."
sqlite3 "$SRC_DB" "ATTACH DATABASE '$DEST_DB' AS dest;
INSERT INTO dest.md_1min_quote_features
SELECT
strftime('%Y-%m-%d %H:%M:00', tstamp) as bin_tstamp,
CAST(strftime('%s', tstamp) * 1000000000 AS INTEGER) as tstamp_ns,
exchange_id,
instrument_id,
FIRST_VALUE((ask_px + bid_px) / 2.0) OVER w as mid_price_open,
MAX((ask_px + bid_px) / 2.0) as mid_price_high,
MIN((ask_px + bid_px) / 2.0) as mid_price_low,
LAST_VALUE((ask_px + bid_px) / 2.0) OVER w as mid_price_close,
AVG((ask_px + bid_px) / 2.0) as mid_price_mean,
AVG((ask_px - bid_px) / ((ask_px + bid_px) / 2.0)) as rel_spread_mean,
MIN((ask_px - bid_px) / ((ask_px + bid_px) / 2.0)) as rel_spread_min,
MAX((ask_px - bid_px) / ((ask_px + bid_px) / 2.0)) as rel_spread_max,
FIRST_VALUE((ask_px - bid_px) / ((ask_px + bid_px) / 2.0)) OVER w as rel_spread_first,
LAST_VALUE((ask_px - bid_px) / ((ask_px + bid_px) / 2.0)) OVER w as rel_spread_last,
AVG((bid_qty - ask_qty) / (bid_qty + ask_qty)) as l1_imbalance_mean,
MIN((bid_qty - ask_qty) / (bid_qty + ask_qty)) as l1_imbalance_min,
MAX((bid_qty - ask_qty) / (bid_qty + ask_qty)) as l1_imbalance_max,
FIRST_VALUE((bid_qty - ask_qty) / (bid_qty + ask_qty)) OVER w as l1_imbalance_first,
LAST_VALUE((bid_qty - ask_qty) / (bid_qty + ask_qty)) OVER w as l1_imbalance_last,
AVG((ask_px * bid_qty + bid_px * ask_qty) / (bid_qty + ask_qty)) as micro_price_mean,
MIN((ask_px * bid_qty + bid_px * ask_qty) / (bid_qty + ask_qty)) as micro_price_min,
MAX((ask_px * bid_qty + bid_px * ask_qty) / (bid_qty + ask_qty)) as micro_price_max,
FIRST_VALUE((ask_px * bid_qty + bid_px * ask_qty) / (bid_qty + ask_qty)) OVER w as micro_price_first,
LAST_VALUE((ask_px * bid_qty + bid_px * ask_qty) / (bid_qty + ask_qty)) OVER w as micro_price_last,
AVG((ask_px * ask_qty + bid_px * bid_qty) / (bid_qty + ask_qty)) as weighted_mid_mean,
MIN((ask_px * ask_qty + bid_px * bid_qty) / (bid_qty + ask_qty)) as weighted_mid_min,
MAX((ask_px * ask_qty + bid_px * bid_qty) / (bid_qty + ask_qty)) as weighted_mid_max,
FIRST_VALUE((ask_px * ask_qty + bid_px * bid_qty) / (bid_qty + ask_qty)) OVER w as weighted_mid_first,
LAST_VALUE((ask_px * ask_qty + bid_px * bid_qty) / (bid_qty + ask_qty)) OVER w as weighted_mid_last
FROM md_quotes
GROUP BY strftime('%Y-%m-%d %H:%M:00', tstamp), exchange_id, instrument_id
WINDOW w AS (
PARTITION BY strftime('%Y-%m-%d %H:%M:00', tstamp), exchange_id, instrument_id
ORDER BY tstamp
RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING
);"
# Copy the md_1min_bars table to destination database
echo "Copying md_1min_bars to destination..."
sqlite3 "$DEST_DB" "
DROP TABLE IF EXISTS md_1min_bars;
CREATE TABLE IF NOT EXISTS md_1min_bars (
bin_tstamp TEXT,
tstamp_ns INTEGER,
exchange_id TEXT,
instrument_id TEXT,
open REAL,
high REAL,
low REAL,
close REAL,
volume REAL,
vwap REAL,
num_trades INTEGER,
PRIMARY KEY (bin_tstamp, exchange_id, instrument_id)
);"
echo "Creating index for md_1min_bars..."
sqlite3 "$DEST_DB" "
CREATE UNIQUE INDEX IF NOT EXISTS md_1min_bars_uidx
ON md_1min_bars(bin_tstamp, exchange_id, instrument_id);"
# Straight copy; assumes source md_1min_bars columns match the DDL above
# (SELECT * is positional) — TODO confirm column order in the source schema.
echo "Populating md_1min_bars..."
sqlite3 "$SRC_DB" "ATTACH DATABASE '$DEST_DB' AS dest;
INSERT INTO dest.md_1min_bars
SELECT * FROM md_1min_bars;"
# Create the combined features view in destination database
# LEFT JOINs keep every bar row even when a minute has no quotes/trades.
echo "Creating combined features view..."
sqlite3 "$DEST_DB" "
DROP VIEW IF EXISTS md_1min_features_view;
CREATE VIEW IF NOT EXISTS md_1min_features_view AS
SELECT
b.bin_tstamp,
b.tstamp_ns,
b.exchange_id,
b.instrument_id,
-- OHLCV data from md_1min_bars
b.open,
b.high,
b.low,
b.close,
b.volume,
b.vwap,
b.num_trades,
-- Quote features
q.mid_price_open,
q.mid_price_high,
q.mid_price_low,
q.mid_price_close,
q.mid_price_mean,
q.rel_spread_mean,
q.rel_spread_min,
q.rel_spread_max,
q.rel_spread_first,
q.rel_spread_last,
q.l1_imbalance_mean,
q.l1_imbalance_min,
q.l1_imbalance_max,
q.l1_imbalance_first,
q.l1_imbalance_last,
q.micro_price_mean,
q.micro_price_min,
q.micro_price_max,
q.micro_price_first,
q.micro_price_last,
q.weighted_mid_mean,
q.weighted_mid_min,
q.weighted_mid_max,
q.weighted_mid_first,
q.weighted_mid_last,
-- Trade features
t.price_mean as trade_price_mean,
t.price_median as trade_price_median,
t.signed_volume,
t.order_flow_imbalance,
t.avg_trade_size
FROM md_1min_bars b
LEFT JOIN md_1min_quote_features q
ON b.bin_tstamp = q.bin_tstamp
AND b.exchange_id = q.exchange_id
AND b.instrument_id = q.instrument_id
LEFT JOIN md_1min_trade_features t
ON b.bin_tstamp = t.bin_tstamp
AND b.exchange_id = t.exchange_id
AND b.instrument_id = t.instrument_id;"
echo "Feature tables created and populated successfully in $DEST_DB!"

View File

@ -1,7 +1,10 @@
#!/usr/bin/env bash
usage() {
echo "Usage: $0 [-h <host (hs01*/cloud21)>] [-d <YYYYMMDD> (yesterday*)] [-s <source> (cvttdata/cloud21*)>]"
echo -n "Usage: $0 [-h <host (hs01*/cloud21)>]"
echo -n " [-d <YYYYMMDD> (yesterday*)]"
echo -n " [-s <source> (cloud28/cloud21*)>]"
echo " [-t <target_dir> (/opt/jupyter_gpu/data/crypto_md)]"
exit 1
}
@ -19,11 +22,15 @@ is_valid() {
return 1
}
# ------- D E F A U L T S
date=""
host=hs01
source=cloud21
TargetDir="/opt/jupyter_gpu/data/crypto_md"
# ------- D E F A U L T S
while getopts ":h:d:s:" opt; do
while getopts ":h:d:s:t:" opt; do
case ${opt} in
d )
date=$OPTARG
@ -34,6 +41,9 @@ while getopts ":h:d:s:" opt; do
s )
source=$OPTARG
;;
t )
TargetDir=$OPTARG
;;
\? )
echo "Invalid option: -$OPTARG" >&2
usage
@ -57,7 +67,7 @@ if ! is_valid "${host}" "${valid_hosts[@]}" ; then
usage
fi
valid_sources=('cloud21' 'cvttdata')
valid_sources=('cloud21' 'cloud28')
if ! is_valid ${source} ${valid_sources[@]} ; then
echo "Source '${source}' is not valid"
usage
@ -82,7 +92,6 @@ SourceFile=$(date -d ${date} "+%Y%m%d.mktdata.db")
SourceFileZip="${SourceFile}.gz"
SourceFilePath=$(date -d ${date} "+${SourceRootDir}/%Y/%m/${SourceFileZip}")
TargetDir="/opt/jupyter_gpu/data/crypto_md"
TargetFile=$(date -d ${date} "+%Y%m%d.mktdata.ohlcv.db")
TargetFilePath="${TargetDir}/${TargetFile}"

View File

@ -1,44 +0,0 @@
#!/bin/bash
# Archive yesterday's crypto market data from a TimescaleDB host into the
# local date-partitioned archive tree as compressed SQLite files.
#
# Usage: ${0} <host: cvttdata|cloud21> [schemas]
#   host    - selects the archive directory and the read-only DB credentials key
#   schemas - comma-separated exchange schemas (default: coinbase,bnbspot,bnbfut)
#
# Exits non-zero on unknown host or if the archive directory cannot be created.
Python=/home/cvtt/.pyenv/python3.12-venv/bin/python3.12
RootDir=/home/cvtt/prod
export PYTHONPATH=${RootDir}

host=${1}
if [ "${host}" == "cvttdata" ]
then
ArchiveRootDir=/home/cvtt/prod/archive/md_archive/crypto/cvttdata
CredKey=TSDB_MD_CVTTDATA_RO
elif [ "${host}" == "cloud21" ]
then
ArchiveRootDir=/home/cvtt/prod/archive/md_archive/crypto/cloud21
CredKey=TSDB_MD_CLD21_RO
else
echo "Unknown host ${host}. ${0} Aborted."
exit 1
fi

# Fail early if the archive tree cannot be created (original ignored the error).
mkdir -p "${ArchiveRootDir}" || exit 1

yesterday=$(date -d "yesterday" +%Y%m%d)

# Default the schema list when no second argument is given.
Schemas=${2:-coinbase,bnbspot,bnbfut}
echo "Schemas=${Schemas}"

# Build the command as an array so each argument survives intact — the
# original string-concatenation + eval form broke on any whitespace in
# values and was injection-prone.
Cmd=(
"${Python}"
"${RootDir}/cvttpy/research/utils/archive_ts_md.py"
"--config=http://cloud23.cvtt.vpn:6789/apps/md_recorder"
"--db_credentials_key=${CredKey}"
"--date=${yesterday}"
"--schemas=${Schemas}"
"--root_dir=${ArchiveRootDir}"
"--format=SQLite"
"--compress"
)
echo "${Cmd[@]}"
"${Cmd[@]}"
echo "${0} ${*} Done."

View File

@ -3,7 +3,7 @@
usage() {
echo -n "Usage: ${0}"
echo -n " [-c <config (dflt: apps/cvtt_eqt_alpaca)>]"
echo -n " [-s <config_serverice (dflt: http://cloud23.cvtt.vpn:6789)>]"
echo -n " [-s <config_serverice (dflt: http://cloud16.cvtt.vpn:6789)>]"
echo -n " [-N <name (dflt: ALPACA-SNDBX)>]"
echo
exit 1
@ -23,7 +23,7 @@ export PYTHONPATH=${RootDir}
StatusChannel=Status-CVTT
AlertChannel=Alerts-CVTT
Sender=${RootDir}/ops/utils/send_mmost.sh
ConfigService=http://cloud23.cvtt.vpn:6789
ConfigService=http://cloud16.cvtt.vpn:6789
Config=apps/cvtt_eqt_alpaca
Name="ALPACA-SNDBX"

View File

@ -13,7 +13,7 @@ RootDir="${HOME}/prod"
AlertChannel=Alerts-CVTT
Sender=${RootDir}/ops/utils/send_mmost.sh
ConfigUrl=http://cloud23.cvtt.vpn:6789/admin/cvtt_hosts
ConfigUrl=http://cloud16.cvtt.vpn:6789/admin/cvtt_hosts
HOSTS_CONFIG=$(curl -s ${ConfigUrl} | ${HOME}/bin/hjson -j)

View File

@ -12,7 +12,7 @@ RootDir="${HOME}/prod"
AlertChannel=Alerts-CVTT
Sender=${RootDir}/ops/utils/send_mmost.sh
ConfigUrl=http://cloud23.cvtt.vpn:6789/admin/cvtt_services
ConfigUrl=http://cloud16.cvtt.vpn:6789/admin/cvtt_services
SERVICES_CONFIG=$(curl -s ${ConfigUrl} | ${HOME}/bin/hjson -j)

View File

@ -18,7 +18,7 @@ get_user_hosts() {
local User=${1}
local Domain=${2}
Cmd="curl -s http://cloud23.cvtt.vpn:6789/admin/cvtt_hosts"
Cmd="curl -s http://cloud16.cvtt.vpn:6789/admin/cvtt_hosts"
Cmd+=" | ${HOME}/bin/hjson -j"
Cmd+=" | jq -r"
Cmd+=" --arg domain \"${Domain}\""

View File

@ -4,7 +4,7 @@ Start=${1}
NumJobs=${2}
InstListFile=${3}
export CalendarURL=http://cloud23.cvtt.vpn:8000/api/v1/markets/hours?mic=XNYS
export CalendarURL=https://trading-calendar.cvtt.net/api/v1/markets/hours?mic=XNYS
is_business_day() {
dt=${1}
@ -46,7 +46,7 @@ echo "Start=${Start} End=${End} NumJobs=${NumJobs}"
export PYTHONPATH=/home/cvtt/prod
export Python=/home/cvtt/.pyenv/python3.12-venv/bin/python3
export Config=http://cloud23.cvtt.vpn:6789/apps/minimal_md
export Config=http://cloud16.cvtt.vpn:6789/apps/minimal_md
export PyScript=/home/cvtt/prod/cvttpy/exchanges/alpaca/hist_md/hist_md_bars.py
export OutputDir=/home/cvtt/prod/archive/md_archive/equity/alpaca_md # Local

View File

@ -5,7 +5,7 @@ export PYTHONPATH=${HOME}/prod
Python=${HOME}/.pyenv/python3.12-venv/bin/python3
Script=${HOME}/prod/cvttpy/exchanges/alpaca/hist_md/rl_calc_loader.py
DbFile=${HOME}/prod/data/rel_liquidity.db
Config=http://cloud23.cvtt.vpn:6789/apps/minimal_md_eqt
Config=http://cloud16.cvtt.vpn:6789/apps/minimal_md_eqt
Cmd="${Python}"
Cmd="${Cmd} ${Script}"

View File

@ -3,7 +3,7 @@
is_business_day() {
dt=${1}
CalendarURL=http://cloud23.cvtt.vpn:8000/api/v1/markets/hours?mic=XNYS
CalendarURL=https://trading-calendar.cvtt.net/api/v1/markets/hours?mic=XNYS
open_time=$(curl -s "${CalendarURL}&start=${dt}&end=${dt}" | jq '.[] | .open_time')
if [ -n "${open_time}" ]; then
return 0

View File

@ -26,9 +26,11 @@ run_checklist() {
declare -A Commands
Commands=(
["hs01:cloud21"]="ssh cvtt@hs01.cvtt.vpn ls -l /works/cvtt/md_archive/crypto/cloud21/${yr}/${mn} | tail -5"
["hs01:cvttdata"]="ssh cvtt@hs01.cvtt.vpn ls -l /works/cvtt/md_archive/crypto/cvttdata/${yr}/${mn} | tail -5"
["cloud21:cloud21"]="ssh cvtt@cloud21.cvtt.vpn ls -l /opt/store/cvtt/md_archive/crypto/cloud21/${yr}/${mn} | tail -5"
["cloud21:cvttdata"]="ssh cvtt@cloud21.cvtt.vpn ls -l /opt/store/cvtt/md_archive/crypto/cvttdata/${yr}/${mn} | tail -5"
["hs01:cloud28"]="ssh cvtt@hs01.cvtt.vpn ls -l /works/cvtt/md_archive/crypto/cloud28/${yr}/${mn} | tail -5"
["hs01:sim"]="ssh cvtt@hs01.cvtt.vpn ls -l /works/cvtt/md_archive/crypto/sim/ | tail -5"
["cloud21:cloud21"]="ssh cvtt@cloud21.cvtt.vpn ls -l /works/cvtt/md_archive/crypto/cloud21/${yr}/${mn} | tail -5"
["cloud21:cloud28"]="ssh cvtt@cloud21.cvtt.vpn ls -l /works/cvtt/md_archive/crypto/cloud28/${yr}/${mn} | tail -5"
["cloud21:sim"]="ssh cvtt@cloud21.cvtt.vpn ls -l /works/cvtt/md_archive/crypto/sim | tail -5"
["gpushnik"]="ssh oleg@gpushnik.cvtt.vpn 'ls -l /opt/jupyter_gpu/data/crypto_md | tail -10'"
)

View File

@ -29,12 +29,14 @@ run_checklist() {
Commands["hs01"]+="ssh cvtt@hs01.cvtt.vpn ls -l /works/cvtt/md_archive/equity/alpaca_md/${yr}/${sym} | tail -3; "
done
Commands["hs01"]+="echo"
Commands["hs01:sim"]="ssh cvtt@hs01.cvtt.vpn ls -l /works/cvtt/md_archive/equity/alpaca_md/sim | tail -5"
Commands["cloud21"]=""
for sym in ${CheckSymbols}; do
Commands["cloud21"]+="ssh cvtt@cloud21.cvtt.vpn ls -l /opt/store/cvtt/md_archive/equity/alpaca_md/${yr}/${sym} | tail -3; "
Commands["cloud21"]+="ssh cvtt@cloud21.cvtt.vpn ls -l /works/cvtt/md_archive/equity/alpaca_md/${yr}/${sym} | tail -3; "
done
Commands["cloud21"]+="echo"
Commands["cloud21:sim"]="ssh cvtt@cloud21.cvtt.vpn ls -l /works/cvtt/md_archive/equity/alpaca_md/sim | tail -5"
Commands["gpushnik"]="ssh oleg@gpushnik.cvtt.vpn 'ls -l /opt/jupyter_gpu/data/eqty_md | tail -10'"
@ -46,6 +48,13 @@ run_checklist() {
done
}
tmpfile=$(mktemp)
function cleanup {
cd ${HOME}
rm -f ${tmpfile}
}
trap cleanup EXIT
# run_checklist
tmpfile=$(mktemp)

View File

@ -4,7 +4,7 @@ export PYTHONPATH=/home/cvtt/prod
Cmd="/home/cvtt/.pyenv/python3.12-venv/bin/python3"
Cmd="${Cmd} /home/cvtt/prod/cvttpy/apps/research/exchange_trading_stats.py"
Cmd="${Cmd} --config=http://cloud23.cvtt.vpn:6789/apps/tests/listen_market_data"
Cmd="${Cmd} --config=http://cloud16.cvtt.vpn:6789/apps/tests/listen_market_data"
Cmd="${Cmd} --active_exchanges=OKX,GEMINI,BITSTAMP,COINBASE_AT,BNBSPOT,KRAKEN"
Cmd="${Cmd} --instruments=OKX:PAIR-BTC-USDT,GEMINI:PAIR-BTC-USD,BITSTAMP:PAIR-BTC-USD,COINBASE:PAIR-BTC-USD,BNBSPOT:PAIR-BTC-USDT,KRAKEN:PAIR-BTC-USD"
Cmd="${Cmd} --db_file=/home/cvtt/prod/data/exchange_trading_stats.db"

View File

@ -1,36 +1,63 @@
#!/bin/bash
function usage {
echo "Usage: ${0} <log directory> [days (default 2)]"
echo -n "Usage: ${0}"
echo -n " -L <log directory>"
echo -n " [ -A <archive_logs_dir> (default /works/archive/logs)]"
echo -n " [-D <older than time criteria> (default: '2 days ago')]"
echo
exit 1
}
echo Starting $0 $*
LogDir=${1}
# ---- D e f a u l t s
LogArchiveDir=/works/archive/logs
DateCriteria="2 days ago"
# ---- D e f a u l t s
# ---------------- cmdline
while getopts "A:L:D:" opt; do
case ${opt} in
A )
LogArchiveDir=$OPTARG
;;
L )
LogDir=$OPTARG
;;
D )
DateCriteria=$OPTARG
;;
\? )
echo "Invalid option: -$OPTARG" >&2
usage
;;
: )
echo "Option -$OPTARG requires an argument." >&2
usage
;;
esac
done
# ---------------- cmdline
if [ "${LogDir}" == "" ]
then
usage
fi
Days=${2}
if [ "${Days}" == "" ]
then
Days=2
fi
Cmd="mkdir -p ${LogArchiveDir}"
echo ${Cmd} && eval ${Cmd}
DateCriteria="${Days} days ago"
echo "Looking for log files older than '${DateCriteria}' in ${LogDir}"
Oldest=$(date -d "${DateCriteria}" '+%Y-%m-%d')
Oldest=$(date -d "${DateCriteria}" '+%Y-%m-%d %H:%M:%S')
echo "Looking for log files older than ${DateCriteria} in ${LogDir}"
Cmd="find ${LogDir}/ '(' -name '*.log' -o -name '*.log.*' ')' -type f -not -newermt \"${Oldest}\""
echo $Cmd
LogArchiveDir=${HOME}/prod/archive/logs
mkdir -p ${LogArchiveDir}
echo "find ${LogDir}/ '(' -name '*.log' -o -name '*.log.*' ')' -type f -not -newermt ${Oldest})"
files=$(find ${LogDir}/ '(' -name '*.log' -o -name '*.log.*' ')' -type f -not -newermt ${Oldest})
files=$(eval ${Cmd})
if [ "$files" == "" ]
then
echo "No files found older than ${Oldest} in ${LogDir}"

View File

@ -1,40 +1,57 @@
#!/bin/bash
# FOR cloud hosts with limited disk space - move to storage server
# FOR hosts with limited disk space - move to storage server
function usage {
echo "Usage: ${0} <host> <from_dir> <days>"
echo -n "Usage: ${0}"
echo -n " -H <host_label>"
echo -n " [ -A <archive_dir> (default /works/archive)]"
echo -n " [-D <older than time criteria> (default: '2 days ago')]"
echo
exit 1
}
echo Starting $0 $*
# ---- D e f a u l t s
ArchiveDir=/works/archive
DateCriteria="2 days ago"
FromHost=$(hostname -s)
# ---- D e f a u l t s
# ---------------- cmdline
while getopts "A:H:D:" opt; do
case ${opt} in
A )
ArchiveDir=$OPTARG
;;
H )
FromHost=$OPTARG
;;
D )
DateCriteria=$OPTARG
;;
\? )
echo "Invalid option: -$OPTARG" >&2
usage
;;
: )
echo "Option -$OPTARG requires an argument." >&2
usage
;;
esac
done
# ---------------- cmdline
if [ "${FromHost}" == "" ]
then
usage
fi
ArchiveDir=${2}
if [ "${ArchiveDir}" == "" ]
then
usage
fi
Days=${3}
if [ "${Days}" == "" ]
then
Days=2
fi
DateCriteria="${Days} days ago"
TargetHost=cloud21.cvtt.vpn
TargetRootDir=/opt/store/cvtt/archive
Oldest=$(date -d "${DateCriteria}" '+%Y-%m-%d')
Now=$(date '+%Y%m%d_%H%M%S')
Oldest=$(date -d "${DateCriteria}" '+%Y-%m-%d %H:%M:%S')
echo "Looking for log files older than ${DateCriteria} in ${ArchiveDir}"
Cmd="find ${ArchiveDir}/"
@ -45,7 +62,7 @@ Cmd="${Cmd} -o -name '*.logs.*'"
Cmd="${Cmd} -o -name '*.tgz'"
Cmd="${Cmd} ')'"
Cmd="${Cmd} -type f"
Cmd="${Cmd} -not -newermt ${Oldest}"
Cmd="${Cmd} -not -newermt \"${Oldest}\""
echo ${Cmd}
files=$(eval ${Cmd})