Compare commits

...

215 Commits

Author SHA1 Message Date
b288e997b6 moved daily_sim to cvtt_apps 2026-01-06 15:32:46 +00:00
00addf2027 fix 2026-01-06 14:28:05 +00:00
02f74a6f3f fix 2026-01-06 14:27:16 +00:00
Cryptoval2
231bfd079f progress 2025-10-19 15:41:05 +00:00
Cryptoval2
2512023079 smarter crypto simdata - using DB_HOST_BACKUP 2025-10-19 15:13:50 +00:00
Cryptoval2
7f05a919d3 added --mkpath to rsync 2025-09-13 21:10:51 +00:00
Cryptoval2
fda425c680 eqt sim data to dated target directory - fix 2025-09-13 20:48:04 +00:00
Cryptoval2
52ce7d3f55 eqt sim data to dated target directory 2025-09-13 20:40:40 +00:00
Cryptoval2
64d9408506 fix 2025-09-13 19:39:02 +00:00
Cryptoval2
d427e22d41 crypto sim -> dated target directory 2025-09-12 23:01:51 +00:00
0647e9d9b7 fix 2025-05-18 21:43:53 -04:00
66d2840a74 eqt 2025-05-18 21:38:09 -04:00
b4dfecf12a test 2025-05-18 21:25:34 -04:00
7f47905579 testing 2025-05-18 21:17:55 -04:00
08636b2206 fix 2025-05-18 20:56:40 -04:00
3b1a1fc9ba eqt initial 2025-05-18 20:46:58 -04:00
19c7313326 progress 2025-05-18 19:40:08 -04:00
e23ddd5e44 progress 2025-05-18 19:35:51 -04:00
9bb868845f fix 2025-05-18 19:11:19 -04:00
aced5b23e0 progress 2025-05-18 19:05:25 -04:00
9eb4f8e117 daily sim for docker.initial 2025-05-18 18:42:08 -04:00
3e49b5bf27 Merge branch 'master' of cloud21.cvtt.vpn:/opt/store/git/cvtt2/ops 2025-05-17 23:29:01 -04:00
0876776d60 progress 2025-05-17 19:30:00 -04:00
Cryptoval2
fb685ca91d cleaning 2025-05-16 16:00:44 +00:00
Cryptoval2
f1a83c8e33 . 2025-05-16 15:58:26 +00:00
Cryptoval2
26540a5294 cleaning 2025-05-16 15:57:12 +00:00
c37d62637a Merge branch 'master' of cloud21.cvtt.vpn:/opt/store/git/cvtt2/ops 2025-05-16 11:36:33 -04:00
bcb257add4 deprecated 2025-05-16 11:36:12 -04:00
Cryptoval2
ccc45b06ea utils upgrade 2025-05-16 15:16:01 +00:00
Cryptoval2
ebcf3ac20b Merge branch 'master' of cloud21.cvtt.vpn:/opt/store/git/cvtt2/ops 2025-05-16 13:28:39 +00:00
Cryptoval2
d66bf5f48b 2.1.3,remove cvttdata 2025-05-16 13:28:00 +00:00
5c3fd357ab Merge branch 'master' of cloud21.cvtt.vpn:/opt/store/git/cvtt2/ops 2025-05-15 18:28:08 -04:00
Cryptoval2
1432794197 2.1.2,fix 2025-05-15 19:40:31 +00:00
Cryptoval2
52dd9997ca 2.1.1,archive_logs interface change 2025-05-15 19:30:09 +00:00
066bdbdb93 new aggregate features are added to research 2025-05-05 15:31:56 -04:00
Cryptoval2
f59c729c6f flexible target directory for loading sim data 2025-04-16 17:15:03 +00:00
Cryptoval2
df04d764c8 prepare crypto sim enhanced 2025-03-24 17:25:57 +00:00
Cryptoval2
06610de992 OPS-10 2025-03-21 17:04:23 +00:00
9aaf356048 OPS-6 2025-03-18 13:47:51 -04:00
f2bb798fff fix 2025-03-07 23:16:48 -05:00
b0a0080a4a replacing "latest" with real docker image tag 2025-03-07 23:12:38 -05:00
e35b15f024 replacing "latest" with real docker image tag 2025-03-07 23:09:39 -05:00
3bd31b7d8b crypto_md_day - specific docker tag 2025-03-07 23:02:06 -05:00
085308ff9c using specific version for md_recorder 2025-03-07 22:42:46 -05:00
9f789738e9 using HA trading-calendar and config service 2025-02-03 15:31:34 -05:00
d5e1f0a446 fix 2025-01-27 18:24:47 -05:00
f639bd41f5 docker build imagages script fix 2025-01-27 18:15:30 -05:00
a35bcf089e minor 2025-01-27 17:07:28 -05:00
5bf02ae67a release_version.txt -> VERSION 2025-01-27 16:41:22 -05:00
43c50befc4 docker to use CONFIG_SERVICE 2025-01-24 11:39:04 -05:00
48ecf7cca2 storage healthcheck - ignore known_hosts 2025-01-22 17:48:13 -05:00
5671ec5044 homestore ==> hs01 2025-01-13 21:54:30 -05:00
a7e442b954 homestore ==> hs01 2025-01-13 20:04:52 -05:00
7e0c389536 minor - docker build script logging 2024-12-16 19:58:02 -05:00
058431a37c version only 2024-12-16 18:08:40 -05:00
9de3ed3ab9 creds to be taken from ~/.creds 2024-12-16 17:46:05 -05:00
31aca36fe7 fix: prepare eqt simdata script 2024-12-06 20:11:33 -05:00
92538f0819 fix: prepare eqt simdata script 2024-12-06 20:08:53 -05:00
1cfac2b32d fix: prepare eqt simdata script 2024-12-06 20:00:23 -05:00
625670c69e prepare sim for crypto (before added num_trades) - progress 2024-12-06 19:50:02 -05:00
11ab9dd2c2 prepare sim for crypto (before added num_trades) 2024-12-06 17:34:37 -05:00
bbbdbb5473 16 -> 23 2024-12-04 16:42:35 -05:00
cade1cd055 simdata prepare 2024-12-03 20:11:36 -05:00
ed44757900 fix 2024-12-03 13:55:39 -05:00
340283c0a9 fix: syntax 2024-12-03 13:54:56 -05:00
f4365d32e5 fix: no boolean for jq, it considers "false" as missing (null) 2024-12-03 13:41:32 -05:00
1fe5edcd74 build script fix 2024-12-03 12:37:48 -05:00
8510226599 host checking improving 2024-12-03 12:32:40 -05:00
10105ad639 prepare equity simulation data 2024-11-27 20:46:10 -05:00
9b2bda6257 crypto_exch_stats docker run 2024-11-06 10:46:41 -05:00
80a6ffeb18 docker_image_builder fix 2024-11-02 23:36:26 -04:00
e7c02587c2 progress docker image builder 2024-11-02 21:22:23 -04:00
cc2c07741a fix 2024-11-02 17:54:16 -04:00
36d6914c8c initial version of docker_image_builder.sh 2024-11-02 16:42:06 -04:00
86a58462bf initial (not working) version of docker_image_builder.sh 2024-11-02 13:23:51 -04:00
f0a639940b fix 2024-11-01 17:30:59 -04:00
3fec261a46 changes to build scripts 2024-11-01 17:26:45 -04:00
618a7eb0cf md eqty checklist to use new structure 2024-11-01 13:15:34 -04:00
512f5ce14d build for cvtt-rust uses cargo install 2024-10-31 23:35:41 -04:00
9a3215524e build for cvtt-rust 2024-10-31 19:50:56 -04:00
84a8c1cb31 case insensitive to_check parameter host and services avalability check 2024-10-31 17:37:19 -04:00
4774bd9005 fix 2024-10-31 17:28:25 -04:00
59e7002a59 services avalability check 2024-10-31 17:21:25 -04:00
1387893a14 fix 2024-10-31 15:39:33 -04:00
87e1a0610b progress: host checks 2024-10-31 15:17:21 -04:00
6a6046050c fix 2024-10-31 12:04:08 -04:00
0e23024aab using new(rust) alpaca market data structure for hbar 2024-10-31 11:51:42 -04:00
d32bf31f4a fix 2024-10-30 15:34:19 -04:00
65c87f97e6 fix 2024-10-30 15:29:10 -04:00
ae79f940fb update for storage check 2024-10-30 15:21:38 -04:00
80ae780f02 fix 2024-10-29 19:51:24 -04:00
5318b16b7f alpaca hbar load 2024-10-29 19:44:41 -04:00
03c341cefb fix2 2024-10-29 19:07:30 -04:00
db7ea93c6b fix 2024-10-29 18:38:21 -04:00
d7c1a3c456 load alpaca QAT 2024-10-29 18:25:17 -04:00
5bf2d043a2 minor 2024-10-04 11:34:26 -04:00
72d5a849be 2024-09-23 08:04:54 -04:00
6125b10935 minor 2024-09-23 04:13:31 -04:00
c637faf4bf cleaned out several docker scripts. added checklist sources 2024-09-11 11:37:51 -04:00
a8f3b5c495 beginning to move docker to prod repos 2024-09-10 16:57:26 -04:00
26d0b1a4e0 config (for all trading images) is made more flexible, docker compose initial 2024-09-09 20:44:49 -04:00
2c26e4d258 config (for executor) is made more flexible, docker compose initial 2024-09-09 17:39:00 -04:00
0624f35ddd docker compose 2024-09-07 12:47:04 -04:00
7052563d3b Merge branch 'master' of cloud21.cvtt.vpn:/opt/store/git/ops into moving2python_3.12 2024-09-07 12:05:41 -04:00
fb60f5f4f6 progress 2024-09-06 21:12:46 -04:00
98fe0e08f3 minor 2024-09-05 00:03:10 -04:00
0b2d9e40ee python 3.12, etc. 2024-09-03 19:52:16 -04:00
3eec3e6bf8 to python3.12 2024-09-03 18:28:38 -04:00
0eded0e6da homestore /works/cvtt ownership moved to cvtt 2024-09-02 16:56:43 -04:00
96247c1248 homestore /works/cvtt ownership moved to cvtt 2024-09-02 16:52:19 -04:00
f61518ae20 homestore /works/cvtt ownership moved to cvtt 2024-09-02 16:42:04 -04:00
25fd9ab3c4 homestore /works/cvtt ownership moved to cvtt 2024-09-02 16:34:28 -04:00
66b5cd16dd homestore /works/cvtt ownership moved to cvtt 2024-09-02 16:28:43 -04:00
1db0d08547 added cvtt_ts_status_checks.sh 2024-08-30 18:26:18 -04:00
b61c3a684a load eqt to research - fix 2024-08-28 10:49:07 -04:00
78382df12d load eqt to research - fix 2024-08-26 16:59:01 -04:00
bbfb7112bb trading recorder -> docker 2024-08-24 19:19:18 -04:00
23603ad076 md_portal uses different config 2024-08-24 18:23:27 -04:00
81fe6372e9 added quant docker start script 2024-08-22 18:36:29 -04:00
48f77e5280 added quant docker start script 2024-08-22 18:31:14 -04:00
2c88bc613c added trader docker start script 2024-08-22 15:59:02 -04:00
085f1af3da added trader docker start script 2024-08-22 15:52:05 -04:00
fa93450b3d fix executor start 2024-08-20 21:55:13 -04:00
4188dd7c7c fix executor start 2024-08-20 21:50:54 -04:00
929953c1c4 fix executor start 2024-08-20 21:48:45 -04:00
e562716b73 renamed 2024-08-20 21:45:52 -04:00
7c806107ca executor start added 2024-08-20 21:26:00 -04:00
85c4042d00 md_portal docker start 2024-08-19 13:00:44 -04:00
dcca83a3ae trading: risk manager docker initial 2024-08-18 21:25:14 -04:00
e192766c9a load EQT md to GPUshnik fix 2024-08-18 17:31:33 -04:00
9d9b9c17f6 renamed2 2024-08-18 17:05:06 -04:00
5ad576b704 renaming checklist script 2024-08-18 17:02:42 -04:00
6d0e32f6ad progress. md checklists 2024-08-18 16:44:19 -04:00
b57fb2257f progress - intermediate for md checklists 2024-08-18 16:29:40 -04:00
1dafab8971 fix 2024-08-17 14:39:07 -04:00
9dfdc615de fix 2024-08-17 14:25:03 -04:00
17f29a3a67 fix 2024-08-17 14:24:32 -04:00
1de85ffe73 fix 2024-08-17 14:02:13 -04:00
4ce11b12ce clean 2024-08-17 14:00:25 -04:00
4962d59d46 cleaning 2024-08-17 13:59:30 -04:00
e444f697eb progress 2024-08-14 20:08:12 -04:00
087e536bdf progress 2024-08-14 15:31:42 -04:00
c3218eac2e progress 2024-08-13 23:03:24 -04:00
e5716c29a3 progress 2024-08-12 21:35:58 -04:00
7c7248efd3 progress 2024-08-12 20:30:55 -04:00
ff7f8c0e01 progress 2024-08-10 23:33:30 -04:00
7c34d6e918 fix 2024-08-10 23:28:07 -04:00
55782dbd44 progress 2024-08-10 23:24:46 -04:00
72c9cb2e00 progress 2024-08-09 22:54:33 -04:00
a610ab5462 progress 2024-08-09 22:43:44 -04:00
969360642a debugging crypto_md_day 2024-08-08 20:51:45 -04:00
939cc0197d progress: build script 2024-08-08 18:17:00 -04:00
0766a07b79 fix 2024-08-08 13:04:16 -04:00
9874aaeac7 fix 2024-08-06 23:59:35 -04:00
6458974866 progress: storage check uses hosts from config service 2024-08-06 16:05:55 -04:00
df761027ec progress 2024-08-05 22:41:52 -04:00
90fa146190 fix 2024-08-05 18:17:12 -04:00
c3ed03e865 hjson and jq in bash 2024-08-05 17:38:12 -04:00
87250d5c77 minor 2024-08-05 14:28:05 -04:00
b1fe4f38f0 progress 2024-07-31 12:44:24 -04:00
c7e717a64f fix 2024-07-31 12:37:32 -04:00
c6b99f6678 progress 2024-07-31 12:24:00 -04:00
6c771ad1a1 progress 2024-07-31 11:34:58 -04:00
066a9fbdf1 progress 2024-07-31 11:24:32 -04:00
e7d7af1889 progress 2024-07-30 23:32:40 -04:00
797a46746c progress 2024-07-30 13:50:30 -04:00
eb0319a81b progress 2024-07-30 12:12:15 -04:00
05bf8e59aa fix 2024-07-29 20:54:12 -04:00
35a13db907 fix 2024-07-29 20:46:33 -04:00
b24a1217e8 fix 2024-07-29 20:30:37 -04:00
362a2bf61d progress 2024-07-29 20:28:30 -04:00
e0a0d6d9d6 monor fix 2024-07-28 19:43:49 -04:00
20a375cd55 progress 2024-07-28 19:40:22 -04:00
35466a1932 fix 2024-07-28 13:50:41 -04:00
26afead109 fix 2024-07-27 21:54:47 -04:00
32935143da progress 2024-07-27 18:31:37 -04:00
4e2df290db version 2024-07-27 18:22:34 -04:00
d0ee8fea09 progress 2024-07-27 18:20:56 -04:00
cd4663d775 fix 2024-07-26 13:46:15 -04:00
b353572ea6 progress 2024-07-25 12:42:43 -04:00
fa6dd0fa95 fix 2024-07-25 10:22:51 -04:00
ce68165eef fix 2024-07-24 19:57:30 -04:00
3932cbde37 fix 2024-07-24 13:53:26 -04:00
9e90186432 progress 2024-07-24 13:30:17 -04:00
bbfd200ec4 fix 2024-07-23 16:49:48 -04:00
489016782c progress 2024-07-23 14:48:59 -04:00
b698b847a5 progress 2024-07-23 13:57:47 -04:00
65f55528ef fix version 2024-07-22 17:53:00 -04:00
13ff201999 progress 2024-07-22 17:49:34 -04:00
1bc2a367ec fix version 2024-07-22 13:18:14 -04:00
416fdaffed fix 2024-07-22 13:17:38 -04:00
b08ffbb73b progress 2024-07-22 13:00:00 -04:00
85534b130a progress 2024-07-19 11:09:34 -04:00
f391593857 progress 2024-07-19 11:04:30 -04:00
108ae35a06 progress 2024-07-19 10:45:46 -04:00
2416a5a77e progress 2024-07-19 10:37:45 -04:00
0b9feb8610 progress 2024-07-19 10:25:26 -04:00
51e059a72b fix 2024-07-19 09:47:18 -04:00
9a59b00998 progress 2024-07-18 16:45:52 -04:00
78de26d154 progress 2024-07-18 12:08:35 -04:00
8eb52d718f progress 2024-07-17 10:47:52 -04:00
a0cf2531e6 progress 2024-07-17 09:14:22 -04:00
ada2a74eb6 fix 2024-07-16 15:11:14 -04:00
7dec83b190 fix 2024-07-16 14:51:21 -04:00
2b2056171e progress 2024-07-16 14:37:46 -04:00
913d971663 progress 2024-07-16 12:11:37 -04:00
0f6d7874de progress 2024-06-13 19:40:49 -04:00
769d8116d5 fix 2024-06-13 18:17:13 -04:00
a7ad7f45f5 fix 2024-06-13 18:13:27 -04:00
0e6190f999 progress 2024-06-13 18:11:54 -04:00
180e3f89cf fix 2024-06-13 18:03:43 -04:00
80bbf47755 progress 2024-06-13 18:00:09 -04:00
e295aa8dff progress 2024-06-13 17:27:16 -04:00
60d2327b7e progress 2024-06-06 20:07:49 -04:00
a0b41cad8e progress 2024-06-06 19:55:04 -04:00
56 changed files with 2989 additions and 828 deletions

1
.gitignore vendored Normal file
View File

@ -0,0 +1 @@
.history/

1
VERSION Normal file
View File

@ -0,0 +1 @@
2.3.2

125
__DEPRECATED__/alpaca_md_day.sh Executable file
View File

@ -0,0 +1,125 @@
#!/bin/bash
# Load one day of Alpaca market data by running the alpaca_md_day docker image.
# Modes (by first argument):
#   <empty>     - daily mode: yesterday's data has priority, any running
#                 historical container is stopped first.
#   next        - continue the historical backfill one day before the date
#                 saved in ${LastDayFile}.
#   YYYY-MM-DD  - load that specific historical day.
date_to_load=${1} # YYYY-MM-DD
DockerRegistry=cloud21.cvtt.vpn:5500
DockerImage=${DockerRegistry}/alpaca_md_day #:latest
# State file holding the last successfully loaded day (for "next" mode).
LastDayFile=/home/cvtt/prod/logs/last_alpaca_hist_day
ContainerName=alpaca_md_day
# Return 0 when a docker container with exactly the given name is running.
# NOTE: this helper is duplicated in the other start scripts in this compare.
is_container_running() {
  local name=$1
  local running_id
  running_id=$(docker ps --filter "name=^/${name}$" --filter "status=running" -q)
  [ -n "${running_id}" ]
}
# Trading-calendar endpoint used to decide whether a date is a US trading day.
export CalendarURL=https://trading-calendar.cvtt.net/api/v1/markets/hours?mic=XNYS
# Return 0 if the given date (YYYY-MM-DD) is a business day, 1 otherwise.
# NOTE(review): if the calendar API ever returns a JSON null open_time, jq
# prints the literal string "null", which is non-empty and would be treated as
# a business day — confirm the service returns an empty list for closed days.
is_business_day() {
dt=${1}
open_time=$(curl -s "${CalendarURL}&start=${dt}&end=${dt}" | jq '.[] | .open_time')
if [ -n "${open_time}" ]; then
return 0
else
return 1
fi
}
export -f is_business_day
# Walk backwards from the given date (YYYY-MM-DD) until a US business day is
# found (per is_business_day) and print that date on stdout.
# Progress messages for skipped days go to stderr.
get_prev_business_day() {
  local day=${1}
  until is_business_day ${day}; do
    echo "${day} is not business day in US" >&2
    day=$(date -d "${day} - 1 day" "+%Y-%m-%d")
  done
  echo ${day}
}
export -f get_prev_business_day
# Resolve the date to load.
#   no argument : daily mode - stop/remove any running historical container so
#                 the yesterday run has priority.
#   "next"      : continue the backfill one day before the date recorded in
#                 ${LastDayFile} (fall back to ${LastDayFile}.prev).
#   YYYY-MM-DD  : explicit historical day.
if [ -z "${date_to_load}" ] ; then
  echo "Date is not specified. Yesterday data has priority. Running historical container will be stopped"
  Cmd="docker stop ${ContainerName}"
  echo ${Cmd} && eval ${Cmd}
  Cmd="docker kill ${ContainerName}"
  echo ${Cmd} && eval ${Cmd}
  Cmd="docker rm -f ${ContainerName}"
  echo ${Cmd} && eval ${Cmd}
  Cmd="docker ps"
  echo ${Cmd} && eval ${Cmd}
  if is_container_running "$ContainerName"; then
    echo "Container ${ContainerName} is still running."
    exit 3
  fi
else
  if is_container_running "$ContainerName"; then
    echo "Container ${ContainerName} is already running."
    exit 3
  fi
  # BUGFIX: the literal "next" must be checked BEFORE normalizing with
  # date(1). Previously date -d was applied first; "next" is not a valid
  # date expression, so date_to_load was silently emptied and the "next"
  # branch could never trigger.
  if [ "${date_to_load}" == "next" ] ; then
    if [ ! -e ${LastDayFile} ]; then
      echo "File ${LastDayFile} does not exist. Will try to use prev file."
      if [ ! -e ${LastDayFile}.prev ]; then
        echo "File ${LastDayFile}.prev does not exist. Aborted."
        exit 2
      fi
      mv ${LastDayFile}.prev ${LastDayFile}
    fi
    last_date=$(cat ${LastDayFile} | xargs)
    if [ -z "${last_date}" ] ; then
      echo "File ${LastDayFile} returned an empty last day. Will try to use prev file."
      if [ ! -e ${LastDayFile}.prev ]; then
        echo "File ${LastDayFile}.prev does not exist. Aborted."
        exit 2
      fi
      mv ${LastDayFile}.prev ${LastDayFile}
      last_date=$(cat ${LastDayFile} | xargs)
      # BUGFIX: previously this recovery path exited unconditionally even when
      # a valid last_date had been recovered from the .prev file.
      if [ -z "${last_date}" ] ; then
        echo "Unable to obtain last_date. Aborted"
        exit 3
      fi
    fi
    # Rotate the state file so a failed run can be retried from .prev.
    mv ${LastDayFile} ${LastDayFile}.prev
    date_to_load=$(date -d "${last_date} - 1 day" "+%Y-%m-%d")
  else
    date_to_load=$(date -d ${date_to_load} '+%Y-%m-%d') # normalize to expected format
  fi
  echo "Historical run ${date_to_load}"
fi
# Roll back to the nearest business day on or before the requested date.
if [ -n "${date_to_load}" ]; then
  date_to_load=$(get_prev_business_day ${date_to_load})
  echo "Historical Data for ${date_to_load}"
fi
# Run the loader image for the resolved date; on success persist the date so a
# later "next" invocation can continue the backfill from here.
Cmd="docker run"
Cmd+=" --pull=always"
Cmd+=" --network=host"
Cmd+=" --name=${ContainerName}"
Cmd+=" --rm"
Cmd+=" ${DockerImage}"
Cmd+=" ${date_to_load}"
echo $Cmd
if ! eval $Cmd ; then
  exit 1 # if killed we do not save last day
fi
# truncate to avoid false positive
date_to_load=$(echo "${date_to_load}" | xargs)
if [ -n "${date_to_load}" ]; then
  echo "Saving date_to_load to ${LastDayFile}"
  echo ${date_to_load} > ${LastDayFile}
fi

View File

@ -0,0 +1,42 @@
-------------------------------------
C R Y P T O M A R K E T D A T A
-------------------------------------
============
Exchanges
============
| Coinbase
| Binance Spot
| Binance Futures
|
v
============
Databases
============
| TimescaleDB: cloud21
| TimescaleDB: cvttdata
v
================
Daily Archive
================
| created by crontabs:
| cvttdata:
| (1) /home/cvtt/prod/utils/archive_yesterday_md.sh cloud21
| (2) /home/cvtt/prod/utils/archive_yesterday_md.sh cvttdata
| stored in:
| cvttdata:/home/cvtt/prod/archive/md_archive/crypto/cloud21 (1)
| cvttdata:/home/cvtt/prod/archive/md_archive/crypto/cvttdata (2)
|
v
==========
Storage
==========
| created by crontab cvttdata:
| /home/cvtt/prod/utils/sync_market_data.sh (ops/utils)
| stored in:
| homestore:/works/cvtt/md_archive/equity/alpaca_md
| cloud21:/opt/store/cvtt/md_archive/equity/alpaca_md
|
v
==========
Usage
==========

View File

@ -0,0 +1,33 @@
-------------------------------------
E Q U I T Y M A R K E T D A T A
-------------------------------------
=====================
Exchanges (Sources)
=====================
| Alpaca
|
|
| TBD ============
| TBD Databases
| TBD ============
| TBD | TimescaleDB: cloud21
| TBD | TimescaleDB: cvttdata
v TBD v
================
Daily Archive
================
| created by crontab on cvttdata:
| /home/cvtt/prod/run/alpaca_md.sh
| stored in:
| cvttdata:/home/cvtt/prod/archive/md_archive/equity/alpaca_md
|
v
==========
Storage
==========
| created by crontab cvttdata:
| /home/cvtt/prod/utils/sync_market_data.sh
| stored in:
| homestore:/works/cvtt/md_archive/equity/alpaca_md
| cloud21:/opt/store/cvtt/md_archive/equity/alpaca_md
____V_______

View File

@ -0,0 +1,61 @@
# Supervisord configuration for the crypto market-data recorders writing to
# the CVTTDATA TimescaleDB instance, plus the recorder monitor.
[supervisord]
environment=PYTHONPATH="/home/cvtt/prod"
autostart=true
autorestart=unexpected
startretries=3
logfile=/var/log/supervisor/supervisord_cvtt.log
[supervisorctl]
# Local-only HTTP interface used by supervisorctl.
[inet_http_server]
port = 127.0.0.1:9001
[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
# Coinbase trade/quote recorder (admin port 7223).
[program:coinbase_rec_cvttdata]
directory=/home/cvtt/prod
command=/home/cvtt/.pyenv/python3.10-venv/bin/python3 -u /home/cvtt/prod/cvttpy/apps/md/md_recorder.py
--config=http://cloud16.cvtt.vpn:6789/apps/md_recorder
--active_exchanges=COINBASE_AT
--instruments=COINBASE:PAIR-BTC-USD,COINBASE:PAIR-ETH-USD,COINBASE:PAIR-LTC-USD,COINBASE:PAIR-XRP-USD
--db_credentials_key=TSDB_MD_CVTTDATA
--admin_port=7223
--log_level=INFO
--log_file=/home/cvtt/prod/logs/%%T.MD_REC.COINBASE.CVTTDATA.log
autostart=true
autorestart=true
startretries=1
startsecs=3
user=cvtt
#
# Binance Futures perp recorder (admin port 7224).
[program:bnbfut_rec_cvttdata]
directory=/home/cvtt/prod
command=/home/cvtt/.pyenv/python3.10-venv/bin/python3 -u /home/cvtt/prod/cvttpy/apps/md/md_recorder.py
--config=http://cloud16.cvtt.vpn:6789/apps/md_recorder
--active_exchanges=BNBFUT
--instruments=BNBFUT:PERP-BTC-USDT,BNBFUT:PERP-ETH-USDT
--db_credentials_key=TSDB_MD_CVTTDATA
--admin_port=7224
--log_level=INFO
--log_file=/home/cvtt/prod/logs/%%T.MD_REC.BNBFUT.CVTTDATA.log
autostart=true
autorestart=true
startretries=1
startsecs=3
user=cvtt
# Watches the recorders above (admin port 7225).
[program:md_rec_monitor]
directory=/home/cvtt/prod
command=/home/cvtt/.pyenv/python3.10-venv/bin/python3 -u /home/cvtt/prod/cvttpy/apps/md/md_rec_monitor.py
--config=http://cloud16.cvtt.vpn:6789/apps/md_rec_monitor
--db=CVTTDATA
--admin_port=7225
--log_level=INFO
--log_file=/home/cvtt/prod/logs/%%T.MD_REC.MONITOR.CVTTDATA.log
autostart=true
autorestart=true
startretries=1
startsecs=3
user=cvtt

View File

@ -0,0 +1,66 @@
#!/bin/bash
# Start an md_recorder docker container for one of the known recording jobs.
# runs on host to start container
ValidJobs=('BNBFUT_CLD21' 'BNBSPOT_CLD21' 'COINBASE_CLD21' 'BNBFUT_CVTTDATA' 'BNBSPOT_CVTTDATA' 'COINBASE_CVTTDATA')
# Print usage and terminate with status 1.
usage() {
  echo "Usage: $0 <job_name; one of (${ValidJobs[@]})> [image_tag]"
  exit 1
}
# Return 0 when $1 is one of the entries of the global ValidJobs array,
# 1 otherwise.
is_valid() {
  local candidate=$1
  local known
  for known in "${ValidJobs[@]}"; do
    if [[ "${candidate}" == "${known}" ]]; then
      return 0
    fi
  done
  return 1
}
# True (exit 0) iff a docker container named exactly ${1} is in running state.
# NOTE: same helper exists in the sibling start scripts.
is_container_running() {
  local wanted=$1
  [ -n "$(docker ps --filter "name=^/${wanted}$" --filter "status=running" -q)" ]
}
# --- argument handling and container launch ---------------------------------
job=${1}
is_valid "${job}" || usage
# Optional second argument selects the image tag; default pinned below.
ImageTag=${2:-1.6.9}
DockerImage=cloud21.cvtt.vpn:5500/md_recorder:${ImageTag}
ContainerName="md_recorder.${job}"
if is_container_running "$ContainerName"; then
  echo "Container ${ContainerName} is already running. Aborted."
  exit
fi
# Build the docker command piecewise so it can be echoed before eval.
Cmd="docker run"
Cmd+=" -d"
Cmd+=" --rm"
Cmd+=" --network=host"
# Cmd+=" --pull=always"
Cmd+=" --name=${ContainerName}"
Cmd+=" -v /home/cvtt/.creds:/.creds"
Cmd+=" -v /home/cvtt/prod/data:/app/data"
Cmd+=" -v /home/cvtt/prod/logs:/logs"
Cmd+=" -e CONFIG_SERVICE=cloud16.cvtt.vpn:6789"
Cmd+=" ${DockerImage}"
Cmd+=" ${job}"
echo ${Cmd}
eval ${Cmd}

View File

@ -0,0 +1,60 @@
#!/bin/bash
# Start the md_recorder_monitor docker container for a given DB instance.
# Print usage and terminate with status 1.
usage() {
  echo "Usage: $0 <instance (CLD21,CVTTDATA)> [<admin_port (def. 7225)>]"
  exit 1
}
# Instances the monitor knows how to watch.
ValidInstances=('CLD21' 'CVTTDATA')
# Return 0 when $1 matches an entry of ValidInstances, 1 otherwise.
is_valid() {
  local candidate=$1
  local known
  for known in "${ValidInstances[@]}"; do
    if [[ "$candidate" == "$known" ]]; then
      return 0
    fi
  done
  return 1
}
# Exit 0 iff docker reports a running container whose name is exactly ${1}.
# NOTE: same helper exists in the sibling start scripts.
is_container_running() {
  local wanted=$1
  local id
  id=$(docker ps --filter "name=^/${wanted}$" --filter "status=running" -q)
  [ -n "${id}" ]
}
# --- argument handling and container launch ---------------------------------
Instance=${1}
is_valid "${Instance}" || usage
# Optional second argument overrides the default admin port.
AdminPort=${2:-7225}
ContainerName="md_recorder_monitor.${Instance}"
if is_container_running "$ContainerName"; then
  echo "Container ${ContainerName} is already running. Aborted."
  exit
fi
# Build the docker command piecewise so it can be echoed before eval.
Cmd="docker run"
Cmd+=" -d"
Cmd+=" --rm"
Cmd+=" --network=host"
Cmd+=" --pull=always"
Cmd+=" --name=${ContainerName}"
Cmd+=" -v /home/cvtt/.creds:/.creds"
Cmd+=" -v /home/cvtt/prod/logs:/logs"
Cmd+=" -e CONFIG_SERVICE=cloud16.cvtt.vpn:6789"
Cmd+=" cloud21.cvtt.vpn:5500/md_recorder_monitor:latest"
Cmd+=" ${Instance} ${AdminPort}"
echo ${Cmd}
eval ${Cmd}

View File

@ -0,0 +1,278 @@
#!/bin/bash
# Build a one-day crypto simulation database: pick the largest archived
# mktdata dump among the DB_SOURCES hosts, merge its tables into a
# *.crypto_sim_md.db sqlite file and rsync the result to RSYNC_TARGETS.
# All settings are taken from the environment (examples below).
# --------------------- Settings
# SOURCE_HOST=cloud21.cvtt.vpn
# SOURCE_ROOT_DIR=/opt/store/cvtt/md_archive/crypto
# DB_SOURCES=cloud28,cloud29
# OUTPUT_DIR=/tmp
# DATE=20250516
# RSYNC_TARGETS="cvtt@hs01.cvtt.vpn:/works/cvtt/md_archive/crypto/sim/ cvtt@cloud21.cvtt.vpn:/opt/store/cvtt/md_archive/crypto/sim/"
# --------------------- Settings
# Default DATE to yesterday, OUTPUT_DIR to the current directory.
if [ -z ${DATE} ] ; then
DATE=$(date -d 'yesterday' +'%Y%m%d')
fi
if [ -z ${OUTPUT_DIR} ] ; then
OUTPUT_DIR=.
fi
echo "DATE=${DATE} SOURCE_HOST=${SOURCE_HOST}"
mkdir -p ${OUTPUT_DIR}
year=$(date -d ${DATE} +"%Y")
month=$(date -d ${DATE} +"%m")
if [ -z "${DB_SOURCES}" ]; then
echo "DB_SOURCES is empty"
exit
fi
IFS=',' read -r -a db_source_hosts <<< "${DB_SOURCES}"
SourceFile="${DATE}.mktdata.db.gz"
# Probe each DB source's archive directory on SOURCE_HOST over ssh and keep
# the candidate with the largest file size (most complete recording).
SelectedSourceHost=""
SelectedSourceFilePath=""
SelectedSourceSize=0
for db_source_host in "${db_source_hosts[@]}"; do
SourceDir="${SOURCE_ROOT_DIR}/${db_source_host}/${year}/${month}"
CandidatePath="${SourceDir}/${SourceFile}"
# Remote command: print the file size in bytes, or fail if it is missing.
remote_stat_cmd="if [ -f '${CandidatePath}' ]; then stat -c %s '${CandidatePath}'; else exit 1; fi"
CandidateSize=$(ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ${SOURCE_HOST} "${remote_stat_cmd}" 2>/dev/null)
if [ $? -eq 0 ] && [ -n "${CandidateSize}" ]; then
echo "Found ${SOURCE_HOST}:${CandidatePath} (${CandidateSize} bytes)"
if [ -z "${SelectedSourceHost}" ] || [ "${CandidateSize}" -gt "${SelectedSourceSize}" ]; then
SelectedSourceHost=${db_source_host}
SelectedSourceFilePath=${CandidatePath}
SelectedSourceSize=${CandidateSize}
fi
fi
done
if [ -z "${SelectedSourceHost}" ]; then
echo "File ${SourceFile} NOT FOUND on any DB_SOURCES host"
exit
fi
echo "Using source ${SelectedSourceHost} with ${SelectedSourceFilePath} (${SelectedSourceSize} bytes)"
# Pull the selected archive locally, decompress it, and arrange for the raw
# source db to be deleted on exit (the merged result file is kept).
Cmd="/usr/bin/rsync -ahv"
Cmd+=" --mkpath"
Cmd+=" -e 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'"
Cmd+=" ${SOURCE_HOST}:${SelectedSourceFilePath}"
Cmd+=" $OUTPUT_DIR/"
echo ${Cmd}
eval ${Cmd}
if [ ! -f ${OUTPUT_DIR}/${SourceFile} ] ; then
echo "File ${OUTPUT_DIR}/${SourceFile} NOT FOUND"
exit
fi
Cmd="(cd ${OUTPUT_DIR} && gunzip -f *.db.gz)"
echo ${Cmd}
eval ${Cmd}
SourceDbFile="${OUTPUT_DIR}/${DATE}.mktdata.db"
ResultDbFile="${OUTPUT_DIR}/${DATE}.crypto_sim_md.db"
echo "SourceDbFile=${SourceDbFile}"
echo "Creating Result Database File ${ResultDbFile}"
# Remove the decompressed source dump whenever the script exits.
cleanup() {
rm ${SourceDbFile}
}
trap cleanup EXIT
# Create the three target tables (idempotent: IF NOT EXISTS + unique indexes
# so re-runs with INSERT OR IGNORE do not duplicate rows).
echo "Creating table md_trades ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
CREATE TABLE IF NOT EXISTS md_trades (
tstamp text,
tstamp_ns integer,
exchange_id text,
instrument_id text,
exch text,
px real,
qty real,
trade_id text,
condition text,
tape text
);
CREATE UNIQUE INDEX IF NOT EXISTS md_trades_uidx
ON md_trades(tstamp_ns, exchange_id, instrument_id);
EOF
echo "Creating table md_quotes ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
CREATE TABLE IF NOT EXISTS md_quotes (
tstamp text,
tstamp_ns integer,
exchange_id text,
instrument_id text,
bid_exch text,
bid_px real,
bid_qty real,
ask_exch text,
ask_px real,
ask_qty real
);
CREATE UNIQUE INDEX IF NOT EXISTS md_quotes_uidx
ON md_quotes(tstamp_ns, exchange_id, instrument_id);
EOF
# NOTE: md_1min_bars is uniquely indexed by the text tstamp (not tstamp_ns),
# unlike the two tables above.
echo "Creating table md_1min_bars ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
CREATE TABLE IF NOT EXISTS md_1min_bars (
tstamp text,
tstamp_ns integer,
exchange_id text,
instrument_id text,
open real,
high real,
low real,
close real,
volume real,
vwap real,
num_trades integer
);
CREATE UNIQUE INDEX IF NOT EXISTS md_1min_bars_uidx
ON md_1min_bars(tstamp, exchange_id, instrument_id);
EOF
# Merge trades and book-tops from the attached source dump. Exchange
# nanosecond timestamps are rendered as "YYYY-MM-DD HH:MM:SS.uuuuuu" text.
echo "Loading md_trades ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
ATTACH '${SourceDbFile}' AS source_db;
BEGIN;
INSERT OR IGNORE INTO md_trades SELECT
datetime(exchange_ts_ns / 1000000000, 'unixepoch') || '.' || printf('%06d', (exchange_ts_ns % 1000000000) / 1000) as tstamp,
time as tstamp_ns,
exchange_id,
instrument_id,
"" as exch,
price as px,
quantity as qty,
"" as trade_id,
taker_side as condition,
"" as tape
from source_db.bnbspot_md_trades;
COMMIT;
BEGIN;
INSERT OR IGNORE INTO md_trades
SELECT
datetime(exchange_ts_ns / 1000000000, 'unixepoch') || '.' || printf('%06d', (exchange_ts_ns % 1000000000) / 1000) as tstamp,
time as tstamp_ns,
exchange_id,
instrument_id,
"" as exch,
price as px,
quantity as qty,
"" as trade_id,
taker_side as condition,
"" as tape
from source_db.coinbase_md_trades;
COMMIT;
DETACH source_db;
EOF
# NOTE(review): the booktop selects below use unqualified table names
# (bnbspot_md_booktops / coinbase_md_booktops, no "source_db." prefix);
# sqlite resolves them to the attached db because main has no such tables,
# but the style is inconsistent with the trades block above.
echo "Loading md_quotes ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
ATTACH '${SourceDbFile}' AS source_db;
BEGIN;
INSERT OR IGNORE INTO md_quotes SELECT
datetime(exchange_ts_ns / 1000000000, 'unixepoch') || '.' || printf('%06d', (exchange_ts_ns % 1000000000) / 1000) as tstamp,
time as tstamp_ns,
exchange_id,
instrument_id,
exchange_id as bid_exch,
bid_price as bid_px,
bid_quantity as bid_qty,
exchange_id as ask_exch,
ask_price as ask_px,
ask_quantity as ask_qty
from bnbspot_md_booktops;
COMMIT;
BEGIN;
INSERT OR IGNORE INTO md_quotes SELECT
datetime(exchange_ts_ns / 1000000000, 'unixepoch') || '.' || printf('%06d', (exchange_ts_ns % 1000000000) / 1000) as tstamp,
time as tstamp_ns,
exchange_id,
instrument_id,
exchange_id as bid_exch,
bid_price as bid_px,
bid_quantity as bid_qty,
exchange_id as ask_exch,
ask_price as ask_px,
ask_quantity as ask_qty
from coinbase_md_booktops;
COMMIT;
DETACH source_db;
EOF
### --- REPLACE 0 with num_trades ---
# Merge 1-minute bars. num_trades is hard-coded to 0 because the source dump
# predates that column (see the marker comment above).
# NOTE(review): the first select aliases the timestamp as "tsatmp" (typo);
# harmless for INSERT ... SELECT since column aliases are ignored there.
echo "Loading md_1min_bars ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
ATTACH '${SourceDbFile}' AS source_db;
BEGIN;
INSERT OR IGNORE INTO md_1min_bars SELECT
datetime(tstamp / 1000000000, 'unixepoch') || '.' || printf('%06d', (tstamp % 1000000000) / 1000) as tsatmp,
tstamp as tstamp_ns,
exchange_id,
instrument_id,
open,
high,
low,
close,
volume,
vwap,
0 as num_trades
from bnbspot_ohlcv_1min;
COMMIT;
BEGIN;
INSERT OR IGNORE INTO md_1min_bars SELECT
datetime(tstamp / 1000000000, 'unixepoch') || '.' || printf('%06d', (tstamp % 1000000000) / 1000) as tstamp,
tstamp as tstamp_ns,
exchange_id,
instrument_id,
open,
high,
low,
close,
volume,
vwap,
0 as num_trades
from coinbase_ohlcv_1min;
COMMIT;
DETACH source_db;
EOF
# Compress the merged db and ship it to every target under a dated
# <target>/<year>/<month>/ subdirectory (rsync --mkpath creates it).
Cmd="gzip ${ResultDbFile}"
echo ${Cmd}
eval ${Cmd}
for tgt in ${RSYNC_TARGETS} ; do
tgt="${tgt}/${year}/${month}/"
Cmd="/usr/bin/rsync -ahv"
Cmd+=" --mkpath"
Cmd+=" -e 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'"
Cmd+=" ${ResultDbFile}.gz ${tgt}"
echo ${Cmd}
eval ${Cmd}
done
echo Done $0 ${*}

View File

@ -0,0 +1,224 @@
#!/bin/bash
# Build a one-day equity simulation database: fetch per-symbol Alpaca bar and
# quote/trade archives from SOURCE_HOST, merge them into a single
# *.alpaca_sim_md.db sqlite file, and rsync the result to RSYNC_TARGETS.
# All settings come from the environment (examples below).
# --------------------- Settings
# SOURCE_HOST=cloud21.cvtt.vpn
# SOURCE_ROOT_DIR=/opt/store/cvtt/md_archive/equity/alpaca_md
# SYMBOL_LIST=CAN #,COIN,GBTC,HOOD,MSTR,PYPL,XYZ
# OUTPUT_DIR=/tmp
# DATE=20250514
# RSYNC_TARGETS="cvtt@hs01.cvtt.vpn:/works/cvtt/md_archive/equity/test/alpaca_md/sim/"
# RSYNC_TARGETS="cvtt@hs01.cvtt.vpn:/works/cvtt/md_archive/equity/alpaca_md/sim/
#cvtt@cloud21.cvtt.vpn:/opt/store/cvtt/md_archive/equity/alpaca_md/sim/"
# --------------------- Settings
# Return 0 if the given date is a US (XNYS) business day per the calendar
# service, 1 otherwise. Accepts YYYYMMDD or YYYY-MM-DD.
# NOTE(review): a JSON null open_time would print as the non-empty string
# "null" and count as a business day — confirm the service returns an empty
# list for closed days.
is_business_day() {
dt=${1}
date=$(date -d "${dt}" +"%Y-%m-%d")
CalendarURL=https://trading-calendar.cvtt.net/api/v1/markets/hours?mic=XNYS
URL="${CalendarURL}&start=${date}&end=${date}"
echo "URL=${URL}"
# curl $URL
open_time=$(curl -s "${URL}" | jq '.[] | .open_time')
echo "open_time=${open_time}"
if [ -n "${open_time}" ]; then
return 0
else
return 1
fi
}
export -f is_business_day
# --- validate inputs and fetch the per-symbol source archives ----------------
if [ -z "${SYMBOL_LIST}" ] ; then
  echo "symbols are not specified"
  exit
fi
if [ -z "${DATE}" ] ; then
  DATE=$(date -d 'yesterday' +'%Y%m%d')
fi
mkdir -p ${OUTPUT_DIR}
if ! is_business_day ${DATE}; then
  echo "${DATE} is not business day"
  exit
fi
# Split the comma-separated symbol list into an array.
OLD_IFS=${IFS}
IFS=","
read -ra Symbols <<< "${SYMBOL_LIST}"
IFS=${OLD_IFS}
echo "DATE=${DATE} Symbols=${Symbols[@]} OUTPUT_DIR=${OUTPUT_DIR}"
# BUGFIX: was ${DataHost}, a variable that is never defined in this script;
# the data actually comes from ${SOURCE_HOST}.
echo Getting data from ${SOURCE_HOST} ...
year=$(date -d ${DATE} +"%Y")
month=$(date -d ${DATE} +"%m")
# Archives live under <root>/<year>/<first-letter>/<symbol>/; pull both the
# 1-minute-bar and the quote-and-trade archive for every requested symbol.
for sym in ${Symbols[@]}; do
  inst_id="STOCK-${sym}"
  capital=${sym:0:1}
  SourceDir="${SOURCE_ROOT_DIR}/${year}/${capital}/${sym}"
  SourceHbarFile="${SourceDir}/${DATE}.${sym}.alpaca_1m_bars.db.gz"
  SourceQatFile="${SourceDir}/${DATE}.${sym}.alpaca_qat.db.gz"
  for src_file in ${SourceHbarFile} ${SourceQatFile}; do
    Cmd="rsync -ahv"
    Cmd+=" -e 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'"
    Cmd+=" ${SOURCE_HOST}:${src_file}"
    Cmd+=" $OUTPUT_DIR/"
    echo ${Cmd}
    eval ${Cmd}
  done
done
Cmd="(cd ${OUTPUT_DIR} && gunzip *.db.gz)"
echo ${Cmd}
eval ${Cmd}
ResultDbFile="${OUTPUT_DIR}/${DATE}.alpaca_sim_md.db"
echo "Creating Result Database File ${ResultDbFile}"
# Create the three target tables (idempotent: IF NOT EXISTS + unique indexes
# so re-runs with INSERT OR IGNORE do not duplicate rows).
echo "Creating table md_trades ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
CREATE TABLE IF NOT EXISTS md_trades (
tstamp text,
tstamp_ns integer,
exchange_id text,
instrument_id text,
exch text,
px real,
qty real,
trade_id text,
condition text,
tape text
);
CREATE UNIQUE INDEX IF NOT EXISTS md_trades_uidx
ON md_trades(tstamp_ns, exchange_id, instrument_id);
EOF
echo "Creating table md_quotes ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
CREATE TABLE IF NOT EXISTS md_quotes (
tstamp text,
tstamp_ns integer,
exchange_id text,
instrument_id text,
bid_exch text,
bid_px real,
bid_qty real,
ask_exch text,
ask_px real,
ask_qty real
);
CREATE UNIQUE INDEX IF NOT EXISTS md_quotes_uidx
ON md_quotes(tstamp_ns, exchange_id, instrument_id);
EOF
# NOTE(review): this heredoc omits the ".echo ON" the other two use;
# cosmetic inconsistency only.
echo "Creating table md_1min_bars ..."
sqlite3 ${ResultDbFile} <<EOF
CREATE TABLE IF NOT EXISTS md_1min_bars (
tstamp text,
tstamp_ns integer,
exchange_id text,
instrument_id text,
open real,
high real,
low real,
close real,
volume real,
vwap real,
num_trades integer
);
CREATE UNIQUE INDEX IF NOT EXISTS md_1min_bars_uidx
ON md_1min_bars(tstamp, exchange_id, instrument_id);
EOF
# set -f # not to expand *
# Merge each symbol's quote-and-trade db and 1-minute-bar db into the result
# file, deleting the per-symbol source files as we go. Each sqlite3 call is a
# separate process, so the ATTACH in the first heredoc does not persist into
# the second.
for sym in ${Symbols[@]}; do
src_hbar_db=${OUTPUT_DIR}/${DATE}.${sym}.alpaca_1m_bars.db
src_qat_db=${OUTPUT_DIR}/${DATE}.${sym}.alpaca_qat.db
echo "Loading md_trades and md_quotes from ${src_qat_db} ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
ATTACH '${src_qat_db}' AS source_db;
BEGIN;
INSERT OR IGNORE INTO md_trades SELECT
tstamp,
tstamp_ns,
exchange_id,
instrument_id,
exch,
px,
qty,
trade_id,
condition,
tape
FROM source_db.md_trades;
COMMIT;
BEGIN;
INSERT OR IGNORE INTO md_quotes SELECT
tstamp,
tstamp_ns,
exchange_id,
instrument_id,
bid_exch,
bid_px,
bid_qty,
ask_exch,
ask_px,
ask_qty
FROM source_db.md_quotes;
COMMIT;
EOF
echo "Loading md_1min_bars from ${src_hbar_db} ..."
sqlite3 ${ResultDbFile} <<EOF
.echo ON
ATTACH '${src_hbar_db}' AS source_db;
BEGIN;
INSERT OR IGNORE INTO md_1min_bars SELECT
tstamp,
tstamp_ns,
exchange_id,
instrument_id,
open,
high,
low,
close,
volume,
vwap,
num_trades
FROM source_db.md_1min_bars;
COMMIT;
DETACH source_db;
EOF
# Remove the per-symbol source files once merged.
Cmd="rm ${src_hbar_db} ${src_qat_db}"
echo ${Cmd}
eval ${Cmd}
done
# Compress the merged database and ship it to every configured rsync target
# under a dated <target>/<year>/<month>/ directory (--mkpath creates it).
Cmd="gzip ${ResultDbFile}"
echo ${Cmd}
eval ${Cmd}
for tgt in ${RSYNC_TARGETS} ; do
  SyncCmd="/usr/bin/rsync -ahv --mkpath"
  SyncCmd+=" -e 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'"
  SyncCmd+=" ${ResultDbFile}.gz ${tgt}/${year}/${month}/"
  echo ${SyncCmd}
  eval ${SyncCmd}
done
echo Done $0 ${*}

22
admin/docker_images.sh Executable file
View File

@ -0,0 +1,22 @@
#!/usr/bin/env bash
# List every repository in the private Docker registry and dump the raw
# tag-list JSON for each one.
# Fix: the shebang was '#!/bin/env bash' — 'env' lives in /usr/bin on
# standard Linux, so the script failed to start on hosts without /bin/env.
Registry=http://hs01.cvtt.vpn:5500
Catalog=${Registry}/v2/_catalog
jstr=$(curl -s -X GET ${Catalog})
# echo ${jstr}
echo "${jstr}" | jq -r '.repositories[]' | while read repo; do
TagListURL="${Registry}/v2/${repo}/tags/list"
# echo $repo
Cmd="curl -s -X GET ${TagListURL}"
echo ${Cmd}
eval ${Cmd}
# TagList=$(eval ${Cmd})
# # echo ${TagList}
# echo "${TagList}" | jq -r '.tags[]' | while read tag; do
# echo "${Registry}/${repo}:${tag}"
# done
done

221
build/build_release.sh Executable file
View File

@ -0,0 +1,221 @@
#!/usr/bin/env bash
# Release builder: clones a project, tags the version from its VERSION file,
# and rsyncs the release tree to the distribution hosts.
# ---------------- Settings
# Map of project name -> git remote URL.
declare -A git_repo_arr
git_repo_arr[cvttpy]=git@cloud21.cvtt.vpn:/opt/store/git/cvtt2/cvttpy.git
git_repo_arr[ops]=git@cloud21.cvtt.vpn:/opt/store/git/cvtt2/ops.git
git_repo_arr[research]=git@cloud21.cvtt.vpn:/opt/store/git/cvtt2/research.git
git_repo_arr[cvtt-rust]=git@cloud21.cvtt.vpn:/opt/store/git/cvtt2/cvtt-rust.git
git_repo_arr[docker_dev]=git@cloud21.cvtt.vpn:/opt/store/git/cvtt2/docker_dev.git
dist_root=/home/cvttdist/software/cvtt2
dist_user=cvttdist
dist_host="cloud21.cvtt.vpn"
dist_ssh_port="22"
# ---------------- Settings
# host:port pairs that receive the release via rsync-over-ssh.
dist_locations="cloud21.cvtt.vpn:22 hs01.cvtt.vpn:22"
# VERSION file format: "<version>,<what's new>" (comma-separated; see the
# awk extraction further down).
version_file="VERSION"
# ---------------- cmdline
# Defaults for -p / -b / -i.
prj=
brnch=master
interactive=N
# Print the invocation synopsis and abort with a non-zero status.
# Fix: the original message had an unbalanced '[' ("[-b <branch (master)> -i
# (interactive)") with no closing bracket; brackets now pair correctly.
usage() {
    echo "Usage: $0 -p <project> [-b <branch (master)>] [-i (interactive)]"
    exit 1
}
# Parse -p <project>, -b <branch>, -i (interactive). The leading ':' in the
# optstring enables silent error handling so the ':' and '\?' arms fire.
while getopts ":p:b:i" opt; do
case ${opt} in
p )
prj=$OPTARG
;;
b )
brnch=$OPTARG
;;
i )
interactive=Y
;;
\? )
echo "Invalid option: -$OPTARG" >&2
usage
;;
: )
echo "Option -$OPTARG requires an argument." >&2
usage
;;
esac
done
# ---------------- cmdline
# Pause for operator acknowledgement, but only in interactive mode.
confirm() {
    [ "${interactive}" != "Y" ] && return 0
    echo "--------------------------------"
    echo -n "Press <Enter> to continue" && read
}
# Echo the names of all Cargo binary targets in the current workspace,
# space-separated on one line.
rust_binaries() {
    bin_list=()
    for bin_name in $(cargo metadata --no-deps --format-version 1 | jq -r '.packages[].targets[] | select(.kind | index("bin")) | .name'); do
        bin_list+=("${bin_name}")
    done
    echo "${bin_list[@]}"
}
# Echo the .rlib artifact names of all Cargo library targets in the current
# workspace, space-separated on one line.
rust_libraries() {
    lib_list=()
    for lib_name in $(cargo metadata --no-deps --format-version 1 | jq -r '.packages[].targets[] | select(.kind | index("lib")) | .name'); do
        lib_list+=("lib${lib_name}.rlib")
    done
    echo "${lib_list[@]}"
}
# Resolve the project: prompt when interactive, else use the -p value.
if [ "${interactive}" == "Y" ]; then
echo -n "Enter project [${prj}]: "
read project
if [ "${project}" == "" ]
then
project=${prj}
fi
else
project=${prj}
fi
repo=${git_repo_arr[${project}]}
if [ -z ${repo} ]; then
echo "ERROR: Project repository for ${project} not found"
exit -1
fi
echo "Project repo: ${repo}"
# Resolve the branch the same way.
if [ "${interactive}" == "Y" ]; then
echo -n "Enter branch to build release from [${brnch}]: "
read branch
if [ "${branch}" == "" ]
then
branch=${brnch}
fi
else
branch=${brnch}
fi
# Work in a throwaway clone; the EXIT trap removes it on any exit path.
tmp_dir=$(mktemp -d)
function cleanup {
cd ${HOME}
rm -rf ${tmp_dir}
}
trap cleanup EXIT
prj_dir="${tmp_dir}/${prj}"
# Stage 1: clone the repo and cd into it (preview + confirm when interactive).
cmd_arr=()
Cmd="git clone ${repo} ${prj_dir}"
cmd_arr+=("${Cmd}")
Cmd="cd ${prj_dir}"
cmd_arr+=("${Cmd}")
if [ "${interactive}" == "Y" ]; then
echo "------------------------------------"
echo "The following commands will execute:"
echo "------------------------------------"
for cmd in "${cmd_arr[@]}"
do
echo ${cmd}
done
fi
confirm
for cmd in "${cmd_arr[@]}"
do
echo ${cmd} && eval ${cmd}
done
Cmd="git checkout ${branch}"
echo ${Cmd} && eval ${Cmd}
if [ "${?}" != "0" ]; then
echo "ERROR: Branch ${branch} is not found"
# Manual cleanup here is redundant with the EXIT trap but harmless.
cd ${HOME} && rm -rf ${tmp_dir}
exit -1
fi
# VERSION file is "<version>,<what's new>" — split on the comma.
release_version=$(cat ${version_file} | awk -F',' '{print $1}')
whats_new=$(cat ${version_file} | awk -F',' '{print $2}')
echo "--------------------------------"
echo "Version file: ${version_file}"
echo "Release version: ${release_version}"
confirm
version_tag="v${release_version}"
version_comment="'${version_tag} ${project} ${branch} $(date +%Y-%m-%d)\n${whats_new}'"
# Stage 2: tag + push tags, strip .git, then rsync the tree to each dist host.
cmd_arr=()
Cmd="git tag -a ${version_tag} -m ${version_comment}"
cmd_arr+=("${Cmd}")
Cmd="git push origin --tags"
cmd_arr+=("${Cmd}")
Cmd="rm -rf .git"
cmd_arr+=("${Cmd}")
SourceLoc=../${project}
# cvtt-rust is special: cargo-install each binary crate into dist/<project>
# and ship that directory instead of the source tree.
if [ "${project}" == "cvtt-rust" ]; then
cmd_arr+=("cd ${prj_dir}")
release_dir=${prj_dir}/dist/${project}
cmd_arr+=("mkdir -p ${release_dir}")
cmd_arr+=("cp ${version_file} ${release_dir}")
jq_cmd="jq '.packages[] | select(.targets[].kind[] == \"bin\") | .name'"
# NOTE(review): apps is a plain string word-split by the for loop below,
# not an array — works because crate names contain no whitespace.
apps=$(cargo metadata --no-deps --format-version 1 --manifest-path=${prj_dir}/Cargo.toml | eval $jq_cmd | uniq)
for app in ${apps[@]}; do
app=${app//\"/} # remove quotes
cmd_arr+=("cargo install --root ${release_dir} --path ${prj_dir}/apps/${app}")
done
SourceLoc=${release_dir}
fi
dist_path="${dist_root}/${project}/${release_version}"
for dist_loc in ${dist_locations}; do
# Split "host:port" into its two parts.
dhp=(${dist_loc//:/ })
dist_host=${dhp[0]}
dist_port=${dhp[1]}
# --rsync-path pre-creates the remote target directory before syncing.
Cmd="rsync -avzh"
Cmd="${Cmd} --rsync-path=\"mkdir -p ${dist_path}"
Cmd="${Cmd} && rsync\" -e \"ssh -p ${dist_ssh_port}\""
Cmd="${Cmd} $SourceLoc ${dist_user}@${dist_host}:${dist_path}/"
cmd_arr+=("${Cmd}")
done
if [ "${interactive}" == "Y" ]; then
echo "------------------------------------"
echo "The following commands will execute:"
echo "------------------------------------"
for cmd in "${cmd_arr[@]}"
do
echo ${cmd}
done
fi
confirm
for cmd in "${cmd_arr[@]}"
do
pwd && echo ${cmd} && eval ${cmd}
done
echo "$0 Done ${project} ${release_version}"

273
build/docker_images_builder.sh Executable file
View File

@ -0,0 +1,273 @@
#!/bin/bash
# Print the invocation synopsis and abort the script with status 1.
usage() {
    echo "Usage: ${0} <RootDir>"
    exit 1
}
# Report a fatal error ($1) and terminate the script with status 1.
error_exit() {
    printf 'ERROR: %s\n' "${1}"
    exit 1
}
# --- Settings
# RootDir is the docker build context root and the staging area for project
# copies; it is created below if missing.
RootDir=${1}
if [ -z "${RootDir}" ] ; then
usage
fi
RegistryService=cloud21.cvtt.vpn:5500
RegistryProtocol=http
ConfigUrl=http://cloud16.cvtt.vpn:6789/admin/docker_images
ReleaseHost="cloud21.cvtt.vpn"
ReleasePort="22"
ReleaseUser="cvttdist"
ReleaseDir="/home/cvttdist/software/cvtt2"
# Image build job definitions, fetched from the config service and converted
# from hjson to json by the local hjson helper.
DIMAGES_CONFIG=$(curl -s ${ConfigUrl} | ${HOME}/bin/hjson -j)
SetVersionScript=${HOME}/prod/set_version.sh
# --------------- D E B U G
mkdir -p ${RootDir}
build_docker_image() {
    # Build, optionally test, tag, and push one Docker image based on the
    # latest released version of a project.
    # Arguments:
    #   $1 - human-readable title (logging only)
    #   $2 - image name in the registry
    #   $3 - subdirectory of docker_dev holding the Dockerfile
    #   $4 - project whose latest dist release is baked into the image
    #   $5 - optional test script name inside $3 ("" or "null" skips tests)
    # Globals read: ReleaseUser/ReleaseHost/ReleaseDir, RegistryService,
    #   SetVersionScript, RootDir.  Globals clobbered: Cmd, DockerDir,
    #   image_version (and the parameter copies below).
    title=${1}
    image_name=${2}
    docker_dev_path=${3}
    project=${4}
    test_script=${5}
    echo "Building ${title}..."
    echo "ImageName=${image_name}"
    echo "DockerDevPath=${docker_dev_path}"
    echo "BasedOnDist=${project}"
    # Get Latest DIST version
    echo "Checking for latest version of ${project} on ${ReleaseUser}@${ReleaseHost}:${ReleaseDir}"
    image_version=$(get_latest_dist_version ${project})
    if [ -z "${image_version}" ]; then
        echo "No latest version found for ${project}"
        return
    fi
    echo "Latest version is ${image_version}"
    # Skip the expensive build when the registry already holds this version.
    if image_exists "${image_name}" "${image_version}"; then
        echo "Image $image_name:$image_version exists in the registry. Building is not required."
        return
    fi
    echo "Image $image_name:$image_version does not exist in the registry."
    echo "getting lastest version of ${project} on ${ReleaseUser}@${ReleaseHost}:${ReleaseDir}"
    Cmd="${SetVersionScript} ${project} ${image_version}"
    echo ${Cmd}
    eval ${Cmd}
    convert_symlink_to_dir ${project}
    DockerDir=./docker_dev/${docker_dev_path}
    echo "================================================================================"
    echo "Building ${title}... on ${DockerDir}"
    echo "================================================================================"
    Cmd="docker build"
    Cmd+=" -t ${image_name}"
    Cmd+=" -t ${image_name}:latest"
    Cmd+=" -t ${image_name}:${image_version}"
    Cmd+=" -f ${DockerDir}/Dockerfile ${RootDir}"
    echo ${Cmd}
    eval ${Cmd} 1>&2 || error_exit "Docker build failed"
    if [ ! -z "${test_script}" ] && [ "${test_script}" != "null" ]; then
        if [ ! -f ${DockerDir}/${test_script} ]; then
            error_exit "Test script ${test_script} is missing"
        fi
        echo "Running test script ${test_script} for ${image_name}"
        # NOTE(review): 'docker cp' and 'docker exec' address ${image_name} as
        # if it were a *running container*; confirm a container by that name
        # exists when tests are enabled.
        Cmd="docker cp ${DockerDir}/${test_script} ${image_name}:/"
        echo ${Cmd}
        eval ${Cmd} 1>&2 || error_exit "Test script copy failed"
        Cmd="docker exec ${image_name} chmod +x /${test_script}"
        echo ${Cmd}
        eval ${Cmd} 1>&2 || error_exit "Test script chmod failed"
        Cmd="docker run"
        Cmd+=" --rm"
        Cmd+=" -d"
        Cmd+=" --name test-container"
        Cmd+=" ${image_name}"
        # BUG FIX: the original interpolated the undefined ${TestScript}
        # (wrong capitalisation), so the container ran '/bin/bash -c /'.
        Cmd+=" /bin/bash -c /${test_script}"
        echo ${Cmd}
        eval ${Cmd} 1>&2 || error_exit "Test script run failed"
        echo "Removing test script ${test_script} from ${image_name}"
        Cmd="docker exec ${image_name} rm /${test_script}"
        echo ${Cmd}
        eval ${Cmd} 1>&2 || error_exit "Test script remove failed"
    fi
: <<'COMMENT'
-------------------------
Every single docker_dev project must have a test script
image_test.sh
that is copied to the image's root directory.
-------------------------
Cmd="docker run"
Cmd+=" --rm"
Cmd+=" -d"
Cmd+=" --name test-container"
Cmd+=" ${image_name}"
Cmd+=" /bin/bash -c /${TestScript}"
echo ${Cmd}
eval ${Cmd} 1>&2 || exit 1
# Get the exit status of the test script
docker wait test-container
# Check the exit status of the container
EXIT_STATUS=$?
# Analyze the exit status
if [ ${EXIT_STATUS} -ne 0 ]; then
echo "Tests Failed"
exit 1
fi
COMMENT
    # Tag for the registry and push both the 'latest' and versioned tags.
    Cmd="docker tag"
    Cmd+=" ${image_name}:latest"
    Cmd+=" ${RegistryService}/${image_name}:latest"
    echo ${Cmd}
    eval ${Cmd} || error_exit "Docker tag failed"
    Cmd="docker tag"
    Cmd+=" ${image_name}:${image_version}"
    Cmd+=" ${RegistryService}/${image_name}:${image_version}"
    echo ${Cmd}
    eval ${Cmd} || error_exit "Docker tag 2failed"
    Cmd="docker push"
    Cmd+=" ${RegistryService}/${image_name}:latest"
    echo ${Cmd}
    eval ${Cmd} || error_exit "Docker push failed"
    Cmd="docker push"
    Cmd+=" ${RegistryService}/${image_name}:${image_version}"
    echo ${Cmd}
    eval ${Cmd} || error_exit "Docker push 2 failed"
    echo "Removing the copy of ${project} from ${RootDir}"
    Cmd="rm -rf ${RootDir}/${project}"
    echo ${Cmd}
    eval ${Cmd}
    print_all_reg_images ${project}
}
# Replace a symlinked project directory under ${RootDir} with a real copy of
# its target so 'docker build' can include the files in its build context.
# Arguments: $1 - project directory name (relative to ${RootDir})
# Globals: RootDir (read), Cmd and src_path (clobbered)
# Fixes vs. original:
#   * 'rm ...symlink' ran unconditionally, erroring whenever the path was
#     already a plain directory — it now runs only after the mv created it.
#   * the directory check used the CWD-relative path ${project} instead of
#     ${RootDir}/${project}.
convert_symlink_to_dir() {
    project=${1}
    if [ -L ${RootDir}/${project} ]; then
        echo "Converting symlink ${RootDir}/${project} to directory"
        Cmd="mv ${RootDir}/${project} ${RootDir}/${project}.symlink"
        echo ${Cmd} && eval ${Cmd}
        src_path=$(readlink -f ${RootDir}/${project}.symlink)
        Cmd="cp -r ${src_path} ${RootDir}/${project}"
        echo ${Cmd} && eval ${Cmd}
        Cmd="rm ${RootDir}/${project}.symlink"
        echo ${Cmd} && eval ${Cmd}
    fi
    if [ -d ${RootDir}/${project} ]; then
        echo "${RootDir}/${project} is a Directory"
    fi
}
# Return 0 iff ${1}:${2} has a v2 manifest in the configured registry;
# any other HTTP status (or an unreachable registry) yields 1.
image_exists() {
    image_name=${1}
    version=${2}
    response=$(curl -s -o /dev/null -w "%{http_code}" "${RegistryProtocol}://${RegistryService}/v2/${image_name}/manifests/${version}" -H "Accept: application/vnd.docker.distribution.manifest.v2+json")
    if [ "$response" -eq 200 ]; then
        return 0
    fi
    return 1
}
# Print "repo:tag" for every image in the registry; when $1 is non-empty,
# only lines matching it (via grep) are printed.
print_all_reg_images() {
    project=${1}
    repositories=$(curl -s "${RegistryProtocol}://${RegistryService}/v2/_catalog" | jq -r '.repositories[]')
    for repo in $repositories; do
        tags=$(curl -s "${RegistryProtocol}://${RegistryService}/v2/$repo/tags/list" | jq -r '.tags[]')
        for tag in $tags; do
            if [ "${project}" == "" ];then
                echo "$repo:$tag"
            else
                echo "$repo:$tag" | grep ${project}
            fi
        done
    done
}
# Echo the newest release subdirectory of ${1} on the release host
# (newest = last entry of 'ls -tr', i.e. most recently modified).
get_latest_dist_version() {
    project=${1}
    remote_cmd="ls -tr ${ReleaseDir}/${project} | tail -1"
    Version=$(ssh -q -p ${ReleasePort} ${ReleaseUser}@${ReleaseHost} "${remote_cmd}")
    echo "${Version}"
}
echo "------------------------------------- Starting ${0} ${*}"
echo RootDir=${RootDir}
cd ${RootDir}
# Stage the latest docker_dev checkout into the build context.
echo "Getting latest version of docker_dev"
Cmd="${SetVersionScript} docker_dev latest"
echo ${Cmd}
eval ${Cmd}
convert_symlink_to_dir docker_dev
# One job per entry of the fetched config: build each configured image.
Jobs=()
while IFS= read -r item; do
Jobs+=("$item")
done < <(echo "${DIMAGES_CONFIG}" | jq -c 'to_entries[]')
for item in "${Jobs[@]}"; do
# Extract key (name) and values
title=$(echo "$item" | jq -r '.key')
image_name=$(echo "$item" | jq -r '.value.image_name')
docker_dev_path=$(echo "$item" | jq -r '.value.docker_dev_path')
project=$(echo "$item" | jq -r '.value.based_on_project')
test_script=$(echo "$item" | jq -r '.value.test_script')
build_docker_image "$title" "$image_name" "$docker_dev_path" "$project" "${test_script}"
done
# Remove the staged docker_dev copy now that all builds are done.
Cmd="rm -rf ${RootDir}/docker_dev"
echo ${Cmd}
eval ${Cmd}
echo "Done ${0} ${*}"
#===============================================================================
#===============================================================================

33
build/list_docker_images.sh Executable file
View File

@ -0,0 +1,33 @@
#!/bin/bash
# List every repo:tag in the private registry, optionally filtered by $1.
# NOTE(review): usage() is defined but never invoked in this script.
usage() {
echo -n "Usage: ${0} <grep_filter>"
echo
exit 1
}
# --- Settings
RegistryService=cloud21.cvtt.vpn:5500
RegistryProtocol=http
# Walk the registry catalog and print a "REPO: <name>" header followed by
# its "repo:tag" lines; a non-empty $1 grep-filters the tag lines.
print_all_reg_images() {
    project=${1}
    catalog="${RegistryProtocol}://${RegistryService}/v2/_catalog"
    repositories=$(curl -s "$catalog" | jq -r '.repositories[]')
    for repo in $repositories; do
        tags=$(curl -s "${RegistryProtocol}://${RegistryService}/v2/$repo/tags/list" | jq -r '.tags[]')
        echo "REPO: $repo"
        for tag in $tags; do
            if [ "${project}" == "" ];then
                echo "$repo:$tag"
            else
                echo "$repo:$tag" | grep ${project}
            fi
        done
    done
}
print_all_reg_images ${1}

23
build/run_build.sh Executable file
View File

@ -0,0 +1,23 @@
#!/bin/bash
# Build and publish a release for one project: push local commits, run
# build_release.sh, then pull the new tag back and push it everywhere.
usage() {
    echo "Usage: $0 <project name>"
    exit 1
}
prj=${1}
if [ "" == "${prj}" ] ; then
    usage
fi
Cmd="pushd /home/oleg/develop/cvtt2-ops"
Cmd+=" && (cd ${prj}"
Cmd+=" && git pushall)"
Cmd+=" && ./build_release.sh -p ${prj}"
Cmd+=" && (cd ${prj}"
Cmd+=" && git pull --tags"
Cmd+=" && git pushall)"
Cmd+=" && popd"
echo ${Cmd}
eval ${Cmd}

View File

@ -1,38 +1,58 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# ---------------- Settings # ---------------- Settings
declare -A git_repo_arr
git_repo_arr[cvttpy]=git@cloud21.cvtt.vpn:/opt/store/git/cvttpy.git
git_repo_arr[ops]=git@cloud21.cvtt.vpn:/opt/store/git/ops.git
default_project=cvttpy
default_branch=master
repo=git@cloud21.cvtt.vpn:/works/git/cvtt2/ops.git
dist_root=/home/cvttdist/software/cvtt2 dist_root=/home/cvttdist/software/cvtt2
dist_user=cvttdist dist_user=cvttdist
dist_host="cloud21.cvtt.vpn" dist_host="cloud21.cvtt.vpn"
dist_ssh_port="22" dist_ssh_port="22"
dist_locations="cloud21.cvtt.vpn:22 homestore.cvtt.vpn:22" dist_locations="cloud21.cvtt.vpn:22 hs01.cvtt.vpn:22"
version_file="VERSION"
interactive=Y prj=ops
brnch=master
interactive=N
# cmdline # ---------------- Settings
prj=${1:-${default_project}}
brnch=${2:-${default_branch}}
if [ "${3}" == "D" ]; then
interactive=N
fi
function confirm { # ---------------- cmdline
usage() {
echo "Usage: $0 [-b <branch (master)> -i (interactive)"
exit 1
}
while getopts "b:i" opt; do
case ${opt} in
b )
brnch=$OPTARG
;;
i )
interactive=Y
;;
\? )
echo "Invalid option: -$OPTARG" >&2
usage
;;
: )
echo "Option -$OPTARG requires an argument." >&2
usage
;;
esac
done
# ---------------- cmdline
confirm() {
if [ "${interactive}" == "Y" ]; then if [ "${interactive}" == "Y" ]; then
echo "--------------------------------" echo "--------------------------------"
echo -n "Press <Enter> to continue" && read echo -n "Press <Enter> to continue" && read
fi fi
} }
if [ "${interactive}" == "Y" ]; then if [ "${interactive}" == "Y" ]; then
echo -n "Enter project [${prj}]: " echo -n "Enter project [${prj}]: "
read project read project
@ -44,7 +64,7 @@ else
project=${prj} project=${prj}
fi fi
repo=${git_repo_arr[${project}]} # repo=${git_repo_arr[${project}]}
if [ -z ${repo} ]; then if [ -z ${repo} ]; then
echo "ERROR: Project repository for ${project} not found" echo "ERROR: Project repository for ${project} not found"
exit -1 exit -1
@ -63,6 +83,13 @@ else
fi fi
tmp_dir=$(mktemp -d) tmp_dir=$(mktemp -d)
function cleanup {
cd ${HOME}
rm -rf ${tmp_dir}
}
trap cleanup EXIT
prj_dir="${tmp_dir}/${prj}" prj_dir="${tmp_dir}/${prj}"
cmd_arr=() cmd_arr=()
@ -98,16 +125,18 @@ if [ "${?}" != "0" ]; then
fi fi
release_version=$(cat release_version.txt) release_version=$(cat ${version_file} | awk -F',' '{print $1}')
whats_new=$(cat ${version_file} | awk -F',' '{print $2}')
echo "--------------------------------" echo "--------------------------------"
echo "Version file: ${version_file}"
echo "Release version: ${release_version}" echo "Release version: ${release_version}"
confirm confirm
version_tag="v${release_version}" version_tag="v${release_version}"
version_comment="${version_tag}_${project}_${branch}_$(date +%Y%m%d)" version_comment="'${version_tag} ${project} ${branch} $(date +%Y-%m-%d)\n${whats_new}'"
cmd_arr=() cmd_arr=()
Cmd="git tag -a ${version_tag} -m ${version_comment}" Cmd="git tag -a ${version_tag} -m ${version_comment}"
@ -119,6 +148,8 @@ cmd_arr+=("${Cmd}")
Cmd="rm -rf .git" Cmd="rm -rf .git"
cmd_arr+=("${Cmd}") cmd_arr+=("${Cmd}")
SourceLoc=../${project}
dist_path="${dist_root}/${project}/${release_version}" dist_path="${dist_root}/${project}/${release_version}"
for dist_loc in ${dist_locations}; do for dist_loc in ${dist_locations}; do
@ -126,9 +157,9 @@ for dist_loc in ${dist_locations}; do
dist_host=${dhp[0]} dist_host=${dhp[0]}
dist_port=${dhp[1]} dist_port=${dhp[1]}
Cmd="rsync -avzh" Cmd="rsync -avzh"
Cmd="${Cmd} --rsync-path=\"mkdir -p ${dist_path} && rsync\"" Cmd="${Cmd} --rsync-path=\"mkdir -p ${dist_path}"
Cmd="${Cmd} -e \"ssh -p ${dist_ssh_port}\"" Cmd="${Cmd} && rsync\" -e \"ssh -p ${dist_ssh_port}\""
Cmd="${Cmd} ../${project} ${dist_user}@${dist_host}:${dist_path}/" Cmd="${Cmd} $SourceLoc ${dist_user}@${dist_host}:${dist_path}/"
cmd_arr+=("${Cmd}") cmd_arr+=("${Cmd}")
done done
@ -149,7 +180,4 @@ do
pwd && echo ${cmd} && eval ${cmd} pwd && echo ${cmd} && eval ${cmd}
done done
cd ${HOME}
rm -rf ${tmp_dir}
echo "$0 Done ${project} ${release_version}" echo "$0 Done ${project} ${release_version}"

View File

@ -1,10 +0,0 @@
0 6 * * * /usr/bin/supervisorctl -c /home/cvtt/prod/config/supervisor/supervisord.conf restart config_service 2>&1 | /usr/bin/ts '[\%Y-\%m-\%d \%H:\%M:\%S]' > /home/cvtt/prod/logs/$(date +\%Y\%m\%d_\%H\%M\%S).config_service.restart.log
#
#
#---------------------- Utils
0 5 * * * /home/cvtt/prod/utils/config_svc_backup.sh 2>&1 | /usr/bin/ts '[\%Y-\%m-\%d \%H:\%M:\%S]' > /home/cvtt/prod/logs/$(date +\%Y\%m\%d_\%H\%M\%S).config_svc_backup.log
0 6 * * * /home/cvtt/prod/utils/archive_logs.sh /home/cvtt/prod/logs 2>&1 | /usr/bin/ts '[\%Y-\%m-\%d \%H:\%M:\%S]' > /home/cvtt/prod/logs/$(date +\%Y\%m\%d_\%H\%M\%S).archive_logs.log
10 1 * * * /home/cvtt/prod/utils/move_archives.sh 2>&1 | /usr/bin/ts '[\%Y-\%m-\%d \%H:\%M:\%S]' > /home/cvtt/prod/logs/$(date +\%Y\%m\%d_\%H\%M\%S).move_archives.log
#
#---------------------- T E S T

View File

@ -1,11 +0,0 @@
0 6 * * * /usr/bin/supervisorctl -c /home/cvtt/prod/config/supervisor/supervisord.conf restart all 2>&1 | /usr/bin/ts '[\%Y-\%m-\%d \%H:\%M:\%S]' > /home/cvtt/prod/logs/$(date +\%Y\%m\%d_\%H\%M\%S).bnbspot_md_recorder.restart.log
#
#10 6 * * * /usr/bin/supervisorctl -c /home/cvtt/prod/config/supervisor/supervisord.conf restart executor 2>&1 | /usr/bin/ts '[\%Y-\%m-\%d \%H:\%M:\%S]' > /home/cvtt/prod/logs/$(date +\%Y\%m\%d_\%H\%M\%S).executor_app.restart.log
#
#---------------------- Utils
0 6 * * * /home/cvtt/prod/utils/archive_logs.sh /home/cvtt/prod/logs 2>&1 | /usr/bin/ts '[\%Y-\%m-\%d \%H:\%M:\%S]' > /home/cvtt/prod/logs/$(date +\%Y\%m\%d_\%H\%M\%S).cronjob_log_archive.log
10 1 * * * /home/cvtt/prod/utils/move_archives.sh cloud17 2>&1 | /usr/bin/ts '[\%Y-\%m-\%d \%H:\%M:\%S]' > /home/cvtt/prod/logs/$(date +\%Y\%m\%d_\%H\%M\%S).move_archives.log
#
# ----------------- # TEST
#
# ----------------- # TEST

View File

@ -1,8 +0,0 @@
20 5 * * SAT /home/cvtt/prod/run/load_histdata.sh 2>&1 | /usr/bin/ts '[\%Y-\%m-\%d \%H:\%M:\%S]' >> /home/cvtt/prod/logs/$(date +\
20 6 * * SAT /home/cvtt/prod/run/prune_eqt_histdata.sh 2>&1 | /usr/bin/ts '[\%Y-\%m-\%d \%H:\%M:\%S]' >> /home/cvtt/prod/logs/$(da
#
0 6 * * SUN /home/cvtt/prod/run/archive_logs.sh /home/cvtt/prod/logs 2>&1 | /usr/bin/ts '[\%Y-\%m-\%d \%H:\%M:\%S]' > /home/cvtt/p
#
# T E S T
#----------------------------------------------------------------------
#----------------------------------------------------------------------

View File

@ -1,11 +0,0 @@
#
#---------------------- Market Data
0 6 * * * /home/cvtt/prod/run/archive_yesterday_md.sh coinbase,bnbspot,bnbfut 2>&1 > /home/cvtt/prod/logs/$(date +\%Y\%m\%d_\%H\%M\%S).archive_yestd_md.log
#---------------------- Utils
0 7 * * * /home/cvtt/prod/utils/backup_md_archive.sh 2>&1 | /usr/bin/ts '[\%Y-\%m-\%d \%H:\%M:\%S]' > /home/cvtt/prod/logs/$(date +\%Y\%m\%d_\%H\%M\%S).backup_md_archive.log
#
30 7 * * * /home/cvtt/prod/utils/archive_logs.sh /home/cvtt/prod/logs 2>&1 | /usr/bin/ts '[\%Y-\%m-\%d \%H:\%M:\%S]' > /home/cvtt/prod/logs/$(date +\%Y\%m\%d_\%H\%M\%S).cronjob_log_archive.log
#
# ----------------- # TEST
#
# ----------------- # TEST

70
cronjobs/save_cronjobs.sh Executable file
View File

@ -0,0 +1,70 @@
#!/bin/bash
# Snapshot the crontabs of every host of each user into a git-tracked tree.
usage() {
    echo "Usage: ${0} <local_root_dir>"
    exit 1
}
# Echo the host names (one per line) on which user $1 has an account,
# according to the cvtt_hosts document served by the config service.
# Arguments: $1 - user name, $2 - DNS domain (used as the top-level key).
# The pipeline is assembled as a string and eval'd so the single-quoted jq
# program keeps its literal $domain/$usr references.
get_user_hosts() {
local User=${1}
local Domain=${2}
Cmd="curl -s http://cloud16.cvtt.vpn:6789/admin/cvtt_hosts"
Cmd="${Cmd} | ${HOME}/bin/hjson -j"
Cmd="${Cmd} | jq -r"
Cmd="${Cmd} --arg domain \"${Domain}\""
Cmd="${Cmd} --arg usr \"${User}\""
Cmd="${Cmd} '.[\$domain] | to_entries[] | select(.value.users[] | contains(\$usr)) | .key'"
# echo $Cmd ### - FOR DEBUG ONLY
eval ${Cmd}
}
Domain=cvtt.vpn
echo "Started ${0} ${*} ..."
# $1 must be an existing, git-initialised directory that receives the dump.
RootDir=${1}
if [ ! -d "${RootDir}" ]; then
usage
fi
cd ${RootDir}
Cmd="git pull"
echo ${Cmd} && eval ${Cmd}
# For each user, fetch 'crontab -l' from every host that user lives on and
# store it as <RootDir>/<user>/<host>.cron.
for User in cvtt oleg
do
echo "User=$User"
Cmd="mkdir -p ${RootDir}/${User}"
echo ${Cmd} && eval ${Cmd}
hosts=$(get_user_hosts ${User} ${Domain})
echo ${hosts}
for host in ${hosts}
do
echo "===================="
echo "${host}:${User}"
Cmd="ssh ${User}@${host}.${Domain} 'crontab -l' | tee ${RootDir}/${User}/${host}.cron"
echo ${Cmd} && eval ${Cmd}
done
done
cd ${RootDir}
# Drop empty snapshots (hosts with no crontab) and now-empty directories,
# then commit and push whatever changed.
Cmd="find -type f -empty -delete -print"
echo ${Cmd} && eval ${Cmd}
Cmd="find -type d -empty -delete -print"
echo ${Cmd} && eval ${Cmd}
Cmd="git add ."
echo ${Cmd} && eval ${Cmd}
Cmd="git commit -a -m '$(date)'"
echo ${Cmd} && eval ${Cmd}
Cmd="git pushall"
echo ${Cmd} && eval ${Cmd}
echo "Done ${0} ${*}"

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,17 @@
#!/bin/bash
# Launch the crypto_exch_stats container from the private registry:
# detached, removed on exit, always pulling the newest image, with host
# networking and the prod data/log directories mounted in.
Cmd="docker run -d --rm --pull=always"
Cmd+=" --network=host --name=crypto_exch_stats"
Cmd+=" --volume=${HOME}/prod/data:/app/data"
Cmd+=" --volume=${HOME}/prod/logs:/logs"
Cmd+=" -e CONFIG_SERVICE=cloud16.cvtt.vpn:6789"
Cmd+=" cloud21.cvtt.vpn:5500/crypto_exch_stats:latest"
echo ${Cmd}
eval ${Cmd}
# Cmd+=" cloud21.cvtt.vpn:5500/relative_liquidity:latest"

View File

@ -1 +0,0 @@
0.7.6

33
research/ad_hoc/rearrange2.sh Executable file
View File

@ -0,0 +1,33 @@
#!/bin/bash
# by chatGPT
# ==========================================
# hs01.cvtt.vpn SETTINGS
# ==========================================
# Source tree layout: <src>/<?>/<ticker>/<year>/<date>.*.db.gz
SOURCE_BASE_DIR="/works/cvtt/md_archive/equity/alpaca_md.OLD"
DESTINATION_BASE_DIR="/works/cvtt/md_archive/equity/alpaca_md"
# Re-home every .db.gz file as <date>.<ticker>.alpaca_1m_bars.db.gz under
# <dest>/<year>/A/<ticker>/.
find "$SOURCE_BASE_DIR" -type f -name "*.db.gz" | while read -r file; do
    rel="${file#$SOURCE_BASE_DIR/}"
    ticker=$(echo "$rel" | cut -d'/' -f2)
    year=$(echo "$rel" | cut -d'/' -f3)
    stamp=$(basename "$file" | cut -d'.' -f1)
    new_file_name="${stamp}.${ticker}.alpaca_1m_bars.db.gz"
    destination_dir="$DESTINATION_BASE_DIR/$year/A/$ticker"
    mkdir -p "$destination_dir"
    mv "$file" "$destination_dir/$new_file_name"
    echo "Moved $file to $destination_dir/$new_file_name"
done

275
research/aggregate_features.sh Executable file
View File

@ -0,0 +1,275 @@
#!/bin/bash
# Derive 1-minute feature tables (trade features, quote features, bars and a
# combined view) in $2 from the raw md_trades/md_quotes/md_1min_bars tables
# in $1. All SQL is passed to sqlite3 as quoted argument strings.
if [ $# -ne 2 ]; then
echo "Usage: $0 <source_database_file> <features_database_file>"
exit 1
fi
SRC_DB=$1
DEST_DB=$2
if [ ! -f "$SRC_DB" ]; then
echo "Error: Source database file $SRC_DB does not exist"
exit 1
fi
echo "Creating feature tables in $DEST_DB using data from $SRC_DB..."
# Create md_1min_trade_features table
echo "Creating md_1min_trade_features table..."
sqlite3 "$DEST_DB" "
DROP TABLE IF EXISTS md_1min_trade_features;
CREATE TABLE IF NOT EXISTS md_1min_trade_features (
bin_tstamp TEXT,
tstamp_ns INTEGER,
exchange_id TEXT,
instrument_id TEXT,
price_mean REAL,
price_median REAL,
volume REAL,
vwap REAL,
signed_volume REAL,
order_flow_imbalance REAL,
num_trades INTEGER,
avg_trade_size REAL,
PRIMARY KEY (bin_tstamp, exchange_id, instrument_id)
);"
# Create index for md_1min_trade_features
echo "Creating index for md_1min_trade_features..."
sqlite3 "$DEST_DB" "
CREATE UNIQUE INDEX IF NOT EXISTS md_1min_trade_features_uidx
ON md_1min_trade_features(bin_tstamp, exchange_id, instrument_id);"
# Populate md_1min_trade_features using source database
# NOTE(review): assumes md_trades.condition encodes trade side as 'B'/'S';
# any other value contributes 0 signed quantity — TODO confirm against the
# recorder's condition codes.
echo "Populating md_1min_trade_features..."
sqlite3 "$SRC_DB" "ATTACH DATABASE '$DEST_DB' AS dest;
WITH trade_metrics AS (
SELECT
tstamp,
strftime('%Y-%m-%d %H:%M:00', tstamp) as bin_tstamp,
exchange_id,
instrument_id,
px as price,
qty,
CASE
WHEN condition = 'B' THEN qty
WHEN condition = 'S' THEN -qty
ELSE 0
END as signed_qty
FROM md_trades
),
trade_metrics_agg AS (
SELECT
bin_tstamp,
exchange_id,
instrument_id,
COUNT(*) as cnt,
MIN(tstamp) as min_tstamp,
MAX(tstamp) as max_tstamp
FROM trade_metrics
GROUP BY bin_tstamp, exchange_id, instrument_id
)
INSERT INTO dest.md_1min_trade_features
SELECT
tm.bin_tstamp,
CAST(strftime('%s', tm.bin_tstamp) * 1000000000 AS INTEGER) as tstamp_ns,
tm.exchange_id,
tm.instrument_id,
AVG(price) as price_mean,
AVG(CASE WHEN rank_num >= FLOOR(cnt/2.0) AND rank_num <= CEIL(cnt/2.0) THEN price ELSE NULL END) as price_median,
SUM(qty) as volume,
SUM(price * qty) / SUM(qty) as vwap,
SUM(signed_qty) as signed_volume,
SUM(CASE WHEN signed_qty > 0 THEN signed_qty ELSE 0 END) -
SUM(CASE WHEN signed_qty < 0 THEN ABS(signed_qty) ELSE 0 END) as order_flow_imbalance,
COUNT(*) as num_trades,
AVG(qty) as avg_trade_size
FROM (
SELECT
tm.*,
tma.cnt,
ROW_NUMBER() OVER (PARTITION BY tm.bin_tstamp, tm.exchange_id, tm.instrument_id ORDER BY price) as rank_num
FROM trade_metrics tm
JOIN trade_metrics_agg tma
ON tm.bin_tstamp = tma.bin_tstamp
AND tm.exchange_id = tma.exchange_id
AND tm.instrument_id = tma.instrument_id
) tm
GROUP BY tm.bin_tstamp, tm.exchange_id, tm.instrument_id;"
# Create md_1min_quote_features table in destination database
echo "Creating md_1min_quote_features table..."
sqlite3 "$DEST_DB" "
DROP TABLE IF EXISTS md_1min_quote_features;
CREATE TABLE IF NOT EXISTS md_1min_quote_features (
bin_tstamp TEXT,
tstamp_ns INTEGER,
exchange_id TEXT,
instrument_id TEXT,
mid_price_open REAL,
mid_price_high REAL,
mid_price_low REAL,
mid_price_close REAL,
mid_price_mean REAL,
rel_spread_mean REAL,
rel_spread_min REAL,
rel_spread_max REAL,
rel_spread_first REAL,
rel_spread_last REAL,
l1_imbalance_mean REAL,
l1_imbalance_min REAL,
l1_imbalance_max REAL,
l1_imbalance_first REAL,
l1_imbalance_last REAL,
micro_price_mean REAL,
micro_price_min REAL,
micro_price_max REAL,
micro_price_first REAL,
micro_price_last REAL,
weighted_mid_mean REAL,
weighted_mid_min REAL,
weighted_mid_max REAL,
weighted_mid_first REAL,
weighted_mid_last REAL,
PRIMARY KEY (bin_tstamp, exchange_id, instrument_id)
);"
# Create index for md_1min_quote_features
echo "Creating index for md_1min_quote_features..."
sqlite3 "$DEST_DB" "
CREATE UNIQUE INDEX IF NOT EXISTS md_1min_quote_features_uidx
ON md_1min_quote_features(bin_tstamp, exchange_id, instrument_id);"
# Populate md_1min_quote_features using source database
# NOTE(review): this statement mixes FIRST_VALUE/LAST_VALUE window functions
# with GROUP BY; the *_first/*_last columns rely on SQLite's evaluation of
# window functions over grouped rows — verify the output against raw quotes.
echo "Populating md_1min_quote_features..."
sqlite3 "$SRC_DB" "ATTACH DATABASE '$DEST_DB' AS dest;
INSERT INTO dest.md_1min_quote_features
SELECT
strftime('%Y-%m-%d %H:%M:00', tstamp) as bin_tstamp,
CAST(strftime('%s', tstamp) * 1000000000 AS INTEGER) as tstamp_ns,
exchange_id,
instrument_id,
FIRST_VALUE((ask_px + bid_px) / 2.0) OVER w as mid_price_open,
MAX((ask_px + bid_px) / 2.0) as mid_price_high,
MIN((ask_px + bid_px) / 2.0) as mid_price_low,
LAST_VALUE((ask_px + bid_px) / 2.0) OVER w as mid_price_close,
AVG((ask_px + bid_px) / 2.0) as mid_price_mean,
AVG((ask_px - bid_px) / ((ask_px + bid_px) / 2.0)) as rel_spread_mean,
MIN((ask_px - bid_px) / ((ask_px + bid_px) / 2.0)) as rel_spread_min,
MAX((ask_px - bid_px) / ((ask_px + bid_px) / 2.0)) as rel_spread_max,
FIRST_VALUE((ask_px - bid_px) / ((ask_px + bid_px) / 2.0)) OVER w as rel_spread_first,
LAST_VALUE((ask_px - bid_px) / ((ask_px + bid_px) / 2.0)) OVER w as rel_spread_last,
AVG((bid_qty - ask_qty) / (bid_qty + ask_qty)) as l1_imbalance_mean,
MIN((bid_qty - ask_qty) / (bid_qty + ask_qty)) as l1_imbalance_min,
MAX((bid_qty - ask_qty) / (bid_qty + ask_qty)) as l1_imbalance_max,
FIRST_VALUE((bid_qty - ask_qty) / (bid_qty + ask_qty)) OVER w as l1_imbalance_first,
LAST_VALUE((bid_qty - ask_qty) / (bid_qty + ask_qty)) OVER w as l1_imbalance_last,
AVG((ask_px * bid_qty + bid_px * ask_qty) / (bid_qty + ask_qty)) as micro_price_mean,
MIN((ask_px * bid_qty + bid_px * ask_qty) / (bid_qty + ask_qty)) as micro_price_min,
MAX((ask_px * bid_qty + bid_px * ask_qty) / (bid_qty + ask_qty)) as micro_price_max,
FIRST_VALUE((ask_px * bid_qty + bid_px * ask_qty) / (bid_qty + ask_qty)) OVER w as micro_price_first,
LAST_VALUE((ask_px * bid_qty + bid_px * ask_qty) / (bid_qty + ask_qty)) OVER w as micro_price_last,
AVG((ask_px * ask_qty + bid_px * bid_qty) / (bid_qty + ask_qty)) as weighted_mid_mean,
MIN((ask_px * ask_qty + bid_px * bid_qty) / (bid_qty + ask_qty)) as weighted_mid_min,
MAX((ask_px * ask_qty + bid_px * bid_qty) / (bid_qty + ask_qty)) as weighted_mid_max,
FIRST_VALUE((ask_px * ask_qty + bid_px * bid_qty) / (bid_qty + ask_qty)) OVER w as weighted_mid_first,
LAST_VALUE((ask_px * ask_qty + bid_px * bid_qty) / (bid_qty + ask_qty)) OVER w as weighted_mid_last
FROM md_quotes
GROUP BY strftime('%Y-%m-%d %H:%M:00', tstamp), exchange_id, instrument_id
WINDOW w AS (
PARTITION BY strftime('%Y-%m-%d %H:%M:00', tstamp), exchange_id, instrument_id
ORDER BY tstamp
RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING
);"
# Copy the md_1min_bars table to destination database
echo "Copying md_1min_bars to destination..."
sqlite3 "$DEST_DB" "
DROP TABLE IF EXISTS md_1min_bars;
CREATE TABLE IF NOT EXISTS md_1min_bars (
bin_tstamp TEXT,
tstamp_ns INTEGER,
exchange_id TEXT,
instrument_id TEXT,
open REAL,
high REAL,
low REAL,
close REAL,
volume REAL,
vwap REAL,
num_trades INTEGER,
PRIMARY KEY (bin_tstamp, exchange_id, instrument_id)
);"
echo "Creating index for md_1min_bars..."
sqlite3 "$DEST_DB" "
CREATE UNIQUE INDEX IF NOT EXISTS md_1min_bars_uidx
ON md_1min_bars(bin_tstamp, exchange_id, instrument_id);"
echo "Populating md_1min_bars..."
sqlite3 "$SRC_DB" "ATTACH DATABASE '$DEST_DB' AS dest;
INSERT INTO dest.md_1min_bars
SELECT * FROM md_1min_bars;"
# Create the combined features view in destination database
# The view left-joins quote and trade features onto the bars so every bar
# row appears even when a minute has no quotes or no trades.
echo "Creating combined features view..."
sqlite3 "$DEST_DB" "
DROP VIEW IF EXISTS md_1min_features_view;
CREATE VIEW IF NOT EXISTS md_1min_features_view AS
SELECT
b.bin_tstamp,
b.tstamp_ns,
b.exchange_id,
b.instrument_id,
-- OHLCV data from md_1min_bars
b.open,
b.high,
b.low,
b.close,
b.volume,
b.vwap,
b.num_trades,
-- Quote features
q.mid_price_open,
q.mid_price_high,
q.mid_price_low,
q.mid_price_close,
q.mid_price_mean,
q.rel_spread_mean,
q.rel_spread_min,
q.rel_spread_max,
q.rel_spread_first,
q.rel_spread_last,
q.l1_imbalance_mean,
q.l1_imbalance_min,
q.l1_imbalance_max,
q.l1_imbalance_first,
q.l1_imbalance_last,
q.micro_price_mean,
q.micro_price_min,
q.micro_price_max,
q.micro_price_first,
q.micro_price_last,
q.weighted_mid_mean,
q.weighted_mid_min,
q.weighted_mid_max,
q.weighted_mid_first,
q.weighted_mid_last,
-- Trade features
t.price_mean as trade_price_mean,
t.price_median as trade_price_median,
t.signed_volume,
t.order_flow_imbalance,
t.avg_trade_size
FROM md_1min_bars b
LEFT JOIN md_1min_quote_features q
ON b.bin_tstamp = q.bin_tstamp
AND b.exchange_id = q.exchange_id
AND b.instrument_id = q.instrument_id
LEFT JOIN md_1min_trade_features t
ON b.bin_tstamp = t.bin_tstamp
AND b.exchange_id = t.exchange_id
AND b.instrument_id = t.instrument_id;"
echo "Feature tables created and populated successfully in $DEST_DB!"
139
research/load_crypto_md.sh Executable file
View File

@ -0,0 +1,139 @@
#!/usr/bin/env bash
# Print the one-line usage summary and terminate with status 1.
usage() {
  printf '%s%s%s%s\n' \
    "Usage: $0 [-h <host (hs01*/cloud21)>]" \
    " [-d <YYYYMMDD> (yesterday*)]" \
    " [-s <source> (cloud28/cloud21*)>]" \
    " [-t <target_dir> (/opt/jupyter_gpu/data/crypto_md)]"
  exit 1
}
# is_valid VALUE CANDIDATE...
# Returns 0 when VALUE equals one of the CANDIDATE arguments, 1 otherwise.
is_valid() {
  local candidate=$1
  shift
  local item
  for item in "$@"; do
    if [[ "$item" == "$candidate" ]]; then
      return 0
    fi
  done
  return 1
}
# ------- D E F A U L T S
date=""
host=hs01
source=cloud21
TargetDir="/opt/jupyter_gpu/data/crypto_md"
# ------- D E F A U L T S

# Parse -h/-d/-s/-t; any unknown option or missing argument prints usage.
while getopts ":h:d:s:t:" opt; do
  case ${opt} in
    d ) date=$OPTARG ;;
    h ) host=$OPTARG ;;
    s ) source=$OPTARG ;;
    t ) TargetDir=$OPTARG ;;
    \? )
      echo "Invalid option: -$OPTARG" >&2
      usage
      ;;
    : )
      echo "Option -$OPTARG requires an argument." >&2
      usage
      ;;
  esac
done

# Default the business date to yesterday (GNU date parses the word).
if [ "${date}" == "" ] ; then
  date="yesterday"
fi
echo "$date $host $source"

valid_hosts=('hs01' 'cloud21')
if ! is_valid "${host}" "${valid_hosts[@]}" ; then
  echo "Host '${host}' is not valid"
  usage
fi
valid_sources=('cloud21' 'cloud28')
# FIX: quote the arguments (the host check above already did); unquoted
# expansions are subject to word-splitting and globbing.
if ! is_valid "${source}" "${valid_sources[@]}" ; then
  echo "Source '${source}' is not valid"
  usage
fi

# Map the download host to its SSH account and per-source archive root.
if [ "${host}" == "cloud21" ] ; then
  SourceHost=cloud21.cvtt.vpn
  SourceUser=cvtt
  SourceRootDir="/opt/store/cvtt/md_archive/crypto/${source}"
elif [ "${host}" == "hs01" ]; then
  SourceHost=hs01.cvtt.vpn
  SourceUser=cvtt
  SourceRootDir="/works/cvtt/md_archive/crypto/${source}"
else
  usage
fi
echo "${SourceHost} ${SourceUser} ${SourceRootDir}"

# Derive the dated source/target file names via date's format string.
SourceFile=$(date -d "${date}" "+%Y%m%d.mktdata.db")
SourceFileZip="${SourceFile}.gz"
SourceFilePath=$(date -d "${date}" "+${SourceRootDir}/%Y/%m/${SourceFileZip}")
TargetFile=$(date -d "${date}" "+%Y%m%d.mktdata.ohlcv.db")
TargetFilePath="${TargetDir}/${TargetFile}"
# Tables=(bnbfut_ohlcv_1min bnbspot_ohlcv_1min coinbase_ohlcv_1min)
Tables=(bnbspot_ohlcv_1min coinbase_ohlcv_1min)
echo ${SourceFile}
# Stage the download in a private temp directory, removed on any exit path.
tmp_dir=$(mktemp -d)
function cleanup {
  cd "${HOME}"
  # ":?" aborts the expansion if tmp_dir is ever empty, so this can never
  # degenerate into "rm -rf /"; "--" stops option parsing.
  rm -rf -- "${tmp_dir:?}"
}
trap cleanup EXIT
# Fetch the dated gzipped market-data DB from the source host, unpack it,
# and dump the selected OHLCV tables into a freshly created target SQLite
# file (mode 600).  Uses globals set above: SourceUser/Host/FilePath,
# SourceFileZip, SourceFile, tmp_dir, Tables, TargetFilePath.
function download_file {
  Cmd="rsync"
  Cmd="${Cmd} -ahv"
  if tty -s; then
    # FIX: the rsync option is --info=progress2; "--progress=info2" is not
    # a valid rsync flag and made interactive runs fail.
    Cmd="${Cmd} --info=progress2"
  fi
  Cmd="${Cmd} ${SourceUser}@${SourceHost}:${SourceFilePath} ${tmp_dir}/"
  echo ${Cmd}
  eval ${Cmd}
  ls -lh ${tmp_dir}
  Cmd="gunzip ${tmp_dir}/${SourceFileZip}"
  echo ${Cmd} && eval ${Cmd}
  ls -lh ${tmp_dir}
  # Rebuild the target from scratch on every run.
  rm -f ${TargetFilePath}
  touch ${TargetFilePath}
  for table in "${Tables[@]}"
  do
    Cmd="sqlite3 ${tmp_dir}/${SourceFile} \".dump ${table}\" | sqlite3 ${TargetFilePath}"
    echo ${Cmd}
    eval ${Cmd}
  done
  chmod 600 ${TargetFilePath}
  ls -lh ${TargetFilePath}
}
download_file

182
research/load_eqty_md.sh Executable file
View File

@ -0,0 +1,182 @@
#!/usr/bin/env bash
# Print the usage summary (single line) and exit non-zero.
usage() {
  echo -n "Usage: $0"
  echo -n " [-h <host (hs01*/cloud21)>]"
  echo -n " [-d <YYYYMMDD> (yesterday*)]"
  # FIX: the -s and -t descriptions were missing their closing brackets.
  echo -n " [-s <stocks comma separated>]"
  echo -n " [-t <target directory>]"
  echo
  exit 1
}
# Portable date(1): on macOS the GNU version is installed as "gdate"
# (Homebrew coreutils); the script relies on GNU-style "-d" parsing below,
# which BSD date does not support.  The command name is kept in $date.
if [[ "$(uname)" == "Darwin" ]]; then
# macOS
date='gdate'
else
date='date'
fi
# ------------------ Settings
md_date=""
host=hs01
stocks=COIN,GBTC,SQ
TargetDir="/opt/jupyter_gpu/data/eqty_md"
# TargetDir="/tmp" # -------= D E B U G
# NOTE(review): this runs before getopts, so a directory supplied via -t is
# never created here -- confirm whether mkdir should move below parsing.
mkdir -p ${TargetDir}
# NOTE(review): Table appears unused in this script -- verify before removing.
Table=md_1min_bars
# ------------------ Settings
# Succeed (status 0) when the first argument appears among the rest,
# fail (status 1) otherwise.
is_valid() {
  local wanted=$1
  shift
  while [ "$#" -gt 0 ]; do
    if [[ "$1" == "$wanted" ]]; then
      return 0
    fi
    shift
  done
  return 1
}
# Parse options; -u prints usage, unknown options / missing arguments abort.
while getopts ":h:d:s:t:u" opt; do
  case ${opt} in
    d ) md_date=$OPTARG ;;
    h ) host=$OPTARG ;;
    s ) stocks=$OPTARG ;;
    t ) TargetDir=$OPTARG ;;
    u ) usage ;;
    \? )
      echo "Invalid option: -$OPTARG" >&2
      usage
      ;;
    : )
      echo "Option -$OPTARG requires an argument." >&2
      usage
      ;;
  esac
done

# Default the business date to yesterday.
if [ "${md_date}" == "" ] ; then
  md_date="yesterday"
fi
# FIX: was "$source", a variable this script never sets (copy/paste from
# the crypto loader); log the stock list instead.
echo "$md_date $host $stocks"

valid_hosts=('hs01' 'cloud21')
if ! is_valid "${host}" "${valid_hosts[@]}" ; then
  echo "Host '${host}' is not valid"
  usage
fi

TargetFile=$(${date} -d "${md_date}" "+%Y%m%d.eqty.mktdata.ohlcv.db")
TargetFilePath="${TargetDir}/${TargetFile}"
# FIX: only archive a previous output file when one exists; the
# unconditional mv printed an error on every first run of the day.
if [ -f "${TargetFilePath}" ]; then
  mv "${TargetFilePath}" "${TargetFilePath}.saved.$(${date} '+%Y%m%d_%H%M%S')"
fi

# ---- temp dir (removed on any exit; guarded against empty expansion)
tmp_dir=$(mktemp -d)
function cleanup {
  cd "${HOME}"
  rm -rf -- "${tmp_dir:?}"
}
trap cleanup EXIT
# ---- temp dir

# Map the download host to its SSH account and archive root.
if [ "${host}" == "cloud21" ] ; then
  SourceHost=cloud21.cvtt.vpn
  SourceUser=cvtt
  SourceRootDir="/opt/store/cvtt/md_archive/equity/alpaca_md"
elif [ "${host}" == "hs01" ]; then
  SourceHost=hs01.cvtt.vpn
  SourceUser=cvtt
  SourceRootDir="/works/cvtt/md_archive/equity/alpaca_md"
else
  usage
fi

# Split the comma-separated stock list into the Stocks array.
OLDIFS=$IFS
IFS=','
read -ra Stocks <<< "$stocks"
IFS=$OLDIFS
echo "${SourceHost} ${SourceUser} ${SourceRootDir} ${Stocks[@]}"
# Download one stock's gzipped per-day bars DB, unpack it, normalize its
# index, and append its dump into the combined target DB.  Missing archives
# (holidays, new listings) are skipped, not fatal.
# $1 - remote source file path; uses globals SourceUser/Host, SourceFileZip,
# SourceFile, tmp_dir, TargetFilePath.
function download_file {
  local SourceFilePath=${1}
  Cmd="rsync"
  Cmd="${Cmd} -ahv"
  if tty -s; then
    # FIX: the rsync option is --info=progress2; "--progress=info2" is not
    # a valid rsync flag and made interactive runs fail.
    Cmd="${Cmd} --info=progress2"
  fi
  Cmd="${Cmd} ${SourceUser}@${SourceHost}:${SourceFilePath} ${tmp_dir}/"
  echo ${Cmd}
  eval ${Cmd}
  if [[ ! -f ${tmp_dir}/${SourceFileZip} ]] ; then
    echo "File ${SourceUser}@${SourceHost}:${SourceFilePath} is missing. Skipped."
    return
  fi
  ls -lh ${tmp_dir}
  Cmd="gunzip ${tmp_dir}/${SourceFileZip}"
  echo ${Cmd} && eval ${Cmd}
  DbFile=${tmp_dir}/${SourceFile}
  touch ${TargetFilePath}
  # --- T E M P - for files older than 20240813: replace the legacy
  # non-unique index with a unique one.  FIX: IF EXISTS so newer files
  # that no longer carry the legacy index do not raise an error.
  sqlite3 ${DbFile} <<EOF
DROP INDEX IF EXISTS md_1min_bars_idx;
CREATE UNIQUE INDEX IF NOT EXISTS md_1min_bars_idx on md_1min_bars(tstamp, exchange_id, instrument_id);
EOF
  # --- T E M P
  Cmd="sqlite3 ${DbFile} \".dump\" | sqlite3 ${TargetFilePath}"
  echo ${Cmd}
  eval ${Cmd}
}
echo "Loading files"
# One download per stock; archives live under <root>/<year>/<letter>/<sym>/.
for stock in "${Stocks[@]}"; do
  StockLetter="${stock:0:1}"
  SourceFile=$(${date} -d "${md_date}" "+%Y%m%d.${stock}.alpaca_1m_bars.db")
  SourceFileZip="${SourceFile}.gz"
  SourceFilePath=$(${date} -d "${md_date}" "+${SourceRootDir}/%Y/${StockLetter}/${stock}/${SourceFileZip}")
  echo ${SourceFilePath}
  download_file ${SourceFilePath}
done
if [ -f ${TargetFilePath} ]; then
  # FIX: chmod was missing its file operand and failed on every run.
  chmod 600 "${TargetFilePath}"
  ls -lh ${TargetFilePath}
fi

View File

@ -1,156 +0,0 @@
#!/bin/bash
# Supervisor for the BNBSPOT md_recorder: starts the recorder for a numbered
# instrument set, then loops forever, restarting it whenever its admin
# /ping endpoint stops answering.  --stop / --once / --restart are one-shot
# modes handled at the bottom of the script.
Script="${0} ${*}"
function usage {
echo "Usage: ${0} --inst_set=<setting #> [--restart] [--stop] [--once] [--sleep_sec=<num_seconds>]"
exit 1
}
# Hand-rolled long-option parsing.  NOTE(review): "shift" inside
# 'for arg in "$@"' does not alter the iteration list; it is a no-op here.
for arg in "${@}"
do
case ${arg} in
--restart)
Restart=Y
shift
;;
--stop)
Stop="Y"
shift
;;
--once)
Once="Y"
shift
;;
--inst_set=*)
InstrSet="${arg#*=}"
shift
;;
--sleep_sec=*)
SleepSec="${arg#*=}"
shift
;;
-*|--*)
usage
;;
*)
;;
esac
done
# Map each instrument-set number to its comma-separated instrument list.
declare -A InstrSetInstrs
InstrSetInstrs[1]=PAIR-BTC-USDT,PAIR-ETH-USDT,PAIR-LTC-USDT,PAIR-XRP-USDT,PAIR-USDT-USD
if [ "${InstrSet}" == "" ]
then
usage
fi
if [ "${SleepSec}" == "" ]
then
SleepSec=3
fi
Instruments=${InstrSetInstrs[${InstrSet}]}
if [ "${Instruments}" == "" ]
then
echo "Unrecognized instrument settings: ${InstrSet}"
usage
fi
# -------------------- S e t t i n g s
RootDir=/home/cvtt/prod
# ConfigFile="${RootDir}/config/md_recorder.cfg"
ConfigFile="http://cloud11.cvtt.vpn:6789/apps/md_recorder"
# Admin port is derived from the instrument-set number (7211, 7212, ...).
AdminPort="721${InstrSet}"
Exchange=BNBSPOT
DbCredKey=TSDB_COINBS_1
Name=MD_RECORDER_BNBSPOT
# -------------------- S e t t i n g s
LogFile="${RootDir}/logs/$(date '+%Y%m%d_%H%M%S').${Name}_${InstrSet}.log"
source ${HOME}/.pyenv/python3.10-venv/bin/activate
export PYTHONPATH=${RootDir}
# Assemble the recorder command as a string; start_it runs it via eval with
# nohup and a trailing "&" so it survives the supervisor's terminal.
Cmd="nohup"
Cmd="${Cmd} python3"
Cmd="${Cmd} ${RootDir}/cvttpy/apps/md/md_recorder.py"
Cmd="${Cmd} --config=${ConfigFile}"
Cmd="${Cmd} --active_exchanges=${Exchange}"
Cmd="${Cmd} --instruments=${Instruments}"
Cmd="${Cmd} --admin_port=${AdminPort}"
Cmd="${Cmd} --log_level=INFO"
Cmd="${Cmd} --log_file=${LogFile}"
Cmd="${Cmd} --db_credentials_key=${DbCredKey}"
Cmd="${Cmd} &"
# Launch the recorder in the background and give it time to come up.
function start_it {
echo ${Cmd}
eval ${Cmd} &
sleep 10
}
# If the admin /ping probe fails, kill any leftover recorder process found
# by grepping ps for our admin_port argument, then start a fresh one.
function check_it {
if !(timeout 3 curl -s localhost:${AdminPort}/ping > /dev/null)
then
echo "${Script} doesn't respond. restarting..."
pids=$(ps -ef | grep "admin_port=${AdminPort}" | grep -v grep | tr -s ' ' |cut -d' ' -f2)
echo pids=${pids}
if [ "${pids}" != "" ]
then
kill -9 ${pids}
fi
start_it
fi
}
# Kill sibling supervisors for the same inst_set (${$} is our own PID, kept
# out of the list), then shut the recorder down via its admin /shutdown
# endpoint, force-killing anything that keeps answering /ping.
function kill_it {
pids=$(ps -ef |grep "inst_set=${InstrSet}" |grep $(basename ${0}) |grep -v ${$} |grep -v grep |tr -s ' ' |cut -d' ' -f2)
if [ "${pids}" != "" ]
then
echo "Killing ${pids} ..."
kill -9 ${pids}
fi
while (timeout 3 curl -s localhost:${AdminPort}/ping > /dev/null)
do
echo "Shutting down localhost:${AdminPort}/shutdown ..."
timeout 5 curl -s localhost:${AdminPort}/shutdown
pids=$(ps -ef | grep "admin_port=${AdminPort}" | grep -v grep | tr -s ' ' |cut -d' ' -f2)
echo pids=${pids}
if [ "${pids}" != "" ]
then
kill -9 ${pids}
fi
done
}
# One-shot modes, then the default: (re)start and watchdog forever.
if [ "${Stop}" == "Y" ] ; then
kill_it
exit 0
fi
if [ "${Once}" == "Y" ] ; then
start_it
exit 0
fi
if [ "${Restart}" == "Y" ]
then
kill_it
fi
start_it
while true
do
check_it
echo "${Script} is checked" | /usr/bin/ts '[%Y-%m-%d %H:%M:%S]'
sleep ${SleepSec}
done

View File

@ -1,161 +0,0 @@
#!/bin/bash
# Supervisor for the COINBASE_AT md_recorder: same structure as the BNBSPOT
# runner -- start the recorder for a numbered instrument set, then watchdog
# it via the admin /ping endpoint, restarting on failure.
Script="${0} ${*}"
function usage {
echo "Usage: ${0} --inst_set=<setting #> [--restart] [--stop] [--once] [--sleep_sec=<num_seconds>]"
exit 1
}
# Hand-rolled long-option parsing.  NOTE(review): "shift" inside
# 'for arg in "$@"' does not alter the iteration list; it is a no-op here.
for arg in "${@}"
do
case ${arg} in
--restart)
Restart=Y
shift
;;
--stop)
Stop="Y"
shift
;;
--once)
Once="Y"
shift
;;
--inst_set=*)
InstrSet="${arg#*=}"
shift
;;
--sleep_sec=*)
SleepSec="${arg#*=}"
shift
;;
-*|--*)
usage
;;
*)
;;
esac
done
# Instrument-set number -> comma-separated instrument list.
declare -A InstrSetInstrs
InstrSetInstrs[1]=PAIR-BTC-USD,PAIR-ETH-USD,PAIR-LTC-USD,PAIR-XRP-USD
InstrSetInstrs[2]=PAIR-USDT-USD,PAIR-MATIC-USD
InstrSetInstrs[3]=PAIR-DOGE-USD,PAIR-AVAX-USD,
InstrSetInstrs[4]=PAIR-BCH-USD,PAIR-UNI-USD,PAIR-AAVE-USD,PAIR-LINK-USD,PAIR-SOL-USD
if [ "${InstrSet}" == "" ]
then
usage
fi
if [ "${SleepSec}" == "" ]
then
SleepSec=3
fi
Instruments=${InstrSetInstrs[${InstrSet}]}
if [ "${Instruments}" == "" ]
then
echo "Unrecognized instrument settings: ${InstrSet}"
usage
fi
# -------------------- S e t t i n g s
RootDir=/home/cvtt/prod
# ConfigFile="${RootDir}/config/md_recorder.cfg"
ConfigFile="http://cloud11.cvtt.vpn:6789/apps/md_recorder"
# Admin port is derived from the instrument-set number (7201..7204).
AdminPort="720${InstrSet}"
Exchange=COINBASE_AT
DbCredKey=TSDB_COINBS_1
Name=MD_RECORDER_COINBASE
# -------------------- S e t t i n g s
LogFile="${RootDir}/logs/$(date '+%Y%m%d_%H%M%S').${Name}_${InstrSet}.log"
source ${HOME}/.pyenv/python3.10-venv/bin/activate
export PYTHONPATH=${RootDir}
# Assemble the recorder command as a string; start_it runs it via eval with
# nohup and a trailing "&".
Cmd="nohup"
Cmd="${Cmd} python3"
Cmd="${Cmd} ${RootDir}/cvttpy/apps/md/md_recorder.py"
Cmd="${Cmd} --config=${ConfigFile}"
Cmd="${Cmd} --active_exchanges=${Exchange}"
Cmd="${Cmd} --instruments=${Instruments}"
Cmd="${Cmd} --admin_port=${AdminPort}"
Cmd="${Cmd} --log_level=INFO"
Cmd="${Cmd} --log_file=${LogFile}"
Cmd="${Cmd} --db_credentials_key=${DbCredKey}"
Cmd="${Cmd} &"
# Launch the recorder in the background and give it time to come up.
function start_it {
echo ${Cmd}
eval ${Cmd} &
sleep 10
}
# Restart the recorder when its admin /ping stops answering, killing any
# leftover process found by grepping ps for our admin_port argument.
function check_it {
if !(timeout 3 curl -s localhost:${AdminPort}/ping > /dev/null)
then
echo "${Script} doesn't respond. restarting..."
pids=$(ps -ef | grep "admin_port=${AdminPort}" | grep -v grep | tr -s ' ' |cut -d' ' -f2)
echo pids=${pids}
if [ "${pids}" != "" ]
then
kill -9 ${pids}
fi
start_it
fi
}
# Kill sibling supervisors for the same inst_set (${$} is our own PID),
# then shut the recorder down via its admin /shutdown endpoint,
# force-killing anything that keeps answering /ping.
function kill_it {
pids=$(ps -ef |grep "inst_set=${InstrSet}" |grep $(basename ${0}) |grep -v ${$} |grep -v grep |tr -s ' ' |cut -d' ' -f2)
if [ "${pids}" != "" ]
then
echo "Killing ${pids} ..."
kill -9 ${pids}
fi
while (timeout 3 curl -s localhost:${AdminPort}/ping > /dev/null)
do
echo "Shutting down localhost:${AdminPort}/shutdown ..."
timeout 5 curl -s localhost:${AdminPort}/shutdown
pids=$(ps -ef | grep "admin_port=${AdminPort}" | grep -v grep | tr -s ' ' |cut -d' ' -f2)
echo pids=${pids}
if [ "${pids}" != "" ]
then
kill -9 ${pids}
fi
done
}
# One-shot modes, then the default: (re)start and watchdog forever.
if [ "${Stop}" == "Y" ] ; then
kill_it
exit 0
fi
if [ "${Once}" == "Y" ] ; then
start_it
exit 0
fi
if [ "${Restart}" == "Y" ]
then
kill_it
fi
start_it
while true
do
check_it
echo "${Script} is checked" | /usr/bin/ts '[%Y-%m-%d %H:%M:%S]'
sleep ${SleepSec}
done

View File

@ -1,128 +0,0 @@
#!/bin/bash
# Supervisor for the config_server HTTP service: start it, then watchdog it
# via its /ping endpoint, restarting on failure.  --stop / --once /
# --restart are one-shot modes handled at the bottom.
Script="${0} ${*}"
function usage {
echo "Usage: ${0} [--restart] [--stop] [--once] [--sleep_sec=<num_seconds>]"
exit 1
}
# Hand-rolled long-option parsing.  NOTE(review): "shift" inside
# 'for arg in "$@"' does not alter the iteration list; it is a no-op here.
for arg in "${@}"
do
case ${arg} in
--restart)
Restart=Y
shift
;;
--stop)
Stop="Y"
shift
;;
--once)
Once="Y"
shift
;;
--sleep_sec=*)
SleepSec="${arg#*=}"
shift
;;
-*|--*)
usage
;;
*)
;;
esac
done
if [ "${SleepSec}" == "" ]
then
SleepSec=5
fi
# Service location and layout.
RootDir=/home/cvtt/prod
CfgSvcDir=${RootDir}/config_service
export PYTHONPATH="${RootDir}:${PYTHONPATH}"
Name=config_server
ServerPy="${RootDir}/cvttpy/apps/utils/config_server.py"
ServicePort=6789
ServiceDir=${CfgSvcDir}/data
LogDir=${RootDir}/logs
source ${HOME}/.pyenv/python3.10-venv/bin/activate
# NOTE(review): PYTHONPATH is exported twice (also above) -- harmless.
export PYTHONPATH="${RootDir}:${PYTHONPATH}"
# Assemble the server command; start_it backgrounds it via eval.
# NOTE(review): "%T" in the log-file name is presumably expanded by the
# server itself into a timestamp -- confirm in config_server.py.
Cmd="nohup"
Cmd="${Cmd} python3"
Cmd="${Cmd} ${ServerPy}"
Cmd="${Cmd} --port=${ServicePort}"
Cmd="${Cmd} --root=${ServiceDir}"
Cmd="${Cmd} --log_file=${LogDir}/%T.config_service.log"
# Launch the server in the background and give it time to come up.
function start_it {
echo ${Cmd}
eval ${Cmd} &
sleep 10
}
# Restart the server when /ping stops answering; stale processes are found
# by grepping ps for both the port and root arguments.
function check_it {
if !(timeout 3 curl -s localhost:${ServicePort}/ping > /dev/null)
then
echo "${Script} doesn't respond. restarting..."
pids=$(ps -ef | grep "port=${ServicePort}" | grep "root=${ServiceDir}" | grep -v grep | tr -s ' ' |cut -d' ' -f2)
echo pids=${pids}
if [ "${pids}" != "" ]
then
kill -9 ${pids}
fi
start_it
fi
}
# Kill sibling supervisors (${$} is our own PID), then shut the server down
# via its /__shutdown__ endpoint, force-killing anything still answering.
function kill_it {
pids=$(ps -ef |grep $(basename ${0}) |grep -v ${$} |grep -v grep |tr -s ' ' |cut -d' ' -f2)
if [ "${pids}" != "" ]
then
echo "Killing ${pids} ..."
kill -9 ${pids}
fi
while (timeout 3 curl -s localhost:${ServicePort}/ping > /dev/null)
do
echo "Shutting down localhost:${ServicePort}/__shutdown__ ..."
timeout 5 curl -s localhost:${ServicePort}/__shutdown__
pids=$(ps -ef | grep "port=${ServicePort}" | grep "root=${ServiceDir}" | grep -v grep | tr -s ' ' |cut -d' ' -f2)
echo pids=${pids}
if [ "${pids}" != "" ]
then
kill -9 ${pids}
fi
done
}
# One-shot modes, then the default: (re)start and watchdog forever.
if [ "${Stop}" == "Y" ] ; then
kill_it
exit 0
fi
if [ "${Once}" == "Y" ] ; then
start_it
exit 0
fi
if [ "${Restart}" == "Y" ]
then
kill_it
fi
start_it
while true
do
check_it
echo "${Script} is checked" | /usr/bin/ts '[%Y-%m-%d %H:%M:%S]'
sleep ${SleepSec}
done

View File

@ -1,144 +0,0 @@
#!/bin/bash
# Supervisor for executor_app: start it, then watchdog its admin /ping
# endpoint forever, restarting on failure.  --stop / --once / --restart
# are one-shot modes handled at the bottom.
Script="${0} ${*}"
function usage {
echo "Usage: ${0} [--restart] [--stop] [--once] [--sleep_sec=<num_seconds>]"
exit 1
}
# Hand-rolled long-option parsing.  NOTE(review): --inst_set is accepted
# here but InstrSet is never used below (AdminPort is fixed) -- confirm.
# "shift" inside 'for arg in "$@"' is a no-op on the iteration list.
for arg in "${@}"
do
case ${arg} in
--restart)
Restart=Y
shift
;;
--stop)
Stop="Y"
shift
;;
--once)
Once="Y"
shift
;;
--inst_set=*)
InstrSet="${arg#*=}"
shift
;;
--sleep_sec=*)
SleepSec="${arg#*=}"
shift
;;
-*|--*)
usage
;;
*)
;;
esac
done
if [ "${SleepSec}" == "" ]
then
SleepSec=3
fi
# -------------------- S e t t i n g s
ConfigServiceHost=cloud11.cvtt.vpn
ConfigServicePort=6789
RootDir=/home/cvtt/prod
ConfigFile="http://${ConfigServiceHost}:${ConfigServicePort}/apps/executor_app"
AdminPort="7210"
Exchange=COINBASE_AT
Name=EXECUTOR_APP
# -------------------- S e t t i n g s
LogFile="${RootDir}/logs/$(date '+%Y%m%d_%H%M%S').${Name}.log"
source ${HOME}/.pyenv/python3.10-venv/bin/activate
export PYTHONPATH=${RootDir}
# Assemble the executor command; start_it backgrounds it via eval with
# nohup and a trailing "&".
Cmd="nohup"
Cmd="${Cmd} python3"
Cmd="${Cmd} ${RootDir}/cvttpy/apps/executor_app.py"
Cmd="${Cmd} --config=${ConfigFile}"
Cmd="${Cmd} --active_exchanges=${Exchange}"
Cmd="${Cmd} --admin_port=${AdminPort}"
Cmd="${Cmd} --log_level=INFO"
Cmd="${Cmd} --log_file=${LogFile}"
Cmd="${Cmd} &"
# Launch the executor in the background and give it time to come up.
function start_it {
echo ${Cmd}
eval ${Cmd} &
sleep 10
}
# Restart the executor when its admin /ping stops answering, killing any
# leftover process found by grepping ps for our admin_port argument.
function check_it {
if !(timeout 3 curl -s localhost:${AdminPort}/ping > /dev/null)
then
echo "${Script} doesn't respond. restarting..."
pids=$(ps -ef | grep "admin_port=${AdminPort}" | grep -v grep | tr -s ' ' |cut -d' ' -f2)
echo pids=${pids}
if [ "${pids}" != "" ]
then
kill -9 ${pids}
fi
start_it
fi
}
# Kill sibling supervisors (${$} is our own PID), then shut the executor
# down via its admin /shutdown endpoint, force-killing leftovers; the
# sleep gives the app a chance to exit gracefully between probes.
function kill_it {
pids=$(ps -ef |grep $(basename ${0}) |grep -v ${$} |grep -v grep |tr -s ' ' |cut -d' ' -f2)
if [ "${pids}" != "" ]
then
echo "Killing ${pids} ..."
kill -9 ${pids}
fi
while (timeout 3 curl -s localhost:${AdminPort}/ping > /dev/null)
do
echo "Shutting down localhost:${AdminPort}/shutdown ..."
timeout 5 curl -s localhost:${AdminPort}/shutdown
sleep 3
pids=$(ps -ef | grep "admin_port=${AdminPort}" | grep -v grep | tr -s ' ' |cut -d' ' -f2)
echo pids=${pids}
if [ "${pids}" != "" ]
then
kill -9 ${pids}
fi
done
}
# One-shot modes, then the default: (re)start and watchdog forever.
if [ "${Stop}" == "Y" ] ; then
kill_it
exit 0
fi
if [ "${Once}" == "Y" ] ; then
start_it
exit 0
fi
if [ "${Restart}" == "Y" ]
then
kill_it
fi
start_it
while true
do
check_it
echo "${Script} is checked" | /usr/bin/ts '[%Y-%m-%d %H:%M:%S]'
sleep ${SleepSec}
done

View File

@ -0,0 +1,42 @@
#!/bin/bash
# One-off migration: reshuffle the alpaca_md equity archive from a
# <year>/<letter>/<symbol>/ layout into <letter>/<symbol>/<year>/.

# Silence pushd/popd directory-stack chatter.
pushd () {
  command pushd "$@" > /dev/null
}
popd () {
  command popd "$@" > /dev/null
}
SourceRoot=/works/cvtt/md_archive/equity/alpaca_md.2
TargetRoot=/works/cvtt/md_archive/equity/alpaca_md.NEW
# FIX: was "mkdir -p ${TargetDir}" -- TargetDir is never defined, so mkdir
# failed with "missing operand" and the target root was never pre-created.
mkdir -p "${TargetRoot}"
# unalias fails (and is pointless) in non-interactive shells; ignore errors.
unalias ls 2>/dev/null
# FIX: abort if the source root is missing; otherwise the mv loops below
# would run against whatever the current directory happens to be.
cd "${SourceRoot}" || exit 1
for year in $(ls -d 2*)
do
  echo $year
  pushd $year
  for letter in $(ls -d ?)
  do
    pushd ${letter}
    echo "${year}/${letter}"
    for symbol in $(ls -d ${letter}*)
    do
      pushd $symbol
      echo "${year}/${letter}/${symbol}/* --> ${letter}/${symbol}/${year}/"
      mkdir -p ${TargetRoot}/${letter}/${symbol}/${year}
      mv ${SourceRoot}/${year}/${letter}/${symbol}/* ${TargetRoot}/${letter}/${symbol}/${year}/
      popd
    done
    popd
  done
  popd
done

View File

@ -1,44 +0,0 @@
#!/bin/bash
# Nightly market-data archiver: dumps yesterday's TimescaleDB market data
# for the given schemas into compressed SQLite files under the per-host
# archive root.  Optional $1 is a comma-separated schema list.
Python=/home/cvtt/.pyenv/python3.10-venv/bin/python3.10
RootDir=/home/cvtt/prod
export PYTHONPATH=${RootDir}
# Archive root and read-only DB credentials key depend on where we run.
host=$(hostname)
if [ "${host}" == "cvttdata" ]
then
ArchiveRootDir=/home/cvtt/prod/archive/md_archive
CredKey=TSDB_MD_CVTTDATA_RO
elif [ "${host}" == "cloud21.cryptovaltrading.com" ]
then
ArchiveRootDir=/opt/store/cvtt/archive/md_archive
CredKey=TSDB_MD_CLD21_RO
else
echo "Unknown host ${host}. ${0} Aborted."
exit 1
fi
mkdir -p ${ArchiveRootDir}
yesterday=$(date -d "yesterday" +%Y%m%d)
# Schemas to archive; default covers all three exchanges.
Schemas=${1}
if [ "${Schemas}" == "" ]
then
Schemas="coinbase,bnbspot,bnbfut"
fi
echo "Schemas=${Schemas}"
# Assemble and run the archiver (SQLite output, gzip-compressed).
Cmd=
Cmd="${Python}"
Cmd="${Cmd} ${RootDir}/cvttpy/research/utils/archive_ts_md.py"
Cmd="${Cmd} --config=http://cloud16.cvtt.vpn:6789/apps/md_recorder"
Cmd="${Cmd} --db_credentials_key=${CredKey}"
Cmd="${Cmd} --date=${yesterday}"
Cmd="${Cmd} --schemas=${Schemas}"
Cmd="${Cmd} --root_dir=${ArchiveRootDir}"
Cmd="${Cmd} --format=SQLite"
Cmd="${Cmd} --compress"
echo ${Cmd}
eval ${Cmd}
echo "${0} ${*} Done."

View File

@ -0,0 +1,88 @@
#!/bin/bash
# Print the usage summary and exit non-zero.
usage() {
  echo -n "Usage: ${0}"
  echo -n " [-c <config (dflt: apps/cvtt_eqt_alpaca)>]"
  # FIX: typo "config_serverice" -> "config_service".
  echo -n " [-s <config_service (dflt: http://cloud16.cvtt.vpn:6789)>]"
  # FIX: getopts below accepts lowercase -n, not -N.
  echo -n " [-n <name (dflt: ALPACA-SNDBX)>]"
  echo
  exit 1
}
Python="${HOME}/.pyenv/python3.12-venv/bin/python3.12"
RootDir="${HOME}/prod"
# **** D E B U G
# NOTE(review): leftover debug override of RootDir -- confirm before deploy.
RootDir=/home/oleg/develop/cvtt2
# **** D E B U G
export PYTHONPATH=${RootDir}
StatusChannel=Status-CVTT
AlertChannel=Alerts-CVTT
Sender=${RootDir}/ops/utils/send_mmost.sh
ConfigService=http://cloud16.cvtt.vpn:6789
Config=apps/cvtt_eqt_alpaca
Name="ALPACA-SNDBX"
echo $0 $* | /usr/bin/ts '[%Y-%m-%d %H:%M:%S]'
# Parse -c/-s/-n overrides for config path, config server and name.
while getopts ":s:c:n:" opt; do
  case ${opt} in
    c ) Config=$OPTARG ;;
    s ) ConfigService=$OPTARG ;;
    n ) Name=$OPTARG ;;
    \? )
      echo "Invalid option: -$OPTARG" >&2
      usage
      ;;
    : )
      echo "Option -$OPTARG requires an argument." >&2
      usage
      ;;
  esac
done
# ----- For DEBUGGING
# Sender=cat
# StatusChannel=
tmpfile=$(mktemp)
# FIX: register the temp file so cleanup actually removes it; it was never
# added to TempFiles and leaked on every run.
TempFiles="${tmpfile}"
function cleanup {
  if [ "" != "${TempFiles}" ]; then
    rm -f ${TempFiles}
  fi
}
trap cleanup EXIT
# Run the services checker, capturing its output for the report below.
Cmd="${Python}"
Cmd="${Cmd} ${RootDir}/cvttpy/apps/utils/services_checker.py"
Cmd="${Cmd} --config=${ConfigService}/${Config}"
Cmd="${Cmd} --log_level=ERROR"
Cmd="${Cmd} --log_stdout"
# FIX: redirect stderr before the pipe so the checker's errors are captured
# too; "| tee file 2>&1" only redirected tee's own stderr.
Cmd="${Cmd} 2>&1 | tee ${tmpfile}"
echo ${Cmd}
eval ${Cmd}
cat ${tmpfile}
exit
# echo "## :card_file_box: STORAGE HEALTH CHECK" >> ${tmpfile}
# echo >> ${tmpfile}
# echo "| host | filesystem | usage |" >> ${tmpfile}
# echo "| --- | --- | --- |" >> ${tmpfile}
# cat ${tmpfile2} | sort -h -r | awk -F'%' '{printf "%s%%%s\n",$2,$3}' >> ${tmpfile}
# cat ${tmpfile} | ${Sender} ${StatusChannel}

View File

@ -0,0 +1,79 @@
#!/bin/bash
# Host availability checker: reads the cvtt_hosts config (hjson) from the
# config server, selects the hosts listed for a user, probes each host's
# SSH port with nc, and posts a Mattermost alert for unreachable hosts.
# Example config entry:
# "cryptovaltrading.com": {
# "cloud18": {
# "users": ["oleg"],
# "type": "cloud",
# "ssh_port": 7822,
# "to_check": "false"
# "timeout_sec": 5
# },
RootDir="${HOME}/prod"
AlertChannel=Alerts-CVTT
Sender=${RootDir}/ops/utils/send_mmost.sh
ConfigUrl=http://cloud16.cvtt.vpn:6789/admin/cvtt_hosts
# Fetch the hjson config and convert it to strict JSON for jq.
HOSTS_CONFIG=$(curl -s ${ConfigUrl} | ${HOME}/bin/hjson -j)
# Emit the top-level domain names, one per line.
get_domains() {
echo ${HOSTS_CONFIG} | jq -r '. | keys[]'
}
# Emit "<host>.<domain>" for every host in the domain whose "users" list
# contains the given user.  The jq program is assembled as a string and
# eval'd so the $domain/$usr arguments can be injected.
get_user_hosts() {
local User=${1}
local Domain=${2}
jdcmd="jq -r --arg domain \"$Domain\""
jdcmd="$jdcmd --arg usr \"$User\""
jdcmd="$jdcmd '.[\$domain]"
jdcmd="$jdcmd | to_entries[]"
jdcmd="$jdcmd | select(.value.users[]"
jdcmd="$jdcmd | contains(\$usr))"
jdcmd="$jdcmd | .key'"
echo ${HOSTS_CONFIG} | eval ${jdcmd} |sed "s/$/.$Domain/" # >&2
}
# Post a single alert message to the alert channel (no-op when empty).
function host_alert() {
alert=${1}
if [ "${alert}" != "" ]
then
echo -e "### :fire: HOST ALERT \n${alert}" | ${Sender} ${AlertChannel}
fi
}
User=oleg
Hosts=()
DEFAULT_SSH_PORT=22
DEFAULT_TIMEOUT=5
DEFAULT_TO_CHECK="true"
# Collect all domains, then all of this user's hosts across them.  The
# unquoted "$(...)" expansions rely on word-splitting the newline-separated
# jq output into array elements.
Domains=("${Domains[@]}" "$(get_domains)")
for Domain in ${Domains[@]} ; do
Hosts=("${Hosts[@]}" "$(get_user_hosts ${User} ${Domain})")
done
for Host in ${Hosts[@]} ; do
# Split "<host>.<domain>" back apart and read per-host overrides with
# jq's // alternative operator supplying the defaults.
host=$(echo $Host | cut -d'.' -f1)
Domain=$(echo $Host | cut -d'.' -f2-)
PortSSH=$(echo "$HOSTS_CONFIG" | jq -r --arg domain "$Domain" --arg host "$host" '.[$domain][$host].ssh_port // '"$DEFAULT_SSH_PORT"'')
Timeout=$(echo "$HOSTS_CONFIG" | jq -r --arg domain "$Domain" --arg host "$host" '.[$domain][$host].timeout_sec // '"$DEFAULT_TIMEOUT"'')
ToCheck=$(echo "$HOSTS_CONFIG" | jq -r --arg domain "$Domain" --arg host "$host" '.[$domain][$host].to_check // '"$DEFAULT_TO_CHECK"'')
# Accept several truthy spellings (uppercased via ${var^^}).
to_check="${ToCheck^^}"
if [ "${to_check}" == "TRUE" -o "${to_check}" == "YES" -o "${to_check}" == "Y" -o "${to_check}" == "T" ] ; then
echo "Checking host: $Host on port $PortSSH"
else
continue
fi
# Use nc to check if the specified port is open
if ! nc -z -w ${Timeout} "$Host" "$PortSSH"; then
echo "Host $Host is not available on port $PortSSH"
host_alert "Host $Host is not available on port $PortSSH"
fi
done

View File

@ -0,0 +1,58 @@
#!/bin/bash
# Service availability checker: reads the cvtt_services config (hjson) from
# the config server, probes each service's TCP port with nc, and posts a
# Mattermost alert for every service that does not answer.
# Example config entry:
# "Jenkins" {
# "to_check": "Yes",
# "protocol": "http",
# "host": "cvtt-build.cvtt.vpn",
# "port": 8080
# },
RootDir="${HOME}/prod"
# RootDir=/home/oleg/develop/cvtt2 ###### D E B U G
AlertChannel=Alerts-CVTT
Sender=${RootDir}/ops/utils/send_mmost.sh
ConfigUrl=http://cloud16.cvtt.vpn:6789/admin/cvtt_services
# Fetch the hjson config and convert it to strict JSON for jq.
SERVICES_CONFIG=$(curl -s ${ConfigUrl} | ${HOME}/bin/hjson -j)
echo $Sender
echo $AlertChannel
# Post one alert message to the alert channel (no-op for an empty message).
function service_alert() {
  alert="${1}"
  if [ "${alert}" != "" ]
  then
    # it may contain quotes
    alert=$(echo "${alert}" | sed 's/"/\\"/g')
    echo -e "### :boom: SERVICE ALERT\n${alert}" | ${Sender} ${AlertChannel}
  fi
}
# FIX: dropped the duplicated DEFAULT_TO_CHECK assignment and the unused
# User/Hosts variables left over from the host-checker script.
DEFAULT_TO_CHECK=Yes
mapfile -t SvcNames < <(echo ${SERVICES_CONFIG} | jq -r '. | keys[]')
for SvcName in "${SvcNames[@]}" ; do
  ToCheck=$(echo "$SERVICES_CONFIG" | jq -r --arg svcname "$SvcName" '.[$svcname].to_check // "Yes"')
  if [ "${ToCheck^^}" == "NO" ]; then
    continue
  fi
  Host=$(echo "$SERVICES_CONFIG" | jq -r --arg svcname "$SvcName" '.[$svcname].host ')
  Port=$(echo "$SERVICES_CONFIG" | jq -r --arg svcname "$SvcName" '.[$svcname].port ')
  echo "Checking \"$SvcName\" (${Host}:${Port})"
  # Use nc to check if the specified port is open
  if ! nc -z -w5 "$Host" "$Port"; then
    msg="Service \"${SvcName}\" (${Host}:${Port}) is not available"
    # FIX: quote ${msg} so its embedded quotes and spacing survive echo.
    echo "${msg}"
    service_alert "${msg}"
  fi
done

View File

@ -1,4 +1,4 @@
#!/bin/sh #!/bin/bash
echo $0 $* | /usr/bin/ts '[%Y-%m-%d %H:%M:%S]' echo $0 $* | /usr/bin/ts '[%Y-%m-%d %H:%M:%S]'
RootDir="${HOME}/prod" RootDir="${HOME}/prod"
@ -10,39 +10,42 @@ StatusChannel=Status-CVTT
AlertChannel=Alerts-CVTT AlertChannel=Alerts-CVTT
Sender=${RootDir}/ops/utils/send_mmost.sh Sender=${RootDir}/ops/utils/send_mmost.sh
# ----- For DEBUGGING
# Sender=cat # Sender=cat
# StatusChannel= # StatusChannel=
Hosts= get_user_hosts() {
Hosts="${Hosts} cloud11.cvtt.vpn" local User=${1}
Hosts="${Hosts} cloud14.cvtt.vpn" local Domain=${2}
Hosts="${Hosts} cloud15.cvtt.vpn"
Hosts="${Hosts} cloud16.cvtt.vpn"
Hosts="${Hosts} cloud17.cvtt.vpn"
Hosts="${Hosts} cloud21.cvtt.vpn"
Hosts="${Hosts} cloud22.cryptovaltrading.com"
Hosts="${Hosts} cvttdata.cvtt.vpn" Cmd="curl -s http://cloud16.cvtt.vpn:6789/admin/cvtt_hosts"
Hosts="${Hosts} cryptoval1.cvtt.vpn" Cmd+=" | ${HOME}/bin/hjson -j"
Hosts="${Hosts} cryptoval2.cvtt.vpn" Cmd+=" | jq -r"
Hosts="${Hosts} cryptoval3.cvtt.vpn" Cmd+=" --arg domain \"${Domain}\""
Cmd+=" --arg usr \"${User}\""
Cmd+=" '.[\$domain] | to_entries[] | select(.value.users[] | contains(\$usr)) | .key'"
Hosts="${Hosts} homestore.cvtt.vpn" Cmd+=" | sed 's/\$/.${Domain}/'"
Hosts="${Hosts} nsbackup.cvtt.vpn" eval ${Cmd}
Hosts="${Hosts} dtvmhost.cvtt.vpn" }
Hosts="${Hosts} ops-server.cvtt.vpn"
Hosts="${Hosts} cvtt-prod-01.cvtt.vpn" function cleanup {
Hosts="${Hosts} cvtt-prod-02.cvtt.vpn" echo Cleaing up temporary files: ${TempFiles}
if [ "" != "${TempFiles}" ]; then
rm -f ${TempFiles}
fi
}
trap cleanup EXIT
function space_alert() { function space_alert() {
ALERT_USAGE=75% ALERT_USAGE=75%
for ln in "${Measurements[@]}" for metric in "${Metrics[@]}"
do do
IFS=$' '; args=($ln); unset IFS IFS=$' '; args=($metric); unset IFS
host=${args[0]} host=${args[0]}
fs=${args[1]} fs=${args[1]}
space_used=${args[2]} space_used=${args[2]}
if [ ${space_used%?} -ge ${ALERT_USAGE%?} ]; then if [ ${space_used%?} -ge ${ALERT_USAGE%?} ]; then
echo ":red_circle: Filesystem **${host}:${fs}** is using **${space_used}** :red_circle:" echo ":red_circle: Filesystem **${host}:${fs}** is using **${space_used}** :red_circle:"
fi fi
@ -50,28 +53,41 @@ function space_alert() {
} }
function storage_check() { function storage_check() {
local Hosts=("${@}")
result_lines=() result_lines=()
for host in ${Hosts} declare -a SingleMeas
for host in ${Hosts[@]}
do do
echo "storage_check host=${host}" >&2
if [[ "${host}" == *"cryptovaltrading.com" ]]; then if [[ "${host}" == *"cryptovaltrading.com" ]]; then
port=7822 port=7822
else else
port=22 port=22
fi fi
Cmd="ssh -p ${port} $host" Cmd="ssh -p ${port}"
Cmd="${Cmd} eval \"df -hTl" Cmd+=" -o StrictHostKeyChecking=no"
Cmd="${Cmd} -x squashfs" Cmd+=" -o UserKnownHostsFile=/dev/null"
Cmd="${Cmd} | grep -v tmpfs" Cmd+=" $host"
Cmd="${Cmd} | grep -v Filesystem\"" Cmd+=" eval \"df -hTl"
Cmd+=" -x squashfs"
Cmd+=" -x tmpfs"
Cmd+=" -x vfat"
Cmd+=" -x devtmpfs"
Cmd+=" | grep -v Filesystem\""
IFS=$'\n' ; lines=$(eval ${Cmd}) IFS=$'\n' ; lines=$(eval ${Cmd})
for ln in $lines for ln in $lines
do do
IFS=$' '; args=($ln); unset IFS IFS=$' '; args=($ln); unset IFS
res="${args[5]}| **${host}** | ***${args[6]}*** | *${args[5]}* |" res="${args[5]}| **${host}** | ***${args[6]}*** | *${args[5]}* |"
result_lines+=("$res") result_lines+=("$res")
fs=${args[6]}
used=${args[5]}
Metrics+=("$host $fs $used")
done done
unset IFS unset IFS
done done
@ -81,9 +97,22 @@ function storage_check() {
done done
} }
User=oleg
Metrics=()
TempFiles=
Hosts=()
for Domain in cvtt.vpn cryptovaltrading.com ; do
Hosts=("${Hosts[@]}" "$(get_user_hosts ${User} ${Domain})")
done
tmpfile=$(mktemp) tmpfile=$(mktemp)
TempFiles="${TempFiles} ${tmpfile}"
tmpfile2=$(mktemp) tmpfile2=$(mktemp)
storage_check > ${tmpfile2} TempFiles="${TempFiles} ${tmpfile2}"
storage_check "${Hosts[@]}" > ${tmpfile2}
echo "## :card_file_box: STORAGE HEALTH CHECK" >> ${tmpfile} echo "## :card_file_box: STORAGE HEALTH CHECK" >> ${tmpfile}
echo >> ${tmpfile} echo >> ${tmpfile}
@ -91,17 +120,15 @@ echo "| host | filesystem | usage |" >> ${tmpfile}
echo "| --- | --- | --- |" >> ${tmpfile} echo "| --- | --- | --- |" >> ${tmpfile}
cat ${tmpfile2} | sort -h -r | awk -F'%' '{printf "%s%%%s\n",$2,$3}' >> ${tmpfile} cat ${tmpfile2} | sort -h -r | awk -F'%' '{printf "%s%%%s\n",$2,$3}' >> ${tmpfile}
cat ${tmpfile} | ${Sender} ${StatusChannel} cat ${tmpfile} | ${Sender} ${StatusChannel}
rm ${tmpfile} ${tmpfile2}
Measurements=()
tmpfile=$(mktemp) tmpfile=$(mktemp)
TempFiles="${TempFiles} ${tmpfile}"
space_alert > ${tmpfile} space_alert > ${tmpfile}
cat ${tmpfile}
if [ -s ${tmpfile} ] if [ -s ${tmpfile} ]
then then
(echo "### :card_file_box: STORAGE ALERTS" && cat ${tmpfile}) | ${Sender} ${AlertChannel} (echo "### :card_file_box: STORAGE ALERTS" && cat ${tmpfile}) | ${Sender} ${AlertChannel}
else else
echo File ${tmpfile} is empty echo "No Storage Alerts"
fi fi
rm ${tmpfile}

View File

@ -4,7 +4,7 @@ Start=${1}
NumJobs=${2} NumJobs=${2}
InstListFile=${3} InstListFile=${3}
export CalendarURL=http://cloud16.cvtt.vpn:8000/api/v1/markets/hours?mic=XNYS export CalendarURL=https://trading-calendar.cvtt.net/api/v1/markets/hours?mic=XNYS
is_business_day() { is_business_day() {
dt=${1} dt=${1}
@ -45,19 +45,12 @@ echo "Start=${Start} End=${End} NumJobs=${NumJobs}"
# export PyScript=/home/cvtt/prod/cvttpy/exchanges/alpaca/hist_mkt_data.py # export PyScript=/home/cvtt/prod/cvttpy/exchanges/alpaca/hist_mkt_data.py
export PYTHONPATH=/home/cvtt/prod export PYTHONPATH=/home/cvtt/prod
export Python=/home/cvtt/.pyenv/python3.10-venv/bin/python3 export Python=/home/cvtt/.pyenv/python3.12-venv/bin/python3
export Config=http://cloud16.cvtt.vpn:6789/apps/minimal_md export Config=http://cloud16.cvtt.vpn:6789/apps/minimal_md
export PyScript=/home/cvtt/prod/cvttpy/exchanges/alpaca/hist_md/hist_md_bars.py export PyScript=/home/cvtt/prod/cvttpy/exchanges/alpaca/hist_md/hist_md_bars.py
export OutputDir=/home/cvtt/host_drive/alpaca_md # Local
export LogDir=/home/cvtt/prod/logs/alpaca_md
# ----- T E M P export OutputDir=/home/cvtt/prod/archive/md_archive/equity/alpaca_md # Local
# export PYTHONPATH=/home/oleg/develop/cvtt2 export LogDir=/home/cvtt/prod/logs/alpaca_md
# export Python=/home/oleg/.pyenv/python3.10-venv/bin/python3
# export Config=http://cloud16.cvtt.vpn:6789/apps/minimal_md
# export PyScript=/home/oleg/develop/cvtt2/cvttpy/exchanges/alpaca/hist_md/hist_md_bars.py
# export OutputDir=/home/oleg/develop/cvtt2/tmp # Local
# export LogDir=/home/oleg/develop/cvtt2/tmp
mkdir -p ${LogDir} mkdir -p ${LogDir}
@ -112,3 +105,10 @@ for ((ii=0; ii <${#Instruments[@]}; ii+=slice_size)); do
parallel -j ${NumJobs} run_proc {} ${Start} ${End} ::: "${InstSlice[@]}" parallel -j ${NumJobs} run_proc {} ${Start} ${End} ::: "${InstSlice[@]}"
done done
echo "Compressing"
for file in $(find ${OutputDir} -type f -name '*db' -print )
do
echo "Compressing ${file}"
gzip ${file}
done

View File

@ -0,0 +1,18 @@
#!/bin/bash
# Run the relative-liquidity calculation loader against the production DB.
export PYTHONPATH=${HOME}/prod

# Interpreter, entry point and fixed arguments.
Python=${HOME}/.pyenv/python3.12-venv/bin/python3
Script=${HOME}/prod/cvttpy/exchanges/alpaca/hist_md/rl_calc_loader.py
DbFile=${HOME}/prod/data/rel_liquidity.db
Config=http://cloud16.cvtt.vpn:6789/apps/minimal_md_eqt

# Assemble the command line, echo it for the log, then execute it.
Cmd="${Python} ${Script}"
Cmd+=" --config=${Config}"
Cmd+=" --db_file=${DbFile}"
echo ${Cmd}
eval ${Cmd}
echo Done ${0} ${*}

View File

@ -3,7 +3,7 @@
is_business_day() { is_business_day() {
dt=${1} dt=${1}
CalendarURL=http://cloud16.cvtt.vpn:8000/api/v1/markets/hours?mic=XNYS CalendarURL=https://trading-calendar.cvtt.net/api/v1/markets/hours?mic=XNYS
open_time=$(curl -s "${CalendarURL}&start=${dt}&end=${dt}" | jq '.[] | .open_time') open_time=$(curl -s "${CalendarURL}&start=${dt}&end=${dt}" | jq '.[] | .open_time')
if [ -n "${open_time}" ]; then if [ -n "${open_time}" ]; then
return 0 return 0

View File

@ -0,0 +1,60 @@
#!/usr/bin/env bash
# Daily crypto market-data health check: lists the newest archive files on
# each archive host (and the research copy on gpushnik) over ssh, and posts
# the combined listing to the MD-Status Mattermost channel.
usage() {
echo -n "Usage: ${0}"
echo
exit 1
}
RootDir="${HOME}/prod"
# RootDir=/home/oleg/develop/cvtt2 ###### D E B U G
export PYTHONPATH=${RootDir}
StatusChannel="MD-Status"
Sender=${RootDir}/ops/utils/send_mmost.sh
# ----- For DEBUGGING
# Sender=cat
# StatusChannel=
# Run one "ls | tail" per host/source pair for the current year/month and
# echo the output; the caller captures everything into the report.
run_checklist() {
yr=$(date -d 'yesterday' '+%Y')
mn=$(date -d 'yesterday' '+%m')
declare -A Commands
Commands=(
["hs01:cloud21"]="ssh cvtt@hs01.cvtt.vpn ls -l /works/cvtt/md_archive/crypto/cloud21/${yr}/${mn} | tail -5"
["hs01:cloud28"]="ssh cvtt@hs01.cvtt.vpn ls -l /works/cvtt/md_archive/crypto/cloud28/${yr}/${mn} | tail -5"
["hs01:sim"]="ssh cvtt@hs01.cvtt.vpn ls -l /works/cvtt/md_archive/crypto/sim/ | tail -5"
["cloud21:cloud21"]="ssh cvtt@cloud21.cvtt.vpn ls -l /works/cvtt/md_archive/crypto/cloud21/${yr}/${mn} | tail -5"
["cloud21:cloud28"]="ssh cvtt@cloud21.cvtt.vpn ls -l /works/cvtt/md_archive/crypto/cloud28/${yr}/${mn} | tail -5"
["cloud21:sim"]="ssh cvtt@cloud21.cvtt.vpn ls -l /works/cvtt/md_archive/crypto/sim | tail -5"
["gpushnik"]="ssh oleg@gpushnik.cvtt.vpn 'ls -l /opt/jupyter_gpu/data/crypto_md | tail -10'"
)
# Deterministic (sorted) ordering of the report sections.
for name in $(printf "%s\n" "${!Commands[@]}" | sort); do
Cmd=${Commands[${name}]}
echo "------- ${name}"
echo ${Cmd}
eval ${Cmd}
done
}
# Collect the report into a temp file (removed on exit) and post it inside
# a Markdown code fence.
tmpfile=$(mktemp)
function cleanup {
cd ${HOME}
rm -f ${tmpfile}
}
trap cleanup EXIT
echo "## :hearts: CRYPTO MD HEALTH CHECK (cvtt-md.cvtt.vpn)" >> ${tmpfile}
echo '```' >> ${tmpfile}
run_checklist >> ${tmpfile}
echo '```' >> ${tmpfile}
cat ${tmpfile} | ${Sender} ${StatusChannel}
cat $tmpfile

View File

@ -0,0 +1,73 @@
#!/usr/bin/env bash
# Show how to invoke this script, then exit with failure status 1.
usage() {
  echo "Usage: ${0}"
  exit 1
}
# Production root: all code and ops utilities live under here.
RootDir="${HOME}/prod"
export PYTHONPATH=${RootDir}
# Mattermost channel that receives the health-check report.
StatusChannel="MD-Status"
# Helper that posts stdin as a message to the given Mattermost channel.
Sender=${RootDir}/ops/utils/send_mmost.sh
# ----- For DEBUGGING
# RootDir=/home/oleg/develop/cvtt2 ###### D E B U G
# Sender=cat #### D E B U G
# StatusChannel= #### D E B U G
# Run each equity-MD archive health check and print a labelled section per
# check. For each archive host, list the spot-check symbols' dated archives
# plus the sim directory; finally list the GPU research box copy.
run_checklist() {
  local year symbols host sym key cmd
  year=$(date -d 'yesterday' '+%Y')
  symbols="A/AAPL N/NVDA M/META"
  declare -A checks
  for host in hs01 cloud21; do
    checks[${host}]=""
    for sym in ${symbols}; do
      checks[${host}]+="ssh cvtt@${host}.cvtt.vpn ls -l /works/cvtt/md_archive/equity/alpaca_md/${year}/${sym} | tail -3; "
    done
    checks[${host}]+="echo"
    checks[${host}:sim]="ssh cvtt@${host}.cvtt.vpn ls -l /works/cvtt/md_archive/equity/alpaca_md/sim | tail -5"
  done
  checks[gpushnik]="ssh oleg@gpushnik.cvtt.vpn 'ls -l /opt/jupyter_gpu/data/eqty_md | tail -10'"
  # Iterate in sorted key order so report sections are stable run-to-run.
  while IFS= read -r key; do
    echo "------- ${key}"
    cmd=${checks[${key}]}
    echo ${cmd}
    eval ${cmd}
  done < <(printf '%s\n' "${!checks[@]}" | sort)
}
# Build the report in a temp file (mirroring progress to the console via
# tee), then post the file to Mattermost.
#
# Fix: the original created TWO temp files with duplicated cleanup/trap
# definitions; cleanup was redefined to point at the second tmpfile, so the
# first one was leaked on every run. Create a single temp file.
tmpfile=$(mktemp)
function cleanup {
cd "${HOME}"
rm -f "${tmpfile}"
}
trap cleanup EXIT
echo "## :fire: EQUITY MD HEALTH CHECK (cryptoval4)" >> "${tmpfile}"
echo '```' | tee -a "${tmpfile}"
run_checklist | tee -a "${tmpfile}"
echo '```' | tee -a "${tmpfile}"
echo "Sending result to ${Sender} ${StatusChannel}"
# Sender/StatusChannel intentionally unquoted: an empty debug channel must
# disappear from the argument list rather than be passed as "".
cat "${tmpfile}" | ${Sender} ${StatusChannel}

View File

@ -0,0 +1,14 @@
#!/bin/bash
# Run the exchange trading-stats collector against the standard set of
# crypto exchanges; results go to the prod db, logs to the prod logs dir.
export PYTHONPATH=/home/cvtt/prod
Cmd=(
  /home/cvtt/.pyenv/python3.12-venv/bin/python3
  /home/cvtt/prod/cvttpy/apps/research/exchange_trading_stats.py
  --config=http://cloud16.cvtt.vpn:6789/apps/tests/listen_market_data
  --active_exchanges=OKX,GEMINI,BITSTAMP,COINBASE_AT,BNBSPOT,KRAKEN
  --instruments=OKX:PAIR-BTC-USDT,GEMINI:PAIR-BTC-USD,BITSTAMP:PAIR-BTC-USD,COINBASE:PAIR-BTC-USD,BNBSPOT:PAIR-BTC-USDT,KRAKEN:PAIR-BTC-USD
  --db_file=/home/cvtt/prod/data/exchange_trading_stats.db
  --log_file=/home/cvtt/prod/logs/%%T.EXCHANGE_TRADING_STATS.log
)
echo "${Cmd[*]}"
"${Cmd[@]}"

View File

@ -0,0 +1,33 @@
#!/bin/bash
# Collect all "<letter>" files: stage one letter's worth of equity-history
# text files locally, convert them to db files, and record the list of
# produced db files. Steps (b) and the cleanup below are intentionally
# disabled by the early "exit" — kept for the manual follow-up run.
letter=${1}
if [ "${letter}" == "" ]; then
echo "Usage $0 <letter>"
exit 1
fi
# copy text data to local location
rsync -ahv /home/cvtt/host_drive/eqt_hist_md/txt/${letter} /home/cvtt/tmp/txt/
# create db files
PYTHONPATH=/home/cvtt/prod /home/cvtt/.pyenv/python3.12-venv/bin/python3 /home/cvtt/prod/cvttpy/trading/mkt_data/ad_hoc/eqt_md_to_db.py /home/cvtt/tmp/txt/${letter} /home/cvtt/tmp/db
rc=$?
if [ "${rc}" != "0" ] ; then
# Bug fix: a bare "exit" here returned the status of the preceding [ test
# (always 0), masking converter failures from cron/wrappers. Propagate the
# real exit status instead.
exit ${rc}
fi
# Move all files to host drive
# a) create file list
(cd /home/cvtt/tmp/db/ && find . -name '*db' -print | grep "/${letter}/") > /home/cvtt/tmp/tran_db/${letter}_files
echo ${letter} is done
exit
# b) rsync files to host drive
rsync -ahv --remove-source-files --files-from=/home/cvtt/tmp/tran_db/${letter}_files /home/cvtt/tmp/db/ cvtt@my-vm-host:/localdisk/cvtt/eqt_hist_md/db/
# Clean directories
( cd /home/cvtt/tmp/db && (for d in $(find . -name $letter -type d -print); do echo $d ; done) | grep -v /$letter/$letter | xargs rm -rf) && rm -rf /home/cvtt/tmp/txt/${letter}

View File

@ -0,0 +1,14 @@
#!/bin/bash
# Mirror the local MD archive to each remote archive host via rsync.
Source=/home/cvtt/prod/archive/md_archive/
Targets=(
  cvtt@hs01.cvtt.vpn:/works/cvtt/md_archive/
  cvtt@cloud21.cvtt.vpn:/opt/store/cvtt/md_archive/
)
for tgt in "${Targets[@]}"; do
  echo "/usr/bin/rsync -ahv ${Source} ${tgt}"
  /usr/bin/rsync -ahv "${Source}" "${tgt}"
done

View File

@ -1,5 +1,17 @@
#!/bin/bash #!/bin/bash
# ----- Settings
LocalSoftwareDir=${HOME}/software/cvtt2
ProdDir=$(pwd)
ReleaseHosts=("cloud21.cvtt.vpn")
ReleasePorts=("22")
ReleaseUsers=("cvttdist")
ReleaseDir=("/home/cvttdist/software/cvtt2")
# ----- Settings
function usage() { function usage() {
echo "Usage: ${0} <project> <version>" echo "Usage: ${0} <project> <version>"
exit 1 exit 1
@ -18,17 +30,6 @@ then
Version=latest Version=latest
fi fi
# ----- Settings
LocalSoftwareDir=${HOME}/software/cvtt2
ProdDir=${HOME}/prod
ReleaseHosts=("cloud21.cvtt.vpn")
ReleasePorts=("22")
ReleaseUsers=("cvttdist")
ReleaseDir=("/home/cvttdist/software/cvtt2")
# ----- Settings
function rsync_load_version() { function rsync_load_version() {
for idx in "${!ReleaseHosts[@]}" for idx in "${!ReleaseHosts[@]}"
do do
@ -46,7 +47,7 @@ function rsync_load_version() {
if ssh -q -p ${port} ${user}@${host} "test -d ${rel_dir}/${Project}/${Version}" if ssh -q -p ${port} ${user}@${host} "test -d ${rel_dir}/${Project}/${Version}"
then then
echo "Directory found..." echo "Directory found..."
rsync_cmd="rsync -ahvv -e \"ssh -p ${port}\"" rsync_cmd="rsync -ahv -e \"ssh -p ${port}\""
rsync_cmd="${rsync_cmd} ${user}@${host}:${rel_dir}/${Project}/${Version}" rsync_cmd="${rsync_cmd} ${user}@${host}:${rel_dir}/${Project}/${Version}"
rsync_cmd="${rsync_cmd} ${LocalSoftwareDir}/${Project}/" rsync_cmd="${rsync_cmd} ${LocalSoftwareDir}/${Project}/"
echo ${rsync_cmd} echo ${rsync_cmd}
@ -68,6 +69,7 @@ function rsync_load_version() {
} }
mkdir -p ${LocalSoftwareDir} mkdir -p ${LocalSoftwareDir}
mkdir -p ${ProdDir}
# exists and not empty # exists and not empty
rsync_load_version rsync_load_version
@ -75,9 +77,9 @@ rsync_load_version
Location="${LocalSoftwareDir}/${Project}/${Version}/${Project}" Location="${LocalSoftwareDir}/${Project}/${Version}/${Project}"
Cmd="cd ${ProdDir}" Cmd="cd ${ProdDir}"
Cmd="${Cmd} && rm -rf ${Project}" Cmd+=" && rm -rf ${Project}"
Cmd="${Cmd} && ln -snf ${Location} ${Project}" Cmd+=" && ln -snf ${Location} ${Project}"
echo ${Cmd} && eval ${Cmd} echo ${Cmd} && eval ${Cmd}
echo "Done: $0 $*" echo "Done: $0 ${*}"

View File

@ -6,7 +6,7 @@ SUBDIRS += configs
FILES= FILES=
FILES += release_version.txt FILES += VERSION
all: install all: install

View File

@ -1,36 +1,63 @@
#!/bin/bash #!/bin/bash
function usage { function usage {
echo "Usage: ${0} <log directory> [days (default 2)]" echo -n "Usage: ${0}"
echo -n " -L <log directory>"
echo -n " [ -A <archive_logs_dir> (default /works/archive/logs)]"
echo -n " [-D <older than time criteria> (default: '2 days ago')]"
echo
exit 1 exit 1
} }
echo Starting $0 $* echo Starting $0 $*
LogDir=${1} # ---- D e f a u l t s
LogArchiveDir=/works/archive/logs
DateCriteria="2 days ago"
# ---- D e f a u l t s
# ---------------- cmdline
while getopts "A:L:D:" opt; do
case ${opt} in
A )
LogArchiveDir=$OPTARG
;;
L )
LogDir=$OPTARG
;;
D )
DateCriteria=$OPTARG
;;
\? )
echo "Invalid option: -$OPTARG" >&2
usage
;;
: )
echo "Option -$OPTARG requires an argument." >&2
usage
;;
esac
done
# ---------------- cmdline
if [ "${LogDir}" == "" ] if [ "${LogDir}" == "" ]
then then
usage usage
fi fi
Days=${2} Cmd="mkdir -p ${LogArchiveDir}"
if [ "${Days}" == "" ] echo ${Cmd} && eval ${Cmd}
then
Days=2
fi
DateCriteria="${Days} days ago" echo "Looking for log files older than '${DateCriteria}' in ${LogDir}"
Oldest=$(date -d "${DateCriteria}" '+%Y-%m-%d') Oldest=$(date -d "${DateCriteria}" '+%Y-%m-%d %H:%M:%S')
echo "Looking for log files older than ${DateCriteria} in ${LogDir}" Cmd="find ${LogDir}/ '(' -name '*.log' -o -name '*.log.*' ')' -type f -not -newermt \"${Oldest}\""
echo $Cmd
LogArchiveDir=${HOME}/prod/archive/logs files=$(eval ${Cmd})
mkdir -p ${LogArchiveDir}
echo "find ${LogDir}/ '(' -name '*.log' -o -name '*.log.*' ')' -type f -not -newermt ${Oldest})"
files=$(find ${LogDir}/ '(' -name '*.log' -o -name '*.log.*' ')' -type f -not -newermt ${Oldest})
if [ "$files" == "" ] if [ "$files" == "" ]
then then
echo "No files found older than ${Oldest} in ${LogDir}" echo "No files found older than ${Oldest} in ${LogDir}"

49
utils/git/create_remote_repo.sh Executable file
View File

@ -0,0 +1,49 @@
#!/bin/bash
# ---------------------
# Create a user "git" owned bare remote repository.
#
# must run on remote server
# change GitTopDir, HostName and RemoteName when deploying elsewhere
# ---------------------
# R e s u l t E x a m p l e:
# sudo bash -c 'mkdir /opt/store/git/cvtt2/tests.git && cd /opt/store/git/cvtt2/tests.git && git init --bare && chown -R git:git /opt/store/git/cvtt2/tests.git'
# ---------------------
#
# ----- S e t t i n g s
# Bug fix: was /ops/store/git — a typo; the worked example above and the
# rest of the ops scripts use /opt/store/git.
GitTopDir=/opt/store/git
HostName=cloud21.cvtt.vpn
RemoteName=origin
# ----- S e t t i n g s
usage() {
echo "Usage: ${0} <repo_name> [<repo_parent_dir> *${GitTopDir}/cvtt2]"
exit 1
}
RepoName=${1}
if [ "" == "${RepoName}" ] ; then
usage
fi
RepoParentDir=${GitTopDir}/cvtt2
if [ "${2}" != "" ] ; then
RepoParentDir=${2}
fi
RepoDir="${RepoParentDir}/${RepoName}.git"
# Build mkdir+init+chown as one sudo shell command so a single elevation
# covers all three steps.
Cmd="sudo bash -c 'mkdir ${RepoDir}"
Cmd="${Cmd} && cd ${RepoDir}"
Cmd="${Cmd} && git init --bare "
Cmd="${Cmd} && chown -R git:git ${RepoDir}'"
echo ${Cmd}
eval ${Cmd} || exit 1
echo "====================="
echo "Repository is created. Use this command to add it:"
echo " git remote add ${RemoteName} git@${HostName}:${RepoDir}"
echo "====================="

View File

@ -1,40 +1,57 @@
#!/bin/bash #!/bin/bash
# FOR hosts with limited disk space - move to storage server
# FOR cloud hosts with limited disk space - move to storage server
function usage { function usage {
echo "Usage: ${0} <host> <from_dir> <days>" echo -n "Usage: ${0}"
echo -n " -H <host_label>"
echo -n " [ -A <archive_dir> (default /works/archive)]"
echo -n " [-D <older than time criteria> (default: '2 days ago')]"
echo
exit 1 exit 1
} }
echo Starting $0 $* echo Starting $0 $*
# ---- D e f a u l t s
ArchiveDir=/works/archive
DateCriteria="2 days ago"
FromHost=$(hostname -s) FromHost=$(hostname -s)
# ---- D e f a u l t s
# ---------------- cmdline
while getopts "A:H:D:" opt; do
case ${opt} in
A )
ArchiveDir=$OPTARG
;;
H )
FromHost=$OPTARG
;;
D )
DateCriteria=$OPTARG
;;
\? )
echo "Invalid option: -$OPTARG" >&2
usage
;;
: )
echo "Option -$OPTARG requires an argument." >&2
usage
;;
esac
done
# ---------------- cmdline
if [ "${FromHost}" == "" ] if [ "${FromHost}" == "" ]
then then
usage usage
fi fi
ArchiveDir=${2}
if [ "${ArchiveDir}" == "" ]
then
usage
fi
Days=${3}
if [ "${Days}" == "" ]
then
Days=2
fi
DateCriteria="${Days} days ago"
TargetHost=cloud21.cvtt.vpn TargetHost=cloud21.cvtt.vpn
TargetRootDir=/opt/store/cvtt/archive TargetRootDir=/opt/store/cvtt/archive
Oldest=$(date -d "${DateCriteria}" '+%Y-%m-%d') Oldest=$(date -d "${DateCriteria}" '+%Y-%m-%d %H:%M:%S')
Now=$(date '+%Y%m%d_%H%M%S')
echo "Looking for log files older than ${DateCriteria} in ${ArchiveDir}" echo "Looking for log files older than ${DateCriteria} in ${ArchiveDir}"
Cmd="find ${ArchiveDir}/" Cmd="find ${ArchiveDir}/"
@ -45,7 +62,7 @@ Cmd="${Cmd} -o -name '*.logs.*'"
Cmd="${Cmd} -o -name '*.tgz'" Cmd="${Cmd} -o -name '*.tgz'"
Cmd="${Cmd} ')'" Cmd="${Cmd} ')'"
Cmd="${Cmd} -type f" Cmd="${Cmd} -type f"
Cmd="${Cmd} -not -newermt ${Oldest}" Cmd="${Cmd} -not -newermt \"${Oldest}\""
echo ${Cmd} echo ${Cmd}
files=$(eval ${Cmd}) files=$(eval ${Cmd})

58
utils/prune_data.sh Executable file
View File

@ -0,0 +1,58 @@
#!/bin/bash
# Prune files older than <num_days> from <source_dir>, then delete any
# directories left empty. Shows the doomed file list and disk usage (duf)
# before and after.
Src=${1}
Days=${2}
if [ -z "$Days" ] || [ -z "$Src" ]; then
echo "Usage: $0 <source_dir> <num_days>"
exit 1
fi
# Add / if Src does not have it, so find follows Src when it is a symlink.
if [ "${Src: -1}" != "/" ]; then
Src="${Src}/"
fi
declare -A Settings=()
Settings[Src]=${Src}
Settings[PruneDate]=$(date -d "${Days} days ago" '+%Y-%m-%d')
src=${Settings[Src]}
prune_date=${Settings[PruneDate]}
echo "Finding files older than ${prune_date} in ${Src} ..."
Cmd="find ${src} -type f ! -newermt \"${prune_date}\""
echo ${Cmd}
files=($(eval ${Cmd}))
total_files=${#files[*]}
if [[ ${total_files} == 0 ]]
then
echo "No files found to be pruned. Bye..."
exit 0
fi
echo Before Pruning....
duf ${src}
echo "The following files will be removed:"
echo "===================================="
# Bug fix: "$files" expands to the first array element only, so the report
# listed a single file while find/-delete below removed them all. Iterate
# the whole array.
for f in "${files[@]}" ; do ls -l "$f"; done
echo "===================================="
echo "Total files to be pruned: ${total_files}"
echo "Removing files..."
# Reuse the exact find expression above, adding -print -delete.
Cmd="${Cmd} -print -delete"
echo ${Cmd}
eval ${Cmd}
echo "Removing empty directories..."
Cmd="find ${src} -type d -empty -print -delete"
echo ${Cmd}
eval ${Cmd}
echo After Pruning....
duf ${src}
echo $0 Done