Skip to content
Snippets Groups Projects
Commit b54ec470 authored by Bensong Liu's avatar Bensong Liu
Browse files

Initial commit

parents
No related branches found
No related tags found
No related merge requests found
*.log
# dataset
There are 3 datasets to be collected: a long-term dataset (daily), short-term bids (every second), and a trade log (every trade, usually 5/second).
# training
Create a model which considers both the long-term dataset and the short-term datasets, and makes both a long-term prediction and a short-term prediction.
# trading
.
function dolog
    # Append one BTCUSDT order-book depth snapshot to out.log.
    # Skip entirely while the rate-limit marker file exists.
    test -f /tmp/rate_limit ; and echo 'RATE LIMIT' >> out.log ; and return
    # curl stdout (the JSON body) is appended to out.log; stderr (-v trace) is
    # piped into grep, which captures any HTTP 4xx status line into $err.
    set err (curl 'https://api.binance.com/api/v3/depth?symbol=BTCUSDT' >> out.log -v 2>| grep 'HTTP/[^ ]* 4')
    echo '' >> out.log
    # FIX: use `test -n "$err"`. The original `test $err != ''` expands to zero
    # words when $err is empty (fish drops unquoted empty variables), leaving
    # `test != ''`, which is a test(1) usage error rather than a false result.
    test -n "$err" ; and begin
        # Saw a 4xx (rate limited): raise the marker, back off, then clear it.
        echo 1 > /tmp/rate_limit
        sleep 60 # TODO: tune the backoff duration
        rm /tmp/rate_limit
    end
end
function dolog2
    # Append one BTCUSDT average-price sample to out.log.
    # Skip entirely while the rate-limit marker file exists.
    test -f /tmp/rate_limit ; and echo 'RATE LIMIT' >> out.log ; and return
    # curl stdout (the JSON body) is appended to out.log; stderr (-v trace) is
    # piped into grep, which captures any HTTP 4xx status line into $err.
    set err (curl 'https://api.binance.com/api/v3/avgPrice?symbol=BTCUSDT' >> out.log -v 2>| grep 'HTTP/[^ ]* 4')
    echo '' >> out.log
    # FIX: use `test -n "$err"`. The original `test $err != ''` expands to zero
    # words when $err is empty (fish drops unquoted empty variables), leaving
    # `test != ''`, which is a test(1) usage error rather than a false result.
    test -n "$err" ; and begin
        # Saw a 4xx (rate limited): raise the marker, back off, then clear it.
        echo 1 > /tmp/rate_limit
        sleep 60 # TODO: tune the backoff duration
        rm /tmp/rate_limit
    end
end
# Log one depth snapshot and one average-price sample, then keep spawning
# detached collector processes.
dolog
dolog2
# NOTE(review): this spawns a new detached copy of binance_collect_data.fish
# every second, forever. If this file *is* binance_collect_data.fish, every
# spawned copy also reaches this loop and spawns more, so the process count
# grows without bound — confirm this is the intended self-restart mechanism.
while true
sleep 1
nohup fish binance_collect_data.fish > /dev/null 2>&1 & disown
end
ws.py 0 → 100644
# https://binance-docs.github.io/apidocs/spot/en/#test-new-order-trade
# One thread maintains the dataset; another thread runs the PyTorch model every
# 5 seconds to decide whether it is time to place an order.
import json, time
from websocket import create_connection
# Aggregated-trade buffer: [(Trade_Time, PRICE, AMOUNT), ...]
# e.g. [(1620457034392, 56000, 0.5), (1620457034394, 56001, 0.05), ...]
# NOTE(review): the original comments for the two lists appear swapped; the
# descriptions here are matched to the variable names (aggTrade = trades,
# depth = order-book snapshots) — verify against dataset_maintain_thread_main.
# Trades arrive at irregular intervals. TODO: resample into
# [(WeightedAvgPrice, Volume), ...] per minute?
# Time axis: [history, older, newer, ..., latest]
realtime_shortterm_dataset_aggtrade = []
# Intended capacity bound for the aggtrade buffer.
realtime_shortterm_dataset_aggtrade_size = 1024
# Depth buffer: [(Snapshot_Time, [(buy_price1, amount1), ...], [(sell_price1, amount1), ...]), ...]
# Snapshots should arrive at an almost-identical time interval — good for LSTM.
# Time axis: [history, older, newer, ..., latest]
realtime_shortterm_dataset_depth = []
# Intended capacity bound for the depth buffer.
realtime_shortterm_dataset_depth_size = 1024*1024
def dataset_maintain_thread_main():
    """Collect realtime BTCUSDT data from the Binance websocket stream.

    Subscribes to the ``aggTrade`` and ``depth`` channels and appends each
    incoming message to the module-level dataset lists, keeping the aggtrade
    list bounded by its ``*_size`` constant. Loops forever.

    Raises:
        RuntimeError: if the server rejects the subscription request.
    """
    global realtime_shortterm_dataset_aggtrade, realtime_shortterm_dataset_aggtrade_size, realtime_shortterm_dataset_depth, realtime_shortterm_dataset_depth_size
    ws = create_connection("wss://stream.binance.com:9443/ws/btcusdt")
    ws.send(json.dumps({
        "method": "SUBSCRIBE",
        "params": ["btcusdt@aggTrade", "btcusdt@depth"],
        "id": 1,
    }))
    # A successful subscription ack looks like {"result":null,"id":1}.
    subs_response = ws.recv()
    if '"result":null' not in subs_response:
        raise RuntimeError("Failed to subscribe: server says: " + subs_response)
    _last_dump_second = 0
    while True:
        parsed = json.loads(ws.recv())
        if 'p' in parsed:
            # aggTrade message: store (trade_time_ms, price, quantity).
            # BUGFIX: this used to append trades to the *depth* list by mistake.
            realtime_shortterm_dataset_aggtrade.append(
                (parsed.get('T'), parsed.get('p'), parsed.get('q')))
            # Drop the oldest entries so the buffer stays bounded
            # (no-op while the list is shorter than the size limit).
            del realtime_shortterm_dataset_aggtrade[:-realtime_shortterm_dataset_aggtrade_size]
        elif 'b' in parsed:
            # Depth (order book) update — not handled yet.
            print('TODO')
        # Once per minute, dump both datasets for debugging; _last_dump_second
        # guards against dumping repeatedly within the same second.
        now = int(time.time())
        if now % 60 == 0 and _last_dump_second != now:
            _last_dump_second = now
            print("DEBUG: dumping realtime_shortterm_dataset_aggtrade================")
            print(realtime_shortterm_dataset_aggtrade)
            print("DEBUG: dumping realtime_shortterm_dataset_depth================")
            print(realtime_shortterm_dataset_depth)
            print("DEBUG: dumping END ==|||||||||||||||||||||||||||||================")
    ws.close()  # unreachable today; kept for a future loop-exit condition
# NOTE(review): despite the "thread" in its name, this runs the collector
# inline on the main thread at import time — confirm whether it should be
# started via threading instead (the header comment mentions two threads).
dataset_maintain_thread_main()
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment