Skip to content
This repository has been archived by the owner on Jul 27, 2023. It is now read-only.

Added automatic OHLCV fetcher for crypto exchanges #14

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 26 additions & 16 deletions agent/Evolution_Bayesian.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import seaborn as sns
import random
from bayes_opt import BayesianOptimization
from crypto_data_loader import get_candles
sns.set()

def get_state(data, t, n):
Expand All @@ -15,15 +16,6 @@ def get_state(data, t, n):
res.append(block[i + 1] - block[i])
return np.array([res])

df = pd.read_csv('../dataset/GOOG-year.csv')
print(df.tail())

close = df.Close.values.tolist()
window_size = 30
skip = 5
l = len(close) - 1


class Deep_Evolution_Strategy:

inputs = None
Expand Down Expand Up @@ -291,20 +283,38 @@ def find_best_agent(
if investment > accbest:
costbest = investment
return investment
##--------------------------------------------------
##Settings (Later as argv)
exchange = 'bitmex'
symbol = 'BTC/USD'
start_date = '2018-01-01T00:00:00Z'
timeframe = '1d'

#---------------------------------------------------
##Get Data
df = get_candles(exchange , 3 , symbol ,timeframe , start_date ,25) #df = pd.read_csv('../dataset/GOOG-year.csv')
print(df)

close = df.Close.values.tolist()
window_size = 30
skip = 5
l = len(close) - 1

##----------------------------------------------------
## Bayesian Stuff
accbest = 0.0
NN_BAYESIAN = BayesianOptimization(
find_best_agent,
{
'window_size': (2, 50),#2,50
'skip': (1, 15), #1,15
'population_size': (1, 50),#1,50
'window_size': (2, 50),#standard: 2,50
'skip': (1, 15), #standard: 1,15
'population_size': (1, 50),#standard: 1,50
'sigma': (0.01, 0.99),
'learning_rate': (0.000001, 0.49),#0.000001 , 0.49
'size_network': (10, 1000),#10,1000
'learning_rate': (0.000001, 0.49),#standard: 0.000001 , 0.49
'size_network': (10, 1000),#standard: 10,1000
},
)
NN_BAYESIAN.maximize(init_points = 50, n_iter = 200, acq = 'ei', xi = 0.0)#n_iter=50 init_points=30
NN_BAYESIAN.maximize(init_points = 50, n_iter = 80, acq = 'ei', xi = 0.0)#standard: init_points=30 n_iter=50


print('----------------------------------------------')
Expand All @@ -321,6 +331,6 @@ def find_best_agent(
model = Model(int(params['window_size']) , int(params['size_network']) ,3)
agent = Agent(int(params['population_size']) , params['sigma'] , params['learning_rate'] , model , 10000 , 5 , 5, int(params['skip']),int(params['window_size']))

agent.fit(500, 100)
agent.fit(500, 50)

agent.buy()
64 changes: 64 additions & 0 deletions agent/crypto_data_loader.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
import os
import ccxt
from datetime import datetime
import pandas as pd

def retry_fetch_ohlcv(exchange, max_retries, symbol, timeframe, since, limit):
    """Fetch one page of OHLCV candles, retrying on transient failures.

    exchange    : instantiated ccxt exchange object
    max_retries : total number of attempts before giving up
    symbol      : market symbol, e.g. 'BTC/USD'
    timeframe   : ccxt timeframe string, e.g. '1d'
    since       : millisecond timestamp to fetch from
    limit       : maximum number of candles to request

    Returns the list of OHLCV rows on success.
    Raises Exception after `max_retries` failed attempts.

    NOTE: the original version never actually retried — it tried once and,
    unless max_retries < 1, silently returned None on failure, which would
    crash the caller with a TypeError. This version loops and always either
    returns data or raises.
    """
    for attempt in range(1, max_retries + 1):
        try:
            ohlcv = exchange.fetch_ohlcv(symbol, timeframe, since, limit)
            print('Fetched', len(ohlcv), symbol, 'candles from',
                  exchange.iso8601(ohlcv[0][0]), 'to',
                  exchange.iso8601(ohlcv[-1][0]))
            return ohlcv
        except Exception:
            # Last attempt exhausted: surface the failure instead of
            # swallowing it and returning None.
            if attempt >= max_retries:
                raise Exception('Failed to fetch', timeframe, symbol,
                                'OHLCV in', max_retries, 'attempts')


def scrape_ohlcv(exchange, max_retries, symbol, timeframe, since, limit):
    """Page backwards through an exchange's OHLCV history.

    Starting from "now", repeatedly fetches windows of `limit` candles and
    prepends them, stopping when the exchange's earliest available candle is
    reached or when the window start passes the `since` checkpoint
    (milliseconds since epoch). Returns the accumulated list of OHLCV rows,
    oldest first.
    """
    earliest_timestamp = exchange.milliseconds()  # start paging from "now"
    timeframe_duration_in_seconds = exchange.parse_timeframe(timeframe)
    timeframe_duration_in_ms = timeframe_duration_in_seconds * 1000
    # Span of one fetch window in milliseconds (limit candles wide).
    timedelta = limit * timeframe_duration_in_ms
    all_ohlcv = []
    while True:
        fetch_since = earliest_timestamp - timedelta
        # NOTE(review): retry_fetch_ohlcv may return None if every attempt
        # fails silently; the subscripts below would then raise TypeError.
        ohlcv = retry_fetch_ohlcv(exchange, max_retries, symbol, timeframe, fetch_since, limit)
        # if we have reached the beginning of history
        # (the exchange returned nothing older than what we already have)
        if ohlcv[0][0] >= earliest_timestamp:
            break
        earliest_timestamp = ohlcv[0][0]
        all_ohlcv = ohlcv + all_ohlcv  # prepend: results stay oldest-first
        print(len(all_ohlcv), 'candles in total from', exchange.iso8601(all_ohlcv[0][0]), 'to', exchange.iso8601(all_ohlcv[-1][0]))
        # if we have reached the checkpoint
        if fetch_since < since:
            break
    return all_ohlcv

def get_candles(exchange_id, max_retries, symbol, timeframe, since, limit):
    """Download the OHLCV history for `symbol` and return it as a DataFrame.

    exchange_id : ccxt exchange id, e.g. 'bitmex'
    max_retries : attempts per page before giving up
    symbol      : market symbol, e.g. 'BTC/USD'
    timeframe   : ccxt timeframe string, e.g. '1d'
    since       : ISO-8601 string or millisecond timestamp; history start
    limit       : candles per request page

    Returns a pandas DataFrame with columns
    ['Date', 'Open', 'High', 'Low', 'Close', 'Volume'], where Date is a
    'YYYY-MM-DD' string (UTC) and Volume is truncated to int.
    """
    # instantiate the exchange by id
    exchange = getattr(ccxt, exchange_id)({
        'enableRateLimit': True,  # required by the ccxt Manual
    })
    # convert since from string to milliseconds integer if needed
    if isinstance(since, str):
        since = exchange.parse8601(since)
    # preload all markets from the exchange
    exchange.load_markets()
    # fetch all candles
    ohlcv = scrape_ohlcv(exchange, max_retries, symbol, timeframe, since, limit)

    # Capture the millisecond bounds BEFORE the in-place conversion below:
    # the original printed exchange.iso8601() of rows whose timestamps had
    # already been replaced by date strings, which ccxt cannot format.
    first_ts, last_ts = ohlcv[0][0], ohlcv[-1][0]

    # Convert each row in place: ms timestamp -> 'YYYY-MM-DD', volume -> int.
    for candle in ohlcv:
        epoch = int(candle[0]) / 1000
        candle[0] = datetime.utcfromtimestamp(epoch).strftime('%Y-%m-%d')  # add %H:%M:%S for intraday
        candle[5] = int(candle[5])

    # Build the DataFrame. (Nothing is written to disk here, despite what
    # the original "save them to csv file" comment claimed.)
    candles = pd.DataFrame(ohlcv, columns=['Date', 'Open', 'High', 'Low', 'Close', 'Volume'])
    print(candles.head())
    print(candles.tail())
    print('Fetched', len(ohlcv), 'candles from', exchange.iso8601(first_ts), 'to', exchange.iso8601(last_ts))
    return candles