|
import tensorflow as tf |
|
from tensorflow.keras import layers |
|
import pandas as pd |
|
import numpy as np |
|
from typing import Tuple, List |
|
import logging |
|
from datetime import datetime |
|
from pathlib import Path |
|
import json |
|
from sklearn.preprocessing import MinMaxScaler |
|
from ta.trend import SMAIndicator, EMAIndicator, MACD |
|
from ta.momentum import RSIIndicator |
|
from ta.volatility import BollingerBands |
|
|
|
|
|
# Configure root logging once at import time: INFO level with a
# timestamped "time - level - message" format shared by the whole script.
logging.basicConfig(

    level=logging.INFO,

    format='%(asctime)s - %(levelname)s - %(message)s'

)

# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)
|
|
|
class DataPreprocessor:
    """Handles data loading and preprocessing for model training.

    Loads raw trade data from CSV, augments it with technical-analysis
    indicators, min-max scales every numeric column, and slices the
    result into fixed-length training sequences.
    """

    def __init__(self, config_path: str = 'training_config/hyperparameters.json'):
        """Load hyperparameters from *config_path* (JSON).

        The config must provide config['data']['sequence_length'].
        """
        with open(config_path) as f:
            self.config = json.load(f)
        # One MinMaxScaler per numeric column, keyed by column name, so
        # each feature's scaling can later be inverted independently.
        self.scalers = {}

    def load_data(self, data_path: str) -> pd.DataFrame:
        """Load data from CSV and add technical indicators.

        Expects at least 'timestamp' and 'price' columns. Rows are sorted
        chronologically first, since rolling indicators assume
        time-ordered input.
        """
        df = pd.read_csv(data_path)
        df['timestamp'] = pd.to_datetime(df['timestamp'])
        df = df.sort_values('timestamp')

        df = self.add_technical_indicators(df)

        return df

    def add_technical_indicators(self, df: pd.DataFrame) -> pd.DataFrame:
        """Add technical analysis indicators derived from 'price'.

        Rolling indicators are undefined during their warm-up window
        (e.g. the first 49 rows for sma_50); those NaN rows are dropped
        at the end so they cannot poison training sequences downstream.
        """
        # Simple moving averages over two horizons.
        df['sma_20'] = SMAIndicator(close=df['price'], window=20).sma_indicator()
        df['sma_50'] = SMAIndicator(close=df['price'], window=50).sma_indicator()

        # Exponential moving average (reacts faster than the SMA).
        df['ema_20'] = EMAIndicator(close=df['price'], window=20).ema_indicator()

        # MACD line and its signal line.
        macd = MACD(close=df['price'])
        df['macd'] = macd.macd()
        df['macd_signal'] = macd.macd_signal()

        # Relative strength index (momentum).
        df['rsi'] = RSIIndicator(close=df['price']).rsi()

        # Bollinger band envelope (volatility).
        bb = BollingerBands(close=df['price'])
        df['bb_high'] = bb.bollinger_hband()
        df['bb_low'] = bb.bollinger_lband()

        # BUG FIX: warm-up rows contain NaNs, which MinMaxScaler would
        # propagate straight into the training sequences. Drop them.
        return df.dropna().reset_index(drop=True)

    def prepare_sequences(self, df: pd.DataFrame) -> Tuple[np.ndarray, np.ndarray]:
        """Create (sequence, next-price) training pairs.

        Every numeric column is min-max scaled; the fitted scalers are
        cached in self.scalers so predictions can be inverse-transformed.

        NOTE(review): scalers are fit on the full frame, i.e. before any
        train/test split, which leaks test-set statistics into training.
        Kept as-is to preserve the existing pipeline contract; fit on the
        training slice only if strict evaluation is required.

        Returns:
            (X, y) where X has shape
            (n_samples, sequence_length, n_numeric_columns) and y is the
            scaled 'price' immediately following each sequence.
        """
        sequence_length = self.config['data']['sequence_length']

        numeric_cols = df.select_dtypes(include=[np.number]).columns
        for column in numeric_cols:
            if column not in self.scalers:
                self.scalers[column] = MinMaxScaler()
            df[column] = self.scalers[column].fit_transform(df[[column]])

        sequences = []
        targets = []

        # BUG FIX: restrict sequences to the scaled numeric features.
        # Previously whole-row DataFrame slices (including the datetime
        # 'timestamp' column) were stacked, yielding object-dtype arrays
        # that model.fit cannot consume.
        features = df[numeric_cols]
        for i in range(len(features) - sequence_length):
            sequences.append(features.iloc[i:i + sequence_length].values)
            targets.append(features.iloc[i + sequence_length]['price'])

        return np.array(sequences), np.array(targets)
|
|
|
class TransformerBlock(layers.Layer):
    """Transformer encoder block.

    Multi-head self-attention followed by a position-wise feed-forward
    network, each wrapped in dropout, a residual connection, and layer
    normalization (post-norm arrangement).
    """

    def __init__(self, embed_dim, num_heads, ff_dim, rate=0.1):
        """Args:
            embed_dim: dimensionality of input/output embeddings.
            num_heads: number of attention heads.
            ff_dim: hidden width of the feed-forward sub-layer.
            rate: dropout rate applied after attention and FFN.
        """
        super().__init__()
        self.att = layers.MultiHeadAttention(num_heads=num_heads, key_dim=embed_dim)
        # FFN projects back to embed_dim so the residual add is
        # shape-compatible.
        self.ffn = tf.keras.Sequential([
            layers.Dense(ff_dim, activation="relu"),
            layers.Dense(embed_dim),
        ])
        self.layernorm1 = layers.LayerNormalization(epsilon=1e-6)
        self.layernorm2 = layers.LayerNormalization(epsilon=1e-6)
        self.dropout1 = layers.Dropout(rate)
        self.dropout2 = layers.Dropout(rate)

    def call(self, inputs, training=None):
        # BUG FIX: `training` previously had no default, so invoking the
        # block as block(x) relied entirely on Keras auto-filling the
        # argument. Defaulting to None lets Keras resolve the phase
        # safely in every call path.
        attn_output = self.att(inputs, inputs)
        attn_output = self.dropout1(attn_output, training=training)
        out1 = self.layernorm1(inputs + attn_output)
        ffn_output = self.ffn(out1)
        ffn_output = self.dropout2(ffn_output, training=training)
        return self.layernorm2(out1 + ffn_output)
|
|
|
class CryptoTransformer(tf.keras.Model):
    """Stacked transformer-encoder model predicting the next price.

    Reads architecture hyperparameters from a JSON config, applies
    n_layers TransformerBlocks, global-average-pools over the time
    axis, and regresses a single scalar.
    """

    def __init__(self, config_path: str = 'training_config/hyperparameters.json'):
        """Build the model from config['model'] hyperparameters."""
        super().__init__()

        with open(config_path) as f:
            self.config = json.load(f)

        self.num_layers = self.config['model']['n_layers']
        self.d_model = self.config['model']['d_model']
        self.num_heads = self.config['model']['n_heads']
        self.ff_dim = self.config['model']['d_ff']
        # BUG FIX: keep the raw rate under its own name. Previously the
        # float was stored in self.dropout and then silently overwritten
        # by the Dropout layer below.
        self.dropout_rate = self.config['model']['dropout']

        self.transformer_blocks = [
            TransformerBlock(self.d_model, self.num_heads, self.ff_dim, self.dropout_rate)
            for _ in range(self.num_layers)
        ]
        # BUG FIX: the pooling layer used to be instantiated inside
        # call(), creating a fresh layer object on every forward pass;
        # build it once here so the layer graph is stable.
        self.pooling = layers.GlobalAveragePooling1D()
        self.dropout = layers.Dropout(self.dropout_rate)
        self.dense = layers.Dense(1)

    def call(self, inputs, training=None):
        """Forward pass: (batch, seq, features) -> (batch, 1)."""
        x = inputs
        for transformer_block in self.transformer_blocks:
            # Propagate the training flag explicitly so dropout inside
            # the blocks switches phase correctly.
            x = transformer_block(x, training=training)
        x = self.pooling(x)
        x = self.dropout(x, training=training)
        return self.dense(x)
|
|
|
def train_model():
    """Train the CryptoTransformer end-to-end and persist artifacts.

    Pipeline: load + featurize data, build sequences, chronological
    80/20 split, compile, fit with early stopping / checkpointing /
    TensorBoard, then save the final model and the training history.

    Side effects: writes model checkpoints to models/, TensorBoard logs
    to logs/, the final model to models/crypto_transformer_final, and
    the history to models/training_history.csv.

    Returns:
        (model, history): the trained model and its Keras History.
    """
    logger.info("Starting model training")

    preprocessor = DataPreprocessor()
    df = preprocessor.load_data('data/training/kraken_trades.csv')

    X, y = preprocessor.prepare_sequences(df)

    # Chronological split (no shuffling) — shuffling would leak future
    # information into the training set for time-series data.
    train_size = int(0.8 * len(X))
    X_train, X_test = X[:train_size], X[train_size:]
    y_train, y_test = y[:train_size], y[train_size:]

    model = CryptoTransformer()

    optimizer = tf.keras.optimizers.Adam(learning_rate=1e-4)
    model.compile(
        optimizer=optimizer,
        loss='mse',
        metrics=['mae']
    )

    history = model.fit(
        X_train, y_train,
        epochs=100,
        batch_size=32,
        validation_data=(X_test, y_test),
        callbacks=[
            tf.keras.callbacks.EarlyStopping(
                monitor='val_loss',
                patience=10,
                restore_best_weights=True
            ),
            # BUG FIX: a subclassed tf.keras.Model cannot be serialized
            # to the HDF5 full-model format, so the original checkpoint
            # would raise at save time. Checkpoint weights only.
            tf.keras.callbacks.ModelCheckpoint(
                'models/crypto_transformer_{epoch}.h5',
                save_best_only=True,
                save_weights_only=True,
                monitor='val_loss'
            ),
            tf.keras.callbacks.TensorBoard(log_dir='logs')
        ]
    )

    # No extension -> TensorFlow SavedModel directory, which does
    # support subclassed models.
    model.save('models/crypto_transformer_final')

    pd.DataFrame(history.history).to_csv('models/training_history.csv')

    logger.info("Training completed")
    return model, history
|
|
|
if __name__ == "__main__":
    # Ensure the output directories exist before training writes
    # checkpoints and TensorBoard logs into them.
    for directory in ('models', 'logs'):
        Path(directory).mkdir(exist_ok=True)

    model, history = train_model()