import tensorflow as tf
from tensorflow.keras.layers import (
    Dense,
    Dropout,
    Flatten,
    Input,
    LayerNormalization,
    MultiHeadAttention,
    Reshape,
)
from tensorflow.keras.models import Model


def create_transformer_model(sequence_length, d_model=32, num_heads=2,
                             ff_dim=64, num_layers=1, dropout=0.1):
    """Build a small Transformer-encoder regressor for 1-D sequences.

    The model takes a flat sequence of scalars of shape
    ``(batch, sequence_length)``, projects each scalar to a ``d_model``-dim
    embedding, applies ``num_layers`` post-norm encoder layers
    (multi-head self-attention + position-wise feed-forward, each with a
    residual connection and LayerNormalization), then flattens and emits a
    single linear output per example.

    Args:
        sequence_length: Length of the input sequence (fixed; required by
            the final ``Flatten`` + ``Dense`` head).
        d_model: Embedding/attention width per token.
        num_heads: Number of attention heads.
        ff_dim: Hidden width of the position-wise feed-forward sublayer.
        num_layers: Number of stacked encoder layers.
        dropout: Dropout rate used in attention, after attention, and in
            the feed-forward sublayer.

    Returns:
        An uncompiled ``tf.keras.Model`` mapping
        ``(batch, sequence_length)`` -> ``(batch, 1)``.

    Note: no positional encoding is added, so the attention sublayers are
    permutation-invariant; order information enters only through the
    final Flatten + Dense head.
    """
    inputs = Input(shape=(sequence_length,))

    # (batch, sequence_length) -> (batch, sequence_length, 1).
    # Reshape is used instead of Lambda(tf.expand_dims): Lambda layers
    # cannot be safely serialized/deserialized (Keras 3 requires
    # safe_mode=False to load them), while Reshape is fully portable.
    x = Reshape((sequence_length, 1))(inputs)

    # Project each scalar token to a d_model-dimensional embedding.
    x = Dense(d_model)(x)

    for _ in range(num_layers):
        # --- Self-attention sublayer (post-norm residual) ---
        attn_output = MultiHeadAttention(
            num_heads=num_heads, key_dim=d_model, dropout=dropout
        )(x, x)
        attn_output = Dropout(dropout)(attn_output)
        x = LayerNormalization(epsilon=1e-6)(x + attn_output)

        # --- Position-wise feed-forward sublayer (post-norm residual) ---
        ff = Dense(ff_dim, activation="relu")(x)
        ff = Dense(d_model)(ff)
        ff = Dropout(dropout)(ff)
        x = LayerNormalization(epsilon=1e-6)(x + ff)

    # Collapse (batch, sequence_length, d_model) to a single regression
    # output per example.
    x = Flatten()(x)
    outputs = Dense(1)(x)

    return Model(inputs=inputs, outputs=outputs)