I also encountered the same error. I have a training pipeline that first fine-tunes a DistilBERT model from HuggingFace and then trains my custom layers on top. I was wondering how the error relates to the loss and metrics I'm using. Is it fine to use these?

loss="binary_crossentropy"

and

metrics=["accuracy"]

And by the way, could anyone take a look at what's causing this error? I have no clue. Thank you!

My model:

import tensorflow as tf

def build_model(hp):
    inputs = tf.keras.layers.Input(shape=(X_train_resampled.shape[1],))  # (None, embedding_size)
    
    # Reshape the flat embedding into 3 timesteps so the LSTM sees a sequence
    # (this assumes the embedding size is divisible by 3)
    x = tf.keras.layers.Reshape((3, -1))(inputs)

    # Bi-directional LSTM Layer
    x = tf.keras.layers.Bidirectional(
        tf.keras.layers.LSTM(
            units=hp.Int("lstm_units", min_value=64, max_value=256, step=64),
            return_sequences=False
        )
    )(x)

    # Dropout Layer
    x = tf.keras.layers.Dropout(
        rate=hp.Float("dropout_rate", 0.1, 0.5, step=0.1)
    )(x)

    # Dense Layer
    x = tf.keras.layers.Dense(
        units=hp.Int("dense_units", min_value=32, max_value=256, step=32),
        activation="relu"
    )(x)

    # Output
    outputs = tf.keras.layers.Dense(1, activation="sigmoid")(x)

    model = tf.keras.Model(inputs=inputs, outputs=outputs)

    # Run a dummy batch through the model (a functional Model is already built;
    # this is just an extra sanity check)
    model(tf.zeros((1, X_train_resampled.shape[1])))

    # Compile the Model
    model.compile(
        optimizer=tf.keras.optimizers.Adam(
            learning_rate=hp.Float("learning_rate", 1e-5, 1e-3, sampling="log")
        ),
        loss="binary_crossentropy",
        metrics=["accuracy"]
    )
    return model
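
To sanity-check the model in isolation, I run a quick sketch like the one below (keras_tuner.HyperParameters() simply supplies the default value for each hp.Int/hp.Float call, so no search is involved):

import keras_tuner

# Reshape((3, -1)) only works if the embedding splits evenly into 3 timesteps
assert X_train_resampled.shape[1] % 3 == 0, "embedding size must be divisible by 3"

hp = keras_tuner.HyperParameters()  # defaults: lstm_units=64, dropout_rate=0.1, ...
test_model = build_model(hp)
print(test_model.output_shape)      # expect (None, 1)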

My Tuning:

import os

import keras_tuner
import tensorflow as tf
from keras_tuner import Hyperband
from sklearn.model_selection import KFold

kf = KFold(n_splits=5, shuffle=True, random_state=42)

best_val_acc = 0.0
best_model_path = None
bestHistory = None
bestFold = None

for fold, (train_index, val_index) in enumerate(kf.split(X_train_resampled)):
    print(f"\nCustom Classifier Fold {fold + 1}")

    X_train_fold, X_val_fold = X_train_resampled[train_index], X_train_resampled[val_index]
    y_train_fold, y_val_fold = y_train_resampled[train_index], y_train_resampled[val_index]

    train_fold_dataset = tf.data.Dataset.from_tensor_slices((X_train_fold, y_train_fold)).batch(4)
    val_fold_dataset = tf.data.Dataset.from_tensor_slices((X_val_fold, y_val_fold)).batch(4)
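    # (Optional tweak I'm considering, not part of my original run: shuffle and
    #  prefetch the training fold for steadier tuning throughput.)
    # train_fold_dataset = train_fold_dataset.shuffle(1024).prefetch(tf.data.AUTOTUNE)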

    tuner = Hyperband(
        build_model,
        objective="val_accuracy",
        max_epochs=CUSTOM_EPOCHS,
        directory=os.path.join(TRAINING_PATH, "models"),
        project_name=f"model_2_custom_classifier_fold_{fold + 1}"
    )

    # Monkey patch to bypass keras_tuner's incompatible-Keras-model check
    def patched_validate_trial_model(self, model):
        if not isinstance(model, tf.keras.Model):
            print("⚠️ Model is not a tf.keras.Model; bypassing the check anyway")
        return

    # keras_tuner.engine.trial.Trial._validate_trial_model = patched_validate_trial_model
    keras_tuner.engine.base_tuner.BaseTuner._validate_trial_model = patched_validate_trial_model
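    # (My working assumption about why the built-in check fails: keras_tuner may
    #  validate against the standalone `keras.Model` class while build_model returns
    #  a `tf.keras.Model`, so isinstance() compares two different classes. The patch
    #  above just skips that validation.)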


    tuner.search(
        train_fold_dataset,
        validation_data=val_fold_dataset,
        epochs=CUSTOM_EPOCHS
    )

    best_hp = tuner.get_best_hyperparameters(1)[0]
    print(f"✅ Best hyperparameters for fold {fold + 1}: {best_hp.values}")

    model = build_model(best_hp)
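    # (Alternative I looked at: tuner.get_best_models(1)[0] returns the best model
    #  already trained during the search, but rebuilding from best_hp lets me retrain
    #  on the fold from scratch.)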

    # print the model's summary after complex modifications
    print("Model summary after hyperparameter tuning:")
    model.summary()

    history = model.fit(train_fold_dataset, validation_data=val_fold_dataset, epochs=CUSTOM_EPOCHS)

    val_acc = history.history['val_accuracy'][-1]
    model_save_path = os.path.join(TRAINING_PATH, "models", f"custom_classifier_fold_{fold + 1}.h5")
    if val_acc > best_val_acc:
        best_val_acc = val_acc
        best_model_path = model_save_path
        bestHistory = history
        bestFold = fold

    model.save(model_save_path)

if best_model_path:
    print(f"Saving the best model (fold {bestFold + 1}) with validation accuracy: {best_val_acc}")
    # Reload the best fold's checkpoint rather than re-saving whatever model
    # the last fold left behind
    best_model = tf.keras.models.load_model(best_model_path)
    best_model_path = os.path.join(TRAINING_PATH, "models", "BEST_custom_classifier.h5")
    best_model.save(best_model_path)
    print(f"✅ Best model saved at: {best_model_path}")

    modelType = "Custom Layers"
    plot(bestHistory, bestFold, modelType)
    print(f"✅ Convergence plots saved for {modelType} at fold {bestFold + 1}.")

Thanks for your time here!
