This function uses the XGBoost library to train classifiers with randomly sampled hyperparameters. It performs a simple random search over a small parameter grid and returns the best-scoring parameter combination.
Technology Stack : XGBoost, NumPy
Code Type : Function
Code Difficulty : Intermediate
import xgboost as xgb
import numpy as np

def random_xgb_model(X, y, num_round=100):
    """
    Fits XGBoost classifiers with randomly sampled hyperparameters on the
    given dataset and returns the best parameter combination found.
    """
    # Candidate values for the random search
    param_dist = {
        'max_depth': [3, 5, 7, 9],
        'min_child_weight': [1, 3, 5],
        'gamma': [0.1, 0.3, 0.5],
        'subsample': [0.5, 0.7, 1.0],
        'colsample_bytree': [0.5, 0.7, 1.0]
    }

    # Initialize the XGBoost classifier; num_round sets the number of boosting rounds
    xgb_clf = xgb.XGBClassifier(n_estimators=num_round,
                                use_label_encoder=False,
                                eval_metric='logloss')

    # Perform the random search over 10 sampled configurations
    best_score = 0
    best_params = {}
    for i in range(10):
        # Sample one random value for each hyperparameter
        sample_params = {key: np.random.choice(value) for key, value in param_dist.items()}
        xgb_clf.set_params(**sample_params)

        # Train the model
        xgb_clf.fit(X, y)

        # Evaluate the model (note: accuracy is measured on the training data itself)
        score = xgb_clf.score(X, y)
        if score > best_score:
            best_score = score
            best_params = sample_params

    # Return the best parameters found
    return best_params
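For context, here is a minimal usage sketch. It assumes scikit-learn's make_classification is available to produce a synthetic dataset; the dataset, shapes, and random_state used here are illustrative assumptions, not part of the original code.

from sklearn.datasets import make_classification  # assumed available for this demo only

# Build a small synthetic binary-classification dataset (illustrative values)
X, y = make_classification(n_samples=500, n_features=20, random_state=42)

# Run the random search and print the best hyperparameters it found
best_params = random_xgb_model(X, y, num_round=50)
print("Best parameters:", best_params)

Because the search scores each candidate on the training data, the returned parameters favor models that fit the training set closely; swapping in a held-out validation split or cross-validation would give a more reliable comparison.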