alg.fit(dtrain[predictors], dtrain['Disbursed']) #Predict training set: 预测训练集数据 dtrain_predictions = alg.predict(dtrain[predictors]) dtrain_predprob = alg.predict_proba(dtrain[predictors])[:,1] #Perform cross-validation: 执行交叉验证 if performCV: cv_score = model_selection.cross_...
alg.fit(dtrain[predictors], dtrain['Disbursed']) #Predict training set: dtrain_predictions = alg.predict(dtrain[predictors]) dtrain_predprob = alg.predict_proba(dtrain[predictors])[:,1] #Perform cross-validation: if performCV: cv_score = cross_validation.cross_val_score(alg, dtr...
dtrain_predprob = alg.predict_proba(dtrain[predictors])[:,1] #Perform cross-validation: if performCV: cv_score = cross_validation.cross_val_score(alg, dtrain[predictors], dtrain['Disbursed'], cv=cv_folds, scoring='roc_auc') #Print model report: print "\nModel Report" print "Accuracy : %.4g"...
# Make predictions on the test set pred_Labels_GBM = trained_Model_GBM.predict(X_test) pred_proba_GBM = trained_Model_GBM.predict_proba(X_test) # Evaluate performance print(); print('Evaluation of the trained model Gradient Boosting: ') accuracy = accuracy_score(y_test, pred_Labels_GBM)...
return (metrics.roc_auc_score(y_train,m.predict_proba(train)[:,1]), metrics.roc_auc_score(y_test,m.predict_proba(test)[:,1])) # Parameter Tuning model = xgb.XGBClassifier() param_dist = {"max_depth": [10,30,50], "min_child_weight" : [1,3,6], ...
return (metrics.roc_auc_score(y_train,m.predict_proba(train)[:,1]), metrics.roc_auc_score(y_test,m.predict_proba(test)[:,1])) # Parameter Tuning model = xgb.XGBClassifier() param_dist = {"max_depth": [10,30,50], "min_child_weight" : [1,3,6], ...
clf.fit(X_train,y_train) y_pred_valid = clf.predict_proba(X_valid)[:,1] y_oof[valid_index] = y_pred_valid print(f"Fold{fold_n +1}| AUC:{roc_auc_score(y_valid, y_pred_valid)}") score += roc_auc_score(y_valid, y_pred_valid) / NFOLDS del X_train, X_valid, y_train,...
return(metrics.roc_auc_score(y_train,m.predict_proba(train)[:,1]), metrics.roc_auc_score(y_test,m.predict_proba(test)[:,1])) # Parameter Tuning model = xgb.XGBClassifier() param_dist = {"max_depth": [10,30,50], "min_child_weight": [1,3,6], ...
如所写,您的代码调整最终模型的超参数,而不是特征选择步骤中 GBM 的超参数。有几个选择: 扩展搜索空间以包含选择GBM的超参数,例如 feature_selection__estimator__max_depth。 删除模型步骤。 RFE 可以访问所选特征集 ( estimator_) 上的最终模型,并且您可能需要的方法可直接从 RFE 对象(例如 rfe.predict)...
def auc(m, train, test): return (metrics.roc_auc_score(y_train, m.predict_proba(train)[:,1]), metrics.roc_auc_score(y_test, m.predict_proba(test)[:,1])) # Parameter Tuning model = xgb.XGBClassifier() param_dist = {"max_depth": [10,30,50], "min_child_weight": [1,3,6], "n_estimators": [200],...