14. Train LGBM
In [1]:
import featurebyte as fb
import numpy as np
import pandas as pd
from itertools import product
from sklearn.metrics import roc_auc_score
from modeling_script import LightGBMPipeline, Objective, Metric
from typing import Optional
17:05:38 | INFO | SDK version: 3.2.0.dev66
17:05:38 | INFO | No catalog activated.
If you are in an enterprise setting, explore how models can be trained in FeatureByte's User Interface.
Activate Catalog¶
In [2]:
# Set your profile to the tutorial environment
fb.use_profile("tutorial")
catalog_name = "Credit Default Dataset SDK Tutorial"
catalog = fb.Catalog.activate(catalog_name)
17:06:14 | INFO | Using profile: staging
17:06:14 | INFO | Using configuration file at: /Users/gxav/.featurebyte/config.yaml
17:06:14 | INFO | Active profile: staging (https://staging.featurebyte.com/api/v1)
17:06:14 | INFO | SDK version: 3.2.0.dev66
17:06:14 | INFO | No catalog activated.
17:06:15 | INFO | Catalog activated: Credit Default Dataset SDK Tutorial
Get Training and Holdout data¶
In [3]:
catalog.list_historical_feature_tables()
Out[3]:
| | id | name | feature_store_name | observation_table_name | shape | created_at |
|---|---|---|---|---|---|---|
| 0 | 68ef634bcf8a902261e4386a | 40 features for Credit Default - TRAIN | playground | Applications up to March 2025 | [307511, 43] | 2025-10-15T09:05:07.959000 |
In [4]:
training_data_table = catalog.get_historical_feature_table("40 features for Credit Default - TRAIN")
In [5]:
# download as pandas data frame
feature_data = training_data_table.to_pandas()
Downloading table: 307511/307511 rows [100%]
In [6]:
training_from = "2019-04-01 00:00"
training_to = "2025-01-01 00:00"
validation_from = "2025-01-01 00:00"
validation_to = "2025-04-01 00:00"
feature_data["POINT_IN_TIME"] = pd.to_datetime(feature_data["POINT_IN_TIME"])
cond = (
(feature_data["POINT_IN_TIME"] >= training_from)
& (feature_data["POINT_IN_TIME"] < training_to)
)
training_data = feature_data.loc[cond].reset_index(drop=True)
cond = (
(feature_data["POINT_IN_TIME"] >= validation_from)
& (feature_data["POINT_IN_TIME"] < validation_to)
)
validation_data = feature_data.loc[cond].reset_index(drop=True)
validation_data.shape
Out[6]:
(12527, 43)
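To sanity-check the time-based split, compare the size and default rate of each partition. A minimal sketch using the dataframes created above (it assumes the target column is named Loan_Default, as defined in the next section):

# Compare partition sizes and default rates
for name, df in [("training", training_data), ("validation", validation_data)]:
    print(f"{name}: {df.shape[0]} rows, default rate = {df['Loan_Default'].mean():.3f}")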
Categorize per feature type¶
In [7]:
target_column = "Loan_Default"
entity_columns = ["SK_ID_CURR"]
excluded_columns = set([target_column, "POINT_IN_TIME", "__FB_TABLE_ROW_INDEX", "NEG_SAMPLE_WEIGHT"] + entity_columns)
feature_columns = [column for column in training_data.columns if column not in excluded_columns]
In [8]:
feature_types = {}
for f in feature_columns:
feature = catalog.get_feature(f)
feature_types[f] = feature.feature_type
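The feature_types mapping tells the modeling script how each column should be handled (for example, numeric versus categorical). To see how the selected features break down by type, a minimal sketch (it assumes the feature_type values are hashable, e.g. strings or enums):

# Count how many features fall into each feature type
pd.Series(feature_types).value_counts()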
Specify Training Parameters¶
In [9]:
objective = Objective.BINARY
eval_metric = Metric.AUC
num_boost_round = 10000
early_stopping_rounds = 50
small_count_threshold = 10
param_grid = {
"learning_rate": [0.01],
"max_depth": [7],
"num_leaves": [48],
"subsample": [0.8],
"colsample_bytree": [0.5],
"min_split_gain": [0.025],
"reg_alpha": [0.5],
"reg_lambda": [0.5],
}
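Each entry in param_grid is a list of candidate values, and the training loop in the next section evaluates every combination (their Cartesian product). With a single value per parameter, as above, the grid contains exactly one combination. A hypothetical wider grid could look like the sketch below; the extra values are illustrative only, and any parameter left out of the grid would need to be fixed elsewhere:

# Hypothetical wider grid: 2 x 2 x 2 = 8 combinations to evaluate
wider_param_grid = {
    "learning_rate": [0.01, 0.05],
    "max_depth": [5, 7],
    "num_leaves": [31, 48],
}
print(len(list(product(*wider_param_grid.values()))), "combinations")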
Run LightGBM¶
In [10]:
target_train = training_data[target_column]
features_train = training_data[feature_columns]
target_test = validation_data[target_column]
features_test = validation_data[feature_columns]
# Prepare cartesian product of parameters
keys = list(param_grid.keys())
values = list(param_grid.values())
best_auc = 0
best_params = None
lgbm_pipeline: Optional[LightGBMPipeline] = None
for combo in product(*values):
params = dict(zip(keys, combo))
pipeline = LightGBMPipeline(
objective=objective,
eval_metric=eval_metric,
num_boost_round=num_boost_round,
early_stopping_rounds=early_stopping_rounds,
**params,
)
pipeline.train(
df_train=features_train,
df_test=features_test,
y_train=target_train,
y_test=target_test,
feature_types=feature_types,
)
# validate on test data
predictions = pipeline.predict(features_test)
auc = roc_auc_score(target_test, predictions)
print(f"params: {params}, AUC: {auc:.4f}")
if auc > best_auc:
best_auc = auc
best_params = params
lgbm_pipeline = pipeline
Preprocessing done
[LightGBM] [Info] Number of positive: 23803, number of negative: 271181
[LightGBM] [Info] Auto-choosing row-wise multi-threading, the overhead of testing was 0.037304 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 12350
[LightGBM] [Info] Number of data points in the train set: 294984, number of used features: 58
[LightGBM] [Info] [binary:BoostFromScore]: pavg=0.080693 -> initscore=-2.432975
[LightGBM] [Info] Start training from score -2.432975
Training until validation scores don't improve for 50 rounds
[10] valid_0's auc: 0.750745
[20] valid_0's auc: 0.757288
[30] valid_0's auc: 0.756694
[40] valid_0's auc: 0.760562
[50] valid_0's auc: 0.760036
[60] valid_0's auc: 0.762591
[70] valid_0's auc: 0.763298
[80] valid_0's auc: 0.764488
[90] valid_0's auc: 0.765392
[100] valid_0's auc: 0.766092
[110] valid_0's auc: 0.766556
[120] valid_0's auc: 0.767277
[130] valid_0's auc: 0.767725
[140] valid_0's auc: 0.768334
[150] valid_0's auc: 0.769309
[160] valid_0's auc: 0.769949
[170] valid_0's auc: 0.770453
[180] valid_0's auc: 0.770831
[190] valid_0's auc: 0.771823
[200] valid_0's auc: 0.772232
[210] valid_0's auc: 0.772727
[220] valid_0's auc: 0.773305
[230] valid_0's auc: 0.773729
[240] valid_0's auc: 0.773899
[250] valid_0's auc: 0.774413
[260] valid_0's auc: 0.774673
[270] valid_0's auc: 0.775133
[280] valid_0's auc: 0.775432
[290] valid_0's auc: 0.775822
[300] valid_0's auc: 0.776164
[310] valid_0's auc: 0.776413
[320] valid_0's auc: 0.776836
[330] valid_0's auc: 0.777213
[340] valid_0's auc: 0.777558
[350] valid_0's auc: 0.777882
[360] valid_0's auc: 0.778067
[370] valid_0's auc: 0.778413
[380] valid_0's auc: 0.778639
[390] valid_0's auc: 0.778791
[400] valid_0's auc: 0.779226
[410] valid_0's auc: 0.779471
[420] valid_0's auc: 0.779593
[430] valid_0's auc: 0.779847
[440] valid_0's auc: 0.780113
[450] valid_0's auc: 0.780337
[460] valid_0's auc: 0.780559
[470] valid_0's auc: 0.780833
[480] valid_0's auc: 0.781128
[490] valid_0's auc: 0.781373
[500] valid_0's auc: 0.781621
[510] valid_0's auc: 0.781943
[520] valid_0's auc: 0.782198
[530] valid_0's auc: 0.782492
[540] valid_0's auc: 0.782793
[550] valid_0's auc: 0.782932
[560] valid_0's auc: 0.783208
[570] valid_0's auc: 0.783413
[580] valid_0's auc: 0.78373
[590] valid_0's auc: 0.783911
[600] valid_0's auc: 0.784102
[610] valid_0's auc: 0.784272
[620] valid_0's auc: 0.784509
[630] valid_0's auc: 0.784653
[640] valid_0's auc: 0.78485
[650] valid_0's auc: 0.784933
[660] valid_0's auc: 0.785108
[670] valid_0's auc: 0.785289
[680] valid_0's auc: 0.785436
[690] valid_0's auc: 0.785658
[700] valid_0's auc: 0.785875
[710] valid_0's auc: 0.786005
[720] valid_0's auc: 0.786199
[730] valid_0's auc: 0.786343
[740] valid_0's auc: 0.786477
[750] valid_0's auc: 0.786715
[760] valid_0's auc: 0.786873
[770] valid_0's auc: 0.786969
[780] valid_0's auc: 0.787113
[790] valid_0's auc: 0.78723
[800] valid_0's auc: 0.787392
[810] valid_0's auc: 0.787533
[820] valid_0's auc: 0.787628
[830] valid_0's auc: 0.787734
[840] valid_0's auc: 0.787886
[850] valid_0's auc: 0.788029
[860] valid_0's auc: 0.788115
[870] valid_0's auc: 0.788218
[880] valid_0's auc: 0.788333
[890] valid_0's auc: 0.788505
[900] valid_0's auc: 0.78855
[910] valid_0's auc: 0.788592
[920] valid_0's auc: 0.788671
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[930] valid_0's auc: 0.788795
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[940] valid_0's auc: 0.788885
[950] valid_0's auc: 0.788999
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[960] valid_0's auc: 0.78913
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[970] valid_0's auc: 0.789223
[980] valid_0's auc: 0.789268
[990] valid_0's auc: 0.789386
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1000] valid_0's auc: 0.789435
[1010] valid_0's auc: 0.789506
[1020] valid_0's auc: 0.78959
[1030] valid_0's auc: 0.789604
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1040] valid_0's auc: 0.78974
[1050] valid_0's auc: 0.789782
[1060] valid_0's auc: 0.789851
[1070] valid_0's auc: 0.789835
[1080] valid_0's auc: 0.789897
[1090] valid_0's auc: 0.790064
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1100] valid_0's auc: 0.790104
[1110] valid_0's auc: 0.790167
[1120] valid_0's auc: 0.790201
[1130] valid_0's auc: 0.790249
[1140] valid_0's auc: 0.790304
[1150] valid_0's auc: 0.790427
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1160] valid_0's auc: 0.790469
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1170] valid_0's auc: 0.790526
[1180] valid_0's auc: 0.79061
[1190] valid_0's auc: 0.790639
[1200] valid_0's auc: 0.790697
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1210] valid_0's auc: 0.790709
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1220] valid_0's auc: 0.790739
[1230] valid_0's auc: 0.790789
[1240] valid_0's auc: 0.790839
[1250] valid_0's auc: 0.790864
[1260] valid_0's auc: 0.790867
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1270] valid_0's auc: 0.790861
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1280] valid_0's auc: 0.790886
[1290] valid_0's auc: 0.790969
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1300] valid_0's auc: 0.79098
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1310] valid_0's auc: 0.791042
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1320] valid_0's auc: 0.791087
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1330] valid_0's auc: 0.791123
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1340] valid_0's auc: 0.791184
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1350] valid_0's auc: 0.791222
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1360] valid_0's auc: 0.791258
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1370] valid_0's auc: 0.791272
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1380] valid_0's auc: 0.791317
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1390] valid_0's auc: 0.791296
[1400] valid_0's auc: 0.791344
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1410] valid_0's auc: 0.791378
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1420] valid_0's auc: 0.791394
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1430] valid_0's auc: 0.791396
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1440] valid_0's auc: 0.791416
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1450] valid_0's auc: 0.791402
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1460] valid_0's auc: 0.791456
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1470] valid_0's auc: 0.791508
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1480] valid_0's auc: 0.791575
[1490] valid_0's auc: 0.791618
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1500] valid_0's auc: 0.791627
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1510] valid_0's auc: 0.791631
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1520] valid_0's auc: 0.791684
[1530] valid_0's auc: 0.791708
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1540] valid_0's auc: 0.791757
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1550] valid_0's auc: 0.791805
[1560] valid_0's auc: 0.791905
[1570] valid_0's auc: 0.791901
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1580] valid_0's auc: 0.791908
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1590] valid_0's auc: 0.791885
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1600] valid_0's auc: 0.791902
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1610] valid_0's auc: 0.791927
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1620] valid_0's auc: 0.791915
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1630] valid_0's auc: 0.79194
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1640] valid_0's auc: 0.791969
[1650] valid_0's auc: 0.791978
[1660] valid_0's auc: 0.791981
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1670] valid_0's auc: 0.791989
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1680] valid_0's auc: 0.79198
[1690] valid_0's auc: 0.791922
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1700] valid_0's auc: 0.791901
[1710] valid_0's auc: 0.791903
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
[1720] valid_0's auc: 0.791907
[LightGBM] [Warning] No further splits with positive gain, best gain: -inf
Early stopping, best iteration is:
[1671] valid_0's auc: 0.792001
params: {'learning_rate': 0.01, 'max_depth': 7, 'num_leaves': 48, 'subsample': 0.8, 'colsample_bytree': 0.5, 'min_split_gain': 0.025, 'reg_alpha': 0.5, 'reg_lambda': 0.5}, AUC: 0.7920
In [11]:
print(f"\nBest AUC: {best_auc:.4f}")
print(f"Best Parameters: {best_params}")
print(f"\nBest AUC: {best_auc:.4f}")
print(f"Best Parameters: {best_params}")
Best AUC: 0.7920
Best Parameters: {'learning_rate': 0.01, 'max_depth': 7, 'num_leaves': 48, 'subsample': 0.8, 'colsample_bytree': 0.5, 'min_split_gain': 0.025, 'reg_alpha': 0.5, 'reg_lambda': 0.5}
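Once the best pipeline is selected, you will typically want to persist it so it can be reused for scoring. A minimal sketch, assuming the LightGBMPipeline object from modeling_script can be serialized with joblib (the file name is arbitrary):

import joblib

# Persist the best pipeline for later batch or online scoring
joblib.dump(lgbm_pipeline, "lgbm_pipeline_credit_default.joblib")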
SHAP Importance¶
In [12]:
pd.set_option('display.max_rows', None) # Show all rows
pd.set_option('display.max_colwidth', 100) # Show full column content
In [13]:
_, feature_importance = lgbm_pipeline.compute_shap(features_train.sample(n=10000, random_state=88))
feature_importance[["feature", "cumulative_importance_percent"]]
Out[13]:
| | feature | cumulative_importance_percent |
|---|---|---|
| 0 | NEW_APPLICATION_EXT_SOURCE_2 | 0.101159 |
| 1 | NEW_APPLICATION_EXT_SOURCE_3 | 0.191307 |
| 2 | NEW_APPLICATION_EXT_SOURCE_1 | 0.240541 |
| 3 | NEW_APPLICATION_Credit-Goods_Difference | 0.280781 |
| 4 | NEW_APPLICATION_DAYS_EMPLOYED | 0.320257 |
| 5 | CLIENT_Max_of_Active_Cr_active_BureauReportedCredits_AMT_CREDIT_SUM_DEBT_To_AMT_CREDIT_SUMs_104w | 0.357981 |
| 6 | NEW_APPLICATION_AMT_ANNUITY_To_AMT_CREDIT | 0.395454 |
| 7 | CLIENT_GENDER | 0.430334 |
| 8 | CLIENT_Max_of_PriorApplications_CNT_PAYMENTs_104w | 0.459419 |
| 9 | CLIENT_EDUCATION_TYPE | 0.488170 |
| 10 | NEW_APPLICATION_AMT_ANNUITY | 0.514980 |
| 11 | CLIENT_Installments_AMT_PAYMENTs_by_PriorApplication_CLIENT_TYPE_24cMo | 0.538778 |
| 12 | CLIENT_FAMILY_STATUS | 0.561909 |
| 13 | CLIENT_Avg_of_Consumer_credit_Cr_type_BureauReportedCredits_End_to_Update_Gaps_104w | 0.583095 |
| 14 | CLIENT_Min_of_Credit_card_monthly_balance_records_Available_Credits_6cMo | 0.604074 |
| 15 | CLIENT_Min_of_Installments_AMT_PAYMENTs_24cMo | 0.624995 |
| 16 | CLIENT_Age | 0.644880 |
| 17 | CLIENT_Avg_of_BureauReportedCredits_Available_Credits_104w | 0.664721 |
| 18 | CLIENT_PriorApplications_AMT_CREDITs_by_PriorApplication_YIELD_GROUP_104w | 0.683896 |
| 19 | NEW_APPLICATION_AMT_GOODS_PRICE | 0.702805 |
| 20 | CLIENT_Max_of_Loan_terminations_Loan_PriorApplication_AMT_APPLICATION_To_AMT_CREDITs_104w | 0.721424 |
| 21 | CLIENT_ORGANIZATION_TYPE | 0.739895 |
| 22 | CLIENT_Installments_AMT_PAYMENTs_by_PriorApplication_YIELD_GROUP_24cMo | 0.758206 |
| 23 | NEW_APPLICATION_DAYS_ID_PUBLISH | 0.775803 |
| 24 | CLIENT_Max_of_Installments_PriorApplication_AMT_ANNUITY_To_AMT_CREDITs_6cMo | 0.793042 |
| 25 | CLIENT_Avg_of_BureauReportedCredits_Available_Credits_26w | 0.810137 |
| 26 | CLIENT_Installments_AMT_PAYMENTs_by_INSTALLMENT_STATUS_12cMo | 0.826406 |
| 27 | NEW_APPLICATION_REGION_POPULATION_RELATIVE | 0.842601 |
| 28 | CLIENT_Max_of_Installments_Days_Difference_Actual_vs_Scheduleds_24cMo | 0.858608 |
| 29 | CLIENT_Avg_of_Consumer_credit_Cr_type_BureauReportedCredits_AMT_CREDIT_SUMs_104w | 0.873988 |
| 30 | CLIENT_Installments_AMT_PAYMENTs_by_PriorApplication_PAYMENT_TYPE_24cMo | 0.889068 |
| 31 | CLIENT_Time_To_Latest_Approved_Contract_status_PriorApplication_last_due_1st_version_timestamp_104w | 0.903369 |
| 32 | NEW_APPLICATION_FLAG_DOCUMENT_3 | 0.917451 |
| 33 | CLIENT_Std_of_Credit_card_monthly_balance_records_CNT_DRAWINGS_ATM_CURRENTs_24cMo | 0.930678 |
| 34 | CLIENT_PriorApplications_AMT_CREDITs_by_PriorApplication_NFLAG_INSURED_ON_APPROVAL_52w | 0.943479 |
| 35 | NEW_APPLICATION_FLOORSMAX_MODE | 0.956245 |
| 36 | CLIENT_Entropy_of_count_of_Installments_by_INSTALLMENT_STATUS_24cMo | 0.968924 |
| 37 | CLIENT_Installments_AMT_PAYMENTs_by_INSTALLMENT_STATUS_6cMo | 0.981031 |
| 38 | CLIENT_Std_of_BureauReportedCredits_Available_Credits_26w | 0.991874 |
| 39 | CLIENT_Max_of_Consumer_credit_Cr_type_BureauReportedCredits_AMT_CREDIT_SUM_DEBT_To_AMT_CREDIT_SU... | 1.000000 |
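The cumulative_importance_percent column makes it straightforward to prune the feature list, for example by keeping only the features that together account for the first 95% of SHAP importance. A minimal sketch on the feature_importance dataframe returned above:

# Keep the features covering the first 95% of cumulative SHAP importance
selected_features = feature_importance.loc[
    feature_importance["cumulative_importance_percent"] <= 0.95, "feature"
].tolist()
print(f"{len(selected_features)} of {len(feature_importance)} features retained")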
Iterate¶
Check out Credit Default UI Tutorials to get more ideas.