Feb 26, 2024

Ridge regression

Linear least squares with l2 regularization.

Ref.: https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.Ridge.html
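
Before the scikit-learn version, a rough sketch of what Ridge actually solves: ordinary least squares plus an L2 penalty alpha * ||w||^2 on the coefficients, which has a closed-form solution. The tiny X_demo / y_demo arrays below are made-up illustration data (not the CSV used later), and the intercept is ignored:

import numpy as np

# Closed-form ridge solution: w = (X^T X + alpha * I)^{-1} X^T y
rng = np.random.default_rng(0)
X_demo = rng.normal(size=(20, 3))               # made-up features
y_demo = X_demo @ np.array([1.5, -2.0, 0.5])    # made-up target
alpha = 0.1
w = np.linalg.solve(X_demo.T @ X_demo + alpha * np.eye(3),
                    X_demo.T @ y_demo)
print(w)   # coefficient estimates, shrunk toward zero by the penalty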

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.linear_model import Ridge
from sklearn.model_selection import train_test_split
from sklearn.metrics import r2_score, mean_squared_error

# Load the data
df = pd.read_csv('./data/xxx.csv')

# Separate the target from the features
y = df['y']
X = df.drop('y', axis=1)

# Split into train/test sets (7:3) with a fixed random seed
X_train, X_test, y_train, y_test = train_test_split(X,
                                                    y,
                                                    test_size=0.3,
                                                    random_state=42)


# Define the model (L2 penalty strength alpha = 0.1)
rg = Ridge(alpha=0.1)
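
Here alpha = 0.1 is simply a fixed choice. If you would rather pick it by cross-validation, scikit-learn's RidgeCV is one option; a minimal sketch, reusing X_train / y_train from above (the alpha grid is arbitrary):

from sklearn.linear_model import RidgeCV

# Try a few regularization strengths with built-in cross-validation
rg_cv = RidgeCV(alphas=[0.01, 0.1, 1.0, 10.0])
rg_cv.fit(X_train, y_train)
print("Selected alpha:", rg_cv.alpha_)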

# Train model
rg.fit(X_train, y_train)

# Model prediction
rg_pred = rg.predict(X_test)

# Inspect the fitted coefficients
print("Ridge coefficients:", rg.coef_)

# Error metrics
print("Ridge-r2 score:", r2_score(y_test, rg_pred))
print("Ridge-RMSE:", np.sqrt(mean_squared_error(y_test, rg_pred)))

# Plot the coefficient learned for each feature
plt.bar(X_train.columns, rg.coef_)
plt.ylabel('coefficient')
plt.show()
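
The single alpha = 0.1 fit does not show the effect of the penalty itself. A quick sketch (reusing X_train / y_train; the logarithmic alpha grid is arbitrary) of how the coefficients shrink toward zero as alpha grows:

# Refit over a range of alphas and plot the coefficient paths
alphas = np.logspace(-3, 3, 20)
coefs = [Ridge(alpha=a).fit(X_train, y_train).coef_ for a in alphas]

plt.figure()
plt.plot(alphas, coefs)   # one line per feature
plt.xscale('log')
plt.xlabel('alpha')
plt.ylabel('coefficient value')
plt.title('Ridge coefficient shrinkage')
plt.show()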

