Program :
import pandas as pd
from sklearn.tree import DecisionTreeClassifier, plot_tree
from sklearn.ensemble import RandomForestClassifier
from sklearn.preprocessing import LabelEncoder
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
df = pd.read_csv('car_evaluation.csv')
# Replace '5more' with 5 in doors and 'more' with 5 in persons
df['doors'] = df['doors'].replace('5more', '5').astype(int)
df['persons'] = df['persons'].replace('more', '5').astype(int)
# Columns to encode (all except 'doors' and 'persons')
cols_to_encode = [col for col in df.columns if col not in ['doors', 'persons', 'class']]
# Encode categorical columns except doors and persons
le_dict = {}
for col in cols_to_encode + ['class']:  # also encode the target 'class'
    le = LabelEncoder()
    df[col] = le.fit_transform(df[col])
    le_dict[col] = le  # store LabelEncoders in case you need to decode later
df
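# Optional sanity check (not part of the original listing): print how each stored
# LabelEncoder mapped the original category strings to integer codes.
for col, le in le_dict.items():
    print(col, dict(zip(le.classes_, le.transform(le.classes_))))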
# Split features and target
X = df.drop('class', axis=1)
y = df['class']
# Split data into train and test sets for evaluation
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0)
# Decision Tree Classifier
dt_clf = DecisionTreeClassifier(criterion='entropy', max_depth=4, random_state=0)
dt_clf.fit(X_train, y_train)
y_pred_dt = dt_clf.predict(X_test)
# Train Random Forest with limited depth
rf_clf = RandomForestClassifier(n_estimators=100, max_depth=4, random_state=42)
rf_clf.fit(X_train, y_train)
y_pred_rf = rf_clf.predict(X_test)
# Evaluation function
def evaluate_model(y_true, y_pred, model_name):
    print(f"--- {model_name} Evaluation ---")
    print(f"Accuracy: {accuracy_score(y_true, y_pred):.4f}")
    print(f"Precision: {precision_score(y_true, y_pred, average='weighted'):.4f}")
    print(f"Recall: {recall_score(y_true, y_pred, average='weighted'):.4f}")
    print(f"F1 Score: {f1_score(y_true, y_pred, average='weighted'):.4f}")
    print("\n")
# Print evaluations
evaluate_model(y_test, y_pred_dt, "Decision Tree")
evaluate_model(y_test, y_pred_rf, "Random Forest")
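# Optional check (not part of the original listing): use the stored 'class' encoder
# to map the numeric Decision Tree predictions back to the original class labels.
decoded_dt_preds = le_dict['class'].inverse_transform(y_pred_dt)
print(decoded_dt_preds[:10])  # first few predictions shown as original label strings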
# Plot Decision Tree
plt.figure(figsize=(20, 10))
plot_tree(dt_clf, feature_names=X.columns, class_names=le_dict['class'].classes_,
          filled=True, rounded=True, fontsize=12)
plt.title("Decision Tree")
plt.show()
# Plot the first tree of the Random Forest (no max_depth needed in plot_tree; the tree is already depth-limited)
plt.figure(figsize=(16, 8))
plot_tree(
    rf_clf.estimators_[0],
    feature_names=X.columns,
    class_names=le_dict['class'].classes_,
    filled=True,
    rounded=True,
    fontsize=10
)
plt.title("Random Forest - Tree 1 (Limited Depth)")
plt.show()
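# Optional extension (not part of the original program): rank features by the
# Random Forest's impurity-based importances to see which attributes drive the model.
importances = pd.Series(rf_clf.feature_importances_, index=X.columns).sort_values(ascending=False)
print(importances)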
Output :