In C++, implementing a decision tree algorithm typically involves the following steps: computing the entropy of the label set, computing the information gain of each feature, recursively splitting on the feature with the highest gain until a stopping condition is met, and finally walking the built tree to predict the label of a new sample.
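Concretely, the splitting criterion used here is information gain, defined in terms of Shannon entropy:

H(S) = -\sum_{c} p_c \log_2 p_c

IG(S, A) = H(S) - \sum_{v} \frac{|S_v|}{|S|} H(S_v)

where p_c is the fraction of samples in S carrying label c, and S_v is the subset of S whose feature A takes the value v. The entropy() and informationGain() functions below compute exactly these two quantities.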
The following is a simple C++ code example that shows how these steps fit together:
#include <iostream>
#include <vector>
#include <map>
#include <algorithm>
#include <cmath>
using namespace std;
// Compute the Shannon entropy of a set of labels.
double entropy(const vector<int>& labels) {
    map<int, int> count;
    for (int label : labels) {
        count[label]++;
    }
    double result = 0;
    for (auto& kv : count) {
        double p = kv.second / static_cast<double>(labels.size());
        result += -p * log2(p);
    }
    return result;
}
// Compute the information gain obtained by splitting on the given feature.
double informationGain(const vector<vector<int>>& data, const vector<int>& labels, int featureIndex) {
    double initialEntropy = entropy(labels);
    double weightedEntropy = 0;
    // Group the labels by the value this feature takes in each sample.
    map<int, vector<int>> featureValues;
    for (size_t i = 0; i < data.size(); ++i) {
        featureValues[data[i][featureIndex]].push_back(labels[i]);
    }
    for (auto& kv : featureValues) {
        double p = kv.second.size() / static_cast<double>(labels.size());
        weightedEntropy += p * entropy(kv.second);
    }
    return initialEntropy - weightedEntropy;
}
// A node of the decision tree.
struct Node {
    int featureIndex = -1;    // feature used to split at this node (-1 for a leaf)
    int label = -1;           // majority label of the samples that reach this node
    map<int, Node*> children; // one child per observed value of the split feature
};
// Build the decision tree recursively by splitting on the feature with the highest information gain.
Node* buildTree(const vector<vector<int>>& data, const vector<int>& labels, int depth) {
    if (labels.empty()) {
        return nullptr;
    }
    Node* node = new Node();
    // Store the majority label so the node can serve as a leaf.
    map<int, int> labelCount;
    for (int label : labels) {
        labelCount[label]++;
    }
    node->label = max_element(labelCount.begin(), labelCount.end(),
                              [](const pair<int, int>& a, const pair<int, int>& b) {
                                  return a.second < b.second;
                              })->first;
    // Stop splitting when the depth limit is reached or the node is already pure.
    if (depth == 0 || labelCount.size() == 1) {
        return node;
    }
    // Pick the feature with the highest information gain.
    int bestFeatureIndex = -1;
    double bestInformationGain = 0;
    for (size_t i = 0; i < data[0].size(); ++i) {
        double gain = informationGain(data, labels, i);
        if (gain > bestInformationGain) {
            bestInformationGain = gain;
            bestFeatureIndex = static_cast<int>(i);
        }
    }
    // No feature improves purity: keep this node as a leaf.
    if (bestFeatureIndex == -1) {
        return node;
    }
    node->featureIndex = bestFeatureIndex;
    // Partition the samples and labels by the value of the chosen feature.
    map<int, vector<int>> featureValues;
    for (size_t i = 0; i < data.size(); ++i) {
        featureValues[data[i][bestFeatureIndex]].push_back(labels[i]);
    }
    for (auto& kv : featureValues) {
        vector<vector<int>> subData;
        vector<int> subLabels = kv.second;
        for (size_t i = 0; i < data.size(); ++i) {
            if (data[i][bestFeatureIndex] == kv.first) {
                subData.push_back(data[i]);
            }
        }
        node->children[kv.first] = buildTree(subData, subLabels, depth - 1);
    }
    return node;
}
// Predict the label of a sample by walking the tree from the root.
int predict(Node* node, const vector<int>& sample) {
    if (!node) {
        return -1;
    }
    // Leaf node: return the stored label.
    if (node->children.empty()) {
        return node->label;
    }
    int featureValue = sample[node->featureIndex];
    auto it = node->children.find(featureValue);
    if (it != node->children.end()) {
        return predict(it->second, sample);
    }
    // Unseen feature value: fall back to this node's majority label.
    return node->label;
}
int main() {
    // Example data: each row is a sample with three integer features.
    vector<vector<int>> data = {
        {1, 2, 0},
        {2, 3, 0},
        {3, 2, 1},
        {4, 3, 1},
        {5, 2, 0},
        {6, 3, 1},
    };
    vector<int> labels = {0, 0, 1, 1, 0, 1};
    // Build the decision tree with a maximum depth of 3.
    Node* root = buildTree(data, labels, 3);
    // Predict the label of a new sample.
    vector<int> sample = {3, 2, 0};
    int prediction = predict(root, sample);
    cout << "Prediction: " << prediction << endl;
    return 0;
}
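Assuming the code is saved as decision_tree.cpp (the filename is only an illustration), it can be compiled and run with, for example, g++ -std=c++11 -o decision_tree decision_tree.cpp && ./decision_tree. With the leaf handling above, the sample {3, 2, 0} follows the branch of the training row {3, 2, 1}, so the program prints Prediction: 1.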
This example only demonstrates the basic process of building a decision tree and making predictions; real applications require further modification and optimization for the specific problem.
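One concrete refinement worth noting: the example allocates every Node with new and never releases it. A minimal cleanup helper could look like the sketch below (the name freeTree is chosen here for illustration and is not part of the example above):

// Recursively delete every node in the tree (illustrative helper, not part of the original example).
void freeTree(Node* node) {
    if (!node) {
        return;
    }
    for (auto& kv : node->children) {
        freeTree(kv.second);
    }
    delete node;
}

Calling freeTree(root); at the end of main (or managing the children with smart pointers) avoids leaking the allocated nodes.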