# 熵權係數法(Entropy Weight Method)之程式碼
```cpp=
#include <bits/stdc++.h>
using namespace std;
// Entropy Weight Method (EWM): derives objective per-criterion weights for an
// N x M decision matrix read from "Data.txt" and writes the M weights to
// "Weight.txt".
const int N = 3, M = 12; // N: rows (alternatives), M: columns (criteria)

class Weight {
private:
    double x[N][M];        // data matrix; overwritten in place by Normalization()
    double Max[M], Min[M]; // per-column extrema
    double Sum[M];         // per-column sum of normalized values
    double p[N][M];        // proportions: p[i][j] = x[i][j] / Sum[j]
    double e[M];           // normalized Shannon entropy of each column, in [0, 1]
    double d[M];           // redundancy (degree of diversification): 1 - e[j]
    double sum_d;          // total redundancy across all columns
    double w[M];           // final weights: w[j] = d[j] / sum_d

public:
    // Reads the N x M matrix (whitespace-separated) from "Data.txt".
    void Input() {
        std::ifstream infile("Data.txt", std::ios::in);
        assert(infile); // fail fast instead of reading garbage from a bad stream
        for (int i = 0; i < N; i++) {
            for (int j = 0; j < M; j++) {
                infile >> x[i][j];
            }
        }
    }

    // Writes the M weights, space-separated, to "Weight.txt".
    void Output() {
        std::ofstream outfile("Weight.txt", std::ios::out);
        assert(outfile); // fail fast if the output file cannot be created
        for (int j = 0; j < M; j++) {
            outfile << w[j] << ' ';
        }
    }

    // Min-max normalizes each column into [1, 2] and accumulates column sums.
    // The +1 shift keeps every value strictly positive so log(p) in Entropy()
    // is always well defined.
    void Normalization() {
        // Seed the extrema with +/- infinity rather than magic +/-1e9 bounds,
        // so data of any magnitude is handled correctly.
        std::fill(Max, Max + M, -std::numeric_limits<double>::infinity());
        std::fill(Min, Min + M, std::numeric_limits<double>::infinity());
        for (int j = 0; j < M; j++) {
            for (int i = 0; i < N; i++) {
                Max[j] = std::max(Max[j], x[i][j]);
                Min[j] = std::min(Min[j], x[i][j]);
            }
        }
        for (int j = 0; j < M; j++) {
            const double range = Max[j] - Min[j];
            Sum[j] = 0;
            for (int i = 0; i < N; i++) {
                // Guard against a constant column: range == 0 would otherwise
                // divide by zero and poison every later stage with NaN. Map
                // such a column to the uniform shifted value 1, which yields
                // entropy 1 and therefore weight 0 — the correct EWM result
                // for a criterion that carries no information.
                x[i][j] = (range > 0) ? (x[i][j] - Min[j]) / range + 1 : 1;
                Sum[j] += x[i][j];
            }
        }
    }

    // p[i][j]: share of row i within column j.
    void Proportion() {
        for (int j = 0; j < M; j++) {
            for (int i = 0; i < N; i++) {
                p[i][j] = x[i][j] / Sum[j];
            }
        }
    }

    // Shannon entropy of each column, scaled by k = 1/ln(N) so e[j] is in [0, 1].
    void Entropy() {
        const double k = 1.0 / std::log(static_cast<double>(N));
        for (int j = 0; j < M; j++) {
            double acc = 0;
            for (int i = 0; i < N; i++) {
                acc += p[i][j] * std::log(p[i][j]); // p > 0 by construction
            }
            e[j] = -k * acc;
        }
    }

    // d[j] = 1 - e[j]: the less uniform a column, the larger its redundancy.
    void Redundancy() {
        sum_d = 0;
        for (int j = 0; j < M; j++) {
            d[j] = 1 - e[j];
            sum_d += d[j];
        }
    }

    // Normalizes redundancies into weights summing to 1. If every column is
    // constant (sum_d == 0) fall back to uniform weights instead of dividing
    // by zero.
    void _Weight() {
        for (int j = 0; j < M; j++) {
            w[j] = (sum_d > 0) ? d[j] / sum_d : 1.0 / M;
        }
    }

    // Debug dump of the normalized matrix.
    void print_Normalization() {
        for (int i = 0; i < N; i++) {
            for (int j = 0; j < M; j++) {
                std::cout << x[i][j] << ' ';
            }
            std::cout << '\n';
        }
    }

    // Debug dump of the per-column entropies.
    void print_Entropy() {
        for (int j = 0; j < M; j++) {
            std::cout << e[j] << ' ';
        }
        std::cout << '\n';
    }
};
int main(){
Weight weight;
weight.Input();
weight.Normalization();
weight.print_Normalization();
weight.Proportion();
weight.Entropy();
weight.print_Entropy();
weight.Redundancy();
weight._Weight();
weight.Output();
return 0;
}
```