The architectural principle of "variables are memory" and of pointer usage:
1. Use a struct to record the network architecture, e.g. float*** ws is the pointer (an address) to the weight matrices;
2. Use = (float*)malloc(Num * sizeof(float)) to allocate memory for the actual variables;
3. Use = (float**)malloc(Num * sizeof(float*)) to allocate memory for the pointers that point to those variables (the 1-D arrays), i.e. for the variables that themselves hold pointers.
See the code below; a condensed sketch of the pattern comes first.
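As a condensed illustration of steps 1–3 (a minimal sketch only; the names Net, alloc_ws, layers and sizes are illustrative and do not appear in the programs below):
- #include <stdlib.h>
-
- // Step 1: the struct only stores the top-level pointer (an address), not the data itself.
- typedef struct { float*** ws; } Net;
-
- // Steps 2 and 3: allocate the pointer arrays, then the float rows they point to.
- // `sizes` is assumed to hold layers+1 layer widths, as in the programs below.
- float*** alloc_ws(int layers, const int* sizes) {
-     float*** ws = (float***)malloc(layers * sizeof(float**));          // pointers to matrices
-     for (int i = 0; i < layers; ++i) {
-         ws[i] = (float**)malloc(sizes[i] * sizeof(float*));            // pointers to rows
-         for (int j = 0; j < sizes[i]; ++j)
-             ws[i][j] = (float*)malloc(sizes[i + 1] * sizeof(float));   // the actual floats
-     }
-     return ws;
- }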
- // test22动态数组22多维数组23三维随机数230101.cpp : This file contains the "main" function. Program execution will begin and end there.
-
- #include <iostream>   // cout
- #include <cstdio>     // printf
- #include <cstdlib>    // malloc, rand, srand
- #include <ctime>      // time
- using namespace std;
-
- typedef struct {
- float*** ws;
- int num1;
- float** layer_outputs;
-
- }NeuralN;
-
- //Initialize the neural network's weight matrices, etc.
- NeuralN init(int* t01, int num02) {
- NeuralN nn;
- nn.num1 = num02;
-
- nn.ws = (float***)malloc((num02 - 1) * sizeof(float**) );
-
- srand(time(NULL));
-
- cout << " [num02:" << num02 << endl;
-
- for (int i = 0; i <(num02 - 1); ++i) {
- nn.ws[i] = (float**)malloc( t01[i] * sizeof(float*) ); //allocate memory for the row pointers
- for (int j = 0; j < t01[i]; ++j) {
- nn.ws[i][j] = (float*)malloc( t01[i + 1 ] * sizeof(float) ); //allocate memory for the actual variables
- for (int k = 0; k < t01[i + 1]; k++) {
- //The next statement uses the variable, i.e. uses the variable's memory.
- nn.ws[i][j][k] = (float)rand() / RAND_MAX;
- }//for330k
- }//for220j
-
- }//for110i
-
- return nn;
-
- }//init
-
- int main()
- {
- int t001[] = { 2, 8, 7, 6, 1, 2, 1 };
-
- //#define Num4 4
- //Count out the dynamic array's length with a range-based for loop
- int Len_t001 = 0; for (int ii : t001) { ++Len_t001; }
-
- int Numm = Len_t001;
- cout << "Numm:"<
-
- NeuralN nn = init(t001, Numm);// Num4);
-
- //
- // for(float ii: (nn.ws[0][1]) )
- //
- //Print the 3-D tensor (i.e. the contents of the 3-D array)
- for (int i = 0; i < Numm - 1; ++i) {
- // nn.layer_outputs[i + 1] = (float*)malloc(t001[i + 1] * sizeof(float));
- printf("_{ i%d_", i);
- for (int j = 0; j < t001[i + 1]; ++j) {
- // nn.layer_outputs[i + 1][j] = 0;
- printf("[j%d", j);
- for (int k = 0; k < t001[i]; ++k) {
-
- printf("(k%d(%.1f,", k, nn.ws[i][k][j]);
- }//
- printf("_} \n");
-
- }//for220j
- printf("\n");
- }//for110i
-
- std::cout << "Hello World!\n";
- }//main
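The demo above never releases what init() allocated. A cleanup loop symmetric to the allocation (a sketch reusing the nn, t001 and Numm names from main; free the innermost blocks first) could be added before main() ends:
- for (int i = 0; i < Numm - 1; ++i) {
-     for (int j = 0; j < t001[i]; ++j) {
-         free(nn.ws[i][j]);   // rows of t001[i+1] floats
-     }
-     free(nn.ws[i]);          // array of row pointers
- }
- free(nn.ws);                 // top-level array of weight matrices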
Second version (231001)
- #include <stdio.h>    // printf
- #include <stdlib.h>   // malloc, free, rand, srand
- #include <math.h>     // exp
- #include <time.h>     // time
- #include <windows.h>  // SetConsoleTextAttribute, GetStdHandle
-
- #define LEARNING_RATE 0.05
-
- // Sigmoid and its derivative
- float sigmoid(float x) { return 1 / (1 + exp(-x));}
-
- float sigmoid_derivative(float x) {
- float sig = sigmoid(x);
- // Note: backpropagate() passes the already-activated layer output in here, so
- // sigmoid gets applied a second time; the textbook factor for an activation a
- // would simply be a * (1 - a).
- return sig * (1 - sig);
- }
-
- typedef struct {
- float*** weights;
- int num_layers;
- int* layer_sizes;
- float** layer_outputs;
- float** deltas;
- } NeuralNetwork;
-
- NeuralNetwork initialize_nn(int* topology, int num_layers) {
- NeuralNetwork nn;
- nn.num_layers = num_layers;
- nn.layer_sizes = topology;
-
- // Allocate memory for weights, layer outputs, and deltas
- nn.weights = (float***)malloc((num_layers - 1) * sizeof(float**));
- nn.layer_outputs = (float**)malloc(num_layers * sizeof(float*));
- nn.deltas = (float**)malloc((num_layers - 1) * sizeof(float*));
-
- srand(time(NULL));
- for (int i = 0; i < num_layers - 1; i++) {
- nn.weights[i] = (float**)malloc(topology[i] * sizeof(float*));
- nn.deltas[i] = (float*)malloc(topology[i + 1] * sizeof(float));
- for (int j = 0; j < topology[i]; j++) {
- nn.weights[i][j] = (float*)malloc(topology[i + 1] * sizeof(float));
- for (int k = 0; k < topology[i + 1]; k++) {
- nn.weights[i][j][k] = ((float)rand() / RAND_MAX) * 2.0f - 1.0f; // Random weights between -1 and 1
- }
- }//for220j
- }//for110i
- return nn;
- }//NeuralNetwork initialize_nn
-
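- /*
-  * What feedforward() computes, layer by layer (the notation is descriptive, not from
-  * the original code):
-  *   layer_outputs[i+1][j] = sigmoid( sum_k layer_outputs[i][k] * weights[i][k][j] )
-  * Note that every call mallocs fresh layer_outputs[i+1] buffers and the old ones are
-  * never freed, so long training runs leak memory; allocating them once in
-  * initialize_nn() would avoid that.
-  */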
- float* feedforward(NeuralNetwork* nn, float* input) {
- nn->layer_outputs[0] = input;
- for (int i = 0; i < nn->num_layers - 1; i++) {
- nn->layer_outputs[i + 1] = (float*)malloc(nn->layer_sizes[i + 1] * sizeof(float));
- for (int j = 0; j < nn->layer_sizes[i + 1]; j++) {
- nn->layer_outputs[i + 1][j] = 0;
- for (int k = 0; k < nn->layer_sizes[i]; k++) {
- // int A01 = 01;
- nn->layer_outputs[i + 1][j] += nn->layer_outputs[i][k] * nn->weights[i][k][j];
-
- }//for330k
- nn->layer_outputs[i + 1][j] = sigmoid(nn->layer_outputs[i + 1][j]);
- }//for220j
- }//for110i
- return nn->layer_outputs[nn->num_layers - 1];
- }//feedforward
-
-
- void feedLoss(NeuralNetwork* nn, float* target) {
-
- //Display the weight matrices:
- //nn->layer_outputs[0] = input;
- for (int i = 0; i < nn->num_layers - 1; i++) {
- nn->layer_outputs[i + 1] = (float*)malloc(nn->layer_sizes[i + 1] * sizeof(float));
- for (int j = 0; j < nn->layer_sizes[i + 1]; j++) {
- nn->layer_outputs[i + 1][j] = 0;
- for (int k = 0; k < nn->layer_sizes[i]; k++) {
- //nn->layer_outputs[i + 1][j] += nn->layer_outputs[i][k] * nn->weights[i][k][j];
- // Print positive weights in blue, negative weights in red
- if (0 < nn->weights[i][k][j]) {
- SetConsoleTextAttribute(GetStdHandle(STD_OUTPUT_HANDLE), FOREGROUND_BLUE);
- }
- else {
- SetConsoleTextAttribute(GetStdHandle(STD_OUTPUT_HANDLE), FOREGROUND_RED);
- }
- printf("(%.4f,", nn->weights[i][k][j]);
- }
- printf("] \n");
- nn->layer_outputs[i + 1][j] = sigmoid(nn->layer_outputs[i + 1][j]);
- }//for220j
- SetConsoleTextAttribute(GetStdHandle(STD_OUTPUT_HANDLE), FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE);
- printf("};\n");
- }//for110i
- printf("_]};\n \n");
- //
-
- int Last01 = nn->num_layers - 1;
- // Calculate output layer deltas
- for (int i = 0; i < nn->layer_sizes[Last01]; ++i ) {
- float error = target[i] - nn->layer_outputs[Last01][i];
- printf("[i%d:%f] ", i, error);
- // nn->deltas[Last01 - 1][i] = error * sigmoid_derivative(nn->layer_outputs[Last01][i]);
- }
-
- // Calculate hidden layer deltas
-
- }//feedLoss(NeuralNetwork* nn, float* target
-
-
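- /*
-  * backpropagate() implements plain gradient descent without bias terms, roughly
-  * (this summary is descriptive, not from the original comments; out_i means layer_outputs[i]):
-  *   output layer : deltas[L-1][j]    = (target[j] - out_L[j]) * sigmoid_derivative(out_L[j])
-  *   hidden layers: deltas[i-1][j]    = ( sum_k weights[i][j][k] * deltas[i][k] ) * sigmoid_derivative(out_i[j])
-  *   update       : weights[i][j][k] += LEARNING_RATE * deltas[i][k] * out_i[j]
-  */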
- void backpropagate(NeuralNetwork* nn, float* target) {
- int Last01 = nn->num_layers - 1;
-
- // Calculate output layer deltas
- for (int i = 0; i < nn->layer_sizes[Last01]; i++) {
- float error = target[i] - nn->layer_outputs[Last01][i];
- nn->deltas[Last01 - 1][i] = error * sigmoid_derivative(nn->layer_outputs[Last01][i]);
- }
-
- // Calculate hidden layer deltas
- for (int i = Last01 - 1; i > 0; i--) {
- for (int j = 0; j < nn->layer_sizes[i]; j++) {
- float sum = 0;
- for (int k = 0; k < nn->layer_sizes[i + 1]; k++) {
- sum += nn->weights[i][j][k] * nn->deltas[i][k];
- }
- nn->deltas[i - 1][j] = sum * sigmoid_derivative(nn->layer_outputs[i][j]);
- }
- }
-
- // Adjust weights
- for (int i = 0; i < Last01; i++) {
- for (int j = 0; j < nn->layer_sizes[i]; j++) {
- for (int k = 0; k < nn->layer_sizes[i + 1]; k++) {
- nn->weights[i][j][k] += LEARNING_RATE * nn->deltas[i][k] * nn->layer_outputs[i][j];
- }
- }
- }//
- }//backpropagate(NeuralNetwork* nn, float* target
-
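- /*
-  * train() runs num_epochs passes over the samples; on every Num10000-th epoch it also
-  * calls feedLoss() for each sample, dumping the color-coded weight matrices and the
-  * per-sample output error, then goes back to silent training. (Summary comment, not
-  * from the original.)
-  */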
- void train(NeuralNetwork* nn, float inputs[][2], float* targets, int num_samples, int num_epochs) {
- float* outputs;
- bool whetherOutputLoss = 0;
- #define Num10000 100000 // epoch interval for printing weights and per-sample error
- for (int epoch = 0; epoch < num_epochs; epoch++) {
- if (0 == (epoch % Num10000) ) { whetherOutputLoss = 1; }
- for (int i = 0; i < num_samples; i++) {
- //float* outputs =
- feedforward(nn, inputs[i]);
- //
- if (whetherOutputLoss) { feedLoss(nn, &targets[i]); } // only print when the sampling epoch comes around
- //
- backpropagate(nn, &targets[i]);
- }//
- if (whetherOutputLoss) {printf("\n");
- whetherOutputLoss = 0;
- }
-
- }//for110i
- }//void train
-
- int main() {
- // int topology[] = { 2, 4, 3, 1 };
- // NeuralNetwork nn = initialize_nn(topology, 4);
-
- #define numLayer5 4
- //5
- //9
- //6
- //7
- int topology[] = { 2, /*128,*/ /*64,*/ /*32,*/ /*16,*/ /*8,*/ 3, 2, 1 };
- // 1, 2, 3, 4, 5, 6, 7, 8, 9
- NeuralNetwork nn = initialize_nn(topology, numLayer5); // 4);
-
- #define Num4 4
- float inputs[Num4][2] = { {1, 1}, {0, 0}, {1, 0}, {0, 1} };
- float targets[Num4] = { 0, 0, 1, 1 }; // XOR of the two inputs
-
- #define Num200000 200000
- // train(&nn, inputs, targets, 4, 10000);
- train(&nn, inputs, targets, Num4, Num200000);
-
- //#define Num4 4
-
- float test_inputs[Num4][2] = { {0,0}, {1, 0}, {1, 1}, {0, 1} };
- for (int i = 0; i < Num4; i++) {
- float* output = feedforward(&nn, test_inputs[i]);
- printf("Output for [%f, %f]: %f\n", test_inputs[i][0], test_inputs[i][1], output[0]);
- free(output);
- }
-
- // Free memory
- for (int i = 0; i < nn.num_layers - 1; i++) {
- for (int j = 0; j < nn.layer_sizes[i]; j++) {
- free(nn.weights[i][j]);
- }
- free(nn.weights[i]);
- free(nn.deltas[i]);
- }
- free(nn.weights);
- free(nn.deltas);
- free(nn.layer_outputs);
-
- return 0;
- }//main
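For comparison with the note in sigmoid_derivative above: the more common convention differentiates with respect to the activation that feedforward already produced. A drop-in variant (an illustrative sketch; the name sigmoid_derivative_from_activation is not in the original code) would be:
- // Given an already-activated output a = sigmoid(x), d/dx sigmoid(x) = a * (1 - a),
- // so callers can pass layer_outputs values directly without re-applying sigmoid.
- float sigmoid_derivative_from_activation(float a) {
-     return a * (1.0f - a);
- }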
First version (230901)
- #include <stdio.h>    // printf
- #include <stdlib.h>   // malloc, free, rand, srand
- //#include
- #include <math.h>     // exp
- #include <time.h>     // time
- #include <windows.h>  // SetConsoleTextAttribute, GetStdHandle
-
- #define LEARNING_RATE 0.05
-
- // Sigmoid and its derivative
- float sigmoid(float x) { return 1 / (1 + exp(-x));}
-
- float sigmoid_derivative(float x) {
- //float sig = sigmoid(x);
- float sig = 1.0 / (exp(-x) + 1);
- return sig * (1 - sig);
- }
-
- typedef struct {
- float*** weights;
- int num_layers;
- int* layer_sizes;
- float** layer_outputs;
- float** deltas;
- } NeuralNetwork;
-
- NeuralNetwork initialize_nn(int* topology, int num_layers) {
- NeuralNetwork nn;
- nn.num_layers = num_layers;
- nn.layer_sizes = topology;
-
- // Allocate memory for weights, layer outputs, and deltas
- nn.weights = (float***)malloc((num_layers - 1) * sizeof(float**));
- nn.layer_outputs = (float**)malloc(num_layers * sizeof(float*));
- nn.deltas = (float**)malloc((num_layers - 1) * sizeof(float*));
-
- srand(time(NULL));
- for (int i = 0; i < num_layers - 1; i++) {
- nn.weights[i] = (float**)malloc(topology[i] * sizeof(float*));
- nn.deltas[i] = (float*)malloc(topology[i + 1] * sizeof(float));
- for (int j = 0; j < topology[i]; j++) {
- nn.weights[i][j] = (float*)malloc(topology[i + 1] * sizeof(float));
- for (int k = 0; k < topology[i + 1]; k++) {
- nn.weights[i][j][k] = ((float)rand() / RAND_MAX) * 2.0f - 1.0f; // Random weights between -1 and 1
- }
- }//for220j
- }//for110i
- return nn;
- }//NeuralNetwork initialize_nn
-
- float* feedforward(NeuralNetwork* nn, float* input) {
- nn->layer_outputs[0] = input;
- for (int i = 0; i < nn->num_layers - 1; i++) {
- nn->layer_outputs[i + 1] = (float*)malloc(nn->layer_sizes[i + 1] * sizeof(float));
- for (int j = 0; j < nn->layer_sizes[i + 1]; j++) {
- nn->layer_outputs[i + 1][j] = 0;
- for (int k = 0; k < nn->layer_sizes[i]; k++) {
- // int A01 = 01;
- nn->layer_outputs[i + 1][j] += nn->layer_outputs[i][k] * nn->weights[i][k][j];
- // A01 = 22;
- }
- nn->layer_outputs[i + 1][j] = sigmoid(nn->layer_outputs[i + 1][j]);
- }//for220j
- }//for110i
- return nn->layer_outputs[nn->num_layers - 1];
- }//feedforward
-
-
- void feedLoss(NeuralNetwork* nn, float* target) {
-
- //Display the weight matrices:
- //nn->layer_outputs[0] = input;
- for (int i = 0; i < nn->num_layers - 1; i++) {
- nn->layer_outputs[i + 1] = (float*)malloc(nn->layer_sizes[i + 1] * sizeof(float));
- for (int j = 0; j < nn->layer_sizes[i + 1]; j++) {
- nn->layer_outputs[i + 1][j] = 0;
- for (int k = 0; k < nn->layer_sizes[i]; k++) {
- //nn->layer_outputs[i + 1][j] += nn->layer_outputs[i][k] * nn->weights[i][k][j];
- // Print positive weights in blue, negative weights in red
- if (0 < nn->weights[i][k][j]) {
- SetConsoleTextAttribute(GetStdHandle(STD_OUTPUT_HANDLE), FOREGROUND_BLUE);
- }
- else {
- SetConsoleTextAttribute(GetStdHandle(STD_OUTPUT_HANDLE), FOREGROUND_RED);
- }
- printf("(%.4f,", nn->weights[i][k][j]);
- }
- printf("] \n");
- nn->layer_outputs[i + 1][j] = sigmoid(nn->layer_outputs[i + 1][j]);
- }//for220j
- SetConsoleTextAttribute(GetStdHandle(STD_OUTPUT_HANDLE), FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE);
- printf("};\n");
- }//for110i
- printf("_]};\n");
- //
-
- int Last01 = nn->num_layers - 1;
- // Calculate output layer deltas
- for (int i = 0; i < nn->layer_sizes[Last01]; ++i ) {
- float error = target[i] - nn->layer_outputs[Last01][i];
- printf("[i%d:%f] ", i, error);
- // nn->deltas[Last01 - 1][i] = error * sigmoid_derivative(nn->layer_outputs[Last01][i]);
- }
-
- // Calculate hidden layer deltas
-
- }//feedLoss(NeuralNetwork* nn, float* target
-
-
- void backpropagate(NeuralNetwork* nn, float* target) {
- int Last01 = nn->num_layers - 1;
-
- // Calculate output layer deltas
- for (int i = 0; i < nn->layer_sizes[Last01]; i++) {
- float error = target[i] - nn->layer_outputs[Last01][i];
- nn->deltas[Last01 - 1][i] = error * sigmoid_derivative(nn->layer_outputs[Last01][i]);
- }
-
- // Calculate hidden layer deltas
- for (int i = Last01 - 1; i > 0; i--) {
- for (int j = 0; j < nn->layer_sizes[i]; j++) {
- float sum = 0;
- for (int k = 0; k < nn->layer_sizes[i + 1]; k++) {
- sum += nn->weights[i][j][k] * nn->deltas[i][k];
- }
- nn->deltas[i - 1][j] = sum * sigmoid_derivative(nn->layer_outputs[i][j]);
- }
- }
-
- // Adjust weights
- for (int i = 0; i < Last01; i++) {
- for (int j = 0; j < nn->layer_sizes[i]; j++) {
- for (int k = 0; k < nn->layer_sizes[i + 1]; k++) {
- nn->weights[i][j][k] += LEARNING_RATE * nn->deltas[i][k] * nn->layer_outputs[i][j];
- }
- }
- }//
- }//backpropagate(NeuralNetwork* nn, float* target
-
- void train(NeuralNetwork* nn, float inputs[][2], float* targets, int num_samples, int num_epochs) {
- float* outputs;
- bool whetherOutputLoss = 0;
- #define Num10000 50000
- for (int epoch = 0; epoch < num_epochs; epoch++) {
- if (0 == (epoch % Num10000) ) { whetherOutputLoss = 1; }
- for (int i = 0; i < num_samples; i++) {
- //float* outputs =
- feedforward(nn, inputs[i]);
- //
- if (whetherOutputLoss) { feedLoss(nn, &targets[i]); }
- //
- backpropagate(nn, &targets[i]);
- }//
- if (whetherOutputLoss) {printf("\n");
- whetherOutputLoss = 0;
- }
-
- }//for110i
- }//void train
-
- int main() {
- // int topology[] = { 2, 4, 3, 1 };
- // NeuralNetwork nn = initialize_nn(topology, 4);
-
- #define numLayer5 4
- //5
- //9
- //6
- //7
- int topology[] = { 2, /*128,*/ /*64,*/ /*32,*/ /*16,*/ /*8,*/ 3, 2, 1 };
- // 1, 2, 3, 4, 5, 6, 7, 8, 9
- NeuralNetwork nn = initialize_nn(topology, numLayer5); // 4);
-
- #define Num4 4
- float inputs[Num4][2] = { {1, 1}, {0, 0}, {1, 0}, {0, 1} };
- float targets[Num4] = { 0, 0, 1, 1 };
-
- #define Num200000 200000
- // train(&nn, inputs, targets, 4, 10000);
- train(&nn, inputs, targets, Num4, Num200000);
-
- //#define Num4 4
-
- float test_inputs[Num4][2] = { {0,0}, {1, 0}, {1, 1}, {0, 1} };
- for (int i = 0; i < Num4; i++) {
- float* output = feedforward(&nn, test_inputs[i]);
- printf("Output for [%f, %f]: %f\n", test_inputs[i][0], test_inputs[i][1], output[0]);
- free(output);
- }
-
- // Free memory
- for (int i = 0; i < nn.num_layers - 1; i++) {
- for (int j = 0; j < nn.layer_sizes[i]; j++) {
- free(nn.weights[i][j]);
- }
- free(nn.weights[i]);
- free(nn.deltas[i]);
- }
- free(nn.weights);
- free(nn.deltas);
- free(nn.layer_outputs);
-
- return 0;
- }//main
Sample run output (weight matrices printed at each sampling epoch, the per-sample output error [i0:...], and the final XOR predictions):
- (-0.1291,(0.7803,]
- (-0.6326,(0.5078,]
- };
- (-0.1854,(-0.5262,(0.8464,]
- (0.4913,(0.0774,(0.1000,]
- };
- (0.7582,(-0.7756,]
- };
- _]};
- [i0:-0.500000] (0.5459,(0.0427,]
- (-0.1289,(0.7804,]
- (-0.6327,(0.5076,]
- };
- (-0.1859,(-0.5268,(0.8458,]
- (0.4919,(0.0780,(0.1005,]
- };
- (0.7553,(-0.7786,]
- };
- _]};
- [i0:-0.500000] (0.5459,(0.0427,]
- (-0.1289,(0.7804,]
- (-0.6327,(0.5076,]
- };
- (-0.1864,(-0.5273,(0.8453,]
- (0.4924,(0.0785,(0.1011,]
- };
- (0.7524,(-0.7815,]
- };
- _]};
- [i0:0.500000] (0.5458,(0.0427,]
- (-0.1291,(0.7804,]
- (-0.6326,(0.5076,]
- };
- (-0.1859,(-0.5268,(0.8458,]
- (0.4919,(0.0780,(0.1005,]
- };
- (0.7553,(-0.7786,]
- };
- _]};
- [i0:0.500000]
- (0.5679,(-0.3593,]
- (-0.8321,(1.1025,]
- (-0.5647,(0.1703,]
- };
- (-0.5384,(-1.1479,(0.8445,]
- (0.2658,(0.1725,(-0.1653,]
- };
- (1.1137,(-0.7693,]
- };
- _]};
- [i0:-0.500000] (0.5682,(-0.3590,]
- (-0.8317,(1.1029,]
- (-0.5651,(0.1699,]
- };
- (-0.5391,(-1.1487,(0.8437,]
- (0.2663,(0.1730,(-0.1647,]
- };
- (1.1107,(-0.7722,]
- };
- _]};
- [i0:-0.500000] (0.5682,(-0.3590,]
- (-0.8317,(1.1029,]
- (-0.5651,(0.1699,]
- };
- (-0.5399,(-1.1495,(0.8429,]
- (0.2668,(0.1735,(-0.1642,]
- };
- (1.1078,(-0.7751,]
- };
- _]};
- [i0:0.500000] (0.5679,(-0.3590,]
- (-0.8321,(1.1029,]
- (-0.5647,(0.1699,]
- };
- (-0.5391,(-1.1487,(0.8437,]
- (0.2663,(0.1730,(-0.1647,]
- };
- (1.1107,(-0.7722,]
- };
- _]};
- [i0:0.500000]
- (6.5241,(-6.2462,]
- (-6.5361,(6.8406,]
- (0.2226,(0.6834,]
- };
- (-3.2613,(-3.6355,(2.0290,]
- (0.8144,(0.6639,(-0.7503,]
- };
- (4.2499,(-0.6959,]
- };
- _]};
- [i0:-0.500000] (6.5288,(-6.2415,]
- (-6.5309,(6.8458,]
- (0.2196,(0.6804,]
- };
- (-3.2642,(-3.6385,(2.0261,]
- (0.8149,(0.6644,(-0.7498,]
- };
- (4.2469,(-0.6989,]
- };
- _]};
- [i0:-0.500000] (6.5288,(-6.2415,]
- (-6.5309,(6.8458,]
- (0.2196,(0.6804,]
- };
- (-3.2671,(-3.6414,(2.0231,]
- (0.8154,(0.6649,(-0.7494,]
- };
- (4.2440,(-0.7018,]
- };
- _]};
- [i0:0.500000] (6.5241,(-6.2415,]
- (-6.5361,(6.8458,]
- (0.2226,(0.6804,]
- };
- (-3.2642,(-3.6385,(2.0260,]
- (0.8149,(0.6644,(-0.7498,]
- };
- (4.2469,(-0.6989,]
- };
- _]};
- [i0:0.500000]
- (114.9971,(-113.4876,]
- (-112.8603,(114.3747,]
- (0.6990,(0.7116,]
- };
- (-31.6319,(-31.7725,(45.2379,]
- (11.9645,(11.6226,(-25.5372,]
- };
- (22.2722,(-15.6809,]
- };
- _]};
- [i0:-0.500000] (115.2866,(-113.1981,]
- (-112.5715,(114.6635,]
- (0.2422,(0.2548,]
- };
- (-31.6473,(-31.7879,(45.2226,]
- (11.9753,(11.6335,(-25.5264,]
- };
- (22.2693,(-15.6838,]
- };
- _]};
- [i0:-0.500000] (115.2866,(-113.1981,]
- (-112.5715,(114.6635,]
- (0.2422,(0.2548,]
- };
- (-31.6626,(-31.8033,(45.2072,]
- (11.9861,(11.6443,(-25.5155,]
- };
- (22.2663,(-15.6867,]
- };
- _]};
- [i0:0.500000] (114.9968,(-113.1981,]
- (-112.8605,(114.6635,]
- (0.6987,(0.2548,]
- };
- (-31.6473,(-31.7879,(45.2226,]
- (11.9753,(11.6335,(-25.5264,]
- };
- (22.2693,(-15.6838,]
- };
- _]};
- [i0:0.500000]
- Output for [0.000000, 0.000000]: 0.005787
- Output for [1.000000, 0.000000]: 0.993864
- Output for [1.000000, 1.000000]: 0.011066
- Output for [0.000000, 1.000000]: 0.993822