// convolution_test.cpp
#include <cstddef>
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Full (linear) discrete convolution: returns a vector of length
// f.size() + g.size() - 1 with out[i] = sum_j f[j] * g[i - j].
template<typename T>
std::vector<T> conv(std::vector<T> const &f, std::vector<T> const &g) {
    int const nf = static_cast<int>(f.size());
    int const ng = static_cast<int>(g.size());
    int const n  = nf + ng - 1;
    std::vector<T> out(n, T());
    for (int i = 0; i < n; ++i) {
        // Restrict j so that both f[j] and g[i - j] are in range.
        int const jmn = (i >= ng - 1) ? i - (ng - 1) : 0;
        int const jmx = (i < nf - 1) ? i : nf - 1;
        for (int j = jmn; j <= jmx; ++j) {
            out[i] += f[j] * g[i - j];
        }
    }
    return out;
}
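
// Illustrative sketch only (not part of the original test and not called from
// main()): a minimal sanity check for conv() on a small hand-worked case,
// assuming the full-convolution semantics documented above. For f = {1, 2, 3}
// and g = {0, 1, 0.5}, out[i] = sum_j f[j] * g[i - j] gives {0, 1, 2.5, 4, 1.5}.
// The helper name conv_sanity_check is hypothetical.
bool conv_sanity_check() {
    std::vector<double> const a{1.0, 2.0, 3.0};
    std::vector<double> const b{0.0, 1.0, 0.5};
    std::vector<double> const expected{0.0, 1.0, 2.5, 4.0, 1.5};
    std::vector<double> const got = conv(a, b);
    if (got.size() != expected.size()) return false;
    for (std::size_t i = 0; i < got.size(); ++i) {
        double const d = got[i] - expected[i];
        if (d > 1e-12 || d < -1e-12) return false;
    }
    return true;
}
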
int main() {
std::vector<double> f{0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6};
std::vector<double> v{-0.07146563,-0.07163653,-0.07169451,-0.07147173,-0.07171588,-0.07155718,-0.07155108,-0.07172808,-0.07126421,-0.07182574,-0.07171893,-0.07165484,-0.07179217,-0.07171893,-0.07182574,-0.07185626,-0.07201801,-0.07168536,-0.0716762,-0.07170672,-0.07162737,-0.07171893,-0.07168536,-0.07159991,-0.07155413,-0.07131609,-0.07158465,-0.0714992,-0.07150835,-0.07148699,-0.0715816,-0.07158465,-0.07136492,-0.07152667,-0.07136797,-0.07159686,-0.07173114,-0.07166094,-0.0715938,-0.07154192,-0.0715816,-0.07166094,-0.07127337,-0.07154498,-0.07155413,-0.07162127,-0.07140459,-0.07149004,-0.07153277,-0.07149004,-0.07190509,-0.07151446,-0.07145647,-0.07183185,-0.07159991,-0.07159991,-0.07174945,-0.07169757,-0.07174945,-0.07164874,-0.07145953,-0.07174945,-0.07168841,-0.07178607,-0.07154192,-0.0714107,-0.07170977,-0.07155413,-0.07156634,-0.07146868,-0.07160601,-0.07182269,-0.07168231,-0.07164569,-0.07166705,-0.07170062,-0.07162432,-0.07148699,-0.07148699,-0.07122759,-0.07128862,-0.07134966,-0.07133745,-0.07130388,-0.07176471,-0.07154803,-0.07121538,-0.07141985,-0.07135576,-0.07149615,-0.07112688,-0.07157549,-0.07147173,-0.07157549,-0.071664,-0.07171588,-0.0715816,-0.0715816,-0.0719173,-0.07132525,-0.07167315,-0.07148394,-0.07153887,-0.07163958,-0.07167315,0.03153887,-0.0176318,-0.03512169,-0.04177157,-0.04410925,-0.04465858,-0.04490272,-0.04516213,-0.04478065,-0.04479286,-0.0446891,-0.04425879,-0.04460365,-0.04431067,-0.04432898,-0.04423438,-0.04397498,-0.04380407,-0.04354467,-0.04312657,-0.04327916,-0.04331884,-0.04308995,-0.04298009,-0.04287022,-0.04278477,-0.04269017,-0.04247959,-0.04273594,-0.04251621,-0.04227207,-0.04200656,-0.04195468,-0.04186923,-0.04204013,-0.04178378,-0.04118563,-0.04161593,-0.04132906,-0.04153964,-0.04125277,-0.04126802,-0.04100862,-0.0408011,-0.04090181,-0.04093538,-0.04066072,-0.04067292,-0.04048066,-0.04032502,-0.04042267,-0.04023957,-0.03998627,-0.0396994,-0.04010224,-0.04007782,-0.03986114,-0.03964446,-0.03953765,-0.03939422,-0.03941558,-0.03930266,-0.03925078,-0.03931182,-0.0391989,-0.03887846,-0.03904631,-0.03863737,-0.0391043,-0.03880522,-0.03827726,-0.038439,-0.03847868,-0.03864652,-0.03894865,-0.03857633,-0.03838102,-0.03838407,-0.0381857,-0.03801785,-0.03803311,-0.03768826,-0.03793545,-0.03774319,-0.03749294,-0.03771572,-0.03764553,-0.03713283,-0.03747463,-0.03736477,-0.03723659,-0.03704128,-0.03737087,-0.03717556,-0.03728542,-0.0370016,-0.03690394};
    std::vector<double> kernel{};

    /***************/
    /* READ KERNEL */
    /***************/
    // Expects "aec_kernel.txt" in the working directory, one value per line.
    std::ifstream kernel_file("aec_kernel.txt");
    if (!kernel_file) {
        std::cerr << "Could not open aec_kernel.txt" << std::endl;
        return 1;
    }
    std::string line;
    while (std::getline(kernel_file, line)) {
        std::stringstream parser(line);
        double value;
        if (parser >> value) {
            kernel.push_back(value);
        }
    }
    /****************/
    /* PRINT KERNEL */
    /****************/
    // for (auto x : kernel) std::cout << x << " ";
    // std::cout << std::endl << std::endl;
    /***************/
    /* CONVOLUTION */
    /***************/
    // Full convolution of the kernel with the input signal f:
    // v_cov.size() == kernel.size() + f.size() - 1.
    std::vector<double> v_cov = conv(kernel, f);
    std::cout << v_cov.size() << std::endl;
    for (auto x : v_cov) std::cout << x << " ";
    std::cout << std::endl << std::endl;
    /******************************/
    /* SIZE-CORRECTED CONVOLUTION */
    /******************************/
    // Drop the last kernel.size() - 1 samples so the result has the same
    // length as f, then print the difference against the reference signal v.
    std::vector<double> v_cov2(v_cov.begin(), v_cov.end() - (kernel.size() - 1));
    std::cout << v_cov2.size() << std::endl;
    for (std::size_t i = 0; i < v_cov2.size(); ++i) {
        std::cout << v[i] - v_cov2[i] << " ";
    }
    std::cout << std::endl << std::endl;
    /****************************/
    /* SINGLE-VALUE CONVOLUTION */
    /****************************/
    // Convolve the kernel with a single sample of f and compare the trimmed
    // result against the same check done on the full signal.
    std::vector<double> undato{};
    undato.push_back(f[200]);
    std::cout << "One sample: ";
    for (auto x : undato) std::cout << x << " ";
    std::cout << std::endl << std::endl;
    std::vector<double> conv_undato = conv(kernel, undato);
    std::cout << conv_undato.size() << std::endl;
    for (auto x : conv_undato) std::cout << x << " ";
    std::cout << std::endl << std::endl;
    std::vector<double> conv_undato2(conv_undato.begin(), conv_undato.end() - (kernel.size() - 1));
    std::cout << conv_undato2.size() << std::endl;
    for (std::size_t i = 0; i < conv_undato2.size(); ++i) {
        std::cout << v[200] - conv_undato2[i] << " ";
    }
    std::cout << std::endl;
    std::cout << "\nCheck value A:\n" << v[200] - v_cov2[200] << std::endl;
    std::cout << "\nCheck value B:\n" << v[200] - conv_undato2[0] << std::endl;
}
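
// One possible way to build and run this test (an assumption, not stated in
// the original source): any C++11-capable compiler, with aec_kernel.txt in
// the current directory, e.g.
//   g++ -std=c++11 -O2 convolution_test.cpp -o convolution_test && ./convolution_test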