// $Id: Local.cc 1120 2008-02-21 23:18:41Z peter $

/*
  Copyright (C) 2004 Peter Johansson
  Copyright (C) 2005, 2006, 2007 Jari Häkkinen, Peter Johansson

  This file is part of the yat library, http://trac.thep.lu.se/yat

  The yat library is free software; you can redistribute it and/or
  modify it under the terms of the GNU General Public License as
  published by the Free Software Foundation; either version 2 of the
  License, or (at your option) any later version.

  The yat library is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with this program; if not, write to the Free Software
  Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
  02111-1307, USA.
*/

#include "Local.h"
#include "Kernel.h"
#include "OneDimensionalWeighted.h"
#include "yat/utility/Vector.h"
#include "yat/utility/VectorView.h"

#include <algorithm>
#include <cassert>
#include <cmath>
#include <iostream>
#include <sstream>
#include <stdexcept>

namespace theplu {
namespace yat {
namespace regression {

  Local::Local(OneDimensionalWeighted& r, Kernel& k)
    : kernel_(&k), regressor_(&r)
  {
  }

  Local::~Local(void)
  {
  }

  void Local::add(const double x, const double y)
  {
    data_.push_back(std::make_pair(x,y));
  }

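  // Local::fit performs one local, kernel-weighted regression per
  // step_size data points. The stored (x,y) pairs are sorted by x and,
  // for the i-th of the data_.size()/step_size fits, a window of
  // nof_points consecutive points centred near index (i+0.5)*step_size
  // is selected (truncated at the borders of the data). Each point in
  // the window is weighted by the kernel evaluated at its distance from
  // the middle of the window, scaled by the window half-width; the
  // wrapped regressor is fitted with these weights, and the prediction
  // and its standard error at x(i*step_size) are stored for retrieval
  // through x(), y_predicted(), and y_err(). For example, with
  // step_size=10 and nof_points=30, fit number i is centred near the
  // (10*i+5)-th sorted point and uses the 30 points around it.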
  void Local::fit(const size_t step_size, const size_t nof_points)
  {
    if (step_size==0){
      std::stringstream ss;
      ss << "yat::regression::Local: step_size must be larger than zero.";
      throw std::runtime_error(ss.str());
    }
    if (nof_points<3){
      std::stringstream ss;
      ss << "yat::regression::Local: too few data points. "
         << "At least 3 data points are needed to perform fitting.";
      throw std::runtime_error(ss.str());
    }

    size_t nof_fits=data_.size()/step_size;
    x_ = utility::Vector(nof_fits);
    y_predicted_ = utility::Vector(x_.size());
    y_err_ = utility::Vector(x_.size());
    std::sort(data_.begin(), data_.end());

    // copying the data once into two utility vectors, so that views
    // into them can be used below
    utility::Vector x(data_.size());
    utility::Vector y(data_.size());
    for (size_t j=0; j<x.size(); j++){
      x(j)=data_[j].first;
      y(j)=data_[j].second;
    }

    // looping over the regression points and performing a local
    // regression around each of them
    for (size_t i=0; i<nof_fits; i++) {
      size_t max_index = static_cast<size_t>( (i+0.5)*step_size );
      size_t min_index;
      double width; // distance from the middle of the window to its border
      double x_mid; // middle of window
      // right border case
      if (max_index > data_.size()-1){
        min_index = max_index - nof_points + 1;
        max_index = data_.size()-1;
        width = ( (( x(max_index)-x(0) )*(nof_points-1)) /
                  ( 2*(max_index-min_index)) );
        x_mid = x(min_index)+width;
      }
      // normal middle case
      else if (max_index > nof_points-1){
        min_index = max_index - nof_points + 1;
        width = (x(max_index)-x(min_index))/2;
        x_mid = x(min_index)+width;
      }
      // left border case
      else {
        min_index = 0;
        width = ( (( x(max_index)-x(0) )*(nof_points-1)) /
                  ( 2*(max_index-min_index)) );
        x_mid = x(max_index)-width;
      }
      assert(min_index<data_.size());
      assert(max_index<data_.size());

      utility::VectorView x_local(x, min_index, max_index-min_index+1);
      utility::VectorView y_local(y, min_index, max_index-min_index+1);

      // calculating weights
      utility::Vector w(max_index-min_index+1);
      for (size_t j=0; j<w.size(); j++)
        w(j) = (*kernel_)( (x_local(j)- x_mid)/width );

      // fitting the regressor locally
      regressor_->fit(x_local,y_local,w);
      assert(i<x_.size());
      assert(i<y_predicted_.size());
      assert(i<y_err_.size());
      x_(i) = x(i*step_size); // x-value at which the local fit is evaluated
      y_predicted_(i) = regressor_->predict(x(i*step_size));
      y_err_(i) = std::sqrt(regressor_->standard_error2(x(i*step_size)));
    }
  }

  const utility::Vector& Local::x(void) const
  {
    return x_;
  }

  const utility::Vector& Local::y_predicted(void) const
  {
    return y_predicted_;
  }

  const utility::Vector& Local::y_err(void) const
  {
    return y_err_;
  }

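  // Writes the result of the last fit as three tab-separated columns:
  // the x-value of each regression point, the locally predicted
  // y-value, and its standard error.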
  std::ostream& operator<<(std::ostream& os, const Local& r)
  {
    os << "# column 1: x\n"
       << "# column 2: y\n"
       << "# column 3: y_err\n";
    for (size_t i=0; i<r.x().size(); i++) {
      os << r.x()(i) << "\t"
         << r.y_predicted()(i) << "\t"
         << r.y_err()(i) << "\n";
    }

    return os;
  }

}}} // of namespaces regression, yat, and theplu
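
// A minimal usage sketch (kept in a comment; it is not compiled as part
// of this file). The kernel and regressor classes named below, KernelBox
// and LinearWeighted, are assumed to be available as in the yat
// regression module; any Kernel and OneDimensionalWeighted
// implementation can be substituted.
//
//   #include "yat/regression/KernelBox.h"
//   #include "yat/regression/LinearWeighted.h"
//   #include "yat/regression/Local.h"
//   #include <iostream>
//
//   int main(void)
//   {
//     using namespace theplu::yat::regression;
//     LinearWeighted linear;        // model fitted within each window
//     KernelBox kernel;             // weight function over each window
//     Local local(linear, kernel);
//     for (int i=0; i<100; ++i)     // add (x,y) data points
//       local.add(0.1*i, 0.01*i*i);
//     local.fit(10, 30);            // one fit per 10 points, 30 points per window
//     std::cout << local;           // columns: x, y_predicted, y_err
//     return 0;
//   }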