// $Id: poisson.cc 3662 2017-07-19 01:53:55Z peter $

/*
	Copyright (C) 2017 Peter Johansson

	This file is part of the yat library, http://dev.thep.lu.se/yat

	The yat library is free software; you can redistribute it and/or
	modify it under the terms of the GNU General Public License as
	published by the Free Software Foundation; either version 3 of the
	License, or (at your option) any later version.

	The yat library is distributed in the hope that it will be useful,
	but WITHOUT ANY WARRANTY; without even the implied warranty of
	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
	General Public License for more details.

	You should have received a copy of the GNU General Public License
	along with yat. If not, see <http://www.gnu.org/licenses/>.
*/

#include <config.h>

#include "Suite.h"

#include "yat/regression/Poisson.h"
#include "yat/random/random.h"
#include "yat/statistics/Averager.h"
#include "yat/utility/Matrix.h"
#include "yat/utility/Vector.h"

#include <cmath>
#include <vector>

using namespace theplu::yat;

void analyse(const utility::Vector& b, const utility::Matrix& X,
             std::vector<statistics::Averager>& stats,
             std::vector<statistics::Averager>& stats2);

void generate_X(const utility::Vector& b, utility::Matrix& X);

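// Test program for regression::Poisson. It first runs a minimal sanity
// check of fit() and predict(), then repeatedly fits simulated data to
// verify that the estimated coefficients and their covariance matrix
// are consistent with the parameters used in the simulation.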
int main(int argc, char* argv[])
{
  test::Suite suite(argc, argv);

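  // Sanity check on an all-zero design and response: the test expects
  // fit_parameters() to return p+1 values (presumably an intercept plus
  // one coefficient per column of X) and predict() to accept a row view.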
  regression::Poisson model;
  size_t n = 1000;
  size_t p = 4;
  utility::Vector y(n);
  utility::Matrix X(n, p);
  model.fit(X, y);
  if (model.fit_parameters().size() != p+1) {
    suite.add(false);
    suite.err() << "error: size of fit_parameters: "
                << model.fit_parameters().size()
                << " expected " << p+1 << "\n";
  }
  model.predict(X.row_const_view(0));

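  // True parameters for the simulation: b(0) is the intercept and
  // b(1)-b(3) are the coefficients of the three Gaussian covariates
  // generated below. The same design X is reused in 100 replicates,
  // each with a freshly simulated response (see analyse below).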
  utility::Vector b(4);
  b(0) = 0.5;
  b(1) = 2.0;
  b(2) = 0.75;
  b(3) = -1.25;
  generate_X(b, X);
  std::vector<statistics::Averager> stats(b.size());
  std::vector<statistics::Averager> stats2(b.size());
  for (size_t i=0; i<100; ++i)
    analyse(b, X, stats, stats2);

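  // For each parameter, the mean of the estimates over the replicates
  // should agree with the true value within five standard errors of
  // that mean; a zero standard error indicates a degenerate fit and is
  // also reported as a failure.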
  for (size_t i=0; i<b.size(); ++i) {
    suite.out() << i << " " << b(i) << " " << stats[i].mean() << " "
                << stats[i].standard_error() << "\n";
    if (stats[i].standard_error() == 0.0) {
      suite.add(false);
      suite.err() << "error: standard error is 0.0\n";
    }
    else if (std::abs(stats[i].mean() - b(i)) > 5*stats[i].standard_error()) {
      suite.add(false);
      suite.err() << "error: average for param " << i << ": "
                  << stats[i].mean() << "\n";
    }
  }

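  // Covariance check: the average standard error inferred from the
  // fitted covariance matrix (stats2) should agree with the spread of
  // the estimates actually observed across replicates (stats[i].std()).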
  suite.out() << "\ntest covariance\n";
  suite.out() << "i\t<inferred>\tobserved\tdelta\trelative error\n";
  for (size_t i=0; i<b.size(); ++i) {
    // comparing uncertainty inferred from the covariance matrix (stats2)
    // with uncertainty observed across replicates (stats)

    double delta = stats2[i].mean() - stats[i].std();
    double relative_error = delta / stats[i].std();

    suite.out() << i << "\t" << stats2[i].mean() << "\t"
                << stats[i].std() << "\t"
                << delta << "\t"
                << relative_error << "\n";

    // covariance matrix is only an approximation, so only check that
    // estimation is roughly correct (within 20%)
    if (std::abs(relative_error) > 0.2) {
      suite.add(false);
      suite.err() << "error: " << relative_error << " too large\n";
    }
  }

  return suite.return_value();
}


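// Fill X with a fixed design of 5000 observations and b.size()-1
// covariates drawn from a standard Gaussian (the intercept b(0) has no
// corresponding column).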
void generate_X(const utility::Vector& b, utility::Matrix& X)
{
  size_t n = 5000;
  X.resize(n, b.size()-1);
  random::Gaussian gauss;
  for (utility::Matrix::iterator it=X.begin(); it!=X.end(); ++it)
    *it = gauss();
}


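// Simulate a Poisson response from the log-linear model used throughout
// this test:
//
//   ln mu_i = b(0) + sum_{j=1}^{p} X(i, j-1) * b(j),   y_i ~ Poisson(mu_i)
//
// i.e. b(0) is the intercept and the remaining elements of b are the
// coefficients of the columns of X.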
void generate_y(const utility::Vector& b,
                const utility::Matrix& X, utility::Vector& y)
{
  size_t n = X.rows();
  y.resize(n);
  random::Poisson poisson;

  for (size_t i=0; i<n; ++i) {
    double lnmu = b(0);
    for (size_t j=1; j<b.size(); ++j)
      lnmu += X(i, j-1) * b(j);

    double mu = std::exp(lnmu);
    y(i) = poisson(mu);
  }
}


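// One simulation replicate: draw a new response from the true model,
// refit, and record each estimated coefficient (stats) together with
// its standard error taken from the diagonal of the estimated
// covariance matrix (stats2).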
void analyse(const utility::Vector& b, const utility::Matrix& X,
             std::vector<statistics::Averager>& stats,
             std::vector<statistics::Averager>& stats2)
{
  utility::Vector y;
  generate_y(b, X, y);
  theplu::yat::regression::Poisson poisson;
  poisson.fit(X, y);
  for (size_t i=0; i<poisson.fit_parameters().size(); ++i) {
    stats[i].add(poisson.fit_parameters()(i));
    stats2[i].add(std::sqrt(poisson.covariance()(i, i)));
  }
}