/*
Copyright (c) by respective owners including Yahoo!, Microsoft, and
individual contributors. All rights reserved.  Released under a BSD (revised)
license as described in the file LICENSE.
 */
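
// Bootstrap (bagging) reduction. Example usage (a sketch; the data file name is a
// placeholder):
//   vw --bootstrap 10 train.dat                  <- 10 rounds, mean prediction (default)
//   vw --bootstrap 10 --bs_type vote train.dat   <- majority vote over the 10 rounds
// --bootstrap sets the number of rounds B; --bs_type is parsed in BS::setup() below.
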
#include <float.h>
#include <math.h>
#include <stdio.h>
#include <sstream>
#include <numeric>
#include <vector>
#include <algorithm>

#include "reductions.h"
#include "vw.h"
#include "simple_label.h"
#include "rand48.h"
#include "bs.h"

using namespace std;
using namespace LEARNER;

namespace BS {

  struct bs{
    uint32_t B; //number of bootstrap rounds
    size_t bs_type; //BS_TYPE_MEAN or BS_TYPE_VOTE
    float lb; //lower bound of the bootstrap predictions for the current example
    float ub; //upper bound of the bootstrap predictions for the current example
    vector<double> pred_vec; //predictions collected from the B rounds
    vw* all;
  };

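  // Combine the B bootstrap predictions by taking their arithmetic mean and score
  // it with the loss function configured in vw.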
  void bs_predict_mean(vw& all, example& ec, vector<double> &pred_vec)
  {
    ec.pred.scalar = (float)accumulate(pred_vec.begin(), pred_vec.end(), 0.0)/pred_vec.size();
    ec.loss = all.loss->getLoss(all.sd, ec.pred.scalar, ec.l.simple.label) * ec.l.simple.weight;    
  }

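  // Combine the B bootstrap predictions by majority vote over the rounded values.
  // A Boyer-Moore-style single pass proposes a majority candidate; if more than two
  // distinct votes are seen and the candidate is not a true majority, the votes are
  // sorted and the most frequent value wins (smallest value on ties). The reported
  // loss is 0/1 against the label, scaled by the example weight.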
  void bs_predict_vote(vw& all, example& ec, vector<double> &pred_vec)
  { //majority vote in linear time
    unsigned int counter = 0;
    int current_label = 1, init_label = 1;
    // float sum_labels = 0; // uncomment for: "avg on votes" and getLoss()
    bool majority_found = false;
    bool multivote_detected = false; // distinct(votes)>2: used to skip part of the algorithm
    int* pred_vec_int = new int[pred_vec.size()];

    for(unsigned int i=0; i<pred_vec.size(); i++)
    {
      pred_vec_int[i] = (int)floor(pred_vec[i]+0.5); // could be added: link(), min_label/max_label, cutoff between true/false for binary

      if(multivote_detected == false) { // distinct(votes)>2 detection block
        if(i == 0) {
          init_label = pred_vec_int[i];
          current_label = pred_vec_int[i];
        }
        else if(init_label != current_label && pred_vec_int[i] != current_label
                 && pred_vec_int[i] != init_label)
             multivote_detected = true; // more than 2 distinct votes detected
      }

      if (counter == 0) {
        counter = 1;
        current_label = pred_vec_int[i];
      }
      else {
        if(pred_vec_int[i] == current_label)
          counter++;
        else {
          counter--;
        }
      }
    }

    if(counter > 0 && multivote_detected) { // remove this condition for: "avg on votes" and getLoss()
      counter = 0;
      for(unsigned int i=0; i<pred_vec.size(); i++)
        if(pred_vec_int[i] == current_label) {
          counter++;
          // sum_labels += pred_vec[i]; // uncomment for: "avg on votes" and getLoss()
      }
      if(counter*2 > pred_vec.size())
        majority_found = true;
    }

    if(multivote_detected && majority_found == false) { // then find most frequent element - if tie: smallest tie label
        std::sort(pred_vec_int, pred_vec_int+pred_vec.size());
        int tmp_label = pred_vec_int[0];
        current_label = tmp_label; // start from the smallest label so ties resolve downward
        counter = 1;
        unsigned int temp_count = 1;
        for(unsigned int i=1; i<pred_vec.size(); i++)
        {
          if(tmp_label == pred_vec_int[i])
            temp_count++;
          else {
            if(temp_count > counter) {
              current_label = tmp_label;
              counter = temp_count;
            }
            tmp_label = pred_vec_int[i];
            temp_count = 1;
          }
        }
        if(temp_count > counter) { // the final run is never compared inside the loop above
          current_label = tmp_label;
          counter = temp_count;
        }
        /* uncomment for: "avg on votes" and getLoss()
        sum_labels = 0;
        for(unsigned int i=0; i<pred_vec.size(); i++)
          if(pred_vec_int[i] == current_label)
            sum_labels += pred_vec[i]; */
    }

    // ld.prediction = sum_labels/(float)counter; //replace line below for: "avg on votes" and getLoss()
    ec.pred.scalar = (float)current_label;

    // ec.loss = all.loss->getLoss(all.sd, ld.prediction, ld.label) * ld.weight; //replace line below for: "avg on votes" and getLoss()
    ec.loss = ((ec.pred.scalar == ec.l.simple.label) ? 0.f : 1.f) * ec.l.simple.weight;

    delete[] pred_vec_int; // release the temporary vote buffer allocated above
  }

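  // Write one line per example to a prediction sink: the combined prediction, the
  // example tag (if any), and the lower/upper bounds of the bootstrap predictions,
  // space-separated. The weight argument is currently unused.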
  void print_result(int f, float res, float weight, v_array<char> tag, float lb, float ub)
  {
    if (f >= 0)
    {
      char temp[30];
      sprintf(temp, "%f", res);
      std::stringstream ss;
      ss << temp;
      print_tag(ss, tag);
      ss << ' ';
      sprintf(temp, "%f", lb);
      ss << temp;
      ss << ' ';
      sprintf(temp, "%f", ub);
      ss << temp;
      ss << '\n';
      ssize_t len = ss.str().size();
      ssize_t t = io_buf::write_file_or_socket(f, ss.str().c_str(), (unsigned int)len);
      if (t != len)
        cerr << "write error" << endl;
    }    
  }

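  // Update progressive/holdout statistics for the example and, when predictions are
  // being written out, report the empirical [min, max] interval of the bootstrap
  // predictions alongside the combined prediction.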
  void output_example(vw& all, bs& d, example& ec)
  {
    label_data& ld = ec.l.simple;

    if(ec.test_only)
    {
      all.sd->weighted_holdout_examples += ld.weight;//test weight seen
      all.sd->weighted_holdout_examples_since_last_dump += ld.weight;
      all.sd->weighted_holdout_examples_since_last_pass += ld.weight;
      all.sd->holdout_sum_loss += ec.loss;
      all.sd->holdout_sum_loss_since_last_dump += ec.loss;
      all.sd->holdout_sum_loss_since_last_pass += ec.loss;//since last pass
    }
    else
    {
      if (ld.label != FLT_MAX)
        all.sd->weighted_labels += ld.label * ld.weight;
      all.sd->weighted_examples += ld.weight;
      all.sd->sum_loss += ec.loss;
      all.sd->sum_loss_since_last_dump += ec.loss;
      all.sd->total_features += ec.num_features;
      all.sd->example_number++;
    }
    
    if(all.final_prediction_sink.size() != 0)//get confidence interval only when printing out predictions
    {
      d.lb = FLT_MAX;
      d.ub = -FLT_MAX;
      for (unsigned i = 0; i < d.pred_vec.size(); i++)
      {
        if(d.pred_vec[i] > d.ub)
          d.ub = (float)d.pred_vec[i];
        if(d.pred_vec[i] < d.lb)
          d.lb = (float)d.pred_vec[i];
      }
    }

    for (int* sink = all.final_prediction_sink.begin; sink != all.final_prediction_sink.end; sink++)
      BS::print_result(*sink, ec.pred.scalar, 0, ec.tag, d.lb, d.ub);
  
    print_update(all, ec);
  }

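  // Online bootstrap: each of the B rounds sees the example with its importance
  // weight multiplied by a fresh draw from weight_gen() (declared in bs.h, not shown
  // here; it is expected to return a nonnegative resampling weight, e.g. a Poisson(1)
  // draw as in online bagging). The per-round predictions are collected in pred_vec
  // and combined according to bs_type.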
  template <bool is_learn>
  void predict_or_learn(bs& d, base_learner& base, example& ec)
  {
    vw* all = d.all;
    bool shouldOutput = all->raw_prediction > 0;

    float weight_temp = ec.l.simple.weight;
  
    string outputString;
    stringstream outputStringStream(outputString);
    d.pred_vec.clear();

    for (size_t i = 1; i <= d.B; i++)
    {
      ec.l.simple.weight = weight_temp * (float)weight_gen();

      if (is_learn)
        base.learn(ec, i-1);
      else
        base.predict(ec, i-1);

      d.pred_vec.push_back(ec.pred.scalar);

      if (shouldOutput) {
        if (i > 1) outputStringStream << ' ';
        outputStringStream << i << ':' << ec.partial_prediction;
      }
    }
    
    ec.l.simple.weight = weight_temp;
    
    switch(d.bs_type)
    {
      case BS_TYPE_MEAN:
        bs_predict_mean(*all, ec, d.pred_vec);
        break;
      case BS_TYPE_VOTE:
        bs_predict_vote(*all, ec, d.pred_vec);
        break;
      default:
        std::cerr << "Unknown bs_type specified: " << d.bs_type << ". Exiting." << endl;
        throw exception();
    }

    if (shouldOutput) 
      all->print_text(all->raw_prediction, outputStringStream.str(), ec.tag);

  }

  void finish_example(vw& all, bs& d, example& ec)
  {
    BS::output_example(all, d, ec);
    VW::finish_example(all, &ec);
  }

  void finish(bs& d)
  { //the bs struct is allocated with calloc_or_die, so no destructor runs automatically;
    //destroy the vector member explicitly to release its heap storage
    d.pred_vec.~vector();
  }

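  // Reduction setup: --bootstrap (registered elsewhere by the option framework and read
  // from vm here) gives the number of rounds B; --bs_type selects mean or vote. Both are
  // appended to file_options so a saved regressor restores the same configuration, and
  // init_learner(..., data.B) reserves B sets of weights so each round trains its own
  // copy of the base learner.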
  base_learner* setup(vw& all, po::variables_map& vm)
  {
    bs& data = calloc_or_die<bs>();
    data.ub = FLT_MAX;
    data.lb = -FLT_MAX;

    po::options_description bs_options("Bootstrap options");
    bs_options.add_options()
      ("bs_type", po::value<string>(), "prediction type {mean,vote}");
    
    vm = add_options(all, bs_options);

    data.B = (uint32_t)vm["bootstrap"].as<size_t>();

    //append --bootstrap with the number of rounds to file_options so it is saved in the regressor and restored on load
    *all.file_options << " --bootstrap " << data.B;

    std::string type_string("mean");

    if (vm.count("bs_type"))
    {
      type_string = vm["bs_type"].as<std::string>();
      
      if (type_string.compare("mean") == 0) { 
        data.bs_type = BS_TYPE_MEAN;
      }
      else if (type_string.compare("vote") == 0) {
        data.bs_type = BS_TYPE_VOTE;
      }
      else {
        std::cerr << "warning: bs_type must be in {'mean','vote'}; resetting to mean." << std::endl;
        data.bs_type = BS_TYPE_MEAN;
      }
    }
    else //by default use mean
      data.bs_type = BS_TYPE_MEAN;
    *all.file_options << " --bs_type " << type_string;

    data.pred_vec.reserve(data.B);
    data.all = &all;

    learner<bs>& l = init_learner(&data, all.l, data.B);
    l.set_learn(predict_or_learn<true>);
    l.set_predict(predict_or_learn<false>);
    l.set_finish_example(finish_example);
    l.set_finish(finish);

    return make_base(l);
  }
}