github.com/moses-smt/vowpal_wabbit.git
author    Hal Daume III <me@hal3.name>  2014-07-22 19:19:39 +0400
committer Hal Daume III <me@hal3.name>  2014-07-22 19:19:39 +0400
commit    dba5a75de1d9f3afdfaba1d877eecf2a36377b51 (patch)
tree      da4914a8840c9fd0385899aaa73c5494ddecefd0
parent    360855361f00cbe474a0fcedbe7bcec2de3bb91b (diff)

pulled ezexample into vw instead of library, added library to runtests
-rw-r--r--  library/Makefile                        10
-rw-r--r--  library/ezexample.h                    250
-rw-r--r--  library/ezexample_predict.cc            18
-rw-r--r--  library/ezexample_predict_threaded.cc   18
-rw-r--r--  library/ezexample_train.cc               2
-rwxr-xr-x  library/train.sh                         4
-rw-r--r--  library/train.w                        bin 74 -> 349 bytes
-rwxr-xr-x  test/RunTests                           14
-rw-r--r--  vowpalwabbit/csoaa.cc                    2
-rw-r--r--  vowpalwabbit/parse_args.cc               1
-rw-r--r--  vowpalwabbit/searn.cc                    1
-rw-r--r--  vowpalwabbit/searn_sequencetask.cc      41
-rw-r--r--  vowpalwabbit/searn_sequencetask.h        7
13 files changed, 94 insertions, 274 deletions
diff --git a/library/Makefile b/library/Makefile
index 499a0a69..6b7fb917 100644
--- a/library/Makefile
+++ b/library/Makefile
@@ -11,21 +11,21 @@ endif
VWLIBS = -L ../vowpalwabbit -l vw -l allreduce
STDLIBS = $(BOOST_LIBRARY) $(LIBS)
-all: ezexample_predict ezexample_train library_example recommend gd_mf_weights
+all: ezexample_predict ezexample_train library_example recommend gd_mf_weights # ezexample_predict_threaded
-ezexample_predict: ezexample_predict.cc ../vowpalwabbit/libvw.a ezexample.h
+ezexample_predict: ezexample_predict.cc ../vowpalwabbit/libvw.a
$(CXX) -g $(FLAGS) -o $@ $< $(VWLIBS) $(STDLIBS)
-ezexample_predict_threaded: ezexample_predict_threaded.cc ../vowpalwabbit/libvw.a ezexample.h
+ezexample_predict_threaded: ezexample_predict_threaded.cc ../vowpalwabbit/libvw.a
$(CXX) -g $(FLAGS) -o $@ $< $(VWLIBS) $(BOOST_PROGRAM_OPTIONS) -l z -l boost_thread
-ezexample_train: ezexample_train.cc ../vowpalwabbit/libvw.a ezexample.h
+ezexample_train: ezexample_train.cc ../vowpalwabbit/libvw.a
$(CXX) -g $(FLAGS) -o $@ $< $(VWLIBS) $(STDLIBS)
library_example: library_example.cc ../vowpalwabbit/libvw.a
$(CXX) -g $(FLAGS) -o $@ $< $(VWLIBS) $(STDLIBS)
-recommend: recommend.cc ../vowpalwabbit/libvw.a ezexample.h
+recommend: recommend.cc ../vowpalwabbit/libvw.a
$(CXX) -g $(FLAGS) -o $@ $< $(VWLIBS) $(STDLIBS)
gd_mf_weights: gd_mf_weights.cc ../vowpalwabbit/libvw.a
diff --git a/library/ezexample.h b/library/ezexample.h
deleted file mode 100644
index 69797738..00000000
--- a/library/ezexample.h
+++ /dev/null
@@ -1,250 +0,0 @@
-#ifndef EZEXAMPLE_H
-#define EZEXAMPLE_H
-
-#include <stdio.h>
-#include "../vowpalwabbit/parser.h"
-#include "../vowpalwabbit/vw.h"
-
-using namespace std;
-typedef uint32_t fid;
-
-struct vw_namespace {
- char namespace_letter;
-public: vw_namespace(const char c) : namespace_letter(c) {}
-};
-
-
-class ezexample {
- private:
- vw*vw_ref;
- vw*vw_par_ref; // an extra parser if we're multithreaded
- bool is_multiline;
-
- char str[2];
- example*ec;
- vector<fid> past_seeds;
- fid current_seed;
- size_t quadratic_features_num;
- float quadratic_features_sqr;
- char current_ns;
- bool ns_exists[256];
- bool example_changed_since_prediction;
-
- v_array<example*> example_copies;
-
- ezexample(const ezexample & ex);
- ezexample & operator=(const ezexample & ex);
-
- example* get_new_example() {
- example* new_ec = VW::new_unused_example(*vw_par_ref);
- vw_par_ref->p->lp.default_label(new_ec->ld);
- return new_ec;
- }
-
- public:
-
- // REAL FUNCTIONALITY
- ezexample(vw*this_vw, bool multiline=false, vw*this_vw_parser=NULL) {
- vw_ref = this_vw;
- vw_par_ref = (this_vw_parser == NULL) ? this_vw : this_vw_parser;
- is_multiline = multiline;
-
- str[0] = 0; str[1] = 0;
- current_seed = 0;
- current_ns = 0;
-
- ec = get_new_example();
-
- quadratic_features_num = 0;
- quadratic_features_sqr = 0.;
-
- for (size_t i=0; i<256; i++) ns_exists[i] = false;
-
- if (vw_ref->add_constant)
- VW::add_constant_feature(*vw_ref, ec);
-
- example_changed_since_prediction = true;
- }
-
- ~ezexample() {
- if (ec->in_use)
- VW::finish_example(*vw_par_ref, ec);
- for (example**ecc=example_copies.begin; ecc!=example_copies.end; ecc++)
- if ((*ecc)->in_use)
- VW::finish_example(*vw_par_ref, *ecc);
- example_copies.erase();
- free(example_copies.begin);
- }
-
- bool ensure_ns_exists(char c) { // returns TRUE iff we should ignore it :)
- if (vw_ref->ignore_some && vw_ref->ignore[(int)c]) return true;
- if (ns_exists[(int)c]) return false;
- ec->indices.push_back((size_t)c);
- ns_exists[(int)c] = true;
- return false;
- }
-
- void addns(char c) {
- if (ensure_ns_exists(c)) return;
-
- ec->atomics[(int)c].erase();
- ec->sum_feat_sq[(int)c] = 0;
- past_seeds.push_back(current_seed);
- current_ns = c;
- str[0] = c;
- current_seed = VW::hash_space(*vw_ref, str);
- }
-
- void remns() {
- if (ec->indices.size() == 0) {
- current_seed = 0;
- current_ns = 0;
- } else {
- if (ns_exists[(int)current_ns]) {
- ec->total_sum_feat_sq -= ec->sum_feat_sq[(int)current_ns];
- ec->sum_feat_sq[(int)current_ns] = 0;
- ec->num_features -= ec->atomics[(int)current_ns].size();
- ec->atomics[(int)current_ns].erase();
-
- ns_exists[(int)current_ns] = false;
- }
-
- current_seed = past_seeds.back();
- past_seeds.pop_back();
- ec->indices.pop();
- example_changed_since_prediction = true;
- }
- }
-
-
- inline fid addf(char to_ns, fid fint, float v) {
- if (to_ns == 0) return 0;
- if (ensure_ns_exists(to_ns)) return 0;
-
- feature f = { v, fint << vw_ref->reg.stride_shift };
- ec->atomics[(int)to_ns].push_back(f);
- ec->sum_feat_sq[(int)to_ns] += v * v;
- ec->total_sum_feat_sq += v * v;
- ec->num_features++;
- example_changed_since_prediction = true;
- return fint;
- }
-
- inline fid addf(fid fint, float v) { return addf(current_ns, fint, v); }
-
- inline ezexample& set_label(string label) {
- VW::parse_example_label(*vw_par_ref, *ec, label);
- example_changed_since_prediction = true;
- return *this;
- }
-
- void mini_setup_example() {
- ec->partial_prediction = 0.;
- vw_ref->sd->t += vw_par_ref->p->lp.get_weight(ec->ld);
- ec->example_t = vw_ref->sd->t;
-
- ec->num_features -= quadratic_features_num;
- ec->total_sum_feat_sq -= quadratic_features_sqr;
-
- quadratic_features_num = 0;
- quadratic_features_sqr = 0.;
-
- for (vector<string>::iterator i = vw_ref->pairs.begin(); i != vw_ref->pairs.end(); i++) {
- quadratic_features_num
- += (ec->atomics[(int)(*i)[0]].end - ec->atomics[(int)(*i)[0]].begin)
- * (ec->atomics[(int)(*i)[1]].end - ec->atomics[(int)(*i)[1]].begin);
- quadratic_features_sqr
- += ec->sum_feat_sq[(int)(*i)[0]]
- * ec->sum_feat_sq[(int)(*i)[1]];
- }
- ec->num_features += quadratic_features_num;
- ec->total_sum_feat_sq += quadratic_features_sqr;
- }
-
- float predict() {
- static example* empty_example = is_multiline ? VW::read_example(*vw_par_ref, (char*)"") : NULL;
- if (example_changed_since_prediction) {
- mini_setup_example();
- vw_ref->learn(ec);
- if (is_multiline) vw_ref->learn(empty_example);
- example_changed_since_prediction = false;
- }
- return ((label_data*) ec->ld)->prediction;
- }
-
- void train() { // if multiline, add to stack; otherwise, actually train
- if (example_changed_since_prediction) {
- mini_setup_example();
- example_changed_since_prediction = false;
- }
-
- if (!is_multiline) {
- vw_ref->learn(ec);
- } else { // is multiline
- // we need to make a copy
- example* copy = get_new_example();
- assert(ec->in_use);
- VW::copy_example_data(vw_ref->audit, copy, ec, vw_par_ref->p->lp.label_size, vw_par_ref->p->lp.copy_label);
- assert(copy->in_use);
- vw_ref->learn(copy);
- example_copies.push_back(copy);
- }
- }
-
- void clear_features() {
- for (size_t i=0; i<256; i++) {
- if (current_ns == 0) break;
- remns();
- }
- }
-
- void finish() {
- static example* empty_example = is_multiline ? VW::read_example(*vw_par_ref, (char*)"") : NULL;
- if (is_multiline) {
- vw_ref->learn(empty_example);
- for (example**ecc=example_copies.begin; ecc!=example_copies.end; ecc++)
- if ((*ecc)->in_use)
- VW::finish_example(*vw_par_ref, *ecc);
- example_copies.erase();
- }
- }
-
-
- // HELPER FUNCTIONALITY
-
- inline fid hash(string fstr) { return VW::hash_feature(*vw_ref, fstr, current_seed); }
- inline fid hash(char* fstr) { return VW::hash_feature_cstr(*vw_ref, fstr, current_seed); }
- inline fid hash(char c, string fstr) { str[0] = c; return VW::hash_feature(*vw_ref, fstr, VW::hash_space(*vw_ref, str)); }
- inline fid hash(char c, char* fstr) { str[0] = c; return VW::hash_feature_cstr(*vw_ref, fstr, VW::hash_space(*vw_ref, str)); }
-
- inline fid addf(fid fint ) { return addf(fint , 1.0); }
- inline fid addf(string fstr, float val) { return addf(hash(fstr), val); }
- inline fid addf(string fstr ) { return addf(hash(fstr), 1.0); }
-
- inline fid addf(char ns, fid fint ) { return addf(ns, fint , 1.0); }
- inline fid addf(char ns, string fstr, float val) { return addf(ns, hash(ns, fstr), val); }
- inline fid addf(char ns, string fstr ) { return addf(ns, hash(ns, fstr), 1.0); }
-
- inline ezexample& operator()(fid fint ) { addf(fint, 1.0); return *this; }
- inline ezexample& operator()(string fstr ) { addf(fstr, 1.0); return *this; }
- inline ezexample& operator()(const char* fstr ) { addf(fstr, 1.0); return *this; }
- inline ezexample& operator()(fid fint, float val) { addf(fint, val); return *this; }
- inline ezexample& operator()(string fstr, float val) { addf(fstr, val); return *this; }
- inline ezexample& operator()(const char* fstr, float val) { addf(fstr, val); return *this; }
- inline ezexample& operator()(const vw_namespace&n) { addns(n.namespace_letter); return *this; }
-
- inline ezexample& operator()(char ns, fid fint ) { addf(ns, fint, 1.0); return *this; }
- inline ezexample& operator()(char ns, string fstr ) { addf(ns, fstr, 1.0); return *this; }
- inline ezexample& operator()(char ns, const char* fstr ) { addf(ns, fstr, 1.0); return *this; }
- inline ezexample& operator()(char ns, fid fint, float val) { addf(ns, fint, val); return *this; }
- inline ezexample& operator()(char ns, string fstr, float val) { addf(ns, fstr, val); return *this; }
- inline ezexample& operator()(char ns, const char* fstr, float val) { addf(ns, fstr, val); return *this; }
-
-
- inline ezexample& operator--() { remns(); return *this; }
-
- inline float operator()() { return predict(); }
-};
-
-
-#endif
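
For orientation, here is a minimal sketch of how the fluent interface declared in the removed library/ezexample.h above is driven, mirroring what ezexample_predict.cc does below. The model path and feature strings are illustrative only, the include path assumes the header's new home under vowpalwabbit/, and note that the call sites below switch from predict() to predict_partial(), which suggests the relocated header renames that method.

// Sketch only: fluent ezexample usage against the API shown above.
// Paths and feature names are illustrative; compare ezexample_predict.cc.
#include <iostream>
#include "../vowpalwabbit/vw.h"
#include "../vowpalwabbit/ezexample.h"   // header's new location after this commit

int main() {
  // Load a model trained by library/train.sh; --ldf_override "s" forces
  // singleline LDF handling regardless of what is stored with the model.
  vw* model = VW::initialize("-t -q st --hash all --noconstant --ldf_override s -i train.w --quiet");

  ezexample ex(model, false);        // false = not multiline
  ex(vw_namespace('s'))              // open namespace 's'
    ("p^le_homme")                   // unit-weight string features...
    ("w^le")
    ("w^homme", 2.0f);               // ...or with an explicit value
  ex.set_label("1");
  std::cerr << ex.predict() << std::endl;   // predict_partial() in the relocated header, per the call sites below

  --ex;                              // pop the most recent namespace
  VW::finish(*model);
  return 0;
}
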
diff --git a/library/ezexample_predict.cc b/library/ezexample_predict.cc
index 0aa95941..db061f61 100644
--- a/library/ezexample_predict.cc
+++ b/library/ezexample_predict.cc
@@ -1,14 +1,22 @@
#include <stdio.h>
#include "../vowpalwabbit/parser.h"
#include "../vowpalwabbit/vw.h"
-#include "ezexample.h"
+#include "../vowpalwabbit/ezexample.h"
using namespace std;
int main(int argc, char *argv[])
{
+ string init_string = "-t -q st --hash all --noconstant --ldf_override s -i ";
+ if (argc > 1)
+ init_string += argv[1];
+ else
+ init_string += "train.w";
+
+ cerr << "initializing with: '" << init_string << "'" << endl;
+
// INITIALIZE WITH WHATEVER YOU WOULD PUT ON THE VW COMMAND LINE -- THIS READS IN A MODEL FROM train.w
- vw* vw = VW::initialize("-t -i train.w -q st --hash all --noconstant --csoaa_ldf s --quiet");
+ vw* vw = VW::initialize(init_string); // "-t -q st --hash all --noconstant --ldf_override s -i train.w");
{
// HAL'S SPIFFY INTERFACE USING C++ CRAZINESS
@@ -22,7 +30,7 @@ int main(int argc, char *argv[])
("w^le")
("w^homme");
ex.set_label("1");
- cerr << ex.predict() << endl;
+ cerr << ex.predict_partial() << endl;
// ex.clear_features();
@@ -32,14 +40,14 @@ int main(int argc, char *argv[])
("w^un")
("w^homme");
ex.set_label("2");
- cerr << ex.predict() << endl;
+ cerr << ex.predict_partial() << endl;
--ex; // remove the most recent namespace, and add features with explicit ns
ex('t', "p^un_homme")
('t', "w^un")
('t', "w^homme");
ex.set_label("2");
- cerr << ex.predict() << endl;
+ cerr << ex.predict_partial() << endl;
}
// AND FINISH UP
diff --git a/library/ezexample_predict_threaded.cc b/library/ezexample_predict_threaded.cc
index a45d76ca..0fa5b1e6 100644
--- a/library/ezexample_predict_threaded.cc
+++ b/library/ezexample_predict_threaded.cc
@@ -1,6 +1,6 @@
#include <stdio.h>
#include "../vowpalwabbit/vw.h"
-#include "ezexample.h"
+#include "../vowpalwabbit/ezexample.h"
#include <boost/thread/thread.hpp>
@@ -87,8 +87,8 @@ int main(int argc, char *argv[])
int threadcount = atoi(argv[1]);
runcount = atoi(argv[2]);
// INITIALIZE WITH WHATEVER YOU WOULD PUT ON THE VW COMMAND LINE -- THIS READS IN A MODEL FROM train.w
- string vw_init_string_all = "-t --csoaa_ldf s --quiet -q st --noconstant --hash all -i train.w";
- string vw_init_string_parser = "-t --csoaa_ldf s --quiet -q st --noconstant --hash all --noop"; // this needs to have enough arguments to get the parser right
+ string vw_init_string_all = "-t --ldf_override s --quiet -q st --noconstant --hash all -i train.w";
+ string vw_init_string_parser = "-t --ldf_override s --quiet -q st --noconstant --hash all --noop"; // this needs to have enough arguments to get the parser right
vw*vw = VW::initialize(vw_init_string_all);
vector<double> results;
@@ -104,8 +104,8 @@ int main(int argc, char *argv[])
("w^le")
("w^homme");
ex.set_label("1");
- results.push_back(ex.predict());
- cerr << "should be near zero = " << ex.predict() << endl;
+ results.push_back(ex.predict_partial());
+ cerr << "should be near zero = " << ex.predict_partial() << endl;
--ex; // remove the most recent namespace
ex(vw_namespace('t'))
@@ -113,8 +113,8 @@ int main(int argc, char *argv[])
("w^un")
("w^homme");
ex.set_label("1");
- results.push_back(ex.predict());
- cerr << "should be near one = " << ex.predict() << endl;
+ results.push_back(ex.predict_partial());
+ cerr << "should be near one = " << ex.predict_partial() << endl;
--ex; // remove the most recent namespace
// add features with explicit ns
@@ -122,8 +122,8 @@ int main(int argc, char *argv[])
('t', "w^un")
('t', "w^homme");
ex.set_label("1");
- results.push_back(ex.predict());
- cerr << "should be near one = " << ex.predict() << endl;
+ results.push_back(ex.predict_partial());
+ cerr << "should be near one = " << ex.predict_partial() << endl;
}
if (threadcount == 0)
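
The threaded example leans on ezexample's third constructor argument, the "extra parser if we're multithreaded" noted in the removed header: one vw instance loads the model and does all the scoring, while additional instances initialized with --noop exist only to parse labels and hand out examples. A rough sketch of one plausible arrangement follows; the function and variable names (score_once, scorer, parser1, parser2) are hypothetical, and the real program also collects results and checks them against expected values.

// Sketch only: shared scoring instance plus per-thread parser-only
// instances, following the two init strings shown in the diff above.
#include <iostream>
#include <boost/thread/thread.hpp>
#include "../vowpalwabbit/vw.h"
#include "../vowpalwabbit/ezexample.h"

void score_once(vw* scorer, vw* parser) {
  // Examples are allocated and labels parsed by the parser-only instance;
  // predictions come from the shared, model-holding instance.
  ezexample ex(scorer, false, parser);
  ex(vw_namespace('s'))("p^le_homme")("w^le")("w^homme");
  ex.set_label("1");
  std::cerr << ex.predict_partial() << std::endl;
}

int main() {
  vw* scorer  = VW::initialize("-t --ldf_override s --quiet -q st --noconstant --hash all -i train.w");
  // --noop: no learning, but enough arguments to configure the parser identically
  vw* parser1 = VW::initialize("-t --ldf_override s --quiet -q st --noconstant --hash all --noop");
  vw* parser2 = VW::initialize("-t --ldf_override s --quiet -q st --noconstant --hash all --noop");

  boost::thread t1(score_once, scorer, parser1);
  boost::thread t2(score_once, scorer, parser2);
  t1.join();
  t2.join();

  VW::finish(*parser1);
  VW::finish(*parser2);
  VW::finish(*scorer);
  return 0;
}
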
diff --git a/library/ezexample_train.cc b/library/ezexample_train.cc
index 7df2dc50..a0f66a99 100644
--- a/library/ezexample_train.cc
+++ b/library/ezexample_train.cc
@@ -1,7 +1,7 @@
#include <stdio.h>
#include "../vowpalwabbit/parser.h"
#include "../vowpalwabbit/vw.h"
-#include "ezexample.h"
+#include "../vowpalwabbit/ezexample.h"
using namespace std;
diff --git a/library/train.sh b/library/train.sh
index ce118f04..e0d5f121 100755
--- a/library/train.sh
+++ b/library/train.sh
@@ -1,5 +1,5 @@
#!/bin/bash
rm -f train.cache train.w
-../vowpalwabbit/vw -c -d train -f train.w -q st --passes 100 --hash all --noconstant --csoaa_ldf m
-../vowpalwabbit/vw -t -d train -i train.w -p train.pred --noconstant --csoaa_ldf m
+../vowpalwabbit/vw -c -d train -f train.w -q st --passes 100 --hash all --noconstant --csoaa_ldf m --holdout_off
+../vowpalwabbit/vw -t -d train -i train.w -p train.pred --noconstant
diff --git a/library/train.w b/library/train.w
index be1820ff..02f74c42 100644
--- a/library/train.w
+++ b/library/train.w
Binary files differ
diff --git a/test/RunTests b/test/RunTests
index f10e05b3..5b64bf84 100755
--- a/test/RunTests
+++ b/test/RunTests
@@ -288,6 +288,10 @@ sub next_test() {
}
next;
}
+ if ($line =~ /library\/ezexample_/) {
+ $cmd = trim_spaces($line);
+ next;
+ }
if ($line =~ m/\.stdout\b/) {
$out_ref = ref_file(trim_spaces($line));
next;
@@ -1015,3 +1019,13 @@ __DATA__
{VW} --stage_poly --sched_exponent 1.0 --batch_sz 1000 -d train-sets/rcv1_small.dat -p stage_poly.s100.doubling.predict --quiet
train-sets/ref/stage_poly.s100.doubling.stderr
train-sets/ref/stage_poly.s100.doubling.predict
+
+# Test 65: library test, train the initial model
+{VW} -c -k -d train-sets/library_train -f models/library_train.w -q st --passes 100 --hash all --noconstant --csoaa_ldf m --holdout_off
+ train-sets/ref/library_train.stdout
+ train-sets/ref/library_train.stderr
+
+# Test 66: library test, run ezexample_predict
+../library/ezexample_predict models/library_train.w
+ train-sets/ref/ezexample_predict.stdout
+ train-sets/ref/ezexample_predict.stderr
diff --git a/vowpalwabbit/csoaa.cc b/vowpalwabbit/csoaa.cc
index 162d8608..f78a84a0 100644
--- a/vowpalwabbit/csoaa.cc
+++ b/vowpalwabbit/csoaa.cc
@@ -744,6 +744,8 @@ namespace LabelDict {
all.file_options.append(" --wap_ldf ");
all.file_options.append(ldf_arg);
}
+ if ( vm.count("ldf_override") )
+ ldf_arg = vm["ldf_override"].as<string>();
all.p->lp = COST_SENSITIVE::cs_label;
diff --git a/vowpalwabbit/parse_args.cc b/vowpalwabbit/parse_args.cc
index 7c71dc37..b33d2661 100644
--- a/vowpalwabbit/parse_args.cc
+++ b/vowpalwabbit/parse_args.cc
@@ -747,6 +747,7 @@ void parse_score_users(vw& all, po::variables_map& vm, bool& got_cs)
("wap", po::value<size_t>(), "Use weighted all-pairs multiclass learning with <k> costs")
("csoaa_ldf", po::value<string>(), "Use one-against-all multiclass learning with label dependent features. Specify singleline or multiline.")
("wap_ldf", po::value<string>(), "Use weighted all-pairs multiclass learning with label dependent features. Specify singleline or multiline.")
+ ("ldf_override", po::value<string>(), "Override singleline or multiline from csoaa_ldf or wap_ldf, eg if stored in file")
;
vm = add_options(all, multiclass_opt);
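
The new --ldf_override option exists because a model trained with --csoaa_ldf or --wap_ldf carries that setting in its stored options (the file_options appends in the csoaa.cc hunk above), so a program loading the model previously had no way to switch between singleline and multiline handling at load time. The updated ezexample_predict.cc and train.sh show the intended use; a minimal sketch from the library side, with an illustrative model path:

// Sketch: load a model trained by train.sh with "--csoaa_ldf m", but force
// singleline ("s") label-dependent-feature handling for library prediction.
#include "../vowpalwabbit/vw.h"

int main() {
  vw* model = VW::initialize("-t -q st --hash all --noconstant --ldf_override s -i train.w --quiet");
  // ... build and score examples here, e.g. through the ezexample interface ...
  VW::finish(*model);
  return 0;
}
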
diff --git a/vowpalwabbit/searn.cc b/vowpalwabbit/searn.cc
index 9f78e676..2d01560e 100644
--- a/vowpalwabbit/searn.cc
+++ b/vowpalwabbit/searn.cc
@@ -47,6 +47,7 @@ namespace Searn
&ArgmaxTask::task,
&SequenceTask_DemoLDF::task,
&SequenceSpanTask::task,
+ &SequenceDoubleTask::task,
&EntityRelationTask::task,
NULL }; // must NULL terminate!
diff --git a/vowpalwabbit/searn_sequencetask.cc b/vowpalwabbit/searn_sequencetask.cc
index 97ac4ab8..06b8d9db 100644
--- a/vowpalwabbit/searn_sequencetask.cc
+++ b/vowpalwabbit/searn_sequencetask.cc
@@ -8,11 +8,13 @@ license as described in the file LICENSE.
#include "memory.h"
#include "example.h"
#include "gd.h"
+#include "ezexample.h"
namespace SequenceTask { Searn::searn_task task = { "sequence", initialize, finish, structured_predict }; }
-namespace ArgmaxTask { Searn::searn_task task = { "argmax", initialize, finish, structured_predict }; }
-namespace SequenceTask_DemoLDF { Searn::searn_task task = { "sequence_demoldf", initialize, finish, structured_predict }; }
+namespace ArgmaxTask { Searn::searn_task task = { "argmax", initialize, finish, structured_predict }; }
+namespace SequenceDoubleTask { Searn::searn_task task = { "sequencedouble", initialize, finish, structured_predict }; }
namespace SequenceSpanTask { Searn::searn_task task = { "sequencespan", initialize, finish, structured_predict }; }
+namespace SequenceTask_DemoLDF { Searn::searn_task task = { "sequence_demoldf", initialize, finish, structured_predict }; }
namespace SequenceTask {
@@ -110,6 +112,41 @@ namespace ArgmaxTask {
}
}
+
+namespace SequenceDoubleTask {
+ using namespace Searn;
+
+ void initialize(searn& srn, size_t& num_actions, po::variables_map& vm) {
+ srn.set_options( AUTO_HISTORY | // automatically add history features to our examples, please
+ EXAMPLES_DONT_CHANGE ); // we don't do any internal example munging
+ }
+
+ void finish(searn& srn) { } // if we had task data, we'd want to free it here
+
+ void structured_predict(searn& srn, vector<example*> ec) {
+ size_t N = ec.size();
+ for (size_t j=0; j<N*2; j++) {
+ srn.snapshot(j, 1, &j, sizeof(j), true);
+ size_t i =
+ (j == 0) ? 0 :
+ (j == 2*N-1) ? (N-1) :
+ (j%2 == 0) ? (j/2 - 1) :
+ ((j+1)/2);
+
+ size_t prediction = srn.predict(ec[i], MULTICLASS::get_example_label(ec[i]));
+
+ if ((j >= 2) && (j%2==0)) {
+ srn.loss( prediction != MULTICLASS::get_example_label(ec[i]) );
+ if (srn.output().good())
+ srn.output() << prediction << ' ';
+ } else
+ srn.loss(0.);
+ }
+ }
+}
+
+
+
namespace SequenceSpanTask {
enum EncodingType { BIO, BILOU };
// the format for the BIO encoding is:
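
The index arithmetic in SequenceDoubleTask::structured_predict above visits every position twice: j runs from 0 to 2N-1, odd j predicts position (j+1)/2, even j >= 2 revisits position j/2 - 1, and only those revisits contribute loss and printed output (the final pass over position N-1 at j = 2N-1 is charged zero loss). A stand-alone check of the j -> i mapping, illustrative only and not part of the commit:

// Prints the visit order produced by SequenceDoubleTask's index mapping.
// For N = 3 the positions come out as 0 1 0 2 1 2, i.e. each position is
// predicted twice, with loss counted at j = 2 and j = 4 (positions 0 and 1).
#include <cstdio>
#include <cstddef>

int main() {
  const size_t N = 3;
  for (size_t j = 0; j < N * 2; j++) {
    size_t i =
      (j == 0)         ? 0 :
      (j == 2 * N - 1) ? (N - 1) :
      (j % 2 == 0)     ? (j / 2 - 1) :
                         ((j + 1) / 2);
    std::printf("j=%zu -> i=%zu%s\n", j, i,
                (j >= 2 && j % 2 == 0) ? "  (loss counted here)" : "");
  }
  return 0;
}
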
diff --git a/vowpalwabbit/searn_sequencetask.h b/vowpalwabbit/searn_sequencetask.h
index 92621e68..7d99ac94 100644
--- a/vowpalwabbit/searn_sequencetask.h
+++ b/vowpalwabbit/searn_sequencetask.h
@@ -29,6 +29,13 @@ namespace SequenceSpanTask {
extern Searn::searn_task task;
}
+namespace SequenceDoubleTask {
+ void initialize(Searn::searn&, size_t&, po::variables_map&);
+ void finish(Searn::searn&);
+ void structured_predict(Searn::searn&, vector<example*>);
+ extern Searn::searn_task task;
+}
+
namespace SequenceTask_DemoLDF {
void initialize(Searn::searn&, size_t&, po::variables_map&);
void finish(Searn::searn&);