Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/moses-smt/mosesdecoder.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/moses/FF
diff options
context:
space:
mode:
Diffstat (limited to 'moses/FF')
-rw-r--r--moses/FF/Factory.cpp14
-rw-r--r--moses/FF/FeatureFunction.cpp18
-rw-r--r--moses/FF/FeatureFunction.h12
-rw-r--r--moses/FF/LexicalReordering/LexicalReordering.cpp8
-rw-r--r--moses/FF/LexicalReordering/LexicalReorderingState.cpp2
-rw-r--r--moses/FF/LexicalReordering/LexicalReorderingState.h2
-rw-r--r--moses/FF/LexicalReordering/SparseReordering.cpp8
-rw-r--r--moses/FF/LexicalReordering/SparseReordering.h2
-rw-r--r--moses/FF/Model1Feature.cpp32
-rw-r--r--moses/FF/Model1Feature.h4
-rw-r--r--moses/FF/PhraseOrientationFeature.cpp4
-rw-r--r--moses/FF/RulePairUnlexicalizedSource.cpp8
-rw-r--r--moses/FF/RuleScope.cpp4
-rw-r--r--moses/FF/SoftSourceSyntacticConstraintsFeature.cpp8
-rw-r--r--moses/FF/VW/VW.h10
15 files changed, 68 insertions, 68 deletions
diff --git a/moses/FF/Factory.cpp b/moses/FF/Factory.cpp
index 14ef4f90f..81c6bdeb9 100644
--- a/moses/FF/Factory.cpp
+++ b/moses/FF/Factory.cpp
@@ -147,8 +147,8 @@ protected:
FeatureFactory() {}
};
-template <class F>
-void
+template <class F>
+void
FeatureFactory
::DefaultSetup(F *feature)
{
@@ -156,7 +156,7 @@ FeatureFactory
const string &featureName = feature->GetScoreProducerDescription();
std::vector<float> weights = static_data.GetParameter()->GetWeights(featureName);
-
+
if (feature->GetNumScoreComponents())
{
if (weights.size() == 0)
@@ -177,19 +177,19 @@ FeatureFactory
}
UTIL_THROW_IF2(weights.size() != feature->GetNumScoreComponents(),
"FATAL ERROR: Mismatch in number of features and number "
- << "of weights for Feature Function " << featureName
- << " (features: " << feature->GetNumScoreComponents()
+ << "of weights for Feature Function " << featureName
+ << " (features: " << feature->GetNumScoreComponents()
<< " vs. weights: " << weights.size() << ")");
static_data.SetWeights(feature, weights);
}
- else if (feature->IsTuneable())
+ else if (feature->IsTuneable())
static_data.SetWeights(feature, weights);
}
namespace
{
-template <class F>
+template <class F>
class DefaultFeatureFactory : public FeatureFactory
{
public:
diff --git a/moses/FF/FeatureFunction.cpp b/moses/FF/FeatureFunction.cpp
index fcce15c4d..298a9e65c 100644
--- a/moses/FF/FeatureFunction.cpp
+++ b/moses/FF/FeatureFunction.cpp
@@ -38,10 +38,10 @@ void FeatureFunction::Destroy()
RemoveAllInColl(s_staticColl);
}
-// The original declaration as
+// The original declaration as
// void FeatureFunction::CallChangeSource(InputType *&input)
-// had me a bit perplexed. Would you really want to allow
-// any feature function to replace the InputType behind the
+// had me a bit perplexed. Would you really want to allow
+// any feature function to replace the InputType behind the
// back of the others? And change what the vector is pointing to?
void FeatureFunction::CallChangeSource(InputType * const&input)
@@ -190,17 +190,17 @@ void FeatureFunction::SetTuneableComponents(const std::string& value)
}
}
-void
+void
FeatureFunction
::InitializeForInput(ttasksptr const& ttask)
{ InitializeForInput(*(ttask->GetSource().get())); }
-void
+void
FeatureFunction
-::CleanUpAfterSentenceProcessing(ttasksptr const& ttask)
+::CleanUpAfterSentenceProcessing(ttasksptr const& ttask)
{ CleanUpAfterSentenceProcessing(*(ttask->GetSource().get())); }
-size_t
+size_t
FeatureFunction
::GetIndex() const
{ return m_index; }
@@ -208,9 +208,9 @@ FeatureFunction
/// set index
// @return index of the next FF
-size_t
+size_t
FeatureFunction
-::SetIndex(size_t const idx)
+::SetIndex(size_t const idx)
{
m_index = idx;
return this->GetNumScoreComponents() + idx;
diff --git a/moses/FF/FeatureFunction.h b/moses/FF/FeatureFunction.h
index 082542554..a8f189f0b 100644
--- a/moses/FF/FeatureFunction.h
+++ b/moses/FF/FeatureFunction.h
@@ -27,7 +27,7 @@ class FactorMask;
class InputPath;
class StackVec;
class DistortionScoreProducer;
-class TranslationTask;
+class TranslationTask;
/** base class for all feature functions.
*/
@@ -128,12 +128,12 @@ protected:
public:
//! Called before search and collecting of translation options
- virtual void
+ virtual void
InitializeForInput(ttasksptr const& ttask);
// clean up temporary memory, called after processing each sentence
- virtual void
- CleanUpAfterSentenceProcessing(ttasksptr const& ttask);
+ virtual void
+ CleanUpAfterSentenceProcessing(ttasksptr const& ttask);
const std::string &
GetArgLine() const { return m_argLine; }
@@ -151,11 +151,11 @@ public:
// may have more factors than actually need, but not guaranteed.
// For SCFG decoding, the source contains non-terminals, NOT the raw
// source from the input sentence
- virtual void
+ virtual void
EvaluateInIsolation(const Phrase &source, const TargetPhrase &targetPhrase,
ScoreComponentCollection& scoreBreakdown,
ScoreComponentCollection& estimatedFutureScore) const = 0;
-
+
// override this method if you want to change the input before decoding
virtual void ChangeSource(InputType * const&input) const { }
diff --git a/moses/FF/LexicalReordering/LexicalReordering.cpp b/moses/FF/LexicalReordering/LexicalReordering.cpp
index d9b1843e9..c67a16076 100644
--- a/moses/FF/LexicalReordering/LexicalReordering.cpp
+++ b/moses/FF/LexicalReordering/LexicalReordering.cpp
@@ -65,8 +65,8 @@ LexicalReordering(const std::string &line)
}
// sanity check: number of default scores
- size_t numScores
- = m_numScoreComponents
+ size_t numScores
+ = m_numScoreComponents
= m_numTuneableComponents
= m_configuration->GetNumScoreComponents();
UTIL_THROW_IF2(m_haveDefaultScores && m_defaultScores.size() != numScores,
@@ -137,7 +137,7 @@ void
LexicalReordering::
SetCache(TranslationOption& to) const
{
- if (to.GetLexReorderingScores(this)) return;
+ if (to.GetLexReorderingScores(this)) return;
// Scores were set already (e.g., by sampling phrase table)
Phrase const& sphrase = to.GetInputPath().GetPhrase();
@@ -157,7 +157,7 @@ void
LexicalReordering::
SetCache(TranslationOptionList& tol) const
{
- BOOST_FOREACH(TranslationOption* to, tol)
+ BOOST_FOREACH(TranslationOption* to, tol)
this->SetCache(*to);
}
diff --git a/moses/FF/LexicalReordering/LexicalReorderingState.cpp b/moses/FF/LexicalReordering/LexicalReorderingState.cpp
index f21c45455..48fd577f1 100644
--- a/moses/FF/LexicalReordering/LexicalReorderingState.cpp
+++ b/moses/FF/LexicalReordering/LexicalReorderingState.cpp
@@ -101,7 +101,7 @@ GetOrientation(int const reoDistance) const
// this one is for HierarchicalReorderingBackwardState
return ((m_modelType == LeftRight)
? (reoDistance >= 1) ? R : L
- : (reoDistance == 1) ? M
+ : (reoDistance == 1) ? M
: (m_modelType == Monotonic) ? NM
: (reoDistance == -1) ? S
: (m_modelType == MSD) ? D
diff --git a/moses/FF/LexicalReordering/LexicalReorderingState.h b/moses/FF/LexicalReordering/LexicalReorderingState.h
index 48bf4698a..1e488fc41 100644
--- a/moses/FF/LexicalReordering/LexicalReorderingState.h
+++ b/moses/FF/LexicalReordering/LexicalReorderingState.h
@@ -44,7 +44,7 @@ public:
static const ReorderingType L = 1; // left
static const ReorderingType MAX = 3; // largest possible
#else
- enum ReorderingType
+ enum ReorderingType
{
M = 0, // monotonic
NM = 1, // non-monotonic
diff --git a/moses/FF/LexicalReordering/SparseReordering.cpp b/moses/FF/LexicalReordering/SparseReordering.cpp
index 27e090ccd..5397dcb10 100644
--- a/moses/FF/LexicalReordering/SparseReordering.cpp
+++ b/moses/FF/LexicalReordering/SparseReordering.cpp
@@ -113,10 +113,10 @@ void SparseReordering::PreCalculateFeatureNames(size_t index, const string& id,
for (size_t position = SparseReorderingFeatureKey::First;
position <= SparseReorderingFeatureKey::Last; ++position) {
for (int reoType = 0; reoType <= LRModel::MAX; ++reoType) {
- SparseReorderingFeatureKey
- key(index, static_cast<SparseReorderingFeatureKey::Type>(type),
- factor, isCluster,
- static_cast<SparseReorderingFeatureKey::Position>(position),
+ SparseReorderingFeatureKey
+ key(index, static_cast<SparseReorderingFeatureKey::Type>(type),
+ factor, isCluster,
+ static_cast<SparseReorderingFeatureKey::Position>(position),
side, static_cast<LRModel::ReorderingType>(reoType));
m_featureMap.insert(pair<SparseReorderingFeatureKey, FName>(key,m_producer->GetFeatureName(key.Name(id))));
}
diff --git a/moses/FF/LexicalReordering/SparseReordering.h b/moses/FF/LexicalReordering/SparseReordering.h
index 958ce998b..ada17d1b2 100644
--- a/moses/FF/LexicalReordering/SparseReordering.h
+++ b/moses/FF/LexicalReordering/SparseReordering.h
@@ -115,7 +115,7 @@ private:
typedef boost::unordered_map<std::string, float> WeightMap;
WeightMap m_weightMap;
bool m_useWeightMap;
- std::vector<FName> m_featureMap2;
+ std::vector<FName> m_featureMap2;
void ReadWordList(const std::string& filename, const std::string& id,
SparseReorderingFeatureKey::Side side, std::vector<WordList>* pWordLists);
diff --git a/moses/FF/Model1Feature.cpp b/moses/FF/Model1Feature.cpp
index 38883c12e..6f6552461 100644
--- a/moses/FF/Model1Feature.cpp
+++ b/moses/FF/Model1Feature.cpp
@@ -19,7 +19,7 @@ Model1Vocabulary::Model1Vocabulary()
Store(m_NULL,0);
}
-bool Model1Vocabulary::Store(const Factor* word, const unsigned id)
+bool Model1Vocabulary::Store(const Factor* word, const unsigned id)
{
boost::unordered_map<const Factor*, unsigned>::iterator iter = m_lookup.find( word );
if ( iter != m_lookup.end() ) {
@@ -33,7 +33,7 @@ bool Model1Vocabulary::Store(const Factor* word, const unsigned id)
return true;
}
-unsigned Model1Vocabulary::StoreIfNew(const Factor* word)
+unsigned Model1Vocabulary::StoreIfNew(const Factor* word)
{
boost::unordered_map<const Factor*, unsigned>::iterator iter = m_lookup.find( word );
@@ -47,7 +47,7 @@ unsigned Model1Vocabulary::StoreIfNew(const Factor* word)
return id;
}
-unsigned Model1Vocabulary::GetWordID(const Factor* word) const
+unsigned Model1Vocabulary::GetWordID(const Factor* word) const
{
boost::unordered_map<const Factor*, unsigned>::const_iterator iter = m_lookup.find( word );
if ( iter == m_lookup.end() ) {
@@ -56,7 +56,7 @@ unsigned Model1Vocabulary::GetWordID(const Factor* word) const
return iter->second;
}
-const Factor* Model1Vocabulary::GetWord(unsigned id) const
+const Factor* Model1Vocabulary::GetWord(unsigned id) const
{
if (id >= m_vocab.size()) {
return NULL;
@@ -64,7 +64,7 @@ const Factor* Model1Vocabulary::GetWord(unsigned id) const
return m_vocab[ id ];
}
-void Model1Vocabulary::Load(const std::string& fileName)
+void Model1Vocabulary::Load(const std::string& fileName)
{
InputFileStream inFile(fileName);
FactorCollection &factorCollection = FactorCollection::Instance();
@@ -84,7 +84,7 @@ void Model1Vocabulary::Load(const std::string& fileName)
UTIL_THROW_IF2(!stored, "Line " << i << " in " << fileName << " overwrites existing vocabulary entry.");
}
}
- while ( getline(inFile, line) )
+ while ( getline(inFile, line) )
{
++i;
std::vector<std::string> tokens = Tokenize(line);
@@ -104,7 +104,7 @@ void Model1LexicalTable::Load(const std::string &fileName, const Model1Vocabular
std::string line;
unsigned i = 0;
- while ( getline(inFile, line) )
+ while ( getline(inFile, line) )
{
++i;
std::vector<std::string> tokens = Tokenize(line);
@@ -126,8 +126,8 @@ void Model1LexicalTable::Load(const std::string &fileName, const Model1Vocabular
float Model1LexicalTable::GetProbability(const Factor* wordS, const Factor* wordT) const
{
float prob = m_floor;
-
- boost::unordered_map< const Factor*, boost::unordered_map< const Factor*, float > >::const_iterator iter1 = m_ltable.find( wordS );
+
+ boost::unordered_map< const Factor*, boost::unordered_map< const Factor*, float > >::const_iterator iter1 = m_ltable.find( wordS );
if ( iter1 != m_ltable.end() ) {
boost::unordered_map< const Factor*, float >::const_iterator iter2 = iter1->second.find( wordT );
@@ -193,10 +193,10 @@ void Model1Feature::EvaluateWithSourceContext(const InputType &input
float score = 0.0;
float norm = TransformScore(1+sentence.GetSize());
- for (size_t posT=0; posT<targetPhrase.GetSize(); ++posT)
+ for (size_t posT=0; posT<targetPhrase.GetSize(); ++posT)
{
const Word &wordT = targetPhrase.GetWord(posT);
- if ( !wordT.IsNonTerminal() )
+ if ( !wordT.IsNonTerminal() )
{
float thisWordProb = m_model1.GetProbability(m_emptyWord,wordT[0]); // probability conditioned on empty word
@@ -231,7 +231,7 @@ void Model1Feature::EvaluateWithSourceContext(const InputType &input
float thisWordScore = TransformScore(thisWordProb) - norm;
FEATUREVERBOSE(3, "score( " << wordT << " ) = " << thisWordScore << std::endl);
{
- #ifdef WITH_THREADS
+ #ifdef WITH_THREADS
// need to update cache; write lock
boost::unique_lock<boost::shared_mutex> lock(m_accessLock);
#endif
@@ -240,14 +240,14 @@ void Model1Feature::EvaluateWithSourceContext(const InputType &input
score += thisWordScore;
}
}
- }
+ }
scoreBreakdown.PlusEquals(this, score);
}
-
-void Model1Feature::CleanUpAfterSentenceProcessing(const InputType& source)
+
+void Model1Feature::CleanUpAfterSentenceProcessing(const InputType& source)
{
- #ifdef WITH_THREADS
+ #ifdef WITH_THREADS
// need to update cache; write lock
boost::unique_lock<boost::shared_mutex> lock(m_accessLock);
#endif
diff --git a/moses/FF/Model1Feature.h b/moses/FF/Model1Feature.h
index d526d165a..9c380e3ae 100644
--- a/moses/FF/Model1Feature.h
+++ b/moses/FF/Model1Feature.h
@@ -37,7 +37,7 @@ protected:
class Model1LexicalTable
{
public:
- Model1LexicalTable(float floor=1e-7) : m_floor(floor)
+ Model1LexicalTable(float floor=1e-7) : m_floor(floor)
{}
void Load(const std::string& fileName, const Model1Vocabulary& vcbS, const Model1Vocabulary& vcbT);
@@ -100,7 +100,7 @@ private:
const Factor* m_emptyWord;
void Load();
-
+
// cache
mutable boost::unordered_map<const InputType*, boost::unordered_map<const Factor*, float> > m_cache;
#ifdef WITH_THREADS
diff --git a/moses/FF/PhraseOrientationFeature.cpp b/moses/FF/PhraseOrientationFeature.cpp
index 528896f71..2a59340ea 100644
--- a/moses/FF/PhraseOrientationFeature.cpp
+++ b/moses/FF/PhraseOrientationFeature.cpp
@@ -197,7 +197,7 @@ void PhraseOrientationFeature::EvaluateInIsolation(const Phrase &source,
FEATUREVERBOSE(4, "lastNonTerminalPreviousSourceSpanIsAligned== " << reoClassData->lastNonTerminalPreviousSourceSpanIsAligned << std::endl);
FEATUREVERBOSE(4, "lastNonTerminalFollowingSourceSpanIsAligned== " << reoClassData->lastNonTerminalFollowingSourceSpanIsAligned << std::endl;);
- if (reoClassData->lastNonTerminalPreviousSourceSpanIsAligned &&
+ if (reoClassData->lastNonTerminalPreviousSourceSpanIsAligned &&
reoClassData->lastNonTerminalFollowingSourceSpanIsAligned) {
// discontinuous
r2lOrientation = Moses::GHKM::PhraseOrientation::REO_CLASS_DLEFT;
@@ -467,7 +467,7 @@ FFState* PhraseOrientationFeature::EvaluateWhenApplied(
if ( (nNT == currTarPhr.GetAlignNonTerm().GetSize()-1) && reoClassData->lastNonTerminalIsBoundary ) {
// delay right-to-left scoring
-
+
FEATUREVERBOSE(3, "Delaying right-to-left scoring" << std::endl);
std::bitset<3> possibleFutureOrientationsR2L(0x7);
diff --git a/moses/FF/RulePairUnlexicalizedSource.cpp b/moses/FF/RulePairUnlexicalizedSource.cpp
index c31978423..148d54052 100644
--- a/moses/FF/RulePairUnlexicalizedSource.cpp
+++ b/moses/FF/RulePairUnlexicalizedSource.cpp
@@ -10,7 +10,7 @@ using namespace std;
namespace Moses
{
-
+
RulePairUnlexicalizedSource::RulePairUnlexicalizedSource(const std::string &line)
: StatelessFeatureFunction(0, line)
, m_glueRules(false)
@@ -51,7 +51,7 @@ void RulePairUnlexicalizedSource::EvaluateInIsolation(const Phrase &source
return;
}
- for (size_t posS=0; posS<source.GetSize(); ++posS)
+ for (size_t posS=0; posS<source.GetSize(); ++posS)
{
const Word &wordS = source.GetWord(posS);
if ( !wordS.IsNonTerminal() ) {
@@ -61,7 +61,7 @@ void RulePairUnlexicalizedSource::EvaluateInIsolation(const Phrase &source
ostringstream namestr;
- for (size_t posT=0; posT<targetPhrase.GetSize(); ++posT)
+ for (size_t posT=0; posT<targetPhrase.GetSize(); ++posT)
{
const Word &wordT = targetPhrase.GetWord(posT);
const Factor* factorT = wordT[0];
@@ -78,7 +78,7 @@ void RulePairUnlexicalizedSource::EvaluateInIsolation(const Phrase &source
namestr << targetPhraseLHS->GetString() << "|";
for (AlignmentInfo::const_iterator it=targetPhrase.GetAlignNonTerm().begin();
- it!=targetPhrase.GetAlignNonTerm().end(); ++it)
+ it!=targetPhrase.GetAlignNonTerm().end(); ++it)
{
namestr << "|" << it->first << "-" << it->second;
}
diff --git a/moses/FF/RuleScope.cpp b/moses/FF/RuleScope.cpp
index bc1cb3ebd..08987537d 100644
--- a/moses/FF/RuleScope.cpp
+++ b/moses/FF/RuleScope.cpp
@@ -70,11 +70,11 @@ void RuleScope::EvaluateInIsolation(const Phrase &source
estimatedFutureScore.PlusEquals(this, scores);
}
else {
- scoreBreakdown.PlusEquals(this, scores);
+ scoreBreakdown.PlusEquals(this, scores);
}
}
else if (m_futureCostOnly) {
- estimatedFutureScore.PlusEquals(this, score);
+ estimatedFutureScore.PlusEquals(this, score);
}
else {
scoreBreakdown.PlusEquals(this, score);
diff --git a/moses/FF/SoftSourceSyntacticConstraintsFeature.cpp b/moses/FF/SoftSourceSyntacticConstraintsFeature.cpp
index d57c42d99..f788f8e53 100644
--- a/moses/FF/SoftSourceSyntacticConstraintsFeature.cpp
+++ b/moses/FF/SoftSourceSyntacticConstraintsFeature.cpp
@@ -556,8 +556,8 @@ void SoftSourceSyntacticConstraintsFeature::EvaluateWithSourceContext(const Inpu
for (boost::unordered_set<size_t>::const_iterator treeInputLabelsLHSIt = treeInputLabelsLHS.begin();
treeInputLabelsLHSIt != treeInputLabelsLHS.end(); ++treeInputLabelsLHSIt) {
- scoreBreakdown.PlusEquals(this,
- "LHSPAIR_" + targetLHS->GetString().as_string() + "_" + m_sourceLabelsByIndex[*treeInputLabelsLHSIt],
+ scoreBreakdown.PlusEquals(this,
+ "LHSPAIR_" + targetLHS->GetString().as_string() + "_" + m_sourceLabelsByIndex[*treeInputLabelsLHSIt],
(float)1/treeInputLabelsLHS.size());
if (!m_targetSourceLHSJointCountFile.empty()) {
@@ -567,8 +567,8 @@ void SoftSourceSyntacticConstraintsFeature::EvaluateWithSourceContext(const Inpu
}
}
if ( treeInputLabelsLHS.size() == 0 ) {
- scoreBreakdown.PlusEquals(this,
- "LHSPAIR_" + targetLHS->GetString().as_string() + "_" + outputDefaultNonTerminal[0]->GetString().as_string(),
+ scoreBreakdown.PlusEquals(this,
+ "LHSPAIR_" + targetLHS->GetString().as_string() + "_" + outputDefaultNonTerminal[0]->GetString().as_string(),
1);
if (!m_targetSourceLHSJointCountFile.empty()) {
t2sLabelsScore = TransformScore(m_floor);
diff --git a/moses/FF/VW/VW.h b/moses/FF/VW/VW.h
index dd9d0b858..c94791c32 100644
--- a/moses/FF/VW/VW.h
+++ b/moses/FF/VW/VW.h
@@ -165,7 +165,7 @@ public:
const std::vector<VWFeatureBase*>& sourceFeatures =
VWFeatureBase::GetSourceFeatures(GetScoreProducerDescription());
- const std::vector<VWFeatureBase*>& targetFeatures =
+ const std::vector<VWFeatureBase*>& targetFeatures =
VWFeatureBase::GetTargetFeatures(GetScoreProducerDescription());
const WordsRange &sourceRange = translationOptionList.Get(0)->GetSourceWordsRange();
@@ -229,7 +229,7 @@ public:
//
// predict using a trained classifier, use this in decoding (=at test time)
//
-
+
std::vector<float> losses(translationOptionList.size());
// extract source side features
@@ -296,7 +296,7 @@ public:
// classifier (squared/logistic/hinge/...), hence the name "loss"
if (value == "logistic") {
m_normalizer = (Discriminative::Normalizer *) new Discriminative::LogisticLossNormalizer();
- } else if (value == "squared") {
+ } else if (value == "squared") {
m_normalizer = (Discriminative::Normalizer *) new Discriminative::SquaredLossNormalizer();
} else {
UTIL_THROW2("Unknown loss type:" << value);
@@ -317,7 +317,7 @@ public:
const TabbedSentence& tabbedSentence = static_cast<const TabbedSentence&>(source);
UTIL_THROW_IF2(tabbedSentence.GetColumns().size() < 2,
"TabbedSentence must contain target<tab>alignment");
-
+
// target sentence represented as a phrase
Phrase *target = new Phrase();
target->CreateFromString(
@@ -431,7 +431,7 @@ private:
const TargetPhrase &targetPhrase = topt->GetTargetPhrase();
// extract raw counts from phrase-table property
- const CountsPhraseProperty *property =
+ const CountsPhraseProperty *property =
static_cast<const CountsPhraseProperty *>(targetPhrase.GetProperty("Counts"));
if (! property) {