Skip to content

Commit 05ead45

Browse files
committed
beautify
1 parent 91cb549 commit 05ead45

File tree

406 files changed

+20139
-21129
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

406 files changed

+20139
-21129
lines changed

OnDiskPt/Main.cpp

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -153,19 +153,19 @@ OnDiskPt::PhrasePtr Tokenize(SourcePhrase &sourcePhrase, TargetPhrase &targetPhr
153153
break;
154154
}
155155
case 4: {
156-
// store only the 3rd one (rule count)
157-
float val = Moses::Scan<float>(tok);
158-
misc[0] = val;
159-
break;
156+
// store only the 3rd one (rule count)
157+
float val = Moses::Scan<float>(tok);
158+
misc[0] = val;
159+
break;
160160
}
161161
case 5: {
162-
// sparse features
163-
sparseFeatures << tok << " ";
162+
// sparse features
163+
sparseFeatures << tok << " ";
164164
break;
165165
}
166166
case 6: {
167-
property << tok << " ";
168-
break;
167+
property << tok << " ";
168+
break;
169169
}
170170
default:
171171
cerr << "ERROR in line " << line << endl;

OnDiskPt/TargetPhrase.cpp

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -166,10 +166,10 @@ char *TargetPhrase::WriteOtherInfoToMemory(OnDiskWrapper &onDiskWrapper, size_t
166166
size_t propSize = m_property.size();
167167

168168
size_t memNeeded = sizeof(UINT64) // file pos (phrase id)
169-
+ sizeof(UINT64) + 2 * sizeof(UINT64) * numAlign // align
170-
+ sizeof(float) * numScores // scores
171-
+ sizeof(UINT64) + sparseFeatureSize // sparse features string
172-
+ sizeof(UINT64) + propSize; // property string
169+
+ sizeof(UINT64) + 2 * sizeof(UINT64) * numAlign // align
170+
+ sizeof(float) * numScores // scores
171+
+ sizeof(UINT64) + sparseFeatureSize // sparse features string
172+
+ sizeof(UINT64) + propSize; // property string
173173

174174
char *mem = (char*) malloc(memNeeded);
175175
//memset(mem, 0, memNeeded);
@@ -350,13 +350,13 @@ UINT64 TargetPhrase::ReadStringFromFile(std::fstream &fileTPColl, std::string &o
350350
bytesRead += sizeof(UINT64);
351351

352352
if (strSize) {
353-
char *mem = (char*) malloc(strSize + 1);
354-
mem[strSize] = '\0';
355-
fileTPColl.read(mem, strSize);
356-
outStr = string(mem);
357-
free(mem);
353+
char *mem = (char*) malloc(strSize + 1);
354+
mem[strSize] = '\0';
355+
fileTPColl.read(mem, strSize);
356+
outStr = string(mem);
357+
free(mem);
358358

359-
bytesRead += strSize;
359+
bytesRead += strSize;
360360
}
361361

362362
return bytesRead;

OnDiskPt/TargetPhrase.h

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -113,14 +113,12 @@ class TargetPhrase: public Phrase
113113

114114
virtual void DebugPrint(std::ostream &out, const Vocab &vocab) const;
115115

116-
void SetProperty(const std::string &value)
117-
{
118-
m_property = value;
116+
void SetProperty(const std::string &value) {
117+
m_property = value;
119118
}
120119

121-
void SetSparseFeatures(const std::string &value)
122-
{
123-
m_sparseFeatures = value;
120+
void SetSparseFeatures(const std::string &value) {
121+
m_sparseFeatures = value;
124122
}
125123
};
126124

OnDiskPt/Word.cpp

Lines changed: 11 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -105,18 +105,17 @@ void Word::ConvertToMoses(
105105
overwrite = Moses::Word(m_isNonTerminal);
106106

107107
if (m_isNonTerminal) {
108-
const std::string &tok = vocab.GetString(m_vocabId);
109-
overwrite.SetFactor(0, factorColl.AddFactor(tok, m_isNonTerminal));
110-
}
111-
else {
112-
// TODO: this conversion should have been done at load time.
113-
util::TokenIter<util::SingleCharacter> tok(vocab.GetString(m_vocabId), '|');
114-
115-
for (std::vector<Moses::FactorType>::const_iterator t = outputFactorsVec.begin(); t != outputFactorsVec.end(); ++t, ++tok) {
116-
UTIL_THROW_IF2(!tok, "Too few factors in \"" << vocab.GetString(m_vocabId) << "\"; was expecting " << outputFactorsVec.size());
117-
overwrite.SetFactor(*t, factorColl.AddFactor(*tok, m_isNonTerminal));
118-
}
119-
UTIL_THROW_IF2(tok, "Too many factors in \"" << vocab.GetString(m_vocabId) << "\"; was expecting " << outputFactorsVec.size());
108+
const std::string &tok = vocab.GetString(m_vocabId);
109+
overwrite.SetFactor(0, factorColl.AddFactor(tok, m_isNonTerminal));
110+
} else {
111+
// TODO: this conversion should have been done at load time.
112+
util::TokenIter<util::SingleCharacter> tok(vocab.GetString(m_vocabId), '|');
113+
114+
for (std::vector<Moses::FactorType>::const_iterator t = outputFactorsVec.begin(); t != outputFactorsVec.end(); ++t, ++tok) {
115+
UTIL_THROW_IF2(!tok, "Too few factors in \"" << vocab.GetString(m_vocabId) << "\"; was expecting " << outputFactorsVec.size());
116+
overwrite.SetFactor(*t, factorColl.AddFactor(*tok, m_isNonTerminal));
117+
}
118+
UTIL_THROW_IF2(tok, "Too many factors in \"" << vocab.GetString(m_vocabId) << "\"; was expecting " << outputFactorsVec.size());
120119
}
121120
}
122121

biconcor/phrase-lookup.cpp

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,8 @@ size_t lookup( string );
77
vector<string> tokenize( const char input[] );
88
SuffixArray suffixArray;
99

10-
int main(int argc, char* argv[]) {
10+
int main(int argc, char* argv[])
11+
{
1112
// handle parameters
1213
string query;
1314
string fileNameSuffix;
@@ -95,14 +96,14 @@ int main(int argc, char* argv[]) {
9596
}
9697
cout << lookup( query ) << endl;
9798
}
98-
}
99-
else if (queryFlag) {
99+
} else if (queryFlag) {
100100
cout << lookup( query ) << endl;
101101
}
102102
return 0;
103103
}
104104

105-
size_t lookup( string query ) {
105+
size_t lookup( string query )
106+
{
106107
cerr << "query is " << query << endl;
107108
vector< string > queryString = tokenize( query.c_str() );
108109
return suffixArray.Count( queryString );

mert/FeatureStats.cpp

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,8 @@ void SparseVector::set(const string& name, FeatureStatsType value)
6161
m_fvector[id] = value;
6262
}
6363

64-
void SparseVector::set(size_t id, FeatureStatsType value) {
64+
void SparseVector::set(size_t id, FeatureStatsType value)
65+
{
6566
assert(m_id_to_name.size() > id);
6667
m_fvector[id] = value;
6768
}
@@ -204,7 +205,7 @@ FeatureStats::FeatureStats(const size_t size)
204205

205206
FeatureStats::~FeatureStats()
206207
{
207-
delete [] m_array;
208+
delete [] m_array;
208209
}
209210

210211
void FeatureStats::Copy(const FeatureStats &stats)

mert/ForestRescore.cpp

Lines changed: 39 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -31,9 +31,11 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
3131

3232
using namespace std;
3333

34-
namespace MosesTuning {
34+
namespace MosesTuning
35+
{
3536

36-
std::ostream& operator<<(std::ostream& out, const WordVec& wordVec) {
37+
std::ostream& operator<<(std::ostream& out, const WordVec& wordVec)
38+
{
3739
out << "[";
3840
for (size_t i = 0; i < wordVec.size(); ++i) {
3941
out << wordVec[i]->first;
@@ -44,7 +46,8 @@ std::ostream& operator<<(std::ostream& out, const WordVec& wordVec) {
4446
}
4547

4648

47-
void ReferenceSet::Load(const vector<string>& files, Vocab& vocab) {
49+
void ReferenceSet::Load(const vector<string>& files, Vocab& vocab)
50+
{
4851
for (size_t i = 0; i < files.size(); ++i) {
4952
util::FilePiece fh(files[i].c_str());
5053
size_t sentenceId = 0;
@@ -55,14 +58,15 @@ void ReferenceSet::Load(const vector<string>& files, Vocab& vocab) {
5558
} catch (util::EndOfFileException &e) {
5659
break;
5760
}
58-
AddLine(sentenceId, line, vocab);
59-
++sentenceId;
61+
AddLine(sentenceId, line, vocab);
62+
++sentenceId;
6063
}
6164
}
6265

6366
}
6467

65-
void ReferenceSet::AddLine(size_t sentenceId, const StringPiece& line, Vocab& vocab) {
68+
void ReferenceSet::AddLine(size_t sentenceId, const StringPiece& line, Vocab& vocab)
69+
{
6670
//cerr << line << endl;
6771
NgramCounter ngramCounts;
6872
list<WordVec> openNgrams;
@@ -74,14 +78,14 @@ void ReferenceSet::AddLine(size_t sentenceId, const StringPiece& line, Vocab& vo
7478
openNgrams.push_front(WordVec());
7579
for (list<WordVec>::iterator k = openNgrams.begin(); k != openNgrams.end(); ++k) {
7680
k->push_back(nextTok);
77-
++ngramCounts[*k];
81+
++ngramCounts[*k];
7882
}
7983
if (openNgrams.size() >= kBleuNgramOrder) openNgrams.pop_back();
8084
}
8185

8286
//merge into overall ngram map
8387
for (NgramCounter::const_iterator ni = ngramCounts.begin();
84-
ni != ngramCounts.end(); ++ni) {
88+
ni != ngramCounts.end(); ++ni) {
8589
size_t count = ni->second;
8690
//cerr << *ni << " " << count << endl;
8791
if (ngramCounts_.size() <= sentenceId) ngramCounts_.resize(sentenceId+1);
@@ -104,8 +108,9 @@ void ReferenceSet::AddLine(size_t sentenceId, const StringPiece& line, Vocab& vo
104108
//cerr << endl;
105109

106110
}
107-
108-
size_t ReferenceSet::NgramMatches(size_t sentenceId, const WordVec& ngram, bool clip) const {
111+
112+
size_t ReferenceSet::NgramMatches(size_t sentenceId, const WordVec& ngram, bool clip) const
113+
{
109114
const NgramMap& ngramCounts = ngramCounts_.at(sentenceId);
110115
NgramMap::const_iterator ngi = ngramCounts.find(ngram);
111116
if (ngi == ngramCounts.end()) return 0;
@@ -114,7 +119,8 @@ size_t ReferenceSet::NgramMatches(size_t sentenceId, const WordVec& ngram, bool
114119

115120
VertexState::VertexState(): bleuStats(kBleuNgramOrder), targetLength(0) {}
116121

117-
void HgBleuScorer::UpdateMatches(const NgramCounter& counts, vector<FeatureStatsType>& bleuStats ) const {
122+
void HgBleuScorer::UpdateMatches(const NgramCounter& counts, vector<FeatureStatsType>& bleuStats ) const
123+
{
118124
for (NgramCounter::const_iterator ngi = counts.begin(); ngi != counts.end(); ++ngi) {
119125
//cerr << "Checking: " << *ngi << " matches " << references_.NgramMatches(sentenceId_,*ngi,false) << endl;
120126
size_t order = ngi->first.size();
@@ -124,7 +130,8 @@ void HgBleuScorer::UpdateMatches(const NgramCounter& counts, vector<FeatureStats
124130
}
125131
}
126132

127-
size_t HgBleuScorer::GetTargetLength(const Edge& edge) const {
133+
size_t HgBleuScorer::GetTargetLength(const Edge& edge) const
134+
{
128135
size_t targetLength = 0;
129136
for (size_t i = 0; i < edge.Words().size(); ++i) {
130137
const Vocab::Entry* word = edge.Words()[i];
@@ -137,7 +144,8 @@ size_t HgBleuScorer::GetTargetLength(const Edge& edge) const {
137144
return targetLength;
138145
}
139146

140-
FeatureStatsType HgBleuScorer::Score(const Edge& edge, const Vertex& head, vector<FeatureStatsType>& bleuStats) {
147+
FeatureStatsType HgBleuScorer::Score(const Edge& edge, const Vertex& head, vector<FeatureStatsType>& bleuStats)
148+
{
141149
NgramCounter ngramCounts;
142150
size_t childId = 0;
143151
size_t wordId = 0;
@@ -147,7 +155,7 @@ FeatureStatsType HgBleuScorer::Score(const Edge& edge, const Vertex& head, vecto
147155
bool inRightContext = false;
148156
list<WordVec> openNgrams;
149157
const Vocab::Entry* currentWord = NULL;
150-
while (wordId < edge.Words().size()) {
158+
while (wordId < edge.Words().size()) {
151159
currentWord = edge.Words()[wordId];
152160
if (currentWord != NULL) {
153161
++wordId;
@@ -214,7 +222,7 @@ FeatureStatsType HgBleuScorer::Score(const Edge& edge, const Vertex& head, vecto
214222
}
215223
if (openNgrams.size() >= kBleuNgramOrder) openNgrams.pop_back();
216224
}
217-
225+
218226
//Collect matches
219227
//This edge
220228
//cerr << "edge ngrams" << endl;
@@ -227,26 +235,27 @@ FeatureStatsType HgBleuScorer::Score(const Edge& edge, const Vertex& head, vecto
227235
bleuStats[j] += vertexStates_[edge.Children()[i]].bleuStats[j];
228236
}
229237
}
230-
238+
231239

232240
FeatureStatsType sourceLength = head.SourceCovered();
233241
size_t referenceLength = references_.Length(sentenceId_);
234-
FeatureStatsType effectiveReferenceLength =
242+
FeatureStatsType effectiveReferenceLength =
235243
sourceLength / totalSourceLength_ * referenceLength;
236244

237245
bleuStats[bleuStats.size()-1] = effectiveReferenceLength;
238-
//backgroundBleu_[backgroundBleu_.size()-1] =
246+
//backgroundBleu_[backgroundBleu_.size()-1] =
239247
// backgroundRefLength_ * sourceLength / totalSourceLength_;
240248
FeatureStatsType bleu = sentenceLevelBackgroundBleu(bleuStats, backgroundBleu_);
241249

242250
return bleu;
243251
}
244252

245-
void HgBleuScorer::UpdateState(const Edge& winnerEdge, size_t vertexId, const vector<FeatureStatsType>& bleuStats) {
253+
void HgBleuScorer::UpdateState(const Edge& winnerEdge, size_t vertexId, const vector<FeatureStatsType>& bleuStats)
254+
{
246255
//TODO: Maybe more efficient to absorb into the Score() method
247256
VertexState& vertexState = vertexStates_[vertexId];
248257
//cerr << "Updating state for " << vertexId << endl;
249-
258+
250259
//leftContext
251260
int wi = 0;
252261
const VertexState* childState = NULL;
@@ -263,9 +272,9 @@ void HgBleuScorer::UpdateState(const Edge& winnerEdge, size_t vertexId, const ve
263272
//start of child state
264273
childState = &(vertexStates_[winnerEdge.Children()[childi++]]);
265274
contexti = 0;
266-
}
275+
}
267276
if ((size_t)contexti < childState->leftContext.size()) {
268-
vertexState.leftContext.push_back(childState->leftContext[contexti++]);
277+
vertexState.leftContext.push_back(childState->leftContext[contexti++]);
269278
} else {
270279
//end of child context
271280
childState = NULL;
@@ -314,7 +323,8 @@ typedef pair<const Edge*,FeatureStatsType> BackPointer;
314323
* Recurse through back pointers
315324
**/
316325
static void GetBestHypothesis(size_t vertexId, const Graph& graph, const vector<BackPointer>& bps,
317-
HgHypothesis* bestHypo) {
326+
HgHypothesis* bestHypo)
327+
{
318328
//cerr << "Expanding " << vertexId << " Score: " << bps[vertexId].second << endl;
319329
//UTIL_THROW_IF(bps[vertexId].second == kMinScore+1, HypergraphException, "Landed at vertex " << vertexId << " which is a dead end");
320330
if (!bps[vertexId].first) return;
@@ -334,7 +344,7 @@ static void GetBestHypothesis(size_t vertexId, const Graph& graph, const vector<
334344
}
335345
}
336346

337-
void Viterbi(const Graph& graph, const SparseVector& weights, float bleuWeight, const ReferenceSet& references , size_t sentenceId, const std::vector<FeatureStatsType>& backgroundBleu, HgHypothesis* bestHypo)
347+
void Viterbi(const Graph& graph, const SparseVector& weights, float bleuWeight, const ReferenceSet& references , size_t sentenceId, const std::vector<FeatureStatsType>& backgroundBleu, HgHypothesis* bestHypo)
338348
{
339349
BackPointer init(NULL,kMinScore);
340350
vector<BackPointer> backPointers(graph.VertexSize(),init);
@@ -349,7 +359,7 @@ void Viterbi(const Graph& graph, const SparseVector& weights, float bleuWeight,
349359
//UTIL_THROW(HypergraphException, "Vertex " << vi << " has no incoming edges");
350360
//If no incoming edges, vertex is a dead end
351361
backPointers[vi].first = NULL;
352-
backPointers[vi].second = kMinScore;
362+
backPointers[vi].second = kMinScore;
353363
} else {
354364
//cerr << "\nVertex: " << vi << endl;
355365
for (size_t ei = 0; ei < incoming.size(); ++ei) {
@@ -362,10 +372,10 @@ void Viterbi(const Graph& graph, const SparseVector& weights, float bleuWeight,
362372
incomingScore = max(incomingScore + backPointers[childId].second, kMinScore);
363373
}
364374
vector<FeatureStatsType> bleuStats(kBleuNgramOrder*2+1);
365-
// cerr << "Score: " << incomingScore << " Bleu: ";
366-
// if (incomingScore > nonbleuscore) {nonbleuscore = incomingScore; nonbleuid = ei;}
375+
// cerr << "Score: " << incomingScore << " Bleu: ";
376+
// if (incomingScore > nonbleuscore) {nonbleuscore = incomingScore; nonbleuid = ei;}
367377
FeatureStatsType totalScore = incomingScore;
368-
if (bleuWeight) {
378+
if (bleuWeight) {
369379
FeatureStatsType bleuScore = bleuScorer.Score(*(incoming[ei]), vertex, bleuStats);
370380
if (isnan(bleuScore)) {
371381
cerr << "WARN: bleu score undefined" << endl;
@@ -379,7 +389,7 @@ void Viterbi(const Graph& graph, const SparseVector& weights, float bleuWeight,
379389
}
380390
//UTIL_THROW_IF(isnan(bleuScore), util::Exception, "Bleu score undefined, smoothing problem?");
381391
totalScore += bleuWeight * bleuScore;
382-
// cerr << bleuScore << " Total: " << incomingScore << endl << endl;
392+
// cerr << bleuScore << " Total: " << incomingScore << endl << endl;
383393
//cerr << "is " << incomingScore << " bs " << bleuScore << endl;
384394
}
385395
if (totalScore >= winnerScore) {

0 commit comments

Comments (0)