Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions src/AutoregressionLayer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -43,14 +43,14 @@ struct AutoregressionLayer: public LinearOutputLayer
targets = this->source->outputActivations;
fill (seqMeanTargs, 0);
int seqSize = targets.seq_size();
loop(int i, range(seqSize))
loop(int i, ::range(seqSize))
{
range_plus_equals(seqMeanTargs, targets[i]);
}
range_divide_val(seqMeanTargs, seqSize);
double seqRmsNormFactor = 0;
double rmsError = 0;
loop(int i, range(seqSize))
loop(int i, ::range(seqSize))
{
seqRmsNormFactor += euclidean_squared(seqMeanTargs, targets[i]);
loop(TDDD t, zip(this->outputErrors[i], this->outputActivations[i], targets[i]))
Expand Down
6 changes: 3 additions & 3 deletions src/ClassificationLayer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -56,14 +56,14 @@ struct ClassificationLayer: public SoftmaxLayer
}
double calculate_errors(const DataSequence& seq)
{
assert(equal(seq.targetClasses.seq_shape(), this->outputActivations.seq_shape()));
assert(::equal(seq.targetClasses.seq_shape(), this->outputActivations.seq_shape()));
loop(vector<int>& v, confusionMatrix)
{
fill(v, 0);
}
targets.reshape(this->outputActivations, 0);
double crossEntropyError = 0;
loop(int i, range(this->outputActivations.seq_size()))
loop(int i, ::range(this->outputActivations.seq_size()))
{
int targetClass = seq.targetClasses[i].front();
if (targetClass >= 0)
Expand Down Expand Up @@ -122,7 +122,7 @@ struct ClassificationLayer: public SoftmaxLayer
if (this->num_seq_dims() == 0)
{
View<LogDouble> logActs = this->logActivations[0];
loop (int i, range(this->output_size()))
loop (int i, ::range(this->output_size()))
{
labelProbs[i] = make_pair(logActs[i].log, i);
}
Expand Down
2 changes: 1 addition & 1 deletion src/DecodingLayer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -272,7 +272,7 @@ struct DecodingLayer: public TranscriptionLayer
}

//pass the tokens
loop(int t, range(1, totalTime))
loop(int t, ::range(1, totalTime))
{
//reduce newTokens list to nBest, unless using language model
if (oneGrams.empty())
Expand Down
40 changes: 20 additions & 20 deletions src/Helpers.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -248,6 +248,22 @@ template <class R> static size_t count_adjacent(const R& r)
}
return count;
}
// Size of the shorter of two ranges; used as a bound for paired
// (zip-style) iteration so that neither range is overrun.
template<class R1, class R2> static size_t range_min_size (const R1& a, const R2& b)
{
    size_t sizeA = boost::size(a);
    size_t sizeB = boost::size(b);
    return sizeB < sizeA ? sizeB : sizeA;
}
// Size of the shortest of three ranges; delegates to the two-range
// overload and folds in the third size.
template<class R1, class R2, class R3> static size_t range_min_size (const R1& a, const R2& b, const R3& c)
{
    size_t smallest = range_min_size(a, b);
    size_t sizeC = boost::size(c);
    return sizeC < smallest ? sizeC : smallest;
}
// Size of the shortest of four ranges; delegates to the three-range
// overload and folds in the fourth size.
template<class R1, class R2, class R3, class R4> static size_t range_min_size (const R1& a, const R2& b, const R3& c, const R4& d)
{
    size_t smallest = range_min_size(a, b, c);
    size_t sizeD = boost::size(d);
    return sizeD < smallest ? sizeD : smallest;
}
// Size of the shortest of five ranges; delegates to the four-range
// overload and folds in the fifth size.
template<class R1, class R2, class R3, class R4, class R5> static size_t range_min_size (const R1& a, const R2& b, const R3& c, const R4& d, const R5& e)
{
    size_t smallest = range_min_size(a, b, c, d);
    size_t sizeE = boost::size(e);
    return sizeE < smallest ? sizeE : smallest;
}
template <class R1, class R2> static pair<zip_iterator<tuple<typename range_iterator<R1>::type, typename range_iterator<R2>::type> >,
zip_iterator<tuple<typename range_iterator<R1>::type, typename range_iterator<R2>::type> > >
zip(R1& r1, R2& r2)
Expand Down Expand Up @@ -280,10 +296,6 @@ zip(R1& r1, R2& r2, R3& r3, R4& r4, R5& r5)
return make_pair(make_zip_iterator(make_tuple(boost::begin(r1), boost::begin(r2), boost::begin(r3), boost::begin(r4), boost::begin(r5))),
make_zip_iterator(make_tuple(boost::end(r1) - (boost::size(r1) - size), boost::end(r2) - (boost::size(r2) - size), boost::end(r3) - (boost::size(r3) - size), boost::end(r4) - (boost::size(r4) - size), boost::end(r5) - (boost::size(r5) - size))));
}
// Integer index range [0, boost::size(r)) for index-based iteration over r.
// NOTE(review): calls unqualified range(); if another range() is visible at
// this point in the header (e.g. via a using-directive or a declaration that
// appears only later), name lookup may not find the intended overload —
// confirm the declaration order.
template <class R> static pair<counting_iterator<typename range_difference<R>::type>, counting_iterator<typename range_difference<R>::type> > indices(const R& r)
{
return range(boost::size(r));
}
template <class R> static pair<zip_iterator<tuple<typename range_iterator<R>::type, counting_iterator<typename range_difference<R>::type> > >,
zip_iterator<tuple<typename range_iterator<R>::type, counting_iterator<typename range_difference<R>::type> > > >
enumerate(R& r)
Expand All @@ -299,6 +311,10 @@ template <class T> static pair<counting_iterator<T>, counting_iterator<T> > rang
{
return make_pair(counting_iterator<T>(t1), counting_iterator<T>(t2));
}
// Integer index range [0, boost::size(r)) — a pair of counting iterators
// covering every valid index of r; presumably consumed via the loop(...)
// macro used throughout this codebase (verify against callers).
// ::range is explicitly qualified so that unqualified lookup/ADL cannot
// select a different range() brought into scope elsewhere.
template <class R> static pair<counting_iterator<typename range_size<R>::type>, counting_iterator<typename range_size<R>::type> > indices(const R& r)
{
return ::range(boost::size(r));
}
template <class R1, class R2, class F> static typename range_iterator<R2>::type transform(const R1& r1, R2& r2, F f)
{
return transform(boost::begin(r1), boost::end(r1), boost::begin(r2), f);
Expand Down Expand Up @@ -436,22 +452,6 @@ template<class R> void delete_range(R& r)
delete *it;
}
}
// Returns the smaller of the two range sizes (bound for zipped iteration).
template<class R1, class R2> static size_t range_min_size (const R1& a, const R2& b)
{
return min(boost::size(a), boost::size(b));
}
// Returns the smallest of the three range sizes (bound for zipped iteration).
template<class R1, class R2, class R3> static size_t range_min_size (const R1& a, const R2& b, const R3& c)
{
return min(min(boost::size(a), boost::size(b)), boost::size(c));
}
// Returns the smallest of the four range sizes (bound for zipped iteration).
template<class R1, class R2, class R3, class R4> static size_t range_min_size (const R1& a, const R2& b, const R3& c, const R4& d)
{
return min(min(min(boost::size(a), boost::size(b)), boost::size(c)), boost::size(d));
}
// Returns the smallest of the five range sizes (bound for zipped iteration).
template<class R1, class R2, class R3, class R4, class R5> static size_t range_min_size (const R1& a, const R2& b, const R3& c, const R4& d, const R5& e)
{
return min(min(min(min(boost::size(a), boost::size(b)), boost::size(c)), boost::size(d)), boost::size(e));
}
template <class R> static int max_index(const R& r)
{
return distance(boost::begin(r), max_element(boost::begin(r), boost::end(r)));
Expand Down
36 changes: 18 additions & 18 deletions src/LstmLayer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ template <class CI, class CO, class G> struct LstmLayer: public Layer
#endif
{
//initialise the state delays
loop(int i, range(this->num_seq_dims()))
loop(int i, ::range(this->num_seq_dims()))
{
stateDelays[i].resize(this->num_seq_dims(), 0);
stateDelays[i][i] = -directions[i];
Expand Down Expand Up @@ -116,7 +116,7 @@ template <class CI, class CO, class G> struct LstmLayer: public Layer
double* stateBegin = states[coords].begin();
double* preGateStateBegin = preGateStates[coords].begin();
double* preOutGateActBegin = preOutGateActs[coords].begin();
loop(int d, range(this->num_seq_dims()))
loop(int d, ::range(this->num_seq_dims()))
{
oldStates[d] = states.at(range_plus(delayedCoords, coords, stateDelays[d]));
}
Expand All @@ -126,7 +126,7 @@ template <class CI, class CO, class G> struct LstmLayer: public Layer
int cellStart = 0;
int cellEnd = cellsPerBlock;
double* fgActEnd = fgActBegin + this->num_seq_dims();
loop(int b, range(numBlocks))
loop(int b, ::range(numBlocks))
{
#ifdef PEEPS
View<double> fgActs(fgActBegin, fgActEnd);
Expand All @@ -147,7 +147,7 @@ template <class CI, class CO, class G> struct LstmLayer: public Layer

//forget gates
//extra inputs from peepholes (from old states)
loop(int d, range(this->num_seq_dims()))
loop(int d, ::range(this->num_seq_dims()))
{
#ifdef PEEPS
const View<double>& os = oldStates[d];
Expand All @@ -166,10 +166,10 @@ template <class CI, class CO, class G> struct LstmLayer: public Layer
inActIt += cellsPerBlock;

//cell states
loop(int c, range(cellStart, cellEnd))
loop(int c, ::range(cellStart, cellEnd))
{
double state = inGateAct * preGateStateBegin[c];
loop(int d, range(this->num_seq_dims()))
loop(int d, ::range(this->num_seq_dims()))
{
const View<double>& os = oldStates[d];
if (os.begin())
Expand Down Expand Up @@ -218,7 +218,7 @@ template <class CI, class CO, class G> struct LstmLayer: public Layer
#ifdef PEEPS
const double* peepWtIt = WeightContainer::instance().get_weights(peepRange).begin();
#endif
loop(int d, range(this->num_seq_dims()))
loop(int d, ::range(this->num_seq_dims()))
{
oldStates[d] = states.at(range_plus(delayedCoords, coords, stateDelays[d]));
range_minus(delayedCoords, coords, stateDelays[d]);
Expand All @@ -230,7 +230,7 @@ template <class CI, class CO, class G> struct LstmLayer: public Layer
int cellEnd = cellsPerBlock;
int fgStart = 0;
int gateStart = 0;
loop(int b, range(numBlocks))
loop(int b, ::range(numBlocks))
{
double inGateAct = inGateActBegin[b];
double outGateAct = outGateActBegin[b];
Expand All @@ -240,7 +240,7 @@ template <class CI, class CO, class G> struct LstmLayer: public Layer
inner_product(preOutGateActBegin + cellStart, preOutGateActBegin + cellEnd, outputErrorBegin + cellStart, 0.0);

//cell pds (dE/dState)
loop(int c, range(cellStart, cellEnd))
loop(int c, ::range(cellStart, cellEnd))
{
double deriv = (CO::deriv(preOutGateActBegin[c]) * outGateAct * outputErrorBegin[c]);
#ifdef PEEPS
Expand All @@ -249,7 +249,7 @@ template <class CI, class CO, class G> struct LstmLayer: public Layer
double ogPeepWt = peepWtIt[peepsPerBlock - cellsPerBlock + cOffset];
deriv += outGateError * ogPeepWt;
#endif
loop(int d, range(this->num_seq_dims()))
loop(int d, ::range(this->num_seq_dims()))
{
#ifdef PEEPS
double fgPeepWt = peepWtIt[cOffset + (cellsPerBlock * (d + 1))];
Expand All @@ -272,7 +272,7 @@ template <class CI, class CO, class G> struct LstmLayer: public Layer
++errorIt;

//forget gate error
loop(int d, range(this->num_seq_dims()))
loop(int d, ::range(this->num_seq_dims()))
{
const View<double>& os = oldStates[d];
if (os.begin())
Expand All @@ -288,7 +288,7 @@ template <class CI, class CO, class G> struct LstmLayer: public Layer
}

//cell errors
loop(int c, range(cellStart, cellEnd))
loop(int c, ::range(cellStart, cellEnd))
{
*errorIt = inGateAct * CI::deriv(preGateStateBegin[c]) * cellErrorBegin[c];
++errorIt;
Expand Down Expand Up @@ -316,34 +316,34 @@ template <class CI, class CO, class G> struct LstmLayer: public Layer
const double* stateBegin = states[coords].begin();
const double* errorBegin = this->inputErrors[coords].begin();
double* pdIt = WeightContainer::instance().get_derivs(peepRange).begin();
loop(int d, range(this->num_seq_dims()))
loop(int d, ::range(this->num_seq_dims()))
{
oldStates[d] = states.at(range_plus(delayedCoords, coords, stateDelays[d]));
}
loop(int b, range(numBlocks))
loop(int b, ::range(numBlocks))
{
int cellStart = b * cellsPerBlock;
int cellEnd = cellStart + cellsPerBlock;
int errorOffset = b * unitsPerBlock;
double inGateError = errorBegin[errorOffset];
loop(int d, range(this->num_seq_dims()))
loop(int d, ::range(this->num_seq_dims()))
{
const View<double>& os = oldStates[d];
if (os.begin())
{
loop(int c, range(cellStart, cellEnd))
loop(int c, ::range(cellStart, cellEnd))
{
pdIt[c - cellStart] += inGateError * os[c];
}
double forgGateError = errorBegin[errorOffset + d + 1];
loop(int c, range(cellStart, cellEnd))
loop(int c, ::range(cellStart, cellEnd))
{
pdIt[(c - cellStart) + ((d + 1) * cellsPerBlock)] += forgGateError * os[c];
}
}
}
double outGateError = errorBegin[errorOffset + unitsPerBlock - 1];
loop(int c, range(cellStart, cellEnd))
loop(int c, ::range(cellStart, cellEnd))
{
pdIt[(c - cellStart) + peepsPerBlock - cellsPerBlock] += outGateError * stateBegin[c];
}
Expand Down
10 changes: 5 additions & 5 deletions src/NetcdfDataset.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ struct DataHeader
}
inputSize = load_nc_dim(nc, "inputPattSize");
numSequences = load_nc_dim(nc, "numSeqs") * bound(dataFraction, 0.0, 1.0);
loop(int s, range(numSequences))
loop(int s, ::range(numSequences))
{
vector<int> seqDims = get_nc_array_step<int>(nc, "seqDims", s, numDims != 1);
if (seqDims.empty())
Expand Down Expand Up @@ -189,11 +189,11 @@ struct DataHeader
}
else if (task == "sequence_classification" || task == "transcription" || task == "dictionary_transcription")
{
loop(int i, range(outputSize))
loop(int i, ::range(outputSize))
{
targetLabelCounts[targetLabels.left.at(i)] = 0;
}
loop(int s, range(numSequences))
loop(int s, ::range(numSequences))
{
stringstream labelSeq (get_nc_string(nc, "targetStrings", s));
string label;
Expand Down Expand Up @@ -319,7 +319,7 @@ struct NetcdfDataset
pair<int,int> get_offset(int seqNum) const
{
pair<int, int> offset(0, 0);
loop(int i, range(seqNum))
loop(int i, ::range(seqNum))
{
offset += seq_to_offset(i);
}
Expand All @@ -328,7 +328,7 @@ struct NetcdfDataset
void load_sequences (int first, int last)
{
pair<int, int> offsets = get_offset(first);
loop(int i, range(first, last))
loop(int i, ::range(first, last))
{
check(i >= 0 && i < inputSeqDims.shape[0], "sequence " + str(i) + " requested from data file " + str(filename) + " containing " + str(inputSeqDims.shape[0]) + " sequences");
DataSequence* seq = new DataSequence(header.inputSize, in(task, "regression") ? header.outputSize : 0);
Expand Down
2 changes: 1 addition & 1 deletion src/RegressionLayer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ struct RegressionLayer: public LinearOutputLayer
+ ", target shape = " + str(seq.targetPatterns.shape));
targets = seq.targetPatterns;
double sumSquaresError = 0;
loop(int i, range(this->outputActivations.seq_size()))
loop(int i, ::range(this->outputActivations.seq_size()))
{
// double errScale = (seq.importance.empty() ? 1 : *(seq.importance[i].first));
loop(TDDCF t, zip(this->outputErrors[i], this->outputActivations[i], seq.targetPatterns[i]))
Expand Down
4 changes: 2 additions & 2 deletions src/SeqBuffer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -222,9 +222,9 @@ template <class T> struct SeqBuffer: public MultiArray<T>
void print(ostream& out) const
{
out << "DIMENSIONS: " << seq_shape() << endl;
loop(int j, range(this->shape.back()))
loop(int j, ::range(this->shape.back()))
{
loop(int i, range(seq_size()))
loop(int i, ::range(seq_size()))
{
out << (*this)[i][j] << " ";
}
Expand Down
12 changes: 6 additions & 6 deletions src/TranscriptionLayer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ struct TranscriptionLayer: public SoftmaxLayer
{
totalSegs = totalSegments;
}
return range<int>(max(0, totalSegs - (2 *(totalTime-time))), min(totalSegs, 2 * (time + 1)));
return ::range<int>(max(0, totalSegs - (2 *(totalTime-time))), min(totalSegs, 2 * (time + 1)));
}
vector<int>& path_to_string(const vector<int>& path) const
{
Expand All @@ -81,7 +81,7 @@ struct TranscriptionLayer: public SoftmaxLayer
{
static vector<int> path;
path.clear();
loop(int i, range(this->outputActivations.seq_size()))
loop(int i, ::range(this->outputActivations.seq_size()))
{
path += max_index(this->outputActivations[i]);
}
Expand Down Expand Up @@ -114,7 +114,7 @@ struct TranscriptionLayer: public SoftmaxLayer
{
forwardVariables.data[1] = this->logActivations.data[seq.labelSeq[0]];
}
loop(int t, range(1, totalTime))
loop(int t, ::range(1, totalTime))
{
View<LogDouble> logActs = this->logActivations[t];
View<LogDouble> oldFvars = forwardVariables[t-1];
Expand Down Expand Up @@ -165,7 +165,7 @@ struct TranscriptionLayer: public SoftmaxLayer
nth_last(lastBvs, 2) = 1;
}
//loop over time, calculating backward variables recursively
loop_back(int t, range(totalTime - 1))
loop_back(int t, ::range(totalTime - 1))
{
View<LogDouble> oldLogActs = this->logActivations[t+1];
View<LogDouble> oldBvars = backwardVariables[t+1];
Expand Down Expand Up @@ -200,12 +200,12 @@ struct TranscriptionLayer: public SoftmaxLayer
}
}
//inject the training errors
loop(int time, range(totalTime))
loop(int time, ::range(totalTime))
{
fill(dEdYTerms, LogDouble(0));
View<LogDouble> fvars = forwardVariables[time];
View<LogDouble> bvars = backwardVariables[time];
loop (int s, range(totalSegments))
loop (int s, ::range(totalSegments))
{
//k = blank for even s, target label for odd s
int k = (s&1) ? seq.labelSeq[s/2] : blank;
Expand Down