PREV | NEXT | FRAMES | NO FRAMES
## Uses of VectorWritable in org.apache.mahout.cf.taste.hadoop.als
Methods in org.apache.mahout.cf.taste.hadoop.als with parameters of type VectorWritable | |
---|---|
protected void |
SolveImplicitFeedbackMapper.map(org.apache.hadoop.io.IntWritable userOrItemID,
VectorWritable ratingsWritable,
org.apache.hadoop.mapreduce.Mapper.Context ctx)
|
protected void |
SolveExplicitFeedbackMapper.map(org.apache.hadoop.io.IntWritable userOrItemID,
VectorWritable ratingsWritable,
org.apache.hadoop.mapreduce.Mapper.Context ctx)
|
protected void |
PredictionMapper.map(org.apache.hadoop.io.IntWritable userIndexWritable,
VectorWritable ratingsWritable,
org.apache.hadoop.mapreduce.Mapper.Context ctx)
|
## Uses of VectorWritable in org.apache.mahout.cf.taste.hadoop.item
Methods in org.apache.mahout.cf.taste.hadoop.item with parameters of type VectorWritable | |
---|---|
protected void |
SimilarityMatrixRowWrapperMapper.map(org.apache.hadoop.io.IntWritable key,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
protected void |
UserVectorSplitterMapper.map(VarLongWritable key,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
## Uses of VectorWritable in org.apache.mahout.cf.taste.hadoop.preparation
Methods in org.apache.mahout.cf.taste.hadoop.preparation with parameters of type VectorWritable | |
---|---|
protected void |
ToItemVectorsMapper.map(VarLongWritable rowIndex,
VectorWritable vectorWritable,
org.apache.hadoop.mapreduce.Mapper.Context ctx)
|
Method parameters in org.apache.mahout.cf.taste.hadoop.preparation with type arguments of type VectorWritable | |
---|---|
protected void |
ToItemVectorsReducer.reduce(org.apache.hadoop.io.IntWritable row,
Iterable<VectorWritable> vectors,
org.apache.hadoop.mapreduce.Reducer.Context ctx)
|
## Uses of VectorWritable in org.apache.mahout.cf.taste.hadoop.similarity.item
Methods in org.apache.mahout.cf.taste.hadoop.similarity.item with parameters of type VectorWritable | |
---|---|
protected void |
ItemSimilarityJob.MostSimilarItemPairsMapper.map(org.apache.hadoop.io.IntWritable itemIDIndexWritable,
VectorWritable similarityVector,
org.apache.hadoop.mapreduce.Mapper.Context ctx)
|
## Uses of VectorWritable in org.apache.mahout.classifier.naivebayes.test
Methods in org.apache.mahout.classifier.naivebayes.test with parameters of type VectorWritable | |
---|---|
protected void |
BayesTestMapper.map(org.apache.hadoop.io.Text key,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
## Uses of VectorWritable in org.apache.mahout.classifier.naivebayes.training
Methods in org.apache.mahout.classifier.naivebayes.training with parameters of type VectorWritable | |
---|---|
protected void |
WeightsMapper.map(org.apache.hadoop.io.IntWritable index,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context ctx)
|
protected void |
ThetaMapper.map(org.apache.hadoop.io.IntWritable key,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context ctx)
|
protected void |
IndexInstancesMapper.map(org.apache.hadoop.io.Text labelText,
VectorWritable instance,
org.apache.hadoop.mapreduce.Mapper.Context ctx)
|
## Uses of VectorWritable in org.apache.mahout.clustering
Methods in org.apache.mahout.clustering that return types with arguments of type VectorWritable | |
---|---|
Model<VectorWritable> |
Model.sampleFromPosterior()
|
Methods in org.apache.mahout.clustering with parameters of type VectorWritable | |
---|---|
void |
AbstractCluster.observe(VectorWritable x)
|
void |
AbstractCluster.observe(VectorWritable x,
double weight)
|
Method parameters in org.apache.mahout.clustering with type arguments of type VectorWritable | |
---|---|
void |
AbstractCluster.observe(Model<VectorWritable> x)
|
## Uses of VectorWritable in org.apache.mahout.clustering.canopy
Method parameters in org.apache.mahout.clustering.canopy with type arguments of type VectorWritable | |
---|---|
protected void |
CanopyReducer.reduce(org.apache.hadoop.io.Text arg0,
Iterable<VectorWritable> values,
org.apache.hadoop.mapreduce.Reducer.Context context)
|
## Uses of VectorWritable in org.apache.mahout.clustering.classify
Methods in org.apache.mahout.clustering.classify with parameters of type VectorWritable | |
---|---|
protected void |
ClusterClassificationMapper.map(org.apache.hadoop.io.WritableComparable<?> key,
VectorWritable vw,
org.apache.hadoop.mapreduce.Mapper.Context context)
Mapper which classifies the vectors to respective clusters. |
## Uses of VectorWritable in org.apache.mahout.clustering.fuzzykmeans
Methods in org.apache.mahout.clustering.fuzzykmeans with parameters of type VectorWritable | |
---|---|
double |
SoftCluster.pdf(VectorWritable vw)
|
## Uses of VectorWritable in org.apache.mahout.clustering.iterator
Methods in org.apache.mahout.clustering.iterator that return types with arguments of type VectorWritable | |
---|---|
Model<VectorWritable> |
DistanceMeasureCluster.sampleFromPosterior()
|
Methods in org.apache.mahout.clustering.iterator with parameters of type VectorWritable | |
---|---|
protected void |
CIMapper.map(org.apache.hadoop.io.WritableComparable<?> key,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
double |
DistanceMeasureCluster.pdf(VectorWritable vw)
|
## Uses of VectorWritable in org.apache.mahout.clustering.lda.cvb
Methods in org.apache.mahout.clustering.lda.cvb with parameters of type VectorWritable | |
---|---|
void |
CachingCVB0PerplexityMapper.map(org.apache.hadoop.io.IntWritable docId,
VectorWritable document,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
protected void |
CVB0TopicTermVectorNormalizerMapper.map(org.apache.hadoop.io.IntWritable key,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
void |
CVB0DocInferenceMapper.map(org.apache.hadoop.io.IntWritable docId,
VectorWritable doc,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
void |
CachingCVB0Mapper.map(org.apache.hadoop.io.IntWritable docId,
VectorWritable document,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
## Uses of VectorWritable in org.apache.mahout.clustering.spectral
Methods in org.apache.mahout.clustering.spectral with parameters of type VectorWritable | |
---|---|
protected void |
VectorMatrixMultiplicationJob.VectorMatrixMultiplicationMapper.map(org.apache.hadoop.io.IntWritable key,
VectorWritable row,
org.apache.hadoop.mapreduce.Mapper.Context ctx)
|
protected void |
UnitVectorizerJob.UnitVectorizerMapper.map(org.apache.hadoop.io.IntWritable row,
VectorWritable vector,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
protected void |
MatrixDiagonalizeJob.MatrixDiagonalizeMapper.map(org.apache.hadoop.io.IntWritable key,
VectorWritable row,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
## Uses of VectorWritable in org.apache.mahout.clustering.streaming.mapreduce
Methods in org.apache.mahout.clustering.streaming.mapreduce with parameters of type VectorWritable | |
---|---|
void |
StreamingKMeansMapper.map(org.apache.hadoop.io.Writable key,
VectorWritable point,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
Method parameters in org.apache.mahout.clustering.streaming.mapreduce with type arguments of type VectorWritable | |
---|---|
static Iterable<Centroid> |
StreamingKMeansUtilsMR.getCentroidsFromVectorWritable(Iterable<VectorWritable> inputIterable)
Returns an Iterable of centroids from an Iterable of VectorWritables by creating a new Centroid containing a RandomAccessSparseVector as a delegate for each VectorWritable. |
## Uses of VectorWritable in org.apache.mahout.clustering.topdown.postprocessor
Method parameters in org.apache.mahout.clustering.topdown.postprocessor with type arguments of type VectorWritable | |
---|---|
protected void |
ClusterOutputPostProcessorReducer.reduce(org.apache.hadoop.io.IntWritable key,
Iterable<VectorWritable> values,
org.apache.hadoop.mapreduce.Reducer.Context context)
The key is the remapped cluster id and the values contains the vectors in that cluster. |
## Uses of VectorWritable in org.apache.mahout.common.mapreduce
Methods in org.apache.mahout.common.mapreduce with parameters of type VectorWritable | |
---|---|
protected void |
TransposeMapper.map(org.apache.hadoop.io.IntWritable r,
VectorWritable v,
org.apache.hadoop.mapreduce.Mapper.Context ctx)
|
Method parameters in org.apache.mahout.common.mapreduce with type arguments of type VectorWritable | |
---|---|
protected void |
VectorSumReducer.reduce(org.apache.hadoop.io.WritableComparable<?> key,
Iterable<VectorWritable> values,
org.apache.hadoop.mapreduce.Reducer.Context ctx)
|
protected void |
VectorSumCombiner.reduce(org.apache.hadoop.io.WritableComparable<?> key,
Iterable<VectorWritable> values,
org.apache.hadoop.mapreduce.Reducer.Context ctx)
|
void |
MergeVectorsReducer.reduce(org.apache.hadoop.io.WritableComparable<?> key,
Iterable<VectorWritable> vectors,
org.apache.hadoop.mapreduce.Reducer.Context ctx)
|
void |
MergeVectorsCombiner.reduce(org.apache.hadoop.io.WritableComparable<?> key,
Iterable<VectorWritable> vectors,
org.apache.hadoop.mapreduce.Reducer.Context ctx)
|
## Uses of VectorWritable in org.apache.mahout.math
Methods in org.apache.mahout.math that return VectorWritable | |
---|---|
static VectorWritable |
VectorWritable.merge(Iterator<VectorWritable> vectors)
|
Method parameters in org.apache.mahout.math with type arguments of type VectorWritable | |
---|---|
static VectorWritable |
VectorWritable.merge(Iterator<VectorWritable> vectors)
|
static Vector |
VectorWritable.mergeToVector(Iterator<VectorWritable> vectors)
|
## Uses of VectorWritable in org.apache.mahout.math.hadoop
Methods in org.apache.mahout.math.hadoop with parameters of type VectorWritable | |
---|---|
void |
TransposeJob.TransposeMapper.map(org.apache.hadoop.io.IntWritable r,
VectorWritable v,
org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.IntWritable,VectorWritable> out,
org.apache.hadoop.mapred.Reporter reporter)
|
void |
TimesSquaredJob.TimesMapper.map(org.apache.hadoop.io.IntWritable rowNum,
VectorWritable v,
org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.NullWritable,VectorWritable> out,
org.apache.hadoop.mapred.Reporter rep)
|
void |
TimesSquaredJob.TimesSquaredMapper.map(T rowNum,
VectorWritable v,
org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.NullWritable,VectorWritable> out,
org.apache.hadoop.mapred.Reporter rep)
|
void |
MatrixColumnMeansJob.MatrixColumnMeansMapper.map(org.apache.hadoop.io.Writable r,
VectorWritable v,
org.apache.hadoop.mapreduce.Mapper.Context context)
The mapper computes a running sum of the vectors the task has seen. |
protected double |
TimesSquaredJob.TimesSquaredMapper.scale(VectorWritable v)
|
Method parameters in org.apache.mahout.math.hadoop with type arguments of type VectorWritable | |
---|---|
void |
MatrixMultiplicationJob.MatrixMultiplyMapper.map(org.apache.hadoop.io.IntWritable index,
org.apache.hadoop.mapred.join.TupleWritable v,
org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.IntWritable,VectorWritable> out,
org.apache.hadoop.mapred.Reporter reporter)
|
void |
TransposeJob.TransposeMapper.map(org.apache.hadoop.io.IntWritable r,
VectorWritable v,
org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.IntWritable,VectorWritable> out,
org.apache.hadoop.mapred.Reporter reporter)
|
void |
TimesSquaredJob.TimesMapper.map(org.apache.hadoop.io.IntWritable rowNum,
VectorWritable v,
org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.NullWritable,VectorWritable> out,
org.apache.hadoop.mapred.Reporter rep)
|
void |
TimesSquaredJob.TimesSquaredMapper.map(T rowNum,
VectorWritable v,
org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.NullWritable,VectorWritable> out,
org.apache.hadoop.mapred.Reporter rep)
|
void |
MatrixMultiplicationJob.MatrixMultiplicationReducer.reduce(org.apache.hadoop.io.IntWritable rowNum,
Iterator<VectorWritable> it,
org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.IntWritable,VectorWritable> out,
org.apache.hadoop.mapred.Reporter reporter)
|
void |
MatrixMultiplicationJob.MatrixMultiplicationReducer.reduce(org.apache.hadoop.io.IntWritable rowNum,
Iterator<VectorWritable> it,
org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.IntWritable,VectorWritable> out,
org.apache.hadoop.mapred.Reporter reporter)
|
void |
MatrixColumnMeansJob.MatrixColumnMeansReducer.reduce(org.apache.hadoop.io.NullWritable n,
Iterable<VectorWritable> vectors,
org.apache.hadoop.mapreduce.Reducer.Context context)
|
void |
TimesSquaredJob.VectorSummingReducer.reduce(org.apache.hadoop.io.NullWritable n,
Iterator<VectorWritable> vectors,
org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.NullWritable,VectorWritable> out,
org.apache.hadoop.mapred.Reporter reporter)
|
void |
TimesSquaredJob.VectorSummingReducer.reduce(org.apache.hadoop.io.NullWritable n,
Iterator<VectorWritable> vectors,
org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.NullWritable,VectorWritable> out,
org.apache.hadoop.mapred.Reporter reporter)
|
void |
TransposeJob.MergeVectorsCombiner.reduce(org.apache.hadoop.io.WritableComparable<?> key,
Iterator<VectorWritable> vectors,
org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.WritableComparable<?>,VectorWritable> out,
org.apache.hadoop.mapred.Reporter reporter)
|
void |
TransposeJob.MergeVectorsCombiner.reduce(org.apache.hadoop.io.WritableComparable<?> key,
Iterator<VectorWritable> vectors,
org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.WritableComparable<?>,VectorWritable> out,
org.apache.hadoop.mapred.Reporter reporter)
|
void |
TransposeJob.MergeVectorsReducer.reduce(org.apache.hadoop.io.WritableComparable<?> key,
Iterator<VectorWritable> vectors,
org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.WritableComparable<?>,VectorWritable> out,
org.apache.hadoop.mapred.Reporter reporter)
|
void |
TransposeJob.MergeVectorsReducer.reduce(org.apache.hadoop.io.WritableComparable<?> key,
Iterator<VectorWritable> vectors,
org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.WritableComparable<?>,VectorWritable> out,
org.apache.hadoop.mapred.Reporter reporter)
|
## Uses of VectorWritable in org.apache.mahout.math.hadoop.similarity
Methods in org.apache.mahout.math.hadoop.similarity with parameters of type VectorWritable | |
---|---|
protected void |
VectorDistanceMapper.map(org.apache.hadoop.io.WritableComparable<?> key,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
protected void |
VectorDistanceInvertedMapper.map(org.apache.hadoop.io.WritableComparable<?> key,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
## Uses of VectorWritable in org.apache.mahout.math.hadoop.similarity.cooccurrence
Methods in org.apache.mahout.math.hadoop.similarity.cooccurrence with parameters of type VectorWritable | |
---|---|
protected void |
RowSimilarityJob.VectorNormMapper.map(org.apache.hadoop.io.IntWritable row,
VectorWritable vectorWritable,
org.apache.hadoop.mapreduce.Mapper.Context ctx)
|
protected void |
RowSimilarityJob.CooccurrencesMapper.map(org.apache.hadoop.io.IntWritable column,
VectorWritable occurrenceVector,
org.apache.hadoop.mapreduce.Mapper.Context ctx)
|
protected void |
RowSimilarityJob.UnsymmetrifyMapper.map(org.apache.hadoop.io.IntWritable row,
VectorWritable similaritiesWritable,
org.apache.hadoop.mapreduce.Mapper.Context ctx)
|
protected void |
RowSimilarityJob.CountObservationsMapper.map(org.apache.hadoop.io.IntWritable rowIndex,
VectorWritable rowVectorWritable,
org.apache.hadoop.mapreduce.Mapper.Context ctx)
|
static Vector.Element[] |
Vectors.toArray(VectorWritable vectorWritable)
|
Method parameters in org.apache.mahout.math.hadoop.similarity.cooccurrence with type arguments of type VectorWritable | |
---|---|
static Vector |
Vectors.merge(Iterable<VectorWritable> partialVectors)
|
protected void |
RowSimilarityJob.MergeVectorsReducer.reduce(org.apache.hadoop.io.IntWritable row,
Iterable<VectorWritable> partialVectors,
org.apache.hadoop.mapreduce.Reducer.Context ctx)
|
protected void |
RowSimilarityJob.SimilarityReducer.reduce(org.apache.hadoop.io.IntWritable row,
Iterable<VectorWritable> partialDots,
org.apache.hadoop.mapreduce.Reducer.Context ctx)
|
protected void |
RowSimilarityJob.MergeToTopKSimilaritiesReducer.reduce(org.apache.hadoop.io.IntWritable row,
Iterable<VectorWritable> partials,
org.apache.hadoop.mapreduce.Reducer.Context ctx)
|
protected void |
RowSimilarityJob.SumObservationsReducer.reduce(org.apache.hadoop.io.NullWritable nullWritable,
Iterable<VectorWritable> partialVectors,
org.apache.hadoop.mapreduce.Reducer.Context ctx)
|
static Vector |
Vectors.sum(Iterator<VectorWritable> vectors)
|
## Uses of VectorWritable in org.apache.mahout.math.hadoop.stochasticsvd
Fields in org.apache.mahout.math.hadoop.stochasticsvd with type parameters of type VectorWritable | |
---|---|
protected org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.Writable,VectorWritable> |
ABtJob.QRReducer.rhatCollector
|
protected org.apache.hadoop.mapred.OutputCollector<org.apache.hadoop.io.Writable,VectorWritable> |
ABtDenseOutJob.QRReducer.rhatCollector
|
Methods in org.apache.mahout.math.hadoop.stochasticsvd with parameters of type VectorWritable | |
---|---|
protected void |
VJob.VMapper.map(org.apache.hadoop.io.IntWritable key,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
protected void |
YtYJob.YtYMapper.map(org.apache.hadoop.io.Writable key,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
protected void |
BtJob.BtMapper.map(org.apache.hadoop.io.Writable key,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context context)
We maintain A and QtHat inputs partitioned the same way, so we essentially are performing map-side merge here of A and QtHats except QtHat is stored not row-wise but block-wise. |
protected void |
ABtDenseOutJob.ABtMapper.map(org.apache.hadoop.io.Writable key,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
protected void |
ABtJob.ABtMapper.map(org.apache.hadoop.io.Writable key,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
protected void |
QJob.QMapper.map(org.apache.hadoop.io.Writable key,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
protected void |
UJob.UMapper.map(org.apache.hadoop.io.Writable key,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
Method parameters in org.apache.mahout.math.hadoop.stochasticsvd with type arguments of type VectorWritable | |
---|---|
protected void |
YtYJob.YtYReducer.reduce(org.apache.hadoop.io.IntWritable key,
Iterable<VectorWritable> values,
org.apache.hadoop.mapreduce.Reducer.Context arg2)
|
## Uses of VectorWritable in org.apache.mahout.math.hadoop.stochasticsvd.qr
Constructor parameters in org.apache.mahout.math.hadoop.stochasticsvd.qr with type arguments of type VectorWritable | |
---|---|
QRFirstStep(org.apache.hadoop.conf.Configuration jobConf,
org.apache.hadoop.mapred.OutputCollector<? super org.apache.hadoop.io.Writable,? super DenseBlockWritable> qtHatOut,
org.apache.hadoop.mapred.OutputCollector<? super org.apache.hadoop.io.Writable,? super VectorWritable> rHatOut)
|
|
QRLastStep(Iterator<DenseBlockWritable> qHatInput,
Iterator<VectorWritable> rHatInput,
int blockNum)
|
## Uses of VectorWritable in org.apache.mahout.vectorizer.common
Method parameters in org.apache.mahout.vectorizer.common with type arguments of type VectorWritable | |
---|---|
protected void |
PartialVectorMergeReducer.reduce(org.apache.hadoop.io.WritableComparable<?> key,
Iterable<VectorWritable> values,
org.apache.hadoop.mapreduce.Reducer.Context context)
|
## Uses of VectorWritable in org.apache.mahout.vectorizer.pruner
Method parameters in org.apache.mahout.vectorizer.pruner with type arguments of type VectorWritable | |
---|---|
protected void |
WordsPrunerReducer.reduce(org.apache.hadoop.io.WritableComparable<?> key,
Iterable<VectorWritable> values,
org.apache.hadoop.mapreduce.Reducer.Context context)
|
protected void |
PrunedPartialVectorMergeReducer.reduce(org.apache.hadoop.io.WritableComparable<?> key,
Iterable<VectorWritable> values,
org.apache.hadoop.mapreduce.Reducer.Context context)
|
## Uses of VectorWritable in org.apache.mahout.vectorizer.term
Methods in org.apache.mahout.vectorizer.term with parameters of type VectorWritable | |
---|---|
protected void |
TermDocumentCountMapper.map(org.apache.hadoop.io.WritableComparable<?> key,
VectorWritable value,
org.apache.hadoop.mapreduce.Mapper.Context context)
|
## Uses of VectorWritable in org.apache.mahout.vectorizer.tfidf
Method parameters in org.apache.mahout.vectorizer.tfidf with type arguments of type VectorWritable | |
---|---|
protected void |
TFIDFPartialVectorReducer.reduce(org.apache.hadoop.io.WritableComparable<?> key,
Iterable<VectorWritable> values,
org.apache.hadoop.mapreduce.Reducer.Context context)
|
PREV | NEXT | FRAMES | NO FRAMES