Commit 36d51f8d authored by Daniel Povey, committed by Dan Povey

CTC code: Add testing code for context-dependent CTC (CCTC) transition-model object, and numerous bug fixes
parent e6e5d44b
@@ -3,9 +3,10 @@ all:
include ../kaldi.mk
EXTRA_CXXFLAGS += -Wno-sign-compare
LDFLAGS += $(CUDA_LDFLAGS)
LDLIBS += $(CUDA_LDLIBS)
TESTFILES = language-model-test
TESTFILES = language-model-test cctc-transition-model-test
OBJFILES = language-model.o cctc-transition-model.o # ctc-functions.o
......
// ctc/cctc-transition-model-test.cc
// Copyright 2015 Johns Hopkins University (author: Daniel Povey)
// See ../../COPYING for clarification regarding multiple authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
// See the Apache 2 License for the specific language governing permissions and
// limitations under the License.
#include "ctc/cctc-transition-model.h"
#include "ctc/language-model.h"
#include "tree/build-tree.h"
#include "tree/build-tree-utils.h"
namespace kaldi {
namespace ctc {
static void GetTestingData(int32 *vocab_size,
std::vector<std::vector<int32> > *data,
std::vector<std::vector<int32> > *validation_data) {
// read the code of a C++ file as training data.
bool binary;
Input input("language-model.cc", &binary);
KALDI_ASSERT(!binary);
std::istream &is = input.Stream();
std::string line;
*vocab_size = 127;
int32 line_count = 0;
for (; getline(is, line); line_count++) {
std::vector<int32> int_line(line.size());
for (size_t i = 0; i < line.size(); i++) {
int32 this_char = line[i];
if (this_char == 0) {
this_char = 1; // should never happen, but just make sure, as 0 is
// treated as BOS/EOS in the language modeling code.
}
int_line[i] = std::min<int32>(127, this_char);
}
if (line_count % 10 != 0)
data->push_back(int_line);
else
validation_data->push_back(int_line);
}
KALDI_ASSERT(line_count > 0);
}
// This function, modified from GenRandContextDependency(), generates a random
// context-dependency tree that only has left-context, and ensures that all
// pdf-classes are numbered zero (as required for the CCTC code).
static ContextDependency *GenRandContextDependencySpecial(
const std::vector<int32> &phone_ids) {
bool ensure_all_covered = true;
KALDI_ASSERT(IsSortedAndUniq(phone_ids));
int32 num_stats = 1 + (Rand() % 15) * (Rand() % 15); // up to 14^2 + 1 separate stats.
int32 N = 1 + Rand() % 3; // 1, 2 or 3. So 0, 1 or 2 phones of left context.
// The transition-model creation code blows up if
// we have more, as it's based on enumerating all
// phone contexts and then merging identical
// history-states.
int32 P = N - 1; // Ensure tree left-context only.
float ctx_dep_prob = 0.7 + 0.3*RandUniform();
int32 max_phone = *std::max_element(phone_ids.begin(), phone_ids.end());
std::vector<bool> is_ctx_dep(max_phone + 1);
std::vector<int32> hmm_lengths(max_phone + 1, -1);
// I'm guessing the values for i==0 will never be accessed.
for (int32 i = 1; i <= max_phone; i++) {
hmm_lengths[i] = 1;
is_ctx_dep[i] = (RandUniform() < ctx_dep_prob); // true w.p. ctx_dep_prob.
}
// Generate rand stats.
BuildTreeStatsType stats;
size_t dim = 3 + Rand() % 20;
GenRandStats(dim, num_stats, N, P, phone_ids, hmm_lengths,
is_ctx_dep, ensure_all_covered, &stats);
// Now build the tree.
Questions qopts;
int32 num_quest = Rand() % 10, num_iters = Rand() % 5;
qopts.InitRand(stats, num_quest, num_iters, kAllKeysUnion); // This was tested in build-tree-utils-test.cc
float thresh = 100.0 * RandUniform();
EventMap *tree = NULL;
std::vector<std::vector<int32> > phone_sets(phone_ids.size());
for (size_t i = 0; i < phone_ids.size(); i++)
phone_sets[i].push_back(phone_ids[i]);
std::vector<bool> share_roots(phone_sets.size(), true),
do_split(phone_sets.size(), true);
tree = BuildTree(qopts, phone_sets, hmm_lengths, share_roots,
do_split, stats, thresh, 1000, 0.0, P);
DeleteBuildTreeStats(&stats);
return new ContextDependency(N, P, tree);
}
void TestCctcTransitionModelIo(const CctcTransitionModel &trans_model) {
bool binary = (RandInt(0, 1) == 0);
std::ostringstream os;
trans_model.Write(os, binary);
CctcTransitionModel trans_model2;
std::istringstream is(os.str());
trans_model2.Read(is, binary);
std::ostringstream os2;
trans_model2.Write(os2, binary);
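// We only insist on byte-identical output in binary mode, presumably because
// text-mode floating-point formatting need not round-trip exactly.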
if (binary)
KALDI_ASSERT(os.str() == os2.str());
}
void TestCctcTransitionModelProbs(const CctcTransitionModel &trans_model,
const LanguageModel &lm) {
int32 num_phones = trans_model.NumPhones(),
ngram_order = lm.NgramOrder(),
sequence_length = RandInt(1, 20);
std::vector<int32> history;
history.push_back(0); // Beginning-of-sentence history.
int32 current_history_state = trans_model.InitialHistoryState();
for (int32 i = 0; i < sequence_length; i++) {
int32 next_phone = RandInt(1, num_phones);
std::vector<int32> history_plus_eos(history);
history.push_back(next_phone);
history_plus_eos.push_back(0); // add end-of-sentence to the old history
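// The transition model never predicts end-of-sentence, so its phone LM
// probability should equal the raw LM probability renormalized to exclude the
// end-of-sentence mass: e.g. if P(p|h) = 0.5 and P(eos|h) = 0.2, the expected
// value is 0.5 / (1 - 0.2) = 0.625.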
BaseFloat lm_prob = lm.GetProb(history),
lm_prob_renormalized = lm_prob / (1.0 - lm.GetProb(history_plus_eos));
BaseFloat lm_prob_from_trans_model =
trans_model.GetLmProb(current_history_state, next_phone);
AssertEqual(lm_prob_renormalized, lm_prob_from_trans_model);
current_history_state =
trans_model.GetNextHistoryState(current_history_state, next_phone);
if (history.size() > ngram_order - 1)
history.erase(history.begin());
}
}
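// Reference computation of the output index, used below to check
// CctcTransitionModel::GetOutputIndex(): blank (phone == 0) maps to
// num_non_blank_indexes plus the LM history-state of "hist", while a real
// phone maps to the decision-tree pdf-id of its left-context window.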
int32 GetOutputIndex(int32 num_non_blank_indexes,
const ContextDependency &ctx_dep,
const LmHistoryStateMap &history_state_map,
const std::vector<int32> &hist,
int32 phone) {
if (phone == 0) { // blank.
return num_non_blank_indexes + history_state_map.GetLmHistoryState(hist);
} else {
std::vector<int32> ngram(hist);
ngram.push_back(phone);
int32 context_width = ctx_dep.ContextWidth();
while (ngram.size() < static_cast<size_t>(context_width))
ngram.insert(ngram.begin(), 0); // pad with 0s to left.
while (ngram.size() > static_cast<size_t>(context_width))
ngram.erase(ngram.begin()); // shift left.
int32 pdf_class = 0; // we always set pdf_class to 0 in the CCTC code
// (make it as if each phone has one state).
int32 pdf_id;
bool ans = ctx_dep.Compute(ngram, pdf_class, &pdf_id);
KALDI_ASSERT(ans && "Failure computing from tree.");
KALDI_ASSERT(pdf_id >= 0 && pdf_id < num_non_blank_indexes);
return pdf_id;
}
}
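// Checks that the output indexes produced by the transition model agree with
// the reference computation above, along a random phone sequence (with blank
// tested more often than chance).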
void TestCctcTransitionModelIndexes(const CctcTransitionModel &trans_model,
const ContextDependency &ctx_dep,
const LmHistoryStateMap &history_state_map) {
int32 num_phones = trans_model.NumPhones(),
left_context = trans_model.PhoneLeftContext(),
sequence_length = RandInt(1, 20),
num_non_blank_indexes = trans_model.NumNonBlankIndexes();
KALDI_ASSERT(num_non_blank_indexes == ctx_dep.NumPdfs());
std::vector<int32> history;
history.push_back(0); // Beginning-of-sentence history.
int32 current_history_state = trans_model.InitialHistoryState();
for (int32 i = 0; i < sequence_length; i++) {
// test_phone is the phone whose output index we will test, which may be
// zero (blank)
int32 test_phone = RandInt(0, num_phones);
if (RandInt(0, 3) == 0) // Boost probability of seeing zero (blank phone).
test_phone = 0;
int32 trans_model_output_index = trans_model.GetOutputIndex(
current_history_state, test_phone),
output_index = GetOutputIndex(num_non_blank_indexes, ctx_dep,
history_state_map, history, test_phone);
KALDI_ASSERT(trans_model_output_index == output_index);
// Now advance the history-state using a "real" (non-blank) phone.
int32 next_phone = RandInt(1, num_phones);
history.push_back(next_phone);
current_history_state =
trans_model.GetNextHistoryState(current_history_state, next_phone);
if (history.size() > left_context)
history.erase(history.begin());
}
}
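// Top-level test: estimate a phone-level n-gram LM from character data, build
// a random left-context-only decision tree, create a CctcTransitionModel from
// the two, then run the I/O, probability and output-index tests above.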
void CctcTransitionModelTest() {
int32 order = RandInt(1, 4);
int32 vocab_size;
std::vector<std::vector<int32> > data, validation_data;
GetTestingData(&vocab_size, &data, &validation_data);
LanguageModelOptions opts;
opts.ngram_order = order;
if (RandInt(0,3) == 0)
opts.state_count_cutoff1 = 100.0;
if (RandInt(0,3) == 0) {
opts.state_count_cutoff1 = 10.0;
opts.state_count_cutoff2plus = 10.0;
}
if (RandInt(0,5) == 0) {
opts.state_count_cutoff1 = 0.0;
opts.state_count_cutoff2plus = 0.0;
}
LanguageModelEstimator estimator(opts, vocab_size);
for (size_t i = 0; i < data.size(); i++) {
std::vector<int32> &sentence = data[i];
estimator.AddCounts(sentence);
}
estimator.Discount();
LanguageModel lm;
estimator.Output(&lm);
KALDI_LOG << "For order " << order << ", cutoffs "
<< opts.state_count_cutoff1 << ","
<< opts.state_count_cutoff2plus << ", perplexity is "
<< ComputePerplexity(lm, validation_data) << "[valid]"
<< " and " << ComputePerplexity(lm, data) << "[train].";
std::vector<int32> phones;
for (int32 p = 1; p <= 127; p++)
phones.push_back(p);
ContextDependency *dep = GenRandContextDependencySpecial(phones);
CctcTransitionModelCreator creator(*dep, lm);
CctcTransitionModel trans_model;
creator.InitCctcTransitionModel(&trans_model);
TestCctcTransitionModelIo(trans_model);
TestCctcTransitionModelProbs(trans_model, lm);
LmHistoryStateMap history_state_map;
history_state_map.Init(lm);
TestCctcTransitionModelIndexes(trans_model, *dep, history_state_map);
delete dep;
}
} // namespace ctc
} // namespace kaldi
int main() {
for (int32 i = 0; i < 10; i++)
kaldi::ctc::CctcTransitionModelTest();
}
@@ -39,6 +39,27 @@ int32 CctcTransitionModel::GraphLabelToHistoryState(int32 graph_label) const {
return history;
}
int32 CctcTransitionModel::GetNextHistoryState(int32 history_state,
int32 phone) const {
KALDI_ASSERT(static_cast<size_t>(history_state) < history_state_info_.size() &&
phone >= 0 && phone <= num_phones_);
return history_state_info_[history_state].next_history_state[phone];
}
BaseFloat CctcTransitionModel::GetLmProb(int32 history_state,
int32 phone) const {
KALDI_ASSERT(static_cast<size_t>(history_state) < history_state_info_.size() &&
phone >= 0 && phone <= num_phones_);
return history_state_info_[history_state].phone_lm_prob(phone);
}
int32 CctcTransitionModel::GetOutputIndex(int32 history_state,
int32 phone) const {
KALDI_ASSERT(static_cast<size_t>(history_state) < history_state_info_.size() &&
phone >= 0 && phone <= num_phones_);
return history_state_info_[history_state].output_index[phone];
}
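// Example of how these accessors are typically chained (an illustrative
// sketch; "trans_model" and "phones" are hypothetical names, with phones[i]
// in [1, NumPhones()]):
//
//   int32 h = trans_model.InitialHistoryState();
//   for (size_t i = 0; i < phones.size(); i++) {
//     BaseFloat lm_prob = trans_model.GetLmProb(h, phones[i]);
//     int32 output_index = trans_model.GetOutputIndex(h, phones[i]);
//     // ... use lm_prob and output_index ...
//     h = trans_model.GetNextHistoryState(h, phones[i]);
//   }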
int32 CctcTransitionModel::GraphLabelToNextHistoryState(
int32 graph_label) const {
int32 history = graph_label / (num_phones_ + 1),
@@ -79,7 +100,7 @@ void CctcTransitionModel::Check() const {
const HistoryStateInfo &info = history_state_info_[h];
// Seeing blank should not change the history state.
KALDI_ASSERT(info.next_history_state[0] == h);
for (int32 p = 1; p < num_phones; p++) {
for (int32 p = 1; p <= num_phones; p++) {
int32 next_h = info.next_history_state[p];
KALDI_ASSERT(next_h >= 0 && next_h < num_histories);
}
@@ -87,7 +108,7 @@ void CctcTransitionModel::Check() const {
// non-blank indexes.
KALDI_ASSERT(info.output_index[0] >= num_non_blank_indexes_);
output_index_seen[info.output_index[0]] = true;
for (int32 p = 1; p < num_phones; p++) {
for (int32 p = 1; p <= num_phones; p++) {
int32 output_index = info.output_index[p];
KALDI_ASSERT(output_index < num_non_blank_indexes_);
output_index_seen[output_index] = true;
@@ -177,6 +198,8 @@ void CctcTransitionModel::Read(std::istream &is, bool binary) {
ExpectToken(is, binary, "<NumHistoryStates>");
int32 num_history_states = history_state_info_.size();
ReadBasicType(is, binary, &num_history_states);
KALDI_ASSERT(num_history_states > 0 && num_history_states < 10000000);
history_state_info_.resize(num_history_states);
ExpectToken(is, binary, "<HistoryStates>");
for (int32 h = 0; h < num_history_states; h++) {
HistoryStateInfo &info = history_state_info_[h];
@@ -199,7 +222,7 @@ void CctcTransitionModel::ComputeWeights() {
for (int32 h = 0; h < num_history_states; h++) {
const HistoryStateInfo &info = history_state_info_[h];
SubVector<BaseFloat> row(weights, h);
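// For this history state, accumulate into each output index the total
// phone-LM probability of the phones (including blank, p == 0) that map to it.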
for (int32 p = 0; p < num_phones; p++) {
for (int32 p = 0; p <= num_phones; p++) {
int32 output_index = info.output_index[p];
BaseFloat lm_prob = info.phone_lm_prob(p);
row(output_index) += lm_prob;
@@ -208,10 +231,20 @@ void CctcTransitionModel::ComputeWeights() {
weights_.Swap(&weights);
}
CctcTransitionModelCreator::CctcTransitionModelCreator(
const ContextDependency &ctx_dep,
const LanguageModel &phone_lang_model):
ctx_dep_(ctx_dep),
phone_lang_model_(phone_lang_model) { }
void CctcTransitionModelCreator::InitCctcTransitionModel(
CctcTransitionModel *model) {
lm_hist_state_map_.Init(phone_lang_model_);
KALDI_LOG << "Phone language model has "
<< lm_hist_state_map_.NumLmHistoryStates() << " history states.";
KALDI_LOG << "Decision tree has " << (ctx_dep_.ContextWidth() - 1)
<< " phones of left context.";
num_tree_leaves_ = ctx_dep_.NumPdfs();
num_output_indexes_ = num_tree_leaves_ + lm_hist_state_map_.NumLmHistoryStates();
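// Output indexes 0 .. num_tree_leaves_ - 1 are the non-blank (tree pdf)
// indexes; the remaining indexes, one per LM history state, are used for blank.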
KALDI_LOG << "There are " << num_output_indexes_ << " output indexes, = "
@@ -350,16 +383,19 @@ void CctcTransitionModelCreator::GetInitialHistoryStates() {
{
// Work out the index of the initial history-state that appears
// at the beginning of the sentence. This is the one whose
// vector is [ 0 0 ] (zeros repeated up to the left-context of
// the decision tree).
int32 tree_left_context = ctx_dep_.ContextWidth() - 1;
std::vector<int32> sentence_start_hist(tree_left_context, 0);
// vector is [ 0 0 ], i.e. zeros repeated up to the
// left-context of the decision tree, but at least one zero
// if the phone LM is not a 1-gram (because this is how it
// represents the beginning-of-sentence history).
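// (For example, a monophone tree used with a bigram phone LM gives the
// vector [ 0 ]; a tree with two phones of left-context gives [ 0 0 ].)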
int32 tree_left_context = ctx_dep_.ContextWidth() - 1,
start_state_left_context = std::max<int32>(
tree_left_context, phone_lang_model_.NgramOrder() > 1 ? 1 : 0);
std::vector<int32> sentence_start_hist(start_state_left_context, 0);
MapType::iterator iter;
if ((iter = hist_to_state.find(sentence_start_hist)) == hist_to_state.end())
KALDI_ERR << "Cannot find history state for beginning of sentence.";
initial_history_state_ = iter->second;
}
}
void CctcTransitionModelCreator::CreateHistoryInfo(
@@ -377,6 +413,7 @@ void CctcTransitionModelCreator::CreateHistoryInfo(
state.lm_history_state = lm_hist_state_map_.GetLmHistoryState(hist);
state.output_index.resize(num_phones + 1);
state.next_history_state.resize(num_phones + 1);
state.history = hist; // this member only needed for ease of debugging.
KALDI_ASSERT(hist.size() >= static_cast<size_t>(tree_left_context));
for (int32 phone = 0; phone <= num_phones; phone++)
state.output_index[phone] = GetOutputIndex(hist, phone);
......
@@ -196,13 +196,13 @@ class CctcTransitionModel {
// return the number of phones. Phones are one-based, so NumPhones() is the
// index of the largest phone, but phone 0 is used to mean the blank symbol.
int32 NumPhones() { return num_phones_; }
int32 NumPhones() const { return num_phones_; }
// returns the matrix of weights, used for calculating denominator
// probabilities: row index is history-state index from 0 to
// NumHistoryStates() - 1, column index is neural-net output index, from 0 to
// NumOutputIndexes() - 1.
const CuMatrix<BaseFloat> &Weights() { return weights_; }
const CuMatrix<BaseFloat> &Weights() const { return weights_; }
// A graph-label is a similar concept to a transition-id in HMM-based models;
// it's a one-based index that appears on the input side of a decoding graph
@@ -230,7 +230,7 @@ class CctcTransitionModel {
// added some other phone to the right of the existing phone-sequence. (The
// identity of that added phone wouldn't matter as it wouldn't be part of the
// history).
int32 GraphLabelToNextHistoryState(int32 graph_label) const;
int32 GraphLabelToNextHistoryState(int32 graph_label) const;
// Returns the history-state at the beginning of an utterance, corresponding
// to beginning of sentence.
@@ -240,6 +240,19 @@ class CctcTransitionModel {
// corresponding graph label.
int32 PairToGraphLabel(int32 history_state, int32 phone) const;
// Given a history-state and a phone (or 0 for blank), gives the
// next history state.
int32 GetNextHistoryState(int32 history_state, int32 phone) const;
// Returns the language model probability of this phone (or 0 for blank)
// given this history state.
BaseFloat GetLmProb(int32 history_state, int32 phone) const;
// Returns the output-index for this phone [or 0 for blank] given this history
// state.
int32 GetOutputIndex(int32 history_state, int32 phone) const;
// Maps graph-label to the output index (between zero and NumOutputIndexes() - 1),
// which will be used to look up (in the nnet output) the numerator of the
// expression for the likelihood of this phone (or blank).
@@ -318,8 +331,9 @@ class CctcTransitionModel {
// probabilities; it doesn't include the actual neural net.
class CctcTransitionModelCreator {
public:
// This class stores const references to these arguments.
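// (They must therefore remain valid for the lifetime of this object.)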
CctcTransitionModelCreator(const ContextDependency &ctx_dep,
const LanguageModel &phone_lang_model);
const LanguageModel &phone_lang_model);
void InitCctcTransitionModel(CctcTransitionModel *model);
private:
@@ -382,6 +396,13 @@ class CctcTransitionModelCreator {
// merging.
std::vector<int32> next_history_state;
// This member is provided only for possible debugging use in the future, is not
// needed for most of the code, and is not compared in the operator ==. It
// represents a language-model history vector (a sequence of context
// phones); after merging states, it simply represents an arbitrarily chosen
// history vector, one out of many merged ones.
std::vector<int32> history;
bool operator == (const HistoryState &other) const {
return lm_history_state == other.lm_history_state &&
output_index == other.output_index &&
@@ -399,9 +420,16 @@ class CctcTransitionModelCreator {
vec_hasher(hist_info->next_history_state);
}
};
struct HistoryStateEqual {
bool operator () (const HistoryState *const hist_info1,
const HistoryState *const hist_info2) const {
return (*hist_info1 == *hist_info2);
}
};
typedef unordered_map<const HistoryState*, int32,
HistoryStateHasher> HistoryMapType;
HistoryStateHasher, HistoryStateEqual> HistoryMapType;
const ContextDependency &ctx_dep_;
......
@@ -22,16 +22,16 @@
namespace kaldi {
namespace ctc {
void GetTestingData(int32 *vocab_size,
std::vector<std::vector<int32> > *data,
std::vector<std::vector<int32> > *validation_data) {
static void GetTestingData(int32 *vocab_size,
std::vector<std::vector<int32> > *data,
std::vector<std::vector<int32> > *validation_data) {
// read the code of a C++ file as training data.
bool binary;
Input input("language-model.cc", &binary);
KALDI_ASSERT(!binary);
std::istream &is = input.Stream();
std::string line;
*vocab_size = 255;
*vocab_size = 127;
int32 line_count = 0;
for (; getline(is, line); line_count++) {
std::vector<int32> int_line(line.size());
@@ -41,7 +41,7 @@ void GetTestingData(int32 *vocab_size,
this_char = 1; // should never happen, but just make sure, as 0 is
// treated as BOS/EOS in the language modeling code.
}
int_line[i] = this_char;
int_line[i] = std::min<int32>(127, this_char);
}
if (line_count % 10 != 0)
data->push_back(int_line);
......
@@ -53,20 +53,23 @@ ContextDependency *GenRandContextDependency(const std::vector<int32> &phone_ids,
float ctx_dep_prob = 0.7 + 0.3*RandUniform();
int32 max_phone = *std::max_element(phone_ids.begin(), phone_ids.end());
hmm_lengths->clear();
hmm_lengths->resize(max_phone+1, -1);
std::vector<bool> is_ctx_dep(max_phone+1);
hmm_lengths->resize(max_phone + 1, -1);
std::vector<bool> is_ctx_dep(max_phone + 1);
for (int32 i = 0; i <= max_phone; i++) {
(*hmm_lengths)[i] = 1 + Rand() % 3;
is_ctx_dep[i] = (RandUniform() < ctx_dep_prob); // true w.p. ctx_dep_prob.
}
for (size_t i = 0;i < (size_t) num_phones;i++) {
KALDI_VLOG(2) << "For idx = "<< i << ", (phone_id, hmm_length, is_ctx_dep) == " << (phone_ids[i]) << " " << ((*hmm_lengths)[phone_ids[i]]) << " " << (is_ctx_dep[phone_ids[i]]);
}
for (size_t i = 0; i < (size_t) num_phones; i++)
KALDI_VLOG(2) << "For idx = " << i
<< ", (phone_id, hmm_length, is_ctx_dep) == "
<< (phone_ids[i]) << " " << ((*hmm_lengths)[phone_ids[i]])
<< " " << (is_ctx_dep[phone_ids[i]]);
// Generate rand stats.
BuildTreeStatsType stats;
size_t dim = 3 + Rand() % 20;
GenRandStats(dim, num_stats, N, P, phone_ids, *hmm_lengths, is_ctx_dep, ensure_all_covered, &stats);
GenRandStats(dim, num_stats, N, P, phone_ids, *hmm_lengths,
is_ctx_dep, ensure_all_covered, &stats);
// Now build the tree.
@@ -101,14 +104,14 @@ ContextDependency *GenRandContextDependencyLarge(const std::vector<int32> &phone
KALDI_ASSERT(num_phones > 0);
hmm_lengths->clear();
int32 max_phone = *std::max_element(phone_ids.begin(), phone_ids.end());
hmm_lengths->resize(max_phone+1, -1);
std::vector<bool> is_ctx_dep(max_phone+1);
hmm_lengths->resize(max_phone + 1, -1);
std::vector<bool> is_ctx_dep(max_phone + 1);
for (int32 i = 0; i <= max_phone; i++) {
(*hmm_lengths)[i] = 1 + Rand() % 3;
is_ctx_dep[i] = (RandUniform() < ctx_dep_prob); // true w.p. ctx_dep_prob.
}
for (size_t i = 0;i < (size_t) num_phones;i++) {
for (size_t i = 0; i < (size_t) num_phones; i++) {
KALDI_VLOG(2) << "For idx = "<< i << ", (phone_id, hmm_length, is_ctx_dep) == " << (phone_ids[i]) << " " << ((*hmm_lengths)[phone_ids[i]]) << " " << (is_ctx_dep[phone_ids[i]]);
}
// Generate rand stats.
......