Skip to content
Snippets Groups Projects
Commit 4cda6042 authored by meixu song's avatar meixu song Committed by Daniel Povey
Browse files

[scripts] xconfig: make scripts work when LDA-like preconditioning layer is not used (#1447)

parent 37bebd15
Branches
No related tags found
No related merge requests found
......@@ -444,12 +444,20 @@ def smooth_presoftmax_prior_scale_vector(pdf_counts,
def prepare_initial_network(dir, run_opts, srand=-3):
    """Create {dir}/0.raw by adding the first layer of the network.

    If {dir}/configs/init.config exists (i.e. an LDA-like preconditioning
    layer is in use), nnet3-init stacks layer1.config on top of the
    existing {dir}/init.raw; otherwise the network is initialized from
    layer1.config alone.

    Args:
        dir: experiment directory containing configs/ and init.raw.
        run_opts: options object; run_opts.command is the job-submission
            command prefix (e.g. queue.pl / run.pl invocation).
        srand: random seed passed to nnet3-init.
    """
    # The two cases differ only in whether init.raw is supplied to
    # nnet3-init as the starting network, so build that argument
    # conditionally instead of duplicating the whole command.
    init_raw = ("{0}/init.raw".format(dir)
                if os.path.exists(dir + "/configs/init.config") else "")
    common_lib.run_job(
        """{command} {dir}/log/add_first_layer.log \
                nnet3-init --srand={srand} {init_raw} \
                {dir}/configs/layer1.config {dir}/0.raw""".format(
                    command=run_opts.command, srand=srand,
                    dir=dir, init_raw=init_raw))
def verify_iterations(num_iters, num_epochs, num_hidden_layers,
......
......@@ -786,7 +786,8 @@ class XconfigFixedAffineLayer(XconfigLayerBase):
# the most recent layer.
self.config = { 'input':'[-1]',
'dim':-1,
'affine-transform-file':''}
'affine-transform-file':'',
'write-init-config':True}
def check_configs(self):
if self.config['affine-transform-file'] is None:
......@@ -817,7 +818,7 @@ class XconfigFixedAffineLayer(XconfigLayerBase):
output_dim = self.output_dim()
transform_file = self.config['affine-transform-file']
if self.config['write-init-config']:
# to init.config we write an output-node with the name 'output' and
# with a Descriptor equal to the descriptor that's the input to this
# layer. This will be used to accumulate stats to learn the LDA transform.
......
......@@ -199,7 +199,7 @@ def train(args, run_opts, background_process_handler):
# we do this as it's a convenient way to get the stats for the 'lda-like'
# transform.
if (args.stage <= -5):
if (args.stage <= -5) and os.path.exists(args.dir+"/configs/init.config"):
logger.info("Initializing a basic network for estimating "
"preconditioning matrix")
common_lib.run_job(
......@@ -245,7 +245,7 @@ def train(args, run_opts, background_process_handler):
# use during decoding
common_train_lib.copy_egs_properties_to_exp_dir(egs_dir, args.dir)
if (args.stage <= -3):
if (args.stage <= -3) and os.path.exists(args.dir+"/configs/init.config"):
logger.info('Computing the preconditioning matrix for input features')
train_lib.common.compute_preconditioning_matrix(
......
......@@ -170,7 +170,24 @@ def write_config_files(config_dir, all_layers):
# preserves the backtrace
raise
# remove previous init.config
try:
os.remove(config_dir + '/init.config')
except OSError:
pass
for basename, lines in config_basename_to_lines.items():
# check the lines num start with 'output-node':
num_output_node_lines = sum( [ 1 if line.startswith('output-node' ) else 0
for line in lines ] )
if num_output_node_lines == 0:
if basename == 'init':
continue # do not write the init.config
else:
print('{0}: error in xconfig file {1}: may be lack of a output layer'.format(
sys.argv[0], sys.argv[2]), file=sys.stderr)
raise
header = config_basename_to_header[basename]
filename = '{0}/{1}.config'.format(config_dir, basename)
try:
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Please sign in to comment.