Discussion:
[theano-users] How to solve this error: "TypeError: bool output not supported"?
l***@gmail.com
2017-11-15 14:03:35 UTC
Permalink
I trained the model, but at the end it produced this error. Can anyone help
me? Thank you!

-------------------------------------------------------------------------------
TRAINING HAS BEEN FINISHED:
-------------------------------------------------------------------------------
......

Traceback (most recent call last):
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/bin/run.py",
line 154, in <module>
getattr(lvsr.main, args.pop('func'))(config, **args)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/main.py",
line 920, in train_multistage
stage_params, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/main.py",
line 693, in train
load_log, fast_start)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/main.py",
line 325, in initialize_all
batch=True, prediction=prediction, prediction_mask=prediction_mask)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/bricks/recognizer.py",
line 445, in get_cost_graph
**inputs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 377, in __call__
return self.application.apply(self, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 312, in apply
outputs = self.application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/bricks/recognizer.py",
line 390, in cost
attended=encoded, attended_mask=encoded_mask)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 377, in __call__
return self.application.apply(self, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 312, in apply
outputs = self.application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/sequence_generators.py",
line 326, in cost_matrix
return self.evaluate(outputs, mask=mask, **kwargs)[0]
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 377, in __call__
return self.application.apply(self, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 312, in apply
outputs = self.application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/sequence_generators.py",
line 269, in evaluate
**dict_union(inputs, states, contexts))
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 377, in __call__
return self.application.apply(self, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 312, in apply
outputs = self.application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/attention.py",
line 738, in apply
preprocessed_attended}))
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 377, in __call__
return self.application.apply(self, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 312, in apply
outputs = self.application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/recurrent.py",
line 231, in recurrent_apply
brick.name, application.application_name))
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/scan_module/scan.py",
line 773, in scan
condition, outputs, updates =
scan_utils.get_updates_and_outputs(fn(*args))
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/recurrent.py",
line 213, in scan_function
outputs = application(iterate=False, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 377, in __call__
return self.application.apply(self, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 312, in apply
outputs = self.application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/recurrent.py",
line 133, in recurrent_apply
return application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/attention.py",
line 703, in do_apply
self.preprocessed_attended_name: preprocessed_attended}))
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 377, in __call__
return self.application.apply(self, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 312, in apply
outputs = self.application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/attention.py",
line 616, in take_glimpses
**dict_union(states, glimpses_needed))
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 377, in __call__
return self.application.apply(self, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 312, in apply
outputs = self.application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/bricks/attention.py",
line 141, in take_glimpses
ali_median_pos = ali_to_05[:,1:] - ali_to_05[:,:-1]
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/tensor/var.py",
line 147, in __sub__
return theano.tensor.basic.sub(self, other)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/gof/op.py",
line 615, in __call__
node = self.make_node(*inputs, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/tensor/elemwise.py",
line 578, in make_node
DimShuffle, *inputs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/tensor/elemwise.py",
line 520, in get_output_info
for i in inputs])
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/scalar/basic.py",
line 1041, in make_node
for input in inputs])]
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/scalar/basic.py",
line 1049, in output_types
variables = self.output_types_preference(*types)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/scalar/basic.py",
line 842, in upcast_out_nobool
raise TypeError("bool output not supported")
TypeError: bool output not supported
--
---
You received this message because you are subscribed to the Google Groups "theano-users" group.
To unsubscribe from this group and stop receiving emails from it, send an email to theano-users+***@googlegroups.com.
For more options, visit https://groups.google.com/d/optout.
Pascal Lamblin
2017-11-15 20:46:59 UTC
Permalink
Post by l***@gmail.com
ali_to_05[:,1:] - ali_to_05[:,:-1]
You are subtracting Booleans from Booleans, which Theano does not support
(the subtraction would require a bool output type). Cast `ali_to_05` to an
integer type first, e.g. `ali_to_05.astype('int8')`, before taking the difference.
Post by l***@gmail.com
I trained the model . But at last it output this error ?  Can anyone
help me? ,thankyou
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
.。。。。。。
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/bin/run.py",
line 154, in <module>
    getattr(lvsr.main, args.pop('func'))(config, **args)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/main.py",
line 920, in train_multistage
    stage_params, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/main.py",
line 693, in train
    load_log, fast_start)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/main.py",
line 325, in initialize_all
    batch=True, prediction=prediction, prediction_mask=prediction_mask)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/bricks/recognizer.py",
line 445, in get_cost_graph
    **inputs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 377, in __call__
    return self.application.apply(self, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 312, in apply
    outputs = self.application_function(brick, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/bricks/recognizer.py",
line 390, in cost
    attended=encoded, attended_mask=encoded_mask)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 377, in __call__
    return self.application.apply(self, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 312, in apply
    outputs = self.application_function(brick, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/sequence_generators.py",
line 326, in cost_matrix
    return self.evaluate(outputs, mask=mask, **kwargs)[0]
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 377, in __call__
    return self.application.apply(self, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 312, in apply
    outputs = self.application_function(brick, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/sequence_generators.py",
line 269, in evaluate
    **dict_union(inputs, states, contexts))
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 377, in __call__
    return self.application.apply(self, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 312, in apply
    outputs = self.application_function(brick, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/attention.py",
line 738, in apply
    preprocessed_attended}))
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 377, in __call__
    return self.application.apply(self, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 312, in apply
    outputs = self.application_function(brick, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/recurrent.py",
line 231, in recurrent_apply
    brick.name, application.application_name))
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/scan_module/scan.py",
line 773, in scan
    condition, outputs, updates =
scan_utils.get_updates_and_outputs(fn(*args))
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/recurrent.py",
line 213, in scan_function
    outputs = application(iterate=False, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 377, in __call__
    return self.application.apply(self, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 312, in apply
    outputs = self.application_function(brick, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/recurrent.py",
line 133, in recurrent_apply
    return application_function(brick, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/attention.py",
line 703, in do_apply
    self.preprocessed_attended_name: preprocessed_attended}))
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 377, in __call__
    return self.application.apply(self, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 312, in apply
    outputs = self.application_function(brick, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/attention.py",
line 616, in take_glimpses
    **dict_union(states, glimpses_needed))
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 377, in __call__
    return self.application.apply(self, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
line 312, in apply
    outputs = self.application_function(brick, *args, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/bricks/attention.py",
line 141, in take_glimpses
    ali_median_pos = ali_to_05[:,1:] - ali_to_05[:,:-1]
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/tensor/var.py",
line 147, in __sub__
    return theano.tensor.basic.sub(self, other)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/gof/op.py",
line 615, in __call__
    node = self.make_node(*inputs, **kwargs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/tensor/elemwise.py",
line 578, in make_node
    DimShuffle, *inputs)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/tensor/elemwise.py",
line 520, in get_output_info
    for i in inputs])
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/scalar/basic.py",
line 1041, in make_node
    for input in inputs])]
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/scalar/basic.py",
line 1049, in output_types
    variables = self.output_types_preference(*types)
  File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/scalar/basic.py",
line 842, in upcast_out_nobool
    raise TypeError("bool output not supported")
TypeError: bool output not supported
--
---
You received this message because you are subscribed to the Google
Groups "theano-users" group.
To unsubscribe from this group and stop receiving emails from it, send
For more options, visit https://groups.google.com/d/optout.
--
Pascal Lamblin
--
---
You received this message because you are subscribed to the Google Groups "theano-users" group.
To unsubscribe from this group and stop receiving emails from it, send an email to theano-users+***@googlegroups.com.
For more options, visit https://groups.google.com/d/optout.
l***@gmail.com
2017-11-16 02:10:54 UTC
Permalink
Thank you! I will try it!

圚 2017幎11月16日星期四 UTC+8䞊午4:47:06Pascal Lamblin写道
Post by Pascal Lamblin
Post by l***@gmail.com
ali_to_05[:,1:] - ali_to_05[:,:-1]
you are subtracting Booleans from Booleans, which is not supported.
Maybe cast `ali_to_05` to int8?
Post by l***@gmail.com
I trained the model . But at last it output this error ? Can anyone
help me? ,thankyou
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
Post by l***@gmail.com
.。。。。。。
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/bin/run.py",
Post by l***@gmail.com
line 154, in <module>
getattr(lvsr.main, args.pop('func'))(config, **args)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/main.py",
Post by l***@gmail.com
line 920, in train_multistage
stage_params, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/main.py",
Post by l***@gmail.com
line 693, in train
load_log, fast_start)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/main.py",
Post by l***@gmail.com
line 325, in initialize_all
batch=True, prediction=prediction, prediction_mask=prediction_mask)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/bricks/recognizer.py",
Post by l***@gmail.com
line 445, in get_cost_graph
**inputs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
Post by l***@gmail.com
line 377, in __call__
return self.application.apply(self, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
Post by l***@gmail.com
line 312, in apply
outputs = self.application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/bricks/recognizer.py",
Post by l***@gmail.com
line 390, in cost
attended=encoded, attended_mask=encoded_mask)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
Post by l***@gmail.com
line 377, in __call__
return self.application.apply(self, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
Post by l***@gmail.com
line 312, in apply
outputs = self.application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/sequence_generators.py",
Post by l***@gmail.com
line 326, in cost_matrix
return self.evaluate(outputs, mask=mask, **kwargs)[0]
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
Post by l***@gmail.com
line 377, in __call__
return self.application.apply(self, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
Post by l***@gmail.com
line 312, in apply
outputs = self.application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/sequence_generators.py",
Post by l***@gmail.com
line 269, in evaluate
**dict_union(inputs, states, contexts))
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
Post by l***@gmail.com
line 377, in __call__
return self.application.apply(self, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
Post by l***@gmail.com
line 312, in apply
outputs = self.application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/attention.py",
Post by l***@gmail.com
line 738, in apply
preprocessed_attended}))
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
Post by l***@gmail.com
line 377, in __call__
return self.application.apply(self, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
Post by l***@gmail.com
line 312, in apply
outputs = self.application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/recurrent.py",
Post by l***@gmail.com
line 231, in recurrent_apply
brick.name, application.application_name))
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/scan_module/scan.py",
Post by l***@gmail.com
line 773, in scan
condition, outputs, updates =
scan_utils.get_updates_and_outputs(fn(*args))
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/recurrent.py",
Post by l***@gmail.com
line 213, in scan_function
outputs = application(iterate=False, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
Post by l***@gmail.com
line 377, in __call__
return self.application.apply(self, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
Post by l***@gmail.com
line 312, in apply
outputs = self.application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/recurrent.py",
Post by l***@gmail.com
line 133, in recurrent_apply
return application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/attention.py",
Post by l***@gmail.com
line 703, in do_apply
self.preprocessed_attended_name: preprocessed_attended}))
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
Post by l***@gmail.com
line 377, in __call__
return self.application.apply(self, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
Post by l***@gmail.com
line 312, in apply
outputs = self.application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/attention.py",
Post by l***@gmail.com
line 616, in take_glimpses
**dict_union(states, glimpses_needed))
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
Post by l***@gmail.com
line 377, in __call__
return self.application.apply(self, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/blocks/blocks/bricks/base.py",
Post by l***@gmail.com
line 312, in apply
outputs = self.application_function(brick, *args, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/lvsr/bricks/attention.py",
Post by l***@gmail.com
line 141, in take_glimpses
ali_median_pos = ali_to_05[:,1:] - ali_to_05[:,:-1]
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/tensor/var.py",
Post by l***@gmail.com
line 147, in __sub__
return theano.tensor.basic.sub(self, other)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/gof/op.py",
Post by l***@gmail.com
line 615, in __call__
node = self.make_node(*inputs, **kwargs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/tensor/elemwise.py",
Post by l***@gmail.com
line 578, in make_node
DimShuffle, *inputs)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/tensor/elemwise.py",
Post by l***@gmail.com
line 520, in get_output_info
for i in inputs])
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/scalar/basic.py",
Post by l***@gmail.com
line 1041, in make_node
for input in inputs])]
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/scalar/basic.py",
Post by l***@gmail.com
line 1049, in output_types
variables = self.output_types_preference(*types)
File
"/nobackup/f1/asr/wulong/tensorflowEnv/attention-lvcsr-master/libs/Theano/theano/scalar/basic.py",
Post by l***@gmail.com
line 842, in upcast_out_nobool
raise TypeError("bool output not supported")
TypeError: bool output not supported
--
---
You received this message because you are subscribed to the Google
Groups "theano-users" group.
To unsubscribe from this group and stop receiving emails from it, send
For more options, visit https://groups.google.com/d/optout.
--
Pascal Lamblin
--
---
You received this message because you are subscribed to the Google Groups "theano-users" group.
To unsubscribe from this group and stop receiving emails from it, send an email to theano-users+***@googlegroups.com.
For more options, visit https://groups.google.com/d/optout.
Loading...