please help in this regard. #3

Open
srinivasb72s1 opened this issue Apr 10, 2019 · 11 comments

Comments

@srinivasb72s1

Sir,
You have really done wonderful work, but the file weights-full-best.h5 is not available.
How can I overcome this?

@srinivasb72s1
Author

Could you please help with this?
How do I get these files: weights-full-best.h5, weights-core-best.h5, weights-ET-best.h5?
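If the download link is unavailable, one way to produce such checkpoint files yourself is to train with a ModelCheckpoint callback that writes the best weights to the same filename the notebook later loads. A minimal sketch, assuming unet_model(), x, and y come from this repo's notebook:

# Minimal sketch, assuming unet_model(), x, and y are defined as in this repo's notebook.
from keras.callbacks import ModelCheckpoint

model = unet_model()
# Write the best weights seen so far to the filename that load_weights() expects.
checkpoint = ModelCheckpoint('weights-full-best.h5', monitor='val_loss',
                             save_best_only=True, save_weights_only=True, verbose=1)
history = model.fit(x, y, batch_size=16, validation_split=0.2,
                    epochs=40, callbacks=[checkpoint], verbose=1, shuffle=True)

The same pattern with different filenames would produce weights-core-best.h5 and weights-ET-best.h5 for the core and enhancing-tumour models.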

@srinivasb72s1
Author

WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.
Instructions for updating:
Colocations handled automatically by placer.

InvalidArgumentError Traceback (most recent call last)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py in _create_c_op(graph, node_def, inputs, control_inputs)
1658 try:
-> 1659 c_op = c_api.TF_FinishOperation(op_desc)
1660 except errors.InvalidArgumentError as e:

InvalidArgumentError: Shape must be rank 1 but is rank 0 for 'batch_normalization_1/cond/Reshape_4' (op: 'Reshape') with input shapes: [1,64,1,1], [].

During handling of the above exception, another exception occurred:

ValueError Traceback (most recent call last)
in ()
----> 1 model = unet_model()
2 model.load_weights('weights-full-best.h5')
3 #history = model.fit(x, y, batch_size=16, validation_split=0,validation_data = (val_x,val_y) ,epochs = 40,callbacks = callbacks_list ,verbose=1, shuffle=True)

in unet_model()
12 inputs = Input((2, img_size, img_size))
13 conv1 = Conv2D(64, (3, 3), activation='relu', padding='same') (inputs)
---> 14 batch1 = BatchNormalization(axis=1)(conv1)
15 conv1 = Conv2D(64, (3, 3), activation='relu', padding='same') (batch1)
16 batch1 = BatchNormalization(axis=1)(conv1)

/usr/local/lib/python3.6/dist-packages/keras/engine/base_layer.py in call(self, inputs, **kwargs)
455 # Actually call the layer,
456 # collecting output(s), mask(s), and shape(s).
--> 457 output = self.call(inputs, **kwargs)
458 output_mask = self.compute_mask(inputs, previous_mask)
459

/usr/local/lib/python3.6/dist-packages/keras/layers/normalization.py in call(self, inputs, training)
204 return K.in_train_phase(normed_training,
205 normalize_inference,
--> 206 training=training)
207
208 def get_config(self):

/usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py in in_train_phase(x, alt, training)
3121
3122 # else: assume learning phase is a placeholder tensor.
-> 3123 x = switch(training, x, alt)
3124 if uses_learning_phase:
3125 x._uses_learning_phase = True

/usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py in switch(condition, then_expression, else_expression)
3056 x = tf.cond(condition,
3057 then_expression_fn,
-> 3058 else_expression_fn)
3059 else:
3060 # tf.where needs its condition tensor

/usr/local/lib/python3.6/dist-packages/tensorflow/python/util/deprecation.py in new_func(*args, **kwargs)
505 'in a future version' if date is None else ('after %s' % date),
506 instructions)
--> 507 return func(*args, **kwargs)
508
509 doc = _add_deprecated_arg_notice_to_docstring(

/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/control_flow_ops.py in cond(pred, true_fn, false_fn, strict, name, fn1, fn2)
2106 try:
2107 context_f.Enter()
-> 2108 orig_res_f, res_f = context_f.BuildCondBranch(false_fn)
2109 if orig_res_f is None:
2110 raise ValueError("false_fn must have a return value.")

/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/control_flow_ops.py in BuildCondBranch(self, fn)
1939 """Add the subgraph defined by fn() to the graph."""
1940 pre_summaries = ops.get_collection(ops.GraphKeys._SUMMARY_COLLECTION) # pylint: disable=protected-access
-> 1941 original_result = fn()
1942 post_summaries = ops.get_collection(ops.GraphKeys._SUMMARY_COLLECTION) # pylint: disable=protected-access
1943 if len(post_summaries) > len(pre_summaries):

/usr/local/lib/python3.6/dist-packages/keras/layers/normalization.py in normalize_inference()
165 broadcast_gamma,
166 axis=self.axis,
--> 167 epsilon=self.epsilon)
168 else:
169 return K.batch_normalization(

/usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py in batch_normalization(x, mean, var, beta, gamma, axis, epsilon)
1906 # so it may have extra axes with 1, it is not needed and should be removed
1907 if ndim(mean) > 1:
-> 1908 mean = tf.reshape(mean, (-1))
1909 if ndim(var) > 1:
1910 var = tf.reshape(var, (-1))

/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/gen_array_ops.py in reshape(tensor, shape, name)
7177 try:
7178 _, _, _op = _op_def_lib._apply_op_helper(
-> 7179 "Reshape", tensor=tensor, shape=shape, name=name)
7180 except (TypeError, ValueError):
7181 result = _dispatch.dispatch(

/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/op_def_library.py in _apply_op_helper(self, op_type_name, name, **keywords)
786 op = g.create_op(op_type_name, inputs, output_types, name=scope,
787 input_types=input_types, attrs=attr_protos,
--> 788 op_def=op_def)
789 return output_structure, op_def.is_stateful, op
790

/usr/local/lib/python3.6/dist-packages/tensorflow/python/util/deprecation.py in new_func(*args, **kwargs)
505 'in a future version' if date is None else ('after %s' % date),
506 instructions)
--> 507 return func(*args, **kwargs)
508
509 doc = _add_deprecated_arg_notice_to_docstring(

/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py in create_op(failed resolving arguments)
3298 input_types=input_types,
3299 original_op=self._default_original_op,
-> 3300 op_def=op_def)
3301 self._create_op_helper(ret, compute_device=compute_device)
3302 return ret

/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py in __init__(self, node_def, g, inputs, output_types, control_inputs, input_types, original_op, op_def)
1821 op_def, inputs, node_def.attr)
1822 self._c_op = _create_c_op(self._graph, node_def, grouped_inputs,
-> 1823 control_input_ops)
1824
1825 # Initialize self._outputs.

/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py in _create_c_op(graph, node_def, inputs, control_inputs)
1660 except errors.InvalidArgumentError as e:
1661 # Convert to ValueError for backwards compatibility.
-> 1662 raise ValueError(str(e))
1663
1664 return c_op

ValueError: Shape must be rank 1 but is rank 0 for 'batch_normalization_1/cond/Reshape_4' (op: 'Reshape') with input shapes: [1,64,1,1], [].

@AndyWangON
Owner

I had posted the link in the README.

@asoftlabai

thank you

@hechengen

@srinivasb72s1 This is a bug in Keras. I reinstalled Keras 2.1.6, and the bug disappeared.
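A quick way to confirm which versions are installed before rebuilding the model; the Reshape error above comes from the batch_normalization code in the Keras 2.2.x TensorFlow backend, so this is only a hedged check, not instructions from the repo:

# Hedged sanity check: print the installed versions before rebuilding unet_model().
import keras
import tensorflow as tf

print('keras:', keras.__version__)        # 2.2.x hits the Reshape error shown above
print('tensorflow:', tf.__version__)
# Workaround reported in this thread: pip install keras==2.1.6, then restart the runtime.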

@srinivasb72s1
Author

When I am executing the following code on BraTS 2017, I face the error below. Please help me with this.
# training
num = 31100

model = unet_model()
history = model.fit(x, y, batch_size=16, validation_split=0.2, epochs=num_epoch, verbose=1, shuffle=True)
pred = model.predict(x[num:num+100])

ValueError Traceback (most recent call last)
in ()
2
3 model = unet_model()
----> 4 history = model.fit(x, y, batch_size=16, validation_split=0.2 ,epochs= num_epoch, verbose=1, shuffle=True)
5 pred = model.predict(x[num:num+100])

2 frames
/usr/local/lib/python3.6/dist-packages/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, **kwargs)
1628 sample_weight=sample_weight,
1629 class_weight=class_weight,
-> 1630 batch_size=batch_size)
1631 # Prepare validation data.
1632 do_validation = False

/usr/local/lib/python3.6/dist-packages/keras/engine/training.py in _standardize_user_data(self, x, y, sample_weight, class_weight, check_array_lengths, batch_size)
1474 self._feed_input_shapes,
1475 check_batch_axis=False,
-> 1476 exception_prefix='input')
1477 y = _standardize_input_data(y, self._feed_output_names,
1478 output_shapes,

/usr/local/lib/python3.6/dist-packages/keras/engine/training.py in _standardize_input_data(data, names, shapes, check_batch_axis, exception_prefix)
111 ': expected ' + names[i] + ' to have ' +
112 str(len(shape)) + ' dimensions, but got array '
--> 113 'with shape ' + str(data_shape))
114 if not check_batch_axis:
115 data_shape = data_shape[1:]

ValueError: Error when checking input: expected input_7 to have 4 dimensions, but got array with shape (0, 1)
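The shape (0, 1) in this message means the array reaching model.fit() is effectively empty rather than a 4-D batch of (2, img_size, img_size) slices, so the data-loading step is the thing to check. A small shape check before training, assuming x and y were built by this repo's preprocessing and img_size is the slice size used there:

# Assumed check: x should be (num_slices, 2, img_size, img_size) before calling fit().
import numpy as np

x = np.asarray(x, dtype=np.float32)
y = np.asarray(y, dtype=np.float32)
print('x:', x.shape)
print('y:', y.shape)
assert x.ndim == 4 and x.shape[0] > 0, 'x is empty or not 4-D; recheck the data loading'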

@srinivasb72s1
Author

@polo8214, thank you so much sir.

@srinivasb72s1
Author

@hechengen, thank you, sir.

@srinivasb72s1
Author

Please help me in training the model.

@srinivasb72s1
Author

Is there any alternative way to find the dice coefficient and loss values without the model's training history?
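If only the saved weights are available (no History object from training), the dice coefficient can still be computed directly from predictions on a held-out set. A hedged NumPy sketch, assuming binary masks, the usual smoothed dice definition, and that val_x/val_y use the same layout the model was trained on:

# Not the repo's own metric: a plain NumPy dice computed from model.predict() output.
import numpy as np

def dice_coef_np(y_true, y_pred, smooth=1.0, threshold=0.5):
    y_pred = (y_pred > threshold).astype(np.float32)
    intersection = np.sum(y_true * y_pred)
    return (2.0 * intersection + smooth) / (np.sum(y_true) + np.sum(y_pred) + smooth)

pred = model.predict(val_x, batch_size=16)
print('dice:', dice_coef_np(val_y, pred))

Alternatively, if the model is compiled with a dice metric, model.evaluate(val_x, val_y) reports loss and dice without retraining.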

@Jithinpandu

WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.

Instructions for updating:
Colocations handled automatically by placer.
InvalidArgumentError Traceback (most recent call last)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py in _create_c_op(graph, node_def, inputs, control_inputs)
1658 try:
-> 1659 c_op = c_api.TF_FinishOperation(op_desc)
1660 except errors.InvalidArgumentError as e:

InvalidArgumentError: Shape must be rank 1 but is rank 0 for 'batch_normalization_1/cond/Reshape_4' (op: 'Reshape') with input shapes: [1,64,1,1], [].

During handling of the above exception, another exception occurred:

ValueError Traceback (most recent call last)
in ()
----> 1 model = unet_model()
2 model.load_weights('weights-full-best.h5')
3 #history = model.fit(x, y, batch_size=16, validation_split=0,validation_data = (val_x,val_y) ,epochs = 40,callbacks = callbacks_list ,verbose=1, shuffle=True)

in unet_model()
12 inputs = Input((2, img_size, img_size))
13 conv1 = Conv2D(64, (3, 3), activation='relu', padding='same') (inputs)
---> 14 batch1 = BatchNormalization(axis=1)(conv1)
15 conv1 = Conv2D(64, (3, 3), activation='relu', padding='same') (batch1)
16 batch1 = BatchNormalization(axis=1)(conv1)

/usr/local/lib/python3.6/dist-packages/keras/engine/base_layer.py in call(self, inputs, **kwargs)
455 # Actually call the layer,
456 # collecting output(s), mask(s), and shape(s).
--> 457 output = self.call(inputs, **kwargs)
458 output_mask = self.compute_mask(inputs, previous_mask)
459

/usr/local/lib/python3.6/dist-packages/keras/layers/normalization.py in call(self, inputs, training)
204 return K.in_train_phase(normed_training,
205 normalize_inference,
--> 206 training=training)
207
208 def get_config(self):

/usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py in in_train_phase(x, alt, training)
3121
3122 # else: assume learning phase is a placeholder tensor.
-> 3123 x = switch(training, x, alt)
3124 if uses_learning_phase:
3125 x._uses_learning_phase = True

/usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py in switch(condition, then_expression, else_expression)
3056 x = tf.cond(condition,
3057 then_expression_fn,
-> 3058 else_expression_fn)
3059 else:
3060 # tf.where needs its condition tensor

/usr/local/lib/python3.6/dist-packages/tensorflow/python/util/deprecation.py in new_func(*args, **kwargs)
505 'in a future version' if date is None else ('after %s' % date),
506 instructions)
--> 507 return func(*args, **kwargs)
508
509 doc = _add_deprecated_arg_notice_to_docstring(

/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/control_flow_ops.py in cond(pred, true_fn, false_fn, strict, name, fn1, fn2)
2106 try:
2107 context_f.Enter()
-> 2108 orig_res_f, res_f = context_f.BuildCondBranch(false_fn)
2109 if orig_res_f is None:
2110 raise ValueError("false_fn must have a return value.")

/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/control_flow_ops.py in BuildCondBranch(self, fn)
1939 """Add the subgraph defined by fn() to the graph."""
1940 pre_summaries = ops.get_collection(ops.GraphKeys._SUMMARY_COLLECTION) # pylint: disable=protected-access
-> 1941 original_result = fn()
1942 post_summaries = ops.get_collection(ops.GraphKeys._SUMMARY_COLLECTION) # pylint: disable=protected-access
1943 if len(post_summaries) > len(pre_summaries):

/usr/local/lib/python3.6/dist-packages/keras/layers/normalization.py in normalize_inference()
165 broadcast_gamma,
166 axis=self.axis,
--> 167 epsilon=self.epsilon)
168 else:
169 return K.batch_normalization(

/usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py in batch_normalization(x, mean, var, beta, gamma, axis, epsilon)
1906 # so it may have extra axes with 1, it is not needed and should be removed
1907 if ndim(mean) > 1:
-> 1908 mean = tf.reshape(mean, (-1))
1909 if ndim(var) > 1:
1910 var = tf.reshape(var, (-1))

/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/gen_array_ops.py in reshape(tensor, shape, name)
7177 try:
7178 _, _, _op = _op_def_lib._apply_op_helper(
-> 7179 "Reshape", tensor=tensor, shape=shape, name=name)
7180 except (TypeError, ValueError):
7181 result = _dispatch.dispatch(

/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/op_def_library.py in _apply_op_helper(self, op_type_name, name, **keywords)
786 op = g.create_op(op_type_name, inputs, output_types, name=scope,
787 input_types=input_types, attrs=attr_protos,
--> 788 op_def=op_def)
789 return output_structure, op_def.is_stateful, op
790

/usr/local/lib/python3.6/dist-packages/tensorflow/python/util/deprecation.py in new_func(*args, **kwargs)
505 'in a future version' if date is None else ('after %s' % date),
506 instructions)
--> 507 return func(*args, **kwargs)
508
509 doc = _add_deprecated_arg_notice_to_docstring(

/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py in create_op(failed resolving arguments)
3298 input_types=input_types,
3299 original_op=self._default_original_op,
-> 3300 op_def=op_def)
3301 self._create_op_helper(ret, compute_device=compute_device)
3302 return ret

/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py in __init__(self, node_def, g, inputs, output_types, control_inputs, input_types, original_op, op_def)
1821 op_def, inputs, node_def.attr)
1822 self._c_op = _create_c_op(self._graph, node_def, grouped_inputs,
-> 1823 control_input_ops)
1824
1825 # Initialize self._outputs.

/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py in _create_c_op(graph, node_def, inputs, control_inputs)
1660 except errors.InvalidArgumentError as e:
1661 # Convert to ValueError for backwards compatibility.
-> 1662 raise ValueError(str(e))
1663
1664 return c_op

ValueError: Shape must be rank 1 but is rank 0 for 'batch_normalization_1/cond/Reshape_4' (op: 'Reshape') with input shapes: [1,64,1,1], [].

Sir, how did you resolve this issue?
