Skip to content
This repository has been archived by the owner on Nov 3, 2022. It is now read-only.

Commit

Permalink
Merge pull request #216 from mendesmiguel/typos/fix
Browse files Browse the repository at this point in the history
Typos/fix
  • Loading branch information
ahundt authored Feb 3, 2018
2 parents 5ba2eef + 45ac946 commit 8dde782
Show file tree
Hide file tree
Showing 4 changed files with 8 additions and 8 deletions.
2 changes: 1 addition & 1 deletion keras_contrib/datasets/coco.py
Original file line number Diff line number Diff line change
Expand Up @@ -340,7 +340,7 @@ def coco_image_segmentation_stats(seg_mask_output_paths, annotation_paths, seg_m
# print('\ntarget_shape:', target_shape)
mask_one_hot = np.zeros(target_shape, dtype=np.uint8)

# Note to only count backgroung pixels once, we define a temporary
# Note to only count background pixels once, we define a temporary
# null class of 0, and shift all class category ids up by 1
mask_one_hot[:, :, 0] = 1 # every pixel begins as background

Expand Down
2 changes: 1 addition & 1 deletion keras_contrib/datasets/conll2000.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def _process_data(data, vocab, pos_tags, chunk_tags, maxlen=None, onehot=False):

x = pad_sequences(x, maxlen) # left padding

y_pos = pad_sequences(y_pos, maxlen, value=-1) # lef padded with -1. Indeed, any interger works as it will be masked
y_pos = pad_sequences(y_pos, maxlen, value=-1) # left padded with -1. Indeed, any integer works as it will be masked
y_chunk = pad_sequences(y_chunk, maxlen, value=-1)

if onehot:
Expand Down
2 changes: 1 addition & 1 deletion keras_contrib/initializers/convaware.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ class ConvolutionAware(Initializer):
"""
Initializer that generates orthogonal convolution filters in the fourier
space. If this initializer is passed a shape that is not 3D or 4D,
orthogonal intialization will be used.
orthogonal initialization will be used.
# Arguments
eps_std: Standard deviation for the random normal noise used to break
symmetry in the inverse fourier transform.
Expand Down
10 changes: 5 additions & 5 deletions keras_contrib/layers/crf.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ class CRF(Layer):
This implementation has two modes for optimization:
1. (`joint mode`) optimized by maximizing the joint likelihood, which is optimal in theory of statistics.
Note that in this case, CRF mast be the output/last layer.
Note that in this case, CRF must be the output/last layer.
2. (`marginal mode`) return marginal probabilities on each time step and optimized via composition
likelihood (product of marginal likelihood), i.e., using `categorical_crossentropy` loss.
Note that in this case, CRF can be either the last layer or an intermediate layer (though not explored).
Expand Down Expand Up @@ -72,9 +72,9 @@ class CRF(Layer):
gives one-hot representation of the best path at test (prediction) time,
while the latter is recommended and chosen as default when `learn_mode = 'marginal'`,
which produces marginal probabilities for each time step.
sparse_target: Boolen (default False) indicating if provided labels are one-hot or
sparse_target: Boolean (default False) indicating if provided labels are one-hot or
indices (with shape 1 at dim 3).
use_boundary: Boolen (default True) inidicating if trainable start-end chain energies
use_boundary: Boolean (default True) indicating if trainable start-end chain energies
should be added to model.
use_bias: Boolean, whether the layer uses a bias vector.
kernel_initializer: Initializer for the `kernel` weights matrix,
Expand Down Expand Up @@ -372,7 +372,7 @@ def add_boundary_energy(self, energy, mask, start, end):
return energy

def get_log_normalization_constant(self, input_energy, mask, **kwargs):
"""Compute logarithm of the normalization constance Z, where
"""Compute logarithm of the normalization constant Z, where
Z = sum exp(-E) -> logZ = log sum exp(-E) =: -nlogZ
"""
# should have logZ[:, i] == logZ[:, j] for any i, j
Expand Down Expand Up @@ -436,7 +436,7 @@ def step(self, input_energy_t, states, return_logZ=True):
def recursion(self, input_energy, mask=None, go_backwards=False, return_sequences=True, return_logZ=True, input_length=None):
"""Forward (alpha) or backward (beta) recursion
If `return_logZ = True`, compute the logZ, the normalization constance:
If `return_logZ = True`, compute the logZ, the normalization constant:
\[ Z = \sum_{y1, y2, y3} exp(-E) # energy
= \sum_{y1, y2, y3} exp(-(u1' y1 + y1' W y2 + u2' y2 + y2' W y3 + u3' y3))
Expand Down

0 comments on commit 8dde782

Please sign in to comment.