Commit

Fix location of ccs bq embedding layer.
PiperOrigin-RevId: 506648389
danielecook authored and Copybara-Service committed Feb 2, 2023
1 parent 63df275 commit 02274ba
Showing 1 changed file with 7 additions and 7 deletions.
14 changes: 7 additions & 7 deletions deepconsensus/models/networks.py
@@ -439,13 +439,6 @@ def encode(self, inputs: tf.Tensor, attention_bias: tf.Tensor,
         embedded = self.ip_embedding_layer(tf.cast(inputs[:, :, i], tf.int32))
         embedded_inputs.append(embedded)
 
-    if self.params.use_ccs_bq:
-      for i in range(*ccs_bq_indices):
-        # Add 1 to ccs base quality scores to shift gaps from -1 to 0.
-        embedded = self.ccs_base_quality_scores_embedding_layer(
-            tf.cast(inputs[:, :, i] + 1, tf.int32))
-        embedded_inputs.append(embedded)
-
     if self.params.use_strand:
       for i in range(*strand_indices):
         embedded = self.strand_embedding_layer(
@@ -458,6 +451,13 @@
             tf.cast(inputs[:, :, i], tf.int32))
         embedded_inputs.append(embedded)
 
+    if self.params.use_ccs_bq:
+      for i in range(*ccs_bq_indices):
+        # Add 1 to ccs base quality scores to shift gaps from -1 to 0.
+        embedded = self.ccs_base_quality_scores_embedding_layer(
+            tf.cast(inputs[:, :, i] + 1, tf.int32))
+        embedded_inputs.append(embedded)
+
     if self.params.use_sn:
       # The last four elements in the last dimension in the inputs tensor
      # correspond to the four signal-to-noise ratio scores for A, G, C, T.
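
For context on the "+ 1" in the relocated block: an embedding lookup (assuming the layer behaves like tf.keras.layers.Embedding) only accepts non-negative integer indices, so the gap value of -1 in the CCS base-quality feature must be shifted to 0 before the lookup. Below is a minimal, self-contained sketch of that pattern; the quality range, vocabulary size, and embedding width are illustrative assumptions, not values taken from DeepConsensus.

import tensorflow as tf

# Hypothetical CCS base-quality column for one window of 5 positions:
# -1 marks a gap, non-negative values are Phred-scaled quality scores.
ccs_bq = tf.constant([[-1, 20, 35, -1, 40]])

# Shift gaps from -1 to 0 so every value is a valid embedding index.
# A vocabulary of 95 assumes qualities 0..93 plus one slot for the shifted gap.
ccs_bq_embedding_layer = tf.keras.layers.Embedding(input_dim=95, output_dim=8)
embedded = ccs_bq_embedding_layer(tf.cast(ccs_bq + 1, tf.int32))

print(embedded.shape)  # (1, 5, 8): one window, 5 positions, 8-dim embedding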
