Make MultiHeadAttention use masks from query and value tensors #7951
Changes from all commits
7cddfa6
0132ef5
1b89d25
bdf6ed1
6822278
94766d0
@@ -20,7 +20,7 @@
  */

 /* Original source: keras/layers/attention/multi_head_attention.py */
-import { Tensor, einsum, linalg, mul, ones, serialization, tidy } from '@tensorflow/tfjs-core';
+import { Tensor, einsum, linalg, logicalAnd, mul, ones, serialization, tidy } from '@tensorflow/tfjs-core';
 // tslint:disable-next-line: no-imports-from-dist
 import { arraysEqual } from '@tensorflow/tfjs-core/dist/util_base';
@@ -813,12 +813,20 @@ export class MultiHeadAttention extends Layer {
     return tidy(() => {
       let autoMask: Tensor;

+      const queryMask = query.kerasMask;
+      const valueMask = value.kerasMask;
+      if (queryMask != null) {
+        autoMask = queryMask.expandDims(2);  // Shape is [B, T, 1]
+      }
+      if (valueMask != null) {
+        const mask = valueMask.expandDims(1);  // Shape is [B, 1, S]
+        autoMask = autoMask ? logicalAnd(autoMask, mask) : mask;
+      }
       if (useCausalMask) {
         // the shape of the causal mask is [1, T, S]
         const mask = this.computeCausalMask(query, value);
-        autoMask = mask;
+        autoMask = autoMask ? logicalAnd(autoMask, mask) : mask;
Review comment: How is this associated with the Topology computeMask logic?

Reply: Earlier layers (e.g. Embedding) have computeMask called to compute the mask for their output tensors. This layer uses those masks.
       }

       if (autoMask != null) {
         // Merge attentionMask & automatic mask, to shape [B, T, S]
         attentionMask = attentionMask ?
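As the reply above notes, earlier layers (for example an Embedding layer configured to mask padding) have computeMask called on them, which attaches a boolean mask to their output tensors; those masks arrive here as query.kerasMask and value.kerasMask. A minimal sketch of the merge in isolation, assuming the hypothetical helper name buildAutoMask and its parameters (the PR itself performs this inline inside the layer):

```ts
import {Tensor, logicalAnd} from '@tensorflow/tfjs-core';

/**
 * Illustrative sketch of the automatic-mask merge above.
 * queryMask has shape [B, T], valueMask has shape [B, S], and
 * causalMask (if given) has shape [1, T, S]; the result broadcasts
 * to the final [B, T, S] attention mask.
 */
function buildAutoMask(
    queryMask?: Tensor, valueMask?: Tensor, causalMask?: Tensor): Tensor|null {
  let autoMask: Tensor|null = null;
  if (queryMask != null) {
    autoMask = queryMask.expandDims(2);  // [B, T, 1]
  }
  if (valueMask != null) {
    const mask = valueMask.expandDims(1);  // [B, 1, S]
    autoMask = autoMask ? logicalAnd(autoMask, mask) : mask;
  }
  if (causalMask != null) {
    autoMask = autoMask ? logicalAnd(autoMask, causalMask) : causalMask;
  }
  return autoMask;
}
```

Because the three masks have singleton dimensions in different positions, logicalAnd broadcasts them against each other, so the combined mask only disallows positions ruled out by every applicable source (padded queries, padded values, or future positions under the causal mask).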
Review comment: I moved this here so I could write them as type guards (tensors is Tensor...).
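For readers unfamiliar with the pattern the comment refers to: a TypeScript type guard is a function whose return type is a predicate such as tensors is Tensor, so a successful check narrows a union type for the compiler. A minimal sketch, with the hypothetical helper name isSingleTensor (not part of the PR):

```ts
import {ones, Tensor} from '@tensorflow/tfjs-core';

// Hypothetical helper: the `tensors is Tensor` return type is a type
// predicate, so after a true check the compiler narrows the union
// Tensor|Tensor[] down to a single Tensor.
function isSingleTensor(tensors: Tensor|Tensor[]): tensors is Tensor {
  return !Array.isArray(tensors);
}

const input: Tensor|Tensor[] = ones([2, 3]);
if (isSingleTensor(input)) {
  input.print();  // `input` is narrowed to Tensor inside this branch.
}
```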