xhci: clear extra bits from slot context when setting max exit latency
If we need to change the max exit latency with an Evaluate Context command, we copy the old output slot context and use it as the input context for the command. This also copies the dev_state bits, which are supposed to be zero in the input slot context.

Signed-off-by: Mathias Nyman <mathias.nyman@linux.intel.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
commit 4801d4ea55
parent 69defe04ec
@@ -4009,6 +4009,7 @@ static int __maybe_unused xhci_change_max_exit_latency(struct xhci_hcd *xhci,
 	slot_ctx = xhci_get_slot_ctx(xhci, command->in_ctx);
 	slot_ctx->dev_info2 &= cpu_to_le32(~((u32) MAX_EXIT));
 	slot_ctx->dev_info2 |= cpu_to_le32(max_exit_latency);
+	slot_ctx->dev_state = 0;
 
 	xhci_dbg_trace(xhci, trace_xhci_dbg_context_change,
 			"Set up evaluate context for LPM MEL change.");
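For context, here is a minimal user-space sketch of the pattern the fix addresses; it is not kernel code, the struct layout only mirrors the slot-context fields named in the diff, and the copy helper and latency value are hypothetical stand-ins. It shows why zeroing dev_state is needed: the input context is seeded by copying the output context, so xHC-owned state bits come along for the ride.

/*
 * Simplified sketch (not the driver itself): seed an input slot context
 * from the output context, update the max exit latency field, and clear
 * the copied dev_state bits, which must be zero in an input context.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define MAX_EXIT	0xffff		/* max exit latency bits in dev_info2 */

struct slot_ctx {
	uint32_t dev_info;
	uint32_t dev_info2;		/* holds the max exit latency field */
	uint32_t tt_info;
	uint32_t dev_state;		/* owned by the xHC in output contexts */
};

/* Copy the current output context into the input context, as the driver does
 * before issuing an Evaluate Context command. */
static void slot_copy(struct slot_ctx *in, const struct slot_ctx *out)
{
	memcpy(in, out, sizeof(*in));
}

int main(void)
{
	struct slot_ctx out = { .dev_info2 = 0x0123, .dev_state = 0x05 };
	struct slot_ctx in;

	slot_copy(&in, &out);

	/* Update the max exit latency field... */
	in.dev_info2 &= ~(uint32_t)MAX_EXIT;
	in.dev_info2 |= 200;		/* hypothetical new latency value */

	/* ...and clear dev_state: the copy brought over the xHC-owned state
	 * bits, which the fix explicitly zeroes in the input slot context. */
	in.dev_state = 0;

	printf("dev_info2=0x%x dev_state=0x%x\n", in.dev_info2, in.dev_state);
	return 0;
}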