// torch.autograd.BackwardOptions
export interface BackwardOptions {
/**
* If false, the graph used to compute the grads will be freed.
* Setting to true allows computing higher order derivatives.
* @default false
*/
retain_graph?: boolean;
/**
* If true, graph of the derivative will be constructed, allowing
* to compute higher order derivative products.
* @default false
*/
create_graph?: boolean;
/**
* Inputs w.r.t. which the gradient will be accumulated into .grad.
* All other tensors will be ignored. If not provided, the gradient
* is accumulated into all leaf tensors.
*/
inputs?: Tensor | Tensor[];
}retain_graph(boolean)optional- – If false, the graph used to compute the grads will be freed. Setting to true allows computing higher order derivatives.
// create_graph (boolean, optional) – If true, graph of the derivative will be
// constructed, allowing to compute higher order derivative products.
// Options for backward computation.