// torch.autograd.GradOptions
export interface GradOptions {
/**
* If false, the graph used to compute the grad will be freed.
* @default false
*/
retain_graph?: boolean;
/**
* If true, graph of the derivative will be constructed, allowing
* to compute higher order derivative products.
* @default false
*/
create_graph?: boolean;
/**
* If true, specifying inputs that were not used when computing
* outputs (and therefore their grad is always zero) is an error.
* @default false
*/
allow_unused?: boolean;
/**
* If true, and some inputs were not used, their gradients will be
* set to zero tensors instead of None.
* @default false
*/
materialize_grads?: boolean;
}retain_graph(boolean)optional- – If false, the graph used to compute the grad will be freed.
create_graph (boolean, optional) – If true, graph of the derivative will be constructed, allowing to compute higher order derivative products.
allow_unused (boolean, optional) – If false, specifying inputs that were not used when computing outputs (and therefore their grad is always zero) is an error.
materialize_grads (boolean, optional) – If true, and some inputs were not used, their gradients will be set to zero tensors instead of None.
Options for grad computation.