instantiate_from_args

Configuration classes and factory functions for assembling OFDFT density-optimization runs.

class BaseConfig(xyzfile: tuple[str | Path, ...], charge: int = 0, initialization: str = 'minao', normalize_initial_guess: bool = True, save_result: bool = True, ks_basis: str | None = None, proj_minao_module: str | None = None, sad_guess_kwargs: dict | None = None, disable_printing: bool | None = None)

Configuration required to construct the base OFDFT inputs.

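A minimal construction sketch; the import path instantiate_from_args is an assumption based on this page's module name, and the basis string is a placeholder:

    from pathlib import Path

    from instantiate_from_args import BaseConfig  # import path is an assumption

    # Only xyzfile is required; the remaining fields keep the defaults
    # shown in the signature above.
    base_args = BaseConfig(
        xyzfile=(Path("water.xyz"), Path("methane.xyz")),  # always a tuple
        charge=0,
        initialization="minao",
        ks_basis="def2-tzvp",  # placeholder basis name, for illustration
    )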

class ModelConfig(model: str, use_last_ckpt: bool = True, device: str = 'cpu', transform_device: str = 'cpu', negative_integrated_density_penalty_weight: float = 0.0)

Configuration describing the trained MLDFT model to use.

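For example, selecting a model by name and moving inference to GPU (a sketch; the model name is a placeholder resolved via get_runpath below):

    from instantiate_from_args import ModelConfig  # import path is an assumption

    model_args = ModelConfig(
        model="my-trained-model",  # placeholder name
        use_last_ckpt=True,        # load the most recent checkpoint
        device="cuda",             # evaluate the MLDFT model on GPU
        transform_device="cpu",    # keep input transforms on CPU
    )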

class OptimizerConfig(optimizer: str = 'gradient-descent-torch', max_cycle: int = 10000, convergence_tolerance: float = 0.0001, lr: float = 0.001, momentum: float = 0.9, betas: Sequence[float] = (0.9, 0.999))

Configuration controlling the density optimization routine.

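A sketch of a tightened setup; the 'vector-adam' key is an assumption inferred from get_vector_adam_optimizer below, as only the default 'gradient-descent-torch' is confirmed by this page:

    from instantiate_from_args import OptimizerConfig  # import path is an assumption

    optimizer_args = OptimizerConfig(
        optimizer="vector-adam",     # assumed key; default is "gradient-descent-torch"
        max_cycle=5000,              # cap on density-optimization steps
        convergence_tolerance=1e-5,  # tighter than the 1e-4 default
        lr=1e-3,
        betas=(0.9, 0.999),          # Adam-style moment coefficients
    )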

get_gradient_descent_optimizer(optimizer_args: OptimizerConfig) → GradientDescent

Instantiate a simple gradient descent optimizer.

get_gradient_descent_torch_optimizer(optimizer_args: OptimizerConfig) → TorchOptimizer

Instantiate a gradient descent optimizer using PyTorch’s SGD.

get_optimizer_from_optimizer_args(optimizer_args: OptimizerConfig) → GradientDescent | TorchOptimizer | VectorAdam

Instantiate an optimizer from optimizer arguments, dispatching on optimizer_args.optimizer to one of the get_*_optimizer constructors.
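A minimal dispatch sketch using the documented default key:

    from instantiate_from_args import (  # import path is an assumption
        OptimizerConfig,
        get_optimizer_from_optimizer_args,
    )

    # The default key selects the PyTorch SGD-backed optimizer.
    optimizer = get_optimizer_from_optimizer_args(
        OptimizerConfig(optimizer="gradient-descent-torch", lr=1e-3)
    )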

get_runpath(name: str) → Path

Get the path to a named model.

get_sample_generator_from_model_args(model_args: ModelConfig) → SampleGenerator

Instantiate a SampleGenerator from model arguments.

get_vector_adam_optimizer(optimizer_args: OptimizerConfig) → VectorAdam

Instantiate a Vector Adam optimizer.

get_xyzfiles_from_base_args(base_args: BaseConfig) → list[Path]

Get a list of XYZ files from base arguments.
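
Putting the pieces together, a hedged end-to-end sketch (the import path and model name are assumptions; only calls documented on this page are used):

    from pathlib import Path

    from instantiate_from_args import (
        BaseConfig,
        ModelConfig,
        OptimizerConfig,
        get_optimizer_from_optimizer_args,
        get_runpath,
        get_sample_generator_from_model_args,
        get_xyzfiles_from_base_args,
    )

    base_args = BaseConfig(xyzfile=(Path("water.xyz"),))
    model_args = ModelConfig(model="my-trained-model")  # placeholder name
    optimizer_args = OptimizerConfig()  # gradient-descent-torch defaults

    run = get_runpath(model_args.model)                # Path to the named model
    xyzfiles = get_xyzfiles_from_base_args(base_args)  # list[Path]
    generator = get_sample_generator_from_model_args(model_args)
    optimizer = get_optimizer_from_optimizer_args(optimizer_args)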