Coverage for encodermap/parameters/parameters.py: 33%
209 statements
coverage.py v7.1.0, created at 2023-02-07 11:05 +0000
1# -*- coding: utf-8 -*-
2# encodermap/parameters/parameters.py
3################################################################################
4# Encodermap: A python library for dimensionality reduction.
5#
6# Copyright 2019-2022 University of Konstanz and the Authors
7#
8# Authors:
9# Kevin Sawade, Tobias Lemke
10#
11# Encodermap is free software: you can redistribute it and/or modify
12# it under the terms of the GNU Lesser General Public License as
13# published by the Free Software Foundation, either version 2.1
14# of the License, or (at your option) any later version.
15# This package is distributed in the hope that it will be useful to other
16# researchers. IT DOES NOT COME WITH ANY WARRANTY WHATSOEVER; without even the
17# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
18# See the GNU Lesser General Public License for more details.
19#
20# See <http://www.gnu.org/licenses/>.
21################################################################################
22"""Parameter Classes for Encodermap.
24This module contains parameter classes which are used to hold information for
25the EncoderMap autoencoder. Parameters can be set from keyword arguments, by
26overwriting the class attributes or by reading them from .json, .yaml or ASCII files.
28Features:
29 * Setting and saving parameters with the Parameters class.
30 * Loading parameters from disk and continuing where you left off.
31 * The Parameters and ADCParameters classes already contain sensible default values.
33"""
36################################################################################
37# Imports
38################################################################################
41from __future__ import annotations
43import datetime
44import json
45import os
46from math import pi
47from textwrap import wrap
49from .._optional_imports import _optional_import
50from ..misc.misc import _datetime_windows_and_linux_compatible, printTable
52################################################################################
53# Optional Imports
54################################################################################
57yaml = _optional_import("yaml")
60################################################################################
61# Typing
62################################################################################
65from typing import TYPE_CHECKING, Dict, Optional, TypeVar, Union
67ParametersData = Union[
68 float, int, str, bool, list[int], list[str], list[float], tuple[int, None], None
69]
70ParametersDict = Dict[str, ParametersData]
71ParametersType = TypeVar("ParametersType", bound="ParametersFramework")
72ADCParametersType = TypeVar("ADCParametersType", bound="ParametersFramework")
75################################################################################
76# Globals
77################################################################################
80__all__ = ["Parameters", "ADCParameters"]
83################################################################################
84# Functions
85################################################################################
88def search_and_replace(
89 file_path: str,
90 search_pattern: str,
91 replacement: str,
92 out_path: Optional[str] = None,
93 backup: bool = True,
94) -> None:
95 """Searches for a pattern in a text file and replaces it with the replacement
97 Args:
98 file_path (str): File path of the file to replace the text pattern in.
99 search_pattern (str): Pattern to search for.
100 replacement (str): What to replace `search_pattern` with.
101 out_path (str, optional): Path where the output file should be written.
102 If no path is given, the original file will be replaced. Defaults to None.
103 backup (bool, optional): If True, the original file is
104 renamed to filename.bak before it is overwritten. Defaults to True.
106 Examples:
107 >>> with open('path/to/file', 'r') as f:
108 ... text = f.read()
109 >>> print(text)
110 This is a Test file.
111 >>> search_and_replace('path/to/file', 'Test', 'new Test')
112 >>> with open('path/to/file', 'r') as f:
113 ... text = f.read()
114 >>> print(text)
115 This is a new Test file.
117 """
118 with open(file_path, "r") as f:
119 file_data = f.read()
121 file_data = file_data.replace(search_pattern, replacement)
123 if out_path is None:
124 out_path = file_path
125 if backup:
126 os.rename(file_path, file_path + ".bak")
128 with open(out_path, "w") as file:
129 file.write(file_data)
132################################################################################
133# Classes
134################################################################################
137class ParametersFramework:
138 """Class to work with Parameters in the form of dict or attributes.
140 Parameters can be set via keyword args in init, set as
141 instance attributes or read from disk. Can write parameters
142 to disk in .yaml or .json format.
144 Attributes:
145 main_path (str): The main path of the parameter class.
146 defaults (dict): The defaults passed into the parent class by the child classes
147 Parameters() and ADCParameters().
149 Methods:
150 save(path): Saves the current parameters to disk in .json or .yaml format.
153 """
155 n_neurons: list[int]
156 activation_functions: list[str]
158 def __init__(self, defaults: ParametersDict, **kwargs: ParametersData) -> None:
159 """Instantiate the ParametersFramework class.
161 This class is not meant to be used alone, but as a parent class for
162 different parameters.
164 Args:
165 defaults (dict): A dictionary of default values.
166 **kwargs: Arbitrary keyword arguments. If these arguments are not
167 keys of the `defaults` dictionary, they will be ignored.
168 Otherwise, they will overwrite the keys in the defaults dict.
171 """
172 self.main_path = os.getcwd()
173 self.defaults = defaults
175 # overwrite class defaults with user input **kwargs
176 self._setattr(self.defaults)
177 for key, value in kwargs.items():
178 if key not in self.__dict__.keys():
179 if key == "n_epochs":
180 print(
181 "Parameter `n_epochs` and `n_steps_per_epoch` is deprecated. Use `n_steps` instead."
182 )
183 print(f"Dropping unknown dict entry for {{'{key}': {value}}}")
184 else:
185 setattr(self, key, value)
186 if len(self.n_neurons) != len(self.activation_functions) - 1:
187 raise Exception(
188 f"Length of `n_neurons` and `activation_functions` (-1) does not match: {self.n_neurons}, {self.activation_functions}"
189 )
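# Editor's sketch of the keyword handling above (not part of the original
# source): known keys overwrite the defaults, unknown keys are dropped with a
# message.
#
#     >>> from encodermap import Parameters
#     >>> p = Parameters(batch_size=128, not_a_parameter=1)
#     Dropping unknown dict entry for {'not_a_parameter': 1}
#     >>> p.batch_size
#     128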
191 def save(self, path: Optional[str] = None) -> str:
192 """Save parameters in json format or yaml format.
194 Args:
195 path (str, optional): Path where parameters should be saved. Possible extensions are '.json' and '.yaml'.
196 If no path is given, main_path/parameters.json is used. Defaults to None.
198 Returns:
199 str: The path where the parameters were saved.
201 """
202 if path is None:
203 path = os.path.join(self.main_path, "parameters.json")
204 fmt = "json"
205 if os.path.isfile(path):
206 filename, extension = os.path.splitext(path)
207 time = _datetime_windows_and_linux_compatible()
208 os.rename(path, filename + "_back_" + time + extension)
209 fmt = path.split(".")[-1]
210 if fmt not in ["json", "yaml"]:
211 raise OSError(
212 f"Unrecognized extension .{fmt}. Please provide either '.json' or '.yaml'"
213 )
214 with open(path, "w") as f:
215 if fmt == "json": 215 ↛ 218line 215 didn't jump to line 218, because the condition on line 215 was never false
216 json.dump(self.__dict__, f, indent=4, sort_keys=True)
217 else:
218 yaml.dump(self.__dict__, f, default_flow_style=True)
219 return path
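# Editor's example of the format handling in save() above (hypothetical paths;
# assumes the target directory exists and PyYAML is installed):
#
#     >>> p = Parameters()
#     >>> p.save("/tmp/run1/parameters.yaml")
#     '/tmp/run1/parameters.yaml'
#     >>> p.save("/tmp/run1/parameters.txt")
#     Traceback (most recent call last):
#         ...
#     OSError: Unrecognized extension .txt. Please provide either '.json' or '.yaml'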
221 @property
222 def parameters(self) -> str:
223 """str: A string that contains tabulated parameter values."""
224 doc_p = Parameters.__doc__.split("Attributes:")[1].split("Examples:")[0]
225 doc_p = (
226 "\n".join(map(lambda x: x.lstrip(" "), doc_p.splitlines()))
227 .lstrip("\n")
228 .rstrip("\n\n")
229 .splitlines()
230 )
231 doc = ADCParameters.__doc__.split("Attributes:")[1].split("Examples:")[0]
232 doc = (
233 "\n".join(map(lambda x: x.lstrip(" "), doc.splitlines()))
234 .lstrip("\n")
235 .rstrip("\n\n")
236 .splitlines()
237 )
238 doc = doc_p + doc
239 descr_dict = {}
240 key = doc[0].split("):")[0].split()[0]
241 descr = doc[0].split("): ")[-1]
242 for line in doc[1:]:
243 if "):" not in line:
244 descr = descr + " " + line
245 else:
246 descr_dict[key] = descr
247 key = line.split("):")[0].split()[0]
248 descr = line.split("): ")[-1]
249 else:
250 descr_dict[key] = descr
251 out = []
252 for key, value in self.__dict__.items():
253 if key in self.defaults:
254 out.append(
255 {
256 "Parameter": key,
257 "Value": value,
258 "Description": "\n".join(wrap(descr_dict[key], width=50)),
259 }
260 )
261 return printTable(out, sep="\n")
263 @classmethod
264 def from_file(cls, path: str) -> Union[ParametersType, ADCParametersType]:
265 """Alternative constructor for ParameterFramework classes.
267 Reads a file and sets the attributes based on that.
269 Args:
270 path (str): Path to the parameters.json or parameters.yaml file
272 Returns:
273 ParametersFramework: A new ParametersFramework class.
275 """
276 with open(path, "r") as f:
277 if path.split(".")[-1] == "json":
278 params = json.load(f)
279 elif path.split(".")[-1] == "yaml":
280 params = yaml.load(f, Loader=yaml.FullLoader)
281 else:
282 raise ValueError(
283 f"The extension of the provided file should be `.json`, or `.yaml`. You provided {path.split('.')[1]}"
284 )
286 if "n_epochs" in params:
287 print(
288 "Detected old definition `n_epochs` and `n_steps_per_epoch`. I will change that to `n_steps` = `n_epochs` * `n_steps_per_epoch`."
289 )
290 params["n_steps"] = params["n_epochs"] * params["n_steps_per_epoch"]
292 # also check soft start
293 if "cartesian_cost_scale_soft_start" in params:
294 if params["cartesian_cost_scale_soft_start"] != (None, None) and params[
295 "cartesian_cost_scale_soft_start"
296 ] != [None, None]:
297 a, b = params["cartesian_cost_scale_soft_start"]
298 a *= params["n_steps_per_epoch"]
299 b *= params["n_steps_per_epoch"]
300 params["cartesian_cost_scale_soft_start"] = (a, b)
302 # fix summary step and checkpoint_step
303 params["summary_step"] *= params["n_steps_per_epoch"]
304 params["checkpoint_step"] *= params["n_steps_per_epoch"]
306 del params["n_epochs"]
307 del params["n_steps_per_epoch"]
309 return cls(**params)
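# Editor's worked example of the legacy conversion in from_file() above
# (illustrative numbers): an old parameters file with n_epochs=10 and
# n_steps_per_epoch=1000 yields n_steps = 10 * 1000 = 10000; summary_step and
# checkpoint_step are multiplied by 1000 as well, and a soft start of (2, 8)
# becomes (2000, 8000).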
311 @classmethod
312 def load(cls, path: str) -> Union[ParametersType, ADCParametersType]:
313 """Loads the parameters saved in a .json or .yaml file into a new Parameter object.
315 Args:
316 path (str): Path to the parameters.json or parameters.yaml file
318 Returns:
319 ParametersFramework: A new ParametersFramework class.
321 """
322 with open(path, "r") as f:
323 if path.split(".")[1] == "json":
324 params = json.load(f)
325 elif path.split(".")[1] == "yaml":
326 params = yaml.load(f, Loader=yaml.FullLoader)
327 else:
328 raise ValueError(
329 f"The extension of the provided file should be `.json`, or `.yaml`. You provided {path.split('.')[1]}"
330 )
332 if "n_epochs" in params:
333 print(
334 "Detected old definition `n_epochs` and `n_steps_per_epoch`. I will change that to `n_steps` = `n_epochs` * `n_steps_per_epoch`."
335 )
336 params["n_steps"] = params["n_epochs"] * params["n_steps_per_epoch"]
337 del params["n_epochs"]
338 del params["n_steps_per_epoch"]
340 # check whether the parameters file has been moved and update it accordingly.
341 if params["main_path"] != os.path.dirname(path):
342 print(
343 "seems like the parameter file was moved to another directory. Parameter file is updated ..."
344 )
345 search_and_replace(path, params["main_path"], os.path.dirname(path))
346 with open(path, "r") as f:
347 if path.split(".")[-1] == "json":
348 params = json.load(f)
349 elif path.split(".")[-1] == "yaml":
350 params = yaml.load(f, Loader=yaml.FullLoader)
351 else:
352 raise ValueError(
353 f"The extension of the provided file should be `.json` or `.yaml`. You provided {path.split('.')[-1]}"
354 )
356 return cls(**params)
358 def update(self, **kwargs: ParametersData) -> None:
359 """Updates the values of `self`.
361 Args:
362 **kwargs: Arbitrary keyword arguments. If these arguments are not
363 keys of the `self.defaults` dictionary, they will be ignored.
364 Otherwise, they will overwrite the keys in the defaults dict.
366 """
367 for key, value in kwargs.items():
368 if key not in self.__dict__.keys():
369 print(f"Dropping unknown dict entry for {{'{key}': {value}}}")
370 else:
371 setattr(self, key, value)
373 def _setattr(self, dictionary: ParametersDict) -> None:
374 """Updates the values of `self.`
376 Args:
377 dictionary (dict):
379 """
380 if "cartesian_cost_scale_soft_start" in dictionary:
381 if dictionary["cartesian_cost_scale_soft_start"] is not None and dictionary[
382 "cartesian_cost_scale_soft_start"
383 ] != (None, None):
384 if len(dictionary["cartesian_cost_scale_soft_start"]) != 2:
385 raise Exception(
386 "Parameter cartesian_cost_scale_soft_start only takes a tuple of len 2."
387 )
388 for key, value in dictionary.items():
389 setattr(self, key, value)
391 def __setitem__(self, key: str, value: ParametersData) -> None:
392 """Implements the setitem method. Values can be set like so:
394 Examples:
395 >>> from encodermap import Parameters
396 >>> p = Parameters()
397 >>> p["center_cost_scale"] = 2.5
398 >>> p["center_cost_scale"]
399 2.5
401 """
402 if key == "cartesian_cost_scale_soft_start":
403 if value is not None and value != (None, None):
404 if len(value) != 2:
405 raise Exception(
406 "Parameter cartesian_cost_scale_soft_start only takes a tuple of len 2."
407 )
408 setattr(self, key, value)
410 def __getitem__(self, item: str) -> ParametersData:
411 """Implements the getitem method. Get items with instance[key]."""
412 return getattr(self, item)
414 def _string_summary(self) -> str:
415 """Creates a short summary of a parameter class. Additionally, adds info about non-standard values."""
416 check_defaults = dict(Parameters.defaults)
417 if self.__class__.__name__ == "ADCParameters":
418 check_defaults.update(ADCParameters.defaults)
419 diff_keys = list(
420 filter(
421 lambda x: not self.__dict__[x] == check_defaults[x],
422 check_defaults.keys(),
423 )
424 )
425 s = f"{self.__class__.__name__} class with Main path at {self.main_path}."
426 for d in diff_keys:
427 s += f"\nNon-standard value of {d}: {self.__dict__[d]}"
428 if diff_keys == []:
429 s += " All parameters are set to default values."
430 return s
432 def __str__(self) -> str:
433 return self._string_summary()
435 def __repr__(self) -> str:
436 return f"<{self._string_summary()} Object at 0x{id(self):02x}>"
439class Parameters(ParametersFramework):
440 """Class to hold Parameters for the Autoencoder
442 Parameters can be set via keyword args while instantiating the class, set as
443 instance attributes or read from disk. This class can write parameters
444 to disk in .yaml or .json format.
446 Attributes:
447 defaults (dict): Classvariable dict that holds the defaults
448 even when the current values might have changed.
449 main_path (str): Defines a main path where the parameters and other things might be stored.
450 n_neurons (list of int): List containing number of neurons for each layer up to the bottleneck layer.
451 For example [128, 128, 2] stands for an autoencoder with the following architecture
452 {i, 128, 128, 2, 128, 128, i} where i is the number of dimensions of the input data.
453 These are Input/Output Layers that are not trained.
454 activation_functions (list of str): List of activation function names as implemented in TensorFlow.
455 For example: "relu", "tanh", "sigmoid" or "" to use no activation function.
456 The encoder part of the network takes the activation functions
457 from the list starting with the second element. The decoder part of
458 the network takes the activation functions in reversed order starting with
459 the second element from the back. For example ["", "relu", "tanh", ""] would
460 result in an autoencoder with {"relu", "tanh", "", "tanh", "relu", ""} as the
461 sequence of activation functions.
462 periodicity (float): Defines the distance between periodic walls for the inputs.
463 For example 2pi for angular values in radians.
464 All periodic data processed by EncoderMap must be wrapped to one periodic window.
465 E.g. data with 2pi periodicity may contain values from -pi to pi or from 0 to 2pi.
466 Set the periodicity to float("inf") for non-periodic inputs.
467 learning_rate (float): Learning rate used by the optimizer.
468 n_steps (int): Number of training steps.
469 batch_size (int): Number of training points used in each training step
470 summary_step (int): A summary for TensorBoard is written every summary_step steps.
471 checkpoint_step (int): A checkpoint is written every checkpoint_step steps.
472 dist_sig_parameters (tuple of floats): Parameters for the sigmoid
473 functions applied to the high- and low-dimensional distances
474 in the following order (sig_h, a_h, b_h, sig_l, a_l, b_l)
475 distance_cost_scale (int): Adjusts how much the distance based metric is weighted in the cost function.
476 auto_cost_scale (int): Adjusts how much the autoencoding cost is weighted in the cost function.
477 auto_cost_variant (str): Defines how the auto cost is calculated. Must be one of:
478 * `mean_square`
479 * `mean_abs`
480 * `mean_norm`
481 center_cost_scale (float): Adjusts how much the centering cost is weighted in the cost function.
482 l2_reg_constant (float): Adjusts how much the L2 regularisation is weighted in the cost function.
483 gpu_memory_fraction (float): Specifies the fraction of gpu memory blocked.
484 If set to 0, memory is allocated as needed.
485 analysis_path (str): A path that can be used to store analysis results.
486 id (str): Can be any name for the run. Might be useful for example for
487 specific analysis for different data sets.
488 model_api (str): A string defining the API to be used to build the keras model.
489 Defaults to `sequential`. Possible strings are:
490 * `functional` will use keras' functional API.
491 * `sequential` will define a keras Model, containing two other models with the Sequential API.
492 These two models are encoder and decoder.
493 * `custom` will create a custom Model where even the layers are custom.
494 loss (str): A string defining the loss function.
495 Defaults to `emap_cost`. Possible losses are:
496 * `reconstruction_loss` will try to train output == input
497 * `mse`: Returns a mean squared error loss.
498 * `emap_cost` is the EncoderMap loss function. Depending on the class `Autoencoder`,
499 `EncoderMap`, `ADCAutoencoder`, different contributions are used for a combined loss.
500 Autoencoder uses auto_cost, reg_cost, center_cost.
501 EncoderMap class adds sigmoid_loss.
502 batched (bool): Whether the dataset is batched or not.
503 training (str): A string defining what kind of training is performed when autoencoder.train() is called.
504 * `auto` does a regular model.compile() and model.fit() procedure.
505 * `custom` uses gradient tape and calculates losses and gradients manually.
506 tensorboard (bool): Whether to print tensorboard information. Defaults to False.
507 seed (Union[int, None]): Fixes the state of all operations using random numbers. Defaults to None.
509 Examples:
510 >>> import encodermap as em
511 >>> parameters = em.Parameters()
512 >>> parameters.auto_cost_variant
513 mean_abs
514 >>> parameters.save(path='/path/to/dir')
515 /path/to/dir/parameters.json
516 >>> # alternative constructor
517 >>> new_params = em.Parameters.from_file('/path/to/dir/parameters.json')
518 >>> new_params.main_path
519 /path/to/dir
521 """
523 defaults = dict(
524 n_neurons=[128, 128, 2],
525 activation_functions=["", "tanh", "tanh", ""],
526 periodicity=2 * pi,
527 learning_rate=0.001,
528 n_steps=100000,
529 batch_size=256,
530 summary_step=10,
531 checkpoint_step=5000,
532 dist_sig_parameters=(4.5, 12, 6, 1, 2, 6),
533 distance_cost_scale=500,
534 auto_cost_scale=1,
535 auto_cost_variant="mean_abs",
536 center_cost_scale=0.0001,
537 l2_reg_constant=0.001,
538 gpu_memory_fraction=0,
539 analysis_path="",
540 id="",
541 model_api="sequential",
542 loss="emap_cost",
543 training="auto",
544 batched=True,
545 tensorboard=False,
546 seed=None,
547 )
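# Editor's sketch of the network layout implied by the defaults above (derived
# from the attribute descriptions in the class docstring; the model itself is
# built elsewhere in EncoderMap):
#
#     n_neurons            = [128, 128, 2]
#     activation_functions = ["", "tanh", "tanh", ""]
#     encoder: input(i) -> 128 ("tanh") -> 128 ("tanh") -> 2 ("")
#     decoder: 2 -> 128 ("tanh") -> 128 ("tanh") -> output(i) ("")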
549 def __init__(self, **kwargs: ParametersData) -> None:
550 """Instantiate the Parameters class
552 Takes a dict as input and overwrites the class defaults. The dict is directly
553 stored as an attribute and can be accessed via instance attributes.
555 Args:
556 **kwargs (dict): Dict containing values. If unknown keys are passed, they will be dropped.
558 """
559 # set class variable defaults to be instance variable
560 if "defaults" in kwargs: 560 ↛ 561line 560 didn't jump to line 561, because the condition on line 560 was never true
561 kwargs.pop("defaults", None)
562 super().__init__(self.defaults, **kwargs)
564 @classmethod
565 def defaults_description(cls) -> str:
566 """str: A string that contains tabulated default parameter values."""
567 doc = cls.__doc__.split("Attributes:")[1].split("Examples:")[0]
568 doc = (
569 "\n".join(map(lambda x: x.lstrip(" "), doc.splitlines()))
570 .lstrip("\n")
571 .rstrip("\n\n")
572 .splitlines()
573 )
574 descr_dict = {}
575 key = doc[0].split("):")[0].split()[0]
576 descr = doc[0].split("): ")[-1]
577 for line in doc[1:]:
578 if "):" not in line:
579 descr = descr + " " + line
580 else:
581 descr_dict[key] = descr
582 key = line.split("):")[0].split()[0]
583 descr = line.split("): ")[-1]
584 else:
585 descr_dict[key] = descr
587 out = []
588 for key, value in cls.defaults.items():
589 out.append(
590 {
591 "Parameter": key,
592 "Default Value": value,
593 "Description": "\n".join(wrap(descr_dict[key], width=50)),
594 }
595 )
596 return printTable(out, sep="\n")
599class ADCParameters(ParametersFramework):
600 """This is the parameter object for the AngleDihedralCartesianEncoder.
601 It holds all the parameters that the Parameters object includes, plus the following attributes:
603 Attributes:
604 cartesian_pwd_start (int): Index of the first atom to use for the pairwise distance calculation.
605 cartesian_pwd_stop (int): Index of the last atom to use for the pairwise distance calculation.
606 cartesian_pwd_step (int): Step for the calculation of pairwise distances. E.g. for a chain of atoms
607 N-C_a-C-N-C_a-C... cartesian_pwd_start=1 and cartesian_pwd_step=3 will result in using all C-alpha atoms for the
608 pairwise distance calculation.
609 use_backbone_angles (bool): Defines whether backbone bond angles should be learned (True) or whether mean
610 values should be used to generate conformations (False).
611 use_sidechains (bool): Whether sidechain dihedrals should be passed through the autoencoder.
612 angle_cost_scale (int): Adjusts how much the angle cost is weighted in the cost function.
613 angle_cost_variant (str): Defines how the angle cost is calculated. Must be one of:
614 * "mean_square"
615 * "mean_abs"
616 * "mean_norm".
617 angle_cost_reference (int): Can be used to normalize the angle cost with the cost of same reference model (dummy).
618 dihedral_cost_scale (int): Adjusts how much the dihedral cost is weighted in the cost function.
619 dihedral_cost_variant (str): Defines how the dihedral cost is calculated. Must be one of:
620 * "mean_square"
621 * "mean_abs"
622 * "mean_norm".
623 dihedral_cost_reference (int): Can be used to normalize the dihedral cost with the cost of same reference model (dummy).
624 side_dihedral_cost_scale (int): Adjusts how much the side dihedral cost is weighted in the cost function.
625 side_dihedral_cost_variant (str): Defines how the side dihedral cost is calculated. Must be one of:
626 * "mean_square"
627 * "mean_abs"
628 * "mean_norm".
629 side_dihedral_cost_reference (int): Can be used to normalize the side dihedral cost with the cost of same reference model (dummy).
630 cartesian_cost_scale (int): Adjusts how much the cartesian cost is weighted in the cost function.
631 cartesian_cost_scale_soft_start (tuple): Allows the cartesian cost to be turned on slowly. Must be a tuple of
632 (start, end) or (None, None). If start and end are given, cartesian_cost_scale will be increased linearly in the
633 given range.
634 cartesian_cost_variant (str): Defines how the cartesian cost is calculated. Must be one of:
635 * "mean_square"
636 * "mean_abs"
637 * "mean_norm".
638 cartesian_cost_reference (int): Can be used to normalize the cartesian cost with the cost of same reference model (dummy).
639 cartesian_dist_sig_parameters (tuple of floats): Parameters for the sigmoid functions applied to the high- and low-dimensional
640 distances in the following order (sig_h, a_h, b_h, sig_l, a_l, b_l).
641 cartesian_distance_cost_scale (int): Adjusts how much the cartesian distance cost is weighted in the cost function.
643 Examples:
644 >>> import encodermap as em
645 >>> parameters = em.ADCParameters()
646 >>> parameters.auto_cost_variant
647 mean_abs
648 >>> parameters.save(path='/path/to/dir')
649 /path/to/dir/parameters.json
650 >>> # alternative constructor
651 >>> new_params = em.ADCParameters.from_file('/path/to/dir/parameters.json')
652 >>> new_params.main_path
653 /path/to/dir
655 """
657 defaults = dict(
658 Parameters.defaults,
659 **dict(
660 model_api="functional", # overwrite main class. Functional allows multiple in and outputs.
661 cartesian_pwd_start=None,
662 cartesian_pwd_stop=None,
663 cartesian_pwd_step=None,
664 use_backbone_angles=False,
665 use_sidechains=False,
666 angle_cost_scale=0,
667 angle_cost_variant="mean_abs",
668 angle_cost_reference=1,
669 dihedral_cost_scale=1,
670 dihedral_cost_variant="mean_abs",
671 dihedral_cost_reference=1,
672 side_dihedral_cost_scale=0.5,
673 side_dihedral_cost_variant="mean_abs",
674 side_dihedral_cost_reference=1,
675 cartesian_cost_scale=1,
676 cartesian_cost_scale_soft_start=(None, None), # begin, end
677 cartesian_cost_variant="mean_abs",
678 cartesian_cost_reference=1,
679 cartesian_dist_sig_parameters=Parameters.defaults["dist_sig_parameters"],
680 cartesian_distance_cost_scale=1,
681 auto_cost_scale=None,
682 distance_cost_scale=None,
683 ),
684 )
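# Editor's sketch of the pairwise-distance selection described in the docstring
# above (indices illustrative): for a backbone chain N-CA-C-N-CA-C-...,
# cartesian_pwd_start=1 with cartesian_pwd_step=3 corresponds to the slice
# [1::3], i.e. all C-alpha atoms.
#
#     >>> from encodermap import ADCParameters
#     >>> p = ADCParameters(cartesian_pwd_start=1, cartesian_pwd_step=3,
#     ...                   cartesian_cost_scale_soft_start=(1000, 5000))
#     >>> p.use_backbone_angles
#     False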
686 def __init__(self, **kwargs: ParametersData) -> None:
687 """Instantiate the ADCParameters class
689 Takes a dict as input and overwrites the class defaults. The dict is directly
690 stored as an attribute and can be accessed via instance attributes.
692 Args:
693 **kwargs (dict): Dict containing values. If unknown keys are passed, they will be dropped.
695 """
696 if "cartesian_cost_scale_soft_start" in kwargs:
697 if kwargs["cartesian_cost_scale_soft_start"] is not None and kwargs[
698 "cartesian_cost_scale_soft_start"
699 ] != (None, None):
700 if len(kwargs["cartesian_cost_scale_soft_start"]) != 2:
701 raise Exception(
702 "Parameter cartesian_cost_scale_soft_start only takes a tuple of len 2."
703 )
704 # set class variable defaults to be instance variable
705 if "defaults" in kwargs: 705 ↛ 706line 705 didn't jump to line 706, because the condition on line 705 was never true
706 kwargs.pop("defaults", None)
707 super().__init__(self.defaults, **kwargs)
709 @classmethod
710 def defaults_description(cls) -> str:
711 """str: A string that contains tabulated default parameter values."""
712 doc_p = Parameters.__doc__.split("Attributes:")[1].split("Examples:")[0]
713 doc_p = (
714 "\n".join(map(lambda x: x.lstrip(" "), doc_p.splitlines()))
715 .lstrip("\n")
716 .rstrip("\n\n")
717 .splitlines()
718 )
719 doc = cls.__doc__.split("Attributes:")[1].split("Examples:")[0]
720 doc = (
721 "\n".join(map(lambda x: x.lstrip(" "), doc.splitlines()))
722 .lstrip("\n")
723 .rstrip("\n\n")
724 .splitlines()
725 )
726 doc = doc_p + doc
727 descr_dict = {}
728 key = doc[0].split("):")[0].split()[0]
729 descr = doc[0].split("): ")[-1]
730 for line in doc[1:]:
731 if "):" not in line:
732 descr = descr + " " + line
733 else:
734 descr_dict[key] = descr
735 key = line.split("):")[0].split()[0]
736 descr = line.split("): ")[-1]
737 else:
738 descr_dict[key] = descr
740 out = []
741 for key, value in cls.defaults.items():
742 out.append(
743 {
744 "Parameter": key,
745 "Default Value": value,
746 "Description": "\n".join(wrap(descr_dict[key], width=50)),
747 }
748 )
749 return printTable(out, sep="\n")