dials.refine_bravais_settings

Introduction

This program takes as input the output of dials.index, i.e. the experiments.json and indexed.pickle files. By default, full refinement of the crystal and experimental geometry parameters is performed in all Bravais settings that are consistent with the input primitive unit cell. A table is then printed listing, for each potential Bravais setting, the metric fit (a measure of the deviation from the triclinic cell), the root-mean-square deviations (rmsd), in mm, between the observed and predicted spot centroids, the refined unit cell parameters, and the change of basis operator that transforms the triclinic cell to that setting.

The program also generates a .json file for each Bravais setting, e.g. bravais_setting_1.json, which is equivalent to the input experiments.json, but with the crystal model refined in the chosen Bravais setting. These bravais_setting_*.json files are suitable as input to dials.refine or dials.integrate, although the indexed.pickle file will need to be re-indexed using dials.reindex if the change of basis operator (cb_op) for the chosen Bravais setting is not the identity operator (a,b,c).
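
For example, supposing setting number 5 were chosen and the table reported a change of basis operator of b,c,a (both values are purely illustrative here), the reflections could be reindexed and processing continued along these lines, assuming the default output file name of dials.reindex:

dials.reindex indexed.pickle change_of_basis_op=b,c,a

dials.refine bravais_setting_5.json reindexed_reflections.pickle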

Examples:

dials.refine_bravais_settings experiments.json indexed.pickle

dials.refine_bravais_settings experiments.json indexed.pickle nproc=4
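
If dials.index found more than one lattice, the crystal_id parameter selects which crystal model to analyse, while lepage_max_delta adjusts the maximum Le Page delta used when generating candidate Bravais settings. An illustrative invocation (the parameter values here are arbitrary, not a recommendation):

dials.refine_bravais_settings experiments.json indexed.pickle crystal_id=0 lepage_max_delta=2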

Basic parameters

lepage_max_delta = 5
verbosity = 0
nproc = Auto
crystal_id = None
normalise = False
normalise_bins = 0
cc_n_bins = None
output {
  directory = "."
  log = dials.refine_bravais_settings.log
  debug_log = dials.refine_bravais_settings.debug.log
  prefix = None
}
refinement {
  parameterisation {
    scan_varying = False
    beam {
      fix = all *in_spindle_plane out_spindle_plane *wavelength
    }
    crystal {
      fix = all cell orientation
    }
    detector {
      fix = all position orientation
    }
    goniometer {
      fix = *all in_beam_plane out_beam_plane
    }
  }
  reflections {
    outlier {
      algorithm = null *auto mcd tukey sauter_poon
    }
  }
}
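
Any of the parameters listed above can be set on the command line by giving the parameter's path in the hierarchy. For instance, to switch the outlier rejection algorithm and fix all beam parameters (an illustrative combination, not a recommendation):

dials.refine_bravais_settings experiments.json indexed.pickle refinement.reflections.outlier.algorithm=tukey refinement.parameterisation.beam.fix=all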

Full parameter definitions

lepage_max_delta = 5
  .type = float(allow_none=True)
verbosity = 0
  .type = int(value_min=0, allow_none=True)
nproc = Auto
  .type = int(value_min=1, allow_none=True)
crystal_id = None
  .type = int(value_min=0, allow_none=True)
normalise = False
  .help = "Normalise intensities before calculating correlation coefficients."
  .type = bool
normalise_bins = 0
  .help = "Number of resolution bins for normalisation"
  .type = int(allow_none=True)
cc_n_bins = None
  .help = "Number of resolution bins to use for calculation of correlation"
          "coefficients"
  .type = int(value_min=1, allow_none=True)
output {
  directory = "."
    .type = path
  log = dials.refine_bravais_settings.log
    .type = path
  debug_log = dials.refine_bravais_settings.debug.log
    .type = path
  prefix = None
    .type = str
}
refinement
  .help = "Parameters to configure the refinement"
{
  mp
    .expert_level = 2
  {
    nproc = 1
      .help = "The number of processes to use. Not all choices of refinement"
              "engine support nproc > 1. Where multiprocessing is possible, it"
              "is helpful only in certain circumstances, so this is not"
              "recommended for typical use."
      .type = int(value_min=1, allow_none=True)
  }
  verbosity = 0
    .help = "verbosity level"
    .type = int(value_min=0, allow_none=True)
    .expert_level = 1
  parameterisation
    .help = "Parameters to control the parameterisation of experimental models"
  {
    auto_reduction
      .help = "determine behaviour when there are too few reflections to"
              "reasonably produce a full parameterisation of the experiment"
              "list"
      .expert_level = 1
    {
      min_nref_per_parameter = 5
        .help = "the smallest number of reflections per parameter for a model"
                "parameterisation below which the parameterisation will not be"
                "made in full, but the action described below will be"
                "triggered."
        .type = int(value_min=1, allow_none=True)
      action = *fail fix remove
        .help = "action to take if there are too few reflections across the"
                "experiments related to a particular model parameterisation."
                "If fail, an exception will be raised and refinement will not"
                "proceed. If fix, refinement will continue but with the"
                "parameters relating to that model remaining fixed at their"
                "initial values. If remove, parameters relating to that model"
                "will be fixed, and in addition all reflections related to"
                "that parameterisation will be removed. This will therefore"
                "remove these reflections from other parameterisations of the"
                "global model too. For example, if a crystal model could not"
                "be parameterised it will be excised completely and not"
                "contribute to the joint refinement of the detector and beam."
                "In the fix mode, reflections emanating from that crystal will"
                "still form residuals and will contribute to detector and beam"
                "refinement."
        .type = choice
      detector_reduce = False
        .help = "Special case designed for detector metrology refinement"
                "(particularly of the CSPAD). See detector_reduce_list for"
                "details."
        .type = bool
        .expert_level = 2
      detector_reduce_list = Dist Tau2 Tau3
        .help = "Partial names to match to detector parameters to try fixing."
                "If there are still not enough parameters for refinement after"
                "fixing these, then fail. This is to ensure that metrology"
                "refinement never completes if it is not able to refine some"
                "panels. The default is to try fixing the distance as well as"
                "Tau2 and Tau3 rotations of detector panel, leaving the"
                "in-plane shifts and the rotation around the detector normal"
                "for refinement. groups only."
        .type = strings
        .expert_level = 2
    }
    scan_varying = False
      .help = "Allow models that are not forced to be static to vary during"
              "the scan"
      .short_caption = "Scan-varying refinement"
      .type = bool
    compose_model_per = image *block
      .help = "For scan-varying parameterisations, compose a new model either"
              "every image or within blocks of a width specified in the"
              "reflections parameters. When this block width is larger than"
              "the image width the result is faster, with a trade-off in"
              "accuracy"
      .type = choice
      .expert_level = 1
    block_width = 1.0
      .help = "Width of a reflection 'block' (in degrees) determining how"
              "fine- grained the model used for scan-varying prediction during"
              "refinement is. Currently only has any effect if the crystal"
              "parameterisation is set to use compose_model_per=block"
      .type = float(value_min=0, allow_none=True)
      .expert_level = 1
    debug_centroid_analysis = False
      .help = "Set True to write out a file containing the reflections used"
              "for centroid analysis for automatic setting of the "
              "scan-varying interval width. This can then be analysed with"
              "dev.dials.plot_centroid_analysis"
      .type = bool
      .expert_level = 2
    beam
      .help = "beam parameters"
    {
      fix = all *in_spindle_plane out_spindle_plane *wavelength
        .help = "Whether to fix beam parameters. By default, in_spindle_plane"
                "is selected, and one of the two parameters is fixed. If a"
                "goniometer is present this leads to the beam orientation"
                "being restricted to a direction in the initial spindle-beam"
                "plane. Wavelength is also fixed by default, to allow"
                "refinement of the unit cell volume."
        .short_caption = "Fix beam parameters"
        .type = choice(multi=True)
      fix_list = None
        .help = "Fix specified parameters by a list of 0-based indices or"
                "partial names to match"
        .type = strings
        .expert_level = 1
      constraints
        .help = "Parameter equal shift constraints to use in refinement."
        .multiple = True
        .expert_level = 2
      {
        id = None
          .help = "Select only the specified experiments when looking up which"
                  "parameterisations to apply the constraint to. If an"
                  "identified parameterisation affects multiple experiments"
                  "then the index of any one of those experiments suffices to"
                  "identify that parameterisation. If None (the default) then"
                  "constraints will be applied to all parameterisations of"
                  "this type."
          .type = ints(value_min=0)
        parameter = None
          .help = "Identify which parameter of each parameterisation to"
                  "constrain by a (partial) parameter name to match. Model"
                  "name prefixes such as 'Detector1' will be ignored as"
                  "parameterisations are already identified by experiment id"
          .type = str
      }
      force_static = True
        .help = "Force a static parameterisation for the beam when doing"
                "scan-varying refinement"
        .type = bool
        .expert_level = 1
      smoother
        .help = "Options that affect scan-varying parameterisation"
        .expert_level = 1
      {
        interval_width_degrees = 36.0
          .help = "Width of scan between checkpoints in degrees. Can be set to"
                  "Auto."
          .type = float(value_min=0, allow_none=True)
        absolute_num_intervals = None
          .help = "Number of intervals between checkpoints if scan_varying"
                  "refinement is requested. If set, this overrides"
                  "interval_width_degrees"
          .type = int(value_min=1, allow_none=True)
      }
    }
    crystal
      .help = "crystal parameters"
    {
      fix = all cell orientation
        .help = "Fix crystal parameters"
        .short_caption = "Fix crystal parameters"
        .type = choice
      unit_cell
        .expert_level = 1
      {
        fix_list = None
          .help = "Fix specified parameters by a list of 0-based indices or"
                  "partial names to match"
          .type = strings
          .expert_level = 1
        restraints
          .help = "Least squares unit cell restraints to use in refinement."
          .expert_level = 1
        {
          tie_to_target
            .multiple = True
          {
            values = None
              .help = "Target unit cell parameters for the restraint for this"
                      "parameterisation"
              .type = floats(size=6)
            sigmas = None
              .help = "The unit cell target values are associated with sigmas"
                      "which are used to determine the weight of each"
                      "restraint. A sigma of zero will remove the restraint at"
                      "that position. If symmetry constrains two cell"
                      "dimensions to be equal then only the smaller of the two"
                      "sigmas will be kept"
              .type = floats(size=6, value_min=0)
            id = None
              .help = "Select only the specified experiments when looking up"
                      "which parameterisations to apply these restraints to."
                      "If an identified parameterisation affects multiple"
                      "experiments then the index of any one of those"
                      "experiments suffices to restrain that parameterisation."
                      "If None (the default) then the restraints will be"
                      "applied to all experiments."
              .type = ints(value_min=0)
          }
          tie_to_group
            .multiple = True
          {
            target = *mean low_memory_mean median
              .help = "Function to tie group parameter values to"
              .type = choice
            sigmas = None
              .help = "The unit cell parameters are associated with sigmas"
                      "which are used to determine the weight of each"
                      "restraint. A sigma of zero will remove the restraint at"
                      "that position."
              .type = floats(size=6, value_min=0)
            id = None
              .help = "Select only the specified experiments when looking up"
                      "which  parameterisations to apply these restraints to."
                      "For every parameterisation that requires a restraint at"
                      "least one experiment index must be supplied. If None"
                      "(the default) the restraints will be applied to all"
                      "experiments."
              .type = ints(value_min=0)
          }
        }
        constraints
          .help = "Parameter equal shift constraints to use in refinement."
          .multiple = True
          .expert_level = 2
        {
          id = None
            .help = "Select only the specified experiments when looking up"
                    "which parameterisations to apply the constraint to. If an"
                    "identified parameterisation affects multiple experiments"
                    "then the index of any one of those experiments suffices"
                    "to identify that parameterisation. If None (the default)"
                    "then constraints will be applied to all parameterisations"
                    "of this type."
            .type = ints(value_min=0)
          parameter = None
            .help = "Identify which parameter of each parameterisation to"
                    "constrain by a (partial) parameter name to match. Model"
                    "name prefixes such as 'Detector1' will be ignored as"
                    "parameterisations are already identified by experiment id"
            .type = str
        }
        force_static = False
          .help = "Force a static parameterisation for the crystal unit cell"
                  "when doing scan-varying refinement"
          .type = bool
          .expert_level = 1
        set_scan_varying_errors = False
          .help = "If scan-varying refinement is done, and if the estimated"
                  "covariance of the B matrix has been calculated by the"
                  "minimiser, choose whether to return this to the model or"
                  "not. The default is not to, in order to keep the file size"
                  "of the serialized model small."
          .type = bool
        smoother
          .help = "Options that affect scan-varying parameterisation"
          .expert_level = 1
        {
          interval_width_degrees = 36.0
            .help = "Width of scan between checkpoints in degrees. Can be set"
                    "to Auto."
            .type = float(value_min=0, allow_none=True)
          absolute_num_intervals = None
            .help = "Number of intervals between checkpoints if scan_varying"
                    "refinement is requested. If set, this overrides"
                    "interval_width_degrees"
            .type = int(value_min=1, allow_none=True)
        }
      }
      orientation
        .expert_level = 1
      {
        fix_list = None
          .help = "Fix specified parameters by a list of 0-based indices or"
                  "partial names to match"
          .type = strings
          .expert_level = 1
        constraints
          .help = "Parameter equal shift constraints to use in refinement."
          .multiple = True
          .expert_level = 2
        {
          id = None
            .help = "Select only the specified experiments when looking up"
                    "which parameterisations to apply the constraint to. If an"
                    "identified parameterisation affects multiple experiments"
                    "then the index of any one of those experiments suffices"
                    "to identify that parameterisation. If None (the default)"
                    "then constraints will be applied to all parameterisations"
                    "of this type."
            .type = ints(value_min=0)
          parameter = None
            .help = "Identify which parameter of each parameterisation to"
                    "constrain by a (partial) parameter name to match. Model"
                    "name prefixes such as 'Detector1' will be ignored as"
                    "parameterisations are already identified by experiment id"
            .type = str
        }
        force_static = False
          .help = "Force a static parameterisation for the crystal orientation"
                  "when doing scan-varying refinement"
          .type = bool
          .expert_level = 1
        smoother
          .help = "Options that affect scan-varying parameterisation"
          .expert_level = 1
        {
          interval_width_degrees = 36.0
            .help = "Width of scan between checkpoints in degrees. Can be set"
                    "to Auto."
            .type = float(value_min=0, allow_none=True)
          absolute_num_intervals = None
            .help = "Number of intervals between checkpoints if scan_varying"
                    "refinement is requested. If set, this overrides"
                    "interval_width_degrees"
            .type = int(value_min=1, allow_none=True)
        }
      }
    }
    detector
      .help = "detector parameters"
    {
      panels = *automatic single multiple hierarchical
        .help = "Select appropriate detector parameterisation. Both the single"
                "and multiple panel detector options treat the whole detector"
                "as a rigid body. The hierarchical parameterisation treats"
                "groups of panels as separate rigid bodies."
        .type = choice
        .expert_level = 1
      hierarchy_level = 0
        .help = "Level of the detector hierarchy (starting from the root at 0)"
                "at which to determine panel groups to parameterise"
                "independently"
        .type = int(value_min=0, allow_none=True)
        .expert_level = 1
      fix = all position orientation
        .help = "Fix detector parameters. The translational parameters"
                "(position) may be set separately to the orientation."
        .short_caption = "Fix detector parameters"
        .type = choice
      fix_list = None
        .help = "Fix specified parameters by a list of 0-based indices or"
                "partial names to match"
        .type = strings
        .expert_level = 1
      constraints
        .help = "Parameter equal shift constraints to use in refinement."
        .multiple = True
        .expert_level = 2
      {
        id = None
          .help = "Select only the specified experiments when looking up which"
                  "parameterisations to apply the constraint to. If an"
                  "identified parameterisation affects multiple experiments"
                  "then the index of any one of those experiments suffices to"
                  "identify that parameterisation. If None (the default) then"
                  "constraints will be applied to all parameterisations of"
                  "this type."
          .type = ints(value_min=0)
        parameter = None
          .help = "Identify which parameter of each parameterisation to"
                  "constrain by a (partial) parameter name to match. Model"
                  "name prefixes such as 'Detector1' will be ignored as"
                  "parameterisations are already identified by experiment id"
          .type = str
      }
      force_static = True
        .help = "Force a static parameterisation for the detector when doing"
                "scan-varying refinement"
        .type = bool
        .expert_level = 1
      smoother
        .help = "Options that affect scan-varying parameterisation"
        .expert_level = 1
      {
        interval_width_degrees = 36.0
          .help = "Width of scan between checkpoints in degrees. Can be set to"
                  "Auto."
          .type = float(value_min=0, allow_none=True)
        absolute_num_intervals = None
          .help = "Number of intervals between checkpoints if scan_varying"
                  "refinement is requested. If set, this overrides"
                  "interval_width_degrees"
          .type = int(value_min=1, allow_none=True)
      }
    }
    goniometer
      .help = "goniometer setting matrix parameters"
    {
      fix = *all in_beam_plane out_beam_plane
        .help = "Whether to fix goniometer parameters. By default, fix all."
                "Alternatively the setting matrix can be constrained to allow"
                "rotation only within the spindle-beam plane or to allow"
                "rotation only around an axis that lies in that plane. Set to"
                "None to refine the in two orthogonal directions."
        .short_caption = "Fix goniometer parameters"
        .type = choice(multi=True)
      fix_list = None
        .help = "Fix specified parameters by a list of 0-based indices or"
                "partial names to match"
        .type = strings
        .expert_level = 1
      constraints
        .help = "Parameter equal shift constraints to use in refinement."
        .multiple = True
        .expert_level = 2
      {
        id = None
          .help = "Select only the specified experiments when looking up which"
                  "parameterisations to apply the constraint to. If an"
                  "identified parameterisation affects multiple experiments"
                  "then the index of any one of those experiments suffices to"
                  "identify that parameterisation. If None (the default) then"
                  "constraints will be applied to all parameterisations of"
                  "this type."
          .type = ints(value_min=0)
        parameter = None
          .help = "Identify which parameter of each parameterisation to"
                  "constrain by a (partial) parameter name to match. Model"
                  "name prefixes such as 'Detector1' will be ignored as"
                  "parameterisations are already identified by experiment id"
          .type = str
      }
      force_static = True
        .help = "Force a static parameterisation for the goniometer when doing"
                "scan-varying refinement"
        .type = bool
        .expert_level = 1
      smoother
        .help = "Options that affect scan-varying parameterisation"
        .expert_level = 1
      {
        interval_width_degrees = 36.0
          .help = "Width of scan between checkpoints in degrees. Can be set to"
                  "Auto."
          .type = float(value_min=0, allow_none=True)
        absolute_num_intervals = None
          .help = "Number of intervals between checkpoints if scan_varying"
                  "refinement is requested. If set, this overrides"
                  "interval_width_degrees"
          .type = int(value_min=1, allow_none=True)
      }
    }
    sparse = Auto
      .help = "Calculate gradients using sparse data structures."
      .type = bool
      .expert_level = 1
    treat_single_image_as_still = False
      .help = "Set this to True to treat a single image scan with a non zero"
              "oscillation width as a still"
      .type = bool
      .expert_level = 1
    spherical_relp_model = False
      .help = "For stills refinement, set true to use the spherical relp model"
              "for prediction and gradients."
      .type = bool
      .expert_level = 1
  }
  refinery
    .help = "Parameters to configure the refinery"
    .expert_level = 1
  {
    engine = SimpleLBFGS LBFGScurvs GaussNewton *LevMar SparseLevMar
      .help = "The minimisation engine to use"
      .type = choice
    max_iterations = None
      .help = "Maximum number of iterations in refinement before termination."
              "None implies the engine supplies its own default."
      .type = int(value_min=1, allow_none=True)
    log = None
      .help = "Filename for an optional log that a minimisation engine may use"
              "to write additional information"
      .type = path
    journal
      .help = "Extra items to track in the refinement history"
    {
      track_step = False
        .help = "Record parameter shifts history in the refinement journal, if"
                "the engine supports it."
        .type = bool
      track_gradient = False
        .help = "Record parameter gradients history in the refinement journal,"
                "if the engine supports it."
        .type = bool
      track_parameter_correlation = False
        .help = "Record correlation matrix between columns of the Jacobian for"
                "each step of refinement."
        .type = bool
      track_condition_number = False
        .help = "Record condition number of the Jacobian for each step of "
                "refinement."
        .type = bool
      track_out_of_sample_rmsd = False
        .help = "Record RMSDs calculated using the refined experiments with"
                "reflections not used in refinement at each step. Only valid"
                "if a subset of input reflections was taken for refinement"
        .type = bool
    }
  }
  target
    .help = "Parameters to configure the target function"
    .expert_level = 1
  {
    rmsd_cutoff = *fraction_of_bin_size absolute
      .help = "Method to choose rmsd cutoffs. This is currently either as a"
              "fraction of the discrete units of the spot positional data,"
              "i.e. (pixel width, pixel height, image thickness in phi), or a"
              "tuple of absolute values to use as the cutoffs"
      .type = choice
    bin_size_fraction = 0.0
      .help = "Set this to a fractional value, say 0.2, to make a cut off in"
              "the natural discrete units of positional data, viz., (pixel"
              "width, pixel height, image thickness in phi). This would then"
              "determine when the RMSD target is achieved. Only used if"
              "rmsd_cutoff = fraction_of_bin_size."
      .type = float(value_min=0, allow_none=True)
    absolute_cutoffs = None
      .help = "Absolute Values for the RMSD target achieved cutoffs in X, Y"
              "and Phi. The units are (mm, mm, rad)."
      .type = floats(size=3, value_min=0)
    gradient_calculation_blocksize = None
      .help = "Maximum number of reflections to use for gradient calculation."
              "If there are more reflections than this in the manager then the"
              "minimiser must do the full calculation in blocks."
      .type = int(value_min=1, allow_none=True)
  }
  reflections
    .help = "Parameters used by the reflection manager"
  {
    reflections_per_degree = 100
      .help = "The number of centroids per degree of the sweep to use in"
              "refinement. The default (Auto) uses all reflections unless the"
              "dataset is wider than a single turn. Then the number of"
              "reflections may be reduced until a minimum of 100 per degree of"
              "the sweep is reached to speed up calculations. Set this to None"
              "to force use all of suitable reflections."
      .type = float(value_min=0, allow_none=True)
      .expert_level = 1
    minimum_sample_size = 1000
      .help = "cutoff that determines whether subsetting of the input"
              "reflection list is done"
      .type = int(allow_none=True)
      .expert_level = 1
    maximum_sample_size = None
      .help = "The maximum number of reflections to use in refinement."
              "Overrides reflections_per_degree if that produces a larger"
              "sample size."
      .type = int(value_min=1, allow_none=True)
      .expert_level = 1
    random_seed = 42
      .help = "Random seed to use when sampling to create a working set of"
              "reflections. May be int or None."
      .type = int(allow_none=True)
      .expert_level = 1
    close_to_spindle_cutoff = 0.02
      .help = "The inclusion criterion currently uses the volume of the"
              "parallelepiped formed by the spindle axis, the incident beam"
              "and the scattered beam. If this is lower than some value then"
              "the reflection is excluded from refinement. In detector space,"
              "these are the reflections located close to the rotation axis."
      .type = float(value_min=0, allow_none=True)
      .expert_level = 1
    trim_scan_edges = 0.0
      .help = "Reflections within this value in degrees from the centre of the"
              "first or last image of the scan will be removed before"
              "refinement, unless doing so would result in too few remaining"
              "reflections. Reflections that are truncated at the scan edges"
              "have poorly-determined centroids and can bias the refined model"
              "if they are included."
      .type = float(value_min=0, value_max=1, allow_none=True)
      .expert_level = 1
    weighting_strategy
      .help = "Parameters to configure weighting strategy overrides"
      .expert_level = 1
    {
      override = statistical stills constant external_deltapsi
        .help = "selection of a strategy to override default weighting"
                "behaviour"
        .type = choice
      delpsi_constant = 1000000
        .help = "used by the stills strategy to choose absolute weight value"
                "for the angular distance from Ewald sphere term of the target"
                "function, whilst the X and Y parts use statistical weights"
        .type = float(value_min=0, allow_none=True)
      constants = 1.0 1.0 1.0
        .help = "constant weights for three parts of the target function,"
                "whether the case is for stills or scans. The default gives"
                "unit weighting."
        .type = floats(size=3, value_min=0)
    }
    outlier
      .help = "Outlier rejection after initial reflection prediction."
    {
      algorithm = null *auto mcd tukey sauter_poon
        .help = "Outlier rejection algorithm. If auto is selected, the"
                "algorithm is chosen automatically"
        .short_caption = "Outlier rejection algorithm"
        .type = choice
      minimum_number_of_reflections = 20
        .help = "The minimum number of input observations per outlier"
                "rejection job below which all reflections in the job will be"
                "rejected as potential outliers."
        .type = int(value_min=0, allow_none=True)
        .expert_level = 1
      separate_experiments = True
        .help = "If true, outlier rejection will be performed on each"
                "experiment separately. Otherwise, the data from all"
                "experiments will be combined for outlier rejection."
        .type = bool
        .expert_level = 1
      separate_panels = Auto
        .help = "If true, outlier rejection will be performed separately for"
                "each panel of a multi-panel detector model. Otherwise data"
                "from across all panels will be combined for outlier"
                "rejection."
        .type = bool
        .expert_level = 1
      separate_blocks = True
        .help = "If true, for scans outlier rejection will be performed"
                "separately in equal-width blocks of phi, controlled by the"
                "parameter outlier.block_width."
        .type = bool
        .expert_level = 1
      block_width = Auto
        .help = "If separate_blocks, a scan will be divided into equal-sized"
                "blocks with width (in degrees) close to this value for"
                "outlier rejection. If Auto, a width of at least 18 degrees"
                "will be determined, such that each block contains enough"
                "reflections to perform outlier rejection."
        .type = float(value_min=1, allow_none=True)
        .expert_level = 1
      tukey
        .help = "Options for the tukey outlier rejector"
        .expert_level = 1
      {
        iqr_multiplier = 1.5
          .help = "The IQR multiplier used to detect outliers. A value of 1.5"
                  "gives Tukey's rule for outlier detection"
          .type = float(value_min=0, allow_none=True)
      }
      mcd
        .help = "Options for the mcd outlier rejector, which uses an algorithm"
                "based on FAST-MCD by Rousseeuw and van Driessen. See"
                "doi.org/10.1080/00401706.1999.10485670."
        .expert_level = 1
      {
        alpha = 0.5
          .help = "Decimal fraction controlling the size of subsets over which"
                  "the covariance matrix determinant is minimised."
          .type = float(value_min=0, value_max=1, allow_none=True)
        max_n_groups = 5
          .help = "The maximum number of groups to split the dataset into if"
                  "the dataset is 'large' (more observations than twice the"
                  "min_group_size)."
          .type = int(value_min=1, allow_none=True)
        min_group_size = 300
          .help = "The smallest sub-dataset size when splitting the dataset"
                  "into a number of groups, maximally max_n_groups."
          .type = int(value_min=100, allow_none=True)
        n_trials = 500
          .help = "The number of samples used for initial estimates to seed"
                  "the search within each sub-dataset."
          .type = int(value_min=1, allow_none=True)
        k1 = 2
          .help = "The number of concentration steps to take after initial"
                  "estimates."
          .type = int(value_min=1, allow_none=True)
        k2 = 2
          .help = "If the dataset is 'large', the number of concentration"
                  "steps to take after applying the best subset estimates to"
                  "the merged group."
          .type = int(value_min=1, allow_none=True)
        k3 = 100
          .help = "If the dataset is 'small', the number of concentration"
                  "steps to take after selecting the best of the initial"
                  "estimates, applied to the whole dataset."
          .type = int(value_min=1, allow_none=True)
        threshold_probability = 0.975
          .help = "Quantile probability from the Chi-squared distribution with"
                  "number of degrees of freedom equal to the number of"
                  "dimensions of the data data (e.g. 3 for X, Y and Phi"
                  "residuals). Observations whose robust Mahalanobis distances"
                  "are larger than the obtained quantile will be flagged as"
                  "outliers."
          .type = float(value_min=0, value_max=1, allow_none=True)
      }
      sauter_poon
        .help = "Options for the outlier rejector described in Sauter & Poon"
                "(2010) (https://doi.org/10.1107/S0021889810010782)"
        .expert_level = 1
      {
        px_sz = Auto
          .help = "X, Y pixel size in mm. If Auto, this will be taken from the"
                  "first panel of the first experiment."
          .type = floats(size=2, value_min=0.001)
        verbose = False
          .help = "Verbose output."
          .type = bool
          .multiple = False
        pdf = None
          .help = "Output file name for making graphs of |dr| vs spot number"
                  "and dy vs dx."
          .type = str
          .multiple = False
      }
    }
  }
}
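
Larger sets of overrides are often easier to keep in a separate file written in the same PHIL syntax as the definitions above, and to pass that file on the command line alongside the data files. A minimal sketch, assuming the file is named custom.phil (the name is arbitrary) and contains:

refinement {
  parameterisation {
    beam {
      fix = all
    }
  }
  reflections {
    outlier {
      algorithm = tukey
    }
  }
}

The program would then be run as:

dials.refine_bravais_settings experiments.json indexed.pickle custom.phil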