Samples are analysed using various analysis settings. These can be specified from a .ini file or overridden using a dictionary.
Default settings from analysis_defaults.ini:
#For discarding burn-in if using raw chains
#if < 1 interpreted as a fraction of the total number of rows (0.3 ignores first 30% of lines)
#(ignored when parameter grid or chain .properties.ini settings are explicitly set)
ignore_rows = 0

#Minimum-weight sample to keep, as ratio to the maximum weight sample.
#This avoids very wide ranges of parameters (much wider than the posterior), e.g. when using nested sampling
min_weight_ratio = 1e-30

#Confidence limits for marginalized constraints.
#Also used for 2D plots, but only number set by plot settings actually shown
contours = 0.68 0.95 0.99

#If the distribution is skewed, so the probabilities of the two tails differ by more
#than credible_interval_threshold of the peak value, use equal-probability limits
#rather than integrating inwards equally at both tails.
#Note credible interval depends on density estimation parameters
credible_interval_threshold = 0.05

#Determine bounds from projected ND confidence range for contours[ND_contour_range]
#If -1 use bounds determined entirely from 1D marginalized densities
#Use 0 or 1 if 2D plot contours are hitting edges
range_ND_contour = -1

#1D marginalized confidence limit to use to determine parameter ranges
range_confidence = 0.001

#Confidence limit to use for convergence tests (splits and Raftery Lewis)
converge_test_limit = 0.95

#Sample binning for 1D plots
fine_bins = 1024

#if -1: set optimized smoothing bandwidth automatically for each parameter
#if >= 1: smooth by smooth_scale_1D bin widths
#if > 0 and <1: smooth by Gaussian of smooth_scale_1D standard deviations in each parameter
# (around 0.2-0.5 is often good)
#if < 0: automatic, with the overall smoothing length scaled by abs(smooth_scale_1D) from default
smooth_scale_1D = -1

#0 is basic normalization correction
#1 is linear boundary kernel (should get gradient correct)
#2 is a higher order kernel, that also affects estimates away from the boundary (1D only)
boundary_correction_order = 1

#Correct for (over-smoothing) biases using multiplicative bias correction
#i.e. by iterating estimates using the re-weighted 'flattened' bins
#Note that automatic bandwidth accounts for this by increasing the smoothing scale
#as mult_bias_correction_order increases (may not converge for large values).
mult_bias_correction_order = 1

#if -1: automatic optimized bandwidth matrix selection
#if >= 1: smooth by smooth_scale_2D bin widths
#if > 0 and <1: smooth by Gaussian of smooth_scale_2D standard deviations in each parameter
# (around 0.3-0.7 is often good)
#if < 0: automatic, with the overall smoothing length scaled by abs(smooth_scale_2D) from default
smooth_scale_2D = -1

#maximum correlation ellipticity to allow for 2D kernels. Set to 0 to force non-elliptical.
max_corr_2D = 0.99

#sample binning in each direction for 2D plotting
fine_bins_2D = 256

#maximum number of points for 3D plots
max_scatter_points = 2000

#output bins for 1D plotting (only for GetDist.py output to files, or scale if smooth_scale_2D>1)
num_bins = 100

#output bins for 2D plotting (not used, just scale if smooth_scale_2D>1)
num_bins_2D = 40