import myFitter as mf
import matplotlib.pyplot as plt
import scipy.stats as stats
import sys
import numpy as np
import random

# Import our example model.
from mymodel import MyModel

# Set the random seeds to get reproducible results.
random.seed(1234)
np.random.seed(1234)

# Create an instance of MyModel.
model = MyModel()

# Find the global minimum (see fit_example.py for details).
fitter = mf.Fitter(model)
fitter.sample(100)
globalmin = fitter.fit(tries=5)
print(globalmin)


################################################################################
# 1D Profile without Profiler1D                                                #
################################################################################

# Construct a list of values for the parameter `p1` for which the profile
# likelihood will be computed.
p1vals = np.linspace(1.0, 1.4, 41)

# This list will hold the fit results for each point in `p1vals`.
fitresults = []

# Loop over the values in `p1vals`.
for p1 in p1vals:
    # First clear the dictionary of sample points in `fitter`.
    fitter.clear()

    # Fix the value of the model parameter `p1`.
    fitter.model.fix({'p1': p1})

    # Sample 50 points (with `p1` fixed).
    fitter.sample(50)

    # Perform a fit with 3 tries and append the result to `fitresults`.
    fitresults.append(fitter.fit(tries=3))

# Extract the minimum chi-square values from `fitresults`.
chisqvals = [r.minChiSquare for r in fitresults]

# Plot the chi-square values.
plt.plot(p1vals, chisqvals)
plt.show()


################################################################################
# 1D Profile with Profiler1D                                                   #
################################################################################

# Create a 1D profiler for the parameter `p1` and link it to `fitter`.
profiler = mf.Profiler1D('p1', fitter=fitter)

# Create a histogram with 41 evenly spaced bins. `xvals` are the bin *centres*
# and the bin boundaries are chosen automatically. Use the `xbins` keyword if
# you want to set the bin boundaries directly.
hist = mf.Histogram1D(xvals=np.linspace(1.0, 1.4, 41))

# Compute the profile likelihood on the grid defined by `hist`, using 50 sample
# points and 3 local minimisations to find the global minimum at each grid
# point. In addition to the 3 minimisations from randomly generated starting
# points, the profiler will run minimisations using the best-fit parameters
# from adjacent grid points as starting points, which improves the stability
# of the fits. The data values of `hist` will be FitResult objects holding the
# results of the minimisations for the individual grid points.
profiler.scan(hist, nsamples=50, tries=3)

# Plot the minimum chi-square as a function of p1. The `convert` argument must
# be a function which maps the histogram data (FitResult objects in this case)
# to floats.
hist.plot(convert=lambda r: r.minChiSquare)
plt.show()

# If one of the grid points gives a better fit than `globalmin`, set
# `globalmin` to this fit result. Note that we can use `min` on FitResult
# objects since the FitResult class defines a total ordering. The
# Histogram1D.min() method returns the smallest data value (with respect to
# the same total ordering).
globalmin = min(globalmin, hist.min())
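# (Optional, illustrative sketch.) An approximate 1 sigma interval for p1 can
# be read off from the profile: it is the region where the chi-square exceeds
# the global minimum chi-square by at most 1 (Delta chi^2 <= 1 for one fixed
# parameter). Since the histogram data are FitResult objects, this sketch
# simply reuses the plain lists `p1vals` and `chisqvals` from the manual scan
# above; it relies only on NumPy and makes no assumptions about further
# myFitter APIs.
dchisq = np.array(chisqvals) - globalmin.minChiSquare
p1_in_band = p1vals[dchisq <= 1.0]
if len(p1_in_band) > 0:
    print('approximate 1 sigma interval for p1: [%.3f, %.3f]'
          % (p1_in_band.min(), p1_in_band.max()))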
# Plot the p-value as a function of p1. The conversion function uses
# pvalueFromChisq() to convert the Delta chi^2 (i.e. the minimal chi-square
# for fixed p1 minus the global minimal chi-square) to a p-value. The number
# of degrees of freedom is 1 since we fixed one parameter.
hist.plot(convert=lambda r:
          mf.pvalueFromChisq(r.minChiSquare - globalmin.minChiSquare, 1))

# Compute the p-values corresponding to 1, 2, and 3 standard deviations.
sigmaLevels = [mf.pvalueFromZvalue(z) for z in [1, 2, 3]]

# Configure axes, labels etc. See the matplotlib documentation for details.
plt.ylim((1e-4, 1.0))
plt.yscale('log')
plt.xlabel('p1')
plt.ylabel('p-value')
ax1 = plt.gca()
ax2 = ax1.twinx()
ax2.set_ylim(ax1.get_ylim())
ax2.set_yscale(ax1.get_yscale())
ax2.set_yticks(sigmaLevels)
ax2.set_yticklabels([r'1 sigma', r'2 sigma', r'3 sigma'])
ax2.grid(True, axis='y')
plt.show()


################################################################################
# 2D Profile                                                                   #
################################################################################

# Create a 2D profiler for the p1-p2 plane and link it to `fitter`.
profiler = mf.Profiler2D('p1', 'p2', fitter=fitter)

# Set up a 2D histogram for the grid in the p1-p2 plane.
hist2d = mf.Histogram2D(xvals=np.linspace(1.0, 1.4, 31),
                        yvals=np.linspace(0.6, 0.9, 31))

# Compute the profile likelihood on the 2D grid, using 5 sample points and
# 1 local minimisation for each grid point, and limit the number of iterations
# for each local minimisation to 30. As for 1D profilers, additional local
# minimisations are run from the best-fit parameters of adjacent grid points
# to improve stability. The data values of `hist2d` will be FitResult objects
# representing the results of the minimisations.
profiler.scan(hist2d, nsamples=5, tries=1, solvercfg={'maxiter': 30})

# Update the global minimum as before.
globalmin = min(globalmin, hist2d.min())

# Compute the Delta chi^2 values corresponding to 1, 2, and 3 standard
# deviations. Note that the number of degrees of freedom is now 2 since we
# fixed two parameters.
levels = [mf.chisqFromZvalue(z, 2) for z in [1, 2, 3]]

# Plot the 1, 2, and 3 sigma contours. The `convert` argument works as for
# Histogram1D.plot() above. The remaining keyword arguments are explained in
# the matplotlib documentation.
hist2d.contour(convert=lambda r: r.minChiSquare - globalmin.minChiSquare,
               levels=levels,
               linestyles=['solid', 'dashed', 'dotted'],
               colors=['b', 'b', 'b'],
               zorder=-1)

# Colour the area where no feasible solution was found in grey. Note the use
# of a conversion function based on the value of the `feasible` attribute of
# FitResult.
hist2d.contourf(convert=lambda r: 0 if r.feasible else 1,
                levels=[0.5, 1.5],
                colors=['grey'])
plt.show()
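# (Optional, illustrative sketch.) The 2D profile can also be shaded by
# p-value instead of Delta chi^2, mirroring the 1D p-value plot above. This
# reuses only functions that already appear in this example
# (pvalueFromChisq(), pvalueFromZvalue() and Histogram2D.contourf()); the
# band colours are arbitrary choices for illustration.
hist2d.contourf(convert=lambda r: mf.pvalueFromChisq(r.minChiSquare
                                                     - globalmin.minChiSquare, 2),
                levels=[mf.pvalueFromZvalue(z) for z in [3, 2, 1]],
                colors=['lightblue', 'deepskyblue'])
plt.xlabel('p1')
plt.ylabel('p2')
plt.show()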