Use the interpolations stored in <ipolfile> to find optimised parameters with
the reference histograms found in the <refdir> as the optimisation target.
The <runsdir> is used to calculate the maximum error value seen for each bin,
to regularise interpolated errors which could otherwise blow up, leading to
an unrepresentatively small chi2 (and hence a misleading fit result) outside the sampled ranges.
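
For example, a typical invocation might look like this (the script and file names
are illustrative only):

  prof2-tune refdir/ ipol.dat mc/ --wfile obs-weights.txt
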
The weight file syntax is derived from YODA path syntax, and allows selecting bin
ranges either by physical value or by bin number, e.g.
  /path/parts/to/histo weight
  /path/parts/to/histo@xmin:xmax weight
  /path/parts/to/histo#nmin:nmax weight
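
For illustration only (the analysis paths and weights are hypothetical), a weight file
giving unit weight to one observable, double weight to a second, and restricting a third
to the bins with x in [0, 50] could look like:

  /HYPOTHETICAL_ANALYSIS/d01-x01-y01        1.0
  /HYPOTHETICAL_ANALYSIS/d02-x01-y01        2.0
  /HYPOTHETICAL_ANALYSIS/d03-x01-y01@0:50   1.0
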
TODO:
* Handle run combination file/string (write a hash of the run list into the ipol filename?)
"""
import optparse, os, sys
op = optparse.OptionParser(usage=__doc__)
op.add_option("--wfile", dest="WFILE", default=None, help="Path to a weight file to specify unequal chi2 weights of each bin in the fit (default: %default)")
op.add_option("--output", dest="OUTPUT", default="tunes", help="Prefix for outputs (default: %default)")
op.add_option("--limits", dest="LIMITS", default=None, help="Simple text file with parameter limits and fixed parameters")
op.add_option("--scan", dest="SCAN", default=None, type=int, help="Perform a brute force scan of SCAN points to find minimiser start point")
op.add_option("--limit-errs", dest="USE_RUNSDIR", action="store_true", default=False, help="Re-read the runsdir to regularise error ipols")
op.add_option("--filter", dest="FILTER", action="store_true", default=False, help="Filter out data bins that have 0 error")
opts, args = op.parse_args()
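## An illustrative guess at the --limits file layout (check the Professor documentation for
## the authoritative format): one parameter per line, with two numbers giving a fit range
## and a single number fixing the parameter, e.g.
##   PAR1  0.5  2.5
##   PAR2  1.0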
## Get mandatory arguments
if len(args) < 1:
print "Argument missing... exiting\n\n"
op.print_usage()
sys.exit(1)
REFDIR = args[0]
IFILE = "ipol.dat"
RUNSDIR = os.path.join(REFDIR, "..", "mc")
if len(args) >= 2:
IFILE = args[1]
if len(args) >= 3:
RUNSDIR = args[2]
# TODO: ipol fit limits are in the ipol datfile... automatically use them / warn if result is outside?
## Load Professor and show the standard banner
import professor2 as prof
if not opts.QUIET:
print prof.logo
## Read interpolated and reference histos, and run data
IHISTOS, METADATA = prof.read_ipoldata(IFILE)
DHISTOS = prof.read_all_histos(REFDIR)
## Try to read run histos and extract maximum errors
MAXERRDICT = None
if opts.USE_RUNSDIR:
try:
_, RUNHISTOS = prof.read_all_rundata(RUNSDIR, None) #< don't care about reading params files
MAXERRDICT = prof.find_maxerrs(RUNHISTOS)
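        #< NB. assumed structure: MAXERRDICT maps each histo path to a list of per-bin maximum errors seen across the runs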
    except Exception:
        print "Could not read run data for error regularisation -- chi2 may be unstable"
## Weight file parsing
matchers = prof.read_pointmatchers(opts.WFILE) if opts.WFILE else None
## Find things available in both ipol and ref data, and in the weight file if there is one
available = []
for ihn in sorted(IHISTOS.keys()):
## Set default bin weights
for ib in IHISTOS[ihn].bins:
ib.w = 1.0
## Find user-specified bin weights if there was a weight file
if matchers is not None:
## Find matches
pathmatch_matchers = [(m,wstr) for m,wstr in matchers.iteritems() if m.match_path(ihn)]
## Ditch histos not listed in the weight file
if not pathmatch_matchers:
del IHISTOS[ihn]
continue
## Attach fit weights to the ibins, setting to zero if there's no position match
for ib in IHISTOS[ihn].bins:
posmatch_matchers = [(m,wstr) for (m,wstr) in pathmatch_matchers if m.match_pos(ib)]
ib.w = float(posmatch_matchers[-1][1]) if posmatch_matchers else 0 #< NB. using last match
for rhn in DHISTOS.keys():
if ihn in rhn: #< TODO: short for rhn = "/REF/"+ihn ?
# TODO: we should eliminate this potential mismatch of ref and MC hnames
available.append([ihn,rhn])
break #< TODO: ok?
# else:
# print "Could not find %s"%ihn
## Prepare lists of ibins and dbins
IBINS, DBINS, MAXERRS, FILTERED = [], [], [], []
BINDICES={} # Allows for more helpful error messages in case of prof.StatError
for a in available:
# TODO: print out the available observables
if len(IHISTOS[a[0]].bins) != len(DHISTOS[a[1]].bins):
print "Inconsistency discovered between data bins and parametrised bins:"
print "Removing histogram", a[0]
del IHISTOS[a[0]]
del DHISTOS[a[1]]
else:
        BINDICES[a[0]] = []  #< global indices of this histo's accepted bins, for more helpful error messages
        for nb in xrange(len(IHISTOS[a[0]].bins)):
            if opts.FILTER and DHISTOS[a[1]].bins[nb].err == 0:
                FILTERED.append(1)
                continue
            if IHISTOS[a[0]].bins[nb].w > 0:
                IBINS.append(IHISTOS[a[0]].bins[nb])
                DBINS.append(DHISTOS[a[1]].bins[nb])
                BINDICES[a[0]].append(len(IBINS) - 1)  #< index of the ibin just appended
                if MAXERRDICT:
                    MAXERRS.append(MAXERRDICT[a[0]][nb])  #< keep MAXERRS aligned bin-by-bin with IBINS
if not MAXERRS:
MAXERRS = None
if opts.DEBUG:
print "DEBUG: filtered %i bins due to zero data error"%len(FILTERED)
## Sanity checks
assert len(IBINS) == len(DBINS)
if not IBINS:
print "No bins ..., exiting"
sys.exit(1)
assert MAXERRS is None or len(IBINS) == len(MAXERRS)
def simpleGoF(params):
"""
Very straightforward goodness-of-fit measure
"""
chi2 = 0.0
for num, ibin in enumerate(IBINS):
## Weight is attached to the ipol bin (default set to 1.0 above)
w = ibin.w
if w == 0:
continue
## Get ipol & ref bin values and compute their difference
ival = ibin.val(params)
dval = DBINS[num].val
diff = dval - ival
## Data error
err2 = DBINS[num].err**2
## Plus interpolation error added in quadrature
        maxierr = MAXERRS[num] if MAXERRS else None
err2 += ibin.err(params, emax=maxierr)**2
# TODO: compute asymm error for appropriate deviation direction cf. sum([e**2 for e in ibin.ierrs])
if not err2:
            culprit = ""
            i_culprit = -1
            for k, v in BINDICES.iteritems():
                if num in v:
                    culprit = k
                    i_culprit = v.index(num)
            raise prof.StatError("Zero uncertainty on a bin being used in the fit -- cannot compute a reasonable GoF!\n\tObservable: %s (bin #%i)\n\tIpol: %s -> %f +- %f\n\tData: %s\nSee the weight-file syntax in the documentation, or use the --filter CL arg to remove bins with zero data error automatically" % (culprit, i_culprit, ibin, ival, ibin.err(params, emax=maxierr), DBINS[num]))
        # TODO: should we square w too, so that it penalises deviations _linearly_?
chi2 += w * diff**2 / err2
return chi2
def simpleGoFGradient(params):
"""
Very straightforward goodness-of-fit measure
"""
dchi2 = [0 for x in params]
for num, ibin in enumerate(IBINS):
## Weight is attached to the ipol bin (default set to 1.0 above)
w = ibin.w
if w == 0:
continue
## Get ipol & ref bin values and compute their difference
ival = ibin.val(params)
dval = DBINS[num].val
diff = dval - ival
## Data error
err2 = DBINS[num].err**2
## Plus interpolation error added in quadrature
        maxierr = MAXERRS[num] if MAXERRS else None
err2 += ibin.err(params, emax=maxierr)**2
# TODO: compute asymm error for appropriate deviation direction cf. sum([e**2 for e in ibin.ierrs])
if not err2:
raise prof.StatError("Zero uncertainty on a bin being used in the fit -- cannot compute a reasonable GoF")
        # TODO: should we square w too, so that it penalises deviations _linearly_?
igrad = ibin.grad(params)
for p in xrange(len(params)):
            dchi2[p] += -2 * w * diff * igrad[p] / err2  #< d/dp of (d - f(p))^2 is -2*(d - f(p)) * df/dp
    N = sum(dchi2)
    return [x/N for x in dchi2]
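## NB. simpleGoF and simpleGoFGradient are plain callables of a parameter-value list, so
## they could in principle be handed to any general-purpose minimiser. A purely illustrative
## sketch (not necessarily how this script performs the minimisation below), e.g. with scipy:
##   from scipy import optimize
##   result = optimize.minimize(simpleGoF, x0=START_POINT)  #< START_POINT: hypothetical list of start values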
## Take parameter names directly from the ipol file metadata, or fall back to generic names
PNAMES = METADATA["ParamNames"].split()
if not PNAMES:
PNAMES = ["A%03i" % i for i in xrange(int(METADATA["Dimension"]))]