Commit e484db4e authored by Greg Henning

[update] and [add] flux and fit

parent 86637476
Showing with 2248 additions and 39 deletions
@@ -2,5 +2,5 @@ module add Programming_Languages/python/3.9.1
module add Analysis/gnuplot
python -m venv `hostname`_env
source `hostname`_env/bin/activate
python -m pip install --upgrade pip
python -m pip install -r requirements.txt
\ No newline at end of file
source `hostname`_env/bin/activate; python -m pip install --upgrade pip
source `hostname`_env/bin/activate; python -m pip install -r requirements.txt
\ No newline at end of file
FC_EFF:
U3O8: 0.776
UF4: 0.944
u_FC_EFF:
U3O8: 0.04
UF4: 0.021
# Density in atoms per barn
FC_DENSITY:
U3O8: 9.912e-7
UF4: 8.2978e-7
AIR_LOSS: 0.0001
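These three factors enter the final flux normalization exactly as task_scale_by_physics (added below in this commit) applies them. A minimal sketch of that scaling, assuming a bin content already divided by the U-235(n,f) cross section in barns:

import yaml

factors = yaml.safe_load(open('etc/fc_flux_factors.yaml'))
this_FC = 'U3O8'  # or 'UF4'
efficiency = factors['FC_EFF'][this_FC]   # fission-chamber efficiency
density = factors['FC_DENSITY'][this_FC]  # deposit density, atoms per barn
air_loss = factors['AIR_LOSS']            # fractional loss in air

counts = 1.0  # placeholder: one bin content, already divided by sigma_nf
flux = counts * (1. - air_loss) / density / efficiency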
@@ -3,4 +3,7 @@ max_neutron_energy: 27000.0
number_of_bins: 26990
nf_eval: 'ENDF/B-VIII.0: U-235(N,F)'
\ No newline at end of file
#nf_eval: 'ENDF/B-VIII.0: U-235(N,F)'
nf_eval: 'ENDF/B-VII.1: U-235(N,F)'
smoothing: 25
\ No newline at end of file
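max_neutron_energy and number_of_bins define a uniform grid; min_neutron_energy lies outside this hunk, so the sketch below reads it from the file instead of assuming a value:

import yaml

spect = yaml.safe_load(open('etc/fc_flux_spect.yaml'))
bin_width = (spect['max_neutron_energy']
             - spect['min_neutron_energy']) / spect['number_of_bins']  # keV
print(bin_width, 'keV per bin; smoothing =', spect.get('smoothing', 0))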
@@ -5,7 +5,8 @@ inel259:
signal: [258.0, 261.0]
background: [[257, 258], [261, 262]]
parasits: []
En limits: [200., 500., 800., 1000., 1500., 2000., 2500., 3000., 3500., 4000., 4500., 5000.,
6000., 7000., 8000., 9000., 10000., 12000., 14000., 16000., 18000.]
inel209:
@@ -14,6 +15,8 @@ inel209:
signal: [209.01, 210.6001]
background: [[206.001, 209.001], [210.7, 212.001]]
parasits: []
En limits: [200., 500., 800., 1000., 1500., 2000., 2500., 3000., 3500., 4000., 4500., 5000.,
6000., 7000., 8000., 9000., 10000., 12000., 14000., 16000., 18000.]
inel84:
energy: 84.7
@@ -21,6 +24,8 @@ inel84:
signal: [84.0200001, 85.890000001]
background: [[83.3000001, 83.70000001], [86.0000001, 86.900000000009]]
parasits: []
En limits: [200., 500., 800., 1000., 1500., 2000., 2500., 3000., 3500., 4000., 4500., 5000.,
6000., 7000., 8000., 9000., 10000., 12000., 14000., 16000., 18000.]
inel291:
energy: 291.7
@@ -28,6 +33,8 @@ inel291:
signal: [290.000001, 293.00001]
background: [[286.5000001, 289.50001], [294.000001, 296.00001]]
parasits: []
En limits: [200., 500., 800., 1000., 1500., 2000., 2500., 3000., 3500., 4000., 4500., 5000.,
6000., 7000., 8000., 9000., 10000., 12000., 14000., 16000., 18000.]
inel313:
energy: 313.0
@@ -35,6 +42,8 @@ inel313:
signal: [311.700001, 314.60001]
background: [[310.00000001, 311.0000001],[314.800001, 316.00000]]
parasits: []
En limits: [200., 500., 800., 1000., 1500., 2000., 2500., 3000., 3500., 4000., 4500., 5000.,
6000., 7000., 8000., 9000., 10000., 12000., 14000., 16000., 18000.]
n2n229:
energy: 229.3
@@ -42,6 +51,8 @@ n2n229:
signal: [228.5, 230.0]
background: [[227.5, 228.2], [230.5, 231.0]]
parasits: []
En limits: [200., 500., 800., 1000., 1500., 2000., 2500., 3000., 3500., 4000., 4500., 5000.,
6000., 7000., 8000., 9000., 10000., 12000., 14000., 16000., 18000.]
n2n100:
energy: 100.1
@@ -49,6 +60,8 @@ n2n100:
signal: [99.7, 101.0]
background: [[101.1001, 101.701], [99.001, 99.55001]]
parasits: []
En limits: [200., 500., 800., 1000., 1500., 2000., 2500., 3000., 3500., 4000., 4500., 5000.,
6000., 7000., 8000., 9000., 10000., 12000., 14000., 16000., 18000.]
n2n351:
energy: 350.99
@@ -56,6 +69,8 @@ n2n351:
signal: [350.0001, 352.0001]
background: [[348.000, 349.50001], [352.3001, 352.90001]]
parasits: []
En limits: [200., 500., 800., 1000., 1500., 2000., 2500., 3000., 3500., 4000., 4500., 5000.,
6000., 7000., 8000., 9000., 10000., 12000., 14000., 16000., 18000.]
n2n1121:
energy: 1121.29
@@ -63,6 +78,8 @@ n2n1121:
signal: [1119.50001, 1123.750001]
background: [[1117.4001, 1118.700012], [1124.3001, 1126.60005]]
parasits: []
En limits: [200., 500., 800., 1000., 1500., 2000., 2500., 3000., 3500., 4000., 4500., 5000.,
6000., 7000., 8000., 9000., 10000., 12000., 14000., 16000., 18000.]
what122:
@@ -70,4 +87,6 @@ what122:
name: what122
signal: [121.70001, 123.673001]
background: [[120.00001, 121.396001], [124.800001, 126.000001]]
parasits: []
\ No newline at end of file
parasits: []
En limits: [200., 500., 800., 1000., 1500., 2000., 2500., 3000., 3500., 4000., 4500., 5000.,
6000., 7000., 8000., 9000., 10000., 12000., 14000., 16000., 18000.]
\ No newline at end of file
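Each transition's 'En limits' list feeds task_integrate_flux_over_windows further down, via scripts/integrate_over_windows.py. That script is only referenced, not shown, in this diff; a minimal sketch of the integration it presumably performs, using the pyhisto API seen elsewhere in the commit (the real script writes a histogram rather than returning a list):

from pyhisto import Histogram

def integrate_over_windows(source: str, windows: list) -> list:
    '''hypothetical: one flux integral per pair of consecutive energy limits'''
    h = Histogram(fromfile=source)
    return [sum(b.count for b in h.bins if lo <= b.center() < hi)
            for lo, hi in zip(windows[:-1], windows[1:])]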
@@ -5,6 +5,6 @@ pyhisto
#ascii-data-file
matplotlib
numpy
scipy
#!/bin/bash
#SBATCH --ntasks 4
#SBATCH --mem 10000
#SBATCH --time 5-00:00:00
#SBATCH --mem 12000
#SBATCH --time 0-20:00:00
# #SBATCH --mail-user=ghenning@iphc.cnrs.fr
# #SBATCH --mail-type=END,FAIL
@@ -33,27 +33,30 @@ pwd && cd src/fill_into_h2/ && make binaries && cp bin/fill_into_h2.`hostname` .
echo "# Running tasks"
# echo "#... raw2histo"
#./run_task tasks/do_raw2histo.py
echo "#... FC time align"
./run_task tasks/do_fc_timealign.py
echo "#... FC find energy cut"
./run_task tasks/do_fc_ecut.py
echo "#... FC get pile up rate"
./run_task tasks/do_fc_pileup.py
echo "#... FC cut 2D and project on time"
./run_task tasks/do_fc_cut_and_project.py
echo "#... FC -> neutron flux [MC]"
./run_task tasks/do_fc_to_flux.py
# echo "#... ge_time_align"
#./run_task tasks/do_ge_timealign.py
#echo "#... ge_calibrate"
#./run_task tasks/do_ge_calibrate.py
#echo "#... ge_calibrated_2D"
#./run_task tasks/do_ge_calibrate_2D.py
#echo "#... cut the bidim to the interesting region"
#./run_task tasks/do_ge_inel.py
#echo "#... getting the transitions yields"
#./run_task tasks/do_extract_transitions_gnum.py
# echo "#... FC time align"
# ./run_task tasks/do_fc_timealign.py
# echo "#... FC find energy cut"
# ./run_task tasks/do_fc_ecut.py
# echo "#... FC get pile up rate"
# ./run_task tasks/do_fc_pileup.py
# echo "#... FC cut 2D and project on time"
# ./run_task tasks/do_fc_cut_and_project.py
# echo "#... FC -> neutron flux [MC]"
# ./run_task tasks/do_fc_to_flux.py
# echo "#... ge_time_align"
# ./run_task tasks/do_ge_timealign.py
# echo "#... ge_calibrate"
# ./run_task tasks/do_ge_calibrate.py
# echo "#... ge_calibrated_2D"
# ./run_task tasks/do_ge_calibrate_2D.py
# echo "#... cut the bidim to the interesting region"
# ./run_task tasks/do_ge_inel.py
echo "#... GE get pile up rate"
./run_task tasks/do_ge_pileup.py
echo "#... getting the transitions yields"
./run_task tasks/do_extract_transitions_gnum.py
echo "#finishing up to get the cross section"
./run_task tasks/do_make_transitions_xs.py
echo "# Removing job venv"
rm -rf `hostname`_env
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -*- format: python -*-
# -*- author: G. Henning -*-
# -*- created: may 2020 -*-
# -*- copyright: GH/IPHC 2020 -*-
# -*- file: adaptive_fit.py -*-
# -*- purpose: fit peaks with background and parasites -*-
'''
Fit peaks with a given energy, bg profile and parasitic peaks
'''
import argparse
import yaml
import numpy as np
import scipy
from scipy.optimize import curve_fit
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
from pyhisto.histogram import Histogram
from pyhisto.tools import GaussRealFit
def adaptive_fit(histo_file: str,
xpeak: float = 0.0, xwide: float = 10.,
guessstdev: float = 1.,
xmin: float = None, xmax: float = None,
bg_type: str = 'lin',
parasites: list = None,
plot: str = None) -> str:
'''
Fit with parasites
'''
if xmin is None:
xmin = xpeak - xwide
if xmax is None:
xmax = xpeak + xwide
the_histo = Histogram(fromfile=histo_file).slice(xmin, xmax)
# if no parasites,
if parasites is None or len(parasites) == 0:
# fall back to realfit
the_fit = GaussRealFit(the_histo)
if plot is not None:
the_fit.plot(plot)
return yaml.dump({'integral': float(the_fit.integral),
'integral_u': float(the_fit.integral_u),
'mean': float(the_fit.mean),
'mean_u': float(the_fit.mean_u),
'stdev': float(the_fit.stdev),
'stdev_u': float(the_fit.stdev_u)
})
# if some parasites, now we are talking.
functions_to_fit = []
variables_guess = []
variables_limits = []
variables_parameters = []
# .. Background is either linear or constant
if bg_type == 'lin':
functions_to_fit.append(lambda x, B, SL: B + SL * (x - xpeak))
variables_guess.extend([the_histo[0].count, 0.])
variables_limits.extend([(0.0, np.inf),
(-np.inf, np.inf)])
variables_parameters.append(slice(0, 2))
else:
functions_to_fit.append(lambda x, B: B)
variables_guess.extend([the_histo[0].count])
variables_limits.extend([(0.0, np.inf)])
variables_parameters.append(slice(0, 1))
# .. Main peak
functions_to_fit.append(lambda x, S, M, R: S / np.sqrt(2. * np.pi * R * R) * np.exp(-(x - M) ** 2. / 2. / R / R))
variables_guess.extend([max(1, sum(the_histo) - the_histo[0].count * len(the_histo)),
xpeak,
guessstdev])
variables_limits.extend([(0., sum(the_histo)),
(xpeak - 3 * guessstdev, xpeak + 3 * guessstdev),
(0.2 * guessstdev, 2 * guessstdev)])
variables_parameters.append(slice(variables_parameters[-1].stop,
variables_parameters[-1].stop + 3))
# .. now the parasites:
for extra_peak in parasites:
functions_to_fit.append(lambda x, S, M, R: S / np.sqrt(2. * np.pi * R * R) * np.exp(-(x - M) ** 2. / 2. / R / R))
variables_guess.extend([(max(1, sum(the_histo) - the_histo[0].count * len(the_histo)) / 2.),
extra_peak, guessstdev])
variables_limits.extend([(0., sum(the_histo)),
(extra_peak - 1.0 * guessstdev, extra_peak + 1.0 * guessstdev),
(0.2 * guessstdev, 2 * guessstdev)])
variables_parameters.append(slice(variables_parameters[-1].stop,
variables_parameters[-1].stop + 3))
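# transpose [(lo, hi), ...] into the (lower_bounds, upper_bounds) pair
# that scipy.optimize.curve_fit expects for its bounds= argument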
variables_limits = list(zip(*variables_limits))
# Now, defining our final function
def the_function(x, *p):
S = 0.0
for f, s in zip(functions_to_fit, variables_parameters):
S += f(x, *p[s])
return S
# And ready to fit!
fitrslt, cov = curve_fit(the_function,
*the_histo.xy(),
p0=variables_guess,
bounds=variables_limits,
max_nfev=len(variables_guess)*len(the_histo)*250)
uncerts = np.sqrt(np.diag(cov))
integral = float(fitrslt[variables_parameters[0].stop])/the_histo.find(xpeak).width()
integral_u = float(uncerts[variables_parameters[0].stop])/the_histo.find(xpeak).width()
mean = float(fitrslt[variables_parameters[0].stop + 1])
mean_u = float(uncerts[variables_parameters[0].stop + 1])
stdev = float(fitrslt[variables_parameters[0].stop + 2])
stdev_u = float(uncerts[variables_parameters[0].stop + 2])
the_result: dict = {'integral': integral,
'integral_u': integral_u,
'mean': mean,
'mean_u': mean_u,
'stdev': stdev,
'stdev_u': stdev_u}
# Plotting the result (if requested)
if plot is None:
return yaml.dump(the_result)
plt.figure()
plt.grid(True)
plt.step(*the_histo.xy(), label='original')
X_values = list(the_histo.xy())[0]
BG_const = float(fitrslt[0])
plt.step(X_values,
list(map(lambda x: functions_to_fit[0](x, *fitrslt[variables_parameters[0]]),
X_values)),
label='background')
plt.step(X_values,
list(map(lambda x: BG_const+functions_to_fit[1](x, *fitrslt[variables_parameters[1]]),
X_values)),
label='Mainpeak')
for f, s in zip(functions_to_fit[2:], variables_parameters[2:]):
plt.step(X_values,
list(map(lambda x: BG_const+f(x, *fitrslt[s]),
X_values)),
label='Parasite ')
plt.legend()
plt.savefig(plot)
return yaml.dump(the_result)
# end of fit
# Now, execution part
if __name__ == "__main__":
arg_parser = argparse.ArgumentParser(description=__doc__)
arg_parser.add_argument('--xpeak', type=float,
default=0.0, nargs='?',
help='guess of energy')
arg_parser.add_argument('--xwide', type=float,
default=10.0, nargs='?',
help='fit window size')
arg_parser.add_argument('--guessstdev', type=float,
default=1.0, nargs='?',
help="guess for stdev of peaks")
arg_parser.add_argument('--xmin', type=float,
default=None, nargs='?')
arg_parser.add_argument('--xmax', type=float,
default=None, nargs='?')
arg_parser.add_argument('--bg_type', type=str,
choices=['const', 'lin'],
default='lin',
help='type of bg')
arg_parser.add_argument('--plot', type=str,
default=None, nargs='?',
help="add file name to save in png")
arg_parser.add_argument('--parasites', type=float,
default=None, nargs="*")
arg_parser.add_argument('--histo_file', type=str,
nargs='?', help="Faster files to read")
the_arguments = arg_parser.parse_args()
print(the_arguments)
print(adaptive_fit(**vars(the_arguments)))
# end of file
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -*- format: python -*-
# -*- author: G. Henning -*-
# -*- created: may 2020 -*-
# -*- copyright: GH/IPHC 2020 -*-
# -*- file: fit_one_peak.py -*-
# -*- purpose: fit a single peak -*-
'''
Fit One (!) peak at a given energy
'''
import argparse
import yaml
import matplotlib
matplotlib.use('agg')
from pyhisto.histogram import Histogram
from pyhisto.tools import FindPeaks, GaussRealFit
def fit_one_peak(histo_file: str,
xguess: float = None,
xwide: float = 10.0,
binwidthscale: bool = True) -> str:
'''Fit _one_peak in the histogram
- histo_file (str): the file to read for the histogram
- xguess (float): the central energy
'''
h = Histogram(fromfile=histo_file).slice(xguess - xwide,
xguess + xwide)
the_fit = GaussRealFit(h)
the_fit.plot("the_latest_fit.png")
factor = h[0].width() if binwidthscale else 1.0
return yaml.dump({'integral': float(the_fit.integral)*factor,
'integral_u': float(the_fit.integral_u)*factor,
'mean': float(the_fit.mean),
'mean_u': float(the_fit.mean_u),
'stdev': float(the_fit.stdev)*factor,
'stdev_u': float(the_fit.stdev_u)*factor
})
# Now, execution part
if __name__ == "__main__":
arg_parser = argparse.ArgumentParser(description=__doc__)
arg_parser.add_argument('--xguess', type=float,
default=None, nargs='?',
help='guess of energy')
arg_parser.add_argument('--xwide', type=float,
default=10.0, nargs='?',
help='fit window size')
arg_parser.add_argument('--binwidthscale', action=argparse.BooleanOptionalAction,
default=True,
help='scale by bin width (use --no-binwidthscale to disable)')
arg_parser.add_argument('histo_file', type=str,
nargs='?', help="Faster files to read")
arguments = arg_parser.parse_args()
print(fit_one_peak(**vars(arguments)))
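# For reference, a minimal direct call ('spectrum.h1.txt' is a hypothetical
# pyhisto histogram text file):
#   print(fit_one_peak('spectrum.h1.txt', xguess=1121.29, xwide=8.0))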
# end of file
@@ -62,12 +62,13 @@ def time_to_energy_1D(h: histo,
for i, b in enumerate(h):
if (min(b.lowedge, b.upedge) < tmin
or max(b.lowedge, b.upedge)> tmax):
new_h[i].count = None
continue
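# note: earlier arrival times mean faster neutrons, hence higher energies,
# so the time bin's low edge becomes the energy bin's up edge (and vice versa)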
new_h[i].upedge = time_to_energy_func(h[i].lowedge + tshift, dof, mass)
new_h[i].lowedge = time_to_energy_func(h[i].upedge + tshift, dof, mass)
new_h[i].count = h[i].count
# end for enumerate bins
new_h._frombins( list( b for b in new_h.bins if (min(b.lowedge, b.upedge) > true_emin) and (max(b.lowedge, b.upedge) <true_emax)))
new_h._frombins( list( b for b in new_h.bins if (min(b.lowedge, b.upedge) > true_emin) and (max(b.lowedge, b.upedge) <true_emax) and (b.count is not None)))
return new_h
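# For reference, a hedged sketch of the usual relativistic TOF-to-energy
# relation that time_to_energy_func (defined above this hunk) presumably
# implements; the units are assumptions: t in ns, dof in m, mass in keV/c^2.
def tof_to_energy_sketch(t: float, dof: float, mass: float) -> float:
    c_m_per_ns = 0.299792458           # speed of light in m/ns
    beta = dof / (t * c_m_per_ns)      # v/c from flight distance and time
    gamma = 1.0 / (1.0 - beta ** 2) ** 0.5
    return mass * (gamma - 1.0)        # kinetic energy, same unit as mass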
if __name__=="__main__":
@@ -89,10 +90,10 @@ if __name__=="__main__":
help="time shift")
parser.add_argument('--emax', type=float,
default=1e30, nargs='?',
help="Maximum energy")
help="Maximum energy (keV)")
parser.add_argument('--emin', type=float,
default=0., nargs='?',
help="Minimum energy")
help="Minimum energy (keV)")
parser.add_argument('file', type=str,
help="histogram.1d.txt file to read and calibrate")
args = parser.parse_args()
@@ -8,7 +8,10 @@ from doit.action import CmdAction
import yaml
import pathlib
from itertools import product, chain
import scipy
from scipy.interpolate import interp1d
import numpy as np
from pyhisto import Histogram
#
@@ -24,6 +27,21 @@ the_transitions = yaml.load(open('etc/transitions_to_look_at.yaml'),
Loader=yaml.Loader)
def task_make_tof_window():
'''compute tof windows for given energy windows'''
for this_transition in the_transitions.keys():
def notask_project_for_fit():
'''project on the gamma energy axis for all time windows'''
for this_transition in the_transitions.keys():
for ge_det in Ge_detectors:
for flavor in ('all', 'clean', 'puscu'):
source_file = f"./output/interesting_2D/{ge_det}_interest.h2_{flavor}.txt"
dest_file = f"./output/transitions/{this_transition}/{ge_det}_{flavor}.signal.h1.txt"
time_limits = []
egamma_limits = the_transitions[this_transition].background[0][0], the_transitions[this_transition].background[-1][1]
def task_project_transitions_signal():
'''project transitions bidim according to signal'''
for this_transition in the_transitions.keys():
@@ -151,6 +169,40 @@ def task_nonegative_counts():
}
def task_gcount_puscu():
'''scale up the count according to pile up'''
# taking the clean flavor and scaling up by 1+PU_rate
def puscu_h1(src_file, pu_file, dest_file):
src_h = Histogram(fromfile=src_file)
pu_h = Histogram(fromfile=pu_file)
dest_h = src_h.copy()
# making an interpolator
pu_interp = interp1d(*pu_h.xy(),
kind='quadratic',
fill_value="extrapolate")
for i, b in enumerate(src_h.bins):
dest_h.bins[i].count = b.count * (1.0 + pu_interp(b.center()))
open(dest_file, 'w').write(str(dest_h))
return True
for this_transition in the_transitions.keys():
for ge_det in Ge_detectors:
source_file = f"./output/transitions/{this_transition}/{ge_det}_clean.bgsub_noneg.h1.txt"
dest_file = f"./output/transitions/{this_transition}/{ge_det}_puscu.bgsub_noneg.h1.txt"
yield {
'name': source_file,
'file_dep': [source_file,
f'./output/Ge_pileup/{ge_det}.pileup_smooth.h1.txt'],
'targets': [dest_file],
'actions':[
(puscu_h1, [], {'src_file': source_file,
'pu_file': f'./output/Ge_pileup/{ge_det}.pileup_smooth.h1.txt',
'dest_file': dest_file}),
f"./env.run scripts/Draw1Dh.py {dest_file}"
],
}
return None
def task_smooth_gcount():
'''apply smoothing to data'''
def _smooth_gcount_cmd(input_h1_file, smooth_param_yaml, target_h):
@@ -166,7 +218,7 @@ def task_smooth_gcount():
for this_transition in the_transitions.keys():
for ge_det in Ge_detectors:
for flavor in ('all', 'clean'):
for flavor in ('all', 'clean', 'puscu'):
source_file = f"./output/transitions/{this_transition}/{ge_det}_{flavor}.bgsub_noneg.h1.txt"
dest_file = f"./output/transitions/{this_transition}/{ge_det}_{flavor}.bgsub_noneg_smooth.h1.txt"
@@ -180,7 +232,8 @@ def task_smooth_gcount():
'input_h1_file': source_file,
'smooth_param_yaml': "etc/ge_time_to_energy.yaml",
'target_h': dest_file
}))
})),
f"./env.run scripts/Draw1Dh.py {dest_file}"
]
}
@@ -194,11 +247,13 @@ def task_gcount_time_to_energy():
return " ".join(["./env.run scripts/time_to_energy_1D.py",
f"--dof={distance_of_flight}",
f"--tmin={tmin} --tmax={tmax}",
f"--emin=1.0 --emax=50e3",
f"{source_h}",
f"> {target_h}"])
for this_transition in the_transitions.keys():
for ge_det in Ge_detectors:
for flavor in ('all', 'clean'):
for flavor in ('all', 'clean', 'puscu'):
source_file = f"./output/transitions/{this_transition}/{ge_det}_{flavor}.bgsub_noneg_smooth.h1.txt"
dest_file = f"./output/transitions/{this_transition}/{ge_det}_{flavor}.gamcount_perenergy.h1.txt"
@@ -212,9 +267,56 @@ def task_gcount_time_to_energy():
'source_h': source_file,
't2E_param_file': "etc/ge_time_to_energy.yaml",
'target_h': dest_file
}))
})),
f"./env.run scripts/Draw1Dh.py {dest_file}"
]
}
def task_gcount_per_energy_perkev():
'''interpolate the per-energy gamma counts onto the common keV binning'''
def interp_to_keVbins(source_file: str,
target_file: str,
spect_yaml: str) -> bool:
hsrc = Histogram(fromfile=source_file)
spect_yml = yaml.load(open(spect_yaml), Loader=yaml.Loader)
htrgt = Histogram(nbins=spect_yml['number_of_bins'],
xmin=spect_yml['min_neutron_energy'],
xmax=spect_yml['max_neutron_energy'])
# Keeping original number of hits
Sorigin = sum(hsrc)
# interpolating
hinterpolator = interp1d(*hsrc.xy(),
kind='quadratic',
fill_value="extrapolate")
for i, b in enumerate(htrgt):
htrgt.bins[i].count = max(0, hinterpolator(b.center()))
# restoring sum
htrgt *= Sorigin / sum(htrgt)
# exporting the histogram
open(target_file, 'w').write(str(htrgt))
return True
for this_transition in the_transitions.keys():
for ge_det in Ge_detectors:
for flavor in ('all', 'clean', 'puscu'):
source_file = f"./output/transitions/{this_transition}/{ge_det}_{flavor}.gamcount_perenergy.h1.txt"
dest_file = f"./output/transitions/{this_transition}/{ge_det}_{flavor}.gamcount_perenergy_perkev.h1.txt"
yield {
'name': f"{this_transition}.{ge_det}.{flavor}",
'file_dep': [source_file, pathlib.Path('./etc/fc_flux_spect.yaml')],
'targets': [dest_file],
'actions': [
(interp_to_keVbins, None, {'source_file': source_file,
'target_file': dest_file,
'spect_yaml': './etc/fc_flux_spect.yaml'}),
f"./env.run scripts/Draw1Dh.py {dest_file}"
],
}
def task_draw_1D():
@@ -11,6 +11,8 @@ from random import uniform as rand_uniform
import scipy
from scipy.interpolate import interp1d
import numpy as np
import json
from pyhisto import Histogram
@@ -65,6 +67,14 @@ def task_FC_spect_to_perkeV():
htrgt = hsrc.empty_copy()
for i, b in enumerate(hsrc):
htrgt[i].count = b.count / b.width()
##### HERE: A FIRST SMOOTHING with conservation of integral!
smooth_factor = 12
kernel = np.ones(smooth_factor) / smooth_factor
smoothed = np.convolve(list(htrgt.xy())[1], kernel, mode='same')
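# note: with mode='same' the box kernel is truncated at the histogram edges,
# so the integral is only approximately conserved there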
for i, b in enumerate(hsrc):
htrgt[i].count = smoothed[i]
######
open(target_file, 'w').write(str(htrgt))
return True
for FC_det in FC_detectors:
@@ -97,7 +107,7 @@ def task_interpolate_perkeV():
hinterpolator = interp1d(*hsrc.xy(),
kind='quadratic',
fill_value="extrapolate")
for i, b in enumerate(htrgt):
htrgt.bins[i].count = max(0, hinterpolator(b.center()))
# restoring sum
@@ -136,7 +146,7 @@ def task_divide_per_sigmanf():
eval_db['funcs']))['pts']
evalinterpolator = interp1d(list([pts['x']*1000. for pts in the_eval_pts]),
list([pts['y']*1.0 for pts in the_eval_pts]),
kind='linear',
kind='linear', #'linear'
fill_value="extrapolate")
for i, b in enumerate(hsrc):
@@ -160,4 +170,81 @@ def task_divide_per_sigmanf():
],
}
# LAST TASK : multiply by all the characteristics of the target : mass, target density, ...
\ No newline at end of file
# LAST TASK : multiply by all the characteristics of the target : mass, target density, ...
def task_scale_by_physics():
'''multiply by air loss, efficiency, mass of FC deposit...'''
def scale_by_physics(source_file: str,
target_file: str,
factors_yaml: str,
this_FC: str
) -> bool:
hsrc = Histogram(fromfile=source_file)
htrgt = hsrc.copy()
# getting the factors
factors = yaml.load(open(factors_yaml), Loader=yaml.Loader)
efficiency = factors['FC_EFF'][this_FC]
density = factors['FC_DENSITY'][this_FC]
air_loss = factors['AIR_LOSS']
htrgt *= (1. - air_loss) / density / efficiency
open(target_file, 'w').write(str(htrgt))
return True
for FC_det in FC_detectors:
source_file = pathlib.Path(f'./output/flux/{FC_det}_nevents.inenergy_perkeV_interp_divpersigma.h1.txt')
target_file = pathlib.Path(f'./output/flux/{FC_det}_flux.h1.txt')
yield {
'name': FC_det,
'file_dep': [source_file, pathlib.Path('./etc/fc_flux_factors.yaml')],
'targets': [target_file],
'actions': [
(scale_by_physics, None, {'source_file': source_file,
'target_file': target_file,
'factors_yaml': './etc/fc_flux_factors.yaml',
'this_FC': FC_det,
}),
f"./env.run scripts/Draw1Dh.py --yscale=log --xscale=log {target_file}"
],
}
def task_smooth_flux():
'''smooth the spectra for better use'''
def smooth_flux(source_file: str,
target_file: str,
smooth_yaml: str,
) -> bool:
hsrc = Histogram(fromfile=source_file)
Soriginal = sum(hsrc)
htrgt = hsrc.copy()
# getting the smoothing parameter
smooth_factor = yaml.load(open(smooth_yaml), Loader=yaml.Loader).get('smoothing', 0)
if smooth_factor > 0:
unsmoothed = list(hsrc.xy())[1]
kernel = np.ones(smooth_factor) / smooth_factor
smoothed = np.convolve(unsmoothed, kernel, mode='same')
for i, b in enumerate(htrgt.bins):
b.count = smoothed[i]
htrgt *= Soriginal / sum(htrgt)
open(target_file, 'w').write(str(htrgt))
return True
for FC_det in FC_detectors:
source_file = pathlib.Path(f'./output/flux/{FC_det}_flux.h1.txt')
target_file = pathlib.Path(f'./output/flux/{FC_det}_flux_smooth.h1.txt')
yield {
'name': FC_det,
'file_dep': [source_file, pathlib.Path('./etc/fc_flux_spect.yaml')],
'targets': [target_file],
'actions': [
(smooth_flux, None, {'source_file': source_file,
'target_file': target_file,
'smooth_yaml': './etc/fc_flux_spect.yaml',
}),
f"./env.run scripts/Draw1Dh.py --yscale=log --xscale=log {target_file}"
],
}
\ No newline at end of file
'''
This file contains the tasks computing, smoothing, and energy-converting the Ge pile-up rate
'''
from doit.tools import create_folder
import yaml
import pathlib
from itertools import product, chain
from random import uniform as rand_uniform
from pyhisto.histogram import Histogram
the_mapping = yaml.load(open('etc/mapping.yaml'),
Loader=yaml.Loader)
detectors = the_mapping['detectors']
FC_detectors = list(filter(lambda x:x.startswith('U'), detectors))
Ge_detectors = list(filter(lambda x: not x.startswith('U'), detectors))
validated_data_weeks = yaml.load(open('data/validated_data_list.yaml'),
Loader=yaml.Loader)['data_weeks']
def task_Ge_get_pileup_rate():
'''Get the pile up rate (ie. Npileup/Nclean) for Ge'''
def get_pileup_rate(src_all, src_clean, targetf):
'''compute the pileup rate and write it as yaml into targetdir'''
# _all
h_all = Histogram(fromfile=src_all)
# _clean
h_clean = Histogram(fromfile=src_clean)
#
h_pu = h_clean.empty_copy()
for i, b in enumerate(h_pu):
# guard against empty _clean bins to avoid a ZeroDivisionError
b.count = max((h_all[i].count / h_clean[i].count) - 1., 0) if h_clean[i].count else 0.0
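# e.g. 1050 counts in _all against 1000 in _clean gives a 5% pile-up rate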
#
open(targetf, 'w').write(str(h_pu))
return True
for ge_det in Ge_detectors:
source_all = pathlib.Path(f'./output/calibrated_2D/{ge_det}_merged.h2_all.txt_px.txt')
source_clean = pathlib.Path(f'./output/calibrated_2D/{ge_det}_merged.h2_clean.txt_px.txt')
target_dir = f'./output/Ge_pileup/'
target_file = f'{target_dir}/{ge_det}.pileup.h1.txt'
yield {
'name': ge_det,
'actions': [(create_folder, (target_dir,)),
(get_pileup_rate, None, {'src_all': source_all,
'src_clean': source_clean,
'targetf': target_file}),
f"./env.run scripts/Draw1Dh.py {target_file}"
],
'targets': [target_file],
'file_dep': [source_all, source_clean],
}
def task_smooth_ge_pileup_rate():
'''smooth the pile-up rate'''
for ge_det in Ge_detectors:
source = pathlib.Path(f'./output/Ge_pileup/{ge_det}.pileup.h1.txt')
target_dir = f'./output/Ge_pileup/'
target_file = f'{target_dir}/{ge_det}.pileup_smooth.h1.txt'
smoothing = yaml.load(open('etc/ge_count_smoothing.yaml'),
Loader=yaml.Loader)['side']
yield {
'name': ge_det,
'actions': [(create_folder, (target_dir,)),
f"./env.run scripts/smooth_1D.py --side={smoothing} {source} > {target_file}",
f"./env.run scripts/Draw1Dh.py {target_file}"
],
'targets': [target_file],
'file_dep': [source, 'etc/ge_count_smoothing.yaml'],
}
def task_pu_time_to_energy():
'''convert a histogram in time into a histogram in energy'''
for ge_det in Ge_detectors:
target_dir = f'./output/Ge_pileup/'
source_file = pathlib.Path(f"{target_dir}/{ge_det}.pileup_smooth.h1.txt")
dest_file = f"{target_dir}/{ge_det}.pileup_smooth_perenergy.h1.txt"
t2E_params = yaml.load(open("etc/ge_time_to_energy.yaml"), Loader=yaml.Loader)
distance_of_flight = t2E_params['distance_of_flight']
tmin, tmax = t2E_params['tof_limits']
yield {
'name': ge_det,
'file_dep': [source_file, "etc/ge_time_to_energy.yaml"],
'targets': [dest_file],
'actions': [
f"./env.run scripts/time_to_energy_1D.py --dof={distance_of_flight} --emin=1.0 --tmin={tmin} --tmax={tmax} {source_file} > {dest_file}",
f"./env.run scripts/Draw1Dh.py {dest_file}"
]
}
'''
This file integrates the flux over the transitions' energy windows and divides the gamma counts by the flux
'''
from doit.tools import create_folder
from doit.action import CmdAction
import yaml
import pathlib
from itertools import product, chain
from pyhisto import Histogram
#
# Loading the mapping
#
the_mapping = yaml.load(open('etc/mapping.yaml'),
Loader=yaml.Loader)
detectors = the_mapping['detectors']
FC_detectors = list(filter(lambda x:x.startswith('U'), detectors))
Ge_detectors = list(filter(lambda x: not x.startswith('U'), detectors))
the_transitions = yaml.load(open('etc/transitions_to_look_at.yaml'),
Loader=yaml.Loader)
def task_integrate_flux_over_windows():
'''integrate the flux over the energy windows given'''
for this_transition in the_transitions.keys():
for this_FC in FC_detectors:
flux_file = f'./output/flux/{this_FC}_flux_smooth.h1.txt'
dest_file = f"./output/transitions/{this_transition}/{this_FC}_windflux.h1.txt"
en_windows = " ".join(map(str, the_transitions[this_transition]['En limits']))
yield {
'name': f"{this_transition}.{this_FC}",
'file_dep': [flux_file],
'targets': [dest_file],
'actions': [
(create_folder, (f"./output/transitions/{this_transition}/",)),
f"./env.run scripts/integrate_over_windows.py --source={flux_file} --windows {en_windows} > {dest_file}",
f"./env.run scripts/Draw1Dh.py {dest_file}"
]
}
def no_task_divide_gcount_per_flux():
'''divide the gamma count histograms by the integrated flux'''
for this_transition in the_transitions.keys():
for this_FC in FC_detectors:
for ge_det in Ge_detectors:
for flavor in ('puscu',):
flux_file = f'./output/flux/{this_FC}_flux_smooth.h1.txt'
gammacount_file = f"./output/transitions/{this_transition}/{ge_det}_{flavor}.gamcount_perenergy.h1.txt"
dest_file = f"./output/transitions/{this_transition}/{ge_det}_{this_FC}.h1.txt"
yield {
'name': f"{this_transition}.{this_FC}.{get_det}.{flavor}",
'file_dep': [flux_file, gammacount_file],
'targets': [dest_file],
'actions': [
# placeholder actions inherited from task_gcount_time_to_energy;
# source_file and the command are stale and must be rewritten before enabling
CmdAction((_gcount_time_to_energy_cmd,
[], {
'source_h': source_file,
't2E_param_file': "etc/ge_time_to_energy.yaml",
'target_h': dest_file
})),
f"./env.run scripts/Draw1Dh.py {dest_file}"
]
}
def task_draw_1D():
'''draw all the h1'''
for one_histo in pathlib.Path().glob('output/transitions/*/*h1.txt'):
yield {
'name': str(one_histo),
'file_dep': [one_histo],
'actions':[
f"./env.run scripts/Draw1Dh.py {str(one_histo)}"
],
'targets': [f"{str(one_histo)}.png"]
}
'''
Testing adaptive fit routine
'''
import os
import numpy as np
from pyhisto.histogram import Histogram
# first test : one peak, small noise
N_noise: int = 100
N_peak: int = 1000
peak_x: float = 50.
peak_width: float = 2.35482
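# 2.35482 = 2*sqrt(2*ln 2): the FWHM of a unit-sigma Gaussian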
h1 = Histogram(nbins=100, xmin=0.0, xmax=100.)
# ... generating noise
for x in np.random.uniform(h1.xmin, h1.xmax, size=N_noise):
h1.fast_fill(x)
# ... generating peak
for x in np.random.normal(peak_x, peak_width, size=N_peak):
h1.fast_fill(x)
# ... writing file
with open("test_adaptive.h1.txt", 'w') as hfile:
hfile.write(str(h1))
cmd = "source `hostname`_env/bin/activate ; export PYTHONPATH=./lib:./scripts ; python ./scripts/adaptive_fit.py --xmin=20. --xmax=75. --histo_file=test_adaptive.h1.txt --plot=noparasites.png"
os.system(cmd)
# second test : one large peak, one parasite
N_noise: int = 100
N_peak: int = 1000
peak_x: float = 50.
peak_width: float = 2.35482
N_para: int = 200
para_x: float = 55.
h1 = Histogram(nbins=100, xmin=0.0, xmax=100.)
# ... generating noise
for x in np.random.uniform(h1.xmin, h1.xmax, size=N_noise):
h1.fast_fill(x)
# ... generating peak
for x in np.random.normal(peak_x, peak_width, size=N_peak):
h1.fast_fill(x)
# ... generating parasite
for x in np.random.normal(para_x, peak_width, size=N_para):
h1.fast_fill(x)
# ... writing file
with open("test_adaptive.h1.txt", 'w') as hfile:
hfile.write(str(h1))
cmd = "source `hostname`_env/bin/activate ; export PYTHONPATH=./lib:./scripts ; python ./scripts/adaptive_fit.py --xpeak=50. --xmin=20. --xmax=75. --guessstdev=2. --bg_type=const --parasites=55 --histo_file=test_adaptive.h1.txt --plot=oneparasites.png"
os.system(cmd)
# third test : one large peak, two parasites
N_noise: int = 150
N_peak: int = 1000
peak_x: float = 50.
peak_width: float = 2.35482
N_para: int = 200
para_x: float = 55.
h1 = Histogram(nbins=200, xmin=0.0, xmax=100.)
# ... generating noise
for x in np.random.uniform(h1.xmin, h1.xmax, size=N_noise):
h1.fast_fill(x)
# ... generating peak
for x in np.random.normal(peak_x, peak_width, size=N_peak):
h1.fast_fill(x)
# ... generating parasite
for x in np.random.normal(para_x, peak_width, size=N_para):
h1.fast_fill(x)
# ... ... second parasite
N_para: int = 100
para_x: float = 40.
for x in np.random.normal(para_x, peak_width, size=N_para):
h1.fast_fill(x)
# ... writing file
with open("test_adaptive.h1.txt", 'w') as hfile:
hfile.write(str(h1))
cmd = "source `hostname`_env/bin/activate ; export PYTHONPATH=./lib:./scripts ; python ./scripts/adaptive_fit.py --xpeak=50. --xmin=20. --xmax=75. --guessstdev=2. --bg_type=lin --parasites 55.0 40.0 --histo_file=test_adaptive.h1.txt --plot=twoparasites.png"
os.system(cmd)
# final test : one large peak, many small parasites
N_noise: int = 100
N_peak: int = 1000
peak_x: float = 50.
peak_width: float = 2.35482
h1 = Histogram(nbins=100, xmin=0.0, xmax=100.)
# ... generating noise
for x in np.random.uniform(h1.xmin, h1.xmax, size=N_noise):
h1.fast_fill(x)
# ... generating peak
for x in np.random.normal(peak_x, peak_width, size=N_peak):
h1.fast_fill(x)
# ... generating parasite
N_para: int = 20
para_x: float = 55.
for x in np.random.normal(para_x, peak_width, size=N_para):
h1.fast_fill(x)
# ... ... second parasite
N_para: int = 50
para_x: float = 40.
for x in np.random.normal(para_x, peak_width, size=N_para):
h1.fast_fill(x)
# ... ... third parasite
N_para: int = 30
para_x: float = 58.
for x in np.random.normal(para_x, peak_width, size=N_para):
h1.fast_fill(x)
# ... ... fourth parasite
N_para: int = 25
para_x: float = 65.
for x in np.random.normal(para_x, peak_width, size=N_para):
h1.fast_fill(x)
# ... writing file
with open("test_adaptive.h1.txt", 'w') as hfile:
hfile.write(str(h1))
cmd = "source `hostname`_env/bin/activate ; export PYTHONPATH=./lib:./scripts ; python ./scripts/adaptive_fit.py --xpeak=50. --xmin=20. --xmax=75. --guessstdev=2. --bg_type=lin --parasites 55.0 40.0 58. 65. --histo_file=test_adaptive.h1.txt --plot=manyparasites.png"
os.system(cmd)
# eof
\ No newline at end of file
# File to test the smoothing of 1d histograms
from pyhisto import Histogram
from smooth_1D import smooth_1D
import numpy as np
if __name__ == "__main__":
# Building a test histogram
h1 = Histogram(nbins=10)
for x in np.random.uniform(0,10, size=10):
h1.fill(x)
open('tests/test_nonsmooth.txt', 'w').write(str(h1))
hsmooth = smooth_1D(h1, 1)
open('tests/test_smoothed.txt', 'w').write(str(hsmooth))
hsmooth2 = smooth_1D(h1, 2)
open('tests/test_smoothed2.txt', 'w').write(str(hsmooth2))
hsmooth3 = smooth_1D(h1, 3)
open('tests/test_smoothed3.txt', 'w').write(str(hsmooth3))
print(sum(h1), sum(hsmooth), sum(hsmooth2), sum(hsmooth3))
\ No newline at end of file