Skip to content
Snippets Groups Projects
Commit 100989df authored by Greg Henning's avatar Greg Henning
Browse files

(milestone) ready for monte carlo iterations

parent eecdeadc
Branches
Tags
No related merge requests found
Showing
with 140 additions and 70 deletions
MC config:
UUID: '000000'
style: center
when: '2024-01-27 15:35:54.582813'
U3O8: 0.0
UF4: 0.0
bleu: 0.0
gris: 0.0
pm_U3O8: 1.0
pm_UF4: 1.0
UUID: b6c84d
style: mc
when: '2024-02-02 07:24:26.599547'
U3O8: -0.07206028767999986
UF4: -0.026505841150632217
bleu: -4.237939034819274
gris: 8.591494689546018
pm_U3O8: 0.1
pm_UF4: 0.1
pm_bleu: 10.0
pm_gris: 10.0
pm_rouge: 10.0
pm_vert: 10.0
rouge: 0.0
small_time_jitter: 1.0
rouge: 0.19981474452169223
small_time_jitter: 0.1
src_U3O8: 0.0
src_UF4: 0.0
src_bleu: 0.0
src_gris: 0.0
src_rouge: 0.0
src_vert: 0.0
time_jitter: 10.0
vert: 0.0
vert: 4.632546185041605
FC_DOF:
U3O8: 27.3981
UF4: 27.285
U3O8: 27.39609879404094
UF4: 27.29551493294423
src_U3O8: 27.3981
src_UF4: 27.285
u_U3O8: 0.005
u_UF4: 0.005
MC config:
UUID: '000000'
style: center
when: '2024-01-27 15:35:54.582813'
UUID: b6c84d
style: mc
when: '2024-02-02 07:24:26.599547'
SPEED_OF_LIGHT: 0.299792458
TNT_TS_STEP: 10.0
uncert_FC_DOF:
U3O8: 0.005
UF4: 0.005
......@@ -3,14 +3,13 @@ FC_DENSITY:
U3O8: 9.912e-07
UF4: 8.2978e-07
FC_EFF:
U3O8: 0.776
UF4: 0.944
U3O8: 0.7808238101057202
UF4: 0.9391766127324924
src_U3O8: 0.776
src_UF4: 0.944
u_U3O8: 0.04
u_UF4: 0.021
MC config:
UUID: '000000'
style: center
when: '2024-01-27 15:35:54.582813'
u_FC_EFF:
U3O8: 0.04
UF4: 0.021
UUID: b6c84d
style: mc
when: '2024-02-02 07:24:26.599547'
min_neutron_energy: 20.0
min_neutron_energy: 10.0
max_neutron_energy: 27000.0
number_of_bins: 26990
#nf_eval: 'ENDF/B-VIII.0: U-235(N,F)'
nf_eval: 'ENDF/B-VII.1: U-235(N,F)'
nf_eval: 'ENDF/B-VIII.0: U-235(N,F)'
#nf_eval: 'ENDF/B-VII.1: U-235(N,F)'
smoothing: 25
\ No newline at end of file
GE_DOF: 28.8168
GE_DOF: 28.818582266009173
MC config:
UUID: '000000'
style: center
when: '2024-01-27 15:35:54.582813'
UUID: b6c84d
style: mc
when: '2024-02-02 07:24:26.599547'
src_GE_DOF: 28.8168
u_GE_DOF: 0.005
......@@ -2,7 +2,7 @@
time_jitter: &B 10.0 # ns
small_time_jitter: &A 1.0 # nanoseconds
small_time_jitter: &A 0.1 # nanoseconds
U3O8: 0.0
pm_U3O8: *A
......
# Reference file
uncert_FC_DOF:
U3O8: &A 0.005
UF4: &B 0.005
FC_DOF:
U3O8: 27.3981
u_U3O8: *A
u_U3O8: 0.005
UF4: 27.285
u_UF4: *B
u_UF4: 0.005
SPEED_OF_LIGHT: 0.299792458 # meters per nanoseconds
......
u_FC_EFF:
U3O8: &A 0.04
UF4: &B 0.021
FC_EFF:
U3O8: 0.776
u_U3O8: *A
u_U3O8: 0.04
UF4: 0.944
u_UF4: *B
u_UF4: 0.021
# Density in atoms per barns
FC_DENSITY:
......
MC config:
UUID: '000000'
style: center
when: '2024-01-27 15:35:54.582813'
neutron loss: 0.018
UUID: b6c84d
style: mc
when: '2024-02-02 07:24:26.599547'
neutron loss: 0.0180423773551037
neutron loss uncertainty: 0.001
src_neutron loss: 0.018
u_neutron loss: 0.001
MC config:
UUID: '000000'
style: center
when: '2024-01-27 15:35:54.582813'
UUID: b6c84d
style: mc
when: '2024-02-02 07:24:26.599547'
isotope of interest: 183W
nuclei density: 0.00312079131
nuclei density: 0.003206273965277562
nuclei density uncertainty: 0.00031066
nuclei density unit: nuclei par barn
src_nuclei density: 0.00312079131
u_nuclei density: 0.00031066
#!/bin/bash
#SBATCH --ntasks 4
#SBATCH --mem 12000
#SBATCH --time 0-20:00:00
#SBATCH --mem 10000
#SBATCH --time 0-05:00:00
# For MC jobs, the time and memory requirements have been scaled
# down (20 h -> 5 h, 12 GB -> 10 GB) to make resource allocation easier
# #SBATCH --mail-user=ghenning@iphc.cnrs.fr
# #SBATCH --mail-type=END,FAIL
export LC_ALL="en_US.UTF-8"
echo "# [STARTING]"
echo "# [STARTING] job: $SLURM_JOB_ID "
date
echo "# Setting up the environnement"
......@@ -27,12 +29,14 @@ python --version
echo -e "-*-..........-*-\n"
echo $TMPDIR
# echo $TMPDIR
echo "# building C++ tool for filling into 2D histograms"
pwd && cd src/fill_into_h2/ && make binaries && cp bin/fill_into_h2.`hostname` ../../bin/ && cd ../.. && pwd
echo "# ... done"
echo "# Running tasks"
./run_task tasks/do_start_new_iter.py iter=center
./run_task tasks/do_start_new_iter.py iter=mc
echo -n "# Iteration UUID: "
cat etc/uuid.txt
#echo "#... export efficiencies"
......@@ -48,8 +52,18 @@ cat etc/uuid.txt
# echo "#... FC cut 2D and project on time"
# ./run_task tasks/do_fc_cut_and_project.py
echo "#... FC -> neutron flux [MC]"
echo " XXXXXXXXX SUPRESSED FOR NOW XXXXXXXXXXXXX"
#./run_task tasks/do_fc_to_flux.py
./run_task tasks/do_fc_to_flux.py
if [ $? != 0 ]; then
echo "# FC to flux task failed -- that happens sometime (I/O issue ?)";
echo "# RETRYING"
./run_task tasks/do_fc_to_flux.py
if [ $? != 0 ]; then
echo "# FC to flux task failed again";
echo "# Aborting"
cat etc/uuid.txt >> failed.txt
exit 1
fi
fi
# echo "#... ge_time_align"
# ./run_task tasks/do_ge_timealign.py
# echo "#... ge_calibrate"
......@@ -73,6 +87,7 @@ rm -rf `hostname`_env
echo "... done"
date
echo "# [END] job $SLURM_JOB_ID "
echo "..."
echo " .."
echo " ."
# Starting N jobs in chain.
#
# Usage: <this_script> <job_script> <N>
#   $1 -- the batch script to submit
#   $2 -- how many times to submit it
#
# Each submission after the first carries --dependency=afterok:<prev>,
# so the N jobs run strictly one after the other and stop if one fails.
job_to_run="$1"
Njobs="$2"
# rm chained_jobs.txt
echo "# == Chained Jobs =="
echo "# Will run $job_to_run $Njobs times one after the other."
# --parsable makes sbatch print only the job id, which we capture for
# the dependency chain.  Quote "$job_to_run" so paths with spaces work.
prev_job=$(sbatch --parsable "$job_to_run")
echo -n "# [submitting job] $job_to_run ($prev_job) : "
date
# Submit the remaining N-1 jobs, each gated on success of the previous one.
for i in $(seq $((Njobs-1))); do
	prev_job=$(sbatch --parsable --dependency=afterok:$prev_job "$job_to_run")
	echo -n "# [submitting job] $job_to_run ($prev_job) : "
	date
done
......@@ -34,7 +34,7 @@ def task_collect_flux():
'targets': [this_target_file],
'actions': [
(create_folder, (target_dir,)),
f"cp -rv {this_flux_file} {this_target_file}",
f"cp {this_flux_file} {this_target_file}",
f"./env.run scripts/Draw1Dh.py {this_target_file}",
]
}
......@@ -56,8 +56,8 @@ def task_collect_integral_xs():
'targets': [trgt_file, trgt_uncert_file],
'actions': [
(create_folder, (target_dir,)),
f"cp -rv {src_file} {trgt_file}",
f"cp -rv {src_uncert_file} {trgt_uncert_file}",
f"cp {src_file} {trgt_file}",
f"cp {src_uncert_file} {trgt_uncert_file}",
f"./env.run scripts/Draw1Dh.py {trgt_file}",
]
}
......@@ -73,7 +73,7 @@ def task_collect_etc():
'file_dep': ['etc/mc_source_files.yaml'],
'targets': [target_file],
'actions':[
f"cp -rv etc/{src_file} {target_file}",
f"cp etc/{src_file} {target_file}",
],
}
# EOF
......@@ -365,6 +365,38 @@ def task_divide_by_efficiency():
],
}
return None
def task_multiply_err_by_mu():
    '''Correct gamma-count error histograms for sum consistency.

    For every (transition, Ge detector) pair, read the fitted ``mu``
    factor from the fit checksum file (YAML content despite its ``.txt``
    extension) and scale the efficiency-corrected error histogram by
    ``1/mu``, writing the result to a ``.mucorr`` file and drawing it.

    Yields:
        doit task dicts, one per (transition, detector) pair.
    '''
    def _multiply_err_by_mu(source_err_file: str,
                            mu_file: str,
                            dest_err_file: str) -> str:
        '''Build the divide1D shell command; mu is read at action run time.'''
        # 'with' closes the handle (the original leaked an open file).
        # NOTE(review): yaml.Loader can construct arbitrary objects; the
        # mu_file is project-generated, so acceptable -- revisit if these
        # inputs ever become untrusted.
        with open(mu_file) as fh:
            mu_doc = yaml.load(fh, Loader=yaml.Loader)
        # Clamp to >= 1.0 so the scaling never inflates the errors.
        mu = max(1.0, mu_doc.get('mu', 1.0))
        return (f"./env.run scripts/divide1D.py --scale={1.0/mu} "
                f"{source_err_file} > {dest_err_file}")

    for this_transition in the_transitions.keys():
        for ge_det in Ge_detectors:
            flavor = 'clean'
            source_err_file = f"./output/transitions/{this_transition}/{ge_det}_{flavor}.gcount_err.puscu.effcorr.txt"
            dest_err_file = f"./output/transitions/{this_transition}/{ge_det}_{flavor}.gcount_err.puscu.effcorr.mucorr.txt"
            # file is actually yaml
            mu_file = f"./output/transitions/{this_transition}/{ge_det}_{flavor}.projEg_and_fits.checksum.txt"
            yield {
                'name': dest_err_file,
                'file_dep': [source_err_file,
                             mu_file],
                'targets': [dest_err_file],
                'actions': [
                    (create_folder, (f"./output/transitions/{this_transition}/",)),
                    # CmdAction defers command construction until the
                    # dependency (mu_file) is guaranteed to exist.
                    CmdAction((_multiply_err_by_mu,
                               [], {
                                   'source_err_file': source_err_file,
                                   'mu_file': mu_file,
                                   'dest_err_file': dest_err_file,
                               })),
                    f"./env.run scripts/Draw1Dh.py {dest_err_file}",
                ],
            }
    return None
# def task_project_transitions_signal():
# '''project transitions bidim according to signal'''
......
......@@ -86,6 +86,7 @@ def task_FC_spect_to_perkeV():
yield {
'name': f"{FC_det} {source_file} -> {target_file}",
'file_dep': [source_file],
'task_dep':['FC_time_to_en'],
'targets': [target_file],
'actions': [
(spect_to_perkeV, None, {'source_file': source_file,
......@@ -109,7 +110,7 @@ def task_interpolate_perkeV():
# interpolating
hinterpolator = interp1d(*hsrc.xy(),
kind='quadratic',
fill_value=1.0e-30)
fill_value=1.0e-30, bounds_error=False)
for i, b in enumerate(htrgt):
htrgt.bins[i].count = max(0, hinterpolator(b.center()))
......@@ -150,10 +151,13 @@ def task_divide_per_sigmanf():
evalinterpolator = interp1d(list([pts['x']*1000. for pts in the_eval_pts]),
list([pts['y']*1.0 for pts in the_eval_pts]),
kind='linear', #'linear'
fill_value="extrapolate")
fill_value='extrapolate', bounds_error=False)
for i, b in enumerate(hsrc):
htrgt.bins[i].count = b.count / evalinterpolator(b.center())
if evalinterpolator(b.center()) >= 0:
htrgt.bins[i].count = b.count / evalinterpolator(b.center())
else:
htrgt.bins[i].count = 0.
open(target_file, 'w').write(str(htrgt))
return True
......
......@@ -45,7 +45,7 @@ def task_gcount_time_to_energy():
for flavor in ('clean',):
source_file = f"./output/transitions/{this_transition}/{ge_det}_{flavor}.gcount.puscu.effcorr.txt"
dest_file = f"./output/transitions/{this_transition}/{ge_det}_{flavor}.gcount.puscu.effcorr.perenergy.txt"
source_err_file = f"./output/transitions/{this_transition}/{ge_det}_{flavor}.gcount_err.puscu.effcorr.txt"
source_err_file = f"./output/transitions/{this_transition}/{ge_det}_{flavor}.gcount_err.puscu.effcorr.mucorr.txt"
dest_err_file = f"./output/transitions/{this_transition}/{ge_det}_{flavor}.gcount_err.puscu.effcorr.perenergy.txt"
yield {
'name': source_file,
......
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment