Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
63 changes: 9 additions & 54 deletions neucbot.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,37 +32,6 @@ class constants:
def isoDir(ele, A):
    """Return the data directory path for isotope *A* of element *ele*.

    Example: isoDir('rn', 220) -> './Data/Isotopes/Rn/Rn220/'
    """
    symbol = ele.capitalize()
    return f'./Data/Isotopes/{symbol}/{symbol}{int(A)}/'

def parseIsotope(iso):
    """Split an isotope label like 'U238' into ['U', 238].

    Letters anywhere in the string form the element symbol, and digits
    (in order of appearance) form the mass number; any other characters
    are ignored.  A label with no digits yields mass number 0.
    """
    symbol = ''.join(ch for ch in iso if ch.isalpha())
    digits = ''.join(ch for ch in iso if ch.isdigit())
    mass_number = int(digits) if digits else 0
    return [symbol, mass_number]

def loadChainAlphaList(fname):
    """Build one combined alpha list for a whole decay chain.

    Each non-comment line of *fname* holds an isotope label (e.g. "U238")
    followed by its branching ratio in percent.  Every isotope's alpha
    list is loaded (or fetched) and each intensity is scaled by the
    branching fraction before being merged into a single flat list.

    Returns a list of [energy, intensity] pairs.
    """
    # Use a context manager so the chain file is always closed
    # (the original left the handle open).
    with open(fname) as f:
        tokens = [line.split() for line in f]
    alpha_list = []
    for line in tokens:
        # Skip blank/short lines and comment lines starting with '#'
        if len(line) < 2 or line[0][0] == '#':
            continue
        # Read isotope and its branching ratio from file
        iso = line[0]
        br = float(line[1])
        [ele, A] = parseIsotope(iso)

        # Now get the isotope's alpha list and add it to the chain's list
        aList_forIso = alpha.AlphaList(ele, A).load_or_fetch()
        if constants.print_alphas:
            print(iso, file = constants.ofile)
            print('\t', aList_forIso, file = constants.ofile)
        # br is given in percent, hence the division by 100
        for [ene, intensity] in aList_forIso:
            alpha_list.append([ene, intensity*br/100])
    return alpha_list

def runTALYS(e_a, ele, A):
iso = str(ele)+str(int(A))
inpdir = isoDir(ele,A) + 'TalysInputs/'
Expand Down Expand Up @@ -242,30 +211,14 @@ def readTotalNXsect(e_a,ele,A):
#print("sigma = " , sigma)
return sigma

def condense_alpha_list(alpha_list, alpha_step_size):
    """Build a reverse cumulative-intensity table from [energy, intensity] pairs.

    Starting at the highest alpha energy (rounded half-up to 2 decimal
    places) and stepping down toward 0 in increments of alpha_step_size,
    each row holds the summed intensity of all alphas whose rounded
    energy is at or above that step.  The first two rows intentionally
    duplicate the maximum energy: the table opens with the peak alpha's
    own intensity, then the cumulative value at that same energy.
    """
    def _round2(value):
        # Half-up rounding to 2 decimal places, as used throughout the file.
        return int(value * 100 + 0.5) / 100.

    peak = max(alpha_list)
    energy = _round2(peak[0])
    cdf = [[energy, peak[1]]]
    while energy > 0:
        total = sum(intensity for [ene, intensity] in alpha_list
                    if _round2(ene) >= energy)
        cdf.append([energy, total])
        energy -= alpha_step_size
    return cdf

def run_alpha(alpha_list, mat_comp, e_alpha_step):
binsize = 0.1 # Bin size for output spectrum

spec_tot = {}
xsects = {}
total_xsect = 0
counter = 0
alpha_ene_cdf = condense_alpha_list(alpha_list,e_alpha_step)
alpha_ene_cdf = alpha_list.condense(e_alpha_step)
for [e_a, intensity] in alpha_ene_cdf:
counter += 1
if counter % (int(len(alpha_ene_cdf)/100)) == 0:
Expand Down Expand Up @@ -335,10 +288,12 @@ def main():
alphalist_file = sys.argv[sys.argv.index(arg)+1]
print('load alpha list', alphalist_file, file = sys.stdout)
alpha_list = alpha.AlphaList.from_filepath(alphalist_file)
alpha_list.load_or_fetch()
if arg == '-c':
chain_file = sys.argv[sys.argv.index(arg)+1]
print('load alpha chain', chain_file, file = sys.stdout)
alpha_list = loadChainAlphaList(chain_file)
alpha_list = alpha.ChainAlphaList.from_filepath(chain_file)
alpha_list.load_or_fetch()
if arg == '-m':
mat_file = sys.argv[sys.argv.index(arg)+1]
print('load target material', mat_file, file = sys.stdout)
Expand Down Expand Up @@ -374,18 +329,18 @@ def main():
constants.ofile = open(ofile,'w')
#sys.stdout = open(ofile,'w')

if len(alpha_list) == 0 or mat_comp.empty():
if len(alpha_list)==0: print('No alpha list or chain specified', file = sys.stdout)
if len(alpha_list.alphas) == 0 or mat_comp.empty():
if len(alpha_list.alphas)==0: print('No alpha list or chain specified', file = sys.stdout)
if mat_comp.empty(): print('No target material specified', file = sys.stdout)
print('', file = sys.stdout)
help_message()
return 0

if constants.print_alphas:
print('Alpha List: ', file = sys.stdout)
print(max(alpha_list), file = sys.stdout)
condense_alpha_list(alpha_list,alpha_step_size)
for alph in alpha_list:
print(max(alpha_list.alphas), file = sys.stdout)
condensed = alpha_list.condense(alpha_step_size)
for alph in condensed:
print(alph[0],'&', alph[1],'\\\\', file = sys.stdout)

if constants.download_data:
Expand Down
150 changes: 128 additions & 22 deletions neucbot/alpha.py
Original file line number Diff line number Diff line change
@@ -1,38 +1,35 @@
import os
from neucbot import ensdf
import re
import numpy
from operator import itemgetter

ALPHA_LIST_DIR = "./AlphaLists"


def _alphas_from_file_path(file_path):
file = open(file_path)

# Parse alphalist files:
# 1. Only parse lines that have 2+ tab-separated tokens
# 2. Ignore any lines starting with "#"
# 3. Return list of lists (where each sublist is a list of floats)
alphas = [
[float(token) for token in line.split()] # Parse each token as float
for line in file.readlines() # for each line in file
if line[0] != "#"
and len(line.split()) >= 2 # except for lines matching these conditions
]
from neucbot import ensdf, utils

file.close()

return alphas
ALPHA_LIST_DIR = "./AlphaLists"
CHAIN_LIST_DIR = "./Chains"


class AlphaList:
ALPHA_LIST_FILE_PATTERN = re.compile(
r"^AlphaLists\/(?P<element>[A-Za-z]{1,2})(?P<isotope>\d{1,3})Alphas.dat"
)

    def __init__(self, element, isotope):
        """Alpha list for one isotope, backed by a file under ALPHA_LIST_DIR.

        element: element symbol as used in file names, e.g. "Rn".
        isotope: mass number; interpolated verbatim into the file name,
            so str and int both work.
        """
        self.element = element
        self.isotope = isotope
        # On-disk location of this isotope's alpha list.
        self.file_path = f"{ALPHA_LIST_DIR}/{self.element}{self.isotope}Alphas.dat"
        # Retry budget — presumably consumed by load_or_fetch() when the file
        # is missing; that method is not fully visible here (TODO confirm).
        self.fetch_attempts = 3
        # Parsed [energy, intensity] rows; populated by load().
        self.alphas = []

@classmethod
def from_filepath(cls, file_path):
return _alphas_from_file_path(file_path)
if alpha_file_match := cls.ALPHA_LIST_FILE_PATTERN.match(file_path):
element = alpha_file_match.group("element")
isotope = alpha_file_match.group("isotope")

return cls(element, isotope)
else:
raise RuntimeError(f"Invalid file path for alphalist {file_path}")

def load_or_fetch(self):
while not os.path.isfile(self.file_path):
Expand All @@ -44,7 +41,27 @@ def load_or_fetch(self):
return self.load()

def load(self):
return _alphas_from_file_path(self.file_path)
file = open(self.file_path)

# Parse alphalist files:
# 1. Only parse lines that have 2+ tab-separated tokens
# 2. Ignore any lines starting with "#"
# 3. Return list of lists (where each sublist is a list of floats)
self.alphas = [
[float(token) for token in line.split()] # Parse each token as float
for line in file.readlines() # for each line in file
if line[0] != "#"
and len(line.split()) >= 2 # except for lines matching these conditions
]

file.close()

return self.alphas

    def scale_by(self, branch_fraction):
        """Multiply every alpha intensity by *branch_fraction*, in place.

        Energies are left untouched.  Used to fold a decay-chain
        branching fraction into this isotope's intensities.
        """
        self.alphas = [
            [energy, intensity * branch_fraction] for [energy, intensity] in self.alphas
        ]

def write(self):
if os.path.exists(self.file_path):
Expand All @@ -61,3 +78,92 @@ def write(self):
file.close()

return True

    # The condense function returns a modified list of alpha energy/intensity pairs,
    # generated by the following procedure:
    # 1. Sort the pairs by alpha energies (decreasing from highest to lowest)
    # 2. Compute the cumulative intensities, obtained by adding the intensities of
    #    the sorted alpha energies.
    # 3. Expand the list to fill in the gaps between alpha energies in increments
    #    of alpha_step_size. The cumulative intensity of
    #    (alpha_energy - n * alpha_step_size) is equal to the cumulative intensity
    #    of alpha_energy until the former expression is less than the next
    #    alpha_energy in the sorted list.
    def condense(self, alpha_step_size):
        """Return a [[energy, cumulative_intensity], ...] table stepping
        down from the highest alpha energy toward 0 by alpha_step_size."""
        sorted_alphas = sorted(self.alphas, key=itemgetter(0), reverse=True)

        # cumulative_intensities[i] is the summed intensity of the i+1
        # highest-energy alphas.
        cumulative_intensities = numpy.cumsum([alpha[1] for alpha in sorted_alphas])

        # Energy grid from the rounded maximum alpha energy down toward 0
        # (exclusive), in decrements of alpha_step_size.
        max_alpha = utils.round_half_up(sorted_alphas[0][0])
        alpha_steps = numpy.arange(max_alpha, 0, -alpha_step_size)

        alpha_index = 0
        max_index = len(sorted_alphas) - 1

        # This will create a duplicate entry for [max_alpha, max_alpha_intensity]
        # in the condensed alpha list. This is here intentionally.
        condensed_alphas = [[max_alpha, cumulative_intensities[alpha_index]]]

        for step in alpha_steps:
            # If the alpha step is LESS THAN the next alpha energy (rounded to 2
            # decimal places), increment alpha_index
            if alpha_index < max_index and step < utils.round_half_up(
                sorted_alphas[alpha_index + 1][0]
            ):
                alpha_index += 1

            intensity = cumulative_intensities[alpha_index]
            condensed_alphas.append([step, intensity])

        return condensed_alphas


class ChainAlphaList(AlphaList):
    """An AlphaList assembled from a decay-chain file.

    A chain file under CHAIN_LIST_DIR lists one isotope per line
    ("<Element><MassNumber> <branching %>"); the combined alpha list is
    the concatenation of every member isotope's alphas, each scaled by
    its branching fraction.
    """

    CHAIN_LIST_FILE_PATTERN = re.compile(
        r"^Chains\/(?P<element>[A-Za-z]{1,2})(?P<isotope>\d{1,3})Chain.dat"
    )
    CHAIN_LIST_LINE_PATTERN = re.compile(
        r"^(?P<element>[A-Za-z]{1,2})(?P<isotope>\d{1,3})\s+(?P<branch_frac>[\d\.]+)$"
    )

    def __init__(self, element, isotope):
        self.element = element
        self.isotope = isotope
        # On-disk location of this chain's definition file.
        self.file_path = f"{CHAIN_LIST_DIR}/{self.element}{self.isotope}Chain.dat"
        # Per-isotope AlphaList objects, kept for reference after loading.
        self._alpha_lists = []
        # Flattened [energy, intensity] pairs across the whole chain.
        self.alphas = []

    @classmethod
    def from_filepath(cls, file_path):
        """Construct from a path such as 'Chains/Th232Chain.dat'."""
        chain_file_match = cls.CHAIN_LIST_FILE_PATTERN.match(file_path)
        if chain_file_match is None:
            raise RuntimeError(f"Invalid file path for chain alpha list {file_path}")
        return cls(
            chain_file_match.group("element"), chain_file_match.group("isotope")
        )

    def load_or_fetch(self):
        """Read the chain file and aggregate every member isotope's alphas.

        Lines not matching CHAIN_LIST_LINE_PATTERN are ignored.  For each
        matching isotope, its alpha list is load_or_fetch()-ed, scaled by
        the branching fraction (percent / 100), recorded, and appended to
        self.alphas.  Returns the combined list.
        """
        with open(self.file_path) as chain_file:
            for row in chain_file:
                line_match = self.CHAIN_LIST_LINE_PATTERN.match(row)
                if line_match is None:
                    continue

                fraction = float(line_match.group("branch_frac")) / 100.0

                member = AlphaList(
                    line_match.group("element"), line_match.group("isotope")
                )
                member.load_or_fetch()
                member.scale_by(fraction)

                self._alpha_lists.append(member)
                self.alphas += member.alphas

        return self.alphas
4 changes: 4 additions & 0 deletions neucbot/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,7 @@ def format_float(number, precision=6):
return "0.0"
else:
return numpy.format_float_scientific(number, precision=precision, unique=False)


def round_half_up(number):
    """Round *number* to two decimal places, with halves rounding up.

    Uses the shift / add-0.5 / truncate idiom.  Note that int() truncates
    toward zero, so negative inputs are not rounded half-up.
    """
    shifted = number * 100.0 + 0.5
    return int(shifted) / 100.0
1 change: 0 additions & 1 deletion tests/integration_tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,6 @@ echo "Materials/Acrylic.dat"
echo "AlphaLists/Rn220Alphas.dat"
echo


python3 ./neucbot.py -m Materials/Acrylic.dat -l AlphaLists/Rn220Alphas.dat -d v2 -o tmp-acrylic-rn220-alphalist.txt
diff tmp-acrylic-rn220-alphalist.txt tests/integration_tests/acrylic-rn220-alphalist.txt

Expand Down
Loading