diff --git a/neucbot.py b/neucbot.py index 1eee06d9..8e6bd347 100755 --- a/neucbot.py +++ b/neucbot.py @@ -32,37 +32,6 @@ class constants: def isoDir(ele,A): return './Data/Isotopes/'+ele.capitalize()+'/'+ele.capitalize()+str(int(A))+'/' -def parseIsotope(iso): - ele = '' - A = 0 - for i in iso: - if i.isalpha(): - ele += i - if i.isdigit(): - A = A*10 + int(i) - return [ele,A] - -def loadChainAlphaList(fname): - f = open(fname) - tokens = [line.split() for line in f.readlines()] - alpha_list = [] - for line in tokens: - if len(list(line)) < 2 or line[0][0] == '#': - continue - # Read isotope and its branching ratio from file - iso = line[0] - br = float(line[1]) - [ele,A] = parseIsotope(iso) - - # Now get the isotope's alpha list and add it to the chain's list - aList_forIso = alpha.AlphaList(ele, A).load_or_fetch() - if constants.print_alphas: - print(iso, file = constants.ofile) - print('\t', aList_forIso, file = constants.ofile) - for [ene,intensity] in aList_forIso: - alpha_list.append([ene, intensity*br/100]) - return alpha_list - def runTALYS(e_a, ele, A): iso = str(ele)+str(int(A)) inpdir = isoDir(ele,A) + 'TalysInputs/' @@ -242,22 +211,6 @@ def readTotalNXsect(e_a,ele,A): #print("sigma = " , sigma) return sigma -def condense_alpha_list(alpha_list,alpha_step_size): - alpha_ene_cdf = [] - max_alpha = max(alpha_list) - e_a_max = int(max_alpha[0]*100 + 0.5)/100. - alpha_ene_cdf.append([e_a_max,max_alpha[1]]) - e_a = e_a_max - while e_a > 0: - cum_int = 0 - for alpha in alpha_list: - this_e_a = int(alpha[0]*100+0.5)/100. 
- if this_e_a >= e_a: - cum_int += alpha[1] - alpha_ene_cdf.append([e_a,cum_int]) - e_a -= alpha_step_size - return alpha_ene_cdf - def run_alpha(alpha_list, mat_comp, e_alpha_step): binsize = 0.1 # Bin size for output spectrum @@ -265,7 +218,7 @@ def run_alpha(alpha_list, mat_comp, e_alpha_step): xsects = {} total_xsect = 0 counter = 0 - alpha_ene_cdf = condense_alpha_list(alpha_list,e_alpha_step) + alpha_ene_cdf = alpha_list.condense(e_alpha_step) for [e_a, intensity] in alpha_ene_cdf: counter += 1 if counter % (int(len(alpha_ene_cdf)/100)) == 0: @@ -335,10 +288,12 @@ def main(): alphalist_file = sys.argv[sys.argv.index(arg)+1] print('load alpha list', alphalist_file, file = sys.stdout) alpha_list = alpha.AlphaList.from_filepath(alphalist_file) + alpha_list.load_or_fetch() if arg == '-c': chain_file = sys.argv[sys.argv.index(arg)+1] print('load alpha chain', chain_file, file = sys.stdout) - alpha_list = loadChainAlphaList(chain_file) + alpha_list = alpha.ChainAlphaList.from_filepath(chain_file) + alpha_list.load_or_fetch() if arg == '-m': mat_file = sys.argv[sys.argv.index(arg)+1] print('load target material', mat_file, file = sys.stdout) @@ -374,8 +329,8 @@ def main(): constants.ofile = open(ofile,'w') #sys.stdout = open(ofile,'w') - if len(alpha_list) == 0 or mat_comp.empty(): - if len(alpha_list)==0: print('No alpha list or chain specified', file = sys.stdout) + if len(alpha_list.alphas) == 0 or mat_comp.empty(): + if len(alpha_list.alphas)==0: print('No alpha list or chain specified', file = sys.stdout) if mat_comp.empty(): print('No target material specified', file = sys.stdout) print('', file = sys.stdout) help_message() @@ -383,9 +338,9 @@ def main(): if constants.print_alphas: print('Alpha List: ', file = sys.stdout) - print(max(alpha_list), file = sys.stdout) - condense_alpha_list(alpha_list,alpha_step_size) - for alph in alpha_list: + print(max(alpha_list.alphas), file = sys.stdout) + condensed = alpha_list.condense(alpha_step_size) + for alph in 
condensed: print(alph[0],'&', alph[1],'\\\\', file = sys.stdout) if constants.download_data: diff --git a/neucbot/alpha.py b/neucbot/alpha.py index d96544eb..d4f0ee29 100644 --- a/neucbot/alpha.py +++ b/neucbot/alpha.py @@ -1,38 +1,35 @@ import os -from neucbot import ensdf +import re +import numpy +from operator import itemgetter -ALPHA_LIST_DIR = "./AlphaLists" - - -def _alphas_from_file_path(file_path): - file = open(file_path) - - # Parse alphalist files: - # 1. Only parse lines that have 2+ tab-separated tokens - # 2. Ignore any lines starting with "#" - # 3. Return list of lists (where each sublist is a list of floats) - alphas = [ - [float(token) for token in line.split()] # Parse each token as float - for line in file.readlines() # for each line in file - if line[0] != "#" - and len(line.split()) >= 2 # except for lines matching these conditions - ] +from neucbot import ensdf, utils - file.close() - - return alphas +ALPHA_LIST_DIR = "./AlphaLists" +CHAIN_LIST_DIR = "./Chains" class AlphaList: + ALPHA_LIST_FILE_PATTERN = re.compile( + r"^AlphaLists\/(?P<element>[A-Za-z]{1,2})(?P<isotope>\d{1,3})Alphas.dat" + ) + def __init__(self, element, isotope): self.element = element self.isotope = isotope self.file_path = f"{ALPHA_LIST_DIR}/{self.element}{self.isotope}Alphas.dat" self.fetch_attempts = 3 + self.alphas = [] @classmethod def from_filepath(cls, file_path): - return _alphas_from_file_path(file_path) + if alpha_file_match := cls.ALPHA_LIST_FILE_PATTERN.match(file_path): + element = alpha_file_match.group("element") + isotope = alpha_file_match.group("isotope") + + return cls(element, isotope) + else: + raise RuntimeError(f"Invalid file path for alphalist {file_path}") def load_or_fetch(self): while not os.path.isfile(self.file_path): @@ -44,7 +41,27 @@ def load_or_fetch(self): return self.load() def load(self): - return _alphas_from_file_path(self.file_path) + file = open(self.file_path) + + # Parse alphalist files: + # 1. 
Only parse lines that have 2+ tab-separated tokens + # 2. Ignore any lines starting with "#" + # 3. Return list of lists (where each sublist is a list of floats) + self.alphas = [ + [float(token) for token in line.split()] # Parse each token as float + for line in file.readlines() # for each line in file + if line[0] != "#" + and len(line.split()) >= 2 # except for lines matching these conditions + ] + + file.close() + + return self.alphas + + def scale_by(self, branch_fraction): + self.alphas = [ + [energy, intensity * branch_fraction] for [energy, intensity] in self.alphas + ] def write(self): if os.path.exists(self.file_path): @@ -61,3 +78,92 @@ def write(self): file.close() return True + + # The condense function returns a modified list of alpha energy/intensity pairs, + # generated by the following procedure: + # 1. Sort the pairs by alpha energies (decreasing from highest to lowest) + # 2. Compute the cumulative intensities, obtained by adding the intensities of + # the sorted alpha energies. + # 3. Expand the list to fill in the gaps between alpha energies in increments + # of alpha_step_size. The cumulative intensity of + # (alpha_energy - n * alpha_step_size) is equal to the cumulative intensity + # of alpha_energy until the former expression is less than the next + # alpha_energy in the sorted list. + def condense(self, alpha_step_size): + sorted_alphas = sorted(self.alphas, key=itemgetter(0), reverse=True) + + cumulative_intensities = numpy.cumsum([alpha[1] for alpha in sorted_alphas]) + + # Starting from (0 + step size) to (max sorted_alpha) in increments of alpha_step_size + max_alpha = utils.round_half_up(sorted_alphas[0][0]) + alpha_steps = numpy.arange(max_alpha, 0, -alpha_step_size) + + alpha_index = 0 + max_index = len(sorted_alphas) - 1 + + # This will create a duplicate entry for [max_alpha, max_alpha_intensity] + # in the condensed alpha list. This is here intentionally. 
+ condensed_alphas = [[max_alpha, cumulative_intensities[alpha_index]]] + + for step in alpha_steps: + # If the alpha step is LESS THAN the next alpha energy (rounded to 2 + # decimal places), increment alpha_index + if alpha_index < max_index and step < utils.round_half_up( + sorted_alphas[alpha_index + 1][0] + ): + alpha_index += 1 + + intensity = cumulative_intensities[alpha_index] + condensed_alphas.append([step, intensity]) + + return condensed_alphas + + +class ChainAlphaList(AlphaList): + CHAIN_LIST_FILE_PATTERN = re.compile( + r"^Chains\/(?P<element>[A-Za-z]{1,2})(?P<isotope>\d{1,3})Chain.dat" + ) + CHAIN_LIST_LINE_PATTERN = re.compile( + r"^(?P<element>[A-Za-z]{1,2})(?P<isotope>\d{1,3})\s+(?P<branch_frac>[\d\.]+)$" + ) + + def __init__(self, element, isotope): + self.element = element + self.isotope = isotope + self.file_path = f"{CHAIN_LIST_DIR}/{self.element}{self.isotope}Chain.dat" + self._alpha_lists = [] + self.alphas = [] + + @classmethod + def from_filepath(cls, file_path): + if chain_file_match := cls.CHAIN_LIST_FILE_PATTERN.match(file_path): + element = chain_file_match.group("element") + isotope = chain_file_match.group("isotope") + + return cls(element, isotope) + else: + raise RuntimeError(f"Invalid file path for chain alpha list {file_path}") + + def load_or_fetch(self): + # Read in chain file list line by line, splitting into: + # - element symbol + # - mass_number + # - branching_ratio + # load_or_fetch() alpha list for each symbol/mass_number pair + # Scale intensity down by branching_ratio / 100 + file = open(self.file_path) + + for line in file.readlines(): + if match := self.CHAIN_LIST_LINE_PATTERN.match(line): + branch_fraction = float(match.group("branch_frac")) / 100.0 + + alpha_list = AlphaList(match.group("element"), match.group("isotope")) + alpha_list.load_or_fetch() + alpha_list.scale_by(branch_fraction) + + self._alpha_lists.append(alpha_list) + self.alphas += alpha_list.alphas + + file.close() + + return self.alphas diff --git a/neucbot/utils.py b/neucbot/utils.py index 
7f8157d1..d2d3b441 100644 --- a/neucbot/utils.py +++ b/neucbot/utils.py @@ -6,3 +6,7 @@ def format_float(number, precision=6): return "0.0" else: return numpy.format_float_scientific(number, precision=precision, unique=False) + + +def round_half_up(number): + return int(number * 100.0 + 0.5) / 100.0 diff --git a/tests/integration_tests.sh b/tests/integration_tests.sh index 87603777..58c1cc01 100755 --- a/tests/integration_tests.sh +++ b/tests/integration_tests.sh @@ -41,7 +41,6 @@ echo "Materials/Acrylic.dat" echo "AlphaLists/Rn220Alphas.dat" echo - python3 ./neucbot.py -m Materials/Acrylic.dat -l AlphaLists/Rn220Alphas.dat -d v2 -o tmp-acrylic-rn220-alphalist.txt diff tmp-acrylic-rn220-alphalist.txt tests/integration_tests/acrylic-rn220-alphalist.txt diff --git a/tests/test_alpha.py b/tests/test_alpha.py index 2ba2e463..74150004 100644 --- a/tests/test_alpha.py +++ b/tests/test_alpha.py @@ -1,9 +1,10 @@ import pytest import re +from pytest import approx from unittest import TestCase from unittest.mock import call, patch -from neucbot.alpha import AlphaList +from neucbot.alpha import AlphaList, ChainAlphaList from neucbot.ensdf import Client, Parser @@ -86,10 +87,11 @@ def test_load(self): ) def test_from_filepath(self): - alphas = AlphaList.from_filepath("AlphaLists/Bi212Alphas.dat") + alpha_list = AlphaList.from_filepath("AlphaLists/Bi212Alphas.dat") + alpha_list.load_or_fetch() self.assertEqual( - alphas, + alpha_list.alphas, [ [6.08988, 27.12], [6.05078, 69.91], @@ -127,6 +129,26 @@ def test_load_or_fetch_success(self, mocked_isfile, mocked_write): ], ) + def test_scale_by(self): + alpha_list = AlphaList.from_filepath("AlphaLists/Bi212Alphas.dat") + alpha_list.load_or_fetch() + alpha_list.scale_by(0.5) + + # Expect alpha intensities to be 50% of regular values + self.assertEqual( + alpha_list.alphas, + [ + [6.08988, 13.56], + [6.05078, 34.955], + [5.768, 0.85], + [5.626, 0.0785], + [5.607, 0.565], + [5.481, 0.0065], + [5.345, 0.0005], + [5.302, 0.000055], + ], 
+ ) + @patch.object(AlphaList, "write") @patch("os.path.isfile") def test_load_or_fetch_raises_on_failed_write(self, mocked_isfile, mocked_write): @@ -135,3 +157,106 @@ def test_load_or_fetch_raises_on_failed_write(self, mocked_isfile, mocked_write) with self.assertRaisesRegex(RuntimeError, r"Unable to write alpha file"): AlphaList("Bi", 212).load_or_fetch() + + def test_condense(self): + alpha_list = AlphaList.from_filepath("AlphaLists/Bi212Alphas.dat") + alpha_list.load_or_fetch() + + condensed_list = alpha_list.condense(0.01) + + assert len(condensed_list) == 610 # Accounting for duplicate first entry + assert condensed_list[0] == [approx(6.09), approx(27.12)] + assert condensed_list[1] == [approx(6.09), approx(27.12)] + assert condensed_list[2] == [approx(6.08), approx(27.12)] + assert condensed_list[3] == [approx(6.07), approx(27.12)] + assert condensed_list[4] == [approx(6.06), approx(27.12)] + assert condensed_list[5] == [approx(6.05), approx(27.12)] + assert condensed_list[6] == [approx(6.04), approx(97.03)] + assert condensed_list[32] == [approx(5.78), approx(97.03)] + assert condensed_list[33] == [approx(5.77), approx(97.03)] + assert condensed_list[34] == [approx(5.76), approx(98.73)] + assert condensed_list[47] == [approx(5.63), approx(98.73)] + assert condensed_list[48] == [approx(5.62), approx(98.887)] + assert condensed_list[49] == [approx(5.61), approx(98.887)] + assert condensed_list[50] == [approx(5.60), approx(100.017)] + assert condensed_list[62] == [approx(5.48), approx(100.017)] + assert condensed_list[63] == [approx(5.47), approx(100.030)] + assert condensed_list[75] == [approx(5.35), approx(100.030)] + assert condensed_list[76] == [approx(5.34), approx(100.031)] + assert condensed_list[80] == [approx(5.30), approx(100.031)] + assert condensed_list[81] == [approx(5.29), approx(100.03111)] + assert condensed_list[609] == [approx(0.01), approx(100.03111)] + + +class TestChainAlphaList(TestCase): + def test_from_filepath(self): + chain_list = 
ChainAlphaList.from_filepath("Chains/Th232Chain.dat") + + assert chain_list.element == "Th" + assert chain_list.isotope == "232" + + def test_from_filepath_error_invalid_filepath(self): + with self.assertRaisesRegex( + RuntimeError, r"Invalid file path for chain alpha list" + ): + ChainAlphaList.from_filepath("Chains/Th232Alphas.dat") + + def test_load_or_fetch(self): + chain_list = ChainAlphaList.from_filepath("Chains/Th232Chain.dat") + chain_list.load_or_fetch() + + assert len(chain_list._alpha_lists) == 7 + self.assertEqual( + chain_list.alphas, + [ + # Th232 Alphas + [4.0123, 78.2], + [3.9471999999999996, 21.7], + [3.8110999999999997, 0.069], + # Th228 Alphas + [5.42315, 73.4], + [5.3403599999999996, 26.0], + [5.211, 0.408], + [5.173, 0.218], + [5.138, 0.036], + [4.99, 1e-05], + [4.944, 2.4e-05], + [4.507, 1.7e-05], + [4.43, 4.6e-06], + # Ra224 Alphas + [5.68537, 94.92], + [5.448600000000001, 5.06], + [5.161, 0.0071], + [5.051, 0.0076], + [5.034, 0.003], + # Rn220 Alphas + [6.28808, 99.886], + [5.747, 0.114], + # Po216 Alphas + [6.7783, 99.9981], + [5.985, 0.0019], + # Bi212 Alphas (multiplied by branching fraction .3594) + [6.08988, 9.746928], + [6.05078, 25.125653999999997], + [5.768, 0.61098], + [5.626, 0.0564258], + [5.607, 0.406122], + [5.481, 0.0046722], + [5.345, 0.0003594], + [5.302, 3.9534e-05], + # Po212 Alphas (multiplied by branching fraction .6406 + [8.78486, 64.06], + ], + ) + + def test_condense(self): + pass + chain_list = ChainAlphaList.from_filepath("Chains/Th232Chain.dat") + chain_list.load_or_fetch() + + condensed_list = chain_list.condense(0.01) + + assert len(condensed_list) == 879 + assert condensed_list[0] == [approx(8.78), approx(64.06)] + assert condensed_list[1] == [approx(8.78), approx(64.06)] + assert condensed_list[878] == [approx(0.01), approx(600.0399365339999)]