Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
317 changes: 250 additions & 67 deletions generate_jendl.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,15 +7,13 @@

import argparse
import ssl
import tarfile
import gzip
from multiprocessing import Pool
from pathlib import Path
from shutil import rmtree, copyfileobj
from shutil import rmtree
from urllib.parse import urljoin

import openmc.data
from utils import download, process_neutron
from utils import download, process_neutron, process_thermal, extract, update_zsymam


class CustomFormatter(argparse.ArgumentDefaultsHelpFormatter,
Expand All @@ -42,9 +40,12 @@ def main():
default='latest', help="Output HDF5 versioning. Use "
"'earliest' for backwards compatibility or 'latest' for "
"performance")
parser.add_argument('-r', '--release', choices=['4.0', '5.0'], default='5.0',
parser.add_argument('-r', '--release', choices=['5'], default='5',
help="The nuclear data library release version. "
"The currently supported options are 4.0, 5.0")
"The currently supported options are 5")
parser.add_argument('-p', '--particles', choices=['neutron', 'thermal', 'photon'],
nargs='+', default=['neutron', 'thermal', 'photon'],
help="Incident particles to include")
parser.add_argument('--cleanup', action='store_true',
help="Remove download directories when data has "
"been processed")
Expand All @@ -58,6 +59,14 @@ def main():
args = parser.parse_args()


def sort_key(path):
    """Return a sort key that places thermal scattering files last.

    Thermal scattering libraries (files named ``c_*``) get a key of
    ``(1000, path)`` so they compare after every nuclide key produced by
    ``openmc.data.zam`` (whose leading Z component is always < 1000);
    all other files are keyed by their (Z, A, M) identifier.
    """
    is_thermal = path.name.startswith('c_')
    return (1000, path) if is_thermal else openmc.data.zam(path.stem)


library_name = 'jendl'

cwd = Path.cwd()
Expand All @@ -72,92 +81,266 @@ def main():
# This dictionary contains all the unique information about each release.
# This can be extended to accommodate new releases
release_details = {
'4.0': {
'base_url': 'https://wwwndc.jaea.go.jp/ftpnd/ftp/JENDL/',
'compressed_files': ['jendl40-or-up_20160106.tar.gz'],
'endf_files': endf_files_dir.joinpath('jendl40-or-up_20160106').glob('*.dat'),
'metastables': endf_files_dir.joinpath('jendl40-or-up_20160106').glob('*m.dat'),
'compressed_file_size': '0.2 GB',
'uncompressed_file_size': '2 GB'
},
'5.0':{
'base_url': 'https://wwwndc.jaea.go.jp/ftpnd/',
'compressed_files': ['ftp/JENDL/jendl5-n.tar.gz',
'jendl/jendl5-update/data/jendl5_upd6.tar.gz',
'jendl/jendl5-update/data/n_059-Pr-141.dat.gz'],
'endf_files': endf_files_dir.glob('*.dat'),
'metastables': endf_files_dir.glob('*m1.dat'),
'compressed_file_size': '4.1 GB',
'uncompressed_file_size': '16 GB'
'5':{
'neutron': {
'base_url': 'https://wwwndc.jaea.go.jp/ftpnd/ftp/JENDL/',
'compressed_files': [
'jendl5-n.tar.gz',
'jendl5-n_upd1.tar.gz',
'jendl5-n_upd6.tar.gz',
'jendl5-n_upd7.tar.gz',
'jendl5-n_upd10.tar.gz',
'jendl5-n_upd11.tar.gz',
'jendl5-n_upd12.tar.gz',
'jendl5-n_upd14.tar.gz',
],
'endf_dir': 'jendl5-n',
'patterns': ['n_???-*-???.dat', 'n_???-*-???m?.dat'],
'pattern_errata': ['jendl5-n_upd1/*.dat', 'jendl-n_upd6/*.dat', '*.dat'],
},
'thermal': {
'base_url': 'https://wwwndc.jaea.go.jp/ftpnd/ftp/JENDL/',
'compressed_files': [
'jendl5-tsl.tar.gz',
'jendl5-tsl_upd16.tar.gz',
],
'endf_dir': 'jendl5-tsl',
'pattern': '*.dat',
'sab_files': [
# Hydrogen (H-1) compounds
('n_001-H-001.dat', 'tsl_HinC5O2H8.dat'),
('n_001-H-001.dat', 'tsl_HinCH2.dat'),
('n_001-H-001.dat', 'tsl_HinH2O.dat'),
('n_001-H-001.dat', 'tsl_HinIceIh.dat'),
('n_001-H-001.dat', 'tsl_HinLiquidBenzene.dat'),
('n_001-H-001.dat', 'tsl_HinLiquidEthanol.dat'),
('n_001-H-001.dat', 'tsl_HinLiquidMesitylene.dat'),
('n_001-H-001.dat', 'tsl_HinLiquidMethane.dat'),
('n_001-H-001.dat', 'tsl_HinLiquidM-Xylene.dat'),
('n_001-H-001.dat', 'tsl_HinLiquidToluene.dat'),
('n_001-H-001.dat', 'tsl_HinLiquidTriphenylmethane.dat'),
('n_001-H-001.dat', 'tsl_HinOrthoH.dat'),
('n_001-H-001.dat', 'tsl_HinParaH.dat'),
('n_001-H-001.dat', 'tsl_HinSolidBenzene.dat'),
('n_001-H-001.dat', 'tsl_HinSolidEthanol.dat'),
('n_001-H-001.dat', 'tsl_HinSolidMesitylene.dat'),
('n_001-H-001.dat', 'tsl_HinSolidMethane.dat'),
('n_001-H-001.dat', 'tsl_HinSolidM-Xylene.dat'),
('n_001-H-001.dat', 'tsl_HinSolidToluene.dat'),
('n_001-H-001.dat', 'tsl_HinSolidTriphenylmethane.dat'),
('n_001-H-001.dat', 'tsl_HinYH2.dat'),
('n_001-H-001.dat', 'tsl_HinZrH.dat'),
# Deuterium (H-2) compounds
('n_001-H-002.dat', 'tsl_DinD2O.dat'),
('n_001-H-002.dat', 'tsl_DinOrthoD.dat'),
('n_001-H-002.dat', 'tsl_DinParaD.dat'),
# Beryllium (Be-9) compounds
('n_004-Be-009.dat', 'tsl_Be-metal.dat'),
('n_004-Be-009.dat', 'tsl_BeinBeO.dat'),
# Carbon (C-12) compounds
('n_006-C-012.dat', 'tsl_CinLiquidBenzene.dat'),
('n_006-C-012.dat', 'tsl_CinLiquidEthanol.dat'),
('n_006-C-012.dat', 'tsl_CinLiquidMesitylene.dat'),
('n_006-C-012.dat', 'tsl_CinLiquidMethane.dat'),
('n_006-C-012.dat', 'tsl_CinLiquidM-Xylene.dat'),
('n_006-C-012.dat', 'tsl_CinLiquidToluene.dat'),
('n_006-C-012.dat', 'tsl_CinLiquidTriphenylmethane.dat'),
('n_006-C-012.dat', 'tsl_CinSiC.dat'),
('n_006-C-012.dat', 'tsl_CinSolidBenzene.dat'),
('n_006-C-012.dat', 'tsl_CinSolidEthanol.dat'),
('n_006-C-012.dat', 'tsl_CinSolidMesitylene.dat'),
('n_006-C-012.dat', 'tsl_CinSolidMethane.dat'),
('n_006-C-012.dat', 'tsl_CinSolidM-Xylene.dat'),
('n_006-C-012.dat', 'tsl_CinSolidToluene.dat'),
('n_006-C-012.dat', 'tsl_CinSolidTriphenylmethane.dat'),
('n_006-C-012.dat', 'tsl_crystalline-graphite.dat'),
('n_006-C-012.dat', 'tsl_reactor-graphite-10P.dat'),
('n_006-C-012.dat', 'tsl_reactor-graphite-30P.dat'),
# Nitrogen (N-14) compounds
('n_007-N-014.dat', 'tsl_NinUN.dat'),
# Oxygen (O-16) compounds
('n_008-O-016.dat', 'tsl_OinBeO.dat'),
('n_008-O-016.dat', 'tsl_OinD2O.dat'),
('n_008-O-016.dat', 'tsl_OinH2O.dat'),
('n_008-O-016.dat', 'tsl_OinIceIh.dat'),
('n_008-O-016.dat', 'tsl_OinLiquidEthanol.dat'),
('n_008-O-016.dat', 'tsl_OinSolidEthanol.dat'),
('n_008-O-016.dat', 'tsl_OinUO2.dat'),
# Aluminum (Al-27)
('n_013-Al-027.dat', 'tsl_013_Al_027.dat'),
# Silicon (Si-28) compounds
('n_014-Si-028.dat', 'tsl_SiinSiC.dat'),
('n_014-Si-028.dat', 'tsl_SiO2-alpha.dat'),
('n_014-Si-028.dat', 'tsl_SiO2-beta.dat'),
# Iron (Fe-56)
('n_026-Fe-056.dat', 'tsl_026_Fe_056.dat'),
# Yttrium (Y-89) compounds
('n_039-Y-089.dat', 'tsl_YinYH2.dat'),
# Zirconium (Zr-90) compounds
('n_040-Zr-090.dat', 'tsl_ZrinZrH.dat'),
# Uranium (U-238) compounds
('n_092-U-238.dat', 'tsl_UinUN.dat'),
('n_092-U-238.dat', 'tsl_UinUO2.dat'),
],
'pattern_errata': ['*.dat'],
},
'photon': {
'base_url': 'https://wwwndc.jaea.go.jp/ftpnd/ftp/JENDL/',
'compressed_files': [
'jendl5-pa.tar.gz',
'jendl5-ar.tar.gz',
],
'pattern_photoatomic': 'jendl5-pa/*.dat',
'pattern_atomic_relax': 'jendl5-ar/*.dat',
}
}
}

download_warning = """
WARNING: This script will download {} of data.
Extracting and processing the data requires {} of additional free disk space.
""".format(release_details[args.release]['compressed_file_size'],
release_details[args.release]['uncompressed_file_size'])

# ==============================================================================
# DOWNLOAD FILES FROM WEBSITE

if args.download:
print(download_warning)
for f in release_details[args.release]['compressed_files']:
# Establish connection to URL
download(urljoin(release_details[args.release]['base_url'], f),
for particle in args.particles:
details = release_details[args.release][particle]
for f in details['compressed_files']:
download(
urljoin(details['base_url'], f),
context=ssl._create_unverified_context(),
output_path=download_path)
output_path=download_path / particle
)

# ==============================================================================
# EXTRACT FILES FROM TGZ
if args.extract:
for f in release_details[args.release]['compressed_files']:
fname = Path(f).parts[-1]
if fname.endswith('.tar.gz'):
with tarfile.open(download_path / fname, 'r') as tgz:
print('Extracting {}...'.format(fname))
# extract files ignoring internal folder structure
for member in tgz.getmembers():
if member.isreg():
member.name = Path(member.name).name
tgz.extract(member, path=endf_files_dir)

else:
# get the file name
filename = Path(download_path / fname)
source = gzip.open(filename)
target = open(endf_files_dir / filename.name.rsplit('.', 1)[0], 'wb')
with source, target:
copyfileobj(source, target)
for particle in args.particles:
details = release_details[args.release][particle]
extraction_dir = endf_files_dir / particle
for f in details['compressed_files']:
extract(download_path / particle / f, extraction_dir)

if args.cleanup and download_path.exists():
rmtree(download_path)

# ==============================================================================
# GENERATE HDF5 LIBRARY -- NEUTRON FILES
# HANDLE ERRATA FILES

for particle in args.particles:
details = release_details[args.release][particle]
if "pattern_errata" in details:
destination_dir = endf_files_dir / particle / details["endf_dir"]
for pattern in details["pattern_errata"]:
files = (endf_files_dir / particle).rglob(pattern)
for p in files:
p.rename(destination_dir / p.name)

# Get a list of all ENDF files
neutron_files = release_details[args.release]['endf_files']
# =========================================================================
# PROCESS INCIDENT NEUTRON DATA

# Create output directory if it doesn't exist
args.destination.mkdir(parents=True, exist_ok=True)
for particle in args.particles:
particle_destination = args.destination / particle
particle_destination.mkdir(parents=True, exist_ok=True)

library = openmc.data.DataLibrary()

with Pool() as pool:
results = []
for filename in sorted(neutron_files):
func_args = (filename, args.destination, args.libver, args.temperatures)
r = pool.apply_async(process_neutron, func_args)
results.append(r)
if 'neutron' in args.particles:
particle = 'neutron'
with Pool() as pool:
details = release_details[args.release][particle]
results = []
neutron_dir = endf_files_dir / particle / details["endf_dir"]
for pattern in details['patterns']:
for filename in neutron_dir.glob(pattern):
func_args = (filename, args.destination / particle, args.libver,
args.temperatures)
r = pool.apply_async(process_neutron, func_args)
results.append(r)

for r in results:
r.wait()

for p in sorted((args.destination / particle).glob('*.h5'), key=sort_key):
library.register_file(p)

# =========================================================================
# PROCESS THERMAL SCATTERING DATA

if 'thermal' in args.particles:
neutron_details = release_details[args.release]['neutron']
thermal_details = release_details[args.release]['thermal']
neutron_dir = endf_files_dir / 'neutron' / neutron_details["endf_dir"]
thermal_dir = endf_files_dir / 'thermal' / thermal_details["endf_dir"]

# Patch liquid/solid evaluations to have unique ZSYMAM fields
update_thermal_list = [
("tsl_CinLiquidBenzene.dat", "c(c6h6)l"),
("tsl_CinLiquidEthanol.dat", "c(c2h6o)l"),
("tsl_CinLiquidM-Xylene.dat", "c(m-c8h10)l"),
("tsl_CinLiquidMesitylene.dat", "c(c9h12)l"),
("tsl_CinLiquidMethane.dat", "c(ch4)l"),
("tsl_CinLiquidToluene.dat", "c(c7h8)l"),
("tsl_CinLiquidTriphenylmethane.dat", "c(c19h16)l"),
("tsl_CinSolidBenzene.dat", "c(c6h6)s"),
("tsl_CinSolidEthanol.dat", "c(c2h6o)s"),
("tsl_CinSolidM-Xylene.dat", "c(m-c8h10)s"),
("tsl_CinSolidMesitylene.dat", "c(c9h12)s"),
("tsl_CinSolidMethane.dat", "c(ch4)s"),
("tsl_CinSolidToluene.dat", "c(c7h8)s"),
("tsl_CinSolidTriphenylmethane.dat", "c(c19h16)s"),
("tsl_HinLiquidBenzene.dat", "h(c6h6)l"),
("tsl_HinLiquidEthanol.dat", "h(c2h6o)l"),
("tsl_HinLiquidM-Xylene.dat", "h(m-c8h10)l"),
("tsl_HinLiquidMesitylene.dat", "h(c9h12)l"),
("tsl_HinLiquidMethane.dat", "h(ch4)l"),
("tsl_HinLiquidToluene.dat", "h(c7h8)l"),
("tsl_HinLiquidTriphenylmethane.dat", "h(c19h16)l"),
("tsl_HinSolidBenzene.dat", "h(c6h6)s"),
("tsl_HinSolidEthanol.dat", "h(c2h6o)s"),
("tsl_HinSolidM-Xylene.dat", "h(m-c8h10)s"),
("tsl_HinSolidMesitylene.dat", "h(c9h12)s"),
("tsl_HinSolidMethane.dat", "h(ch4)s"),
("tsl_HinSolidToluene.dat", "h(c7h8)s"),
("tsl_HinSolidTriphenylmethane.dat", "h(c19h16)s"),
("tsl_OinLiquidEthanol.dat", "o(c2h6o)l"),
("tsl_OinSolidEthanol.dat", "o(c2h6o)s"),
]
for filename, zsymam in update_thermal_list:
update_zsymam(thermal_dir / filename, zsymam)


with Pool() as pool:
results = []
for path_neutron, path_thermal in thermal_details['sab_files']:
func_args = (neutron_dir / path_neutron, thermal_dir / path_thermal,
args.destination / 'thermal', args.libver)
r = pool.apply_async(process_thermal, func_args)
results.append(r)

for r in results:
r.wait()

for p in sorted((args.destination / 'thermal').glob('*.h5'), key=sort_key):
library.register_file(p)

# =========================================================================
# INCIDENT PHOTON DATA

if 'photon' in args.particles:
particle = 'photon'
details = release_details[args.release][particle]
photo_files = (endf_files_dir / particle).rglob(details['pattern_photoatomic'])
atom_files = (endf_files_dir / particle).rglob(details['pattern_atomic_relax'])
for photo_path, atom_path in zip(sorted(photo_files), sorted(atom_files)):
# Generate instance of IncidentPhoton
print('Converting:', photo_path.name, atom_path.name)
data = openmc.data.IncidentPhoton.from_endf(photo_path, atom_path)

for r in results:
r.wait()
# Export HDF5 file
h5_file = args.destination / particle / f'{data.name}.h5'
data.export_to_hdf5(h5_file, 'w', libver=args.libver)

# Register with library
for p in sorted((args.destination).glob('*.h5')):
library.register_file(p)
# Register with library
library.register_file(h5_file)

# Write cross_sections.xml
library.export_to_xml(args.destination / 'cross_sections.xml')
Expand Down
Loading