Merge branch 'main' into feature/pointwisePolytrope
222
utils/atomic/convertWeightsToHeader.py
Normal file
@@ -0,0 +1,222 @@
import pandas as pd

# Define fixed-width column specifications based on the format:
# a1 (width 1), i3 (width 3), i5 (width 5), i5 (width 5), i5 (width 5),
# 1x (skip 1), a3 (width 3), a4 (width 4), 1x (skip 1),
# f14.6 (width 14), f12.6 (width 12), f13.5 (width 13),
# 1x (skip 1), f10.5 (width 10), 1x (skip 1),
# a2 (width 2), f13.5 (width 13), f11.5 (width 11),
# 1x (skip 1), i3 (width 3), 1x (skip 1),
# f13.6 (width 13), f12.6 (width 12)
# Compute cumulative positions (0-indexed):
colSpecs = [
    (0, 1),     # control
    (1, 4),     # NZ
    (4, 9),     # N
    (9, 14),    # Z
    (14, 19),   # A
    # skip 1 char at position 19; next field starts at 20
    (20, 23),   # el
    (23, 27),   # o
    # skip 1 char at position 27; next field starts at 28
    (28, 42),   # massExcess (f14.6)
    (42, 54),   # massExcessUnc (f12.6)
    (54, 67),   # bindingEnergy (f13.5)
    # skip 1 char at position 67; next field starts at 68
    (68, 78),   # bindingEnergyUnc (f10.5)
    # skip 1 char at position 78; next field starts at 79
    (79, 81),   # betaCode (a2)
    (81, 94),   # betaDecayEnergy (f13.5)
    (94, 105),  # betaDecayEnergyUnc (f11.5)
    # skip 1 char at position 105; next field starts at 106
    (106, 109), # atomicMassInt (i3)
    # skip 1 char at position 109; next field starts at 110
    (110, 123), # atomicMassFrac (f13.6)
    (123, 135)  # atomicMassUnc (f12.6)
]

# Define column names (using camelCase for variables)
columnNames = [
    "control",
    "nz",
    "n",
    "z",
    "a",
    "el",
    "o",
    "massExcess",
    "massExcessUnc",
    "bindingEnergy",
    "bindingEnergyUnc",
    "betaCode",
    "betaDecayEnergy",
    "betaDecayEnergyUnc",
    "atomicMassInt",
    "atomicMassFrac",
    "atomicMassUnc"
]

def combine_atomic_mass(row):
    """
    Combine the integer and fractional parts of the atomic mass.
    For example, if atomicMassInt is '1' and atomicMassFrac is '008664.91590',
    this function returns 1.00866491590 (the atomic mass in u).
    """
    intPart = str(row["atomicMassInt"]).strip()
    fracPart = str(row["atomicMassFrac"]).strip()
    try:
        combined = int(intPart) + float(fracPart)/1e6
        return combined
    except ValueError:
        return None

def mkInstanceName(row):
    """
    Make a C++ instance name from the element symbol and mass number.
    """
    speciesName = f"{row['el'].strip()}-{row['a']}"
    return speciesName.replace("-", "_")

def formatSpecies(row):
    """
    Format c++ instantiation of Species struct from row data.
    """
    name = f"{row['el'].strip()}-{row['a']}"
    instanceName = name.replace("-", "_")
    nz = int(row['nz'])
    n = int(row['n'])
    z = int(row['z'])
    a = int(row['a'])
    bindingEnergy = float(row['bindingEnergy'])
    atomicMass = float(row['atomicMass'])
    atomicMassUnc = float(row['atomicMassUnc'])
    NaN = "std::numeric_limits<double>::quiet_NaN()"
    try:
        betaDecayEnergy = float(row['betaDecayEnergy'].replace("#", "").replace("*", ""))
    except ValueError:
        betaDecayEnergy = NaN
    instantiation = f"static const Species {instanceName}(\"{name}\", \"{row['el']}\", {nz}, {n}, {z}, {a}, {bindingEnergy}, \"{row['betaCode']}\", {betaDecayEnergy}, {atomicMass}, {atomicMassUnc});"
    return instantiation

def formatHeader(dataFrame):
    """
    Format c++ header file from DataFrame.
    """
    header = f"""#ifndef SPECIES_MASS_DATA_H
#define SPECIES_MASS_DATA_H
#include <unordered_map>
#include <string_view>
#include <string>
#include <ostream>
#include <limits>

namespace chemSpecies {{
    struct Species {{
        const std::string_view m_name;       //< Name of the species
        const std::string_view m_el;         //< Element symbol
        const int m_nz;                      //< NZ
        const int m_n;                       //< N
        const int m_z;                       //< Z
        const int m_a;                       //< A
        const double m_bindingEnergy;        //< Binding energy
        const std::string_view m_betaCode;   //< Beta decay code
        const double m_betaDecayEnergy;      //< Beta decay energy
        const double m_atomicMass;           //< Atomic mass
        const double m_atomicMassUnc;        //< Atomic mass uncertainty

        Species(const std::string_view name, const std::string_view el, const int nz, const int n, const int z, const int a, const double bindingEnergy, const std::string_view betaCode, const double betaDecayEnergy, const double atomicMass, const double atomicMassUnc)
            : m_name(name), m_el(el), m_nz(nz), m_n(n), m_z(z), m_a(a), m_bindingEnergy(bindingEnergy), m_betaCode(betaCode), m_betaDecayEnergy(betaDecayEnergy), m_atomicMass(atomicMass), m_atomicMassUnc(atomicMassUnc) {{}};

        Species(const Species& species)
            : m_name(species.m_name), m_el(species.m_el), m_nz(species.m_nz), m_n(species.m_n), m_z(species.m_z), m_a(species.m_a), m_bindingEnergy(species.m_bindingEnergy), m_betaCode(species.m_betaCode), m_betaDecayEnergy(species.m_betaDecayEnergy), m_atomicMass(species.m_atomicMass), m_atomicMassUnc(species.m_atomicMassUnc) {{}}

        double mass() const {{
            return m_atomicMass;
        }}

        double massUnc() const {{
            return m_atomicMassUnc;
        }}

        double bindingEnergy() const {{
            return m_bindingEnergy;
        }}

        double betaDecayEnergy() const {{
            return m_betaDecayEnergy;
        }}

        std::string_view betaCode() const {{
            return m_betaCode;
        }}

        std::string_view name() const {{
            return m_name;
        }}

        std::string_view el() const {{
            return m_el;
        }}

        int nz() const {{
            return m_nz;
        }}

        int n() const {{
            return m_n;
        }}

        int z() const {{
            return m_z;
        }}

        int a() const {{
            return m_a;
        }}

        friend std::ostream& operator<<(std::ostream& os, const Species& species) {{
            os << static_cast<std::string>(species.m_name) << " (" << species.m_atomicMass << " u)";
            return os;
        }}
    }};
    {'\n    '.join([formatSpecies(row) for index, row in dataFrame.iterrows()])}
    static const std::unordered_map<std::string, Species> species = {{
        {'\n        '.join([f'{{"{row["el"].strip()}-{row["a"]}", {mkInstanceName(row)}}},' for index, row in dataFrame.iterrows()])}
    }};
}}; // namespace chemSpecies
#endif // SPECIES_MASS_DATA_H
"""
    return header

if __name__ == "__main__":
    import argparse
    import os
    parser = argparse.ArgumentParser(description="Convert mass data to c++ header file.")
    parser.add_argument("input", help="Input file path.")
    parser.add_argument("-o", "--output", help="Output file path.", default="../../assets/static/atomic/include/atomicSpecies.h")
    args = parser.parse_args()

    if not os.path.exists(args.input):
        raise FileNotFoundError(f"File not found: {args.input}")

    # Read the file (adjust the skiprows value if your header differs)
    dataFrame = pd.read_fwf(args.input, colspecs=colSpecs, names=columnNames, skiprows=36)

    # Combine the two atomic mass fields into one float column
    dataFrame["atomicMass"] = dataFrame.apply(combine_atomic_mass, axis=1)
    dataFrame.drop(columns=["atomicMassInt", "atomicMassFrac"], inplace=True)

    # Format the header
    header = formatHeader(dataFrame)
    with open(args.output, "w") as f:
        f.write(header)
13
utils/atomic/readme.md
Normal file
@@ -0,0 +1,13 @@
# Information
A simple Python utility that turns the file `assets/atomic/weights.dat` into a C++ header, which can be included to provide easy access to all atomic weights inside 4DSSE.

## Requirements
To use this utility you will need:

- Python
- Pandas

## Usage
```bash
python convertWeightsToHeader.py <path/to/weights.dat> -o atomicWeights.h
```
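
As a rough sketch of how the generated header might then be consumed from C++ (the include name follows whatever was passed to `-o`; the `chemSpecies` namespace, accessor names, and the `"H-1"` key are assumptions based on the conversion script, not a guaranteed interface):

```cpp
// Hypothetical consumer of the generated header; assumes weights.dat
// contains a hydrogen-1 entry so the "H-1" key exists in the map.
#include "atomicSpecies.h"
#include <iostream>

int main() {
    // Look up a species by its "<element>-<A>" key and print it via operator<<.
    const auto& h1 = chemSpecies::species.at("H-1");
    std::cout << h1 << " binding energy: " << h1.bindingEnergy() << "\n";
    return 0;
}
```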
@@ -1,47 +1,22 @@
Poly:
    Gaussian:
        Sigma: 0.1
    Solver:
        ViewInitialGuess: false
Debug:
    Global:
        GLVis:
            Keyset: ''
            View: 'false'
            Exit: 'false'
        F_gf_View:
            Keyset: defaultKeyset
EOS:
    Helm:
        LogFile: "log"
Network:
    Approx8:
        NonStiff:
            AbsTol: 1.0e-06
            RelTol: 1.0e-06
        Stiff:
            AbsTol: 1.0e-06
            RelTol: 1.0e-06
Probe:
    GLVis:
        Visualization: true
        Host: localhost
        Port: 19916
LogManager:
    DefaultLogName: 4DSSE.log

# This file was auto-generated by generateDefaultConfig.py
# Do not modify this file directly.

#=================================== TYPE HINTS ===================================

# Poly:
#     Gaussian:
#         Sigma: double
#     Solver:
#         ViewInitialGuess: bool
# Debug:
#     Global:
#         GLVis:
#             Keyset: sd:string
#             View: bo
#             Exit: bo
#         F_gf_View:
#             Keyset: std:string
# Probe:
#     GLVis:
#         Visualization: bool
#         Host: std::string
#         Port: int
# LogManager:
#     DefaultLogName: std::string
#
GLVis:
    Host: "localhost"
    Port: 19916
    Visualization: true
LogManager:
    DefaultLogName: "4DSSE.log"
opac:
    lowTemp:
        numeric:
            maxIter: 10
@@ -31,6 +31,9 @@ def parse_value(value, type_hint):
    if type_hint in {"int", "long"}:
        return (int(value), type_hint)
    elif type_hint in {"float", "double"}:
        return float(value)
    elif value == "defaultDataDir":
        return "data" # special case for defaultDataDir
        return (float(value), type_hint)
    elif type_hint == "bool":
        return (value.lower() in {"true", "1"}, type_hint)
@@ -38,6 +41,9 @@ def parse_value(value, type_hint):
        return (value.strip('"'), type_hint) # Remove quotes for string literals
    elif value.startswith("'") and value.endswith("'"):
        return (value.strip("'"), type_hint) # Remove single quotes
        return value.strip("'") # Remove single quotes
    elif value.startswith('"') and value.endswith('"'):
        return (value.strip('"'), type_hint) # Remove quotes for string literals

    return (value, type_hint) # Return as-is if unsure
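
For orientation, the two branches that are fully visible in this hunk behave as follows (illustrative calls only; the other branches depend on code outside the diff):

```python
# Illustrative only: exercises the int and bool branches shown above.
parse_value("19916", "int")   # -> (19916, "int")
parse_value("true", "bool")   # -> (True, "bool")
```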
@@ -66,10 +72,19 @@ def scan_files(directory):
            print(f"Match: {match.group()}")
            type_hint, hierarchy, value = match.groups()
            keys = hierarchy.split(":")
            if keys[0] == "Data" and keys[1] == "Dir":
                continue # Skip Data:Dir as it is a special case
            insert_into_dict(hierarchy_dict, keys, value, type_hint)

    return hierarchy_dict

class QuotedString(str):
    pass

def represent_quoted_string(dumper, data):
    return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='"')


def split_dict_recursive(originalDict):
    dataDict = {}
    typeDict = {}
@@ -87,6 +102,19 @@ def split_dict_recursive(originalDict):

def save_yaml(data, output_file):
    """Saves the nested dictionary to a YAML file."""
    yaml.add_representer(QuotedString, represent_quoted_string)

    def quote_strings(data):
        if isinstance(data, dict):
            return {k: quote_strings(v) for k, v in data.items()}
        elif isinstance(data, list):
            return [quote_strings(item) for item in data]
        elif isinstance(data, str):
            return QuotedString(data)
        else:
            return data

    data = quote_strings(data)
    options, types = split_dict_recursive(data)
    with open(output_file, 'w', encoding='utf-8') as f:
        yaml.dump(options, f, default_flow_style=False, sort_keys=False, indent=4)
@@ -48,18 +48,23 @@ class OPATTable:
    logT: Iterable[float] #< Logarithm of T values
    logKappa: Iterable[Iterable[float]] #< Logarithm of Kappa values

defaultHeader = Header(
    magic="OPAT",
    version=1,
    numTables=0,
    headerSize=256,
    indexOffset=0,
    creationDate=datetime.now().strftime("%b %d, %Y"),
    sourceInfo="no source provided by user",
    comment="default header",
    numIndex=2,
    reserved=b"\x00" * 24
)
def make_default_header() -> Header:
    """
    @brief Create a default header for an OPAT file.
    @return The default header.
    """
    return Header(
        magic="OPAT",
        version=1,
        numTables=0,
        headerSize=256,
        indexOffset=0,
        creationDate=datetime.now().strftime("%b %d, %Y"),
        sourceInfo="no source provided by user",
        comment="default header",
        numIndex=2,
        reserved=b"\x00" * 24
    )

class OpatIO:
    """
@@ -103,7 +108,7 @@ class OpatIO:
    Save the OPAT file as a binary file.
    """
    def __init__(self):
        self.header: Header = defaultHeader
        self.header: Header = make_default_header()
        self.tables: List[Tuple[Tuple[float, float], OPATTable]] = []

    @staticmethod
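
A brief sketch of the design choice in this hunk (assuming `Header` and `OpatIO` as defined in this module): the module-level `defaultHeader` is built once at import time, so its `creationDate` is frozen then and the same object is shared by every `OpatIO`, whereas `make_default_header()` builds a fresh header on each call.

```python
# Sketch: with the factory, each OpatIO gets its own Header whose
# creationDate is computed at construction time, not at import time.
io_a = OpatIO()
io_b = OpatIO()
assert io_a.header is not io_b.header  # distinct objects with the factory
```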
@@ -353,7 +358,7 @@ class OpatIO:
            tableString.append(logRRow + logRRowTrue)
            for i, logT in enumerate(table.logT):
                row = f"{logT:<10.4f}"
                for kappa in table.logKappa[i]:
                for kappa in table.logKappa[:, i]:
                    row += f"{kappa:<10.4f}"
                tableString.append(row)
            tableString.append("=" * 80)