Add parse_1D function
commit 321e3e83a4 (parent 2f4e097a68)

load_1D now delegates to a new parse_1D function that parses an already-open file object based on its extension and returns the result as {"meta": metadata, "Measurements": measurements}.
@@ -1,3 +1,4 @@
+import os
 import re
 from collections import defaultdict
 from decimal import Decimal
@@ -68,29 +69,36 @@ def load_1D(filepath):
     Names of these dictionaries are M + measurement number. They include HKL indeces, angles,
     monitors, stepsize and array of counts
     """
-    det_variables = {"file_type": str(filepath)[-3:], "meta": {}}
     with open(filepath, "r") as infile:
+        _, ext = os.path.splitext(filepath)
+        det_variables = parse_1D(infile, data_type=ext)
+
+    return det_variables
+
+
+def parse_1D(fileobj, data_type):
     # read metadata
-    for line in infile:
-        det_variables["Measurements"] = {}
+    metadata = {}
+    for line in fileobj:
         if "=" in line:
             variable, value = line.split("=")
             variable = variable.strip()
             if variable in META_VARS_FLOAT:
-                det_variables["meta"][variable] = float(value)
+                metadata[variable] = float(value)
             elif variable in META_VARS_STR:
-                det_variables["meta"][variable] = str(value)[:-1].strip()
+                metadata[variable] = str(value)[:-1].strip()
             elif variable in META_UB_MATRIX:
-                det_variables["meta"][variable] = re.findall(r"[-+]?\d*\.\d+|\d+", str(value))
+                metadata[variable] = re.findall(r"[-+]?\d*\.\d+|\d+", str(value))

         if "#data" in line:
             # this is the end of metadata and the start of data section
             break

     # read data
-    if det_variables["file_type"] == "ccl":
+    if data_type == ".ccl":
+        measurements = {}
         decimal = list()
-        data = infile.readlines()
+        data = fileobj.readlines()
         position = -1
         for lines in data:
             position = position + 1
@@ -109,12 +117,12 @@ def load_1D(filepath):
             decimal.append(bool(Decimal(d["k_index"]) % 1 == 0))
             d["l_index"] = float(lines.split()[3])
             decimal.append(bool(Decimal(d["l_index"]) % 1 == 0))
-            if det_variables["meta"]["zebra_mode"] == "bi":
+            if metadata["zebra_mode"] == "bi":
                 d["twotheta_angle"] = float(lines.split()[4])  # gamma
                 d["omega_angle"] = float(lines.split()[5])  # omega
                 d["chi_angle"] = float(lines.split()[6])  # nu
                 d["phi_angle"] = float(lines.split()[7])  # doesnt matter
-            elif det_variables["meta"]["zebra_mode"] == "nb":
+            elif metadata["zebra_mode"] == "nb":
                 d["gamma_angle"] = float(lines.split()[4])  # gamma
                 d["omega_angle"] = float(lines.split()[5])  # omega
                 d["nu_angle"] = float(lines.split()[6])  # nu
@@ -143,30 +151,31 @@ def load_1D(filepath):
                 int(next_line.split()[0]),
             )
             d["Counts"] = counts
-            det_variables["Measurements"][str("M" + str(measurement_number))] = d
+            measurements[str("M" + str(measurement_number))] = d
+
         if all(decimal):
-            det_variables["meta"]["indices"] = "hkl"
+            metadata["indices"] = "hkl"
         else:
-            det_variables["meta"]["indices"] = "real"

-    elif det_variables["file_type"] == "dat":
+            metadata["indices"] = "real"

+    elif data_type == ".dat":
         # skip the first 2 rows, the third row contans the column names
-        next(infile)
-        next(infile)
-        row_names = next(infile).split()
+        next(fileobj)
+        next(fileobj)
+        col_names = next(fileobj).split()

         data_cols = defaultdict(list)
-        for line in infile:
+        for line in fileobj:
             if "END-OF-DATA" in line:
                 # this is the end of data
                 break

-            for name, val in zip(row_names, line.split()):
+            for name, val in zip(col_names, line.split()):
                 data_cols[name].append(float(val))

-        det_variables["Measurements"] = dict(data_cols)
+        measurements = dict(data_cols)

     else:
         print("Unknown file extention")

-    return det_variables
+    return {"meta": metadata, "Measurements": measurements}
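For orientation, a minimal usage sketch of the refactored API, assuming load_1D and parse_1D are imported from the module touched by this diff; the helper function, stream, and file names below are illustrative only and not part of the commit.

import io
import os

# Hypothetical examples; assumes load_1D and parse_1D from the parser module are in scope.

# 1) Reading from disk still goes through load_1D, which now delegates to parse_1D:
#      data = load_1D("path/to/measurement.ccl")
#      data["meta"]          -> metadata dict (floats, strings, UB-matrix number lists)
#      data["Measurements"]  -> {"M<number>": {...}} for .ccl, column dict for .dat

# 2) parse_1D can also be called directly on an already-open file object:
def parse_from_fileobj(fileobj, filename):
    # data_type is the extension including the dot, e.g. ".ccl" or ".dat"
    _, ext = os.path.splitext(filename)
    return parse_1D(fileobj, data_type=ext)

# e.g. with an in-memory stream (illustrative):
# result = parse_from_fileobj(io.StringIO(text), "example.dat")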