Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
104 changes: 97 additions & 7 deletions avaframe/com1DFA/com1DFA.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@
import os
import pathlib
import pickle
import platform
import re
import time
from datetime import datetime
Expand Down Expand Up @@ -2159,7 +2158,7 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si
exportFields(cfg, t, fields, dem, outDir, cuSimName, TSave="initial")

if "particles" in resTypes:
savePartToPickle(particles, outDirData, cuSimName)
savePartToPickle(particles, outDirData, cuSimName, cfg=cfg)

# Update dtSave to remove the initial timestep we just saved
dtSave = updateSavingTimeStep(dtSaveOriginal, cfgGen, t)
Expand Down Expand Up @@ -2284,7 +2283,7 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si

# export particles dictionaries of saving time steps
if "particles" in resTypes:
Comment thread
fso42 marked this conversation as resolved.
Comment thread
fso42 marked this conversation as resolved.
savePartToPickle(particles, outDirData, cuSimName)
savePartToPickle(particles, outDirData, cuSimName, cfg=cfg)

# export particle properties for visualisation
if cfg["VISUALISATION"].getboolean("writePartToCSV"):
Expand Down Expand Up @@ -2416,7 +2415,7 @@ def DFAIterate(cfg, particles, fields, dem, inputSimLines, outDir, cuSimName, si

# export particles dictionaries of saving time steps
if "particles" in resTypes:
savePartToPickle(particles, outDirData, cuSimName)
savePartToPickle(particles, outDirData, cuSimName, cfg=cfg)

# save contour line for each sim only if the field is properly computed (not a dummy array)
contourResType = cfg["VISUALISATION"]["contourResType"]
Expand Down Expand Up @@ -2834,7 +2833,7 @@ def releaseSecRelArea(cfg, particles, fields, dem, zPartArray0, reportAreaInfo):
return particles, zPartArray0, reportAreaInfo


def savePartToPickle(dictList, outDir, logName, cfg=""):
    """Save particle dictionaries to pickle files in outDir.

    Works for a list of dictionaries as well as for one single dictionary.
    Note: particle coordinates are still in com1DFA reference system with origin 0,0

    Parameters
    -----------
    dictList : list or dict
        particles dictionary or list of particles dictionaries; each must
        contain a 't' entry (time info) which is used in the file name
    outDir : pathlib.Path
        path to output directory
    logName : str
        simulation Id
    cfg : str or configparser.ConfigParser
        ['EXPORTS'] and ['TRACKPARTICLES'] settings selecting the particle
        properties to be saved; if empty str all particle properties are
        saved, 't' (time info) is always appended

    Raises
    ------
    AttributeError
        if cfg['EXPORTS']['exportParticleProperties'] lists a property that
        is not an available particle property
    """

    # all particle properties that are available for export
    availableKeys = [
        "nPart",
        "x",
        "y",
        "trajectoryLengthXY",
        "trajectoryLengthXYCor",
        "trajectoryLengthXYZ",
        "z",
        "m",
        "dmDet",
        "massPerPart",
        "nPPK",
        "mTot",
        "h",
        "ux",
        "uy",
        "uz",
        "uAcc",
        "stoppCriteria",
        "kineticEne",
        "trajectoryAngle",
        "potentialEne",
        "peakKinEne",
        "peakMassFlowing",
        "simName",
        "xllcenter",
        "yllcenter",
        "ID",
        "nID",
        "parentID",
        "t",
        "inCellDEM",
        "indXDEM",
        "indYDEM",
        "indPartInCell",
        "partInCell",
        "secondaryReleaseInfo",
        "iterate",
        "idFixed",
        "peakForceSPH",
        "forceSPHIni",
        "totalEnthalpy",
        "velocityMag",
        "nExitedParticles",
        "tPlot",
        "dmEnt",
        "stoppedParticles",
        "massInitialized",
        "massEntrained",
        "massDetrained",
        "massStopped",
    ]

    # build the selection of properties to save; "" means save everything
    particleProperties = ""
    if isinstance(cfg, configparser.ConfigParser):
        exportProps = cfg["EXPORTS"]["exportParticleProperties"]
        if exportProps != "":
            # reject unknown property names early with a clear error message
            nonExisting = [item for item in exportProps.split("|") if item not in availableKeys]
            if len(nonExisting) > 0:
                message = "These particle properties are not available %s" % nonExisting
                log.error(message)
                raise AttributeError(message)

            # 't' (time info) is always saved as it is needed for the file name
            particleProperties = list(set(["t"] + exportProps.split("|")))
            if cfg["TRACKPARTICLES"].getboolean("trackParticles"):
                # particle tracking additionally requires position, velocity, mass and flow depth
                trackParticleProperties = cfg["TRACKPARTICLES"]["particleProperties"].split("|")
                particleProperties = set(
                    ["x", "y", "z", "ux", "uy", "uz", "m", "h"]
                    + particleProperties
                    + trackParticleProperties
                )

    # treat a single dictionary like a one-element list so both cases share one
    # save loop (avoids duplicated logic and avoids shadowing the builtin `dict`)
    particlesDicts = dictList if isinstance(dictList, list) else [dictList]
    for particles in particlesDicts:
        if particleProperties != "":
            particlesToSave = {key: particles[key] for key in particleProperties}
        else:
            particlesToSave = particles
        outFile = outDir / ("particles_%s_%09.4f.pickle" % (logName, particles["t"]))
        # with-statement guarantees the file handle is closed even on error
        with open(outFile, "wb") as fi:
            pickle.dump(particlesToSave, fi)


Expand Down
5 changes: 4 additions & 1 deletion avaframe/com1DFA/com1DFACfg.ini
Original file line number Diff line number Diff line change
Expand Up @@ -524,7 +524,7 @@ thresholdPointInPoly = 0.001

[TRACKPARTICLES]
# if particles should be tracked - don't forget to specify the "tSteps" you want to
# save further up (for example tStep = 0:1 will lead to tracking patiles every 1 second)
# save further up (for example tStep = 0:1 will lead to tracking particles every 1 second)
trackParticles = False
# centerTrackPartPoint of the location of the particles to track (x|y coordinates)
centerTrackPartPoint = 2933|-4010
Expand Down Expand Up @@ -570,4 +570,7 @@ exportData = True
exportRasters = False
# use LZW compression when writing TIFF raster files
useCompression = True
# particle properties - list all properties that shall be saved (separated by |), t is always added;
# the list of available particle properties can be found in the documentation
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Add info that particle property list can be found in the documentation

exportParticleProperties =


69 changes: 69 additions & 0 deletions avaframe/tests/test_com1DFA.py
Original file line number Diff line number Diff line change
Expand Up @@ -1821,6 +1821,75 @@ def test_savePartToPickle(tmp_path):
assert np.array_equal(particlesRead3["m"], particles1["m"])
assert particlesRead3["t"] == 0.0

# call function to be tested
logName = "simNameTest4"
cfg = configparser.ConfigParser()
cfg["EXPORTS"] = {"exportParticleProperties": "x|m"}
cfg["TRACKPARTICLES"] = {"trackParticles": False}
com1DFA.savePartToPickle(particles1, outDir, logName, cfg=cfg)

# read pickle
picklePath4 = outDir / "particles_simNameTest4_0000.0000.pickle"
particlesRead4 = pickle.load(open(picklePath4, "rb"))

assert np.array_equal(particlesRead4["x"], particles1["x"])
assert "y" not in particlesRead4.keys()
assert np.array_equal(particlesRead4["m"], particles1["m"])
assert particlesRead4["t"] == 0.0

# call function to be tested
logName = "simNameTest5"
cfg = configparser.ConfigParser()
cfg["EXPORTS"] = {"exportParticleProperties": "x|m"}
cfg["TRACKPARTICLES"] = {"trackParticles": True, "particleProperties": "iCell"}
particles1["ux"] = np.asarray([1.0, 2.0, 3.0])
particles1["uy"] = np.asarray([1.0, 4.0, 5.0])
particles1["uz"] = np.asarray([10.0, 11.0, 11.0])
particles1["iCell"] = np.asarray([10.0, 11.0, 11.0])
particles2["ux"] = np.asarray([1.0, 2.0, 3.0])
particles2["uy"] = np.asarray([1.0, 4.0, 5.0])
particles2["uz"] = np.asarray([10.0, 11.0, 11.0])
particles2["iCell"] = np.asarray([10.0, 11.0, 11.0])
particles1["z"] = np.asarray([1.0, 2.0, 3.0])
particles2["z"] = np.asarray([1.0, 4.0, 5.0])
particles1["h"] = np.asarray([1.0, 2.0, 3.0])
particles2["h"] = np.asarray([1.0, 4.0, 5.0])
com1DFA.savePartToPickle(particles1, outDir, logName, cfg=cfg)

# read pickle
picklePath5 = outDir / "particles_simNameTest5_0000.0000.pickle"
particlesRead5 = pickle.load(open(picklePath5, "rb"))

assert np.array_equal(particlesRead5["x"], particles1["x"])
assert "y" in particlesRead5.keys()
assert "ux" in particlesRead5.keys()
assert np.array_equal(particlesRead5["iCell"], particles1["iCell"])
assert np.array_equal(particlesRead5["m"], particles1["m"])
assert particlesRead5["t"] == 0.0

# call function to be tested
logName = "simNameTest6"
cfg = configparser.ConfigParser()
cfg["EXPORTS"] = {"exportParticleProperties": "x|m|hallo"}
cfg["TRACKPARTICLES"] = {"trackParticles": False}

with pytest.raises(AttributeError) as e:
com1DFA.savePartToPickle(particles1, outDir, logName, cfg=cfg)
assert ("These particle properties are not available") in str(e.value)

# call function to be tested
logName = "simNameTest7"
cfg = configparser.ConfigParser()
cfg["EXPORTS"] = {"exportParticleProperties": ""}
cfg["TRACKPARTICLES"] = {"trackParticles": False}
com1DFA.savePartToPickle(particles1, outDir, logName, cfg=cfg)
# read pickle
picklePath7 = outDir / "particles_simNameTest7_0000.0000.pickle"
particlesRead7 = pickle.load(open(picklePath7, "rb"))

for pProp in particlesRead7:
assert pProp in ['ux', 'uy', 'uz', 'iCell', 'z', 'x', 'y', 'm', 'h', 't']


def test_exportFields(tmp_path):
"""test exporting fields to ascii files"""
Expand Down
Loading