16 #include "../utility/utility.h"
23 "Sends request for a field to target object"
// HDF5DataWriter::initCinfo(): builds and returns the class-info (Cinfo)
// record that registers this class's fields and shared messages with the
// MOOSE framework.
// NOTE(review): this view of the file is an incomplete extraction -- many
// intervening lines (Finfo declarations, braces, the Cinfo constructor
// call) are missing.  Comments only; no code token has been altered.
28 const Cinfo * HDF5DataWriter::initCinfo()
// Documentation string for the `process` DestFinfo (fragment).
32 "Handle process calls. Gets data from connected fields into a local"
33 " buffer and dumps them to `filename` if the buffer length exceeds"
// Documentation string for the `reinit` DestFinfo (fragment).
39 "Reinitialize the object. If the current file handle is valid, it tries"
40 " to close that and open the file specified in current filename field.",
// Array grouping the process/reinit handlers into one shared Finfo.
43 static Finfo * processShared[] = {
49 "Shared message to receive process and reinit",
50 processShared,
// Element count: total array size over element size.
sizeof( processShared ) /
sizeof(
Finfo* ));
// `flushLimit` value field, wired to the getter/setter defined later in
// this file.
54 "Buffer size limit for flushing the data from memory to file. Default"
56 &HDF5DataWriter::setFlushLimit,
57 &HDF5DataWriter::getFlushLimit);
// Table of all Finfos exposed by this class (contents not visible here).
59 static Finfo * finfos[] = {
// Class-level documentation strings as (key, value) pairs.
67 static string doc[] = {
68 "Name",
"HDF5DataWriter",
69 "Author",
"Subhasis Ray",
70 "Description",
"HDF5 file writer for saving field values from multiple objects."
72 "\nConnect the `requestOut` field of this object to the"
73 " `get{Fieldname}` of other objects where `fieldname` is the"
74 " target value field of type double. The HDF5DataWriter collects the"
75 " current values of the fields in all the targets at each time step in"
76 " a local buffer. When the buffer size exceeds `flushLimit` (default"
77 " 4M), it will write the data into the HDF5 file specified in its"
78 " `filename` field (default moose_output.h5). You can explicitly force"
79 " writing by calling the `flush` function."
81 "The dataset location in the output file replicates the MOOSE element"
82 " tree structure. Thus, if you record the Vm field from"
83 " `/model[0]/neuron[0]/soma[0], the dataset path will be"
84 " `/model[0]/neuron[0]/soma[0]/vm`"
87 "NOTE: The output file remains open until this object is destroyed, or"
88 " `close()` is called explicitly."
// Base-class Cinfo: HDF5DataWriter extends HDF5WriterBase.
94 HDF5WriterBase::initCinfo(),
// Finfo count for the Cinfo constructor.
96 sizeof(finfos)/
sizeof(
Finfo*),
98 doc,
// Doc-string count for the Cinfo constructor.
sizeof( doc ) /
sizeof(
string ));
// File-local pointer to the registered class info; forces registration of
// HDF5DataWriter with the MOOSE class table at static-initialization time.
102 static const Cinfo * hdf5dataWriterCinfo = HDF5DataWriter::initCinfo();
// Constructor: the flush threshold defaults to 4*1024*1024 (the "4M"
// mentioned in the class documentation) and the step counter starts at
// zero.  (Body not visible in this extraction.)
104 HDF5DataWriter::HDF5DataWriter(): flushLimit_(4*1024*1024), steps_(0)
// Destructor.  (Body not visible in this extraction; presumably releases
// the open file/datasets, e.g. via close() -- TODO confirm against the
// full source.)
108 HDF5DataWriter::~HDF5DataWriter()
// Close every dataset handle recorded in nodemap_, warning on failure,
// then delegate to the base-class close().  (Several body lines are
// missing from this extraction.)
113 void HDF5DataWriter::close()
// Nothing to do when no file is open.
115 if (filehandle_ < 0){
// Walk the path -> dataset-handle map and close each valid handle.
119 for (map < string, hid_t >::iterator ii = nodemap_.begin();
120 ii != nodemap_.end(); ++ii){
121 if (ii->second >= 0){
122 herr_t status = H5Dclose(ii->second);
// Report (but do not abort on) a failed H5Dclose.
124 cerr <<
"Warning: closing dataset for "
125 << ii->first <<
", returned status = "
// Let the base class release the file handle itself.
130 HDF5WriterBase::close();
// Append all buffered samples to their datasets, then flush the HDF5
// file itself.  (Some body lines are missing from this extraction.)
133 void HDF5DataWriter::flush()
// Cannot write without a valid file handle; report and bail out.
135 if (filehandle_ < 0){
136 cerr <<
"HDF5DataWriter::flush() - "
137 "Filehandle invalid. Cannot write data." << endl;
// One buffered vector per dataset; write each out in turn.
141 for (
unsigned int ii = 0; ii < datasets_.size(); ++ii){
142 herr_t status = appendToDataset(datasets_[ii], data_[ii]);
// Warn but continue with the remaining datasets on failure.
145 cerr <<
"Warning: appending data for object " << src_[ii]
146 <<
" returned status " << status << endl;
// Base-class flush, then push HDF5's internal buffers to disk.
149 HDF5WriterBase::flush();
150 H5Fflush(filehandle_, H5F_SCOPE_LOCAL);
// Per-timestep handler: buffers the current field values of all targets
// and writes them out once flushLimit_ steps have accumulated.  (The
// lines that populate dataBuf -- presumably via the requestOut message --
// are missing from this extraction; TODO confirm against the full source.)
156 void HDF5DataWriter::process(
const Eref & e,
ProcPtr p)
// Skip silently when no output file is open.
158 if (filehandle_ < 0){
162 vector <double> dataBuf;
// Append this step's value for each source to its per-source buffer.
164 for (
unsigned int ii = 0; ii < dataBuf.size(); ++ii){
165 data_[ii].push_back(dataBuf[ii]);
// Once enough steps are buffered, dump everything to the datasets.
168 if (steps_ >= flushLimit_){
170 for (
unsigned int ii = 0; ii < datasets_.size(); ++ii){
171 herr_t status = appendToDataset(datasets_[ii], data_[ii]);
// Warn but keep going on a failed append.
174 cerr <<
"Warning: appending data for object " << src_[ii]
175 <<
" returned status " << status << endl;
// Reinitialize: close datasets from any previous run, fall back to a
// default filename if none is set, and create/open one dataset per
// recording target.  (Portions of the body, including the file-reopen
// logic, are missing from this extraction.)
181 void HDF5DataWriter::reinit(
const Eref & e,
ProcPtr p)
// Drop dataset handles left over from a previous run.
184 for (
unsigned int ii = 0; ii < data_.size(); ++ii){
185 H5Dclose(datasets_[ii]);
// Each message target must have a matching source entry.
195 assert(numTgt == src_.size());
// Fall back to a default file name when none was set.
199 if (filename_.empty()){
200 filename_ =
"moose_data.h5";
202 if (filehandle_ > 0 ){
// Derive a dataset name from each target's getter function name:
// strip the "get" prefix and lower-case the first letter.
209 for (
unsigned int ii = 0; ii < src_.size(); ++ii){
210 string varname = func_[ii];
211 size_t found = varname.find(
"get");
213 varname = varname.substr(3);
214 if (varname.length() == 0){
221 varname[0] = tolower(varname[0]);
224 assert(varname.length() > 0);
// Dataset path mirrors the MOOSE element path plus the field name.
225 string path = src_[ii].path() +
"/" + varname;
226 hid_t dataset_id = getDataset(path);
227 datasets_.push_back(dataset_id);
// One sample buffer per source.
229 data_.resize(src_.size());
// Return a dataset handle for `path`, opening it if it already exists or
// creating it (plus any missing intermediate groups) otherwise.  Appears
// to return an invalid handle when no file is open.  (Several body lines,
// including the tokenization of `path`, are missing from this extraction.)
235 hid_t HDF5DataWriter::getDataset(
string path)
237 if (filehandle_ < 0){
// Silence HDF5's automatic error printing; errors are reported manually.
240 herr_t status = H5Eset_auto2(H5E_DEFAULT, NULL, NULL);
// Split the path into components; the final one names the dataset.
242 string::size_type lastslash = path.find_last_of(
"/");
243 vector<string> pathTokens;
245 hid_t prev_id = filehandle_;
// Walk/create each intermediate group under the previous node.
247 for (
unsigned int ii = 0; ii < pathTokens.size()-1; ++ii ){
249 htri_t exists = H5Lexists(prev_id, pathTokens[ii].c_str(),
// Group already present: open it.
253 id = H5Gopen2(prev_id, pathTokens[ii].c_str(), H5P_DEFAULT);
254 }
else if (exists == 0) {
// Group absent: create it.
256 id = H5Gcreate2(prev_id, pathTokens[ii].c_str(),
257 H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
// Failure on either the existence check or the open/create.
259 if ((exists < 0) || (
id < 0)){
263 cerr <<
"Error: failed to open/create group: ";
// Echo the partial path that failed.
264 for (
unsigned int jj = 0; jj <= ii; ++jj){
265 cerr <<
"/" << pathTokens[jj];
// Close intermediate group handles; never close the file handle itself.
270 if (prev_id >= 0 && prev_id != filehandle_){
272 status = H5Gclose(prev_id);
273 assert( status >= 0 );
// Final component: open the dataset if present, create a double-typed
// one if absent; any other H5Lexists result is reported as an error.
277 string name = pathTokens[pathTokens.size()-1];
278 htri_t exists = H5Lexists(prev_id, name.c_str(), H5P_DEFAULT);
279 hid_t dataset_id = -1;
281 dataset_id = H5Dopen2(prev_id, name.c_str(), H5P_DEFAULT);
282 }
else if (exists == 0){
283 dataset_id = createDoubleDataset(prev_id, name);
285 cerr <<
"Error: H5Lexists returned "
286 << exists <<
" for path \""
287 << path <<
"\"" << endl;
// Setter for the `flushLimit` field registered in initCinfo(): the number
// of buffered steps that triggers an automatic write.  (Body not visible
// in this extraction.)
292 void HDF5DataWriter::setFlushLimit(
unsigned int value)
// Getter for the `flushLimit` field registered in initCinfo().  (Body not
// visible in this extraction.)
297 unsigned int HDF5DataWriter::getFlushLimit()
const
unsigned int dataIndex() const
unsigned int getMsgTargetAndFunctions(DataId srcDataId, const SrcFinfo *finfo, vector< ObjId > &tgt, vector< string > &func) const
Element * element() const
void tokenize(const string &str, const string &delimiters, vector< string > &tokens)
static SrcFinfo1< vector< double > * > * requestOut()