MOOSE - Multiscale Object Oriented Simulation Environment
HDF5WriterBase.cpp
1 // HDF5WriterBase.cpp ---
2 //
3 // Filename: HDF5WriterBase.cpp
4 // Description:
5 // Author: Subhasis Ray
6 // Maintainer:
7 // Created: Sat Feb 25 14:42:03 2012 (+0530)
8 // Version:
9 // Last-Updated: Sun Dec 20 23:20:44 2015 (-0500)
10 // By: subha
11 // Update #: 298
12 // URL:
13 // Keywords:
14 // Compatibility:
15 //
16 //
17 
18 // Commentary:
19 //
20 //
21 //
22 //
23 
24 // Change log:
25 //
26 //
27 //
28 
29 // Code:
30 
31 #ifdef USE_HDF5
32 
33 #include <algorithm>
34 #include <string>
35 #include <fstream>
36 
37 #include "hdf5.h"
38 
39 #include "header.h"
40 #include "../utility/utility.h"
41 
42 #include "HDF5WriterBase.h"
43 
44 using namespace std;
45 
47 // Utility functions
49 
56 hid_t require_attribute(hid_t file_id, string path,
57  hid_t data_type, hid_t data_id)
58 {
59  size_t attr_start = path.rfind("/");
60  string node_path = ".";
61  string attr_name = "";
62  if (attr_start == string::npos){
63  attr_start = 0;
64  } else {
65  node_path = path.substr(0, attr_start);
66  attr_start += 1;
67  }
68  attr_name = path.substr(attr_start);
69  if (H5Aexists_by_name(file_id, node_path.c_str(), attr_name.c_str(),
70  H5P_DEFAULT)){
71  return H5Aopen_by_name(file_id, node_path.c_str(), attr_name.c_str(),
72  H5P_DEFAULT, H5P_DEFAULT);
73  } else {
74  return H5Acreate_by_name(file_id, node_path.c_str(), attr_name.c_str(),
75  data_type, data_id,
76  H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
77  }
78 }
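require_attribute above resolves the last path component as the attribute name and everything before it as the node that carries the attribute, then opens the attribute if it already exists and creates it otherwise. A minimal usage sketch, assuming the group holding the attribute already exists in an open file (file and attribute names here are hypothetical, and error checks are omitted):

hid_t file_id = H5Fopen("example.h5", H5F_ACC_RDWR, H5P_DEFAULT);
hid_t space_id = H5Screate(H5S_SCALAR);
// "data/version" -> attribute "version" on the existing group "data"
hid_t attr_id = require_attribute(file_id, "data/version", H5T_NATIVE_DOUBLE, space_id);
double version = 1.0;
H5Awrite(attr_id, H5T_NATIVE_DOUBLE, &version);
H5Aclose(attr_id);
H5Sclose(space_id);
H5Fclose(file_id);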
79 
83 hid_t require_group(hid_t file, string path)
84 {
85  vector<string> pathTokens;
86  moose::tokenize(path, "/", pathTokens);
87  hid_t prev = file;
88  hid_t current = -1;
89  htri_t exists;
90  // Open or create each group along the path
91  for (unsigned int ii = 0; ii < pathTokens.size(); ++ii){
92  exists = H5Lexists(prev, pathTokens[ii].c_str(), H5P_DEFAULT);
93  if (exists > 0){
94  current = H5Gopen2(prev, pathTokens[ii].c_str(), H5P_DEFAULT);
95  } else {
96  current = H5Gcreate2(prev, pathTokens[ii].c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
97  }
98  if (prev != file){
99  if(H5Gclose(prev) < 0){
100  return -1;
101  }
102  }
103  if (current < 0){
104  return current;
105  }
106  prev = current;
107  }
108  return current;
109 }
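require_group behaves like "mkdir -p" for HDF5: it walks the '/'-separated path, opening the components that already exist and creating the ones that do not, and returns the id of the last group. A short sketch with a hypothetical file and path:

hid_t file_id = H5Fcreate("example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
hid_t grp = require_group(file_id, "/map/runs/run0"); // creates map, runs and run0 as needed
if (grp >= 0){
    H5Gclose(grp);
}
H5Fclose(file_id);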
110 
114 hid_t HDF5WriterBase::createDoubleDataset(hid_t parent_id, std::string name, hsize_t size, hsize_t maxsize)
115 {
116  herr_t status;
117  hsize_t dims[1] = {size};
118  hsize_t maxdims[] = {maxsize};
119  hsize_t _chunkSize = chunkSize_;
120  if (_chunkSize > maxsize){
121  _chunkSize = maxsize;
122  }
123  hsize_t chunk_dims[] = {_chunkSize};
124  hid_t chunk_params = H5Pcreate(H5P_DATASET_CREATE);
125  status = H5Pset_chunk(chunk_params, 1, chunk_dims);
126  assert( status >= 0 );
127  if (compressor_ == "zlib"){
128  status = H5Pset_deflate(chunk_params, compression_);
129  } else if (compressor_ == "szip"){
130  // this needs more study
131  unsigned sz_opt_mask = H5_SZIP_NN_OPTION_MASK;
132  status = H5Pset_szip(chunk_params, sz_opt_mask,
133  HDF5WriterBase::CHUNK_SIZE);
134  }
135  hid_t dataspace = H5Screate_simple(1, dims, maxdims);
136  hid_t dataset_id = H5Dcreate2(parent_id, name.c_str(),
137  H5T_NATIVE_DOUBLE, dataspace,
138  H5P_DEFAULT, chunk_params, H5P_DEFAULT);
139  H5Sclose(dataspace);
140  H5Pclose(chunk_params);
141  return dataset_id;
142 }
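createDoubleDataset builds a one-dimensional chunked (and optionally compressed) dataset of doubles that can later be extended up to maxsize. A sketch of how a subclass might pair it with appendToDataset further down; the group path is hypothetical and filehandle_ is assumed to refer to an already open file:

hid_t grp = require_group(filehandle_, "/data/Vm");
hid_t dset = createDoubleDataset(grp, "soma", 0, H5S_UNLIMITED); // empty, extensible dataset
vector< double > buffer(100, 0.0);   // one batch of samples
appendToDataset(dset, buffer);       // grows the dataset and writes the batch at its end
H5Dclose(dset);
H5Gclose(grp);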
143 
144 hid_t HDF5WriterBase::createStringDataset(hid_t parent_id, string name, hsize_t size, hsize_t maxsize)
145 {
146  herr_t status;
147  hid_t ftype = H5Tcopy(H5T_C_S1);
148  if (H5Tset_size(ftype, H5T_VARIABLE) < 0){
149  return -1;
150  }
151  hsize_t dims[] = {size};
152  hsize_t maxdims[] = {maxsize};
153  hsize_t _chunkSize = chunkSize_;
154  if (maxsize < _chunkSize){
155  _chunkSize = maxsize;
156  }
157  hsize_t chunk_dims[] = {_chunkSize};
158  hid_t chunk_params = H5Pcreate(H5P_DATASET_CREATE);
159  status = H5Pset_chunk(chunk_params, 1, chunk_dims);
160  assert( status >= 0 );
161  if (compressor_ == "zlib"){
162  status = H5Pset_deflate(chunk_params, compression_);
163  } else if (compressor_ == "szip"){
164  // this needs more study
165  unsigned sz_opt_mask = H5_SZIP_NN_OPTION_MASK;
166  status = H5Pset_szip(chunk_params, sz_opt_mask,
167  HDF5WriterBase::CHUNK_SIZE);
168  }
169  hid_t dataspace = H5Screate_simple(1, dims, maxdims);
170  hid_t dataset_id = H5Dcreate2(parent_id, name.c_str(),
171  ftype, dataspace,
172  H5P_DEFAULT, chunk_params, H5P_DEFAULT); // chunked layout so the extent can grow
173  H5Sclose(dataspace);
174  H5Tclose(ftype);
175  H5Pclose(chunk_params);
176  return dataset_id;
177 }
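createStringDataset is the variable-length string counterpart. Writing to it uses the same variable-length string type on the memory side, as in this sketch (the dataset name and contents are hypothetical, and the parent is assumed to be an open file or group id held by a subclass):

hid_t dset = createStringDataset(filehandle_, "notes", 2, 2);
hid_t memtype = H5Tcopy(H5T_C_S1);
H5Tset_size(memtype, H5T_VARIABLE);
const char * notes[] = {"first run", "control"};
H5Dwrite(dset, memtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, notes);
H5Tclose(memtype);
H5Dclose(dset);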
178 
179 
183 herr_t HDF5WriterBase::appendToDataset(hid_t dataset_id, const vector< double >& data)
184 {
185  herr_t status;
186  if (dataset_id < 0){
187  return -1;
188  }
189  if (data.size() == 0){
190  return 0;
191  }
192  hid_t filespace = H5Dget_space(dataset_id);
193  if (filespace < 0){
194  return -1;
195  }
196  hsize_t size = H5Sget_simple_extent_npoints(filespace) + data.size();
197  status = H5Dset_extent(dataset_id, &size);
198  if (status < 0){
199  return status;
200  }
201  H5Sclose(filespace); filespace = H5Dget_space(dataset_id); // re-acquire the dataspace with the new extent
202  hsize_t size_increment = data.size();
203  hid_t memspace = H5Screate_simple(1, &size_increment, NULL);
204  hsize_t start = size - data.size();
205  H5Sselect_hyperslab(filespace, H5S_SELECT_SET, &start, NULL,
206  &size_increment, NULL);
207  status = H5Dwrite(dataset_id, H5T_NATIVE_DOUBLE, memspace, filespace,
208  H5P_DEFAULT, &data[0]);
209  H5Sclose(memspace); H5Sclose(filespace); return status; // release dataspace handles before returning
210 }
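appendToDataset follows the standard HDF5 append idiom: extend the dataset, re-acquire its file dataspace, select a hyperslab covering the newly added tail, and write the buffer into that selection. The same pattern written out directly against the C API, for a hypothetical 1-D extensible dataset `dset`, with `count` (hsize_t) doubles held in `buf`:

hid_t oldspace = H5Dget_space(dset);
hsize_t old_size = H5Sget_simple_extent_npoints(oldspace);
H5Sclose(oldspace);
hsize_t new_size = old_size + count;
H5Dset_extent(dset, &new_size);
hid_t filespace = H5Dget_space(dset);               // dataspace with the new extent
hid_t memspace = H5Screate_simple(1, &count, NULL);
H5Sselect_hyperslab(filespace, H5S_SELECT_SET, &old_size, NULL, &count, NULL);
H5Dwrite(dset, H5T_NATIVE_DOUBLE, memspace, filespace, H5P_DEFAULT, buf);
H5Sclose(memspace);
H5Sclose(filespace);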
211 
212 
217 hid_t HDF5WriterBase::createDataset2D(hid_t parent, string name, unsigned int rows)
218 {
219  if (parent < 0){
220  return -1; // invalid parent: return a negative id like the other error paths
221  }
222  herr_t status;
223  // we need chunking here to allow extensibility
224  hsize_t chunkdims[] = {rows, chunkSize_};
225  hid_t chunk_params = H5Pcreate(H5P_DATASET_CREATE);
226  status = H5Pset_chunk(chunk_params, 2, chunkdims);
227  assert(status >= 0);
228  if (compressor_ == "zlib"){
229  status = H5Pset_deflate(chunk_params, compression_);
230  } else if (compressor_ == "szip"){
231  // this needs more study
232  unsigned sz_opt_mask = H5_SZIP_NN_OPTION_MASK;
233  status = H5Pset_szip(chunk_params, sz_opt_mask,
234  HDF5WriterBase::CHUNK_SIZE);
235  }
236  hsize_t dims[2] = {rows, 0};
237  hsize_t maxdims[2] = {rows, H5S_UNLIMITED};
238  hid_t dataspace = H5Screate_simple(2, dims, maxdims);
239  hid_t dset = H5Dcreate2(parent, name.c_str(), H5T_NATIVE_DOUBLE, dataspace, H5P_DEFAULT, chunk_params, H5P_DEFAULT);
240  H5Pclose(chunk_params);
241  H5Sclose(dataspace);
242  return dset;
243 }
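createDataset2D creates a rows x 0 dataset whose second dimension is unlimited, so each row holds one recorded variable and new columns are appended as the simulation advances. A minimal sketch of that shape in use (the dataset name and values are hypothetical, filehandle_ is assumed open):

hid_t dset = createDataset2D(filehandle_, "events", 2); // starts as 2 x 0
hsize_t newdims[2] = {2, 1};                            // grow to 2 x 1
H5Dset_extent(dset, newdims);
double column[2] = {0.5, -65.0};                        // one value per row
H5Dwrite(dset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, column);
H5Dclose(dset);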
244 
249 template <typename A>
250 herr_t writeScalarAttributesFromMap(hid_t file_id, map < string, A > path_value_map)
251 {
252  for (typename map< string, A >::const_iterator ii = path_value_map.begin();
253  ii != path_value_map.end(); ++ii){
254  herr_t status = writeScalarAttr<A>(file_id,
255  ii->first, ii->second);
256  if (status < 0){
257  cerr << "Error: writing attribute " << ii->first
258  << " returned status code " << status << endl;
259  return status;
260  }
261  }
262  return 0;
263 }
264 
269 template <typename A>
270 herr_t writeVectorAttributesFromMap(hid_t file_id, map < string, vector < A > > path_value_map)
271 {
272  for (typename map< string, vector < A > >::const_iterator ii = path_value_map.begin();
273  ii != path_value_map.end(); ++ii){
274  herr_t status = writeVectorAttr<A>(file_id,
275  ii->first, ii->second);
276  if (status < 0){
277  cerr << "Error: writing attribute " << ii->first
278  << " returned status code " << status << endl;
279  return status;
280  }
281  }
282  return 0;
283 }
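The two helpers above iterate over a (path, value) map and hand each entry to the matching writeScalarAttr / writeVectorAttr specialization, stopping at the first failure. A sketch of the path convention they rely on, assuming file_id is an open HDF5 file handle (names are hypothetical): a path without '/' attaches the attribute to the root group, while "group/name" attaches it to an existing group.

map< string, string > attrs;
attrs["model_name"] = "squid_axon";  // attribute on the root group
attrs["data/unit"] = "mV";           // attribute "unit" on the existing group "data"
writeScalarAttributesFromMap< string >(file_id, attrs);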
284 
286 // Template specializations for scalar attribute writing
288 template <>
289 herr_t writeScalarAttr(hid_t file_id, string path, string value)
290 {
291  hid_t data_id = H5Screate(H5S_SCALAR);
292  hid_t dtype = H5Tcopy(H5T_C_S1);
293  H5Tset_size(dtype, value.length()+1);
294  const char * data = value.c_str();
295  hid_t attr_id = require_attribute(file_id, path, dtype, data_id);
296  herr_t status = H5Awrite(attr_id, dtype, data);
297  H5Aclose(attr_id);
298  return status;
299 }
300 
301 template <>
302 herr_t writeScalarAttr(hid_t file_id, string path, double value)
303 {
304  hid_t data_id = H5Screate(H5S_SCALAR);
305  hid_t dtype = H5T_NATIVE_DOUBLE;
306  hid_t attr_id = require_attribute(file_id, path, dtype, data_id);
307  herr_t status = H5Awrite(attr_id, dtype, (void*)(&value));
308  H5Aclose(attr_id);
309  return status;
310 }
311 
312 template <>
313 herr_t writeScalarAttr(hid_t file_id, string path, long value)
314 {
315  hid_t data_id = H5Screate(H5S_SCALAR);
316  hid_t dtype = H5T_NATIVE_LONG;
317  hid_t attr_id = require_attribute(file_id, path, dtype, data_id);
318  herr_t status = H5Awrite(attr_id, dtype, (void*)(&value));
319  H5Aclose(attr_id);
320  return status;
321 }
322 
323 template <>
324 herr_t writeScalarAttr(hid_t file_id, string path, int value)
325 {
326  hid_t data_id = H5Screate(H5S_SCALAR);
327  hid_t dtype = H5T_NATIVE_INT;
328  hid_t attr_id = require_attribute(file_id, path, dtype, data_id);
329  herr_t status = H5Awrite(attr_id, dtype, (void*)(&value));
330  H5Aclose(attr_id);
331  return status;
332 }
333 
335 // Template specializations for vector attribute writing
337 
338 template <>
339 herr_t writeVectorAttr(hid_t file_id, string path, vector < string > value)
340 {
341  hsize_t dims[] = {value.size()};
342  hid_t space = H5Screate_simple(1, dims, NULL);
343  hid_t dtype = H5Tcopy(H5T_C_S1);
344  H5Tset_size(dtype, H5T_VARIABLE);
345  const char ** data = (const char **)calloc(value.size(),
346  sizeof(const char*));
347  for (unsigned int ii = 0; ii < value.size(); ++ii){
348  data[ii] = value[ii].c_str();
349  }
350  hid_t attr_id = require_attribute(file_id, path, dtype, space);
351  herr_t status = H5Awrite(attr_id, dtype, data);
352  free(data);
353  H5Aclose(attr_id);
354  return status;
355 }
356 
357 template <>
358 herr_t writeVectorAttr(hid_t file_id, string path, vector < double > value)
359 {
360  hsize_t dims[] = {value.size()};
361  hid_t data_id = H5Screate_simple(1, dims, NULL);
362  hid_t dtype = H5T_NATIVE_DOUBLE;
363  // no H5Tset_size() here: the predefined native type cannot be resized; the element count comes from the dataspace
364  void * data = &value[0];
365  hid_t attr_id = require_attribute(file_id, path, dtype, data_id);
366  herr_t status = H5Awrite(attr_id, dtype, data);
367  H5Aclose(attr_id);
368  return status;
369 }
370 
371 template <>
372 herr_t writeVectorAttr(hid_t file_id, string path, vector < long > value)
373 {
374  hsize_t dims[] = {value.size()};
375  hid_t data_id = H5Screate_simple(1, dims, NULL);
376  hid_t dtype = H5T_NATIVE_LONG;
377  // no H5Tset_size() here: the predefined native type cannot be resized; the element count comes from the dataspace
378  void * data = &value[0];
379  hid_t attr_id = require_attribute(file_id, path, dtype, data_id);
380  herr_t status = H5Awrite(attr_id, dtype, data);
381  H5Aclose(attr_id);
382  return status;
383 }
384 
385 
386 
387 const Cinfo* HDF5WriterBase::initCinfo()
388 {
389 
391  // Field Definitions
393  static ValueFinfo< HDF5WriterBase, string > fileName(
394  "filename",
395  "Name of the file associated with this HDF5 writer object.",
396  &HDF5WriterBase::setFilename,
397  &HDF5WriterBase::getFilename);
398 
399  static ReadOnlyValueFinfo< HDF5WriterBase, bool > isOpen(
400  "isOpen",
401  "True if this object has an open file handle.",
402  &HDF5WriterBase::isOpen);
403 
404  static ValueFinfo< HDF5WriterBase, unsigned int > mode(
405  "mode",
406  "Mode for opening the file when it already exists: mode=1 appends"
407  " data to the existing file, mode=2 truncates the file, and mode=4"
408  " refuses to write (opening fails if the file exists).",
409  &HDF5WriterBase::setMode,
410  &HDF5WriterBase::getMode);
411 
412  static ValueFinfo< HDF5WriterBase, unsigned int > chunkSize(
413  "chunkSize",
414  "Chunksize for writing array data. Defaults to 100.",
415  &HDF5WriterBase::setChunkSize,
416  &HDF5WriterBase::getChunkSize);
417 
418  static ValueFinfo< HDF5WriterBase, string> compressor(
419  "compressor",
420  "Compression type for array data. zlib and szip are supported. Defaults to zlib.",
421  &HDF5WriterBase::setCompressor,
422  &HDF5WriterBase::getCompressor);
423 
424  static ValueFinfo< HDF5WriterBase, unsigned int > compression(
425  "compression",
426  "Compression level for array data. Defaults to 6.",
427  &HDF5WriterBase::setCompression,
428  &HDF5WriterBase::getCompression);
429 
430  static LookupValueFinfo< HDF5WriterBase, string, string > sattr(
431  "stringAttr",
432  "String attributes. The key is attribute name, value is attribute value"
433  " (string).",
434  &HDF5WriterBase::setStringAttr,
435  &HDF5WriterBase::getStringAttr);
436 
437  static LookupValueFinfo< HDF5WriterBase, string, double > dattr(
438  "doubleAttr",
439  "Double precision floating point attributes. The key is attribute name,"
440  " value is attribute value (double).",
441  &HDF5WriterBase::setDoubleAttr,
442  &HDF5WriterBase::getDoubleAttr);
443 
444  static LookupValueFinfo< HDF5WriterBase, string, long > lattr(
445  "longAttr",
446  "Long integer attributes. The key is attribute name, value is attribute"
447  " value (long).",
448  &HDF5WriterBase::setLongAttr,
449  &HDF5WriterBase::getLongAttr);
450 
451  static LookupValueFinfo< HDF5WriterBase, string, vector< string > > svecattr(
452  "stringVecAttr",
453  "String vector attributes. The key is attribute name, value is attribute value (string).",
454  &HDF5WriterBase::setStringVecAttr,
455  &HDF5WriterBase::getStringVecAttr);
456 
457  static LookupValueFinfo< HDF5WriterBase, string, vector< double > > dvecattr(
458  "doubleVecAttr",
459  "Double vector attributes. The key is attribute name, value is"
460  " attribute value (vector of double).",
461  &HDF5WriterBase::setDoubleVecAttr,
462  &HDF5WriterBase::getDoubleVecAttr);
463 
464  static LookupValueFinfo< HDF5WriterBase, string, vector< long > > lvecattr(
465  "longVecAttr",
466  "Long integer vector attributes. The key is attribute name, value is"
467  " attribute value (vector of long).",
468  &HDF5WriterBase::setLongVecAttr,
469  &HDF5WriterBase::getLongVecAttr);
470  static DestFinfo flush(
471  "flush",
472  "Write all buffer contents to file and clear the buffers.",
473  new OpFunc0 < HDF5WriterBase > ( &HDF5WriterBase::flush ));
474 
475  static DestFinfo close(
476  "close",
477  "Close the underlying file. This is a safety measure so that the file is not left in an invalid state even if a crash happens at exit.",
478  new OpFunc0< HDF5WriterBase > ( & HDF5WriterBase::close ));
479 
480 
481  static Finfo * finfos[] = {
482  &fileName,
483  &isOpen,
484  &mode,
485  &chunkSize,
486  &compressor,
487  &compression,
488  &sattr,
489  &dattr,
490  &lattr,
491  &svecattr,
492  &dvecattr,
493  &lvecattr,
494  &flush,
495  &close,
496  };
497  static string doc[] = {
498  "Name", "HDF5WriterBase",
499  "Author", "Subhasis Ray",
500  "Description", "HDF5 file writer base class. This is not to be used directly. Instead,"
501  " it should be subclassed to provide specific data writing functions."
502  " This class provides most basic properties like filename, file opening"
503  " mode, file open status."
504  };
505 
506 
507  static Dinfo< HDF5WriterBase > dinfo;
508  static Cinfo hdf5Cinfo(
509  "HDF5WriterBase",
510  Neutral::initCinfo(),
511  finfos,
512  sizeof(finfos)/sizeof(Finfo*),
513  &dinfo,
514  doc, sizeof(doc)/sizeof(string));
515  return &hdf5Cinfo;
516 }
517 
518 const hssize_t HDF5WriterBase::CHUNK_SIZE = 1024; // default chunk size
519 
520 
521 HDF5WriterBase::HDF5WriterBase():
522  filehandle_(-1),
523  filename_("moose_output.h5"),
524  openmode_(H5F_ACC_EXCL),
525  chunkSize_(CHUNK_SIZE),
526  compressor_("zlib"),
527  compression_(6)
528 {
529 }
530 
531 HDF5WriterBase::~HDF5WriterBase()
532 {
533  // derived classes should flush data in their own destructors
534  close();
535 }
536 
537 void HDF5WriterBase::setFilename(string filename)
538 {
539  if (filename_ == filename){
540  return;
541  }
542 
543  // // If file is open, close it before changing filename
544  // if (filehandle_ >= 0){
545  // status = H5Fclose(filehandle_);
546  // if (status < 0){
547  // cerr << "Error: failed to close HDF5 file handle for " << filename_ << ". Error code: " << status << endl;
548  // }
549  // }
550  // filehandle_ = -1;
551  filename_ = filename;
552  // status = openFile(filename);
553 }
554 
555 string HDF5WriterBase::getFilename() const
556 {
557  return filename_;
558 }
559 
560 bool HDF5WriterBase::isOpen() const
561 {
562  return filehandle_ >= 0;
563 }
564 
565 herr_t HDF5WriterBase::openFile()
566 {
567  herr_t status = 0;
568  if (filehandle_ >= 0){
569  cout << "Warning: closing already open file and opening " << filename_ << endl;
570  status = H5Fclose(filehandle_);
571  filehandle_ = -1;
572  if (status < 0){
573  cerr << "Error: failed to close currently open HDF5 file. Error code: " << status << endl;
574  return status;
575  }
576  }
577  hid_t fapl_id = H5Pcreate(H5P_FILE_ACCESS);
578  // Ensure that all open objects are closed before the file is closed
579  H5Pset_fclose_degree(fapl_id, H5F_CLOSE_STRONG);
580  ifstream infile(filename_.c_str());
581  bool fexists = infile.good();
582  infile.close();
583  if (!fexists || openmode_ == H5F_ACC_TRUNC){
584  filehandle_ = H5Fcreate(filename_.c_str(), openmode_, H5P_DEFAULT, fapl_id);
585  } else if (openmode_ == H5F_ACC_RDWR) {
586  filehandle_ = H5Fopen(filename_.c_str(), openmode_, fapl_id);
587  } else {
588  cerr << "Error: File \"" << filename_ << "\" already exists. Specify mode=" << H5F_ACC_RDWR
589  << " to append to it, or mode=" << H5F_ACC_TRUNC
590  << " to overwrite it. mode=" << H5F_ACC_EXCL
591  << " requires that the file does not already exist." << endl;
592  return -1;
593  }
594  if (filehandle_ < 0){
595  cerr << "Error: Could not open file for writing: " << filename_ << endl;
596  status = -1;
597  }
598  return status;
599 }
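openFile checks with a plain ifstream whether the file exists: a missing file, or mode H5F_ACC_TRUNC, leads to H5Fcreate; an existing file with mode H5F_ACC_RDWR is reopened for appending; any other combination is rejected. A sketch of how the mode is chosen from user code, assuming `writer` is a pointer to some concrete subclass of HDF5WriterBase:

// pick exactly one of the following before the file is opened:
writer->setMode(H5F_ACC_RDWR);   // append to an existing file
writer->setMode(H5F_ACC_TRUNC);  // overwrite an existing file
writer->setMode(H5F_ACC_EXCL);   // default: fail if the file already exists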
600 
601 void HDF5WriterBase::setMode(unsigned int mode)
602 {
603  if (mode == H5F_ACC_RDWR || mode == H5F_ACC_TRUNC || mode == H5F_ACC_EXCL){
604  openmode_ = mode;
605  }
606 }
607 
608 unsigned HDF5WriterBase::getMode() const
609 {
610  return openmode_;
611 }
612 
613 void HDF5WriterBase::setChunkSize(unsigned int size)
614 {
615  chunkSize_ = size;
616 }
617 
618 unsigned int HDF5WriterBase::getChunkSize() const
619 {
620  return chunkSize_;
621 }
622 
623 void HDF5WriterBase::setCompressor(string name)
624 {
625  compressor_ = name;
626  std::transform(compressor_.begin(), compressor_.end(), compressor_.begin(), ::tolower);
627 }
628 
629 string HDF5WriterBase::getCompressor() const
630 {
631  return compressor_;
632 }
633 
634 void HDF5WriterBase::setCompression(unsigned int level)
635 {
636  compression_ = level;
637 }
638 
639 unsigned int HDF5WriterBase::getCompression() const
640 {
641  return compression_;
642 }
643 
644 
645 // Subclasses should reimplement this for flushing data content to
646 // file.
647 void HDF5WriterBase::flush()
648 {
649  flushAttributes();
650  sattr_.clear();
651  dattr_.clear();
652  lattr_.clear();
653  svecattr_.clear();
654  dvecattr_.clear();
655  lvecattr_.clear();
656 }
657 
658 void HDF5WriterBase::flushAttributes()
659 {
660  if (filehandle_ < 0){
661  return;
662  }
663  // Write all scalar attributes
664  writeScalarAttributesFromMap< string >(filehandle_, sattr_);
665  writeScalarAttributesFromMap< double >(filehandle_, dattr_);
666  writeScalarAttributesFromMap< long >(filehandle_, lattr_);
667  // Write the vector attributes
668  writeVectorAttributesFromMap< string >(filehandle_, svecattr_);
669  writeVectorAttributesFromMap< double >(filehandle_, dvecattr_);
670  writeVectorAttributesFromMap< long >(filehandle_, lvecattr_);
671 
672 }
673 void HDF5WriterBase::close()
674 {
675  if (filehandle_ < 0){
676  return;
677  }
678  flush();
679  herr_t status = H5Fclose(filehandle_);
680  filehandle_ = -1;
681  if (status < 0){
682  cerr << "Error: closing file returned status code=" << status << endl;
683  }
684 }
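The attribute setters below only buffer values in in-memory maps; nothing reaches disk until flush() (or close(), which calls flush()) runs while a file handle is open. A sketch of the intended sequence on a hypothetical subclass instance whose file has already been opened:

writer->setStringAttr("model_name", "squid_axon"); // buffered in sattr_
writer->setDoubleAttr("dt", 2.5e-5);               // buffered in dattr_
writer->flush();                                   // written as HDF5 attributes, buffers cleared
writer->close();                                   // flushes again and closes the file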
685 
686 void HDF5WriterBase::setStringAttr(string name, string value)
687 {
688  sattr_[name] = value;
689 }
690 
691 void HDF5WriterBase::setDoubleAttr(string name, double value)
692 {
693  dattr_[name] = value;
694 }
695 
696 void HDF5WriterBase::setLongAttr(string name, long value)
697 {
698  lattr_[name] = value;
699 }
700 
701 string HDF5WriterBase::getStringAttr(string name) const
702 {
703  map <string, string>::const_iterator ii = sattr_.find(name);
704  if (ii != sattr_.end()){
705  return ii->second;
706  }
707  cerr << "Error: no attribute named " << name << endl;
708  return "";
709 }
710 
711 double HDF5WriterBase::getDoubleAttr(string name) const
712 {
713  map <string, double>::const_iterator ii = dattr_.find(name);
714  if (ii != dattr_.end()){
715  return ii->second;
716  }
717  cerr << "Error: no attribute named " << name << endl;
718  return 0.0;
719 }
720 
721 long HDF5WriterBase::getLongAttr(string name) const
722 {
723  map <string, long>::const_iterator ii = lattr_.find(name);
724  if (ii != lattr_.end()){
725  return ii->second;
726  }
727  cerr << "Error: no attribute named " << name << endl;
728  return 0;
729 }
730 
732 // Vector attributes
734 
735 void HDF5WriterBase::setStringVecAttr(string name, vector < string > value)
736 {
737  svecattr_[name] = value;
738 }
739 
740 void HDF5WriterBase::setDoubleVecAttr(string name, vector < double > value)
741 {
742  dvecattr_[name] = value;
743 }
744 
745 void HDF5WriterBase::setLongVecAttr(string name, vector < long >value)
746 {
747  lvecattr_[name] = value;
748 }
749 
750 vector < string > HDF5WriterBase::getStringVecAttr(string name) const
751 {
752  map <string, vector < string > >::const_iterator ii = svecattr_.find(name);
753  if (ii != svecattr_.end()){
754  return ii->second;
755  }
756  cerr << "Error: no attribute named " << name << endl;
757  return vector<string>();
758 }
759 
760 vector < double > HDF5WriterBase::getDoubleVecAttr(string name) const
761 {
762  map <string, vector < double > >::const_iterator ii = dvecattr_.find(name);
763  if (ii != dvecattr_.end()){
764  return ii->second;
765  }
766  cerr << "Error: no attribute named " << name << endl;
767  return vector<double>();
768 }
769 
770 vector < long > HDF5WriterBase::getLongVecAttr(string name) const
771 {
772  map <string, vector < long > >::const_iterator ii = lvecattr_.find(name);
773  if (ii != lvecattr_.end()){
774  return ii->second;
775  }
776  cerr << "Error: no attribute named " << name << endl;
777  return vector<long>();
779 }
780 
781 
782 
783 #endif // USE_HDF5
784 //
785 // HDF5WriterBase.cpp ends here