#!/usr/bin/env python
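#
# Compiled Cheetah template for xpdeint's HDF5 output format feature.
# Generated from Features/HDF5Format.tmpl; edit the template source rather
# than this file, as this file is regenerated.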




##################################################
## DEPENDENCIES
import sys
import os
import os.path
try:
    import builtins as builtin
except ImportError:
    import __builtin__ as builtin
from os.path import getmtime, exists
import time
import types
from Cheetah.Version import MinCompatibleVersion as RequiredCheetahVersion
from Cheetah.Version import MinCompatibleVersionTuple as RequiredCheetahVersionTuple
from Cheetah.Template import Template
from Cheetah.DummyTransaction import *
from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList
from Cheetah.CacheRegion import CacheRegion
import Cheetah.Filters as Filters
import Cheetah.ErrorCatchers as ErrorCatchers
from xpdeint.Features._HDF5Format import _HDF5Format
from xpdeint.Geometry.SplitUniformDimensionRepresentation import SplitUniformDimensionRepresentation
from xpdeint.CallOnceGuards import callOnceGuard, callOncePerInstanceGuard

##################################################
## MODULE CONSTANTS
VFFSL=valueFromFrameOrSearchList
VFSL=valueFromSearchList
VFN=valueForName
currentTime=time.time
__CHEETAH_version__ = '2.4.4'
__CHEETAH_versionTuple__ = (2, 4, 4, 'development', 0)
__CHEETAH_genTime__ = 1484975071.383431
__CHEETAH_genTimestamp__ = 'Sat Jan 21 16:04:31 2017'
__CHEETAH_src__ = '/home/mattias/xmds-2.2.3/admin/staging/xmds-2.2.3/xpdeint/Features/HDF5Format.tmpl'
__CHEETAH_srcLastModified__ = 'Sun Nov 24 20:15:20 2013'
__CHEETAH_docstring__ = 'Autogenerated by Cheetah: The Python-Powered Template Engine'

if __CHEETAH_versionTuple__ < RequiredCheetahVersionTuple:
    raise AssertionError(
      'This template was compiled with Cheetah version'
      ' %s. Templates compiled before version %s must be recompiled.'%(
         __CHEETAH_version__, RequiredCheetahVersion))

##################################################
## CLASSES

class HDF5Format(_HDF5Format):
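    """Compiled template implementing the 'hdf5' output format for xpdeint.

    Each method below generates a fragment of the simulation source code
    that writes output data using the HDF5 library.
    """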

    ##################################################
    ## CHEETAH GENERATED METHODS


    def __init__(self, *args, **KWs):

        super(HDF5Format, self).__init__(*args, **KWs)
        if not self._CHEETAH__instanceInitialized:
            cheetahKWArgs = {}
            allowedKWs = 'searchList namespaces filter filtersLib errorCatcher'.split()
            for k,v in KWs.items():
                if k in allowedKWs: cheetahKWArgs[k] = v
            self._initCheetahInstance(**cheetahKWArgs)
        

    def description(self, **KWS):
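        """Return the short human-readable description of this output format."""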



        ## CHEETAH: generated from @def description: HDF5 output format at line 26, col 1.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        
        ########################################
        ## START - generated method body
        
        write(u'''HDF5 output format''')
        
        ########################################
        ## END - generated method body
        
        return _dummyTrans and trans.response().getvalue() or ""
        

    @callOnceGuard
    def includes(self, **KWS):
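        """Emit the include directives for HDF5 output: the parent class's
        includes plus <hdf5_hl.h> when the high-level HDF5 library is
        available. @callOnceGuard ensures this is emitted at most once."""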



        ## CHEETAH: generated from @def includes at line 32, col 1.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        
        ########################################
        ## START - generated method body
        
        # 
        _v = super(HDF5Format, self).includes()
        if _v is not None: write(_filter(_v))
        # 
        write(u'''#if defined(HAVE_HDF5_HL)
  #include <hdf5_hl.h>
#endif
''')
        
        ########################################
        ## END - generated method body
        
        return _dummyTrans and trans.response().getvalue() or ""
        

    def writeOutFunctionImplementationBody(self, dict, **KWS):
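        """Generate the body of the write-out function: the standard begin and
        end code, with driver feature code (e.g. MPI coordination) wrapped
        around the HDF5 writing done by writeOutFunctionContents."""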



        ## CHEETAH: generated from @def writeOutFunctionImplementationBody($dict) at line 41, col 1.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        
        ########################################
        ## START - generated method body
        
        # 
        _v = VFFSL(SL,"writeOutFunctionImplementationBegin",False)(dict) # u'${writeOutFunctionImplementationBegin(dict)}' on line 43, col 1
        if _v is not None: write(_filter(_v, rawExpr=u'${writeOutFunctionImplementationBegin(dict)}')) # from line 43, col 1.
        write(u'''
''')
        featureOrdering = ['Driver']
        featureDict = dict.copy()
        featureDict['extraIndent'] = 0
        _v = VFFSL(SL,"insertCodeForFeatures",False)('binaryWriteOutBegin', featureOrdering, featureDict) # u"${insertCodeForFeatures('binaryWriteOutBegin', featureOrdering, featureDict)}" on line 48, col 1
        if _v is not None: write(_filter(_v, rawExpr=u"${insertCodeForFeatures('binaryWriteOutBegin', featureOrdering, featureDict)}")) # from line 48, col 1.
        extraIndent = featureDict['extraIndent']
        write(u'''
''')
        _v = VFFSL(SL,"writeOutFunctionContents",False)(dict) # u'${writeOutFunctionContents(dict), extraIndent=extraIndent}' on line 51, col 1
        if _v is not None: write(_filter(_v, extraIndent=extraIndent, rawExpr=u'${writeOutFunctionContents(dict), extraIndent=extraIndent}')) # from line 51, col 1.
        write(u'''
''')
        _v = VFFSL(SL,"insertCodeForFeaturesInReverseOrder",False)('binaryWriteOutEnd', featureOrdering, featureDict) # u"${insertCodeForFeaturesInReverseOrder('binaryWriteOutEnd', featureOrdering, featureDict)}" on line 53, col 1
        if _v is not None: write(_filter(_v, rawExpr=u"${insertCodeForFeaturesInReverseOrder('binaryWriteOutEnd', featureOrdering, featureDict)}")) # from line 53, col 1.
        write(u'''
''')
        _v = VFFSL(SL,"writeOutFunctionImplementationEnd",False)(dict) # u'${writeOutFunctionImplementationEnd(dict)}' on line 55, col 1
        if _v is not None: write(_filter(_v, rawExpr=u'${writeOutFunctionImplementationEnd(dict)}')) # from line 55, col 1.
        # 
        
        ########################################
        ## END - generated method body
        
        return _dummyTrans and trans.response().getvalue() or ""
        

    def truncateOutputFiles(self, baseFilename, **KWS):
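        """Generate code that creates (or truncates) the '<baseFilename>.h5'
        output file using H5Fcreate with H5F_ACC_TRUNC."""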



        ## CHEETAH: generated from @def truncateOutputFiles($baseFilename) at line 59, col 1.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        
        ########################################
        ## START - generated method body
        
        write(u'''char _dataFilename[200];
snprintf(_dataFilename, 200, "%s.h5", ''')
        _v = VFFSL(SL,"baseFilename",True) # u'${baseFilename}' on line 61, col 39
        if _v is not None: write(_filter(_v, rawExpr=u'${baseFilename}')) # from line 61, col 39.
        write(u''');

H5Fclose(H5Fcreate(_dataFilename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT));
''')
        
        ########################################
        ## END - generated method body
        
        return _dummyTrans and trans.response().getvalue() or ""
        

    def writeOutFunctionContents(self, dict, **KWS):
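        """Generate the code that writes one output group to the HDF5 file:
        open or create the file and group, write the coordinate datasets and
        mark them as dimension scales, write each vector component (complex
        components as separate real and imaginary datasets), then close all
        HDF5 handles."""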



        ## CHEETAH: generated from @def writeOutFunctionContents($dict) at line 67, col 1.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        
        ########################################
        ## START - generated method body
        
        # 
        fp = dict['fp']
        baseFilename = dict['baseFilename']
        groupID = dict['groupID']
        field = dict['field']
        basis = dict['basis']
        dependentVariables = dict['dependentVariables']
        componentCount = 0
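        # Count the scalar datasets to be written: complex vector components
        # count twice because they are split into real and imaginary parts.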
        for variable in VFFSL(SL,"dependentVariables",True): # generated from line 76, col 3
            componentCount += len(VFFSL(SL,"variable.vector.components",True))
            if VFFSL(SL,"variable.vector.type",True) == 'complex': # generated from line 78, col 5
                componentCount += len(VFFSL(SL,"variable.vector.components",True))
        dict['componentCount'] = componentCount
        # 
        write(u'''char _h5Filename[200];
snprintf(_h5Filename, 200, "%s.h5", ''')
        _v = VFFSL(SL,"baseFilename",True) # u'${baseFilename}' on line 85, col 37
        if _v is not None: write(_filter(_v, rawExpr=u'${baseFilename}')) # from line 85, col 37.
        write(u''');

/* Open the file */
hid_t hdf5_file = H5Fopen(_h5Filename, H5F_ACC_RDWR, H5P_DEFAULT);
if (hdf5_file < 0) {
  _LOG(_WARNING_LOG_LEVEL, "Failed to open HDF5 file \'%s\', will try to create it.", _h5Filename);
  hdf5_file = H5Fcreate(_h5Filename, H5F_ACC_EXCL, H5P_DEFAULT, H5P_DEFAULT);
  if (hdf5_file < 0) {
    _LOG(_ERROR_LOG_LEVEL, "Failed to create HDF5 file \'%s\'. Bailing.", _h5Filename);
  }
}

/* Create the group for this data */
hid_t group;
if (!H5Lexists(hdf5_file, "/''')
        _v = VFFSL(SL,"groupID",True) # u'${groupID}' on line 99, col 29
        if _v is not None: write(_filter(_v, rawExpr=u'${groupID}')) # from line 99, col 29.
        write(u'''", H5P_DEFAULT))
  group = H5Gcreate(hdf5_file, "/''')
        _v = VFFSL(SL,"groupID",True) # u'${groupID}' on line 100, col 34
        if _v is not None: write(_filter(_v, rawExpr=u'${groupID}')) # from line 100, col 34.
        write(u'''", H5P_DEFAULT);
else
  group = H5Gopen(hdf5_file, "/''')
        _v = VFFSL(SL,"groupID",True) # u'${groupID}' on line 102, col 32
        if _v is not None: write(_filter(_v, rawExpr=u'${groupID}')) # from line 102, col 32.
        write(u'''");

if (''')
        _v = VFFSL(SL,"fp",True) # u'$fp' on line 104, col 5
        if _v is not None: write(_filter(_v, rawExpr=u'$fp')) # from line 104, col 5.
        write(u''') {
  fprintf(''')
        _v = VFFSL(SL,"fp",True) # u'$fp' on line 105, col 11
        if _v is not None: write(_filter(_v, rawExpr=u'$fp')) # from line 105, col 11.
        write(u''', "    <Stream><Metalink Format=\\"HDF5\\" Type=\\"Remote\\" Group=\\"/''')
        _v = VFFSL(SL,"groupID",True) # u'${groupID}' on line 105, col 80
        if _v is not None: write(_filter(_v, rawExpr=u'${groupID}')) # from line 105, col 80.
        write(u'''\\"/>\\n");
  fprintf(''')
        _v = VFFSL(SL,"fp",True) # u'$fp' on line 106, col 11
        if _v is not None: write(_filter(_v, rawExpr=u'$fp')) # from line 106, col 11.
        write(u''', "%s.h5\\n", ''')
        _v = VFFSL(SL,"baseFilename",True) # u'${baseFilename}' on line 106, col 27
        if _v is not None: write(_filter(_v, rawExpr=u'${baseFilename}')) # from line 106, col 27.
        write(u''');
  fprintf(''')
        _v = VFFSL(SL,"fp",True) # u'$fp' on line 107, col 11
        if _v is not None: write(_filter(_v, rawExpr=u'$fp')) # from line 107, col 11.
        write(u''', "    </Stream>\\n");
}

/* Create the coordinate data sets */
hsize_t coordinate_length;
hid_t coordinate_dataspace;
''')
        for dim in field.dimensions: # generated from line 113, col 3
            dimRep = dim.inBasis(basis)
            write(u'''coordinate_length = ''')
            _v = VFFSL(SL,"dimRep.globalLattice",True) # u'${dimRep.globalLattice}' on line 115, col 21
            if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.globalLattice}')) # from line 115, col 21.
            write(u''';
coordinate_dataspace = H5Screate_simple(1, &coordinate_length, NULL);
''')
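            # Choose the HDF5 native type for the coordinate data; H5T_NATIVE_REAL
            # is assumed to alias the simulation's real type in the generated source.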
            dataType = {'real': 'H5T_NATIVE_REAL', 'long': 'H5T_NATIVE_LONG'}[VFFSL(SL,"dimRep.type",True)]
            write(u'''hid_t dataset_''')
            _v = VFFSL(SL,"dimRep.name",True) # u'${dimRep.name}' on line 118, col 15
            if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.name}')) # from line 118, col 15.
            write(u''';
if (!H5Lexists(hdf5_file, "/''')
            _v = VFFSL(SL,"groupID",True) # u'${groupID}' on line 119, col 29
            if _v is not None: write(_filter(_v, rawExpr=u'${groupID}')) # from line 119, col 29.
            write(u'''/''')
            _v = VFFSL(SL,"dimRep.name",True) # u'${dimRep.name}' on line 119, col 40
            if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.name}')) # from line 119, col 40.
            write(u'''", H5P_DEFAULT))
  dataset_''')
            _v = VFFSL(SL,"dimRep.name",True) # u'${dimRep.name}' on line 120, col 11
            if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.name}')) # from line 120, col 11.
            write(u''' = H5Dcreate(hdf5_file, "/''')
            _v = VFFSL(SL,"groupID",True) # u'${groupID}' on line 120, col 51
            if _v is not None: write(_filter(_v, rawExpr=u'${groupID}')) # from line 120, col 51.
            write(u'''/''')
            _v = VFFSL(SL,"dimRep.name",True) # u'${dimRep.name}' on line 120, col 62
            if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.name}')) # from line 120, col 62.
            write(u'''", ''')
            _v = VFFSL(SL,"dataType",True) # u'${dataType}' on line 120, col 79
            if _v is not None: write(_filter(_v, rawExpr=u'${dataType}')) # from line 120, col 79.
            write(u''', coordinate_dataspace, H5P_DEFAULT);
else
  dataset_''')
            _v = VFFSL(SL,"dimRep.name",True) # u'${dimRep.name}' on line 122, col 11
            if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.name}')) # from line 122, col 11.
            write(u''' = H5Dopen(hdf5_file, "/''')
            _v = VFFSL(SL,"groupID",True) # u'${groupID}' on line 122, col 49
            if _v is not None: write(_filter(_v, rawExpr=u'${groupID}')) # from line 122, col 49.
            write(u'''/''')
            _v = VFFSL(SL,"dimRep.name",True) # u'${dimRep.name}' on line 122, col 60
            if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.name}')) # from line 122, col 60.
            write(u'''");
''')
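            # Split-uniform dimensions (e.g. Fourier-space coordinates) are stored
            # in wrap-around order in memory; write a temporary, monotonically
            # ordered copy of the coordinate array to the file instead.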
            if isinstance(dimRep, SplitUniformDimensionRepresentation): # generated from line 123, col 5
                dimArrayName = ''.join([str(VFFSL(SL,"dimRep.name",True)),u'_data'])
                _v = VFFSL(SL,"dimRep.type",True) # u'${dimRep.type}' on line 125, col 1
                if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.type}')) # from line 125, col 1.
                write(u'''* ''')
                _v = VFFSL(SL,"dimArrayName",True) # u'${dimArrayName}' on line 125, col 17
                if _v is not None: write(_filter(_v, rawExpr=u'${dimArrayName}')) # from line 125, col 17.
                write(u''' = (''')
                _v = VFFSL(SL,"dimRep.type",True) # u'${dimRep.type}' on line 125, col 36
                if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.type}')) # from line 125, col 36.
                write(u'''*)xmds_malloc(''')
                _v = VFFSL(SL,"dimRep.globalLattice",True) # u'${dimRep.globalLattice}' on line 125, col 64
                if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.globalLattice}')) # from line 125, col 64.
                write(u''' * sizeof(''')
                _v = VFFSL(SL,"dimRep.type",True) # u'${dimRep.type}' on line 125, col 97
                if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.type}')) # from line 125, col 97.
                write(u'''));
for (long _i0 = 0; _i0 < ''')
                _v = VFFSL(SL,"dimRep.globalLattice",True) # u'${dimRep.globalLattice}' on line 126, col 26
                if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.globalLattice}')) # from line 126, col 26.
                write(u'''; _i0++) {
  ''')
                _v = VFFSL(SL,"dimArrayName",True) # u'${dimArrayName}' on line 127, col 3
                if _v is not None: write(_filter(_v, rawExpr=u'${dimArrayName}')) # from line 127, col 3.
                write(u'''[_i0] = ''')
                _v = VFFSL(SL,"dimRep.arrayName",True) # u'${dimRep.arrayName}' on line 127, col 26
                if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.arrayName}')) # from line 127, col 26.
                write(u'''[(_i0 + (''')
                _v = VFFSL(SL,"dimRep.globalLattice",True) # u'${dimRep.globalLattice}' on line 127, col 54
                if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.globalLattice}')) # from line 127, col 54.
                write(u'''+1)/2) % ''')
                _v = VFFSL(SL,"dimRep.globalLattice",True) # u'${dimRep.globalLattice}' on line 127, col 86
                if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.globalLattice}')) # from line 127, col 86.
                write(u'''];
}
''')
            else: # generated from line 129, col 5
                dimArrayName = dimRep.arrayName
            write(u'''H5Dwrite(dataset_''')
            _v = VFFSL(SL,"dimRep.name",True) # u'${dimRep.name}' on line 132, col 18
            if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.name}')) # from line 132, col 18.
            write(u''', ''')
            _v = VFFSL(SL,"dataType",True) # u'$dataType' on line 132, col 34
            if _v is not None: write(_filter(_v, rawExpr=u'$dataType')) # from line 132, col 34.
            write(u''', H5S_ALL, H5S_ALL, H5P_DEFAULT, ''')
            _v = VFFSL(SL,"dimArrayName",True) # u'${dimArrayName}' on line 132, col 76
            if _v is not None: write(_filter(_v, rawExpr=u'${dimArrayName}')) # from line 132, col 76.
            write(u''');
#if defined(HAVE_HDF5_HL)
  H5DSset_scale(dataset_''')
            _v = VFFSL(SL,"dimRep.name",True) # u'${dimRep.name}' on line 134, col 25
            if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.name}')) # from line 134, col 25.
            write(u''', "''')
            _v = VFFSL(SL,"dimRep.name",True) # u'${dimRep.name}' on line 134, col 42
            if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.name}')) # from line 134, col 42.
            write(u'''");
#endif

''')
            if isinstance(dimRep, SplitUniformDimensionRepresentation): # generated from line 137, col 5
                write(u'''xmds_free(''')
                _v = VFFSL(SL,"dimArrayName",True) # u'${dimArrayName}' on line 138, col 11
                if _v is not None: write(_filter(_v, rawExpr=u'${dimArrayName}')) # from line 138, col 11.
                write(u''');
''')
            write(u'''H5Sclose(coordinate_dataspace);
''')
        write(u'''
hsize_t file_dims[] = {''')
        _v = ', '.join(dim.inBasis(basis).globalLattice for dim in field.dimensions) # u"${', '.join(dim.inBasis(basis).globalLattice for dim in field.dimensions)}" on line 143, col 24
        if _v is not None: write(_filter(_v, rawExpr=u"${', '.join(dim.inBasis(basis).globalLattice for dim in field.dimensions)}")) # from line 143, col 24.
        write(u'''};
hid_t file_dataspace = H5Screate_simple(''')
        _v = VFFSL(SL,"len",False)(field.dimensions) # u'${len(field.dimensions)}' on line 144, col 41
        if _v is not None: write(_filter(_v, rawExpr=u'${len(field.dimensions)}')) # from line 144, col 41.
        write(u''', file_dims, NULL);

''')
        for variable in dependentVariables: # generated from line 146, col 3
            if VFFSL(SL,"variable.vector.type",True) == 'real': # generated from line 147, col 5
                variable['separatedComponents'] = list(enumerate(VFFSL(SL,"variable.components",True)))
            else: # generated from line 149, col 5
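                # Complex components are split into two real datasets per
                # component, suffixed 'R' (real part) and 'I' (imaginary part).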
                components = []
                variable['separatedComponents'] = components
                for offset, componentName in enumerate(VFFSL(SL,"variable.components",True)): # generated from line 152, col 7
                    components.extend([(2*offset, componentName + 'R'), (2*offset+1, componentName + 'I')])
            for offset, componentName in VFFSL(SL,"variable.separatedComponents",True): # generated from line 156, col 5
                write(u'''hid_t dataset_''')
                _v = VFFSL(SL,"componentName",True) # u'${componentName}' on line 157, col 15
                if _v is not None: write(_filter(_v, rawExpr=u'${componentName}')) # from line 157, col 15.
                write(u''';
if (!H5Lexists(hdf5_file, "/''')
                _v = VFFSL(SL,"groupID",True) # u'${groupID}' on line 158, col 29
                if _v is not None: write(_filter(_v, rawExpr=u'${groupID}')) # from line 158, col 29.
                write(u'''/''')
                _v = VFFSL(SL,"componentName",True) # u'${componentName}' on line 158, col 40
                if _v is not None: write(_filter(_v, rawExpr=u'${componentName}')) # from line 158, col 40.
                write(u'''", H5P_DEFAULT))
  dataset_''')
                _v = VFFSL(SL,"componentName",True) # u'${componentName}' on line 159, col 11
                if _v is not None: write(_filter(_v, rawExpr=u'${componentName}')) # from line 159, col 11.
                write(u''' = H5Dcreate(hdf5_file, "/''')
                _v = VFFSL(SL,"groupID",True) # u'${groupID}' on line 159, col 53
                if _v is not None: write(_filter(_v, rawExpr=u'${groupID}')) # from line 159, col 53.
                write(u'''/''')
                _v = VFFSL(SL,"componentName",True) # u'${componentName}' on line 159, col 64
                if _v is not None: write(_filter(_v, rawExpr=u'${componentName}')) # from line 159, col 64.
                write(u'''", H5T_NATIVE_REAL, file_dataspace, H5P_DEFAULT);
else
  dataset_''')
                _v = VFFSL(SL,"componentName",True) # u'${componentName}' on line 161, col 11
                if _v is not None: write(_filter(_v, rawExpr=u'${componentName}')) # from line 161, col 11.
                write(u''' = H5Dopen(hdf5_file, "/''')
                _v = VFFSL(SL,"groupID",True) # u'${groupID}' on line 161, col 51
                if _v is not None: write(_filter(_v, rawExpr=u'${groupID}')) # from line 161, col 51.
                write(u'''/''')
                _v = VFFSL(SL,"componentName",True) # u'${componentName}' on line 161, col 62
                if _v is not None: write(_filter(_v, rawExpr=u'${componentName}')) # from line 161, col 62.
                write(u'''");
#if defined(HAVE_HDF5_HL)
''')
                for dimNum, dim in enumerate(field.dimensions): # generated from line 163, col 7
                    write(u'''  H5DSattach_scale(dataset_''')
                    _v = VFFSL(SL,"componentName",True) # u'${componentName}' on line 164, col 28
                    if _v is not None: write(_filter(_v, rawExpr=u'${componentName}')) # from line 164, col 28.
                    write(u''', dataset_''')
                    _v = VFN(VFN(VFFSL(SL,"dim",True),"inBasis",False)(basis),"name",True) # u'${dim.inBasis(basis).name}' on line 164, col 54
                    if _v is not None: write(_filter(_v, rawExpr=u'${dim.inBasis(basis).name}')) # from line 164, col 54.
                    write(u''', ''')
                    _v = VFFSL(SL,"dimNum",True) # u'${dimNum}' on line 164, col 82
                    if _v is not None: write(_filter(_v, rawExpr=u'${dimNum}')) # from line 164, col 82.
                    write(u''');
''')
                write(u'''#endif
''')
        for dim in field.dimensions: # generated from line 169, col 3
            dimRep = dim.inBasis(basis)
            write(u'''H5Dclose(dataset_''')
            _v = VFFSL(SL,"dimRep.name",True) # u'${dimRep.name}' on line 171, col 18
            if _v is not None: write(_filter(_v, rawExpr=u'${dimRep.name}')) # from line 171, col 18.
            write(u''');
''')
        write(u'''
''')
        #  Driver features (e.g. the MPI driver) insert code here to coordinate the data write
        featureOrdering = ['Driver']
        featureDict = dict.copy()
        featureDict['extraIndent'] = 0
        _v = VFFSL(SL,"insertCodeForFeatures",False)('binaryWriteOutWriteDataBegin', VFFSL(SL,"featureOrdering",True), featureDict) # u"${insertCodeForFeatures('binaryWriteOutWriteDataBegin', $featureOrdering, featureDict)}" on line 178, col 1
        if _v is not None: write(_filter(_v, rawExpr=u"${insertCodeForFeatures('binaryWriteOutWriteDataBegin', $featureOrdering, featureDict)}")) # from line 178, col 1.
        extraIndent = featureDict['extraIndent']
        dict['operation'] = 'write'
        dict['variables'] = dict['dependentVariables']
        write(u'''
if (''')
        _v = VFN(VFFSL(SL,"field",True),"sizeInBasis",False)(basis) # u'${field.sizeInBasis(basis)}' on line 183, col 5
        if _v is not None: write(_filter(_v, rawExpr=u'${field.sizeInBasis(basis)}')) # from line 183, col 5.
        write(u''') {
  ''')
        _v = VFFSL(SL,"processData",False)(dict) # u'${processData(dict), autoIndent=True, extraIndent=extraIndent}' on line 184, col 3
        if _v is not None: write(_filter(_v, autoIndent=True, extraIndent=extraIndent, rawExpr=u'${processData(dict), autoIndent=True, extraIndent=extraIndent}')) # from line 184, col 3.
        write(u'''}

''')
        #  The matching driver clean-up code is inserted here, in reverse feature order
        _v = VFFSL(SL,"insertCodeForFeaturesInReverseOrder",False)('binaryWriteOutWriteDataEnd', VFFSL(SL,"featureOrdering",True), featureDict) # u"${insertCodeForFeaturesInReverseOrder('binaryWriteOutWriteDataEnd', $featureOrdering, featureDict)}" on line 188, col 1
        if _v is not None: write(_filter(_v, rawExpr=u"${insertCodeForFeaturesInReverseOrder('binaryWriteOutWriteDataEnd', $featureOrdering, featureDict)}")) # from line 188, col 1.
        write(u'''
''')
        for variable in dependentVariables: # generated from line 190, col 3
            for offset, componentName in VFFSL(SL,"variable.separatedComponents",True): # generated from line 191, col 5
                write(u'''H5Dclose(dataset_''')
                _v = VFFSL(SL,"componentName",True) # u'${componentName}' on line 192, col 18
                if _v is not None: write(_filter(_v, rawExpr=u'${componentName}')) # from line 192, col 18.
                write(u''');
''')
        write(u'''
H5Sclose(file_dataspace);
H5Gclose(group);
H5Fclose(hdf5_file);
''')
        # 
        
        ########################################
        ## END - generated method body
        
        return _dummyTrans and trans.response().getvalue() or ""
        

    def writeBody(self, **KWS):
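        """Main template method. Outside its @def blocks the source template
        contains only the licence header, so this writes blank lines."""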



        ## CHEETAH: main method generated for this template
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        
        ########################################
        ## START - generated method body
        
        # 
        # HDF5Format.tmpl
        # 
        # Created by Graham Dennis on 2009-01-24.
        # 
        # Copyright (c) 2009-2012, Graham Dennis
        # 
        # This program is free software: you can redistribute it and/or modify
        # it under the terms of the GNU General Public License as published by
        # the Free Software Foundation, either version 2 of the License, or
        # (at your option) any later version.
        # 
        # This program is distributed in the hope that it will be useful,
        # but WITHOUT ANY WARRANTY; without even the implied warranty of
        # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
        # GNU General Public License for more details.
        # 
        # You should have received a copy of the GNU General Public License
        # along with this program.  If not, see <http://www.gnu.org/licenses/>.
        # 
        write(u'''






''')
        
        ########################################
        ## END - generated method body
        
        return _dummyTrans and trans.response().getvalue() or ""
        
    ##################################################
    ## CHEETAH GENERATED ATTRIBUTES


    _CHEETAH__instanceInitialized = False

    _CHEETAH_version = __CHEETAH_version__

    _CHEETAH_versionTuple = __CHEETAH_versionTuple__

    _CHEETAH_genTime = __CHEETAH_genTime__

    _CHEETAH_genTimestamp = __CHEETAH_genTimestamp__

    _CHEETAH_src = __CHEETAH_src__

    _CHEETAH_srcLastModified = __CHEETAH_srcLastModified__

    name = 'hdf5'

    mpiSafe = True

    _mainCheetahMethod_for_HDF5Format= 'writeBody'

## END CLASS DEFINITION

if not hasattr(HDF5Format, '_initCheetahAttributes'):
    templateAPIClass = getattr(HDF5Format, '_CHEETAH_templateClass', Template)
    templateAPIClass._addCheetahPlumbingCodeToClass(HDF5Format)


# CHEETAH was developed by Tavis Rudd and Mike Orr
# with code, advice and input from many other volunteers.
# For more information visit http://www.CheetahTemplate.org/

##################################################
## if run from command line:
if __name__ == '__main__':
    from Cheetah.TemplateCmdLineIface import CmdLineIface
    CmdLineIface(templateObj=HDF5Format()).run()


