#!/usr/bin/env python3
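# Compiled Cheetah template for xpdeint's binary output format, generated from
# xpdeint/Features/BinaryFormat.tmpl. This file is regenerated by Cheetah, so
# changes belong in the .tmpl source rather than here.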




##################################################
## DEPENDENCIES
import sys
import os
import os.path
try:
    import builtins as builtin
except ImportError:
    import __builtin__ as builtin
from os.path import getmtime, exists
import time
import types
from Cheetah.Version import MinCompatibleVersion as RequiredCheetahVersion
from Cheetah.Version import MinCompatibleVersionTuple as RequiredCheetahVersionTuple
from Cheetah.Template import Template
from Cheetah.DummyTransaction import *
from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList
from Cheetah.CacheRegion import CacheRegion
import Cheetah.Filters as Filters
import Cheetah.ErrorCatchers as ErrorCatchers
from Cheetah.compat import unicode
from xpdeint.Features.OutputFormat import OutputFormat
from xpdeint.Geometry.NonUniformDimensionRepresentation import NonUniformDimensionRepresentation
from xpdeint.CallOnceGuards import callOncePerInstanceGuard

##################################################
## MODULE CONSTANTS
VFFSL=valueFromFrameOrSearchList
VFSL=valueFromSearchList
VFN=valueForName
currentTime=time.time
__CHEETAH_version__ = '3.2.3'
__CHEETAH_versionTuple__ = (3, 2, 3, 'final', 0)
__CHEETAH_genTime__ = 1558054970.429099
__CHEETAH_genTimestamp__ = 'Fri May 17 11:02:50 2019'
__CHEETAH_src__ = '/home/mattias/xmds-2.2.3/admin/staging/xmds-3.0.0/xpdeint/Features/BinaryFormat.tmpl'
__CHEETAH_srcLastModified__ = 'Thu Apr  4 16:29:24 2019'
__CHEETAH_docstring__ = 'Autogenerated by Cheetah: The Python-Powered Template Engine'

if __CHEETAH_versionTuple__ < RequiredCheetahVersionTuple:
    raise AssertionError(
      'This template was compiled with Cheetah version'
      ' %s. Templates compiled before version %s must be recompiled.'%(
         __CHEETAH_version__, RequiredCheetahVersion))

##################################################
## CLASSES

class BinaryFormat(OutputFormat):
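    """
    Output format that writes simulation data as raw binary .dat files,
    referenced from the XSIL output via a <Stream><Metalink Format="Binary" .../>
    element.  The methods below emit the C code that performs the writing; the
    readable source is xpdeint/Features/BinaryFormat.tmpl.
    """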

    ##################################################
    ## CHEETAH GENERATED METHODS


    def __init__(self, *args, **KWs):

        super(BinaryFormat, self).__init__(*args, **KWs)
        if not self._CHEETAH__instanceInitialized:
            cheetahKWArgs = {}
            allowedKWs = 'searchList namespaces filter filtersLib errorCatcher'.split()
            for k,v in KWs.items():
                if k in allowedKWs: cheetahKWArgs[k] = v
            self._initCheetahInstance(**cheetahKWArgs)
        

    def description(self, **KWS):
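        """Return the short description string 'binary output format'."""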



        ## Generated from @def description: binary output format at line 28, col 1.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        
        ########################################
        ## START - generated method body
        
        write('''binary output format''')
        
        ########################################
        ## END - generated method body
        
        return _dummyTrans and trans.response().getvalue() or ""
        

    def writeOutFunctionImplementationBody(self, dict, **KWS):
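        """
        Emit the body of the C write-out function: the OutputFormat
        begin/end code around writeOutFunctionContents(dict), with the
        Driver feature's 'binaryWriteOutBegin' / 'binaryWriteOutEnd' hooks
        inserted (and any extra indentation they request applied).
        """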



        ## CHEETAH: generated from @def writeOutFunctionImplementationBody($dict) at line 32, col 1.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        
        ########################################
        ## START - generated method body
        
        # 
        _v = VFFSL(SL,"writeOutFunctionImplementationBegin",False)(dict) # '${writeOutFunctionImplementationBegin(dict)}' on line 34, col 1
        if _v is not None: write(_filter(_v, rawExpr='${writeOutFunctionImplementationBegin(dict)}')) # from line 34, col 1.
        write('''
''')
        # 
        featureOrdering = ['Driver']
        featureDict = dict.copy()
        featureDict['extraIndent'] = 0
        _v = VFFSL(SL,"insertCodeForFeatures",False)('binaryWriteOutBegin', featureOrdering, featureDict) # "${insertCodeForFeatures('binaryWriteOutBegin', featureOrdering, featureDict)}" on line 40, col 1
        if _v is not None: write(_filter(_v, rawExpr="${insertCodeForFeatures('binaryWriteOutBegin', featureOrdering, featureDict)}")) # from line 40, col 1.
        extraIndent = featureDict['extraIndent']
        write('''
''')
        _v = VFFSL(SL,"writeOutFunctionContents",False)(dict) # '${writeOutFunctionContents(dict), extraIndent=extraIndent}' on line 43, col 1
        if _v is not None: write(_filter(_v, extraIndent=extraIndent, rawExpr='${writeOutFunctionContents(dict), extraIndent=extraIndent}')) # from line 43, col 1.
        write('''
''')
        _v = VFFSL(SL,"insertCodeForFeaturesInReverseOrder",False)('binaryWriteOutEnd', featureOrdering, featureDict) # "${insertCodeForFeaturesInReverseOrder('binaryWriteOutEnd', featureOrdering, featureDict)}" on line 45, col 1
        if _v is not None: write(_filter(_v, rawExpr="${insertCodeForFeaturesInReverseOrder('binaryWriteOutEnd', featureOrdering, featureDict)}")) # from line 45, col 1.
        write('''
''')
        _v = VFFSL(SL,"writeOutFunctionImplementationEnd",False)(dict) # '${writeOutFunctionImplementationEnd(dict)}' on line 47, col 1
        if _v is not None: write(_filter(_v, rawExpr='${writeOutFunctionImplementationEnd(dict)}')) # from line 47, col 1.
        # 
        
        ########################################
        ## END - generated method body
        
        return _dummyTrans and trans.response().getvalue() or ""
        

    def truncateOutputFiles(self, baseFilename, **KWS):
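        """
        Emit C code that truncates the binary data files before writing:
        for each output group the generated loop builds the .dat filename
        from ``baseFilename`` and does ``fclose(fopen(_dataFilename, "wb"))``
        to empty the file.
        """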



        ## CHEETAH: generated from @def truncateOutputFiles($baseFilename) at line 51, col 1.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        
        ########################################
        ## START - generated method body
        
        write('''char _dataFilename[200];
for (int _i = 0; _i < ''')
        _v = VFFSL(SL,"parent.outputGroups",True) # '${parent.outputGroups}' on line 53, col 23
        if _v is not None: write(_filter(_v, rawExpr='${parent.outputGroups}')) # from line 53, col 23.
        write('''; _i++) {
''')
        #  FIXME: This is a dodgy, dodgy hack. chunked_output should either be removed or rethought.
        if hasattr(self.parent, 'featureName') and self.parent.featureName == 'Output': # generated from line 55, col 2
            write('''  snprintf(_dataFilename, 200, "%s_mg%i.dat", ''')
            _v = VFFSL(SL,"baseFilename",True) # '${baseFilename}' on line 56, col 47
            if _v is not None: write(_filter(_v, rawExpr='${baseFilename}')) # from line 56, col 47.
            write(''', _i);
''')
        else: # generated from line 57, col 2
            write('''  snprintf(_dataFilename, 200, "%s.dat", ''')
            _v = VFFSL(SL,"baseFilename",True) # '${baseFilename}' on line 58, col 42
            if _v is not None: write(_filter(_v, rawExpr='${baseFilename}')) # from line 58, col 42.
            write(''');
''')
        write('''  fclose(fopen(_dataFilename, "wb"));  // truncate the file
}
''')
        
        ########################################
        ## END - generated method body
        
        return _dummyTrans and trans.response().getvalue() or ""
        

    def writeOutFunctionContents(self, dict, **KWS):
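        """
        Emit the C code that writes one output group.  The generated code:

          * writes the <Stream><Metalink Format="Binary" .../> element and the
            .dat filename to the XSIL file handle ``fp`` (when it is non-NULL),
          * reopens the .dat file and writes, for each dimension, its lattice
            size (unsigned long) followed by its coordinate values (real),
          * seeds each component block with its data size, so the file layout
            is the dimension header followed by one [size, data...] block per
            component,
          * then calls writeData(dict), wrapped in the Driver feature's
            'binaryWriteOutWriteDataBegin' / '...End' hooks where the MPI
            code is inserted.
        """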



        ## CHEETAH: generated from @def writeOutFunctionContents($dict) at line 64, col 1.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        
        ########################################
        ## START - generated method body
        
        # 
        fp = dict['fp']
        baseFilename = dict['baseFilename']
        outputGroupFilenameSuffix = dict['outputGroupFilenameSuffix']
        field = dict['field']
        basis = dict['basis']
        dependentVariables = dict['dependentVariables']
        componentCount = 0
        for variable in VFFSL(SL,"dependentVariables",True): # generated from line 73, col 3
            componentCount += len(VFFSL(SL,"variable.vector.components",True))
            if VFFSL(SL,"variable.vector.type",True) == 'complex': # generated from line 75, col 5
                componentCount += len(VFFSL(SL,"variable.vector.components",True))
        dict['componentCount'] = componentCount
        # 
        write('''const char *encoding = NULL;
#if CFG_ENDIAN == CFG_ENDIAN_BIG
  encoding = "BigEndian";
#else
  encoding = "LittleEndian";
#endif

char _datFilename[200];
snprintf(_datFilename, 200, "%s''')
        _v = VFFSL(SL,"outputGroupFilenameSuffix",True) # '${outputGroupFilenameSuffix}' on line 89, col 32
        if _v is not None: write(_filter(_v, rawExpr='${outputGroupFilenameSuffix}')) # from line 89, col 32.
        write('''.dat", ''')
        _v = VFFSL(SL,"baseFilename",True) # '${baseFilename}' on line 89, col 67
        if _v is not None: write(_filter(_v, rawExpr='${baseFilename}')) # from line 89, col 67.
        write(''');

if (''')
        _v = VFFSL(SL,"fp",True) # '$fp' on line 91, col 5
        if _v is not None: write(_filter(_v, rawExpr='$fp')) # from line 91, col 5.
        write(''') {
  const char *unsignedLongType = NULL;
  if (sizeof(unsigned long) == 4)
    unsignedLongType = "uint32";
  else if (sizeof(unsigned long) == 8)
    unsignedLongType = "uint64";
  else
    unsignedLongType = "ulong";

  fprintf(''')
        _v = VFFSL(SL,"fp",True) # '$fp' on line 100, col 11
        if _v is not None: write(_filter(_v, rawExpr='$fp')) # from line 100, col 11.
        write(''', "    <Stream><Metalink Format=\\"Binary\\" UnsignedLong=\\"%s\\" precision=\\"''')
        _v = VFFSL(SL,"precision",True) # '${precision}' on line 100, col 89
        if _v is not None: write(_filter(_v, rawExpr='${precision}')) # from line 100, col 89.
        write('''\\" Type=\\"Remote\\" Encoding=\\"%s\\"/>\\n",
          unsignedLongType, encoding);
  fprintf(''')
        _v = VFFSL(SL,"fp",True) # '$fp' on line 102, col 11
        if _v is not None: write(_filter(_v, rawExpr='$fp')) # from line 102, col 11.
        write(''', "%s\\n", _datFilename);
}

FILE* fpBinary;
if ((fpBinary = fopen(_datFilename, "r+b")) == NULL)
  // _LOG will cause the simulation to exit
  _LOG(_ERROR_LOG_LEVEL, "Unable to open output file %s\\n"
                         "Chucking a spack...\\n", _datFilename);

unsigned long dataSize;
off_t fieldOffset = 0;
real coordinate;

''')
        for dim in VFFSL(SL,"field.dimensions",True): # generated from line 115, col 3
            dimRep = VFN(VFFSL(SL,"dim",True),"inBasis",False)(basis)
            write('''dataSize = ''')
            _v = VFFSL(SL,"dimRep.globalLattice",True) # '${dimRep.globalLattice}' on line 117, col 12
            if _v is not None: write(_filter(_v, rawExpr='${dimRep.globalLattice}')) # from line 117, col 12.
            write(''';
if (fwrite(&dataSize, sizeof(unsigned long), 1, fpBinary) != 1) {
  _LOG(_ERROR_LOG_LEVEL, "Error writing size of dimension \'''')
            _v = VFFSL(SL,"dimRep.name",True) # '${dimRep.name}' on line 119, col 60
            if _v is not None: write(_filter(_v, rawExpr='${dimRep.name}')) # from line 119, col 60.
            write('''\' to binary data file \'%s\'.\\n", _datFilename);
}
''')
            if isinstance(dimRep, NonUniformDimensionRepresentation): # generated from line 121, col 5
                write('''if (fwrite(''')
                _v = VFFSL(SL,"dimRep.arrayName",True) # '${dimRep.arrayName}' on line 122, col 12
                if _v is not None: write(_filter(_v, rawExpr='${dimRep.arrayName}')) # from line 122, col 12.
                write(''', sizeof(real), dataSize, fpBinary) != dataSize) {
  _LOG(_ERROR_LOG_LEVEL, "Error writing coordinate values for dimension \'''')
                _v = VFFSL(SL,"dimRep.name",True) # '${dimRep.name}' on line 123, col 74
                if _v is not None: write(_filter(_v, rawExpr='${dimRep.name}')) # from line 123, col 74.
                write('''\' to binary data file \'%s\'.\\n", _datFilename);
}
''')
            else: # generated from line 125, col 5
                write('''coordinate = ''')
                _v = VFFSL(SL,"dimRep.minimum",True) # '${dimRep.minimum}' on line 126, col 14
                if _v is not None: write(_filter(_v, rawExpr='${dimRep.minimum}')) # from line 126, col 14.
                write(''';
for (long _i0 = 0; _i0 < dataSize; _i0++, coordinate += ''')
                _v = VFFSL(SL,"dimRep.stepSize",True) # '${dimRep.stepSize}' on line 127, col 57
                if _v is not None: write(_filter(_v, rawExpr='${dimRep.stepSize}')) # from line 127, col 57.
                write(''') {
  if (fwrite(&coordinate, sizeof(real), 1, fpBinary) != 1) {
    _LOG(_ERROR_LOG_LEVEL, "Error writing coordinate values for dimension \'''')
                _v = VFFSL(SL,"dimRep.name",True) # '${dimRep.name}' on line 129, col 76
                if _v is not None: write(_filter(_v, rawExpr='${dimRep.name}')) # from line 129, col 76.
                write('''\' to binary data file \'%s\'.\\n", _datFilename);
  }
}
''')
            write('''fieldOffset += sizeof(unsigned long) + sizeof(real) * dataSize;

''')
        # 
        if field.dimensions: # generated from line 137, col 3
            write('''dataSize = ''')
            _v = ' * '.join([dim.inBasis(basis).globalLattice for dim in field.dimensions]) # "${' * '.join([dim.inBasis(basis).globalLattice for dim in field.dimensions])}" on line 138, col 12
            if _v is not None: write(_filter(_v, rawExpr="${' * '.join([dim.inBasis(basis).globalLattice for dim in field.dimensions])}")) # from line 138, col 12.
            write(''';
''')
        else: # generated from line 139, col 3
            write('''dataSize = 1;
''')
        write('''off_t vectorFieldSize = dataSize * sizeof(real) + sizeof(unsigned long);

for (int _i = 0; _i < ''')
        _v = VFFSL(SL,"componentCount",True) # '${componentCount}' on line 144, col 23
        if _v is not None: write(_filter(_v, rawExpr='${componentCount}')) # from line 144, col 23.
        write('''; _i++) {
  fseeko(fpBinary, fieldOffset + _i * vectorFieldSize, SEEK_SET);
  if (fwrite(&dataSize, sizeof(unsigned long), 1, fpBinary) != 1) {
    _LOG(_ERROR_LOG_LEVEL, "Error writing vector size to binary data file \'%s\'.\\n", _datFilename);
  }
}

''')
        #  This is where all of the magic MPI code goes
        featureOrdering = ['Driver']
        featureDict = dict.copy()
        featureDict['extraIndent'] = 0
        _v = VFFSL(SL,"insertCodeForFeatures",False)('binaryWriteOutWriteDataBegin', VFFSL(SL,"featureOrdering",True), featureDict) # "${insertCodeForFeatures('binaryWriteOutWriteDataBegin', $featureOrdering, featureDict)}" on line 155, col 1
        if _v is not None: write(_filter(_v, rawExpr="${insertCodeForFeatures('binaryWriteOutWriteDataBegin', $featureOrdering, featureDict)}")) # from line 155, col 1.
        extraIndent = featureDict['extraIndent']
        write('''
''')
        _v = VFFSL(SL,"writeData",False)(dict) # '${writeData(dict), extraIndent=extraIndent}' on line 158, col 1
        if _v is not None: write(_filter(_v, extraIndent=extraIndent, rawExpr='${writeData(dict), extraIndent=extraIndent}')) # from line 158, col 1.
        write('''
''')
        #  This is where the rest of the magic MPI code goes
        _v = VFFSL(SL,"insertCodeForFeaturesInReverseOrder",False)('binaryWriteOutWriteDataEnd', VFFSL(SL,"featureOrdering",True), featureDict) # "${insertCodeForFeaturesInReverseOrder('binaryWriteOutWriteDataEnd', $featureOrdering, featureDict)}" on line 161, col 1
        if _v is not None: write(_filter(_v, rawExpr="${insertCodeForFeaturesInReverseOrder('binaryWriteOutWriteDataEnd', $featureOrdering, featureDict)}")) # from line 161, col 1.
        write('''
fclose(fpBinary);
if (''')
        _v = VFFSL(SL,"fp",True) # '$fp' on line 164, col 5
        if _v is not None: write(_filter(_v, rawExpr='$fp')) # from line 164, col 5.
        write(''')
  fprintf(''')
        _v = VFFSL(SL,"fp",True) # '$fp' on line 165, col 11
        if _v is not None: write(_filter(_v, rawExpr='$fp')) # from line 165, col 11.
        write(''', "    </Stream>\\n");
''')
        # 
        
        ########################################
        ## END - generated method body
        
        return _dummyTrans and trans.response().getvalue() or ""
        

    def writeData(self, dict, **KWS):
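        """
        Emit the C loops that write the actual data: for every dependent
        variable, loop over its components (twice as many for complex
        vectors, which are stored as pairs of reals) and run the field loop
        produced by loopOverFieldInBasisWithVectorsAndInnerContent with the
        fwrite body from innerLoopsForVariable.
        """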



        ## CHEETAH: generated from @def writeData($dict) at line 170, col 1.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        
        ########################################
        ## START - generated method body
        
        # 
        field = dict['field']
        basis = dict['basis']
        dependentVariables = dict['dependentVariables']
        # 
        variablesInEarlierVectors = 0
        for variable in VFFSL(SL,"dependentVariables",True): # generated from line 177, col 3
            componentNameSizePrefix = ''
            if VFFSL(SL,"variable.vector.type",True) == 'complex': # generated from line 179, col 5
                componentNameSizePrefix = '2 * '
            write("""// loop over components of vector '""")
            _v = VFFSL(SL,"variable.vector.id",True) # '${variable.vector.id}' on line 182, col 36
            if _v is not None: write(_filter(_v, rawExpr='${variable.vector.id}')) # from line 182, col 36.
            write("""' (array '""")
            _v = VFFSL(SL,"variable.arrayName",True) # '${variable.arrayName}' on line 182, col 67
            if _v is not None: write(_filter(_v, rawExpr='${variable.arrayName}')) # from line 182, col 67.
            write("""')
for (unsigned int _component = 0; _component < """)
            _v = VFFSL(SL,"componentNameSizePrefix",True) # '${componentNameSizePrefix}' on line 183, col 48
            if _v is not None: write(_filter(_v, rawExpr='${componentNameSizePrefix}')) # from line 183, col 48.
            write('''_''')
            _v = VFFSL(SL,"variable.vector.id",True) # '${variable.vector.id}' on line 183, col 75
            if _v is not None: write(_filter(_v, rawExpr='${variable.vector.id}')) # from line 183, col 75.
            write('''_ncomponents; _component++) {
  off_t _outputfield_index_pointer, _outputfield_old_index_pointer;
  _outputfield_index_pointer = -42; // Just so that we always seek the first time

''')
            innerContent = VFFSL(SL,"innerLoopsForVariable",False)(VFFSL(SL,"variable",True), variablesInEarlierVectors, dict)
            vectors = [VFFSL(SL,"variable.vector",True)]
            write('''  ''')
            _v = VFFSL(SL,"loopOverFieldInBasisWithVectorsAndInnerContent",False)(VFFSL(SL,"field",True), VFFSL(SL,"basis",True), VFFSL(SL,"vectors",True), VFFSL(SL,"innerContent",True), vectorsNotNeedingDefines=vectors) # '${loopOverFieldInBasisWithVectorsAndInnerContent($field, $basis, $vectors, $innerContent, vectorsNotNeedingDefines=vectors), autoIndent=True}' on line 189, col 3
            if _v is not None: write(_filter(_v, autoIndent=True, rawExpr='${loopOverFieldInBasisWithVectorsAndInnerContent($field, $basis, $vectors, $innerContent, vectorsNotNeedingDefines=vectors), autoIndent=True}')) # from line 189, col 3.
            write("""} // end loop over components of vector '""")
            _v = VFFSL(SL,"variable.vector.id",True) # '${variable.vector.id}' on line 190, col 42
            if _v is not None: write(_filter(_v, rawExpr='${variable.vector.id}')) # from line 190, col 42.
            write("""' (array '""")
            _v = VFFSL(SL,"variable.arrayName",True) # '${variable.arrayName}' on line 190, col 73
            if _v is not None: write(_filter(_v, rawExpr='${variable.arrayName}')) # from line 190, col 73.
            write("""')
""")
            variablesInEarlierVectors += VFFSL(SL,"variable.vector.nComponents",True)
            if VFFSL(SL,"variable.vector.type",True) == 'complex': # generated from line 192, col 5
                variablesInEarlierVectors += VFFSL(SL,"variable.vector.nComponents",True)
        write('''
''')
        # 
        
        ########################################
        ## END - generated method body
        
        return _dummyTrans and trans.response().getvalue() or ""
        

    def innerLoopsForVariable(self, variable, variablesInEarlierVectors, dict, **KWS):
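        """
        Emit the innermost C code run at each point of the field loop:
        compute ``_outputfield_index_pointer`` from the strictly ascending
        global dimension indices, fseeko to the matching offset in the .dat
        file whenever the write is not sequential, and fwrite one real value
        (one half of a complex pair for complex vectors).
        """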



        ## CHEETAH: generated from @def innerLoopsForVariable($variable, $variablesInEarlierVectors, $dict) at line 200, col 1.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        
        ########################################
        ## START - generated method body
        
        # 
        field = dict['field']
        basis = dict['basis']
        # 
        write('''// UNVECTORISABLE
_outputfield_old_index_pointer = _outputfield_index_pointer;
_outputfield_index_pointer = 0;

// Calculate the output field index pointer
''')
        for idx, dim in enumerate(field.dimensions): # generated from line 210, col 3
            write('''_outputfield_index_pointer += (''')
            _v = VFN(VFN(VFFSL(SL,"dim",True),"inBasis",False)(basis),"strictlyAscendingGlobalIndex",True) # '$dim.inBasis(basis).strictlyAscendingGlobalIndex' on line 211, col 32
            if _v is not None: write(_filter(_v, rawExpr='$dim.inBasis(basis).strictlyAscendingGlobalIndex')) # from line 211, col 32.
            write(''')''')
            _v = ''.join([' * ' + dim.inBasis(basis).globalLattice for dim in field.dimensions[idx+1:]]) # "${''.join([' * ' + dim.inBasis(basis).globalLattice for dim in field.dimensions[idx+1:]])}" on line 212, col 1
            if _v is not None: write(_filter(_v, rawExpr="${''.join([' * ' + dim.inBasis(basis).globalLattice for dim in field.dimensions[idx+1:]])}")) # from line 212, col 1.
            write(''';
''')
        write('''
if (_outputfield_index_pointer != _outputfield_old_index_pointer + 1)
  fseeko(fpBinary, fieldOffset + _outputfield_index_pointer * sizeof(real) + (''')
        _v = VFFSL(SL,"variablesInEarlierVectors",True) # '${variablesInEarlierVectors}' on line 216, col 79
        if _v is not None: write(_filter(_v, rawExpr='${variablesInEarlierVectors}')) # from line 216, col 79.
        write(''' + _component) * vectorFieldSize + sizeof(unsigned long), SEEK_SET);

''')
        if VFFSL(SL,"variable.vector.type",True) == 'real': # generated from line 218, col 3
            write('''if (fwrite(&''')
            _v = VFFSL(SL,"variable.arrayName",True) # '${variable.arrayName}' on line 219, col 13
            if _v is not None: write(_filter(_v, rawExpr='${variable.arrayName}')) # from line 219, col 13.
            write('''[_''')
            _v = VFFSL(SL,"variable.vector.id",True) # '${variable.vector.id}' on line 219, col 36
            if _v is not None: write(_filter(_v, rawExpr='${variable.vector.id}')) # from line 219, col 36.
            write('''_index_pointer + _component], sizeof(real), 1, fpBinary) != 1) {
  _LOG(_ERROR_LOG_LEVEL, "Error writing output data.\\n");
}
''')
        else: # generated from line 222, col 3
            write('''if (fwrite(&(reinterpret_cast<real*>(''')
            _v = VFFSL(SL,"variable.arrayName",True) # '${variable.arrayName}' on line 223, col 38
            if _v is not None: write(_filter(_v, rawExpr='${variable.arrayName}')) # from line 223, col 38.
            write(''')[2*_''')
            _v = VFFSL(SL,"variable.vector.id",True) # '${variable.vector.id}' on line 223, col 64
            if _v is not None: write(_filter(_v, rawExpr='${variable.vector.id}')) # from line 223, col 64.
            write('''_index_pointer + _component]), sizeof(real), 1, fpBinary) != 1) {
  _LOG(_ERROR_LOG_LEVEL, "Error writing output data.\\n");
}
''')
        # 
        
        ########################################
        ## END - generated method body
        
        return _dummyTrans and trans.response().getvalue() or ""
        

    def writeBody(self, **KWS):
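        """
        Main template method.  The body of BinaryFormat.tmpl outside the
        #def blocks contains only the licence header comments, so this
        writes blank lines.
        """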



        ## CHEETAH: main method generated for this template
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        
        ########################################
        ## START - generated method body
        
        write('''
''')
        # 
        # BinaryFormat.tmpl
        # 
        # Created by Graham Dennis on 2007-09-20.
        # 
        # Copyright (c) 2007-2012, Graham Dennis
        # 
        # This program is free software: you can redistribute it and/or modify
        # it under the terms of the GNU General Public License as published by
        # the Free Software Foundation, either version 2 of the License, or
        # (at your option) any later version.
        # 
        # This program is distributed in the hope that it will be useful,
        # but WITHOUT ANY WARRANTY; without even the implied warranty of
        # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
        # GNU General Public License for more details.
        # 
        # You should have received a copy of the GNU General Public License
        # along with this program.  If not, see <http://www.gnu.org/licenses/>.
        # 
        write('''









''')
        
        ########################################
        ## END - generated method body
        
        return _dummyTrans and trans.response().getvalue() or ""
        
    ##################################################
    ## CHEETAH GENERATED ATTRIBUTES


    _CHEETAH__instanceInitialized = False

    _CHEETAH_version = __CHEETAH_version__

    _CHEETAH_versionTuple = __CHEETAH_versionTuple__

    _CHEETAH_genTime = __CHEETAH_genTime__

    _CHEETAH_genTimestamp = __CHEETAH_genTimestamp__

    _CHEETAH_src = __CHEETAH_src__

    _CHEETAH_srcLastModified = __CHEETAH_srcLastModified__

    name = 'binary'

    mpiSafe = True

    _mainCheetahMethod_for_BinaryFormat = 'writeBody'

## END CLASS DEFINITION

if not hasattr(BinaryFormat, '_initCheetahAttributes'):
    templateAPIClass = getattr(BinaryFormat,
                               '_CHEETAH_templateClass',
                               Template)
    templateAPIClass._addCheetahPlumbingCodeToClass(BinaryFormat)


# CHEETAH was developed by Tavis Rudd and Mike Orr
# with code, advice and input from many other volunteers.
# For more information visit https://cheetahtemplate.org/

##################################################
## if run from command line:
if __name__ == '__main__':
    from Cheetah.TemplateCmdLineIface import CmdLineIface
    CmdLineIface(templateObj=BinaryFormat()).run()


