File: correlate.py

package info (click to toggle)
liggghts 3.0.3%2Brepack-2
  • links: PTS, VCS
  • area: main
  • in suites: jessie, jessie-kfreebsd
  • size: 106,076 kB
  • ctags: 34,406
  • sloc: cpp: 363,723; python: 21,138; ansic: 9,146; perl: 3,687; sh: 2,841; fortran: 2,802; xml: 788; makefile: 485; objc: 238; lisp: 169; f90: 145; csh: 16; awk: 14
file content (89 lines) | stat: -rw-r--r-- 2,537 bytes parent folder | download | duplicates (11)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
#!/usr/bin/env python
"""
  function: 
    parse the block of thermo data in a lammps logfile and perform auto- and 
    cross correlation of the specified column data.  The total sum of the 
    correlation is also computed which can be converted to an integral by 
    multiplying by the timestep. 
  output:
    standard output contains column data for the auto- & cross correlations 
    plus the total sum of each. Note, only the upper triangle of the 
    correlation matrix is computed.
  usage: 
    correlate.py [-c col] <-c col2> <-s max_correlation_time>  [logfile] 
"""
import sys
import re
import array

# parse command line
#
# Recognized arguments (see module docstring):
#   -c <col>   1-based column index to correlate (repeatable)
#   -s <max>   maximum correlation time in samples (default: nSamples/2)
#   <file>     LAMMPS log file to parse

maxCorrelationTime = 0
cols = array.array("I")   # 0-based column indices requested via -c
nCols = 0
filename = None
args = sys.argv[1:]
index = 0
while index < len(args):
  arg = args[index]
  index += 1
  if arg == "-c":
    # convert the user's 1-based column number to a 0-based index
    cols.append(int(args[index]) - 1)
    nCols += 1
    index += 1
  elif arg == "-s":
    maxCorrelationTime = int(args[index])
    index += 1
  else:
    # any non-flag argument is taken as the logfile name
    filename = arg
if nCols < 1:
  raise RuntimeError('no data columns requested')
if filename is None:
  # fail here with a clear message instead of a NameError at open() below
  raise RuntimeError('no logfile given')
# one time series (array of doubles) per requested column
data = [array.array("d") for _ in range(nCols)]

# read data block from log file
#
# The thermo data is the first contiguous run of lines whose leading
# column starts with a digit; reading stops at the first non-numeric
# line after that run (the rest of the file is ignored).

start = False
nSamples = 0
pattern = re.compile(r'\d')  # raw string: '\d' is an invalid escape otherwise
with open(filename) as logfile:  # 'with' closes the handle (was leaked before)
  for line in logfile:
    columns = line.split()
    if columns and pattern.match(columns[0]):
      for i in range(nCols):
        data[i].append(float(columns[cols[i]]))
      nSamples += 1
      start = True
    elif start:
      # end of the thermo block -- stop reading
      break
print("# read : %d samples of %d data" % (nSamples, nCols))
if maxCorrelationTime < 1:
  maxCorrelationTime = nSamples // 2
 
# correlate and integrate
#
# Only the upper triangle of the correlation matrix is computed.  For
# each pair (i,j) and each lag s we print C_ij(s), averaged over all
# available time origins, plus its running sum (multiply the sum by the
# timestep to obtain the time integral).

correlationPairs = [[i, j] for i in range(nCols) for j in range(i, nCols)]
header = "# "
for pair in correlationPairs:
  i = str(pair[0] + 1)
  j = str(pair[1] + 1)
  header += " C" + i + j + " sum_C" + i + j
print(header)
nCorrelationPairs = len(correlationPairs)
sums = [0.0] * nCorrelationPairs  # renamed: 'sum' shadowed the builtin
for s in range(maxCorrelationTime):
  nt = nSamples - s  # number of time origins available at lag s
  output = ""
  for p in range(nCorrelationPairs):
    i, j = correlationPairs[p]
    # hoist the series lookups out of the inner loop (was re-indexed
    # per sample); per-pair summation order is unchanged
    di = data[i]
    dj = data[j]
    c = 0.0
    for t in range(nt):
      c += di[t] * dj[s + t]
    c /= nt  # time-origin average at this lag
    sums[p] += c
    output += str(c) + " " + str(sums[p]) + " "
  print(output)