File: TokenizerLog.h

package info (click to toggle)
r-cran-readr 2.1.6-1
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid
  • size: 1,688 kB
  • sloc: cpp: 4,020; ansic: 1,811; makefile: 2
file content (179 lines) | stat: -rw-r--r-- 3,976 bytes parent folder | download | duplicates (3)
#ifndef FASTREAD_TOKENIZER_LOG_H_
#define FASTREAD_TOKENIZER_LOG_H_

#include "cpp11/protect.hpp"

#include "Token.h"
#include "Tokenizer.h"
#include "utils.h"

// States for TokenizerLog's character-by-character state machine.
enum LogState {
  LOG_DELIM,  // between fields: skipping spaces, consuming record separators
  LOG_FIELD,  // inside an unquoted, space-delimited field
  LOG_STRING, // inside a double-quoted string
  LOG_ESCAPE, // just saw '\\' inside a string; next char is taken literally
  LOG_QUOTE,  // just saw '"' inside a string; deciding whether it closed it
  LOG_DATE    // inside a '['..']'-delimited field
};

class TokenizerLog : public Tokenizer {
  SourceIterator begin_, cur_, end_;
  LogState state_;
  int row_, col_;
  bool moreTokens_;
  bool trimWS_;

public:
  TokenizerLog(bool trimWS) : trimWS_(trimWS) {}

  void tokenize(SourceIterator begin, SourceIterator end) {
    cur_ = begin;
    begin_ = begin;
    end_ = end;

    row_ = 0;
    col_ = 0;
    state_ = LOG_DELIM;
    moreTokens_ = true;
  }

  std::pair<double, size_t> progress() {
    size_t bytes = cur_ - begin_;
    return std::make_pair(bytes / (double)(end_ - begin_), bytes);
  }

  Token nextToken() {
    // Capture current position
    int row = row_, col = col_;

    if (!moreTokens_)
      return Token(TOKEN_EOF, row, col);

    SourceIterator token_begin = cur_;

    while (cur_ != end_) {
      Advance advance(&cur_);

      if ((row_ + 1) % 100000 == 0 || (col_ + 1) % 100000 == 0)
        cpp11::check_user_interrupt();

      switch (state_) {
      case LOG_DELIM:
        if (*cur_ == '\r' || *cur_ == '\n') {
          newRecord();
          advanceForLF(&cur_, end_);
          return Token(TOKEN_EMPTY, row, col);
        } else if (*cur_ == ' ') {
          break;
        } else if (*cur_ == '"') {
          state_ = LOG_STRING;
        } else if (*cur_ == '[') {
          state_ = LOG_DATE;
        } else {
          state_ = LOG_FIELD;
        }
        break;

      case LOG_FIELD:
        if (*cur_ == '\r' || *cur_ == '\n') {
          newRecord();
          return fieldToken(token_begin, advanceForLF(&cur_, end_), row, col);
        } else if (*cur_ == ' ') {
          newField();
          return fieldToken(token_begin, cur_, row, col);
        }
        break;

      case LOG_QUOTE:
        if (*cur_ == ' ') {
          newField();
          return fieldToken(token_begin + 1, cur_ - 1, row, col);
        } else if (*cur_ == '\r' || *cur_ == '\n') {
          newRecord();
          return fieldToken(
              token_begin + 1, advanceForLF(&cur_, end_) - 1, row, col);
        } else {
          state_ = LOG_STRING;
        }
        break;

      case LOG_STRING:
        if (*cur_ == '"') {
          state_ = LOG_QUOTE;
        } else if (*cur_ == '\\') {
          state_ = LOG_ESCAPE;
        }
        break;

      case LOG_ESCAPE:
        state_ = LOG_STRING;
        break;

      case LOG_DATE:
        if (*cur_ == ']') {
          newField();
          if (cur_ + 1 != end_)
            cur_++;
          return fieldToken(token_begin + 1, cur_ - 1, row, col);
        }
        break;
      }
    }

    // Reached end of Source: cur_ == end_
    moreTokens_ = false;

    switch (state_) {
    case LOG_DELIM:
      if (col_ == 0) {
        return Token(TOKEN_EOF, row, col);
      } else {
        return Token(TOKEN_EMPTY, row, col);
      }

    case LOG_QUOTE:
      return fieldToken(token_begin + 1, end_ - 1, row, col);

    case LOG_STRING:
      return fieldToken(token_begin + 1, end_, row, col);

    case LOG_ESCAPE:
      warn(row, col, "closing escape at end of file");
      return fieldToken(token_begin + 1, end_, row, col);

    case LOG_DATE:
      warn(row, col, "closing ] at end of file");
      return fieldToken(token_begin + 1, end_, row, col);

    case LOG_FIELD:
      return fieldToken(token_begin, end_, row, col);
    }

    return Token(TOKEN_EOF, row, col);
  }

private:
  void newField() {
    col_++;
    state_ = LOG_DELIM;
  }

  void newRecord() {
    row_++;
    col_ = 0;
    state_ = LOG_DELIM;
  }

  Token fieldToken(SourceIterator begin, SourceIterator end, int row, int col) {
    Token t(begin, end, row, col, false);
    if (trimWS_) {
      t.trim();
    }

    t.flagNA(std::vector<std::string>(1, "-"));

    return t;
  }
};

#endif