File: tokenize.h

/*
 * tokenize.h
 *
 *  Created on: Jul 21, 2009
 *      Author: Ben Langmead
 */

#ifndef TOKENIZE_H_
#define TOKENIZE_H_

#include <string>
#include <sstream>
#include <vector>
#include <limits>

using namespace std;

/**
 * Split string s on any of the given delimiter characters, appending
 * the tokens to ss.  Empty tokens are skipped.  At most 'max' tokens
 * are produced; once that limit is reached, the final token contains
 * the unsplit remainder of s.  Mostly borrowed from C++ Programming
 * HOWTO 7.3.
 */
static inline void tokenize(
	const string& s,
	const string& delims,
	vector<string>& ss,
	size_t max = std::numeric_limits<size_t>::max())
{
	// Find the start of the first token (skip any leading delimiters)
	string::size_type lastPos = s.find_first_not_of(delims, 0);
	// Find the end of the first token
	string::size_type pos = s.find_first_of(delims, lastPos);
	while (string::npos != pos || string::npos != lastPos) {
		// Append the current token
		ss.push_back(s.substr(lastPos, pos - lastPos));
		// Advance to the start and end of the next token
		lastPos = s.find_first_not_of(delims, pos);
		pos = s.find_first_of(delims, lastPos);
		if(ss.size() == (max - 1)) {
			// Token limit reached; let the next (and final) token
			// consume the rest of the string
			pos = string::npos;
		}
	}
}
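
// Illustrative usage sketch (not part of the original header); the variable
// names below are hypothetical:
//
//   std::vector<std::string> fields;
//   tokenize("a,,b;c", ",;", fields);    // fields == {"a", "b", "c"}:
//                                        // empty tokens are skipped
//
//   std::vector<std::string> first2;
//   tokenize("a,b,c,d", ",", first2, 2); // first2 == {"a", "b,c,d"}: the
//                                        // final token keeps the remainder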

/**
 * Split string s on a single delimiter character, appending the
 * tokens to ss.  Unlike the overload above, empty tokens produced by
 * consecutive delimiters are preserved.
 */
static inline void tokenize(
	const std::string& s,
	char delim,
	std::vector<std::string>& ss)
{
	std::string token;
	std::istringstream iss(s);
	// Read delimiter-separated tokens until the stream is exhausted
	while(std::getline(iss, token, delim)) {
		ss.push_back(token);
	}
}
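
// Illustrative usage sketch (not part of the original header); the variable
// name below is hypothetical:
//
//   std::vector<std::string> cols;
//   tokenize("a,,b", ',', cols);         // cols == {"a", "", "b"}: the empty
//                                        // token between the commas is kept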

#endif /*TOKENIZE_H_*/