/*=============================================================================
Boost.Wave: A Standard compliant C++ preprocessor library
http://www.boost.org/
Copyright (c) 2001-2010 Hartmut Kaiser. Distributed under the Boost
Software License, Version 1.0. (See accompanying file
LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
=============================================================================*/
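///////////////////////////////////////////////////////////////////////////////
// Note: building this sample typically requires linking against the compiled
// Boost.Wave library and its dependencies; the exact library names and set of
// dependencies vary with your Boost installation, but a command along these
// lines is a reasonable starting point:
//
//   g++ lexed_tokens.cpp -o lexed_tokens \
//       -lboost_wave -lboost_thread -lboost_filesystem -lboost_system
///////////////////////////////////////////////////////////////////////////////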
#include <iostream>
#include <iomanip>
#include <fstream>
#include <string>
#include <vector>
///////////////////////////////////////////////////////////////////////////////
// Include Wave itself
#include <boost/wave.hpp>
///////////////////////////////////////////////////////////////////////////////
// Include the lexer stuff
#include <boost/wave/cpplexer/cpp_lex_token.hpp> // token class
#include <boost/wave/cpplexer/cpp_lex_iterator.hpp> // lexer class
///////////////////////////////////////////////////////////////////////////////
//
// Special output operator for a lex_token.
//
// Note: this doesn't compile if BOOST_SPIRIT_DEBUG is defined.
//
///////////////////////////////////////////////////////////////////////////////
template <typename PositionT>
inline std::ostream &
operator<< (std::ostream &stream,
    boost::wave::cpplexer::lex_token<PositionT> const &t)
{
    using namespace std;
    using namespace boost::wave;

    token_id id = token_id(t);
    stream << setw(16)
        << left << boost::wave::get_token_name(id) << " ("
        << "#" << setw(3) << BASEID_FROM_TOKEN(id);

    if (ExtTokenTypeMask & id) {
        // this is an extended token id
        if (AltTokenType == (id & ExtTokenOnlyMask)) {
            stream << ", AltTokenType";
        }
        else if (TriGraphTokenType == (id & ExtTokenOnlyMask)) {
            stream << ", TriGraphTokenType";
        }
        else if (AltExtTokenType == (id & ExtTokenOnlyMask)) {
            stream << ", AltExtTokenType";
        }
    }

    stream
        << ") at " << t.get_position().get_file() << " ("
        << setw(3) << right << t.get_position().get_line() << "/"
        << setw(2) << right << t.get_position().get_column()
        << "): >";

    typedef typename boost::wave::cpplexer::lex_token<PositionT>::string_type
        string_type;
    string_type const& value = t.get_value();
    for (std::size_t i = 0; i < value.size(); ++i) {
        switch (value[i]) {
        case '\r': stream << "\\r"; break;
        case '\n': stream << "\\n"; break;
        case '\t': stream << "\\t"; break;
        default:
            stream << value[i];
            break;
        }
    }
    stream << "<";

    return stream;
}
///////////////////////////////////////////////////////////////////////////////
// main entry point
int main(int argc, char *argv[])
{
    if (2 != argc) {
        std::cerr << "Usage: lexed_tokens infile" << std::endl;
        return -1;
    }

    // current file position is saved for exception handling
    boost::wave::util::file_position_type current_position;

    try {
        // Open and read in the specified input file.
        std::ifstream instream(argv[1]);
        std::string instr;

        if (!instream.is_open()) {
            std::cerr << "Could not open input file: " << argv[1] << std::endl;
            return -2;
        }
        instream.unsetf(std::ios::skipws);
        instr = std::string(std::istreambuf_iterator<char>(instream.rdbuf()),
                            std::istreambuf_iterator<char>());

        // tokenize the input data into C++ tokens using the C++ lexer
        typedef boost::wave::cpplexer::lex_token<> token_type;
        typedef boost::wave::cpplexer::lex_iterator<token_type> lexer_type;
        typedef token_type::position_type position_type;
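        // lexer_type is a forward iterator over the token stream: dereferencing
        // it yields the current token_type object, and ++ advances to the next
        // token (as the loop below demonstrates).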
        position_type pos(argv[1]);
        lexer_type it = lexer_type(instr.begin(), instr.end(), pos,
            boost::wave::language_support(
                boost::wave::support_cpp | boost::wave::support_option_long_long));
        lexer_type end = lexer_type();
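        // A default-constructed lexer_type serves as the end-of-input iterator,
        // so the loop below simply walks the token stream until it is exhausted.
        // Further language_support flags (for example
        // boost::wave::support_option_variadics) could be OR'ed into the
        // constructor call above if needed.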
        while (it != end) {
            current_position = (*it).get_position();  // for error reporting
            std::cout << *it << std::endl;            // dump the token info
            ++it;
        }
    }
    catch (boost::wave::cpplexer::lexing_exception const& e) {
        // some lexing error
        std::cerr
            << e.file_name() << "(" << e.line_no() << "): "
            << e.description() << std::endl;
        return 2;
    }
    catch (std::exception const& e) {
        // use last recognized token to retrieve the error position
        std::cerr
            << current_position.get_file()
            << "(" << current_position.get_line() << "): "
            << "exception caught: " << e.what()
            << std::endl;
        return 3;
    }
    catch (...) {
        // use last recognized token to retrieve the error position
        std::cerr
            << current_position.get_file()
            << "(" << current_position.get_line() << "): "
            << "unexpected exception caught." << std::endl;
        return 4;
    }
    return 0;
}