# Generated by roxygen2: do not edit by hand

# --- S3 method registrations ---
# Each tokenizer generic has a data.frame method and a default method.
S3method(tokenize_character_shingles,data.frame)
S3method(tokenize_character_shingles,default)
S3method(tokenize_characters,data.frame)
S3method(tokenize_characters,default)
S3method(tokenize_lines,data.frame)
S3method(tokenize_lines,default)
S3method(tokenize_ngrams,data.frame)
S3method(tokenize_ngrams,default)
S3method(tokenize_paragraphs,data.frame)
S3method(tokenize_paragraphs,default)
S3method(tokenize_ptb,data.frame)
S3method(tokenize_ptb,default)
S3method(tokenize_regex,data.frame)
S3method(tokenize_regex,default)
S3method(tokenize_sentences,data.frame)
S3method(tokenize_sentences,default)
S3method(tokenize_skip_ngrams,data.frame)
S3method(tokenize_skip_ngrams,default)
S3method(tokenize_word_stems,data.frame)
S3method(tokenize_word_stems,default)
S3method(tokenize_words,data.frame)
S3method(tokenize_words,default)

# --- Exported functions (the package's public API) ---
export(chunk_text)
export(count_characters)
export(count_sentences)
export(count_words)
export(tokenize_character_shingles)
export(tokenize_characters)
export(tokenize_lines)
export(tokenize_ngrams)
export(tokenize_paragraphs)
export(tokenize_ptb)
export(tokenize_regex)
export(tokenize_sentences)
export(tokenize_skip_ngrams)
export(tokenize_word_stems)
export(tokenize_words)

# --- Imports from dependency packages ---
importFrom(Rcpp,sourceCpp)
importFrom(SnowballC,getStemLanguages)
importFrom(SnowballC,wordStem)
importFrom(stringi,stri_c)
importFrom(stringi,stri_opts_regex)
importFrom(stringi,stri_replace_all_charclass)
importFrom(stringi,stri_replace_all_regex)
importFrom(stringi,stri_split_boundaries)
importFrom(stringi,stri_split_fixed)
importFrom(stringi,stri_split_lines)
importFrom(stringi,stri_split_regex)
importFrom(stringi,stri_subset_charclass)
importFrom(stringi,stri_trans_tolower)
importFrom(stringi,stri_trim_both)

# --- Compiled code ---
# Load the package's shared object and use registered native routines.
useDynLib(tokenizers, .registration = TRUE)