diff --git a/lib/natural/index.js b/lib/natural/index.js
index 921754896..52399bcc2 100644
--- a/lib/natural/index.js
+++ b/lib/natural/index.js
@@ -44,10 +44,10 @@ exports.AggressiveTokenizerPl = require('./tokenizers/aggressive_tokenizer_pl');
 exports.AggressiveTokenizerPt = require('./tokenizers/aggressive_tokenizer_pt');
 exports.AggressiveTokenizerNo = require('./tokenizers/aggressive_tokenizer_no');
 exports.AggressiveTokenizer = require('./tokenizers/aggressive_tokenizer');
+exports.CaseTokenizer = require('./tokenizers/tokenizer_case');
 exports.RegexpTokenizer = require('./tokenizers/regexp_tokenizer').RegexpTokenizer;
 exports.WordTokenizer = require('./tokenizers/regexp_tokenizer').WordTokenizer;
 exports.WordPunctTokenizer = require('./tokenizers/regexp_tokenizer').WordPunctTokenizer;
-exports.CaseTokenizer = require('./tokenizers/tokenizer_case').CaseTokenizer;
 exports.TreebankWordTokenizer = require('./tokenizers/treebank_word_tokenizer');
 exports.TokenizerJa = require('./tokenizers/tokenizer_ja');
 exports.BayesClassifier = require('./classifiers/bayes_classifier');
diff --git a/lib/natural/tokenizers/tokenizer_case.js b/lib/natural/tokenizers/tokenizer_case.js
index 46d9757da..88c381d13 100644
--- a/lib/natural/tokenizers/tokenizer_case.js
+++ b/lib/natural/tokenizers/tokenizer_case.js
@@ -23,6 +23,7 @@ var Tokenizer = require('./tokenizer'),
   util = require('util'),
 
 CaseTokenizer = function() {
+  Tokenizer.call(this);
 };
 
 util.inherits(CaseTokenizer, Tokenizer);
@@ -54,4 +55,4 @@ CaseTokenizer.prototype.tokenize = function(text, preserveApostrophe) {
   return this.trim(result.replace(/\s+/g, ' ').split(' '));
 };
 
-exports.CaseTokenizer = CaseTokenizer;
+module.exports = CaseTokenizer;
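
With these changes, tokenizer_case.js exports the constructor directly via module.exports, the constructor calls into its Tokenizer base, and index.js requires the module without reaching into a .CaseTokenizer property. The sketch below is a minimal, hedged usage example of the re-exported class; it assumes the package is consumed as natural, and the sample sentence and variable names are illustrative, not taken from the source.

// Minimal usage sketch for the re-exported CaseTokenizer (assumption: the
// top-level package is required as 'natural', as in the index.js shown above).
var natural = require('natural');

// The constructor now runs Tokenizer.call(this), so base-class state is
// initialized before tokenize() is used.
var tokenizer = new natural.CaseTokenizer();

// tokenize(text, preserveApostrophe) matches the prototype method in the diff;
// passing true presumably keeps apostrophes inside tokens.
console.log(tokenizer.tokenize("It's a beautiful day", true));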