diff --git a/.yardopts b/.yardopts
index 22ce944f64..2d5dda37e9 100644
--- a/.yardopts
+++ b/.yardopts
@@ -1 +1,3 @@
 --no-private
+--markup-provider=redcarpet
+--markup=markdown
diff --git a/Gemfile b/Gemfile
index 24fdeb2934..c98a0afbf9 100644
--- a/Gemfile
+++ b/Gemfile
@@ -15,3 +15,4 @@ gem 'sinatra'

 # docs
 gem 'yard'
+gem 'github-markup'
diff --git a/Rakefile b/Rakefile
index 3c8ff52ce4..ce68795031 100644
--- a/Rakefile
+++ b/Rakefile
@@ -8,12 +8,12 @@ task :spec do
 end

 task :doc do
-  sh 'yard'
+  sh 'bundle exec yard'
 end

 namespace :doc do
   task :server do
-    sh 'yard server --reload'
+    sh 'bundle exec yard server --reload'
   end

   task :clean do
diff --git a/lib/rouge/lexer.rb b/lib/rouge/lexer.rb
index d79ee960ec..fdac33dc64 100644
--- a/lib/rouge/lexer.rb
+++ b/lib/rouge/lexer.rb
@@ -3,7 +3,7 @@

 module Rouge
   # @abstract
-  # A lexer transforms text into a stream of [token, chunk] pairs.
+  # A lexer transforms text into a stream of `[token, chunk]` pairs.
   class Lexer
     class << self
       # Lexes `stream` with the given options. The lex is delegated to a
@@ -119,7 +119,7 @@ def aliases(*args)
         args.each { |arg| Lexer.register(arg, self) }
       end

-      # Specify a list of filename globs associated with this lexer
+      # Specify a list of filename globs associated with this lexer.
       #
       # @example
       #   class Ruby < Lexer
@@ -147,6 +147,15 @@ def registry

     # -*- instance methods -*- #

+    # Create a new lexer with the given options. Individual lexers may
+    # specify extra options. The only current globally accepted option
+    # is `:debug`.
+    #
+    # @option opts :debug
+    #   Prints debug information to stdout. The particular info depends
+    #   on the lexer in question. In regex lexers, this will log the
+    #   state stack at the beginning of each step, along with each regex
+    #   tried and each stream consumed. Try it, it's pretty useful.
     def initialize(opts={})
       options(opts)
     end
@@ -214,7 +223,7 @@ def lex(string, opts={}, &b)

     # @abstract
     #
-    # Yield [token, chunk] pairs, given a prepared input stream. This
+    # Yield `[token, chunk]` pairs, given a prepared input stream. This
     # must be implemented.
     #
     # @param [StringScanner] stream
@@ -225,7 +234,7 @@ def stream_tokens(stream, &b)

     # @abstract
     #
-    # return a number between 0 and 1 indicating the likelihood that
+    # Return a number between 0 and 1 indicating the likelihood that
     # the text given should be lexed with this lexer. The default
     # implementation returns 0.
     #
diff --git a/lib/rouge/regex_lexer.rb b/lib/rouge/regex_lexer.rb
index 71997161fc..26845e8a3f 100644
--- a/lib/rouge/regex_lexer.rb
+++ b/lib/rouge/regex_lexer.rb
@@ -56,7 +56,7 @@ def initialize(rules)
       #
       # @param [Regexp] re
       #   a regular expression for this rule to test.
-      # @param [String] token
+      # @param [String] tok
       #   the token type to yield if `re` matches.
       # @param [#to_s] next_state
       #   (optional) a state to push onto the stack if `re` matches.
@@ -187,11 +187,11 @@ def reset!
     # The process for lexing works as follows, until the stream is empty:
     #
     # 1. We look at the state on top of the stack (which by default is
-    #    [:root]).
+    #    `[:root]`).
     # 2. Each rule in that state is tried until one is successful. If one
     #    is found, that rule's callback is evaluated - which may yield
     #    tokens and manipulate the state stack. Otherwise, one character
-    #    is consumed with an 'Error' token, and we continue at (1.)
+    #    is consumed with an `'Error'` token, and we continue at (1.)
     #
     # @see #step #step (where (2.) is implemented)
     def stream_tokens(stream, &b)
@@ -265,8 +265,7 @@ def run_callback(stream, callback, &output_stream)
     end

     # The number of successive scans permitted without consuming
-    # the input stream. If this is exceeded, one character is
-    # consumed with the 'Error' token, and the lex is continued.
+    # the input stream. If this is exceeded, the match fails.
     MAX_NULL_SCANS = 5

     # @private
@@ -323,8 +322,8 @@ def group(tok)
     end

     # Delegate the lex to another lexer. The #lex method will be called
-    # with :continue set to true, so that #reset! will not be called. In
-    # this way, a single lexer can be repeatedly delegated to while
+    # with `:continue` set to true, so that #reset! will not be called.
+    # In this way, a single lexer can be repeatedly delegated to while
     # maintaining its own internal state stack.
     #
     # @param [#lex] lexer
@@ -367,7 +366,7 @@ def pop!(times=1)
       times.times { stack.pop }
     end

-    # reset the stack back to [:root].
+    # reset the stack back to `[:root]`.
     def reset_stack
       debug { ' resetting stack' }
       stack.clear
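
For context, a rough sketch of how the API documented above reads in use. The `Rouge::Lexers::Ruby` constant comes from the `class Ruby < Lexer` example in the patched docs; the sample source string and the way the token is printed are illustrative assumptions, not part of this patch.

    require 'rouge'

    # :debug is the one globally accepted option documented on #initialize;
    # in regex lexers it logs the state stack and each regex tried to stdout.
    lexer = Rouge::Lexers::Ruby.new(:debug => true)

    # #lex yields `[token, chunk]` pairs, as described in the lexer docs.
    lexer.lex("puts 1 + 1") do |token, chunk|
      puts "#{token} => #{chunk.inspect}"
    end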