Use camelCase for enums (#65)

- Rename the values of non-public enums to use camelCase.
- Rephrase some doc comments in a more modern style.
- Remove some doc comments that add no new information.
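
For reference, a minimal standalone Dart sketch of the new style. The enum
body mirrors the renamed EventType values from lib/src/event.dart; the
describe helper and main below are purely illustrative and not part of this
change.

    // Mirrors the renamed EventType values; sketch only.
    enum EventType {
      streamStart,
      streamEnd,
      documentStart,
      documentEnd,
      alias,
      scalar,
      sequenceStart,
      sequenceEnd,
      mappingStart,
      mappingEnd
    }

    // Hypothetical helper showing how call sites read after the rename.
    String describe(EventType type) {
      switch (type) {
        case EventType.documentStart:
          return 'document start';
        case EventType.documentEnd:
          return 'document end';
        default:
          return type.toString();
      }
    }

    void main() {
      print(describe(EventType.documentStart)); // Prints "document start".
    }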
diff --git a/lib/src/event.dart b/lib/src/event.dart
index 31625f8..c4f6a6d 100644
--- a/lib/src/event.dart
+++ b/lib/src/event.dart
@@ -9,10 +9,7 @@
 
 /// An event emitted by a [Parser].
 class Event {
-  /// The event type.
   final EventType type;
-
-  /// The span associated with the event.
   final FileSpan span;
 
   Event(this.type, this.span);
@@ -24,7 +21,7 @@
 /// An event indicating the beginning of a YAML document.
 class DocumentStartEvent implements Event {
   @override
-  EventType get type => EventType.DOCUMENT_START;
+  EventType get type => EventType.documentStart;
   @override
   final FileSpan span;
 
@@ -51,7 +48,7 @@
 /// An event indicating the end of a YAML document.
 class DocumentEndEvent implements Event {
   @override
-  EventType get type => EventType.DOCUMENT_END;
+  EventType get type => EventType.documentEnd;
   @override
   final FileSpan span;
 
@@ -68,11 +65,11 @@
 /// An event indicating that an alias was referenced.
 class AliasEvent implements Event {
   @override
-  EventType get type => EventType.ALIAS;
+  EventType get type => EventType.alias;
   @override
   final FileSpan span;
 
-  /// The name of the anchor.
+  /// The alias name.
   final String name;
 
   AliasEvent(this.span, this.name);
@@ -81,8 +78,7 @@
   String toString() => 'ALIAS $name';
 }
 
-/// A base class for events that can have anchor and tag properties associated
-/// with them.
+/// An event that can have associated anchor and tag properties.
 abstract class _ValueEvent implements Event {
   /// The name of the value's anchor, or `null` if it wasn't anchored.
   String get anchor;
@@ -102,7 +98,7 @@
 /// An event indicating a single scalar value.
 class ScalarEvent extends _ValueEvent {
   @override
-  EventType get type => EventType.SCALAR;
+  EventType get type => EventType.scalar;
   @override
   final FileSpan span;
   @override
@@ -125,7 +121,7 @@
 /// An event indicating the beginning of a sequence.
 class SequenceStartEvent extends _ValueEvent {
   @override
-  EventType get type => EventType.SEQUENCE_START;
+  EventType get type => EventType.sequenceStart;
   @override
   final FileSpan span;
   @override
@@ -142,7 +138,7 @@
 /// An event indicating the beginning of a mapping.
 class MappingStartEvent extends _ValueEvent {
   @override
-  EventType get type => EventType.MAPPING_START;
+  EventType get type => EventType.mappingStart;
   @override
   final FileSpan span;
   @override
@@ -156,16 +152,16 @@
   MappingStartEvent(this.span, this.style, {this.anchor, this.tag});
 }
 
-/// An enum of types of [Event] object.
+/// The types of [Event] objects.
 enum EventType {
-  STREAM_START,
-  STREAM_END,
-  DOCUMENT_START,
-  DOCUMENT_END,
-  ALIAS,
-  SCALAR,
-  SEQUENCE_START,
-  SEQUENCE_END,
-  MAPPING_START,
-  MAPPING_END
+  streamStart,
+  streamEnd,
+  documentStart,
+  documentEnd,
+  alias,
+  scalar,
+  sequenceStart,
+  sequenceEnd,
+  mappingStart,
+  mappingEnd
 }
diff --git a/lib/src/loader.dart b/lib/src/loader.dart
index 08b2544..54172b5 100644
--- a/lib/src/loader.dart
+++ b/lib/src/loader.dart
@@ -36,7 +36,7 @@
       : _parser = Parser(source, sourceUrl: sourceUrl) {
     var event = _parser.parse();
     _span = event.span;
-    assert(event.type == EventType.STREAM_START);
+    assert(event.type == EventType.streamStart);
   }
 
   /// Loads the next document from the stream.
@@ -46,7 +46,7 @@
     if (_parser.isDone) return null;
 
     var event = _parser.parse();
-    if (event.type == EventType.STREAM_END) {
+    if (event.type == EventType.streamEnd) {
       _span = _span.expand(event.span);
       return null;
     }
@@ -62,7 +62,7 @@
     var contents = _loadNode(_parser.parse());
 
     var lastEvent = _parser.parse() as DocumentEndEvent;
-    assert(lastEvent.type == EventType.DOCUMENT_END);
+    assert(lastEvent.type == EventType.documentEnd);
 
     return YamlDocument.internal(
         contents,
@@ -76,13 +76,13 @@
   /// Composes a node.
   YamlNode _loadNode(Event firstEvent) {
     switch (firstEvent.type) {
-      case EventType.ALIAS:
+      case EventType.alias:
         return _loadAlias(firstEvent as AliasEvent);
-      case EventType.SCALAR:
+      case EventType.scalar:
         return _loadScalar(firstEvent as ScalarEvent);
-      case EventType.SEQUENCE_START:
+      case EventType.sequenceStart:
         return _loadSequence(firstEvent as SequenceStartEvent);
-      case EventType.MAPPING_START:
+      case EventType.mappingStart:
         return _loadMapping(firstEvent as MappingStartEvent);
       default:
         throw 'Unreachable';
@@ -136,7 +136,7 @@
     _registerAnchor(firstEvent.anchor, node);
 
     var event = _parser.parse();
-    while (event.type != EventType.SEQUENCE_END) {
+    while (event.type != EventType.sequenceEnd) {
       children.add(_loadNode(event));
       event = _parser.parse();
     }
@@ -158,7 +158,7 @@
     _registerAnchor(firstEvent.anchor, node);
 
     var event = _parser.parse();
-    while (event.type != EventType.MAPPING_END) {
+    while (event.type != EventType.mappingEnd) {
       var key = _loadNode(event);
       var value = _loadNode(_parser.parse());
       if (children.containsKey(key)) {
diff --git a/lib/src/parser.dart b/lib/src/parser.dart
index fd24f88..3eba773 100644
--- a/lib/src/parser.dart
+++ b/lib/src/parser.dart
@@ -116,10 +116,10 @@
   ///       ************
   Event _parseStreamStart() {
     var token = _scanner.scan();
-    assert(token.type == TokenType.STREAM_START);
+    assert(token.type == TokenType.streamStart);
 
     _state = _State.DOCUMENT_START;
-    return Event(EventType.STREAM_START, token.span);
+    return Event(EventType.streamStart, token.span);
   }
 
   /// Parses the productions:
@@ -137,14 +137,14 @@
     // as long as there was an end indicator.
 
     // Parse extra document end indicators.
-    while (token.type == TokenType.DOCUMENT_END) {
+    while (token.type == TokenType.documentEnd) {
       token = _scanner.advance();
     }
 
-    if (token.type != TokenType.VERSION_DIRECTIVE &&
-        token.type != TokenType.TAG_DIRECTIVE &&
-        token.type != TokenType.DOCUMENT_START &&
-        token.type != TokenType.STREAM_END) {
+    if (token.type != TokenType.versionDirective &&
+        token.type != TokenType.tagDirective &&
+        token.type != TokenType.documentStart &&
+        token.type != TokenType.streamEnd) {
       // Parse an implicit document.
       _processDirectives();
       _states.add(_State.DOCUMENT_END);
@@ -152,10 +152,10 @@
       return DocumentStartEvent(token.span.start.pointSpan());
     }
 
-    if (token.type == TokenType.STREAM_END) {
+    if (token.type == TokenType.streamEnd) {
       _state = _State.END;
       _scanner.scan();
-      return Event(EventType.STREAM_END, token.span);
+      return Event(EventType.streamEnd, token.span);
     }
 
     // Parse an explicit document.
@@ -164,7 +164,7 @@
     var versionDirective = pair.first;
     var tagDirectives = pair.last;
     token = _scanner.peek();
-    if (token.type != TokenType.DOCUMENT_START) {
+    if (token.type != TokenType.documentStart) {
       throw YamlException('Expected document start.', token.span);
     }
 
@@ -186,11 +186,11 @@
     var token = _scanner.peek();
 
     switch (token.type) {
-      case TokenType.VERSION_DIRECTIVE:
-      case TokenType.TAG_DIRECTIVE:
-      case TokenType.DOCUMENT_START:
-      case TokenType.DOCUMENT_END:
-      case TokenType.STREAM_END:
+      case TokenType.versionDirective:
+      case TokenType.tagDirective:
+      case TokenType.documentStart:
+      case TokenType.documentEnd:
+      case TokenType.streamEnd:
         _state = _states.removeLast();
         return _processEmptyScalar(token.span.start);
       default:
@@ -210,7 +210,7 @@
     _state = _State.DOCUMENT_START;
 
     var token = _scanner.peek();
-    if (token.type == TokenType.DOCUMENT_END) {
+    if (token.type == TokenType.documentEnd) {
       _scanner.scan();
       return DocumentEndEvent(token.span, isImplicit: false);
     } else {
@@ -291,7 +291,7 @@
       }
     }
 
-    if (indentlessSequence && token.type == TokenType.BLOCK_ENTRY) {
+    if (indentlessSequence && token.type == TokenType.blockEntry) {
       _state = _State.INDENTLESS_SEQUENCE_ENTRY;
       return SequenceStartEvent(span.expand(token.span), CollectionStyle.BLOCK,
           anchor: anchor, tag: tag);
@@ -307,25 +307,25 @@
           anchor: anchor, tag: tag);
     }
 
-    if (token.type == TokenType.FLOW_SEQUENCE_START) {
+    if (token.type == TokenType.flowSequenceStart) {
       _state = _State.FLOW_SEQUENCE_FIRST_ENTRY;
       return SequenceStartEvent(span.expand(token.span), CollectionStyle.FLOW,
           anchor: anchor, tag: tag);
     }
 
-    if (token.type == TokenType.FLOW_MAPPING_START) {
+    if (token.type == TokenType.flowMappingStart) {
       _state = _State.FLOW_MAPPING_FIRST_KEY;
       return MappingStartEvent(span.expand(token.span), CollectionStyle.FLOW,
           anchor: anchor, tag: tag);
     }
 
-    if (block && token.type == TokenType.BLOCK_SEQUENCE_START) {
+    if (block && token.type == TokenType.blockSequenceStart) {
       _state = _State.BLOCK_SEQUENCE_FIRST_ENTRY;
       return SequenceStartEvent(span.expand(token.span), CollectionStyle.BLOCK,
           anchor: anchor, tag: tag);
     }
 
-    if (block && token.type == TokenType.BLOCK_MAPPING_START) {
+    if (block && token.type == TokenType.blockMappingStart) {
       _state = _State.BLOCK_MAPPING_FIRST_KEY;
       return MappingStartEvent(span.expand(token.span), CollectionStyle.BLOCK,
           anchor: anchor, tag: tag);
@@ -347,11 +347,11 @@
   Event _parseBlockSequenceEntry() {
     var token = _scanner.peek();
 
-    if (token.type == TokenType.BLOCK_ENTRY) {
+    if (token.type == TokenType.blockEntry) {
       token = _scanner.advance();
 
-      if (token.type == TokenType.BLOCK_ENTRY ||
-          token.type == TokenType.BLOCK_END) {
+      if (token.type == TokenType.blockEntry ||
+          token.type == TokenType.blockEnd) {
         _state = _State.BLOCK_SEQUENCE_ENTRY;
         return _processEmptyScalar(token.span.end);
       } else {
@@ -360,10 +360,10 @@
       }
     }
 
-    if (token.type == TokenType.BLOCK_END) {
+    if (token.type == TokenType.blockEnd) {
       _scanner.scan();
       _state = _states.removeLast();
-      return Event(EventType.SEQUENCE_END, token.span);
+      return Event(EventType.sequenceEnd, token.span);
     }
 
     throw YamlException("While parsing a block collection, expected '-'.",
@@ -377,18 +377,18 @@
   Event _parseIndentlessSequenceEntry() {
     var token = _scanner.peek();
 
-    if (token.type != TokenType.BLOCK_ENTRY) {
+    if (token.type != TokenType.blockEntry) {
       _state = _states.removeLast();
-      return Event(EventType.SEQUENCE_END, token.span.start.pointSpan());
+      return Event(EventType.sequenceEnd, token.span.start.pointSpan());
     }
 
     var start = token.span.start;
     token = _scanner.advance();
 
-    if (token.type == TokenType.BLOCK_ENTRY ||
-        token.type == TokenType.KEY ||
-        token.type == TokenType.VALUE ||
-        token.type == TokenType.BLOCK_END) {
+    if (token.type == TokenType.blockEntry ||
+        token.type == TokenType.key ||
+        token.type == TokenType.value ||
+        token.type == TokenType.blockEnd) {
       _state = _State.INDENTLESS_SEQUENCE_ENTRY;
       return _processEmptyScalar(start);
     } else {
@@ -409,13 +409,13 @@
   ///                              *********
   Event _parseBlockMappingKey() {
     var token = _scanner.peek();
-    if (token.type == TokenType.KEY) {
+    if (token.type == TokenType.key) {
       var start = token.span.start;
       token = _scanner.advance();
 
-      if (token.type == TokenType.KEY ||
-          token.type == TokenType.VALUE ||
-          token.type == TokenType.BLOCK_END) {
+      if (token.type == TokenType.key ||
+          token.type == TokenType.value ||
+          token.type == TokenType.blockEnd) {
         _state = _State.BLOCK_MAPPING_VALUE;
         return _processEmptyScalar(start);
       } else {
@@ -427,15 +427,15 @@
     // libyaml doesn't allow empty keys without an explicit key indicator, but
     // the spec does. See example 8.18:
     // http://yaml.org/spec/1.2/spec.html#id2798896.
-    if (token.type == TokenType.VALUE) {
+    if (token.type == TokenType.value) {
       _state = _State.BLOCK_MAPPING_VALUE;
       return _processEmptyScalar(token.span.start);
     }
 
-    if (token.type == TokenType.BLOCK_END) {
+    if (token.type == TokenType.blockEnd) {
       _scanner.scan();
       _state = _states.removeLast();
-      return Event(EventType.MAPPING_END, token.span);
+      return Event(EventType.mappingEnd, token.span);
     }
 
     throw YamlException('Expected a key while parsing a block mapping.',
@@ -455,16 +455,16 @@
   Event _parseBlockMappingValue() {
     var token = _scanner.peek();
 
-    if (token.type != TokenType.VALUE) {
+    if (token.type != TokenType.value) {
       _state = _State.BLOCK_MAPPING_KEY;
       return _processEmptyScalar(token.span.start);
     }
 
     var start = token.span.start;
     token = _scanner.advance();
-    if (token.type == TokenType.KEY ||
-        token.type == TokenType.VALUE ||
-        token.type == TokenType.BLOCK_END) {
+    if (token.type == TokenType.key ||
+        token.type == TokenType.value ||
+        token.type == TokenType.blockEnd) {
       _state = _State.BLOCK_MAPPING_KEY;
       return _processEmptyScalar(start);
     } else {
@@ -490,9 +490,9 @@
     if (first) _scanner.scan();
     var token = _scanner.peek();
 
-    if (token.type != TokenType.FLOW_SEQUENCE_END) {
+    if (token.type != TokenType.flowSequenceEnd) {
       if (!first) {
-        if (token.type != TokenType.FLOW_ENTRY) {
+        if (token.type != TokenType.flowEntry) {
           throw YamlException(
               "While parsing a flow sequence, expected ',' or ']'.",
               token.span.start.pointSpan());
@@ -501,11 +501,11 @@
         token = _scanner.advance();
       }
 
-      if (token.type == TokenType.KEY) {
+      if (token.type == TokenType.key) {
         _state = _State.FLOW_SEQUENCE_ENTRY_MAPPING_KEY;
         _scanner.scan();
         return MappingStartEvent(token.span, CollectionStyle.FLOW);
-      } else if (token.type != TokenType.FLOW_SEQUENCE_END) {
+      } else if (token.type != TokenType.flowSequenceEnd) {
         _states.add(_State.FLOW_SEQUENCE_ENTRY);
         return _parseNode();
       }
@@ -513,7 +513,7 @@
 
     _scanner.scan();
     _state = _states.removeLast();
-    return Event(EventType.SEQUENCE_END, token.span);
+    return Event(EventType.sequenceEnd, token.span);
   }
 
   /// Parses the productions:
@@ -524,9 +524,9 @@
   Event _parseFlowSequenceEntryMappingKey() {
     var token = _scanner.peek();
 
-    if (token.type == TokenType.VALUE ||
-        token.type == TokenType.FLOW_ENTRY ||
-        token.type == TokenType.FLOW_SEQUENCE_END) {
+    if (token.type == TokenType.value ||
+        token.type == TokenType.flowEntry ||
+        token.type == TokenType.flowSequenceEnd) {
       // libyaml consumes the token here, but that seems like a bug, since it
       // always causes [_parseFlowSequenceEntryMappingValue] to emit an empty
       // scalar.
@@ -548,10 +548,10 @@
   Event _parseFlowSequenceEntryMappingValue() {
     var token = _scanner.peek();
 
-    if (token.type == TokenType.VALUE) {
+    if (token.type == TokenType.value) {
       token = _scanner.advance();
-      if (token.type != TokenType.FLOW_ENTRY &&
-          token.type != TokenType.FLOW_SEQUENCE_END) {
+      if (token.type != TokenType.flowEntry &&
+          token.type != TokenType.flowSequenceEnd) {
         _states.add(_State.FLOW_SEQUENCE_ENTRY_MAPPING_END);
         return _parseNode();
       }
@@ -568,7 +568,7 @@
   ///                                                   *
   Event _parseFlowSequenceEntryMappingEnd() {
     _state = _State.FLOW_SEQUENCE_ENTRY;
-    return Event(EventType.MAPPING_END, _scanner.peek().span.start.pointSpan());
+    return Event(EventType.mappingEnd, _scanner.peek().span.start.pointSpan());
   }
 
   /// Parses the productions:
@@ -588,9 +588,9 @@
     if (first) _scanner.scan();
     var token = _scanner.peek();
 
-    if (token.type != TokenType.FLOW_MAPPING_END) {
+    if (token.type != TokenType.flowMappingEnd) {
       if (!first) {
-        if (token.type != TokenType.FLOW_ENTRY) {
+        if (token.type != TokenType.flowEntry) {
           throw YamlException(
               "While parsing a flow mapping, expected ',' or '}'.",
               token.span.start.pointSpan());
@@ -599,18 +599,18 @@
         token = _scanner.advance();
       }
 
-      if (token.type == TokenType.KEY) {
+      if (token.type == TokenType.key) {
         token = _scanner.advance();
-        if (token.type != TokenType.VALUE &&
-            token.type != TokenType.FLOW_ENTRY &&
-            token.type != TokenType.FLOW_MAPPING_END) {
+        if (token.type != TokenType.value &&
+            token.type != TokenType.flowEntry &&
+            token.type != TokenType.flowMappingEnd) {
           _states.add(_State.FLOW_MAPPING_VALUE);
           return _parseNode();
         } else {
           _state = _State.FLOW_MAPPING_VALUE;
           return _processEmptyScalar(token.span.start);
         }
-      } else if (token.type != TokenType.FLOW_MAPPING_END) {
+      } else if (token.type != TokenType.flowMappingEnd) {
         _states.add(_State.FLOW_MAPPING_EMPTY_VALUE);
         return _parseNode();
       }
@@ -618,7 +618,7 @@
 
     _scanner.scan();
     _state = _states.removeLast();
-    return Event(EventType.MAPPING_END, token.span);
+    return Event(EventType.mappingEnd, token.span);
   }
 
   /// Parses the productions:
@@ -634,10 +634,10 @@
       return _processEmptyScalar(token.span.start);
     }
 
-    if (token.type == TokenType.VALUE) {
+    if (token.type == TokenType.value) {
       token = _scanner.advance();
-      if (token.type != TokenType.FLOW_ENTRY &&
-          token.type != TokenType.FLOW_MAPPING_END) {
+      if (token.type != TokenType.flowEntry &&
+          token.type != TokenType.flowMappingEnd) {
         _states.add(_State.FLOW_MAPPING_KEY);
         return _parseNode();
       }
@@ -657,8 +657,8 @@
 
     VersionDirective versionDirective;
     var tagDirectives = <TagDirective>[];
-    while (token.type == TokenType.VERSION_DIRECTIVE ||
-        token.type == TokenType.TAG_DIRECTIVE) {
+    while (token.type == TokenType.versionDirective ||
+        token.type == TokenType.tagDirective) {
       if (token is VersionDirectiveToken) {
         if (versionDirective != null) {
           throw YamlException('Duplicate %YAML directive.', token.span);
@@ -709,16 +709,16 @@
 
 /// The possible states for the parser.
 class _State {
-  /// Expect [TokenType.STREAM_START].
+  /// Expect [TokenType.streamStart].
   static const STREAM_START = _State('STREAM_START');
 
-  /// Expect [TokenType.DOCUMENT_START].
+  /// Expect [TokenType.documentStart].
   static const DOCUMENT_START = _State('DOCUMENT_START');
 
   /// Expect the content of a document.
   static const DOCUMENT_CONTENT = _State('DOCUMENT_CONTENT');
 
-  /// Expect [TokenType.DOCUMENT_END].
+  /// Expect [TokenType.documentEnd].
   static const DOCUMENT_END = _State('DOCUMENT_END');
 
   /// Expect a block node.
diff --git a/lib/src/scanner.dart b/lib/src/scanner.dart
index 2e49943..98d54df 100644
--- a/lib/src/scanner.dart
+++ b/lib/src/scanner.dart
@@ -95,17 +95,17 @@
   /// [SourceSpan]s.
   final SpanScanner _scanner;
 
-  /// Whether this scanner has produced a [TokenType.STREAM_START] token
+  /// Whether this scanner has produced a [TokenType.streamStart] token
   /// indicating the beginning of the YAML stream.
   var _streamStartProduced = false;
 
-  /// Whether this scanner has produced a [TokenType.STREAM_END] token
+  /// Whether this scanner has produced a [TokenType.streamEnd] token
   /// indicating the end of the YAML stream.
   var _streamEndProduced = false;
 
   /// The queue of tokens yet to be emitted.
   ///
-  /// These are queued up in advance so that [TokenType.KEY] tokens can be
+  /// These are queued up in advance so that [TokenType.key] tokens can be
   /// inserted once the scanner determines that a series of tokens represents a
   /// mapping key.
   final _tokens = QueueList<Token>();
@@ -117,7 +117,7 @@
 
   /// Whether the next token in [_tokens] is ready to be returned.
   ///
-  /// It might not be ready if there may still be a [TokenType.KEY] inserted
+  /// It might not be ready if there may still be a [TokenType.key] inserted
   /// before it.
   var _tokenAvailable = false;
 
@@ -136,7 +136,7 @@
   /// Entries in this list may be `null`, indicating that there is no valid
   /// simple key for the associated level of nesting.
   ///
-  /// When a ":" is parsed and there's a simple key available, a [TokenType.KEY]
+  /// When a ":" is parsed and there's a simple key available, a [TokenType.key]
   /// token is inserted in [_tokens] before that key's token. This allows the
   /// parser to tell that the key is intended to be a mapping key.
   final _simpleKeys = <_SimpleKey>[null];
@@ -301,7 +301,7 @@
     var token = _tokens.removeFirst();
     _tokenAvailable = false;
     _tokensParsed++;
-    _streamEndProduced = token is Token && token.type == TokenType.STREAM_END;
+    _streamEndProduced = token is Token && token.type == TokenType.streamEnd;
     return token;
   }
 
@@ -325,7 +325,7 @@
         _staleSimpleKeys();
 
         // If there are no more tokens to fetch, break.
-        if (_tokens.last.type == TokenType.STREAM_END) break;
+        if (_tokens.last.type == TokenType.streamEnd) break;
 
         // If the current token could be a simple key, we need to scan more
         // tokens until we determine whether it is or not. Otherwise we might
@@ -365,12 +365,12 @@
 
       if (_isBlankOrEndAt(3)) {
         if (_scanner.matches('---')) {
-          _fetchDocumentIndicator(TokenType.DOCUMENT_START);
+          _fetchDocumentIndicator(TokenType.documentStart);
           return;
         }
 
         if (_scanner.matches('...')) {
-          _fetchDocumentIndicator(TokenType.DOCUMENT_END);
+          _fetchDocumentIndicator(TokenType.documentEnd);
           return;
         }
       }
@@ -378,16 +378,16 @@
 
     switch (_scanner.peekChar()) {
       case LEFT_SQUARE:
-        _fetchFlowCollectionStart(TokenType.FLOW_SEQUENCE_START);
+        _fetchFlowCollectionStart(TokenType.flowSequenceStart);
         return;
       case LEFT_CURLY:
-        _fetchFlowCollectionStart(TokenType.FLOW_MAPPING_START);
+        _fetchFlowCollectionStart(TokenType.flowMappingStart);
         return;
       case RIGHT_SQUARE:
-        _fetchFlowCollectionEnd(TokenType.FLOW_SEQUENCE_END);
+        _fetchFlowCollectionEnd(TokenType.flowSequenceEnd);
         return;
       case RIGHT_CURLY:
-        _fetchFlowCollectionEnd(TokenType.FLOW_MAPPING_END);
+        _fetchFlowCollectionEnd(TokenType.flowMappingEnd);
         return;
       case COMMA:
         _fetchFlowEntry();
@@ -442,9 +442,9 @@
           // a quoted string) it isn't required to have whitespace after it
           // since it unambiguously describes a map.
           var token = _tokens.last;
-          if (token.type == TokenType.FLOW_SEQUENCE_END ||
-              token.type == TokenType.FLOW_MAPPING_END ||
-              (token.type == TokenType.SCALAR &&
+          if (token.type == TokenType.flowSequenceEnd ||
+              token.type == TokenType.flowMappingEnd ||
+              (token.type == TokenType.scalar &&
                   (token as ScalarToken).style.isQuoted)) {
             _fetchValue();
             return;
@@ -568,12 +568,12 @@
   /// Pops indentation levels from [_indents] until the current level becomes
   /// less than or equal to [column].
   ///
-  /// For each indentation level, appends a [TokenType.BLOCK_END] token.
+  /// For each indentation level, appends a [TokenType.blockEnd] token.
   void _unrollIndent(int column) {
     if (!_inBlockContext) return;
 
     while (_indent > column) {
-      _tokens.add(Token(TokenType.BLOCK_END, _scanner.emptySpan));
+      _tokens.add(Token(TokenType.blockEnd, _scanner.emptySpan));
       _indents.removeLast();
     }
   }
@@ -581,26 +581,26 @@
   /// Pops indentation levels from [_indents] until the current level resets to
   /// -1.
   ///
-  /// For each indentation level, appends a [TokenType.BLOCK_END] token.
+  /// For each indentation level, appends a [TokenType.blockEnd] token.
   void _resetIndent() => _unrollIndent(-1);
 
-  /// Produces a [TokenType.STREAM_START] token.
+  /// Produces a [TokenType.streamStart] token.
   void _fetchStreamStart() {
     // Much of libyaml's initialization logic here is done in variable
     // initializers instead.
     _streamStartProduced = true;
-    _tokens.add(Token(TokenType.STREAM_START, _scanner.emptySpan));
+    _tokens.add(Token(TokenType.streamStart, _scanner.emptySpan));
   }
 
-  /// Produces a [TokenType.STREAM_END] token.
+  /// Produces a [TokenType.streamEnd] token.
   void _fetchStreamEnd() {
     _resetIndent();
     _removeSimpleKey();
     _simpleKeyAllowed = false;
-    _tokens.add(Token(TokenType.STREAM_END, _scanner.emptySpan));
+    _tokens.add(Token(TokenType.streamEnd, _scanner.emptySpan));
   }
 
-  /// Produces a [TokenType.VERSION_DIRECTIVE] or [TokenType.TAG_DIRECTIVE]
+  /// Produces a [TokenType.versionDirective] or [TokenType.tagDirective]
   /// token.
   void _fetchDirective() {
     _resetIndent();
@@ -610,7 +610,7 @@
     if (directive != null) _tokens.add(directive);
   }
 
-  /// Produces a [TokenType.DOCUMENT_START] or [TokenType.DOCUMENT_END] token.
+  /// Produces a [TokenType.documentStart] or [TokenType.documentEnd] token.
   void _fetchDocumentIndicator(TokenType type) {
     _resetIndent();
     _removeSimpleKey();
@@ -625,8 +625,8 @@
     _tokens.add(Token(type, _scanner.spanFrom(start)));
   }
 
-  /// Produces a [TokenType.FLOW_SEQUENCE_START] or
-  /// [TokenType.FLOW_MAPPING_START] token.
+  /// Produces a [TokenType.flowSequenceStart] or
+  /// [TokenType.flowMappingStart] token.
   void _fetchFlowCollectionStart(TokenType type) {
     _saveSimpleKey();
     _increaseFlowLevel();
@@ -634,7 +634,7 @@
     _addCharToken(type);
   }
 
-  /// Produces a [TokenType.FLOW_SEQUENCE_END] or [TokenType.FLOW_MAPPING_END]
+  /// Produces a [TokenType.flowSequenceEnd] or [TokenType.flowMappingEnd]
   /// token.
   void _fetchFlowCollectionEnd(TokenType type) {
     _removeSimpleKey();
@@ -643,14 +643,14 @@
     _addCharToken(type);
   }
 
-  /// Produces a [TokenType.FLOW_ENTRY] token.
+  /// Produces a [TokenType.flowEntry] token.
   void _fetchFlowEntry() {
     _removeSimpleKey();
     _simpleKeyAllowed = true;
-    _addCharToken(TokenType.FLOW_ENTRY);
+    _addCharToken(TokenType.flowEntry);
   }
 
-  /// Produces a [TokenType.BLOCK_ENTRY] token.
+  /// Produces a [TokenType.blockEntry] token.
   void _fetchBlockEntry() {
     if (_inBlockContext) {
       if (!_simpleKeyAllowed) {
@@ -659,7 +659,7 @@
       }
 
       _rollIndent(
-          _scanner.column, TokenType.BLOCK_SEQUENCE_START, _scanner.location);
+          _scanner.column, TokenType.blockSequenceStart, _scanner.location);
     } else {
       // It is an error for the '-' indicator to occur in the flow context, but
       // we let the Parser detect and report it because it's able to point to
@@ -668,10 +668,10 @@
 
     _removeSimpleKey();
     _simpleKeyAllowed = true;
-    _addCharToken(TokenType.BLOCK_ENTRY);
+    _addCharToken(TokenType.blockEntry);
   }
 
-  /// Produces the [TokenType.KEY] token.
+  /// Produces the [TokenType.key] token.
   void _fetchKey() {
     if (_inBlockContext) {
       if (!_simpleKeyAllowed) {
@@ -680,27 +680,27 @@
       }
 
       _rollIndent(
-          _scanner.column, TokenType.BLOCK_MAPPING_START, _scanner.location);
+          _scanner.column, TokenType.blockMappingStart, _scanner.location);
     }
 
     // Simple keys are allowed after `?` in a block context.
     _simpleKeyAllowed = _inBlockContext;
-    _addCharToken(TokenType.KEY);
+    _addCharToken(TokenType.key);
   }
 
-  /// Produces the [TokenType.VALUE] token.
+  /// Produces the [TokenType.value] token.
   void _fetchValue() {
     var simpleKey = _simpleKeys.last;
     if (simpleKey != null) {
       // Add a [TokenType.KEY] directive before the first token of the simple
       // key so the parser knows that it's part of a key/value pair.
       _tokens.insert(simpleKey.tokenNumber - _tokensParsed,
-          Token(TokenType.KEY, simpleKey.location.pointSpan() as FileSpan));
+          Token(TokenType.key, simpleKey.location.pointSpan() as FileSpan));
 
       // In the block context, we may need to add the
       // [TokenType.BLOCK_MAPPING_START] token.
       _rollIndent(
-          simpleKey.column, TokenType.BLOCK_MAPPING_START, simpleKey.location,
+          simpleKey.column, TokenType.blockMappingStart, simpleKey.location,
           tokenNumber: simpleKey.tokenNumber);
 
       // Remove the simple key.
@@ -719,16 +719,16 @@
       // If we're here, we've found the ':' indicator following a complex key.
 
       _rollIndent(
-          _scanner.column, TokenType.BLOCK_MAPPING_START, _scanner.location);
+          _scanner.column, TokenType.blockMappingStart, _scanner.location);
       _simpleKeyAllowed = true;
     } else if (_simpleKeyAllowed) {
       // If we're here, we've found the ':' indicator with an empty key. This
       // behavior differs from libyaml, which disallows empty implicit keys.
       _simpleKeyAllowed = false;
-      _addCharToken(TokenType.KEY);
+      _addCharToken(TokenType.key);
     }
 
-    _addCharToken(TokenType.VALUE);
+    _addCharToken(TokenType.value);
   }
 
   /// Adds a token with [type] to [_tokens].
@@ -740,21 +740,21 @@
     _tokens.add(Token(type, _scanner.spanFrom(start)));
   }
 
-  /// Produces a [TokenType.ALIAS] or [TokenType.ANCHOR] token.
+  /// Produces a [TokenType.alias] or [TokenType.anchor] token.
   void _fetchAnchor({bool anchor = true}) {
     _saveSimpleKey();
     _simpleKeyAllowed = false;
     _tokens.add(_scanAnchor(anchor: anchor));
   }
 
-  /// Produces a [TokenType.TAG] token.
+  /// Produces a [TokenType.tag] token.
   void _fetchTag() {
     _saveSimpleKey();
     _simpleKeyAllowed = false;
     _tokens.add(_scanTag());
   }
 
-  /// Produces a [TokenType.SCALAR] token with style [ScalarStyle.LITERAL] or
+  /// Produces a [TokenType.scalar] token with style [ScalarStyle.LITERAL] or
   /// [ScalarStyle.FOLDED].
   void _fetchBlockScalar({bool literal = false}) {
     _removeSimpleKey();
@@ -762,7 +762,7 @@
     _tokens.add(_scanBlockScalar(literal: literal));
   }
 
-  /// Produces a [TokenType.SCALAR] token with style [ScalarStyle.SINGLE_QUOTED]
+  /// Produces a [TokenType.scalar] token with style [ScalarStyle.SINGLE_QUOTED]
   /// or [ScalarStyle.DOUBLE_QUOTED].
   void _fetchFlowScalar({bool singleQuote = false}) {
     _saveSimpleKey();
@@ -770,7 +770,7 @@
     _tokens.add(_scanFlowScalar(singleQuote: singleQuote));
   }
 
-  /// Produces a [TokenType.SCALAR] token with style [ScalarStyle.PLAIN].
+  /// Produces a [TokenType.scalar] token with style [ScalarStyle.PLAIN].
   void _fetchPlainScalar() {
     _saveSimpleKey();
     _simpleKeyAllowed = false;
@@ -816,7 +816,7 @@
     }
   }
 
-  /// Scans a [TokenType.YAML_DIRECTIVE] or [TokenType.TAG_DIRECTIVE] token.
+  /// Scans a [TokenType.versionDirective] or [TokenType.tagDirective] token.
   ///
   ///     %YAML    1.2    # a comment \n
   ///     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -940,7 +940,7 @@
     return TagDirectiveToken(_scanner.spanFrom(start), handle, prefix);
   }
 
-  /// Scans a [TokenType.ANCHOR] token.
+  /// Scans a [TokenType.anchor] token.
   Token _scanAnchor({bool anchor = true}) {
     var start = _scanner.state;
 
@@ -977,7 +977,7 @@
     }
   }
 
-  /// Scans a [TokenType.TAG] token.
+  /// Scans a [TokenType.tag] token.
   Token _scanTag() {
     String handle;
     String suffix;
@@ -1090,11 +1090,11 @@
     _scanner.readChar();
 
     // Check for a chomping indicator.
-    var chomping = _Chomping.CLIP;
+    var chomping = _Chomping.clip;
     var increment = 0;
     var char = _scanner.peekChar();
     if (char == PLUS || char == HYPHEN) {
-      chomping = char == PLUS ? _Chomping.KEEP : _Chomping.STRIP;
+      chomping = char == PLUS ? _Chomping.keep : _Chomping.strip;
       _scanner.readChar();
 
       // Check for an indentation indicator.
@@ -1118,7 +1118,7 @@
 
       char = _scanner.peekChar();
       if (char == PLUS || char == HYPHEN) {
-        chomping = char == PLUS ? _Chomping.KEEP : _Chomping.STRIP;
+        chomping = char == PLUS ? _Chomping.keep : _Chomping.strip;
         _scanner.readChar();
       }
     }
@@ -1203,8 +1203,8 @@
     }
 
     // Chomp the tail.
-    if (chomping != _Chomping.STRIP) buffer.write(leadingBreak);
-    if (chomping == _Chomping.KEEP) buffer.write(trailingBreaks);
+    if (chomping != _Chomping.strip) buffer.write(leadingBreak);
+    if (chomping == _Chomping.keep) buffer.write(trailingBreaks);
 
     return ScalarToken(_scanner.spanFrom(start, end), buffer.toString(),
         literal ? ScalarStyle.LITERAL : ScalarStyle.FOLDED);
@@ -1662,17 +1662,16 @@
       : required = required;
 }
 
-/// An enum of chomping indicators that describe how to handle trailing
-/// whitespace for a block scalar.
+/// The ways to handle trailing whitespace for a block scalar.
 ///
 /// See http://yaml.org/spec/1.2/spec.html#id2794534.
 enum _Chomping {
   /// All trailing whitespace is discarded.
-  STRIP,
+  strip,
 
   /// A single trailing newline is retained.
-  CLIP,
+  clip,
 
   /// All trailing whitespace is preserved.
-  KEEP
+  keep
 }
diff --git a/lib/src/token.dart b/lib/src/token.dart
index 9051d68..8416554 100644
--- a/lib/src/token.dart
+++ b/lib/src/token.dart
@@ -8,10 +8,7 @@
 
 /// A token emitted by a [Scanner].
 class Token {
-  /// The token type.
   final TokenType type;
-
-  /// The span associated with the token.
   final FileSpan span;
 
   Token(this.type, this.span);
@@ -23,7 +20,7 @@
 /// A token representing a `%YAML` directive.
 class VersionDirectiveToken implements Token {
   @override
-  TokenType get type => TokenType.VERSION_DIRECTIVE;
+  TokenType get type => TokenType.versionDirective;
   @override
   final FileSpan span;
 
@@ -42,7 +39,7 @@
 /// A token representing a `%TAG` directive.
 class TagDirectiveToken implements Token {
   @override
-  TokenType get type => TokenType.TAG_DIRECTIVE;
+  TokenType get type => TokenType.tagDirective;
   @override
   final FileSpan span;
 
@@ -61,11 +58,10 @@
 /// A token representing an anchor (`&foo`).
 class AnchorToken implements Token {
   @override
-  TokenType get type => TokenType.ANCHOR;
+  TokenType get type => TokenType.anchor;
   @override
   final FileSpan span;
 
-  /// The name of the anchor.
   final String name;
 
   AnchorToken(this.span, this.name);
@@ -77,11 +73,10 @@
 /// A token representing an alias (`*foo`).
 class AliasToken implements Token {
   @override
-  TokenType get type => TokenType.ALIAS;
+  TokenType get type => TokenType.alias;
   @override
   final FileSpan span;
 
-  /// The name of the anchor.
   final String name;
 
   AliasToken(this.span, this.name);
@@ -93,11 +88,11 @@
 /// A token representing a tag (`!foo`).
 class TagToken implements Token {
   @override
-  TokenType get type => TokenType.TAG;
+  TokenType get type => TokenType.tag;
   @override
   final FileSpan span;
 
-  /// The tag handle.
+  /// The tag handle for named tags.
   final String handle;
 
   /// The tag suffix, or `null`.
@@ -109,14 +104,14 @@
   String toString() => 'TAG $handle $suffix';
 }
 
-/// A tkoen representing a scalar value.
+/// A token representing a scalar value.
 class ScalarToken implements Token {
   @override
-  TokenType get type => TokenType.SCALAR;
+  TokenType get type => TokenType.scalar;
   @override
   final FileSpan span;
 
-  /// The contents of the scalar.
+  /// The unparsed contents of the value.
   final String value;
 
   /// The style of the scalar in the original source.
@@ -128,32 +123,32 @@
   String toString() => 'SCALAR $style "$value"';
 }
 
-/// An enum of types of [Token] object.
+/// The types of [Token] objects.
 enum TokenType {
-  STREAM_START,
-  STREAM_END,
+  streamStart,
+  streamEnd,
 
-  VERSION_DIRECTIVE,
-  TAG_DIRECTIVE,
-  DOCUMENT_START,
-  DOCUMENT_END,
+  versionDirective,
+  tagDirective,
+  documentStart,
+  documentEnd,
 
-  BLOCK_SEQUENCE_START,
-  BLOCK_MAPPING_START,
-  BLOCK_END,
+  blockSequenceStart,
+  blockMappingStart,
+  blockEnd,
 
-  FLOW_SEQUENCE_START,
-  FLOW_SEQUENCE_END,
-  FLOW_MAPPING_START,
-  FLOW_MAPPING_END,
+  flowSequenceStart,
+  flowSequenceEnd,
+  flowMappingStart,
+  flowMappingEnd,
 
-  BLOCK_ENTRY,
-  FLOW_ENTRY,
-  KEY,
-  VALUE,
+  blockEntry,
+  flowEntry,
+  key,
+  value,
 
-  ALIAS,
-  ANCHOR,
-  TAG,
-  SCALAR
+  alias,
+  anchor,
+  tag,
+  scalar
 }