aboutsummaryrefslogtreecommitdiff
path: root/tools/closure_linter-2.3.4/closure_linter/common/tokens.py
diff options
context:
space:
mode:
authorGravatar Trevor Elliott <trevor@galois.com>2013-05-16 14:28:25 -0700
committerGravatar Trevor Elliott <trevor@galois.com>2013-05-16 14:28:25 -0700
commitd316614847c16569da34a42e808dfb332fc6b6c9 (patch)
tree1d76a49c647c645d4513ab970b500f3c7b6a0cf1 /tools/closure_linter-2.3.4/closure_linter/common/tokens.py
parentb4f01ad9fa584c77fe6b2a6f55a9e5c00a701e58 (diff)
Remove google closure, and start reworking the build system
Squashed commit of the following: commit 446aae2afd089c28abd1d03a5fd20d4735837e16 Author: Trevor Elliott <trevor@galois.com> Date: Thu May 16 11:50:17 2013 -0700 stage-dir doesn't need to be cleaned Since everything goes into a common build tree now, cleaning just involves removing the entire build tree. commit d8f531ddf8ee1406ec915502c28dc0eb3912d0ee Author: Trevor Elliott <trevor@galois.com> Date: Thu May 16 11:47:10 2013 -0700 Switch to placing build artifacts in a build tree commit 9eedeec8d6a1012b1b7e466120260276b1e952d4 Author: Trevor Elliott <trevor@galois.com> Date: Thu May 16 11:35:01 2013 -0700 Remove the closure_compiler and closure_linter commit 5784158cf2cd55f0ffd01147ae014379ecc857fd Author: Trevor Elliott <trevor@galois.com> Date: Thu May 16 11:34:27 2013 -0700 Move the scripts in build to tools/bin commit 64a6a53ea0fd5e299e9d17c0e4f8fedf305272dc Author: Trevor Elliott <trevor@galois.com> Date: Thu May 16 11:27:55 2013 -0700 Build jsdoc Also, remove the old Makefiles that were in doc, as they're not necessary anymore. 
commit 1ef0d9e39cd4a24807ee6ca956fbc627fb851b9d Author: Trevor Elliott <trevor@galois.com> Date: Thu May 16 11:18:59 2013 -0700 Conditionally build the manual commit c326c58059e0d5035edecfd6261ee42797c49c2c Author: Trevor Elliott <trevor@galois.com> Date: Thu May 16 11:13:31 2013 -0700 Get the html manual building again commit 480fa132ffb0562eb3f61d45d79d3315b1d3cc29 Author: Trevor Elliott <trevor@galois.com> Date: Thu May 16 11:13:01 2013 -0700 Move doc specific .gitignore stuff to doc/.gitignore commit 8c108d4e0df848839bcd6b4c22d623053f590e95 Author: Trevor Elliott <trevor@galois.com> Date: Wed May 15 10:42:41 2013 -0700 Fix some path inconsistencies in the contexts build.mk commit ee53404be09cf26983365374da84ade564b92926 Author: Trevor Elliott <trevor@galois.com> Date: Wed May 15 10:37:40 2013 -0700 Preliminary build system changes * Chrome extension builds, but there are problems commit 474c6b88190787aeffd960ffb5855d31770e7141 Author: Trevor Elliott <trevor@galois.com> Date: Mon May 13 19:06:31 2013 -0700 Remove the closure toolkit
Diffstat (limited to 'tools/closure_linter-2.3.4/closure_linter/common/tokens.py')
-rwxr-xr-xtools/closure_linter-2.3.4/closure_linter/common/tokens.py139
1 files changed, 0 insertions, 139 deletions
diff --git a/tools/closure_linter-2.3.4/closure_linter/common/tokens.py b/tools/closure_linter-2.3.4/closure_linter/common/tokens.py
deleted file mode 100755
index 4c7d818..0000000
--- a/tools/closure_linter-2.3.4/closure_linter/common/tokens.py
+++ /dev/null
@@ -1,139 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2008 The Closure Linter Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS-IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Classes to represent tokens and positions within them."""
-
# Module authorship metadata (conventional dunder): tuple of
# "email (Full Name)" entries for the original authors.
__author__ = ('robbyw@google.com (Robert Walker)',
              'ajp@google.com (Andy Perelson)')
-
-
class TokenType(object):
  """Token categories shared by every language the linter handles.

  Each category is a plain string constant; `Token.type` holds one of
  these (or a language-specific extension defined elsewhere).
  """

  # Ordinary source text with no special classification.
  NORMAL = 'normal'
  # A run of whitespace between other tokens.
  WHITESPACE = 'whitespace'
  # NOTE(review): presumably marks an entirely-empty line — confirm
  # against the tokenizer that produces it.
  BLANK_LINE = 'blank line'
-
-
class Token(object):
  """Token class for intelligent text splitting.

  The token class represents a string of characters and an identifying type.
  Tokens form a doubly linked list (via `previous`/`next`), so a single
  token can be used to iterate forward or backward through the whole stream.

  Attributes:
    type: The type of token.
    string: The characters the token comprises.
    length: The length of the token.
    line: The text of the line the token is found in.
    line_number: The number of the line the token is found in.
    values: Dictionary of values returned from the tokens regex match.
    previous: The token before this one.
    next: The token after this one.
    start_index: The character index in the line where this token starts.
    attached_object: Object containing more information about this token.
    metadata: Object containing metadata about this token. Must be added by
        a separate metadata pass.
  """

  def __init__(self, string, token_type, line, line_number, values=None):
    """Creates a new Token object.

    Args:
      string: The string of input the token contains.
      token_type: The type of token.
      line: The text of the line this token is in.
      line_number: The line number of the token.
      values: A dict of named values within the token. For instance, a
        function declaration may have a value called 'name' which captures
        the name of the function.
    """
    self.type = token_type
    self.string = string
    self.length = len(string)
    self.line = line
    self.line_number = line_number
    self.values = values

    # These parts can only be computed when the file is fully tokenized.
    self.previous = None
    self.next = None
    self.start_index = None

    # This part is set in statetracker.py.
    # TODO(robbyw): Wrap this in to metadata.
    self.attached_object = None

    # This part is set in *metadatapass.py.
    self.metadata = None

  def IsFirstInLine(self):
    """Tests if this token is the first token in its line.

    Returns:
      Whether the token is the first token in its line.
    """
    return not self.previous or self.previous.line_number != self.line_number

  def IsLastInLine(self):
    """Tests if this token is the last token in its line.

    Returns:
      Whether the token is the last token in its line.
    """
    return not self.next or self.next.line_number != self.line_number

  def IsType(self, token_type):
    """Tests if this token is of the given type.

    Args:
      token_type: The type to test for.

    Returns:
      True if the type of this token matches the type passed in.
    """
    return self.type == token_type

  def IsAnyType(self, *token_types):
    """Tests if this token is any of the given types.

    Args:
      token_types: The types to check. Also accepts a single list or tuple
        of types as the sole argument.

    Returns:
      True if the type of this token is any of the types passed in.
    """
    # BUG FIX: the original tested `isinstance(token_types[0], basestring)`,
    # which is Python-2-only and raises NameError on Python 3 for every
    # call. Checking for the documented "single array" case directly works
    # on both Python 2 and Python 3.
    if isinstance(token_types[0], (list, tuple)):
      return self.type in token_types[0]
    return self.type in token_types

  def __repr__(self):
    return '<Token: %s, "%s", %r, %d, %r>' % (self.type, self.string,
                                              self.values, self.line_number,
                                              self.metadata)

  def __iter__(self):
    """Returns a token iterator."""
    node = self
    while node:
      yield node
      node = node.next

  def __reversed__(self):
    """Returns a reverse-direction token iterator."""
    node = self
    while node:
      yield node
      node = node.previous