|
""" |
|
pygments.lexers.dylan |
|
~~~~~~~~~~~~~~~~~~~~~ |
|
|
|
Lexers for the Dylan language. |
|
|
|
:copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS. |
|
:license: BSD, see LICENSE for details. |
|
""" |
|
|
|
import re |
|
|
|
from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \ |
|
default, line_re |
|
from pygments.token import Comment, Operator, Keyword, Name, String, \ |
|
Number, Punctuation, Generic, Literal, Whitespace |
|
|
|
__all__ = ['DylanLexer', 'DylanConsoleLexer', 'DylanLidLexer'] |
|
|
|
|
|
class DylanLexer(RegexLexer):
    """
    For the Dylan language.

    The regex table only emits generic ``Name`` tokens for identifiers;
    ``get_tokens_unprocessed`` then re-classifies them (case-insensitively)
    against the word sets below.
    """

    name = 'Dylan'
    url = 'http://www.opendylan.org/'
    aliases = ['dylan']
    filenames = ['*.dylan', '*.dyl', '*.intr']
    mimetypes = ['text/x-dylan']
    version_added = '0.7'

    # Dylan is case-insensitive, so match regexes case-insensitively too.
    flags = re.IGNORECASE

    # Defining words and adjectives, highlighted as Name.Builtin.
    builtins = {
        'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
        'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
        'each-subclass', 'exception', 'exclude', 'function', 'generic',
        'handler', 'inherited', 'inline', 'inline-only', 'instance',
        'interface', 'import', 'keyword', 'library', 'macro', 'method',
        'module', 'open', 'primary', 'required', 'sealed', 'sideways',
        'singleton', 'slot', 'thread', 'variable', 'virtual'}

    # Statement and clause words, highlighted as Keyword.
    keywords = {
        'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
        'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
        'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
        'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
        'while'}

    # Operator spellings that the valid_name pattern also matches; they are
    # re-flagged from Name to Operator in get_tokens_unprocessed.
    # (The original set listed '|' twice; the duplicate has been removed.)
    operators = {
        '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
        '>', '>=', '&'}

    # Standard-library functions, highlighted as Name.Builtin.
    functions = {
        'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
        'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
        'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
        'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol',
        'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose',
        'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as',
        'condition-format-arguments', 'condition-format-string', 'conjoin',
        'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions',
        'direct-subclasses', 'direct-superclasses', 'disjoin', 'do',
        'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?',
        'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first',
        'first-setter', 'floor', 'floor/', 'forward-iteration-protocol',
        'function-arguments', 'function-return-values',
        'function-specializers', 'gcd', 'generic-function-mandatory-keywords',
        'generic-function-methods', 'head', 'head-setter', 'identity',
        'initialize', 'instance?', 'integral?', 'intersection',
        'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited',
        'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make',
        'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes',
        'min', 'modulo', 'negative', 'negative?', 'next-method',
        'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop',
        'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank',
        'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!',
        'remove-duplicates', 'remove-duplicates!', 'remove-key!',
        'remove-method', 'replace-elements!', 'replace-subsequence!',
        'restart-query', 'return-allowed?', 'return-description',
        'return-query', 'reverse', 'reverse!', 'round', 'round/',
        'row-major-index', 'second', 'second-setter', 'shallow-copy',
        'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?',
        'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position',
        'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
        'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
        'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
        'vector', 'zero?'}

    # Dylan names may contain many punctuation characters and may be escaped
    # with a leading backslash.
    valid_name = '\\\\?[\\w!&*<>|^$%@\\-+~?/=]+'

    def get_tokens_unprocessed(self, text):
        """Post-process Name tokens into builtins/keywords/operators."""
        for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
            if token is Name:
                # Compare in lowercase: Dylan identifiers are case-insensitive.
                lowercase_value = value.lower()
                if lowercase_value in self.builtins:
                    yield index, Name.Builtin, value
                    continue
                if lowercase_value in self.keywords:
                    yield index, Keyword, value
                    continue
                if lowercase_value in self.functions:
                    yield index, Name.Builtin, value
                    continue
                if lowercase_value in self.operators:
                    yield index, Operator, value
                    continue
            yield index, token, value

    tokens = {
        'root': [
            # Whitespace
            (r'\s+', Whitespace),

            # single line comment
            (r'//.*?\n', Comment.Single),

            # interchange-format header line ("Key: value", possibly continued
            # on indented follow-up lines)
            (r'([a-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
             bygroups(Name.Attribute, Operator, Whitespace, String)),

            # anything else is ordinary Dylan code
            default('code')
        ],
        'code': [
            # Whitespace
            (r'\s+', Whitespace),

            # single line comment
            (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),

            # multi-line comment (may nest; see 'comment' state)
            (r'/\*', Comment.Multiline, 'comment'),

            # strings and character literals
            (r'"', String, 'string'),
            (r"'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'", String.Char),

            # binary integer
            (r'#b[01]+', Number.Bin),

            # octal integer
            (r'#o[0-7]+', Number.Oct),

            # floating point
            (r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float),

            # decimal integer
            (r'[-+]?\d+', Number.Integer),

            # hex integer
            (r'#x[0-9a-f]+', Number.Hex),

            # macro pattern variables with a constraint, e.g. ?name:expression
            (r'(\?' + valid_name + ')(:)'
             r'(token|name|variable|expression|body|case-body|\*)',
             bygroups(Name.Tag, Operator, Name.Builtin)),
            (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)',
             bygroups(Name.Tag, Operator, Name.Builtin)),
            (r'\?' + valid_name, Name.Tag),

            # punctuation
            (r'(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])', Punctuation),

            # most operators match valid_name and are re-flagged by
            # get_tokens_unprocessed; := is not a valid name, so match it here
            (r':=', Operator),

            # booleans
            (r'#[tf]', Literal),

            # symbols with string syntax, e.g. #"hello"
            (r'#"', String.Symbol, 'keyword'),

            # syntax symbols, e.g. #rest, #key
            (r'#[a-z0-9-]+', Keyword),

            # symbols with keyword syntax, e.g. foo:
            (valid_name + ':', Keyword),

            # class names by convention, e.g. <string>
            ('<' + valid_name + '>', Name.Class),

            # module variables by convention, e.g. *standard-output*
            (r'\*' + valid_name + r'\*', Name.Variable.Global),

            # constants by convention, e.g. $pi
            (r'\$' + valid_name, Name.Constant),

            # everything else is a plain name (re-classified later)
            (valid_name, Name),
        ],
        'comment': [
            (r'[^*/]+', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),  # comments nest
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline)  # lone * or / inside the comment
        ],
        'keyword': [
            (r'"', String.Symbol, '#pop'),
            (r'[^\\"]+', String.Symbol),  # all other characters
        ],
        'string': [
            (r'"', String, '#pop'),
            (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', String.Escape),
            (r'[^\\"\n]+', String),  # all other characters
            (r'\\\n', String),       # line continuation
            (r'\\', String),         # stray backslash
        ]
    }
|
|
|
|
|
class DylanLidLexer(RegexLexer):
    """
    For Dylan LID (Library Interchange Definition) files.

    A LID file is a flat list of ``Key: value`` headers, so a single
    regex state is sufficient.
    """

    name = 'DylanLID'
    url = 'http://www.opendylan.org/'
    aliases = ['dylan-lid', 'lid']
    filenames = ['*.lid', '*.hdp']
    mimetypes = ['text/x-dylan-lid']
    version_added = '1.6'

    # Header keys are matched case-insensitively.
    flags = re.IGNORECASE

    tokens = {
        'root': [
            # whitespace
            (r'\s+', Whitespace),

            # single-line comment
            (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),

            # header line: key, colon, value (continuation lines are
            # indented with a space or tab)
            (r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
             bygroups(Name.Attribute, Operator, Whitespace, String)),
        ]
    }
|
|
|
|
|
class DylanConsoleLexer(Lexer):
    """
    For Dylan interactive console output.

    This is based on a copy of the ``RubyConsoleLexer``.
    """
    name = 'Dylan session'
    aliases = ['dylan-console', 'dylan-repl']
    filenames = ['*.dylan-console']
    mimetypes = ['text/x-dylan-console']
    url = 'http://www.opendylan.org/'
    version_added = '1.6'
    _example = 'dylan-console/console.dylan-console'

    # A console line is either prompted ("?") or continued (leading space).
    _prompt_re = re.compile(r'\?| ')

    def get_tokens_unprocessed(self, text):
        """Split prompt/output lines; lex the prompted code as Dylan."""
        code_lexer = DylanLexer(**self.options)

        buffered = ''    # accumulated source from prompted lines
        pending = []     # (offset, prompt tokens) for do_insertions
        for match in line_re.finditer(text):
            line = match.group()
            prompt = self._prompt_re.match(line)
            if prompt is None:
                # Output line: flush any buffered code first, then emit
                # the line verbatim as Generic.Output.
                if buffered:
                    yield from do_insertions(
                        pending, code_lexer.get_tokens_unprocessed(buffered))
                    buffered = ''
                    pending = []
                yield match.start(), Generic.Output, line
            else:
                # Prompted line: remember the prompt text for insertion
                # and buffer the remainder as code.
                split = prompt.end()
                pending.append((len(buffered),
                                [(0, Generic.Prompt, line[:split])]))
                buffered += line[split:]
        # Flush code left over at end of input.
        if buffered:
            yield from do_insertions(
                pending, code_lexer.get_tokens_unprocessed(buffered))
|
|