highlightpy: a Python repl by @mat1 on repl.it

Files
  • main.py
  • lexer.py

main.py
import lexer

from pprint import pprint

filename = 'main.py'

lang = lexer.Language()

keywords = ('False', 'None', 'True', 'and', 'as', 'assert', 'async','await', 'break', 'class', 'continue', 'def', 'del', 'elif', 'else', 'except', 'finally', 'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda', 'nonlocal', 'not', 'or', 'pass', 'raise', 'return', 'try', 'while', 'with', 'yield')

builtins = ('abs', 'all', 'any', 'ascii', 'bin', 'bool', 'breakpoint', 'bytearray', 'bytes', 'callable', 'chr', 'classmethod', 'compile', 'complex', 'copyright', 'credits', 'delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'exec', 'exit', 'filter', 'float', 'format', 'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'help', 'hex', 'id', 'input', 'int', 'isinstance', 'issubclass', 'iter', 'len', 'license', 'list', 'locals', 'map', 'max', 'memoryview', 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'print', 'property', 'quit', 'range', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type', 'vars', 'zip', 'self')

# Definitions are tried in order from top to bottom, so earlier patterns take priority.
lang.set_definitions(
	(r'[\)}\]]', lexer.tokens.closeBracket),
	(r'[\({\[]', lexer.tokens.openBracket),

	(r"'''(?s).*?'''", lexer.tokens.string),
	(r'"""(?s).*?"""', lexer.tokens.string),
	(r"'[^']*'", lexer.tokens.string),
	(r'"[^"]*"', lexer.tokens.string),

	(r'#(.*)', lexer.tokens.comment),

	(r'-?(\.?[0-9])+', lexer.tokens.number),
	(r'\b(' + '|'.join(keywords) + r')\b', lexer.tokens.keyword),
	(r'\b(' + '|'.join(builtins) + r')\b', lexer.tokens.builtin),
	# Multi-character operators first so '==' is not split into two '=' tokens.
	(r'==|>=|<=|!=|\+|\-|=|\*|\/', lexer.tokens.operator),
	(r"\w+", lexer.tokens.variable),
	(r"\s+", lexer.tokens.whitespace)
)

# vs dark theme (kinda good): RGB color for each token type
vs_dark = {
	lexer.tokens.string: (206, 145, 120),
	lexer.tokens.comment: (96, 139, 78),
	lexer.tokens.builtin: (86, 156, 214),
	lexer.tokens.keyword: (86, 156, 214),
	lexer.tokens.number: (181, 206, 168),
}

highlights = vs_dark

# Read the source file to highlight (here main.py, i.e. the script itself).
with open(filename, 'r') as f:
	code = f.read()

parsed = lang.parse(code)
pprint(parsed)
output = ''
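# Look up each token's theme color. RGB tuples become 24-bit ANSI escapes
# (38;2;R;G;B); tokens without a theme entry fall back to bright white (97).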
for token, text in parsed:
	color = highlights.get(token, '97')
	if isinstance(color, tuple):
		color = '38;2;' + ';'.join(map(str, color))
	ansi_color = f'\033[{color}m'
	output += ansi_color
	output += text
print('\n\n\n\n')  # leave a gap after the pprint debug output
print(output + '\033[0m')  # reset the color so later terminal output stays uncolored
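
lexer.py is listed in the Files pane but its contents aren't shown in this capture. The sketch below is a guess at a minimal implementation, inferred only from how main.py uses the module: a tokens namespace with the attribute names referenced above, Language.set_definitions(*pairs) taking (pattern, token) pairs, and Language.parse(code) returning (token, text) tuples by trying the patterns top to bottom and taking the first match. Everything beyond those names is an assumption, not the original code.

lexer.py (hypothetical sketch)

import re
from enum import Enum, auto

class tokens(Enum):
	# Token kinds referenced from main.py.
	openBracket = auto()
	closeBracket = auto()
	string = auto()
	comment = auto()
	number = auto()
	keyword = auto()
	builtin = auto()
	operator = auto()
	variable = auto()
	whitespace = auto()

class Language:
	def __init__(self):
		self.definitions = []

	def set_definitions(self, *definitions):
		# Each definition is a (pattern, token) pair; order determines priority.
		self.definitions = [(re.compile(pattern), token) for pattern, token in definitions]

	def parse(self, code):
		# Try every pattern at the current position, top to bottom;
		# the first one that matches yields the next token.
		result = []
		pos = 0
		while pos < len(code):
			for pattern, token in self.definitions:
				match = pattern.match(code, pos)
				if match and match.end() > pos:
					result.append((token, match.group()))
					pos = match.end()
					break
			else:
				# Nothing matched: pass the character through unclassified.
				result.append((tokens.variable, code[pos]))
				pos += 1
		return result

Under this sketch, lang.parse('x = 1') comes out as [(tokens.variable, 'x'), (tokens.whitespace, ' '), (tokens.operator, '='), (tokens.whitespace, ' '), (tokens.number, '1')], which is the (token, text) shape the rendering loop in main.py expects.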