1 | #!/usr/bin/env python2
|
2 | """
|
3 | prompt_test.py: Tests for prompt.py
|
4 | """
|
5 | from __future__ import print_function
|
6 |
|
7 | import unittest
|
8 |
|
9 | from core import test_lib
|
10 | from frontend import match
|
11 | from osh import prompt # module under test
|
12 |
|
13 |
|
class PromptTest(unittest.TestCase):
    """Unit tests for prompt.Evaluator and its \\[ \\] escape handling."""

    def setUp(self):
        # Wire up a minimal Evaluator with a fresh arena, mem, and parse
        # context from the test helpers.
        arena = test_lib.MakeArena('<ui_test.py>')
        parse_ctx = test_lib.InitParseContext()
        mem = test_lib.MakeMem(arena)
        self.p = prompt.Evaluator('osh', '0.0.0', parse_ctx, mem)
        # The word evaluator carries its own, separate 'mem' object.
        self.p.word_ev = test_lib.InitWordEvaluator()

    def testEvaluator(self):
        # Evaluating the same prompt twice must give the same answer.
        # Regression for a caching bug!
        for _ in range(2):
            self.assertEqual('foo', self.p.EvalPrompt('foo'))

    def testNoEscapes(self):
        # Strings containing no \[ or \] sequences pass through unchanged,
        # even ones full of bare brackets.
        plain_prompts = ["> ", "osh>", "[[]][[]][][]]][["]
        for s in plain_prompts:
            self.assertEqual(s, self.p.EvalPrompt(s))

    def testValidEscapes(self):
        # Balanced \[ ... \] pairs are rewritten to the readline
        # invisible-text markers \x01 (start) and \x02 (end).
        cases = [
            "\[\033[01;34m\]user\[\033[00m\] >",
            r"\[\]\[\]\[\]",
            r"\[\] hi \[hi\] \[\] hello",
        ]
        for s in cases:
            expected = s.replace(r"\[", "\x01").replace(r"\]", "\x02")
            self.assertEqual(expected, self.p.EvalPrompt(s))

    def testInvalidEscapes(self):
        # Unbalanced \[ and \] must yield an error message, not a crash.
        bad_prompts = [
            r"\[\[",
            r"\[\]\[\]\]",
            r"\]\]",
            r"almost valid \]",
            r"\[almost valid",
            r"\]\[",  # goes negative!
        ]
        for s in bad_prompts:
            tokens = match.Ps1Tokens(s)
            self.assertEqual(r'<Error: Unbalanced \[ and \]> ',
                             self.p._ReplaceBackslashCodes(tokens))
|
54 |
|
55 |
|
# Run all test cases in this module when invoked as a script.
if __name__ == '__main__':
    unittest.main()
|