Merge branch 'master' into custom-shell

Aaron Meurer 2017-04-17 13:01:32 -05:00 committed by GitHub
commit 074d249d28
4 changed files with 91 additions and 58 deletions

pudb/debugger.py

@@ -7,31 +7,16 @@ import bdb
import gc
import os
import sys
from functools import partial
from types import TracebackType
from pudb.lowlevel import decode_lines
from pudb.settings import load_config, save_config
from pudb.py3compat import PY3, raw_input, execfile
CONFIG = load_config()
save_config(CONFIG)
from pudb.py3compat import PY3, raw_input, execfile
if PY3:
    _next = "__next__"
else:
    _next = "next"
try:
    from functools import partial
except ImportError:
    def partial(func, *args, **keywords):
        def newfunc(*fargs, **fkeywords):
            newkeywords = keywords.copy()
            newkeywords.update(fkeywords)
            return func(*(args + fargs), **newkeywords)
        newfunc.func = func
        newfunc.args = args
        newfunc.keywords = keywords
        return newfunc
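
The try/except block above is the fallback this change removes: it re-implemented functools.partial, presumably for very old Python versions where that import fails, and it is superseded by the plain `from functools import partial` now added at the top of the module. A minimal sketch (illustrative only, not part of this commit) of the behaviour both spellings provide:

    from functools import partial

    def greet(greeting, name):
        return "%s, %s!" % (greeting, name)

    # partial() pre-binds leading positional arguments, which is exactly what
    # the removed shim emulated with a closure; it also exposes .func, .args
    # and .keywords, as the shim did.
    hello = partial(greet, "Hello")
    assert hello("PuDB") == "Hello, PuDB!"
    assert hello.func is greet and hello.args == ("Hello",)
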
HELP_TEXT = """\
Welcome to PuDB, the Python Urwid debugger.
-------------------------------------------
@@ -555,10 +540,7 @@ class FileSourceCodeProvider(SourceCodeProvider):
        self.file_name = debugger.canonic(file_name)

    def __eq__(self, other):
        return (
                type(self) == type(other)
                and
                self.file_name == other.file_name)
        return type(self) == type(other) and self.file_name == other.file_name

    def identifier(self):
        return self.file_name
@@ -585,18 +567,7 @@ class FileSourceCodeProvider(SourceCodeProvider):
        try:
            from linecache import getlines
            lines = getlines(self.file_name)
            from pudb.lowlevel import detect_encoding
            source_enc, _ = detect_encoding(getattr(iter(lines), _next))
            decoded_lines = []
            for l in lines:
                if hasattr(l, "decode"):
                    decoded_lines.append(l.decode(source_enc))
                else:
                    decoded_lines.append(l)
            return format_source(debugger_ui, decoded_lines, set(breakpoints))
            return format_source(debugger_ui, list(decode_lines(lines)), set(breakpoints))
        except:
            from pudb.lowlevel import format_exception
            debugger_ui.message("Could not load source file '%s':\n\n%s" % (
@@ -631,22 +602,8 @@ class DirectSourceCodeProvider(SourceCodeProvider):
    def get_lines(self, debugger_ui):
        from pudb.source_view import format_source
        lines = self.code.split("\n")
        from pudb.lowlevel import detect_encoding
        source_enc, _ = detect_encoding(getattr(iter(lines), _next))
        decoded_lines = []
        for i, l in enumerate(lines):
            if hasattr(l, "decode"):
                l = l.decode(source_enc)
            if i+1 < len(lines):
                l += "\n"
            decoded_lines.append(l)
        return format_source(debugger_ui, decoded_lines, set())
        lines = self.code.splitlines(True)
        return format_source(debugger_ui, list(decode_lines(lines)), set())

# }}}
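
In the new version of DirectSourceCodeProvider.get_lines above, str.splitlines(True) keeps each line's terminator, which the removed loop had to re-append by hand after split("\n"), and it avoids the trailing empty element that split("\n") produces. A quick illustration (not part of the commit):

    code = "x = 1\ny = 2\n"
    # split("\n") strips the newlines and leaves a trailing empty string ...
    assert code.split("\n") == ["x = 1", "y = 2", ""]
    # ... while splitlines(True) keeps the terminators and adds no phantom line
    assert code.splitlines(True) == ["x = 1\n", "y = 2\n"]
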

pudb/lowlevel.py

@@ -86,7 +86,7 @@ def lookup_module(filename):
# {{{ file encoding detection
# stolen from Python 3.1's tokenize.py, by Ka-Ping Yee
# the main idea stolen from Python 3.1's tokenize.py, by Ka-Ping Yee
import re
cookie_re = re.compile("^\s*#.*coding[:=]\s*([-\w.]+)")
@@ -95,13 +95,13 @@ if PY3:
    BOM_UTF8 = BOM_UTF8.decode()
def detect_encoding(readline):
def detect_encoding(lines):
    """
    The detect_encoding() function is used to detect the encoding that should
    be used to decode a Python source file. It requires one argument, readline,
    in the same way as the tokenize() generator.
    be used to decode a Python source file. It requires one argument, lines,
    an iterable of lines.

    It will call readline a maximum of twice, and return the encoding used
    It will read a maximum of two lines, and return the encoding used
    (as a string) and a list of any lines (left as bytes) it has read
    in.
@@ -113,11 +113,11 @@ def detect_encoding(readline):
    If no encoding is specified, then the default of 'utf-8' will be returned.
    """
    bom_found = False
    encoding = None
    line_iterator = iter(lines)

    def read_or_stop():
        try:
            return readline()
            return next(line_iterator)
        except StopIteration:
            return ''
@@ -166,6 +166,15 @@ def detect_encoding(readline):
    return 'utf-8', [first, second]

def decode_lines(lines):
    source_enc, _ = detect_encoding(lines)
    for line in lines:
        if hasattr(line, "decode"):
            yield line.decode(source_enc)
        else:
            yield line

# }}}
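
The new decode_lines() generator above wraps detect_encoding() so that the call sites in debugger.py no longer duplicate the detect-then-decode loop. A small usage sketch (illustrative only; the pass-through behaviour on already-decoded text lines matches the new test_decode_lines test below):

    from pudb.lowlevel import decode_lines, detect_encoding

    lines = ["# coding=utf-8", "x = 1"]   # e.g. what linecache.getlines() returns
    encoding, _ = detect_encoding(lines)  # reads at most the first two lines
    assert encoding == "utf-8"
    # text lines pass straight through; byte lines would be decoded with `encoding`
    assert list(decode_lines(lines)) == lines
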

test/test_lowlevel.py (new file, 31 lines)

@@ -0,0 +1,31 @@
# -*- coding: utf-8 -*-
from pudb.lowlevel import detect_encoding, decode_lines
from pudb.py3compat import PY3


def test_detect_encoding_nocookie():
    lines = ['Test Проверка']
    encoding, _ = detect_encoding(lines)
    assert encoding == 'utf-8'


def test_detect_encoding_cookie():
    lines = [
        '# coding=utf-8',
        'Test',
        'Проверка'
    ]
    encoding, _ = detect_encoding(lines)
    assert encoding == 'utf-8'


def test_decode_lines():
    lines = [
        '# coding=utf-8',
        'Test',
        'Проверка',
    ]
    if PY3:
        assert lines == list(decode_lines(lines))
    else:
        assert [l.decode('utf-8') for l in lines] == list(decode_lines(lines))

test/test_debugger.py (new file, 36 lines)

@@ -0,0 +1,36 @@
import pytest  # noqa: F401
from pudb.debugger import (NullSourceCodeProvider, FileSourceCodeProvider,
                           DirectSourceCodeProvider)
from pudb.source_view import SourceLine


class TestNullSourceCodeProvider:
    def test_get_lines(self, mocker):
        provider = NullSourceCodeProvider()
        result = provider.get_lines(mocker.Mock())
        assert len(result) == 10
        assert isinstance(result[0], SourceLine)


class TestFileSourceCodeProvider:
    def test_string_file_name(self, mocker):
        mock_debugger = mocker.Mock()
        mock_debugger.canonic = mocker.Mock(return_value='<string>')
        provider = FileSourceCodeProvider(mock_debugger, 'test file name')
        result = provider.get_lines(mocker.MagicMock())
        assert len(result) == 1
        assert isinstance(result[0], SourceLine)

    def test_get_lines(self, mocker):
        provider = FileSourceCodeProvider(mocker.Mock(), 'test file name')
        result = provider.get_lines(mocker.MagicMock())
        assert len(result) == 1
        assert isinstance(result[0], SourceLine)


class TestDirectSourceCodeProvider:
    def test_get_lines(self, mocker):
        provider = DirectSourceCodeProvider(mocker.Mock(), 'test code')
        result = provider.get_lines(mocker.Mock())
        assert len(result) == 1
        assert isinstance(result[0], SourceLine)
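
The tests above rely on the `mocker` fixture, which comes from the pytest-mock plugin (an assumption from the fixture name; that plugin would need to be installed for the tests to run). It wraps unittest.mock, so the providers can be exercised without a real debugger or UI object; a minimal sketch of the pattern:

    # Illustrative only: mocker.Mock() builds a throwaway stand-in, and
    # return_value fixes what a stubbed method hands back.
    def test_sketch(mocker):
        fake_debugger = mocker.Mock()
        fake_debugger.canonic = mocker.Mock(return_value='<string>')
        assert fake_debugger.canonic('any/path.py') == '<string>'
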