Sven Riwoldt
2024-04-01 20:30:24 +02:00
parent fd333f3514
commit c7bc862c6f
6804 changed files with 1065135 additions and 0 deletions


@@ -0,0 +1,9 @@
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2009- Spyder Kernels Contributors
#
# Licensed under the terms of the MIT License
# (see spyder_kernels/__init__.py for details)
# -----------------------------------------------------------------------------
"""Tests."""


@@ -0,0 +1,160 @@
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2009- Spyder Kernels Contributors
#
# Licensed under the terms of the MIT License
# (see spyder_kernels/__init__.py for details)
# -----------------------------------------------------------------------------
"""
Tests for dochelpers.py
"""
# Standard library imports
import os
import sys
# Test library imports
import pytest
# Local imports
from spyder_kernels.utils.dochelpers import (
getargtxt, getargspecfromtext, getdoc, getobj, getsignaturefromtext,
isdefined)
from spyder_kernels.py3compat import PY2
class Test(object):
def method(self, x, y=2):
pass
@pytest.mark.skipif(
PY2 or os.name == 'nt', reason="Only works on Linux and Mac")
@pytest.mark.skipif(
sys.platform == 'darwin' and sys.version_info[:2] == (3, 8),
reason="Fails on Mac with Python 3.8")
def test_dochelpers():
"""Test dochelpers."""
assert getargtxt(Test.method) == ['x, ', 'y=2']
assert not getargtxt(Test.__init__)
assert getdoc(sorted) == {
'note': 'Function of builtins module',
'argspec': '(...)',
'docstring': 'Return a new list containing all items from the '
'iterable in ascending order.\n\nA custom key function '
'can be supplied to customize the sort order, and the\n'
'reverse flag can be set to request the result in '
'descending order.',
'name': 'sorted'
}
assert not getargtxt(sorted)
assert isdefined('numpy.take', force_import=True)
assert isdefined('__import__')
assert not isdefined('zzz', force_import=True)
assert getobj('globals') == 'globals'
assert not getobj('globals().keys')
assert getobj('+scipy.signal.') == 'scipy.signal'
assert getobj('4.') == '4'
@pytest.mark.skipif(PY2, reason="Fails in Python 2")
def test_no_signature():
"""
Test that we can get documentation for objects for which Python can't get a
signature directly because it gives an error.
This is a regression test for issue spyder-ide/spyder#21148
"""
import numpy as np
doc = getdoc(np.where)
signature = doc['argspec']
assert signature and signature != "(...)" and signature.startswith("(")
assert doc['docstring']
@pytest.mark.parametrize(
'text, name, expected',
[
('foo(x, y)', 'foo', '(x, y)'),
('foo(x, y)', '', '(x, y)'),
]
)
def test_getsignaturefromtext_py2(text, name, expected):
assert getsignaturefromtext(text, name) == expected
@pytest.mark.skipif(PY2, reason="Don't work in Python 2")
@pytest.mark.parametrize(
'text, name, expected',
[
# Simple text with and without name
('foo(x, y)', 'foo', '(x, y)'),
('foo(x, y)', '', '(x, y)'),
# Single arg
('foo(x)', '', '(x)'),
('foo(x = {})', '', '(x = {})'),
# Not a valid identifier
('1a(x, y)', '', ''),
# Valid identifier
('a1(x, y=2)', '', '(x, y=2)'),
# Unicode identifier with and without name
('ΣΔ(x, y)', 'ΣΔ', '(x, y)'),
('ΣΔ(x, y)', '', '(x, y)'),
# Multiple signatures in a single line
('ΣΔ(x, y) foo(a, b)', '', '(x, y)'),
('1a(x, y) foo(a, b)', '', '(a, b)'),
# Multiple signatures in multiple lines
('foo(a, b = 1)\n\nΣΔ(x, y=2)', '', '(a, b = 1)'),
('1a(a, b = 1)\n\nΣΔ(x, y=2)', '', '(x, y=2)'),
# Signature after math operations
('2(3 + 5) 3*(99) ΣΔ(x, y)', '', '(x, y)'),
# No identifier
('(x, y)', '', ''),
('foo (a=1, b = 2)', '', ''),
# Empty signature
('foo()', '', ''),
('foo()', 'foo', ''),
]
)
def test_getsignaturefromtext(text, name, expected):
assert getsignaturefromtext(text, name) == expected
def test_multisignature():
"""
Test that we can get at least one signature from an object with multiple
ones declared in its docstring.
"""
def foo():
"""
foo(x, y) foo(a, b)
foo(c, d)
"""
signature = getargspecfromtext(foo.__doc__)
assert signature == "(x, y)"
def test_multiline_signature():
"""
    Test that we can get signatures split across multiple lines in a
    docstring.
"""
def foo():
"""
foo(x,
y)
This is a docstring.
"""
signature = getargspecfromtext(foo.__doc__)
assert signature.startswith("(x, ")
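

# --- Illustrative usage sketch (not part of the original test file) ---------
# The parametrized tests above exercise getsignaturefromtext(text, name),
# which extracts a call signature from free-form text such as a docstring.
# This helper just restates two of the asserted cases in plain form; it is
# not collected by pytest.
def _signature_usage_sketch():
    assert getsignaturefromtext('foo(x, y)', 'foo') == '(x, y)'
    # The name argument is optional; any valid identifier in the text works.
    assert getsignaturefromtext('a1(x, y=2)', '') == '(x, y=2)'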
if __name__ == "__main__":
pytest.main()


@@ -0,0 +1,344 @@
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2009- Spyder Kernels Contributors
#
# Licensed under the terms of the MIT License
# (see spyder_kernels/__init__.py for details)
# -----------------------------------------------------------------------------
"""
Tests for iofuncs.py.
"""
# Standard library imports
import io
import os
import copy
# Third party imports
import pytest
import numpy as np
# Local imports
import spyder_kernels.utils.iofuncs as iofuncs
from spyder_kernels.py3compat import is_text_string, PY2
# Full path to this file's parent directory for loading data
LOCATION = os.path.realpath(os.path.join(os.getcwd(),
os.path.dirname(__file__)))
# =============================================================================
# ---- Helper functions and classes
# =============================================================================
def are_namespaces_equal(actual, expected):
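    """
    Compare two namespace dicts for equality, variable by variable.

    The ValueError fallback handles values (e.g. lists of arrays) whose
    ``==`` comparison cannot be reduced to a single truth value; their items
    are compared one by one with np.all instead.
    """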
if actual is None and expected is None:
return True
are_equal = True
for var in sorted(expected.keys()):
try:
are_equal = are_equal and bool(np.mean(
expected[var] == actual[var]))
except ValueError:
are_equal = are_equal and all(
[np.all(obj1 == obj2) for obj1, obj2 in zip(expected[var],
actual[var])])
print(str(var) + ": " + str(are_equal))
return are_equal
class CustomObj(object):
"""A custom class of objects for testing."""
def __init__(self, data):
self.data = None
if data:
self.data = data
def __eq__(self, other):
return self.__dict__ == other.__dict__
class UnDeepCopyableObj(CustomObj):
"""A class of objects that cannot be deepcopied."""
def __getstate__(self):
raise RuntimeError()
class UnPickleableObj(UnDeepCopyableObj):
"""A class of objects that can deepcopied, but not pickled."""
def __deepcopy__(self, memo):
new_one = self.__class__.__new__(self.__class__)
new_one.__dict__.update(self.__dict__)
return new_one
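

# Illustrative sketch (not part of the original test file): copy.deepcopy()
# and pickle both go through __getstate__ for instances without their own
# __deepcopy__/__reduce__, so raising there breaks both operations.
# UnPickleableObj restores deepcopy support by overriding __deepcopy__, while
# pickling still reaches __getstate__ and keeps failing. Not collected by
# pytest; kept only as a reference for the two classes above.
def _copy_vs_pickle_sketch():
    import copy
    import pickle

    copy.deepcopy(UnPickleableObj("spam"))         # succeeds via __deepcopy__
    with pytest.raises(RuntimeError):
        pickle.dumps(UnPickleableObj("spam"))      # blocked by __getstate__
    with pytest.raises(RuntimeError):
        copy.deepcopy(UnDeepCopyableObj("ham"))    # no __deepcopy__ escape hatch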
# =============================================================================
# ---- Fixtures
# =============================================================================
@pytest.fixture
def spydata_values():
"""
Define spydata file ground truth values.
The file export_data.spydata contains five variables to be loaded.
This fixture declares those variables in a static way.
"""
A = 1
B = 'ham'
C = np.eye(3)
D = {'a': True, 'b': np.eye(4, dtype=np.complex128)}
E = [np.eye(2, dtype=np.int64), 42.0, np.eye(3, dtype=np.bool_), np.eye(4, dtype=object)]
return {'A': A, 'B': B, 'C': C, 'D': D, 'E': E}
@pytest.fixture
def real_values():
"""
    Load a Numpy pickled file.

    The file numpy_data.npz contains six variables, each of which represents
    the expected test values after a manual conversion of the same variables
    defined and evaluated in MATLAB. The manual type conversion covered
    several variable types: Matrices/Vectors, Scalar and Complex numbers,
    Structs, Strings and Cell Arrays. The set of variables was chosen to
    exercise the deep conversion of a compound type, i.e. a struct that
    contains other types needing conversion, such as other structs, matrices
    and Cell Arrays.
"""
path = os.path.join(LOCATION, 'numpy_data.npz')
file_s = np.load(path, allow_pickle=True)
A = file_s['A'].item()
B = file_s['B']
C = file_s['C']
D = file_s['D'].item()
E = file_s['E']
return {'A': A, 'B': B, 'C': C, 'D': D, 'E': E}
@pytest.fixture
def namespace_objects_full(spydata_values):
"""
    Define a dictionary of objects of a variety of different types to be saved.

    This fixture represents the state of the namespace before saving and
    filtering out un-deep-copyable, un-pickleable, and uninteresting objects.
"""
namespace_dict = copy.deepcopy(spydata_values)
namespace_dict['expected_error_string'] = (
'Some objects could not be saved: '
'undeepcopyable_instance, unpickleable_instance')
namespace_dict['module_obj'] = io
namespace_dict['class_obj'] = Exception
namespace_dict['function_obj'] = os.path.join
namespace_dict['unpickleable_instance'] = UnPickleableObj("spam")
namespace_dict['undeepcopyable_instance'] = UnDeepCopyableObj("ham")
namespace_dict['custom_instance'] = CustomObj("eggs")
return namespace_dict
@pytest.fixture
def namespace_objects_filtered(spydata_values):
"""
    Define a dictionary of the objects from the namespace that can be saved.

    This fixture represents the state of the namespace after saving and
    filtering out un-deep-copyable, un-pickleable, and uninteresting objects.
"""
namespace_dict = copy.deepcopy(spydata_values)
namespace_dict['custom_instance'] = CustomObj("eggs")
return namespace_dict
@pytest.fixture
def namespace_objects_nocopyable():
"""
    Define a dictionary of objects that cannot be deepcopied.
"""
namespace_dict = {}
namespace_dict['expected_error_string'] = 'No supported objects to save'
namespace_dict['class_obj'] = Exception
namespace_dict['undeepcopyable_instance'] = UnDeepCopyableObj("ham")
return namespace_dict
@pytest.fixture
def namespace_objects_nopickleable():
"""
Define a dictionary of objects that cannot be pickled.
"""
namespace_dict = {}
namespace_dict['expected_error_string'] = 'No supported objects to save'
namespace_dict['function_obj'] = os.path.join
namespace_dict['unpickleable_instance'] = UnPickleableObj("spam")
return namespace_dict
@pytest.fixture
def input_namespace(request):
if request.param is None:
return None
else:
return request.getfixturevalue(request.param)
@pytest.fixture
def expected_namespace(request):
if request.param is None:
return None
else:
return request.getfixturevalue(request.param)
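

# Note: the two fixtures above are parametrized indirectly. test_spydata_export
# passes fixture *names* through @pytest.mark.parametrize(..., indirect=...),
# and request.getfixturevalue() resolves each name (or None) to the
# corresponding namespace dictionary at test setup time.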
# =============================================================================
# ---- Tests
# =============================================================================
def test_npz_import():
"""
Test the load of .npz files as dictionaries.
"""
filename = os.path.join(LOCATION, 'import_data.npz')
data = iofuncs.load_array(filename)
assert isinstance(data, tuple)
variables, error = data
assert variables['val1'] == np.array(1) and not error
@pytest.mark.skipif(iofuncs.load_matlab is None, reason="SciPy required")
def test_matlab_import(real_values):
"""
    Test the automatic conversion and import of variables from MATLAB.

    This test loads a file saved from MATLAB whose variables are equivalent
    to the manually converted values defined over Numpy. It evaluates the
    function that performs the conversion automatically, i.e. the automatic
    conversion results should equal the manual conversion of the variables.
"""
path = os.path.join(LOCATION, 'data.mat')
inf, _ = iofuncs.load_matlab(path)
valid = True
for var in sorted(real_values.keys()):
valid = valid and bool(np.mean(real_values[var] == inf[var]))
assert valid
@pytest.mark.skipif(PY2, reason="Fails on Python 2")
@pytest.mark.parametrize('spydata_file_name', ['export_data.spydata',
'export_data_renamed.spydata'])
def test_spydata_import(spydata_file_name, spydata_values):
"""
Test spydata handling and variable importing.
This test loads all the variables contained inside a spydata tar
container and compares them against their static values.
It tests both a file with the original name, and one that has been renamed
    in order to catch Issue #9.
"""
path = os.path.join(LOCATION, spydata_file_name)
data, error = iofuncs.load_dictionary(path)
assert error is None
assert are_namespaces_equal(data, spydata_values)
def test_spydata_import_witherror():
"""
    Test that import fails gracefully with a function not in the namespace.
Checks that the error is caught, the message is passed back,
and the current working directory is restored afterwards.
"""
original_cwd = os.getcwd()
path = os.path.join(LOCATION, 'export_data_withfunction.spydata')
data, error = iofuncs.load_dictionary(path)
assert error and is_text_string(error)
assert data is None
assert os.getcwd() == original_cwd
def test_spydata_import_missing_file():
"""
Test that import fails properly when file is missing, and resets the cwd.
"""
original_cwd = os.getcwd()
path = os.path.join(LOCATION, 'non_existant_path_2019-01-23.spydata')
try:
iofuncs.load_dictionary(path)
except IOError:
pass
else:
# Fail if exception did not occur when it should
assert False
assert os.getcwd() == original_cwd
@pytest.mark.skipif(iofuncs.load_matlab is None, reason="SciPy required")
def test_matlabstruct():
"""Test support for matlab stlye struct."""
a = iofuncs.MatlabStruct()
a.b = 'spam'
assert a["b"] == 'spam'
a.c["d"] = 'eggs'
assert a.c.d == 'eggs'
assert a == {'c': {'d': 'eggs'}, 'b': 'spam'}
a['d'] = [1, 2, 3]
buf = io.BytesIO()
iofuncs.save_matlab(a, buf)
buf.seek(0)
data, error = iofuncs.load_matlab(buf)
assert error is None
assert data['b'] == 'spam'
assert data['c'].d == 'eggs'
assert data['d'].tolist() == [[1, 2, 3]]
@pytest.mark.parametrize('input_namespace,expected_namespace,filename', [
('spydata_values', 'spydata_values', 'export_data_copy'),
('namespace_objects_full', 'namespace_objects_filtered', 'export_data_2'),
('namespace_objects_nocopyable', None, 'export_data_none_1'),
('namespace_objects_nopickleable', None, 'export_data_none_2'),
], indirect=['input_namespace', 'expected_namespace'])
def test_spydata_export(input_namespace, expected_namespace,
filename):
"""
Test spydata export and re-import.
This test saves the variables in ``spydata`` format and then
reloads and checks them to make sure they save/restore properly
and no errors occur during the process.
"""
path = os.path.join(LOCATION, filename + '.spydata')
expected_error = None
if 'expected_error_string' in input_namespace:
expected_error = input_namespace['expected_error_string']
del input_namespace['expected_error_string']
cwd_original = os.getcwd()
try:
export_error = iofuncs.save_dictionary(input_namespace, path)
assert export_error == expected_error
if expected_namespace is None:
assert not os.path.isfile(path)
else:
data_actual, import_error = iofuncs.load_dictionary(path)
assert import_error is None
print(data_actual.keys())
print(expected_namespace.keys())
assert are_namespaces_equal(data_actual, expected_namespace)
assert cwd_original == os.getcwd()
finally:
if os.path.isfile(path):
try:
os.remove(path)
except (IOError, OSError, PermissionError):
pass
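

# Illustrative usage sketch (not part of the original test file): the round
# trip exercised by test_spydata_export, in its simplest form. The file name
# and variables below are made up for the example.
#
#     error = iofuncs.save_dictionary({'A': 1, 'B': 'ham'}, 'demo.spydata')
#     data, load_error = iofuncs.load_dictionary('demo.spydata')
#     # On success both error values are None and data == {'A': 1, 'B': 'ham'}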
if __name__ == "__main__":
pytest.main()


@@ -0,0 +1,40 @@
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2009- Spyder Kernels Contributors
#
# Licensed under the terms of the MIT License
# (see spyder_kernels/__init__.py for details)
# -----------------------------------------------------------------------------
import pytest
from spyder_kernels.utils.lazymodules import LazyModule, FakeObject
def test_non_existent_module():
"""Test that we retun FakeObject's for non-existing modules."""
mod = LazyModule('no_module', second_level_attrs=['a'])
# First level attributes must return FakeObject
assert mod.foo is FakeObject
# Second level attributes in second_level_attrs should return
# FakeObject too.
assert mod.foo.a is FakeObject
# Other second level attributes should raise an error.
with pytest.raises(AttributeError):
mod.foo.b
def test_existing_modules():
"""Test that lazy modules work for existing modules."""
np = LazyModule('numpy')
import numpy
# Both the lazy and actual modules should return the same.
assert np.ndarray == numpy.ndarray
# The lazy module should have these extra attributes
assert np.__spy_mod__
assert np.__spy_modname__
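

# Illustrative sketch (not part of the original test file): the behavior the
# two tests above rely on -- defer the real import until first attribute
# access and fall back to a placeholder when the import fails -- can be
# approximated as below. LazyModule and FakeObject themselves live in
# spyder_kernels.utils.lazymodules; this is a simplified stand-in, not their
# actual implementation.
class _FakePlaceholder:
    """Stands in for any attribute when the real module is unavailable."""


class _LazyModuleSketch:
    def __init__(self, name):
        self._name = name
        self._module = None

    def __getattr__(self, attr):
        # Import lazily on first attribute access; fall back to the
        # placeholder class when the module is missing.
        if self._module is None:
            import importlib
            try:
                self._module = importlib.import_module(self._name)
            except ImportError:
                return _FakePlaceholder
        return getattr(self._module, attr)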


@@ -0,0 +1,458 @@
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2009- Spyder Kernels Contributors
#
# Licensed under the terms of the MIT License
# (see spyder_kernels/__init__.py for details)
# -----------------------------------------------------------------------------
"""
Tests for utils.py
"""
# Standard library imports
from collections import defaultdict
import datetime
import sys
# Third party imports
import numpy as np
import pandas as pd
import pytest
import xarray as xr
import PIL.Image
# Local imports
from spyder_kernels.py3compat import PY2
from spyder_kernels.utils.nsview import (
sort_against, is_supported, value_to_display, get_size,
get_supported_types, get_type_string, get_numpy_type_string,
is_editable_type)
def generate_complex_object():
"""Taken from issue #4221."""
bug = defaultdict(list)
for i in range(50000):
a = {j:np.random.rand(10) for j in range(10)}
bug[i] = a
return bug
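

# Module-level objects shared by the display tests below: a large nested
# defaultdict, a small DataFrame and an xarray Dataset.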
COMPLEX_OBJECT = generate_complex_object()
DF = pd.DataFrame([1,2,3])
DATASET = xr.Dataset({0: pd.DataFrame([1,2]), 1:pd.DataFrame([3,4])})
# --- Tests
# -----------------------------------------------------------------------------
def test_get_size():
"""Test that the size of all values is returned correctly"""
class RecursionClassNoLen():
def __getattr__(self, name):
if name=='size': return self.name
else:
return super(object, self).__getattribute__(name)
length = [list([1,2,3]), tuple([1,2,3]), set([1,2,3]), '123',
{1:1, 2:2, 3:3}]
for obj in length:
assert get_size(obj) == 3
df = pd.DataFrame([[1,2,3], [1,2,3]])
assert get_size(df) == (2, 3)
df = pd.Series([1,2,3])
assert get_size(df) == (3,)
df = pd.Index([1,2,3])
assert get_size(df) == (3,)
arr = np.array([[1,2,3], [1,2,3]], dtype=np.complex128)
assert get_size(arr) == (2, 3)
img = PIL.Image.new('RGB', (256,256))
assert get_size(img) == (256,256)
obj = RecursionClassNoLen()
assert get_size(obj) == 1
def test_sort_against():
lista = [5, 6, 7]
listb = [2, 3, 1]
res = sort_against(lista, listb)
assert res == [7, 5, 6]
def test_sort_against_is_stable():
lista = [3, 0, 1]
listb = [1, 1, 1]
res = sort_against(lista, listb)
assert res == lista
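

# Illustrative sketch (not part of the original test file): sort_against, as
# exercised above, reorders one list by the values of a second, equal-length
# list, keeping the original order for ties. A simplified equivalent (the
# real implementation lives in spyder_kernels.utils.nsview):
def _sort_against_sketch(lista, listb):
    # Python's sorted() is stable, so equal keys keep their relative order.
    order = sorted(range(len(lista)), key=lambda i: listb[i])
    return [lista[i] for i in order]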
def test_none_values_are_supported():
"""Tests that None values are displayed by default"""
supported_types = get_supported_types()
mode = 'editable'
none_var = None
none_list = [2, None, 3, None]
none_dict = {'a': None, 'b': 4}
none_tuple = (None, [3, None, 4], 'eggs')
assert is_supported(none_var, filters=tuple(supported_types[mode]))
assert is_supported(none_list, filters=tuple(supported_types[mode]))
assert is_supported(none_dict, filters=tuple(supported_types[mode]))
assert is_supported(none_tuple, filters=tuple(supported_types[mode]))
def test_str_subclass_display():
"""Test for value_to_display of subclasses of str/basestring."""
class Test(str):
def __repr__(self):
return 'test'
value = Test()
value_display = value_to_display(value)
assert 'Test object' in value_display
def test_default_display():
"""Tests for default_display."""
# Display of defaultdict
assert (value_to_display(COMPLEX_OBJECT) ==
'defaultdict object of collections module')
# Display of array of COMPLEX_OBJECT
assert (value_to_display(np.array(COMPLEX_OBJECT)) ==
'ndarray object of numpy module')
# Display of Dataset
assert (value_to_display(DATASET) ==
'Dataset object of xarray.core.dataset module')
@pytest.mark.skipif(
sys.platform == 'darwin' and sys.version_info[:2] == (3, 8),
reason="Fails on Mac with Python 3.8")
def test_list_display():
"""Tests for display of lists."""
long_list = list(range(100))
# Simple list
assert value_to_display([1, 2, 3]) == '[1, 2, 3]'
# Long list
assert (value_to_display(long_list) ==
'[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, ...]')
# Short list of lists
assert (value_to_display([long_list] * 3) ==
'[[0, 1, 2, 3, 4, ...], [0, 1, 2, 3, 4, ...], [0, 1, 2, 3, 4, ...]]')
# Long list of lists
result = '[' + ''.join('[0, 1, 2, 3, 4, ...], '*10)[:-2] + ']'
assert value_to_display([long_list] * 10) == result[:70] + ' ...'
# Multiple level lists
assert (value_to_display([[1, 2, 3, [4], 5]] + long_list) ==
'[[1, 2, 3, [...], 5], 0, 1, 2, 3, 4, 5, 6, 7, 8, ...]')
assert value_to_display([1, 2, [DF]]) == '[1, 2, [Dataframe]]'
assert value_to_display([1, 2, [[DF], DATASET]]) == '[1, 2, [[...], Dataset]]'
# List of complex object
assert value_to_display([COMPLEX_OBJECT]) == '[defaultdict]'
# List of composed objects
li = [COMPLEX_OBJECT, DATASET, 1, {1:2, 3:4}, DF]
result = '[defaultdict, Dataset, 1, {1:2, 3:4}, Dataframe]'
assert value_to_display(li) == result
# List starting with a non-supported object (#5313)
supported_types = tuple(get_supported_types()['editable'])
li = [len, 1]
assert value_to_display(li) == '[builtin_function_or_method, 1]'
assert is_supported(li, filters=supported_types)
@pytest.mark.skipif(
sys.platform == 'darwin' and sys.version_info[:2] == (3, 8),
reason="Fails on Mac with Python 3.8")
def test_dict_display():
"""Tests for display of dicts."""
long_list = list(range(100))
long_dict = dict(zip(list(range(100)), list(range(100))))
# Simple dict
assert value_to_display({0:0, 'a':'b'}) == "{0:0, 'a':'b'}"
# Long dict
assert (value_to_display(long_dict) ==
'{0:0, 1:1, 2:2, 3:3, 4:4, 5:5, 6:6, 7:7, 8:8, 9:9, ...}')
# Short list of lists
assert (value_to_display({1:long_dict, 2:long_dict}) ==
'{1:{0:0, 1:1, 2:2, 3:3, 4:4, ...}, 2:{0:0, 1:1, 2:2, 3:3, 4:4, ...}}')
# Long dict of dicts
result = ('{(0, 0, 0, 0, 0, ...):[0, 1, 2, 3, 4, ...], '
'(1, 1, 1, 1, 1, ...):[0, 1, 2, 3, 4, ...]}')
assert value_to_display({(0,)*100:long_list, (1,)*100:long_list}) == result[:70] + ' ...'
# Multiple level dicts
assert (value_to_display({0: {1:1, 2:2, 3:3, 4:{0:0}, 5:5}, 1:1}) ==
'{0:{1:1, 2:2, 3:3, 4:{...}, 5:5}, 1:1}')
assert value_to_display({0:0, 1:1, 2:2, 3:DF}) == '{0:0, 1:1, 2:2, 3:Dataframe}'
assert value_to_display({0:0, 1:1, 2:[[DF], DATASET]}) == '{0:0, 1:1, 2:[[...], Dataset]}'
# Dict of complex object
assert value_to_display({0:COMPLEX_OBJECT}) == '{0:defaultdict}'
# Dict of composed objects
li = {0:COMPLEX_OBJECT, 1:DATASET, 2:2, 3:{0:0, 1:1}, 4:DF}
result = '{0:defaultdict, 1:Dataset, 2:2, 3:{0:0, 1:1}, 4:Dataframe}'
assert value_to_display(li) == result
# Dict starting with a non-supported object (#5313)
supported_types = tuple(get_supported_types()['editable'])
di = {max: len, 1: 1}
assert value_to_display(di) in (
'{builtin_function_or_method:builtin_function_or_method, 1:1}',
'{1:1, builtin_function_or_method:builtin_function_or_method}')
assert is_supported(di, filters=supported_types)
def test_set_display():
"""Tests for display of sets."""
long_set = {i for i in range(100)}
# Simple set
assert value_to_display({1, 2, 3}) == '{1, 2, 3}'
# Long set
disp = '{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, ...}'
assert value_to_display(long_set) == disp
# Short list of sets
disp = '[{0, 1, 2, 3, 4, ...}, {0, 1, 2, 3, 4, ...}, {0, 1, 2, 3, 4, ...}]'
assert value_to_display([long_set] * 3) == disp
# Long list of sets
disp = '[' + ''.join('{0, 1, 2, 3, 4, ...}, '*10)[:-2] + ']'
assert value_to_display([long_set] * 10) == disp[:70] + ' ...'
def test_datetime_display():
"""Simple tests that dates, datetimes and timedeltas display correctly."""
test_date = datetime.date(2017, 12, 18)
test_date_2 = datetime.date(2017, 2, 2)
test_datetime = datetime.datetime(2017, 12, 18, 13, 43, 2)
test_datetime_2 = datetime.datetime(2017, 8, 18, 0, 41, 27)
test_timedelta = datetime.timedelta(-1, 2000)
test_timedelta_2 = datetime.timedelta(0, 3600)
# Simple dates/datetimes/timedeltas
assert value_to_display(test_date) == '2017-12-18'
assert value_to_display(test_datetime) == '2017-12-18 13:43:02'
assert value_to_display(test_timedelta) == '-1 day, 0:33:20'
# Lists of dates/datetimes/timedeltas
assert (value_to_display([test_date, test_date_2]) ==
'[2017-12-18, 2017-02-02]')
assert (value_to_display([test_datetime, test_datetime_2]) ==
'[2017-12-18 13:43:02, 2017-08-18 00:41:27]')
assert (value_to_display([test_timedelta, test_timedelta_2]) ==
'[-1 day, 0:33:20, 1:00:00]')
# Tuple of dates/datetimes/timedeltas
assert (value_to_display((test_date, test_datetime, test_timedelta)) ==
'(2017-12-18, 2017-12-18 13:43:02, -1 day, 0:33:20)')
# Dict of dates/datetimes/timedeltas
assert (value_to_display({0: test_date,
1: test_datetime,
2: test_timedelta_2}) ==
("{0:2017-12-18, 1:2017-12-18 13:43:02, 2:1:00:00}"))
def test_str_in_container_display():
"""Test that strings are displayed correctly inside lists or dicts."""
# Assert that both bytes and unicode return the right display
assert value_to_display([b'a', u'b']) == "['a', 'b']"
    # Encoding unicode gives bytes that can't be decoded back to unicode
    # here, so this tests the except branch of is_binary_string(value) in
    # value_to_display.
if PY2:
assert value_to_display([u'Э'.encode('cp1251')]) == "['\xdd']"
def test_ellipses(tmpdir):
"""
    Test that we add a binary ellipsis when the value_to_display of
    a collection is too long and binary.

    For issue 6942.
"""
# Create binary file with all bytes
file = tmpdir.new(basename='bytes.txt')
file.write_binary(bytearray(list(range(255))))
# Read bytes back
buffer = file.read(mode='rb')
# Assert that there's a binary ellipses in the representation
assert b' ...' in value_to_display(buffer)
def test_get_type_string():
"""Test for get_type_string."""
# Bools
assert get_type_string(True) == 'bool'
# Numeric types (PY2 has long, which disappeared in PY3)
if not PY2:
expected = ['int', 'float', 'complex']
numeric_types = [1, 1.5, 1 + 2j]
assert [get_type_string(t) for t in numeric_types] == expected
# Lists
assert get_type_string([1, 2, 3]) == 'list'
# Sets
assert get_type_string({1, 2, 3}) == 'set'
# Dictionaries
assert get_type_string({'a': 1, 'b': 2}) == 'dict'
# Tuples
assert get_type_string((1, 2, 3)) == 'tuple'
# Strings
if not PY2:
assert get_type_string('foo') == 'str'
# Numpy objects
assert get_type_string(np.array([1, 2, 3])) == 'NDArray'
masked_array = np.ma.MaskedArray([1, 2, 3], mask=[True, False, True])
assert get_type_string(masked_array) == 'MaskedArray'
matrix = np.matrix([[1, 2], [3, 4]])
assert get_type_string(matrix) == 'Matrix'
# Pandas objects
df = pd.DataFrame([1, 2, 3])
assert get_type_string(df) == 'DataFrame'
series = pd.Series([1, 2, 3])
assert get_type_string(series) == 'Series'
index = pd.Index([1, 2, 3])
assert get_type_string(index) in ['Int64Index', 'Index']
# PIL images
img = PIL.Image.new('RGB', (256,256))
assert get_type_string(img) == 'PIL.Image.Image'
# Datetime objects
date = datetime.date(2010, 10, 1)
assert get_type_string(date) == 'datetime.date'
date = datetime.timedelta(-1, 2000)
assert get_type_string(date) == 'datetime.timedelta'
def test_is_editable_type():
"""Test for get_type_string."""
# Bools
assert is_editable_type(True)
# Numeric type
numeric_types = [1, 1.5, 1 + 2j]
assert all([is_editable_type(t) for t in numeric_types])
# Lists
assert is_editable_type([1, 2, 3])
# Sets
assert is_editable_type({1, 2, 3})
# Dictionaries
assert is_editable_type({'a': 1, 'b': 2})
# Tuples
assert is_editable_type((1, 2, 3))
# Strings
assert is_editable_type('foo')
# Numpy objects
assert is_editable_type(np.array([1, 2, 3]))
masked_array = np.ma.MaskedArray([1, 2, 3], mask=[True, False, True])
assert is_editable_type(masked_array)
matrix = np.matrix([[1, 2], [3, 4]])
assert is_editable_type(matrix)
# Pandas objects
df = pd.DataFrame([1, 2, 3])
assert is_editable_type(df)
series = pd.Series([1, 2, 3])
assert is_editable_type(series)
index = pd.Index([1, 2, 3])
assert is_editable_type(index)
# PIL images
img = PIL.Image.new('RGB', (256,256))
assert is_editable_type(img)
# Datetime objects
date = datetime.date(2010, 10, 1)
assert is_editable_type(date)
date = datetime.timedelta(-1, 2000)
assert is_editable_type(date)
# Other objects
class MyClass:
a = 1
assert not is_editable_type(MyClass)
my_instance = MyClass()
assert not is_editable_type(my_instance)
def test_get_numpy_type():
"""Test for get_numpy_type_string."""
# Numpy objects
assert get_numpy_type_string(np.array([1, 2, 3])) == 'Array'
matrix = np.matrix([[1, 2], [3, 4]])
assert get_numpy_type_string(matrix) == 'Array'
assert get_numpy_type_string(np.int32(1)) == 'Scalar'
# Regular Python objects
assert get_numpy_type_string(1.5) == 'Unknown'
assert get_numpy_type_string([1, 2, 3]) == 'Unknown'
assert get_numpy_type_string({1: 2}) == 'Unknown'
# PIL images
img = PIL.Image.new('RGB', (256,256))
assert get_numpy_type_string(img) == 'Unknown'
# Pandas objects
df = pd.DataFrame([1, 2, 3])
assert get_numpy_type_string(df) == 'Unknown'
if __name__ == "__main__":
pytest.main()