@@ -0,0 +1,33 @@
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2009- Spyder Kernels Contributors
#
# Licensed under the terms of the MIT License
# (see spyder_kernels/__init__.py for details)
# -----------------------------------------------------------------------------

"""
MIT License

Copyright (c) 2009- Spyder Kernels Contributors (see AUTHORS.txt)

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

from ._version import __version__
@@ -0,0 +1,12 @@
#
# -----------------------------------------------------------------------------
# Copyright (c) 2009- Spyder Kernels Contributors
#
# Licensed under the terms of the MIT License
# (see spyder_kernels/__init__.py for details)
# -----------------------------------------------------------------------------

"""Version File."""

VERSION_INFO = (2, 5, 1)
__version__ = '.'.join(map(str, VERSION_INFO))
@@ -0,0 +1,56 @@
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2009- Spyder Kernels Contributors
#
# Licensed under the terms of the MIT License
# (see spyder_kernels/__init__.py for details)
# -----------------------------------------------------------------------------

"""
API to communicate between the Spyder IDE and the Spyder kernel.

It uses Jupyter Comms for messaging. The messages are sent by calling an
arbitrary function, with the limitation that the arguments have to be
picklable. If the function must return, the call must be blocking.

In addition, the frontend can interrupt the kernel to process the message
sent. This allows, for example, setting a breakpoint in pdb while the debugger
is running. The message will only be delivered when the kernel is checking the
event loop, or if pdb is waiting for an input.

Example:

On one side:

```
def hello_str(msg):
    print('Hello ' + msg + '!')


def add(a, b):
    return a + b


left_comm.register_call_handler('add_numbers', add)
left_comm.register_call_handler('print_hello', hello_str)
```

On the other:

```
right_comm.remote_call().print_hello('world')
res = right_comm.remote_call(blocking=True).add_numbers(1, 2)
print('1 + 2 = ' + str(res))
```

Which prints on the left side (the one with the `left_comm`):

```
Hello world!
```

And on the right side:

```
1 + 2 = 3
```
"""

from spyder_kernels.comms.commbase import CommError
@@ -0,0 +1,558 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright © Spyder Project Contributors
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder/__init__.py for details)
|
||||
|
||||
"""
|
||||
Class that handles communications between Spyder kernel and frontend.
|
||||
|
||||
Comms transmit data in a list of buffers, and in a json-able dictionnary.
|
||||
Here, we only support a buffer list with a single element.
|
||||
|
||||
The messages exchanged have the following msg_dict:
|
||||
|
||||
```
|
||||
msg_dict = {
|
||||
'spyder_msg_type': spyder_msg_type,
|
||||
'content': content,
|
||||
}
|
||||
```
|
||||
|
||||
The buffer is generated by cloudpickle using `PICKLE_PROTOCOL = 2`.
|
||||
|
||||
To simplify the usage of messaging, we use a higher level function calling
|
||||
mechanism:
|
||||
- The `remote_call` method returns a RemoteCallHandler object
|
||||
- By calling an attribute of this object, the call is sent to the other
|
||||
side of the comm.
|
||||
- If the `_wait_reply` is implemented, remote_call can be called with
|
||||
`blocking=True`, which will wait for a reply sent by the other side.
|
||||
|
||||
The messages exchanged are:
|
||||
- Function call (spyder_msg_type = 'remote_call'):
|
||||
- The content is a dictionnary {
|
||||
'call_name': The name of the function to be called,
|
||||
'call_id': uuid to match the request to a potential reply,
|
||||
'settings': A dictionnary of settings,
|
||||
}
|
||||
- The buffer encodes a dictionnary {
|
||||
'call_args': The function args,
|
||||
'call_kwargs': The function kwargs,
|
||||
}
|
||||
- If the 'settings' has `'blocking' = True`, a reply is sent.
|
||||
(spyder_msg_type = 'remote_call_reply'):
|
||||
- The buffer contains the return value of the function.
|
||||
- The 'content' is a dict with: {
|
||||
'is_error': a boolean indicating if the return value is an
|
||||
exception to be raised.
|
||||
'call_id': The uuid from above,
|
||||
'call_name': The function name (mostly for debugging)
|
||||
}
|
||||
"""
|
||||
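# A minimal sketch (not part of the original module) of what a 'remote_call'
# message for `add_numbers(1, 2)` would look like on the wire, assuming the
# protocol described in the docstring above. Field values are illustrative:
#
#     msg_dict = {
#         'spyder_msg_type': 'remote_call',
#         'content': {
#             'call_name': 'add_numbers',
#             'call_id': uuid.uuid4().hex,
#             'settings': {'blocking': True, 'send_reply': True},
#         },
#     }
#     buffers = [cloudpickle.dumps(
#         {'call_args': (1, 2), 'call_kwargs': {}}, protocol=2)]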
from __future__ import print_function
|
||||
|
||||
import cloudpickle
|
||||
import pickle
|
||||
import logging
|
||||
import sys
|
||||
import uuid
|
||||
import traceback
|
||||
|
||||
from spyder_kernels.py3compat import PY2, PY3
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# To be able to get and set variables between Python 2 and 3
|
||||
DEFAULT_PICKLE_PROTOCOL = 2
|
||||
|
||||
# Max timeout (in secs) for blocking calls
|
||||
TIMEOUT = 3
|
||||
|
||||
|
||||
class CommError(RuntimeError):
|
||||
pass
|
||||
|
||||
|
||||
class CommsErrorWrapper():
|
||||
def __init__(self, call_name, call_id):
|
||||
self.call_name = call_name
|
||||
self.call_id = call_id
|
||||
self.etype, self.error, tb = sys.exc_info()
|
||||
self.tb = traceback.extract_tb(tb)
|
||||
|
||||
def raise_error(self):
|
||||
"""
|
||||
Raise the error, adding information about the callback.
|
||||
"""
|
||||
# Add the traceback in the error, so it can be handled upstream
|
||||
raise self.etype(self)
|
||||
|
||||
def format_error(self):
|
||||
"""
|
||||
Format the error received from the other side and return a list of
|
||||
strings.
|
||||
"""
|
||||
lines = (['Exception in comms call {}:\n'.format(self.call_name)]
|
||||
+ traceback.format_list(self.tb)
|
||||
+ traceback.format_exception_only(self.etype, self.error))
|
||||
return lines
|
||||
|
||||
def print_error(self, file=None):
|
||||
"""
|
||||
Print the error to file or to sys.stderr if file is None.
|
||||
"""
|
||||
if file is None:
|
||||
file = sys.stderr
|
||||
for line in self.format_error():
|
||||
print(line, file=file)
|
||||
|
||||
def __str__(self):
|
||||
"""Get string representation."""
|
||||
return str(self.error)
|
||||
|
||||
def __repr__(self):
|
||||
"""Get repr."""
|
||||
return repr(self.error)
|
||||
|
||||
|
||||
# Replace sys.excepthook to handle CommsErrorWrapper
|
||||
sys_excepthook = sys.excepthook
|
||||
|
||||
|
||||
def comm_excepthook(type, value, tb):
|
||||
if len(value.args) == 1 and isinstance(value.args[0], CommsErrorWrapper):
|
||||
traceback.print_tb(tb)
|
||||
value.args[0].print_error()
|
||||
return
|
||||
sys_excepthook(type, value, tb)
|
||||
|
||||
|
||||
sys.excepthook = comm_excepthook
|
||||
|
||||
|
||||
class CommBase(object):
|
||||
"""
|
||||
Class with the necessary attributes and methods to handle
|
||||
communications between a kernel and a frontend.
|
||||
Subclasses must open a comm and register it with `self._register_comm`.
|
||||
"""
|
||||
|
||||
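    # Sketch of the subclass contract described above (hypothetical subclass;
    # FrontendComm in frontendcomm.py is the real kernel-side implementation):
    #
    #     class MyComm(CommBase):
    #         def __init__(self, comm):
    #             super(MyComm, self).__init__()
    #             self._register_comm(comm)  # `comm` is an opened Jupyter Comm
    #
    #         def _wait_reply(self, call_id, call_name, timeout):
    #             # Poll until `call_id` shows up in self._reply_inbox,
    #             # raising on timeout, so blocking calls become possible.
    #             ...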
def __init__(self):
|
||||
super(CommBase, self).__init__()
|
||||
self.calling_comm_id = None
|
||||
self._comms = {}
|
||||
# Handlers
|
||||
self._message_handlers = {}
|
||||
self._remote_call_handlers = {}
|
||||
# Lists of reply numbers
|
||||
self._reply_inbox = {}
|
||||
self._reply_waitlist = {}
|
||||
|
||||
self._register_message_handler(
|
||||
'remote_call', self._handle_remote_call)
|
||||
self._register_message_handler(
|
||||
'remote_call_reply', self._handle_remote_call_reply)
|
||||
self.register_call_handler('_set_pickle_protocol',
|
||||
self._set_pickle_protocol)
|
||||
|
||||
def get_comm_id_list(self, comm_id=None):
|
||||
"""Get a list of comms id."""
|
||||
if comm_id is None:
|
||||
id_list = list(self._comms.keys())
|
||||
else:
|
||||
id_list = [comm_id]
|
||||
return id_list
|
||||
|
||||
def close(self, comm_id=None):
|
||||
"""Close the comm and notify the other side."""
|
||||
id_list = self.get_comm_id_list(comm_id)
|
||||
|
||||
for comm_id in id_list:
|
||||
try:
|
||||
self._comms[comm_id]['comm'].close()
|
||||
del self._comms[comm_id]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
def is_open(self, comm_id=None):
|
||||
"""Check to see if the comm is open."""
|
||||
if comm_id is None:
|
||||
return len(self._comms) > 0
|
||||
return comm_id in self._comms
|
||||
|
||||
def is_ready(self, comm_id=None):
|
||||
"""
|
||||
Check to see if the other side replied.
|
||||
|
||||
The check is made with _set_pickle_protocol as this is the first call
|
||||
made. If comm_id is not specified, check all comms.
|
||||
"""
|
||||
id_list = self.get_comm_id_list(comm_id)
|
||||
if len(id_list) == 0:
|
||||
return False
|
||||
return all([self._comms[cid]['status'] == 'ready' for cid in id_list])
|
||||
|
||||
def register_call_handler(self, call_name, handler):
|
||||
"""
|
||||
Register a remote call handler.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
call_name : str
|
||||
The name of the called function.
|
||||
handler : callback
|
||||
A function to handle the request, or `None` to unregister
|
||||
`call_name`.
|
||||
"""
|
||||
if not handler:
|
||||
self._remote_call_handlers.pop(call_name, None)
|
||||
return
|
||||
|
||||
self._remote_call_handlers[call_name] = handler
|
||||
|
||||
def remote_call(self, comm_id=None, callback=None, **settings):
|
||||
"""Get a handler for remote calls."""
|
||||
return RemoteCallFactory(self, comm_id, callback, **settings)
|
||||
|
||||
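    # Usage sketch for remote_call (mirrors the example in
    # spyder_kernels/comms/__init__.py). `some_handler` is a hypothetical name
    # that must have been registered on the other side with
    # register_call_handler('some_handler', func):
    #
    #     result = comm.remote_call(blocking=True).some_handler(1, x=2)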
# ---- Private -----
|
||||
def _send_message(self, spyder_msg_type, content=None, data=None,
|
||||
comm_id=None):
|
||||
"""
|
||||
Publish custom messages to the other side.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
spyder_msg_type: str
|
||||
The spyder message type
|
||||
content: dict
|
||||
The (JSONable) content of the message
|
||||
data: any
|
||||
Any object that is serializable by cloudpickle (should be most
|
||||
things). Will arrive as cloudpickled bytes in `.buffers[0]`.
|
||||
comm_id: int
|
||||
the comm to send to. If None sends to all comms.
|
||||
"""
|
||||
if not self.is_open(comm_id):
|
||||
raise CommError("The comm is not connected.")
|
||||
id_list = self.get_comm_id_list(comm_id)
|
||||
for comm_id in id_list:
|
||||
msg_dict = {
|
||||
'spyder_msg_type': spyder_msg_type,
|
||||
'content': content,
|
||||
'pickle_protocol': self._comms[comm_id]['pickle_protocol'],
|
||||
'python_version': sys.version,
|
||||
}
|
||||
buffers = [cloudpickle.dumps(
|
||||
data, protocol=self._comms[comm_id]['pickle_protocol'])]
|
||||
self._comms[comm_id]['comm'].send(msg_dict, buffers=buffers)
|
||||
|
||||
def _set_pickle_protocol(self, protocol):
|
||||
"""Set the pickle protocol used to send data."""
|
||||
protocol = min(protocol, pickle.HIGHEST_PROTOCOL)
|
||||
self._comms[self.calling_comm_id]['pickle_protocol'] = protocol
|
||||
self._comms[self.calling_comm_id]['status'] = 'ready'
|
||||
|
||||
@property
|
||||
def _comm_name(self):
|
||||
"""
|
||||
Get the name used for the underlying comms.
|
||||
"""
|
||||
return 'spyder_api'
|
||||
|
||||
def _register_message_handler(self, message_id, handler):
|
||||
"""
|
||||
Register a message handler.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
message_id : str
|
||||
The identifier for the message
|
||||
handler : callback
|
||||
A function to handle the message. This is called with 2 arguments:
|
||||
- msg_dict: A dictionary with message information.
|
||||
- buffer: The data transmitted in the buffer
|
||||
Pass None to unregister the message_id
|
||||
"""
|
||||
if handler is None:
|
||||
self._message_handlers.pop(message_id, None)
|
||||
return
|
||||
|
||||
self._message_handlers[message_id] = handler
|
||||
|
||||
def _register_comm(self, comm):
|
||||
"""
|
||||
Open a new comm to the kernel.
|
||||
"""
|
||||
comm.on_msg(self._comm_message)
|
||||
comm.on_close(self._comm_close)
|
||||
self._comms[comm.comm_id] = {
|
||||
'comm': comm,
|
||||
'pickle_protocol': DEFAULT_PICKLE_PROTOCOL,
|
||||
'status': 'opening',
|
||||
}
|
||||
|
||||
def _comm_close(self, msg):
|
||||
"""Close comm."""
|
||||
comm_id = msg['content']['comm_id']
|
||||
del self._comms[comm_id]
|
||||
|
||||
def _comm_message(self, msg):
|
||||
"""
|
||||
Handle internal spyder messages.
|
||||
"""
|
||||
self.calling_comm_id = msg['content']['comm_id']
|
||||
|
||||
# Get message dict
|
||||
msg_dict = msg['content']['data']
|
||||
|
||||
# Load the buffer. Only one is supported.
|
||||
try:
|
||||
if PY3:
|
||||
# https://docs.python.org/3/library/pickle.html#pickle.loads
|
||||
# Using encoding='latin1' is required for unpickling
|
||||
# NumPy arrays and instances of datetime, date and time
|
||||
# pickled by Python 2.
|
||||
buffer = cloudpickle.loads(msg['buffers'][0],
|
||||
encoding='latin-1')
|
||||
else:
|
||||
buffer = cloudpickle.loads(msg['buffers'][0])
|
||||
except Exception as e:
|
||||
logger.debug(
|
||||
"Exception in cloudpickle.loads : %s" % str(e))
|
||||
buffer = CommsErrorWrapper(
|
||||
msg_dict['content']['call_name'],
|
||||
msg_dict['content']['call_id'])
|
||||
|
||||
msg_dict['content']['is_error'] = True
|
||||
|
||||
spyder_msg_type = msg_dict['spyder_msg_type']
|
||||
|
||||
if spyder_msg_type in self._message_handlers:
|
||||
self._message_handlers[spyder_msg_type](
|
||||
msg_dict, buffer)
|
||||
else:
|
||||
logger.debug("No such spyder message type: %s" % spyder_msg_type)
|
||||
|
||||
def _handle_remote_call(self, msg, buffer):
|
||||
"""Handle a remote call."""
|
||||
msg_dict = msg['content']
|
||||
self.on_incoming_call(msg_dict)
|
||||
try:
|
||||
return_value = self._remote_callback(
|
||||
msg_dict['call_name'],
|
||||
buffer['call_args'],
|
||||
buffer['call_kwargs'])
|
||||
self._set_call_return_value(msg_dict, return_value)
|
||||
except Exception:
|
||||
exc_infos = CommsErrorWrapper(
|
||||
msg_dict['call_name'], msg_dict['call_id'])
|
||||
self._set_call_return_value(msg_dict, exc_infos, is_error=True)
|
||||
|
||||
def _remote_callback(self, call_name, call_args, call_kwargs):
|
||||
"""Call the callback function for the remote call."""
|
||||
if call_name in self._remote_call_handlers:
|
||||
return self._remote_call_handlers[call_name](
|
||||
*call_args, **call_kwargs)
|
||||
|
||||
raise CommError("No such spyder call type: %s" % call_name)
|
||||
|
||||
def _set_call_return_value(self, call_dict, data, is_error=False):
|
||||
"""
|
||||
A remote call has just been processed.
|
||||
|
||||
This will reply if settings['blocking'] == True
|
||||
"""
|
||||
settings = call_dict['settings']
|
||||
|
||||
display_error = ('display_error' in settings and
|
||||
settings['display_error'])
|
||||
if is_error and display_error:
|
||||
data.print_error()
|
||||
|
||||
send_reply = 'send_reply' in settings and settings['send_reply']
|
||||
if not send_reply:
|
||||
# Nothing to send back
|
||||
return
|
||||
content = {
|
||||
'is_error': is_error,
|
||||
'call_id': call_dict['call_id'],
|
||||
'call_name': call_dict['call_name']
|
||||
}
|
||||
|
||||
self._send_message('remote_call_reply', content=content, data=data,
|
||||
comm_id=self.calling_comm_id)
|
||||
|
||||
def _register_call(self, call_dict, callback=None):
|
||||
"""
|
||||
Register the call so the reply can be properly treated.
|
||||
"""
|
||||
settings = call_dict['settings']
|
||||
blocking = 'blocking' in settings and settings['blocking']
|
||||
call_id = call_dict['call_id']
|
||||
if blocking or callback is not None:
|
||||
self._reply_waitlist[call_id] = blocking, callback
|
||||
|
||||
def on_outgoing_call(self, call_dict):
|
||||
"""A message is about to be sent"""
|
||||
call_dict["pickle_highest_protocol"] = pickle.HIGHEST_PROTOCOL
|
||||
return call_dict
|
||||
|
||||
def on_incoming_call(self, call_dict):
|
||||
"""A call was received"""
|
||||
if "pickle_highest_protocol" in call_dict:
|
||||
self._set_pickle_protocol(call_dict["pickle_highest_protocol"])
|
||||
|
||||
def _get_call_return_value(self, call_dict, call_data, comm_id):
|
||||
"""
|
||||
Send a remote call and return the reply.
|
||||
|
||||
If settings['blocking'] == True, this will wait for a reply and return
|
||||
the replied value.
|
||||
"""
|
||||
call_dict = self.on_outgoing_call(call_dict)
|
||||
self._send_message(
|
||||
'remote_call', content=call_dict, data=call_data,
|
||||
comm_id=comm_id)
|
||||
|
||||
settings = call_dict['settings']
|
||||
|
||||
blocking = 'blocking' in settings and settings['blocking']
|
||||
|
||||
if not blocking:
|
||||
return
|
||||
|
||||
call_id = call_dict['call_id']
|
||||
call_name = call_dict['call_name']
|
||||
|
||||
# Wait for the blocking call
|
||||
if 'timeout' in settings and settings['timeout'] is not None:
|
||||
timeout = settings['timeout']
|
||||
else:
|
||||
timeout = TIMEOUT
|
||||
|
||||
self._wait_reply(call_id, call_name, timeout)
|
||||
|
||||
reply = self._reply_inbox.pop(call_id)
|
||||
|
||||
if reply['is_error']:
|
||||
return self._sync_error(reply['value'])
|
||||
|
||||
return reply['value']
|
||||
|
||||
def _wait_reply(self, call_id, call_name, timeout):
|
||||
"""
|
||||
Wait for the other side's reply.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def _handle_remote_call_reply(self, msg_dict, buffer):
|
||||
"""
|
||||
A blocking call received a reply.
|
||||
"""
|
||||
content = msg_dict['content']
|
||||
call_id = content['call_id']
|
||||
call_name = content['call_name']
|
||||
is_error = content['is_error']
|
||||
|
||||
# Unexpected reply
|
||||
if call_id not in self._reply_waitlist:
|
||||
if is_error:
|
||||
return self._async_error(buffer)
|
||||
else:
|
||||
logger.debug('Got an unexpected reply {}, id:{}'.format(
|
||||
call_name, call_id))
|
||||
return
|
||||
|
||||
blocking, callback = self._reply_waitlist.pop(call_id)
|
||||
|
||||
# Async error
|
||||
if is_error and not blocking:
|
||||
return self._async_error(buffer)
|
||||
|
||||
# Callback
|
||||
if callback is not None and not is_error:
|
||||
callback(buffer)
|
||||
|
||||
# Blocking inbox
|
||||
if blocking:
|
||||
self._reply_inbox[call_id] = {
|
||||
'is_error': is_error,
|
||||
'value': buffer,
|
||||
'content': content
|
||||
}
|
||||
|
||||
def _async_error(self, error_wrapper):
|
||||
"""
|
||||
Handle an error that was raised on the other side asynchronously.
|
||||
"""
|
||||
error_wrapper.print_error()
|
||||
|
||||
def _sync_error(self, error_wrapper):
|
||||
"""
|
||||
Handle an error that was raised on the other side synchronously.
|
||||
"""
|
||||
error_wrapper.raise_error()
|
||||
|
||||
|
||||
class RemoteCallFactory(object):
|
||||
"""Class to create `RemoteCall`s."""
|
||||
|
||||
def __init__(self, comms_wrapper, comm_id, callback, **settings):
|
||||
# Avoid setting attributes
|
||||
super(RemoteCallFactory, self).__setattr__(
|
||||
'_comms_wrapper', comms_wrapper)
|
||||
super(RemoteCallFactory, self).__setattr__('_comm_id', comm_id)
|
||||
super(RemoteCallFactory, self).__setattr__('_callback', callback)
|
||||
super(RemoteCallFactory, self).__setattr__('_settings', settings)
|
||||
|
||||
def __getattr__(self, name):
|
||||
"""Get a call for a function named 'name'."""
|
||||
return RemoteCall(name, self._comms_wrapper, self._comm_id,
|
||||
self._callback, self._settings)
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
"""Set an attribute to the other side."""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class RemoteCall():
|
||||
"""Class to call the other side of the comms like a function."""
|
||||
|
||||
def __init__(self, name, comms_wrapper, comm_id, callback, settings):
|
||||
self._name = name
|
||||
self._comms_wrapper = comms_wrapper
|
||||
self._comm_id = comm_id
|
||||
self._settings = settings
|
||||
self._callback = callback
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
"""
|
||||
Transmit the call to the other side of the tunnel.
|
||||
|
||||
The args and kwargs have to be picklable.
|
||||
"""
|
||||
blocking = 'blocking' in self._settings and self._settings['blocking']
|
||||
self._settings['send_reply'] = blocking or self._callback is not None
|
||||
|
||||
call_id = uuid.uuid4().hex
|
||||
call_dict = {
|
||||
'call_name': self._name,
|
||||
'call_id': call_id,
|
||||
'settings': self._settings,
|
||||
}
|
||||
call_data = {
|
||||
'call_args': args,
|
||||
'call_kwargs': kwargs,
|
||||
}
|
||||
|
||||
if not self._comms_wrapper.is_open(self._comm_id):
|
||||
# Only an error if the call is blocking.
|
||||
if blocking:
|
||||
raise CommError("The comm is not connected.")
|
||||
logger.debug("Call to unconnected comm: %s" % self._name)
|
||||
return
|
||||
self._comms_wrapper._register_call(call_dict, self._callback)
|
||||
return self._comms_wrapper._get_call_return_value(
|
||||
call_dict, call_data, self._comm_id)
|
||||
@@ -0,0 +1,322 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright © Spyder Project Contributors
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder/__init__.py for details)
|
||||
|
||||
"""
|
||||
In addition to the remote_call mechanism implemented in CommBase:
|
||||
- Implements _wait_reply, so blocking calls can be made.
|
||||
"""
|
||||
|
||||
import pickle
|
||||
import socket
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
|
||||
from IPython.core.getipython import get_ipython
|
||||
from jupyter_client.localinterfaces import localhost
|
||||
from tornado import ioloop
|
||||
import zmq
|
||||
|
||||
from spyder_kernels.comms.commbase import CommBase, CommError
|
||||
from spyder_kernels.py3compat import TimeoutError, PY2
|
||||
|
||||
|
||||
if PY2:
|
||||
import thread
|
||||
|
||||
|
||||
def get_free_port():
|
||||
"""Find a free port on the local machine."""
|
||||
sock = socket.socket()
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, b'\0' * 8)
|
||||
sock.bind((localhost(), 0))
|
||||
port = sock.getsockname()[1]
|
||||
sock.close()
|
||||
return port
|
||||
|
||||
|
||||
def frontend_request(blocking, timeout=None):
|
||||
"""
|
||||
Send a request to the frontend.
|
||||
|
||||
If blocking is True, the return value will be returned.
|
||||
"""
|
||||
if not get_ipython().kernel.frontend_comm.is_open():
|
||||
raise CommError("Can't make a request to a closed comm")
|
||||
# Get a reply from the last frontend to have sent a message
|
||||
return get_ipython().kernel.frontend_call(
|
||||
blocking=blocking,
|
||||
broadcast=False,
|
||||
timeout=timeout)
|
||||
|
||||
|
||||
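# Usage sketch for frontend_request (kernel-side code asking the frontend for
# an answer). `get_current_working_dir` is an assumed handler name here; any
# handler registered by the frontend can be called this way:
#
#     cwd = frontend_request(blocking=True, timeout=2).get_current_working_dir()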
class FrontendComm(CommBase):
|
||||
"""Mixin to implement the spyder_shell_api."""
|
||||
|
||||
def __init__(self, kernel):
|
||||
super(FrontendComm, self).__init__()
|
||||
|
||||
# Comms
|
||||
self.kernel = kernel
|
||||
self.kernel.comm_manager.register_target(
|
||||
self._comm_name, self._comm_open)
|
||||
|
||||
self.comm_port = None
|
||||
self.register_call_handler('_send_comm_config',
|
||||
self._send_comm_config)
|
||||
|
||||
self.comm_lock = threading.RLock()
|
||||
|
||||
# self.kernel.parent is IPKernelApp unless we are in tests
|
||||
if self.kernel.parent:
|
||||
# Create a new socket
|
||||
self.context = zmq.Context()
|
||||
self.comm_socket = self.context.socket(zmq.ROUTER)
|
||||
self.comm_socket.linger = 1000
|
||||
|
||||
self.comm_port = get_free_port()
|
||||
|
||||
self.comm_port = self.kernel.parent._bind_socket(
|
||||
self.comm_socket, self.comm_port)
|
||||
if hasattr(zmq, 'ROUTER_HANDOVER'):
|
||||
# Set router-handover to workaround zeromq reconnect problems
|
||||
# in certain rare circumstances.
|
||||
# See ipython/ipykernel#270 and zeromq/libzmq#2892
|
||||
self.comm_socket.router_handover = 1
|
||||
|
||||
self.comm_thread_close = threading.Event()
|
||||
self.comm_socket_thread = threading.Thread(target=self.poll_thread)
|
||||
self.comm_socket_thread.start()
|
||||
|
||||
# Patch parent.close. This function only exists in Python 3.
|
||||
if not PY2:
|
||||
parent_close = self.kernel.parent.close
|
||||
|
||||
def close():
|
||||
"""Close comm_socket_thread."""
|
||||
self.close_thread()
|
||||
parent_close()
|
||||
|
||||
self.kernel.parent.close = close
|
||||
|
||||
def close(self, comm_id=None):
|
||||
"""Close the comm and notify the other side."""
|
||||
with self.comm_lock:
|
||||
return super(FrontendComm, self).close(comm_id)
|
||||
|
||||
def _send_message(self, *args, **kwargs):
|
||||
"""Publish custom messages to the other side."""
|
||||
with self.comm_lock:
|
||||
return super(FrontendComm, self)._send_message(*args, **kwargs)
|
||||
|
||||
def close_thread(self):
|
||||
"""Close comm."""
|
||||
self.comm_thread_close.set()
|
||||
self.comm_socket.close()
|
||||
self.context.term()
|
||||
self.comm_socket_thread.join()
|
||||
|
||||
def poll_thread(self):
|
||||
"""Receive messages from comm socket."""
|
||||
if not PY2:
|
||||
# Create an event loop for the handlers.
|
||||
ioloop.IOLoop().initialize()
|
||||
while not self.comm_thread_close.is_set():
|
||||
self.poll_one()
|
||||
|
||||
def poll_one(self):
|
||||
"""Receive one message from comm socket."""
|
||||
out_stream = None
|
||||
if self.kernel.shell_streams:
|
||||
# If the message handler needs to send a reply,
|
||||
# use the regular shell stream.
|
||||
out_stream = self.kernel.shell_streams[0]
|
||||
try:
|
||||
ident, msg = self.kernel.session.recv(self.comm_socket, 0)
|
||||
except zmq.error.ContextTerminated:
|
||||
return
|
||||
except Exception:
|
||||
self.kernel.log.warning("Invalid Message:", exc_info=True)
|
||||
return
|
||||
msg_type = msg['header']['msg_type']
|
||||
|
||||
if msg_type == 'shutdown_request':
|
||||
self.comm_thread_close.set()
|
||||
self._comm_close(msg)
|
||||
return
|
||||
|
||||
handler = self.kernel.shell_handlers.get(msg_type, None)
|
||||
try:
|
||||
if handler is None:
|
||||
self.kernel.log.warning("Unknown message type: %r", msg_type)
|
||||
return
|
||||
if PY2:
|
||||
handler(out_stream, ident, msg)
|
||||
return
|
||||
|
||||
import asyncio
|
||||
|
||||
if (getattr(asyncio, 'run', False) and
|
||||
asyncio.iscoroutinefunction(handler)):
|
||||
# This is needed for ipykernel 6+
|
||||
asyncio.run(handler(out_stream, ident, msg))
|
||||
else:
|
||||
# This is required for Python 3.6, which doesn't have
|
||||
# asyncio.run or ipykernel versions less than 6. The
|
||||
# nice thing is that ipykernel 6, which requires
|
||||
# asyncio, doesn't support Python 3.6.
|
||||
handler(out_stream, ident, msg)
|
||||
except Exception:
|
||||
self.kernel.log.error(
|
||||
"Exception in message handler:", exc_info=True)
|
||||
finally:
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
# Flush to ensure reply is sent
|
||||
if out_stream:
|
||||
out_stream.flush(zmq.POLLOUT)
|
||||
|
||||
def remote_call(self, comm_id=None, blocking=False, callback=None,
|
||||
timeout=None):
|
||||
"""Get a handler for remote calls."""
|
||||
return super(FrontendComm, self).remote_call(
|
||||
blocking=blocking,
|
||||
comm_id=comm_id,
|
||||
callback=callback,
|
||||
timeout=timeout)
|
||||
|
||||
def wait_until(self, condition, timeout=None):
|
||||
"""Wait until condition is met. Returns False if timeout."""
|
||||
if condition():
|
||||
return True
|
||||
t_start = time.time()
|
||||
while not condition():
|
||||
if timeout is not None and time.time() > t_start + timeout:
|
||||
return False
|
||||
if threading.current_thread() is self.comm_socket_thread:
|
||||
# Wait for a reply on the comm channel.
|
||||
self.poll_one()
|
||||
else:
|
||||
# Wait 10ms for a reply
|
||||
time.sleep(0.01)
|
||||
return True
|
||||
|
||||
# --- Private --------
|
||||
def _wait_reply(self, call_id, call_name, timeout, retry=True):
|
||||
"""Wait until the frontend replies to a request."""
|
||||
def reply_received():
|
||||
"""The reply is there!"""
|
||||
return call_id in self._reply_inbox
|
||||
if not self.wait_until(reply_received):
|
||||
if retry:
|
||||
self._wait_reply(call_id, call_name, timeout, False)
|
||||
return
|
||||
raise TimeoutError(
|
||||
"Timeout while waiting for '{}' reply.".format(
|
||||
call_name))
|
||||
|
||||
def _comm_open(self, comm, msg):
|
||||
"""
|
||||
A new comm is open!
|
||||
"""
|
||||
self.calling_comm_id = comm.comm_id
|
||||
self._register_comm(comm)
|
||||
self._set_pickle_protocol(msg['content']['data']['pickle_protocol'])
|
||||
self._send_comm_config()
|
||||
|
||||
def on_outgoing_call(self, call_dict):
|
||||
"""A message is about to be sent"""
|
||||
call_dict["comm_port"] = self.comm_port
|
||||
return super(FrontendComm, self).on_outgoing_call(call_dict)
|
||||
|
||||
def _send_comm_config(self):
|
||||
"""Send the comm config to the frontend."""
|
||||
self.remote_call()._set_comm_port(self.comm_port)
|
||||
self.remote_call()._set_pickle_protocol(pickle.HIGHEST_PROTOCOL)
|
||||
|
||||
def _comm_close(self, msg):
|
||||
"""Close comm."""
|
||||
comm_id = msg['content']['comm_id']
|
||||
# Send back a close message confirmation
|
||||
# Fixes spyder-ide/spyder#15356
|
||||
self.close(comm_id)
|
||||
|
||||
def _async_error(self, error_wrapper):
|
||||
"""
|
||||
Send an async error back to the frontend to be displayed.
|
||||
"""
|
||||
self.remote_call()._async_error(error_wrapper)
|
||||
|
||||
def _register_comm(self, comm):
|
||||
"""
|
||||
Remove a side effect ipykernel has on comm message handling.
|
||||
"""
|
||||
def handle_msg(msg):
|
||||
"""Handle a comm_msg message"""
|
||||
if comm._msg_callback:
|
||||
comm._msg_callback(msg)
|
||||
comm.handle_msg = handle_msg
|
||||
super(FrontendComm, self)._register_comm(comm)
|
||||
|
||||
def _remote_callback(self, call_name, call_args, call_kwargs):
|
||||
"""Call the callback function for the remote call."""
|
||||
with self.comm_lock:
|
||||
current_stdout = sys.stdout
|
||||
current_stderr = sys.stderr
|
||||
saved_stdout_write = current_stdout.write
|
||||
saved_stderr_write = current_stderr.write
|
||||
thread_id = thread.get_ident() if PY2 else threading.get_ident()
|
||||
current_stdout.write = WriteWrapper(
|
||||
saved_stdout_write, call_name, thread_id)
|
||||
current_stderr.write = WriteWrapper(
|
||||
saved_stderr_write, call_name, thread_id)
|
||||
try:
|
||||
return super(FrontendComm, self)._remote_callback(
|
||||
call_name, call_args, call_kwargs)
|
||||
finally:
|
||||
current_stdout.write = saved_stdout_write
|
||||
current_stderr.write = saved_stderr_write
|
||||
|
||||
|
||||
class WriteWrapper(object):
|
||||
"""Wrapper to warn user when text is printed."""
|
||||
|
||||
def __init__(self, write, name, thread_id):
|
||||
self._write = write
|
||||
self._name = name
|
||||
self._thread_id = thread_id
|
||||
self._warning_shown = False
|
||||
|
||||
def is_benign_message(self, message):
|
||||
"""Determine if a message is benign in order to filter it."""
|
||||
benign_messages = [
|
||||
# Fixes spyder-ide/spyder#14928
|
||||
# Fixes spyder-ide/spyder-kernels#343
|
||||
'DeprecationWarning',
|
||||
# Fixes spyder-ide/spyder-kernels#365
|
||||
'IOStream.flush timed out'
|
||||
]
|
||||
|
||||
return any([msg in message for msg in benign_messages])
|
||||
|
||||
def __call__(self, string):
|
||||
"""Print warning once."""
|
||||
thread_id = thread.get_ident() if PY2 else threading.get_ident()
|
||||
if self._thread_id != thread_id:
|
||||
return self._write(string)
|
||||
|
||||
if not self.is_benign_message(string):
|
||||
if not self._warning_shown:
|
||||
self._warning_shown = True
|
||||
|
||||
# Don't print handler name for `show_mpl_backend_errors`
|
||||
# because we have a specific message for it.
|
||||
if repr(self._name) != "'show_mpl_backend_errors'":
|
||||
self._write(
|
||||
"\nOutput from spyder call " + repr(self._name) + ":\n"
|
||||
)
|
||||
|
||||
return self._write(string)
|
||||
@@ -0,0 +1,11 @@
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2009- Spyder Kernels Contributors
#
# Licensed under the terms of the MIT License
# (see spyder_kernels/__init__.py for details)
# -----------------------------------------------------------------------------

"""
Console kernel
"""
@@ -0,0 +1,31 @@
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2009- Spyder Kernels Contributors
#
# Licensed under the terms of the MIT License
# (see spyder_kernels/__init__.py for details)
# -----------------------------------------------------------------------------

import sys
import os


if __name__ == '__main__':
    # Remove the current working directory from sys.path for Python 3.7+
    # because since that version it's added by default to sys.path when
    # using 'python -m'.
    if sys.version_info[0] == 3 and sys.version_info[1] >= 7:
        cwd = os.getcwd()
        if cwd in sys.path:
            sys.path.remove(cwd)

    from spyder_kernels.console import start
    try:
        start.main()
    except Exception:
        # We have to explicitly write to __stderr__ as stderr might already
        # have been replaced.
        import traceback
        traceback.print_exc(file=sys.__stderr__)
        sys.__stderr__.flush()
        raise
@@ -0,0 +1,867 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
Spyder kernel for Jupyter.
|
||||
"""
|
||||
|
||||
# Standard library imports
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import threading
|
||||
|
||||
# Third-party imports
|
||||
from ipykernel.ipkernel import IPythonKernel
|
||||
from ipykernel import eventloops
|
||||
from traitlets.config.loader import LazyConfigValue
|
||||
|
||||
# Local imports
|
||||
from spyder_kernels.py3compat import (
|
||||
TEXT_TYPES, to_text_string, PY3)
|
||||
from spyder_kernels.comms.frontendcomm import FrontendComm
|
||||
from spyder_kernels.utils.iofuncs import iofunctions
|
||||
from spyder_kernels.utils.mpl import (
|
||||
MPL_BACKENDS_FROM_SPYDER, MPL_BACKENDS_TO_SPYDER, INLINE_FIGURE_FORMATS)
|
||||
from spyder_kernels.utils.nsview import (
|
||||
get_remote_data, make_remote_view, get_size)
|
||||
from spyder_kernels.console.shell import SpyderShell
|
||||
|
||||
if PY3:
|
||||
import faulthandler
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Excluded variables from the Variable Explorer (i.e. they are not
|
||||
# shown at all there)
|
||||
EXCLUDED_NAMES = ['In', 'Out', 'exit', 'get_ipython', 'quit']
|
||||
|
||||
|
||||
class SpyderKernel(IPythonKernel):
|
||||
"""Spyder kernel for Jupyter."""
|
||||
|
||||
shell_class = SpyderShell
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(SpyderKernel, self).__init__(*args, **kwargs)
|
||||
|
||||
self.comm_manager.get_comm = self._get_comm
|
||||
self.frontend_comm = FrontendComm(self)
|
||||
|
||||
# All functions that can be called through the comm
|
||||
handlers = {
|
||||
'set_breakpoints': self.set_spyder_breakpoints,
|
||||
'set_pdb_ignore_lib': self.set_pdb_ignore_lib,
|
||||
'set_pdb_execute_events': self.set_pdb_execute_events,
|
||||
'set_pdb_use_exclamation_mark': self.set_pdb_use_exclamation_mark,
|
||||
'get_value': self.get_value,
|
||||
'load_data': self.load_data,
|
||||
'save_namespace': self.save_namespace,
|
||||
'is_defined': self.is_defined,
|
||||
'get_doc': self.get_doc,
|
||||
'get_source': self.get_source,
|
||||
'set_value': self.set_value,
|
||||
'remove_value': self.remove_value,
|
||||
'copy_value': self.copy_value,
|
||||
'set_cwd': self.set_cwd,
|
||||
'get_cwd': self.get_cwd,
|
||||
'get_syspath': self.get_syspath,
|
||||
'get_env': self.get_env,
|
||||
'close_all_mpl_figures': self.close_all_mpl_figures,
|
||||
'show_mpl_backend_errors': self.show_mpl_backend_errors,
|
||||
'get_namespace_view': self.get_namespace_view,
|
||||
'set_namespace_view_settings': self.set_namespace_view_settings,
|
||||
'get_var_properties': self.get_var_properties,
|
||||
'set_sympy_forecolor': self.set_sympy_forecolor,
|
||||
'update_syspath': self.update_syspath,
|
||||
'is_special_kernel_valid': self.is_special_kernel_valid,
|
||||
'get_matplotlib_backend': self.get_matplotlib_backend,
|
||||
'get_mpl_interactive_backend': self.get_mpl_interactive_backend,
|
||||
'pdb_input_reply': self.pdb_input_reply,
|
||||
'_interrupt_eventloop': self._interrupt_eventloop,
|
||||
'enable_faulthandler': self.enable_faulthandler,
|
||||
}
|
||||
for call_id in handlers:
|
||||
self.frontend_comm.register_call_handler(
|
||||
call_id, handlers[call_id])
|
||||
|
||||
self.namespace_view_settings = {}
|
||||
self._mpl_backend_error = None
|
||||
self._running_namespace = None
|
||||
self.faulthandler_handle = None
|
||||
|
||||
# -- Public API -----------------------------------------------------------
|
||||
def do_shutdown(self, restart):
|
||||
"""Disable faulthandler if enabled before proceeding."""
|
||||
self.disable_faulthandler()
|
||||
super(SpyderKernel, self).do_shutdown(restart)
|
||||
|
||||
def frontend_call(self, blocking=False, broadcast=True,
|
||||
timeout=None, callback=None):
|
||||
"""Call the frontend."""
|
||||
# If not broadcast, send only to the calling comm
|
||||
if broadcast:
|
||||
comm_id = None
|
||||
else:
|
||||
comm_id = self.frontend_comm.calling_comm_id
|
||||
|
||||
return self.frontend_comm.remote_call(
|
||||
blocking=blocking,
|
||||
comm_id=comm_id,
|
||||
callback=callback,
|
||||
timeout=timeout)
|
||||
|
||||
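    # Usage sketch for frontend_call: a non-blocking broadcast to every
    # connected frontend (this exact pattern appears in pdb_input_reply below):
    #
    #     self.frontend_call().request_interrupt_eventloop()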
def enable_faulthandler(self, fn):
|
||||
"""
|
||||
Open a file to save the faulthandler output and the identifiers of
|
||||
internal threads.
|
||||
"""
|
||||
if not PY3:
|
||||
# Not implemented
|
||||
return
|
||||
self.disable_faulthandler()
|
||||
f = open(fn, 'w')
|
||||
self.faulthandler_handle = f
|
||||
f.write("Main thread id:\n")
|
||||
f.write(hex(threading.main_thread().ident))
|
||||
f.write('\nSystem threads ids:\n')
|
||||
f.write(" ".join([hex(thread.ident) for thread in threading.enumerate()
|
||||
if thread is not threading.main_thread()]))
|
||||
f.write('\n')
|
||||
faulthandler.enable(f)
|
||||
|
||||
def disable_faulthandler(self):
|
||||
"""
|
||||
Cancel the faulthandling, close the file handle and remove the file.
|
||||
"""
|
||||
if not PY3:
|
||||
# Not implemented
|
||||
return
|
||||
if self.faulthandler_handle:
|
||||
faulthandler.disable()
|
||||
self.faulthandler_handle.close()
|
||||
self.faulthandler_handle = None
|
||||
|
||||
# --- For the Variable Explorer
|
||||
def set_namespace_view_settings(self, settings):
|
||||
"""Set namespace_view_settings."""
|
||||
self.namespace_view_settings = settings
|
||||
|
||||
def get_namespace_view(self):
|
||||
"""
|
||||
Return the namespace view
|
||||
|
||||
This is a dictionary with the following structure
|
||||
|
||||
{'a':
|
||||
{
|
||||
'type': 'str',
|
||||
'size': 1,
|
||||
'view': '1',
|
||||
'python_type': 'int',
|
||||
'numpy_type': 'Unknown'
|
||||
}
|
||||
}
|
||||
|
||||
Here:
|
||||
* 'a' is the variable name.
|
||||
* 'type' and 'size' are self-evident.
|
||||
* 'view' is its value or its repr computed with
|
||||
`value_to_display`.
|
||||
* 'python_type' is its Python type computed with
|
||||
`get_type_string`.
|
||||
* 'numpy_type' is its Numpy type (if any) computed with
|
||||
`get_numpy_type_string`.
|
||||
"""
|
||||
|
||||
settings = self.namespace_view_settings
|
||||
if settings:
|
||||
ns = self._get_current_namespace()
|
||||
view = make_remote_view(ns, settings, EXCLUDED_NAMES)
|
||||
return view
|
||||
else:
|
||||
return None
|
||||
|
||||
def get_var_properties(self):
|
||||
"""
|
||||
Get some properties of the variables in the current
|
||||
namespace
|
||||
"""
|
||||
settings = self.namespace_view_settings
|
||||
if settings:
|
||||
ns = self._get_current_namespace()
|
||||
data = get_remote_data(ns, settings, mode='editable',
|
||||
more_excluded_names=EXCLUDED_NAMES)
|
||||
|
||||
properties = {}
|
||||
for name, value in list(data.items()):
|
||||
properties[name] = {
|
||||
'is_list': self._is_list(value),
|
||||
'is_dict': self._is_dict(value),
|
||||
'is_set': self._is_set(value),
|
||||
'len': self._get_len(value),
|
||||
'is_array': self._is_array(value),
|
||||
'is_image': self._is_image(value),
|
||||
'is_data_frame': self._is_data_frame(value),
|
||||
'is_series': self._is_series(value),
|
||||
'array_shape': self._get_array_shape(value),
|
||||
'array_ndim': self._get_array_ndim(value)
|
||||
}
|
||||
|
||||
return properties
|
||||
else:
|
||||
return None
|
||||
|
||||
def get_value(self, name):
|
||||
"""Get the value of a variable"""
|
||||
ns = self._get_current_namespace()
|
||||
return ns[name]
|
||||
|
||||
def set_value(self, name, value):
|
||||
"""Set the value of a variable"""
|
||||
ns = self._get_reference_namespace(name)
|
||||
ns[name] = value
|
||||
self.log.debug(ns)
|
||||
|
||||
def remove_value(self, name):
|
||||
"""Remove a variable"""
|
||||
ns = self._get_reference_namespace(name)
|
||||
ns.pop(name)
|
||||
|
||||
def copy_value(self, orig_name, new_name):
|
||||
"""Copy a variable"""
|
||||
ns = self._get_reference_namespace(orig_name)
|
||||
ns[new_name] = ns[orig_name]
|
||||
|
||||
def load_data(self, filename, ext, overwrite=False):
|
||||
"""
|
||||
Load data from filename.
|
||||
|
||||
Use 'overwrite' to determine if conflicts between variable names need
|
||||
to be handled or not.
|
||||
|
||||
For example, if a loaded variable is called 'var'
|
||||
and there is already a variable 'var' in the namespace, having
|
||||
'overwrite=True' will cause 'var' to be updated.
|
||||
On the other hand, with 'overwrite=False', a new variable will be
|
||||
created with a suffix starting with 000, i.e. 'var000' (default behavior).
|
||||
"""
|
||||
from spyder_kernels.utils.misc import fix_reference_name
|
||||
|
||||
glbs = self.shell.user_ns
|
||||
load_func = iofunctions.load_funcs[ext]
|
||||
data, error_message = load_func(filename)
|
||||
|
||||
if error_message:
|
||||
return error_message
|
||||
|
||||
if not overwrite:
|
||||
# We convert to list since we mutate this dictionary
|
||||
for key in list(data.keys()):
|
||||
new_key = fix_reference_name(key, blacklist=list(glbs.keys()))
|
||||
if new_key != key:
|
||||
data[new_key] = data.pop(key)
|
||||
|
||||
try:
|
||||
glbs.update(data)
|
||||
except Exception as error:
|
||||
return str(error)
|
||||
|
||||
return None
|
||||
|
||||
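    # Illustration of the overwrite behavior described in load_data above
    # (hypothetical file name): loading a file that defines 'var' while 'var'
    # already exists, with overwrite=False, keeps both values:
    #
    #     error = self.load_data('/tmp/session.spydata', '.spydata')
    #     # existing 'var' is untouched; the loaded value is bound to 'var000'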
def save_namespace(self, filename):
|
||||
"""Save namespace into filename"""
|
||||
ns = self._get_current_namespace()
|
||||
settings = self.namespace_view_settings
|
||||
data = get_remote_data(ns, settings, mode='picklable',
|
||||
more_excluded_names=EXCLUDED_NAMES).copy()
|
||||
return iofunctions.save(data, filename)
|
||||
|
||||
# --- For Pdb
|
||||
def _do_complete(self, code, cursor_pos):
|
||||
"""Call parent class do_complete"""
|
||||
return super(SpyderKernel, self).do_complete(code, cursor_pos)
|
||||
|
||||
def do_complete(self, code, cursor_pos):
|
||||
"""
|
||||
Call Pdb complete if we are debugging.
|
||||
|
||||
Public method of ipykernel overwritten for debugging.
|
||||
"""
|
||||
if self.shell.is_debugging():
|
||||
return self.shell.pdb_session.do_complete(code, cursor_pos)
|
||||
return self._do_complete(code, cursor_pos)
|
||||
|
||||
def set_spyder_breakpoints(self, breakpoints):
|
||||
"""
|
||||
Handle a message from the frontend
|
||||
"""
|
||||
if self.shell.pdb_session:
|
||||
self.shell.pdb_session.set_spyder_breakpoints(breakpoints)
|
||||
|
||||
def set_pdb_ignore_lib(self, state):
|
||||
"""
|
||||
Change the "Ignore libraries while stepping" debugger setting.
|
||||
"""
|
||||
if self.shell.pdb_session:
|
||||
self.shell.pdb_session.pdb_ignore_lib = state
|
||||
|
||||
def set_pdb_execute_events(self, state):
|
||||
"""
|
||||
Handle a message from the frontend
|
||||
"""
|
||||
if self.shell.pdb_session:
|
||||
self.shell.pdb_session.pdb_execute_events = state
|
||||
|
||||
def set_pdb_use_exclamation_mark(self, state):
|
||||
"""
|
||||
Set an option on the current debugging session to decide whether
|
||||
the Pdb commands need to be prefixed by '!'.
|
||||
"""
|
||||
if self.shell.pdb_session:
|
||||
self.shell.pdb_session.pdb_use_exclamation_mark = state
|
||||
|
||||
def pdb_input_reply(self, line, echo_stack_entry=True):
|
||||
"""Get a pdb command from the frontend."""
|
||||
debugger = self.shell.pdb_session
|
||||
if debugger:
|
||||
debugger._disable_next_stack_entry = not echo_stack_entry
|
||||
debugger._cmd_input_line = line
|
||||
if self.eventloop:
|
||||
# Interrupting the eventloop is only implemented when a message is
|
||||
# received on the shell channel, but this message is queued and
|
||||
# won't be processed because an `execute` message is being
|
||||
# processed. Therefore we process the message here (control chan.)
|
||||
# and request a dummy message to be sent on the shell channel to
|
||||
# stop the eventloop. This will call back `_interrupt_eventloop`.
|
||||
self.frontend_call().request_interrupt_eventloop()
|
||||
|
||||
def _interrupt_eventloop(self):
|
||||
"""Interrupts the eventloop."""
|
||||
# Receiving the request is enough to stop the eventloop.
|
||||
pass
|
||||
|
||||
# --- For the Help plugin
|
||||
def is_defined(self, obj, force_import=False):
|
||||
"""Return True if object is defined in current namespace"""
|
||||
from spyder_kernels.utils.dochelpers import isdefined
|
||||
|
||||
ns = self._get_current_namespace(with_magics=True)
|
||||
return isdefined(obj, force_import=force_import, namespace=ns)
|
||||
|
||||
def get_doc(self, objtxt):
|
||||
"""Get object documentation dictionary"""
|
||||
try:
|
||||
import matplotlib
|
||||
matplotlib.rcParams['docstring.hardcopy'] = True
|
||||
except:
|
||||
pass
|
||||
from spyder_kernels.utils.dochelpers import getdoc
|
||||
|
||||
obj, valid = self._eval(objtxt)
|
||||
if valid:
|
||||
return getdoc(obj)
|
||||
|
||||
def get_source(self, objtxt):
|
||||
"""Get object source"""
|
||||
from spyder_kernels.utils.dochelpers import getsource
|
||||
|
||||
obj, valid = self._eval(objtxt)
|
||||
if valid:
|
||||
return getsource(obj)
|
||||
|
||||
# -- For Matplotlib
|
||||
def get_matplotlib_backend(self):
|
||||
"""Get current matplotlib backend."""
|
||||
try:
|
||||
import matplotlib
|
||||
return MPL_BACKENDS_TO_SPYDER[matplotlib.get_backend()]
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_mpl_interactive_backend(self):
|
||||
"""
|
||||
Get current Matplotlib interactive backend.
|
||||
|
||||
This is different from the current backend because, for instance, the
|
||||
user can set first the Qt5 backend, then the Inline one. In that case,
|
||||
the current backend is Inline, but the current interactive one is Qt5,
|
||||
and this backend can't be changed without a kernel restart.
|
||||
"""
|
||||
# Mapping from frameworks to backend names.
|
||||
mapping = {
|
||||
'qt': 'QtAgg',
|
||||
'tk': 'TkAgg',
|
||||
'macosx': 'MacOSX'
|
||||
}
|
||||
|
||||
# --- Get interactive framework
|
||||
framework = None
|
||||
|
||||
# Detect if there is a graphical framework running by checking the
|
||||
# eventloop function attached to the kernel.eventloop attribute (see
|
||||
# `ipykernel.eventloops.enable_gui` for context).
|
||||
from IPython.core.getipython import get_ipython
|
||||
loop_func = get_ipython().kernel.eventloop
|
||||
|
||||
if loop_func is not None:
|
||||
if loop_func == eventloops.loop_tk:
|
||||
framework = 'tk'
|
||||
elif loop_func == eventloops.loop_qt5:
|
||||
framework = 'qt'
|
||||
elif loop_func == eventloops.loop_cocoa:
|
||||
framework = 'macosx'
|
||||
else:
|
||||
# Spyder doesn't handle other backends
|
||||
framework = 'other'
|
||||
|
||||
# --- Return backend according to framework
|
||||
if framework is None:
|
||||
# Since no interactive backend has been set yet, this is
|
||||
# equivalent to having the inline one.
|
||||
return 0
|
||||
elif framework in mapping:
|
||||
return MPL_BACKENDS_TO_SPYDER[mapping[framework]]
|
||||
else:
|
||||
# This covers the case of other backends (e.g. Wx or Gtk)
|
||||
# which users can set interactively with the %matplotlib
|
||||
# magic but not through our Preferences.
|
||||
return -1
|
||||
|
||||
def set_matplotlib_backend(self, backend, pylab=False):
|
||||
"""Set matplotlib backend given a Spyder backend option."""
|
||||
mpl_backend = MPL_BACKENDS_FROM_SPYDER[to_text_string(backend)]
|
||||
self._set_mpl_backend(mpl_backend, pylab=pylab)
|
||||
|
||||
def set_mpl_inline_figure_format(self, figure_format):
|
||||
"""Set the inline figure format to use with matplotlib."""
|
||||
mpl_figure_format = INLINE_FIGURE_FORMATS[figure_format]
|
||||
self._set_config_option(
|
||||
'InlineBackend.figure_format', mpl_figure_format)
|
||||
|
||||
def set_mpl_inline_resolution(self, resolution):
|
||||
"""Set inline figure resolution."""
|
||||
self._set_mpl_inline_rc_config('figure.dpi', resolution)
|
||||
|
||||
def set_mpl_inline_figure_size(self, width, height):
|
||||
"""Set inline figure size."""
|
||||
value = (width, height)
|
||||
self._set_mpl_inline_rc_config('figure.figsize', value)
|
||||
|
||||
def set_mpl_inline_bbox_inches(self, bbox_inches):
|
||||
"""
|
||||
Set inline print figure bbox inches.
|
||||
|
||||
The change is done by updating the 'print_figure_kwargs' config dict.
|
||||
"""
|
||||
from IPython.core.getipython import get_ipython
|
||||
config = get_ipython().kernel.config
|
||||
inline_config = (
|
||||
config['InlineBackend'] if 'InlineBackend' in config else {})
|
||||
print_figure_kwargs = (
|
||||
inline_config['print_figure_kwargs']
|
||||
if 'print_figure_kwargs' in inline_config else {})
|
||||
bbox_inches_dict = {
|
||||
'bbox_inches': 'tight' if bbox_inches else None}
|
||||
print_figure_kwargs.update(bbox_inches_dict)
|
||||
|
||||
# This seems to be necessary for newer versions of Traitlets because
|
||||
# print_figure_kwargs doesn't return a dict.
|
||||
if isinstance(print_figure_kwargs, LazyConfigValue):
|
||||
figure_kwargs_dict = print_figure_kwargs.to_dict().get('update')
|
||||
if figure_kwargs_dict:
|
||||
print_figure_kwargs = figure_kwargs_dict
|
||||
|
||||
self._set_config_option(
|
||||
'InlineBackend.print_figure_kwargs', print_figure_kwargs)
|
||||
|
||||
# -- For completions
|
||||
def set_jedi_completer(self, use_jedi):
|
||||
"""Enable/Disable jedi as the completer for the kernel."""
|
||||
self._set_config_option('IPCompleter.use_jedi', use_jedi)
|
||||
|
||||
def set_greedy_completer(self, use_greedy):
|
||||
"""Enable/Disable greedy completer for the kernel."""
|
||||
self._set_config_option('IPCompleter.greedy', use_greedy)
|
||||
|
||||
def set_autocall(self, autocall):
|
||||
"""Enable/Disable autocall funtionality."""
|
||||
self._set_config_option('ZMQInteractiveShell.autocall', autocall)
|
||||
|
||||
# --- Additional methods
|
||||
def set_cwd(self, dirname):
|
||||
"""Set current working directory."""
|
||||
os.chdir(dirname)
|
||||
|
||||
def get_cwd(self):
|
||||
"""Get current working directory."""
|
||||
try:
|
||||
return os.getcwd()
|
||||
except (IOError, OSError):
|
||||
pass
|
||||
|
||||
def get_syspath(self):
|
||||
"""Return sys.path contents."""
|
||||
return sys.path[:]
|
||||
|
||||
def get_env(self):
|
||||
"""Get environment variables."""
|
||||
return os.environ.copy()
|
||||
|
||||
def close_all_mpl_figures(self):
|
||||
"""Close all Matplotlib figures."""
|
||||
try:
|
||||
import matplotlib.pyplot as plt
|
||||
plt.close('all')
|
||||
except:
|
||||
pass
|
||||
|
||||
def is_special_kernel_valid(self):
|
||||
"""
|
||||
Check if optional dependencies are available for special consoles.
|
||||
"""
|
||||
try:
|
||||
if os.environ.get('SPY_AUTOLOAD_PYLAB_O') == 'True':
|
||||
import matplotlib
|
||||
elif os.environ.get('SPY_SYMPY_O') == 'True':
|
||||
import sympy
|
||||
elif os.environ.get('SPY_RUN_CYTHON') == 'True':
|
||||
import cython
|
||||
except Exception:
|
||||
# Use Exception instead of ImportError here because modules can
|
||||
# fail to be imported due to a lot of issues.
|
||||
if os.environ.get('SPY_AUTOLOAD_PYLAB_O') == 'True':
|
||||
return u'matplotlib'
|
||||
elif os.environ.get('SPY_SYMPY_O') == 'True':
|
||||
return u'sympy'
|
||||
elif os.environ.get('SPY_RUN_CYTHON') == 'True':
|
||||
return u'cython'
|
||||
return None
|
||||
|
||||
def update_syspath(self, path_dict, new_path_dict):
|
||||
"""
|
||||
Update the PYTHONPATH of the kernel.
|
||||
|
||||
`path_dict` and `new_path_dict` have the paths as keys and the state
|
||||
as values. The state is `True` for active and `False` for inactive.
|
||||
|
||||
`path_dict` corresponds to the previous state of the PYTHONPATH.
|
||||
`new_path_dict` corresponds to the new state of the PYTHONPATH.
|
||||
"""
|
||||
# Remove old paths
|
||||
for path in path_dict:
|
||||
while path in sys.path:
|
||||
sys.path.remove(path)
|
||||
|
||||
# Add new paths
|
||||
pypath = [path for path, active in new_path_dict.items() if active]
|
||||
if pypath:
|
||||
sys.path.extend(pypath)
|
||||
os.environ.update({'PYTHONPATH': os.pathsep.join(pypath)})
|
||||
else:
|
||||
os.environ.pop('PYTHONPATH', None)
|
||||
|
||||
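    # Illustration of the expected arguments for update_syspath above
    # (hypothetical paths): keys are paths, values their active state.
    #
    #     self.update_syspath(
    #         {'/old/project': True},
    #         {'/new/project': True, '/disabled/path': False},
    #     )
    #     # '/old/project' is removed from sys.path; only '/new/project' is
    #     # appended and exported through PYTHONPATH.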
# -- Private API ---------------------------------------------------
|
||||
# --- For the Variable Explorer
|
||||
def _get_current_namespace(self, with_magics=False):
|
||||
"""
|
||||
Return current namespace
|
||||
|
||||
This is globals() if not debugging, or a dictionary containing
|
||||
both locals() and globals() for current frame when debugging
|
||||
"""
|
||||
ns = {}
|
||||
if self.shell.is_debugging() and self.shell.pdb_session.curframe:
|
||||
# Stopped at a pdb prompt
|
||||
ns.update(self.shell.user_ns)
|
||||
ns.update(self.shell._pdb_locals)
|
||||
else:
|
||||
# Give access to the running namespace if there is one
|
||||
if self._running_namespace is None:
|
||||
ns.update(self.shell.user_ns)
|
||||
else:
|
||||
# This is true when a file is executing.
|
||||
running_globals, running_locals = self._running_namespace
|
||||
ns.update(running_globals)
|
||||
if running_locals is not None:
|
||||
ns.update(running_locals)
|
||||
|
||||
# Add magics to ns so we can show help about them on the Help
|
||||
# plugin
|
||||
if with_magics:
|
||||
line_magics = self.shell.magics_manager.magics['line']
|
||||
cell_magics = self.shell.magics_manager.magics['cell']
|
||||
ns.update(line_magics)
|
||||
ns.update(cell_magics)
|
||||
return ns
|
||||
|
||||
def _get_reference_namespace(self, name):
|
||||
"""
|
||||
Return namespace where reference name is defined
|
||||
|
||||
It returns the globals() if reference has not yet been defined
|
||||
"""
|
||||
lcls = self.shell._pdb_locals
|
||||
if name in lcls:
|
||||
return lcls
|
||||
return self.shell.user_ns
|
||||
|
||||
def _get_len(self, var):
|
||||
"""Return sequence length"""
|
||||
try:
|
||||
return get_size(var)
|
||||
except:
|
||||
return None
|
||||
|
||||
def _is_array(self, var):
|
||||
"""Return True if variable is a NumPy array"""
|
||||
try:
|
||||
import numpy
|
||||
return isinstance(var, numpy.ndarray)
|
||||
except:
|
||||
return False
|
||||
|
||||
def _is_image(self, var):
|
||||
"""Return True if variable is a PIL.Image image"""
|
||||
try:
|
||||
from PIL import Image
|
||||
return isinstance(var, Image.Image)
|
||||
except:
|
||||
return False
|
||||
|
||||
def _is_data_frame(self, var):
|
||||
"""Return True if variable is a DataFrame"""
|
||||
try:
|
||||
from pandas import DataFrame
|
||||
return isinstance(var, DataFrame)
|
||||
except:
|
||||
return False
|
||||
|
||||
def _is_series(self, var):
|
||||
"""Return True if variable is a Series"""
|
||||
try:
|
||||
from pandas import Series
|
||||
return isinstance(var, Series)
|
||||
except:
|
||||
return False
|
||||
|
||||
def _is_list(self, var):
|
||||
"""Return True if variable is a list or tuple."""
|
||||
# The try/except is necessary to fix spyder-ide/spyder#19516.
|
||||
try:
|
||||
return isinstance(var, (tuple, list))
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def _is_dict(self, var):
|
||||
"""Return True if variable is a dictionary."""
|
||||
# The try/except is necessary to fix spyder-ide/spyder#19516.
|
||||
try:
|
||||
return isinstance(var, dict)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def _is_set(self, var):
|
||||
"""Return True if variable is a set."""
|
||||
# The try/except is necessary to fix spyder-ide/spyder#19516.
|
||||
try:
|
||||
return isinstance(var, set)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def _get_array_shape(self, var):
|
||||
"""Return array's shape"""
|
||||
try:
|
||||
if self._is_array(var):
|
||||
return var.shape
|
||||
else:
|
||||
return None
|
||||
except:
|
||||
return None
|
||||
|
||||
def _get_array_ndim(self, var):
|
||||
"""Return array's ndim"""
|
||||
try:
|
||||
if self._is_array(var):
|
||||
return var.ndim
|
||||
else:
|
||||
return None
|
||||
except:
|
||||
return None
|
||||
|
||||
# --- For the Help plugin
|
||||
def _eval(self, text):
|
||||
"""
|
||||
Evaluate text and return (obj, valid)
|
||||
where *obj* is the object represented by *text*
|
||||
and *valid* is True if object evaluation did not raise any exception
|
||||
"""
|
||||
from spyder_kernels.py3compat import is_text_string
|
||||
|
||||
assert is_text_string(text)
|
||||
ns = self._get_current_namespace(with_magics=True)
|
||||
try:
|
||||
return eval(text, ns), True
|
||||
except:
|
||||
return None, False
|
||||
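A small sketch of the `(obj, valid)` contract, assuming `kernel` is a SpyderKernel instance with a populated namespace:

```
# Illustration only of the return contract.
obj, valid = kernel._eval("len([1, 2, 3])")   # -> (3, True)
obj, valid = kernel._eval("undefined_name")   # -> (None, False)
if valid:
    print("Evaluated to:", obj)
```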
|
||||
# --- For Matplotlib
|
||||
def _set_mpl_backend(self, backend, pylab=False):
|
||||
"""
|
||||
Set a backend for Matplotlib.
|
||||
|
||||
backend: A parameter that can be passed to %matplotlib
|
||||
(e.g. 'inline' or 'tk').
|
||||
pylab: If True, use the %pylab magic to populate the
|
||||
namespace with names from numpy and matplotlib.
|
||||
"""
|
||||
import traceback
|
||||
from IPython.core.getipython import get_ipython
|
||||
|
||||
# Don't proceed further if there's any error while importing Matplotlib
|
||||
try:
|
||||
import matplotlib
|
||||
except Exception:
|
||||
return
|
||||
|
||||
generic_error = (
|
||||
"\n" + "="*73 + "\n"
|
||||
"NOTE: The following error appeared when setting "
|
||||
"your Matplotlib backend!!\n" + "="*73 + "\n\n"
|
||||
"{0}"
|
||||
)
|
||||
|
||||
magic = 'pylab' if pylab else 'matplotlib'
|
||||
|
||||
error = None
|
||||
try:
|
||||
# This prevents Matplotlib from automatically setting the backend, which
|
||||
# would override our own mechanism.
|
||||
matplotlib.rcParams['backend'] = 'Agg'
|
||||
|
||||
# Set the backend
|
||||
get_ipython().run_line_magic(magic, backend)
|
||||
except RuntimeError as err:
|
||||
# This catches errors generated by ipykernel when
|
||||
# trying to set a backend. See issue 5541
|
||||
if "GUI eventloops" in str(err):
|
||||
previous_backend = matplotlib.get_backend()
|
||||
if backend not in previous_backend.lower():
|
||||
# Only inform about an error if the user selected backend
|
||||
# and the one set by Matplotlib are different. Else this
|
||||
# message is very confusing.
|
||||
error = (
|
||||
"\n"
|
||||
"NOTE: Spyder *can't* set your selected Matplotlib "
|
||||
"backend because there is a previous backend already "
|
||||
"in use.\n\n"
|
||||
"Your backend will be {0}".format(previous_backend)
|
||||
)
|
||||
# This covers other RuntimeError's
|
||||
else:
|
||||
error = generic_error.format(traceback.format_exc())
|
||||
except ImportError as err:
|
||||
additional_info = (
|
||||
"This is most likely caused by missing packages in the Python "
|
||||
"environment\n"
|
||||
"or installation whose interpreter is located at:\n\n"
|
||||
" {0}"
|
||||
).format(sys.executable)
|
||||
|
||||
error = generic_error.format(err) + '\n\n' + additional_info
|
||||
except Exception:
|
||||
error = generic_error.format(traceback.format_exc())
|
||||
|
||||
self._mpl_backend_error = error
|
||||
|
||||
def _set_config_option(self, option, value):
|
||||
"""
|
||||
Set config options using the %config magic.
|
||||
|
||||
As parameters:
|
||||
option: config option, for example 'InlineBackend.figure_format'.
|
||||
value: value of the option, for example 'SVG', 'Retina', etc.
|
||||
"""
|
||||
from IPython.core.getipython import get_ipython
|
||||
try:
|
||||
base_config = "{option} = "
|
||||
value_line = (
|
||||
"'{value}'" if isinstance(value, TEXT_TYPES) else "{value}")
|
||||
config_line = base_config + value_line
|
||||
get_ipython().run_line_magic(
|
||||
'config',
|
||||
config_line.format(option=option, value=value))
|
||||
except Exception:
|
||||
pass
|
||||
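A rough sketch of how the magic line is assembled for each value type (the option name is just an example, and the repr-based quoting below is a simplification of the TEXT_TYPES check above):

```
# Illustration only: how the string handed to %config is built.
option, value = 'InlineBackend.figure_format', 'svg'
line = "{option} = {value!r}".format(option=option, value=value)
print(line)   # -> InlineBackend.figure_format = 'svg'
# The method then runs it through the %config magic:
#   get_ipython().run_line_magic('config', line)
```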
|
||||
def _set_mpl_inline_rc_config(self, option, value):
|
||||
"""
|
||||
Update any of the Matplotlib rcParams given an option and value.
|
||||
"""
|
||||
try:
|
||||
from matplotlib import rcParams
|
||||
rcParams[option] = value
|
||||
except Exception:
|
||||
# Needed in case matplotlib isn't installed
|
||||
pass
|
||||
|
||||
def show_mpl_backend_errors(self):
|
||||
"""Show Matplotlib backend errors after the prompt is ready."""
|
||||
if self._mpl_backend_error is not None:
|
||||
print(self._mpl_backend_error) # spyder: test-skip
|
||||
|
||||
def set_sympy_forecolor(self, background_color='dark'):
|
||||
"""Set SymPy forecolor depending on console background."""
|
||||
if os.environ.get('SPY_SYMPY_O') == 'True':
|
||||
try:
|
||||
from sympy import init_printing
|
||||
from IPython.core.getipython import get_ipython
|
||||
if background_color == 'dark':
|
||||
init_printing(forecolor='White', ip=get_ipython())
|
||||
elif background_color == 'light':
|
||||
init_printing(forecolor='Black', ip=get_ipython())
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# --- Others
|
||||
def _load_autoreload_magic(self):
|
||||
"""Load %autoreload magic."""
|
||||
from IPython.core.getipython import get_ipython
|
||||
try:
|
||||
get_ipython().run_line_magic('reload_ext', 'autoreload')
|
||||
get_ipython().run_line_magic('autoreload', '2')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def _load_wurlitzer(self):
|
||||
"""Load wurlitzer extension."""
|
||||
# Wurlitzer has no effect on Windows
|
||||
if not os.name == 'nt':
|
||||
from IPython.core.getipython import get_ipython
|
||||
# Enclose this in a try/except because if it fails the
|
||||
# console will be totally unusable.
|
||||
# Fixes spyder-ide/spyder#8668
|
||||
try:
|
||||
get_ipython().run_line_magic('reload_ext', 'wurlitzer')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def _get_comm(self, comm_id):
|
||||
"""
|
||||
We need to redefine this method from ipykernel.comm_manager to
|
||||
avoid showing a warning when the comm corresponding to comm_id
|
||||
is not present.
|
||||
|
||||
Fixes spyder-ide/spyder#15498
|
||||
"""
|
||||
try:
|
||||
return self.comm_manager.comms[comm_id]
|
||||
except KeyError:
|
||||
pass
|
||||
@@ -0,0 +1,22 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
Custom Spyder Outstream class.
|
||||
"""
|
||||
|
||||
from ipykernel.iostream import OutStream
|
||||
|
||||
|
||||
class TTYOutStream(OutStream):
|
||||
"""Subclass of OutStream that represents a TTY."""
|
||||
|
||||
def __init__(self, session, pub_thread, name, pipe=None, echo=None, *,
|
||||
watchfd=True):
|
||||
super().__init__(session, pub_thread, name, pipe,
|
||||
echo=echo, watchfd=watchfd, isatty=True)
|
||||
@@ -0,0 +1,115 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
Spyder shell for Jupyter kernels.
|
||||
"""
|
||||
|
||||
# Standard library imports
|
||||
import bdb
|
||||
import sys
|
||||
|
||||
# Third-party imports
|
||||
from ipykernel.zmqshell import ZMQInteractiveShell
|
||||
|
||||
|
||||
class SpyderShell(ZMQInteractiveShell):
|
||||
"""Spyder shell."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
# Create _pdb_obj before __init__
|
||||
self._pdb_obj = None
|
||||
super(SpyderShell, self).__init__(*args, **kwargs)
|
||||
|
||||
# register post_execute
|
||||
self.events.register('post_execute', self.do_post_execute)
|
||||
|
||||
# ---- Methods overridden by us.
|
||||
def ask_exit(self):
|
||||
"""Engage the exit actions."""
|
||||
self.kernel.frontend_comm.close_thread()
|
||||
return super(SpyderShell, self).ask_exit()
|
||||
|
||||
def _showtraceback(self, etype, evalue, stb):
|
||||
"""
|
||||
Don't show a traceback when exiting our debugger after entering
|
||||
it through a `breakpoint()` call.
|
||||
|
||||
This is because calling `!exit` after `breakpoint()` raises
|
||||
BdbQuit, which throws a long and useless traceback.
|
||||
"""
|
||||
if etype is bdb.BdbQuit:
|
||||
stb = ['']
|
||||
super(SpyderShell, self)._showtraceback(etype, evalue, stb)
|
||||
|
||||
# ---- For Pdb namespace integration
|
||||
def get_local_scope(self, stack_depth):
|
||||
"""Get local scope at given frame depth."""
|
||||
frame = sys._getframe(stack_depth + 1)
|
||||
if self._pdb_frame is frame:
|
||||
# Avoid calling f_locals on _pdb_frame
|
||||
return self._pdb_obj.curframe_locals
|
||||
else:
|
||||
return frame.f_locals
|
||||
|
||||
def get_global_scope(self, stack_depth):
|
||||
"""Get global scope at given frame depth."""
|
||||
frame = sys._getframe(stack_depth + 1)
|
||||
return frame.f_globals
|
||||
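A sketch of how callers are expected to use `stack_depth` with these two helpers; `shell` stands in for the SpyderShell instance and is an assumption of this example:

```
def run_snippet(shell, code):
    # stack_depth=1 points one frame above this function, so the snippet
    # runs in the namespace of run_snippet's caller.
    globals_ns = shell.get_global_scope(stack_depth=1)
    locals_ns = shell.get_local_scope(stack_depth=1)
    exec(code, globals_ns, locals_ns)
```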
|
||||
def is_debugging(self):
|
||||
"""
|
||||
Check if we are currently debugging.
|
||||
"""
|
||||
return bool(self._pdb_frame)
|
||||
|
||||
@property
|
||||
def pdb_session(self):
|
||||
"""Get current pdb session."""
|
||||
return self._pdb_obj
|
||||
|
||||
@pdb_session.setter
|
||||
def pdb_session(self, pdb_obj):
|
||||
"""Register Pdb session to use it later"""
|
||||
self._pdb_obj = pdb_obj
|
||||
|
||||
@property
|
||||
def _pdb_frame(self):
|
||||
"""Return current Pdb frame if there is any"""
|
||||
if self.pdb_session is not None:
|
||||
return self.pdb_session.curframe
|
||||
|
||||
@property
|
||||
def _pdb_locals(self):
|
||||
"""
|
||||
Return current Pdb frame locals if available. Otherwise
|
||||
return an empty dictionary
|
||||
"""
|
||||
if self._pdb_frame is not None:
|
||||
return self._pdb_obj.curframe_locals
|
||||
else:
|
||||
return {}
|
||||
|
||||
@property
|
||||
def user_ns(self):
|
||||
"""Get the current namespace."""
|
||||
if self._pdb_frame is not None:
|
||||
return self._pdb_frame.f_globals
|
||||
else:
|
||||
return self.__user_ns
|
||||
|
||||
@user_ns.setter
|
||||
def user_ns(self, namespace):
|
||||
"""Set user_ns."""
|
||||
self.__user_ns = namespace
|
||||
|
||||
def do_post_execute(self):
|
||||
"""Flush __std*__ after execution."""
|
||||
# Flush C standard streams.
|
||||
sys.__stderr__.flush()
|
||||
sys.__stdout__.flush()
|
||||
@@ -0,0 +1,335 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
File used to start kernels for the IPython Console
|
||||
"""
|
||||
|
||||
# Standard library imports
|
||||
import os
|
||||
import os.path as osp
|
||||
import sys
|
||||
import site
|
||||
|
||||
from traitlets import DottedObjectName
|
||||
import ipykernel
|
||||
|
||||
# Local imports
|
||||
from spyder_kernels.utils.misc import is_module_installed
|
||||
from spyder_kernels.utils.mpl import (
|
||||
MPL_BACKENDS_FROM_SPYDER, INLINE_FIGURE_FORMATS)
|
||||
|
||||
|
||||
PY2 = sys.version[0] == '2'
|
||||
IPYKERNEL_6 = ipykernel.__version__[0] >= '6'
|
||||
|
||||
|
||||
def import_spydercustomize():
|
||||
"""Import our customizations into the kernel."""
|
||||
here = osp.dirname(__file__)
|
||||
parent = osp.dirname(here)
|
||||
customize_dir = osp.join(parent, 'customize')
|
||||
|
||||
# Remove current directory from sys.path to prevent kernel
|
||||
# crashes when people name Python files or modules with
|
||||
# the same name as standard library modules.
|
||||
# See spyder-ide/spyder#8007
|
||||
while '' in sys.path:
|
||||
sys.path.remove('')
|
||||
|
||||
# Import our customizations
|
||||
site.addsitedir(customize_dir)
|
||||
import spydercustomize # noqa
|
||||
|
||||
# Remove our customize path from sys.path
|
||||
try:
|
||||
sys.path.remove(customize_dir)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
|
||||
def sympy_config(mpl_backend):
|
||||
"""Sympy configuration"""
|
||||
if mpl_backend is not None:
|
||||
lines = """
|
||||
from sympy.interactive import init_session
|
||||
init_session()
|
||||
%matplotlib {0}
|
||||
""".format(mpl_backend)
|
||||
else:
|
||||
lines = """
|
||||
from sympy.interactive import init_session
|
||||
init_session()
|
||||
"""
|
||||
|
||||
return lines
|
||||
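For a quick check, a sketch of what `sympy_config('inline')` returns; these lines are later appended to `IPKernelApp.exec_lines`, so they run once at kernel startup when the SymPy option is enabled:

```
# Illustration only of the returned startup snippet.
expected = (
    "\n"
    "from sympy.interactive import init_session\n"
    "init_session()\n"
    "%matplotlib inline\n"
)
```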
|
||||
|
||||
def kernel_config():
|
||||
"""Create a config object with IPython kernel options."""
|
||||
from IPython.core.application import get_ipython_dir
|
||||
from traitlets.config.loader import Config, load_pyconfig_files
|
||||
|
||||
# ---- IPython config ----
|
||||
try:
|
||||
profile_path = osp.join(get_ipython_dir(), 'profile_default')
|
||||
cfg = load_pyconfig_files(['ipython_config.py',
|
||||
'ipython_kernel_config.py'],
|
||||
profile_path)
|
||||
except:
|
||||
cfg = Config()
|
||||
|
||||
# ---- Spyder config ----
|
||||
spy_cfg = Config()
|
||||
|
||||
# Enable/disable certain features for testing
|
||||
testing = os.environ.get('SPY_TESTING') == 'True'
|
||||
if testing:
|
||||
# Don't load or save history in our IPython consoles.
|
||||
spy_cfg.HistoryAccessor.enabled = False
|
||||
|
||||
# Until we implement Issue 1052
|
||||
spy_cfg.InteractiveShell.xmode = 'Plain'
|
||||
|
||||
# Jedi completer. It's only available in Python 3
|
||||
jedi_o = os.environ.get('SPY_JEDI_O') == 'True'
|
||||
if not PY2:
|
||||
spy_cfg.IPCompleter.use_jedi = jedi_o
|
||||
|
||||
# Clear terminal arguments input.
|
||||
# This needs to be done before adding the exec_lines that come from
|
||||
# Spyder, to avoid deleting the sys module if users want to import
|
||||
# it through them.
|
||||
# See spyder-ide/spyder#15788
|
||||
clear_argv = "import sys; sys.argv = ['']; del sys"
|
||||
spy_cfg.IPKernelApp.exec_lines = [clear_argv]
|
||||
|
||||
# Set our runfile in builtins here to prevent other packages from shadowing it.
|
||||
# This became a problem starting with IPykernel 6.3.0.
|
||||
if not PY2:
|
||||
spy_cfg.IPKernelApp.exec_lines.append(
|
||||
"import builtins; "
|
||||
"builtins.runfile = builtins.spyder_runfile; "
|
||||
"del builtins.spyder_runfile; del builtins"
|
||||
)
|
||||
|
||||
# Prevent other libraries to change the breakpoint builtin.
|
||||
# This became a problem starting with IPykernel 6.3.0.
|
||||
if sys.version_info[0:2] >= (3, 7):
|
||||
spy_cfg.IPKernelApp.exec_lines.append(
|
||||
"import sys; import pdb; "
|
||||
"sys.breakpointhook = pdb.set_trace; "
|
||||
"del sys; del pdb"
|
||||
)
|
||||
|
||||
# Run lines of code at startup
|
||||
run_lines_o = os.environ.get('SPY_RUN_LINES_O')
|
||||
if run_lines_o is not None:
|
||||
spy_cfg.IPKernelApp.exec_lines += (
|
||||
[x.strip() for x in run_lines_o.split(';')]
|
||||
)
|
||||
|
||||
# Load %autoreload magic
|
||||
spy_cfg.IPKernelApp.exec_lines.append(
|
||||
"get_ipython().kernel._load_autoreload_magic()")
|
||||
|
||||
# Load wurlitzer extension
|
||||
spy_cfg.IPKernelApp.exec_lines.append(
|
||||
"get_ipython().kernel._load_wurlitzer()")
|
||||
|
||||
# Default inline backend configuration
|
||||
# This is useful to have when people don't
|
||||
# use our config system to configure the
|
||||
# inline backend but want to use
|
||||
# '%matplotlib inline' at runtime
|
||||
spy_cfg.InlineBackend.rc = {
|
||||
'figure.figsize': (6.0, 4.0),
|
||||
# 72 dpi matches SVG/qtconsole.
|
||||
# This only affects PNG export, as SVG has no dpi setting.
|
||||
'figure.dpi': 72,
|
||||
# 12pt labels get cutoff on 6x4 logplots, so use 10pt.
|
||||
'font.size': 10,
|
||||
# 10pt still needs a little more room on the xlabel
|
||||
'figure.subplot.bottom': .125,
|
||||
# Play nicely with any background color.
|
||||
'figure.facecolor': 'white',
|
||||
'figure.edgecolor': 'white'
|
||||
}
|
||||
|
||||
# Pylab configuration
|
||||
mpl_backend = None
|
||||
if is_module_installed('matplotlib'):
|
||||
# Set Matplotlib backend with Spyder options
|
||||
pylab_o = os.environ.get('SPY_PYLAB_O')
|
||||
backend_o = os.environ.get('SPY_BACKEND_O')
|
||||
if pylab_o == 'True' and backend_o is not None:
|
||||
mpl_backend = MPL_BACKENDS_FROM_SPYDER[backend_o]
|
||||
# Inline backend configuration
|
||||
if mpl_backend == 'inline':
|
||||
# Figure format
|
||||
format_o = os.environ.get('SPY_FORMAT_O')
|
||||
formats = INLINE_FIGURE_FORMATS
|
||||
if format_o is not None:
|
||||
spy_cfg.InlineBackend.figure_format = formats[format_o]
|
||||
|
||||
# Resolution
|
||||
resolution_o = os.environ.get('SPY_RESOLUTION_O')
|
||||
if resolution_o is not None:
|
||||
spy_cfg.InlineBackend.rc['figure.dpi'] = float(
|
||||
resolution_o)
|
||||
|
||||
# Figure size
|
||||
width_o = os.environ.get('SPY_WIDTH_O')
|
||||
height_o = os.environ.get('SPY_HEIGHT_O')
|
||||
if width_o is not None and height_o is not None:
|
||||
spy_cfg.InlineBackend.rc['figure.figsize'] = (float(width_o),
|
||||
float(height_o))
|
||||
|
||||
# Print figure kwargs
|
||||
bbox_inches_o = os.environ.get('SPY_BBOX_INCHES_O')
|
||||
bbox_inches = 'tight' if bbox_inches_o == 'True' else None
|
||||
spy_cfg.InlineBackend.print_figure_kwargs.update(
|
||||
{'bbox_inches': bbox_inches})
|
||||
else:
|
||||
# Set Matplotlib backend to inline for external kernels.
|
||||
# Fixes issue 108
|
||||
mpl_backend = 'inline'
|
||||
|
||||
# Automatically load Pylab and Numpy, or only set Matplotlib
|
||||
# backend
|
||||
autoload_pylab_o = os.environ.get('SPY_AUTOLOAD_PYLAB_O') == 'True'
|
||||
command = "get_ipython().kernel._set_mpl_backend('{0}', {1})"
|
||||
spy_cfg.IPKernelApp.exec_lines.append(
|
||||
command.format(mpl_backend, autoload_pylab_o))
|
||||
|
||||
# Enable Cython magic
|
||||
run_cython = os.environ.get('SPY_RUN_CYTHON') == 'True'
|
||||
if run_cython and is_module_installed('Cython'):
|
||||
spy_cfg.IPKernelApp.exec_lines.append('%reload_ext Cython')
|
||||
|
||||
# Run a file at startup
|
||||
use_file_o = os.environ.get('SPY_USE_FILE_O')
|
||||
run_file_o = os.environ.get('SPY_RUN_FILE_O')
|
||||
if use_file_o == 'True' and run_file_o is not None:
|
||||
if osp.exists(run_file_o):
|
||||
spy_cfg.IPKernelApp.file_to_run = run_file_o
|
||||
|
||||
# Autocall
|
||||
autocall_o = os.environ.get('SPY_AUTOCALL_O')
|
||||
if autocall_o is not None:
|
||||
spy_cfg.ZMQInteractiveShell.autocall = int(autocall_o)
|
||||
|
||||
# To handle the banner by ourselves in IPython 3+
|
||||
spy_cfg.ZMQInteractiveShell.banner1 = ''
|
||||
|
||||
# Greedy completer
|
||||
greedy_o = os.environ.get('SPY_GREEDY_O') == 'True'
|
||||
spy_cfg.IPCompleter.greedy = greedy_o
|
||||
|
||||
# Sympy loading
|
||||
sympy_o = os.environ.get('SPY_SYMPY_O') == 'True'
|
||||
if sympy_o and is_module_installed('sympy'):
|
||||
lines = sympy_config(mpl_backend)
|
||||
spy_cfg.IPKernelApp.exec_lines.append(lines)
|
||||
|
||||
# Disable the new mechanism to capture and forward low-level output
|
||||
# in IPykernel 6. For that we have Wurlitzer.
|
||||
if not PY2:
|
||||
spy_cfg.IPKernelApp.capture_fd_output = False
|
||||
|
||||
# Merge IPython and Spyder configs. Spyder prefs will have prevalence
|
||||
# over IPython ones
|
||||
cfg._merge(spy_cfg)
|
||||
return cfg
|
||||
|
||||
|
||||
def varexp(line):
|
||||
"""
|
||||
Spyder's variable explorer magic
|
||||
|
||||
Used to generate plots, histograms and images of the variables displayed
|
||||
on it.
|
||||
"""
|
||||
ip = get_ipython() #analysis:ignore
|
||||
funcname, name = line.split()
|
||||
try:
|
||||
import guiqwt.pyplot as pyplot
|
||||
except:
|
||||
import matplotlib.pyplot as pyplot
|
||||
pyplot.figure();
|
||||
getattr(pyplot, funcname[2:])(ip.kernel._get_current_namespace()[name])
|
||||
pyplot.show()
|
||||
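Once registered in `main()` below, the magic is called from the console roughly as follows; the variable names are hypothetical. The token after `--` picks the pyplot function and the second token names the variable looked up in the current namespace.

```
# In an IPython console attached to this kernel:
#   %varexp --plot my_array      # calls pyplot.plot(my_array)
#   %varexp --hist my_samples    # calls pyplot.hist(my_samples)
#   %varexp --imshow my_image    # calls pyplot.imshow(my_image)
```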
|
||||
|
||||
def main():
|
||||
# Remove this module's path from sys.path:
|
||||
try:
|
||||
sys.path.remove(osp.dirname(__file__))
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
try:
|
||||
locals().pop('__file__')
|
||||
except KeyError:
|
||||
pass
|
||||
__doc__ = ''
|
||||
__name__ = '__main__'
|
||||
|
||||
# Import our customizations into the kernel
|
||||
import_spydercustomize()
|
||||
|
||||
# Remove current directory from sys.path to prevent kernel
|
||||
# crashes when people name Python files or modules with
|
||||
# the same name as standard library modules.
|
||||
# See spyder-ide/spyder#8007
|
||||
while '' in sys.path:
|
||||
sys.path.remove('')
|
||||
|
||||
# Main imports
|
||||
from ipykernel.kernelapp import IPKernelApp
|
||||
from spyder_kernels.console.kernel import SpyderKernel
|
||||
|
||||
class SpyderKernelApp(IPKernelApp):
|
||||
|
||||
if IPYKERNEL_6:
|
||||
outstream_class = DottedObjectName(
|
||||
'spyder_kernels.console.outstream.TTYOutStream')
|
||||
|
||||
def init_pdb(self):
|
||||
"""
|
||||
This method was added in IPykernel 5.3.1 and it replaces
|
||||
the debugger used by the kernel with a new class
|
||||
introduced in IPython 7.15 during the kernel's initialization.
|
||||
Therefore, it doesn't allow us to use our debugger.
|
||||
"""
|
||||
pass
|
||||
|
||||
# Fire up the kernel instance.
|
||||
kernel = SpyderKernelApp.instance()
|
||||
kernel.kernel_class = SpyderKernel
|
||||
try:
|
||||
kernel.config = kernel_config()
|
||||
except:
|
||||
pass
|
||||
kernel.initialize()
|
||||
|
||||
# Set our own magics
|
||||
kernel.shell.register_magic_function(varexp)
|
||||
|
||||
# Set Pdb class to be used by %debug and %pdb.
|
||||
# This makes IPython consoles to use the class defined in our
|
||||
# sitecustomize instead of their default one.
|
||||
import pdb
|
||||
kernel.shell.InteractiveTB.debugger_cls = pdb.Pdb
|
||||
|
||||
# Start the (infinite) kernel event loop.
|
||||
kernel.start()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,15 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
Site package for the console kernel
|
||||
|
||||
NOTE: This package shouldn't be imported **anywhere**.
|
||||
It's only used to set additional functionality for
|
||||
our consoles.
|
||||
"""
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,127 @@
|
||||
#
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
|
||||
import linecache
|
||||
import os.path
|
||||
import types
|
||||
import sys
|
||||
|
||||
from IPython.core.getipython import get_ipython
|
||||
|
||||
from spyder_kernels.py3compat import PY2
|
||||
|
||||
|
||||
def new_main_mod(filename, modname):
|
||||
"""
|
||||
Reimplemented from IPython/core/interactiveshell.py to avoid caching
|
||||
and clearing recursive namespace.
|
||||
"""
|
||||
filename = os.path.abspath(filename)
|
||||
|
||||
main_mod = types.ModuleType(
|
||||
modname,
|
||||
doc="Module created for script run in IPython")
|
||||
|
||||
main_mod.__file__ = filename
|
||||
# It seems pydoc (and perhaps others) needs any module instance to
|
||||
# implement a __nonzero__ method
|
||||
main_mod.__nonzero__ = lambda : True
|
||||
|
||||
return main_mod
|
||||
|
||||
|
||||
class NamespaceManager(object):
|
||||
"""
|
||||
Get a namespace and set __file__ to filename for this namespace.
|
||||
|
||||
The namespace is either namespace, the current namespace if
|
||||
current_namespace is True, or a new namespace.
|
||||
"""
|
||||
|
||||
def __init__(self, filename, namespace=None, current_namespace=False,
|
||||
file_code=None, stack_depth=1):
|
||||
self.filename = filename
|
||||
self.ns_globals = namespace
|
||||
self.ns_locals = None
|
||||
self.current_namespace = current_namespace
|
||||
self._previous_filename = None
|
||||
self._previous_main = None
|
||||
self._previous_running_namespace = None
|
||||
self._reset_main = False
|
||||
self._file_code = file_code
|
||||
ipython_shell = get_ipython()
|
||||
self.context_globals = ipython_shell.get_global_scope(stack_depth + 1)
|
||||
self.context_locals = ipython_shell.get_local_scope(stack_depth + 1)
|
||||
|
||||
def __enter__(self):
|
||||
"""
|
||||
Prepare the namespace.
|
||||
"""
|
||||
# Save previous __file__
|
||||
ipython_shell = get_ipython()
|
||||
if self.ns_globals is None:
|
||||
if self.current_namespace:
|
||||
self.ns_globals = self.context_globals
|
||||
self.ns_locals = self.context_locals
|
||||
if '__file__' in self.ns_globals:
|
||||
self._previous_filename = self.ns_globals['__file__']
|
||||
self.ns_globals['__file__'] = self.filename
|
||||
else:
|
||||
main_mod = new_main_mod(self.filename, '__main__')
|
||||
self.ns_globals = main_mod.__dict__
|
||||
self.ns_locals = None
|
||||
# Needed to allow pickle to reference main
|
||||
if '__main__' in sys.modules:
|
||||
self._previous_main = sys.modules['__main__']
|
||||
sys.modules['__main__'] = main_mod
|
||||
self._reset_main = True
|
||||
|
||||
# Save current namespace for access by variable explorer
|
||||
self._previous_running_namespace = (
|
||||
ipython_shell.kernel._running_namespace)
|
||||
ipython_shell.kernel._running_namespace = (
|
||||
self.ns_globals, self.ns_locals)
|
||||
|
||||
if (self._file_code is not None
|
||||
and not PY2
|
||||
and isinstance(self._file_code, bytes)):
|
||||
try:
|
||||
self._file_code = self._file_code.decode()
|
||||
except UnicodeDecodeError:
|
||||
# Setting the cache is not supported for non utf-8 files
|
||||
self._file_code = None
|
||||
if self._file_code is not None:
|
||||
# '\n' is used instead of the native line endings. (see linecache)
|
||||
# mtime is set to None to avoid a cache update.
|
||||
linecache.cache[self.filename] = (
|
||||
len(self._file_code), None,
|
||||
[line + '\n' for line in self._file_code.splitlines()],
|
||||
self.filename)
|
||||
return self.ns_globals, self.ns_locals
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
"""
|
||||
Reset the namespace.
|
||||
"""
|
||||
ipython_shell = get_ipython()
|
||||
ipython_shell.kernel._running_namespace = (
|
||||
self._previous_running_namespace)
|
||||
if self._previous_filename:
|
||||
self.ns_globals['__file__'] = self._previous_filename
|
||||
elif '__file__' in self.ns_globals:
|
||||
self.ns_globals.pop('__file__')
|
||||
|
||||
if not self.current_namespace:
|
||||
self.context_globals.update(self.ns_globals)
|
||||
if self.context_locals and self.ns_locals:
|
||||
self.context_locals.update(self.ns_locals)
|
||||
|
||||
if self._previous_main:
|
||||
sys.modules['__main__'] = self._previous_main
|
||||
elif '__main__' in sys.modules and self._reset_main:
|
||||
del sys.modules['__main__']
|
||||
if self.filename in linecache.cache and os.path.exists(self.filename):
|
||||
linecache.cache.pop(self.filename)
|
||||
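A minimal usage sketch with a made-up file name; it mirrors how `_exec_file` and `_exec_cell` in the sitecustomize module use the context manager, and assumes a running Spyder kernel (so `get_ipython()` returns a SpyderShell):

```
code = "x = 1\nprint(x + 1)\n"
with NamespaceManager('/tmp/example.py', current_namespace=False,
                      file_code=code, stack_depth=1) as (ns_globals, ns_locals):
    # Inside the block, __file__ points to '/tmp/example.py' and
    # sys.modules['__main__'] is the freshly created module.
    exec(compile(code, '/tmp/example.py', 'exec'), ns_globals, ns_locals)
```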
@@ -0,0 +1,818 @@
|
||||
#
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
#
|
||||
# IMPORTANT NOTE: Don't add a coding line here! It's not necessary for
|
||||
# site files
|
||||
#
|
||||
# Spyder consoles sitecustomize
|
||||
#
|
||||
|
||||
import ast
|
||||
import bdb
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
import pdb
|
||||
import shlex
|
||||
import sys
|
||||
import time
|
||||
import warnings
|
||||
|
||||
from IPython.core.getipython import get_ipython
|
||||
|
||||
from spyder_kernels.comms.frontendcomm import frontend_request
|
||||
from spyder_kernels.customize.namespace_manager import NamespaceManager
|
||||
from spyder_kernels.customize.spyderpdb import SpyderPdb, get_new_debugger
|
||||
from spyder_kernels.customize.umr import UserModuleReloader
|
||||
from spyder_kernels.py3compat import (
|
||||
PY2, _print, encode, compat_exec, FileNotFoundError)
|
||||
from spyder_kernels.customize.utils import capture_last_Expr, canonic
|
||||
|
||||
if not PY2:
|
||||
from IPython.core.inputtransformer2 import (
|
||||
TransformerManager, leading_indent, leading_empty_lines)
|
||||
else:
|
||||
from IPython.core.inputsplitter import IPythonInputSplitter
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# sys.argv can be missing when Python is embedded, taking care of it.
|
||||
# Fixes Issue 1473 and other crazy crashes with IPython 0.13 trying to
|
||||
# access it.
|
||||
# =============================================================================
|
||||
if not hasattr(sys, 'argv'):
|
||||
sys.argv = ['']
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Main constants
|
||||
# =============================================================================
|
||||
IS_EXT_INTERPRETER = os.environ.get('SPY_EXTERNAL_INTERPRETER') == "True"
|
||||
HIDE_CMD_WINDOWS = os.environ.get('SPY_HIDE_CMD') == "True"
|
||||
SHOW_INVALID_SYNTAX_MSG = True
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Execfile functions
|
||||
#
|
||||
# The definitions for Python 2 on Windows were taken from the IPython project
|
||||
# Copyright (C) The IPython Development Team
|
||||
# Distributed under the terms of the modified BSD license
|
||||
# =============================================================================
|
||||
try:
|
||||
# Python 2
|
||||
import __builtin__ as builtins
|
||||
|
||||
except ImportError:
|
||||
# Python 3
|
||||
import builtins
|
||||
basestring = (str,)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Setting console encoding (otherwise Python does not recognize encoding)
|
||||
# for Windows platforms
|
||||
# =============================================================================
|
||||
if os.name == 'nt' and PY2:
|
||||
try:
|
||||
import locale, ctypes
|
||||
_t, _cp = locale.getdefaultlocale('LANG')
|
||||
try:
|
||||
_cp = int(_cp[2:])
|
||||
ctypes.windll.kernel32.SetConsoleCP(_cp)
|
||||
ctypes.windll.kernel32.SetConsoleOutputCP(_cp)
|
||||
except (ValueError, TypeError):
|
||||
# Code page number in locale is not valid
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Prevent subprocess.Popen calls to create visible console windows on Windows.
|
||||
# See issue #4932
|
||||
# =============================================================================
|
||||
if os.name == 'nt' and HIDE_CMD_WINDOWS:
|
||||
import subprocess
|
||||
creation_flag = 0x08000000 # CREATE_NO_WINDOW
|
||||
|
||||
class SubprocessPopen(subprocess.Popen):
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['creationflags'] = creation_flag
|
||||
super(SubprocessPopen, self).__init__(*args, **kwargs)
|
||||
|
||||
subprocess.Popen = SubprocessPopen
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Importing user's sitecustomize
|
||||
# =============================================================================
|
||||
try:
|
||||
import sitecustomize #analysis:ignore
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Add default filesystem encoding on Linux to avoid an error with
|
||||
# Matplotlib 1.5 in Python 2 (Fixes Issue 2793)
|
||||
# =============================================================================
|
||||
if PY2 and sys.platform.startswith('linux'):
|
||||
def _getfilesystemencoding_wrapper():
|
||||
return 'utf-8'
|
||||
|
||||
sys.getfilesystemencoding = _getfilesystemencoding_wrapper
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Set PyQt API to #2
|
||||
# =============================================================================
|
||||
if os.environ.get("QT_API") == 'pyqt':
|
||||
try:
|
||||
import sip
|
||||
for qtype in ('QString', 'QVariant', 'QDate', 'QDateTime',
|
||||
'QTextStream', 'QTime', 'QUrl'):
|
||||
sip.setapi(qtype, 2)
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
os.environ.pop('QT_API')
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Patch PyQt4 and PyQt5
|
||||
# =============================================================================
|
||||
# This saves the QApplication instances so that Python doesn't destroy them.
|
||||
# Python sees all the QApplication instances as different Python objects, while
|
||||
# Qt sees them as a singleton (There is only one Application!). Deleting one
|
||||
# QApplication causes all the other Python instances to become broken.
|
||||
# See spyder-ide/spyder/issues/2970
|
||||
try:
|
||||
from PyQt5 import QtWidgets
|
||||
|
||||
class SpyderQApplication(QtWidgets.QApplication):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(SpyderQApplication, self).__init__(*args, **kwargs)
|
||||
# Add reference to avoid destruction
|
||||
# This creates a Memory leak but avoids a Segmentation fault
|
||||
SpyderQApplication._instance_list.append(self)
|
||||
|
||||
SpyderQApplication._instance_list = []
|
||||
QtWidgets.QApplication = SpyderQApplication
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
from PyQt4 import QtGui
|
||||
|
||||
class SpyderQApplication(QtGui.QApplication):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(SpyderQApplication, self).__init__(*args, **kwargs)
|
||||
# Add reference to avoid destruction
|
||||
# This creates a Memory leak but avoids a Segmentation fault
|
||||
SpyderQApplication._instance_list.append(self)
|
||||
|
||||
SpyderQApplication._instance_list = []
|
||||
QtGui.QApplication = SpyderQApplication
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# IPython adjustments
|
||||
# =============================================================================
|
||||
# Patch unittest.main so that errors are printed directly in the console.
|
||||
# See http://comments.gmane.org/gmane.comp.python.ipython.devel/10557
|
||||
# Fixes Issue 1370
|
||||
import unittest
|
||||
from unittest import TestProgram
|
||||
|
||||
class IPyTesProgram(TestProgram):
|
||||
def __init__(self, *args, **kwargs):
|
||||
test_runner = unittest.TextTestRunner(stream=sys.stderr)
|
||||
kwargs['testRunner'] = kwargs.pop('testRunner', test_runner)
|
||||
kwargs['exit'] = False
|
||||
TestProgram.__init__(self, *args, **kwargs)
|
||||
|
||||
unittest.main = IPyTesProgram
|
||||
|
||||
# Ignore some IPython/ipykernel warnings
|
||||
try:
|
||||
warnings.filterwarnings(action='ignore', category=DeprecationWarning,
|
||||
module='ipykernel.ipkernel')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Turtle adjustments
|
||||
# =============================================================================
|
||||
# This is needed to prevent turtle scripts from crashing after multiple runs in the
|
||||
# same IPython Console instance.
|
||||
# See Spyder issue #6278
|
||||
try:
|
||||
import turtle
|
||||
from turtle import Screen, Terminator
|
||||
|
||||
def spyder_bye():
|
||||
try:
|
||||
Screen().bye()
|
||||
turtle.TurtleScreen._RUNNING = True
|
||||
except Terminator:
|
||||
pass
|
||||
turtle.bye = spyder_bye
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Pandas adjustments
|
||||
# =============================================================================
|
||||
try:
|
||||
import pandas as pd
|
||||
|
||||
# Set Pandas output encoding
|
||||
pd.options.display.encoding = 'utf-8'
|
||||
|
||||
# Filter warning that appears for DataFrames with np.nan values
|
||||
# Example:
|
||||
# >>> import pandas as pd, numpy as np
|
||||
# >>> pd.Series([np.nan,np.nan,np.nan],index=[1,2,3])
|
||||
# Fixes Issue 2991
|
||||
# For 0.18-
|
||||
warnings.filterwarnings(action='ignore', category=RuntimeWarning,
|
||||
module='pandas.core.format',
|
||||
message=".*invalid value encountered in.*")
|
||||
# For 0.18.1+
|
||||
warnings.filterwarnings(action='ignore', category=RuntimeWarning,
|
||||
module='pandas.formats.format',
|
||||
message=".*invalid value encountered in.*")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Numpy adjustments
|
||||
# =============================================================================
|
||||
try:
|
||||
# Filter warning that appears when users have 'Show max/min'
|
||||
# turned on and Numpy arrays contain a nan value.
|
||||
# Fixes Issue 7063
|
||||
# Note: It only happens in Numpy 1.14+
|
||||
warnings.filterwarnings(action='ignore', category=RuntimeWarning,
|
||||
module='numpy.core._methods',
|
||||
message=".*invalid value encountered in.*")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Multiprocessing adjustments
|
||||
# =============================================================================
|
||||
# This patch is only needed on Python 3
|
||||
if not PY2:
|
||||
# This could fail with changes in Python itself, so we protect it
|
||||
# with a try/except
|
||||
try:
|
||||
import multiprocessing.spawn
|
||||
_old_preparation_data = multiprocessing.spawn.get_preparation_data
|
||||
|
||||
def _patched_preparation_data(name):
|
||||
"""
|
||||
Patched get_preparation_data to work when all variables are
|
||||
removed before execution.
|
||||
"""
|
||||
try:
|
||||
d = _old_preparation_data(name)
|
||||
except AttributeError:
|
||||
main_module = sys.modules['__main__']
|
||||
# Any string for __spec__ does the job
|
||||
main_module.__spec__ = ''
|
||||
d = _old_preparation_data(name)
|
||||
# On Windows, there is no fork, so we need to save the main file
|
||||
# and import it
|
||||
if (os.name == 'nt' and 'init_main_from_path' in d
|
||||
and not os.path.exists(d['init_main_from_path'])):
|
||||
_print(
|
||||
"Warning: multiprocessing may need the main file to exist. "
|
||||
"Please save {}".format(d['init_main_from_path']))
|
||||
# Remove path as the subprocess can't do anything with it
|
||||
del d['init_main_from_path']
|
||||
return d
|
||||
multiprocessing.spawn.get_preparation_data = _patched_preparation_data
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# os adjustments
|
||||
# =============================================================================
|
||||
# This is necessary to have better support for Rich and Colorama.
|
||||
def _patched_get_terminal_size(fd=None):
|
||||
return os.terminal_size((80, 30))
|
||||
|
||||
os.get_terminal_size = _patched_get_terminal_size
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Pdb adjustments
|
||||
# =============================================================================
|
||||
pdb.Pdb = SpyderPdb
|
||||
|
||||
# =============================================================================
|
||||
# User module reloader
|
||||
# =============================================================================
|
||||
__umr__ = UserModuleReloader(namelist=os.environ.get("SPY_UMR_NAMELIST", None))
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Handle Post Mortem Debugging and Traceback Linkage to Spyder
|
||||
# =============================================================================
|
||||
def post_mortem_excepthook(type, value, tb):
|
||||
"""
|
||||
For post mortem exception handling, print a banner and enable post
|
||||
mortem debugging.
|
||||
"""
|
||||
ipython_shell = get_ipython()
|
||||
ipython_shell.showtraceback((type, value, tb))
|
||||
p = pdb.Pdb(ipython_shell.colors)
|
||||
|
||||
if not type == SyntaxError:
|
||||
# wait for stderr to print (stderr.flush does not work in this case)
|
||||
time.sleep(0.1)
|
||||
_print('*' * 40)
|
||||
_print('Entering post mortem debugging...')
|
||||
_print('*' * 40)
|
||||
|
||||
# Inform Spyder about position of exception: pdb.Pdb.interaction() calls
|
||||
# cmd.Cmd.cmdloop(), which calls SpyderPdb.preloop() where
|
||||
# send_initial_notification is handled.
|
||||
p.send_initial_notification = True
|
||||
|
||||
p.reset()
|
||||
frame = tb.tb_next.tb_frame
|
||||
# wait for stdout to print
|
||||
time.sleep(0.1)
|
||||
p.interaction(frame, tb)
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# runfile and debugfile commands
|
||||
# ==============================================================================
|
||||
def get_current_file_name():
|
||||
"""Get the current file name."""
|
||||
try:
|
||||
return frontend_request(blocking=True).current_filename()
|
||||
except Exception:
|
||||
_print("This command failed to be executed because an error occurred"
|
||||
" while trying to get the current file name from Spyder's"
|
||||
" editor. The error was:\n\n")
|
||||
get_ipython().showtraceback(exception_only=True)
|
||||
return None
|
||||
|
||||
|
||||
def count_leading_empty_lines(cell):
|
||||
"""Count the number of leading empty cells."""
|
||||
if PY2:
|
||||
lines = cell.splitlines(True)
|
||||
else:
|
||||
lines = cell.splitlines(keepends=True)
|
||||
if not lines:
|
||||
return 0
|
||||
for i, line in enumerate(lines):
|
||||
if line and not line.isspace():
|
||||
return i
|
||||
return len(lines)
|
||||
|
||||
|
||||
def transform_cell(code, indent_only=False):
|
||||
"""Transform IPython code to Python code."""
|
||||
number_empty_lines = count_leading_empty_lines(code)
|
||||
if indent_only:
|
||||
# Not implemented for PY2
|
||||
if PY2:
|
||||
return code
|
||||
if not code.endswith('\n'):
|
||||
code += '\n' # Ensure the cell has a trailing newline
|
||||
lines = code.splitlines(keepends=True)
|
||||
lines = leading_indent(leading_empty_lines(lines))
|
||||
code = ''.join(lines)
|
||||
else:
|
||||
if PY2:
|
||||
tm = IPythonInputSplitter()
|
||||
return tm.transform_cell(code)
|
||||
else:
|
||||
tm = TransformerManager()
|
||||
code = tm.transform_cell(code)
|
||||
return '\n' * number_empty_lines + code
|
||||
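A sketch of the transformation on the Python 3 path; the exact output can vary slightly between IPython versions:

```
# Illustration only.
example = "%matplotlib inline\nx = 1\n"
print(transform_cell(example))
# Expected (approximately):
#   get_ipython().run_line_magic('matplotlib', 'inline')
#   x = 1
# With indent_only=True, only a common leading indent and leading blank
# lines are removed; magics are left untouched.
```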
|
||||
|
||||
def exec_code(code, filename, ns_globals, ns_locals=None, post_mortem=False,
|
||||
exec_fun=None, capture_last_expression=False):
|
||||
"""Execute code and display any exception."""
|
||||
# Tell IPython to hide this frame (>7.16)
|
||||
__tracebackhide__ = True
|
||||
global SHOW_INVALID_SYNTAX_MSG
|
||||
|
||||
if PY2:
|
||||
filename = encode(filename)
|
||||
code = encode(code)
|
||||
|
||||
if exec_fun is None:
|
||||
# Replace by exec when dropping Python 2
|
||||
exec_fun = compat_exec
|
||||
|
||||
ipython_shell = get_ipython()
|
||||
is_ipython = os.path.splitext(filename)[1] == '.ipy'
|
||||
try:
|
||||
if not is_ipython:
|
||||
# TODO: remove the try/except and let the SyntaxError be raised,
|
||||
# because there should not be IPython code in a Python file
|
||||
try:
|
||||
ast_code = ast.parse(transform_cell(code, indent_only=True))
|
||||
except SyntaxError as e:
|
||||
try:
|
||||
ast_code = ast.parse(transform_cell(code))
|
||||
except SyntaxError:
|
||||
if PY2:
|
||||
raise e
|
||||
else:
|
||||
# Need to call exec to avoid Syntax Error in Python 2.
|
||||
# TODO: remove exec when dropping Python 2 support.
|
||||
exec("raise e from None")
|
||||
else:
|
||||
if SHOW_INVALID_SYNTAX_MSG:
|
||||
_print(
|
||||
"\nWARNING: This is not valid Python code. "
|
||||
"If you want to use IPython magics, "
|
||||
"flexible indentation, and prompt removal, "
|
||||
"we recommend that you save this file with the "
|
||||
".ipy extension.\n")
|
||||
SHOW_INVALID_SYNTAX_MSG = False
|
||||
else:
|
||||
ast_code = ast.parse(transform_cell(code))
|
||||
|
||||
if code.rstrip()[-1:] == ";":
|
||||
# Suppress output when the code ends with ';'
|
||||
capture_last_expression = False
|
||||
|
||||
if capture_last_expression:
|
||||
ast_code, capture_last_expression = capture_last_Expr(
|
||||
ast_code, "_spyder_out")
|
||||
|
||||
exec_fun(compile(ast_code, filename, 'exec'), ns_globals, ns_locals)
|
||||
|
||||
if capture_last_expression:
|
||||
out = ns_globals.pop("_spyder_out", None)
|
||||
if out is not None:
|
||||
return out
|
||||
|
||||
except SystemExit as status:
|
||||
# ignore exit(0)
|
||||
if status.code:
|
||||
ipython_shell.showtraceback(exception_only=True)
|
||||
except BaseException as error:
|
||||
if (isinstance(error, bdb.BdbQuit)
|
||||
and ipython_shell.pdb_session):
|
||||
# Ignore BdbQuit if we are debugging, as it is expected.
|
||||
ipython_shell.pdb_session = None
|
||||
elif post_mortem and isinstance(error, Exception):
|
||||
error_type, error, tb = sys.exc_info()
|
||||
post_mortem_excepthook(error_type, error, tb)
|
||||
else:
|
||||
# We ignore the call to exec
|
||||
ipython_shell.showtraceback(tb_offset=1)
|
||||
finally:
|
||||
__tracebackhide__ = "__pdb_exit__"
|
||||
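A minimal sketch of a direct call with a fresh namespace; real callers pass the namespaces prepared by NamespaceManager and a debugger's `run` as `exec_fun` when debugging:

```
ns = {'__name__': '__main__'}
out = exec_code("a = 2\na * 21\n", '<example>', ns,
                capture_last_expression=True)
print(out)        # -> 42, the value of the last bare expression
print(ns['a'])    # -> 2
```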
|
||||
|
||||
def get_file_code(filename, save_all=True, raise_exception=False):
|
||||
"""Retrieve the content of a file."""
|
||||
# Get code from spyder
|
||||
try:
|
||||
return frontend_request(blocking=True).get_file_code(
|
||||
filename, save_all=save_all)
|
||||
except Exception:
|
||||
# Maybe this is a local file
|
||||
try:
|
||||
with open(filename, 'r') as f:
|
||||
return f.read()
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
if raise_exception:
|
||||
raise
|
||||
# Else return None
|
||||
return None
|
||||
|
||||
|
||||
def runfile(filename=None, args=None, wdir=None, namespace=None,
|
||||
post_mortem=False, current_namespace=False):
|
||||
"""
|
||||
Run filename
|
||||
args: command line arguments (string)
|
||||
wdir: working directory
|
||||
namespace: namespace for execution
|
||||
post_mortem: boolean, whether to enter post-mortem mode on error
|
||||
current_namespace: if true, run the file in the current namespace
|
||||
"""
|
||||
return _exec_file(
|
||||
filename, args, wdir, namespace,
|
||||
post_mortem, current_namespace, stack_depth=1)
|
||||
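Typical invocations from an attached console; the paths and arguments below are hypothetical:

```
runfile('/home/user/project/script.py')
runfile('/home/user/project/script.py',
        args='--fast --n 10',        # split with shlex into sys.argv[1:]
        wdir='/home/user/project',   # working directory for the run
        current_namespace=True)      # reuse the console namespace
```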
|
||||
|
||||
def _exec_file(filename=None, args=None, wdir=None, namespace=None,
|
||||
post_mortem=False, current_namespace=False, stack_depth=0,
|
||||
exec_fun=None, canonic_filename=None):
|
||||
# Tell IPython to hide this frame (>7.16)
|
||||
__tracebackhide__ = True
|
||||
ipython_shell = get_ipython()
|
||||
if filename is None:
|
||||
filename = get_current_file_name()
|
||||
if filename is None:
|
||||
return
|
||||
|
||||
try:
|
||||
filename = filename.decode('utf-8')
|
||||
except (UnicodeError, TypeError, AttributeError):
|
||||
# UnicodeError, TypeError --> eventually raised in Python 2
|
||||
# AttributeError --> systematically raised in Python 3
|
||||
pass
|
||||
if PY2:
|
||||
filename = encode(filename)
|
||||
if __umr__.enabled:
|
||||
__umr__.run()
|
||||
if args is not None and not isinstance(args, basestring):
|
||||
raise TypeError("expected a character buffer object")
|
||||
|
||||
try:
|
||||
file_code = get_file_code(filename, raise_exception=True)
|
||||
except Exception:
|
||||
# Show an error and return None
|
||||
_print(
|
||||
"This command failed to be executed because an error occurred"
|
||||
" while trying to get the file code from Spyder's"
|
||||
" editor. The error was:\n\n")
|
||||
get_ipython().showtraceback(exception_only=True)
|
||||
return
|
||||
|
||||
# Here the remote filename has been used. It must now be valid locally.
|
||||
if canonic_filename is not None:
|
||||
filename = canonic_filename
|
||||
else:
|
||||
filename = canonic(filename)
|
||||
|
||||
with NamespaceManager(filename, namespace, current_namespace,
|
||||
file_code=file_code, stack_depth=stack_depth + 1
|
||||
) as (ns_globals, ns_locals):
|
||||
sys.argv = [filename]
|
||||
if args is not None:
|
||||
for arg in shlex.split(args):
|
||||
sys.argv.append(arg)
|
||||
|
||||
if "multiprocessing" in sys.modules:
|
||||
# See https://github.com/spyder-ide/spyder/issues/16696
|
||||
try:
|
||||
sys.modules['__mp_main__'] = sys.modules['__main__']
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if wdir is not None:
|
||||
if PY2:
|
||||
try:
|
||||
wdir = wdir.decode('utf-8')
|
||||
except (UnicodeError, TypeError):
|
||||
# UnicodeError, TypeError --> eventually raised in Python 2
|
||||
pass
|
||||
if os.path.isdir(wdir):
|
||||
os.chdir(wdir)
|
||||
# See https://github.com/spyder-ide/spyder/issues/13632
|
||||
if "multiprocessing.process" in sys.modules:
|
||||
try:
|
||||
import multiprocessing.process
|
||||
multiprocessing.process.ORIGINAL_DIR = os.path.abspath(
|
||||
wdir)
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
_print("Working directory {} doesn't exist.\n".format(wdir))
|
||||
|
||||
try:
|
||||
if __umr__.has_cython:
|
||||
# Cython files
|
||||
with io.open(filename, encoding='utf-8') as f:
|
||||
ipython_shell.run_cell_magic('cython', '', f.read())
|
||||
else:
|
||||
exec_code(file_code, filename, ns_globals, ns_locals,
|
||||
post_mortem=post_mortem, exec_fun=exec_fun,
|
||||
capture_last_expression=False)
|
||||
finally:
|
||||
sys.argv = ['']
|
||||
|
||||
|
||||
# IPykernel 6.3.0+ shadows our runfile because it depends on the Pydev
|
||||
# debugger, which adds its own runfile to builtins. So we replace it with
|
||||
# our own using exec_lines in start.py
|
||||
if PY2:
|
||||
builtins.runfile = runfile
|
||||
else:
|
||||
builtins.spyder_runfile = runfile
|
||||
|
||||
|
||||
def debugfile(filename=None, args=None, wdir=None, post_mortem=False,
|
||||
current_namespace=False):
|
||||
"""
|
||||
Debug filename
|
||||
args: command line arguments (string)
|
||||
wdir: working directory
|
||||
post_mortem: boolean, included for compatibility with runfile
|
||||
"""
|
||||
# Tell IPython to hide this frame (>7.16)
|
||||
__tracebackhide__ = True
|
||||
if filename is None:
|
||||
filename = get_current_file_name()
|
||||
if filename is None:
|
||||
return
|
||||
|
||||
shell = get_ipython()
|
||||
if shell.is_debugging():
|
||||
# Recursive
|
||||
code = (
|
||||
"runfile({}".format(repr(filename)) +
|
||||
", args=%r, wdir=%r, current_namespace=%r)" % (
|
||||
args, wdir, current_namespace)
|
||||
)
|
||||
|
||||
shell.pdb_session.enter_recursive_debugger(
|
||||
code, filename, True,
|
||||
)
|
||||
else:
|
||||
debugger = get_new_debugger(filename, True)
|
||||
_exec_file(
|
||||
filename=filename,
|
||||
canonic_filename=debugger.canonic(filename),
|
||||
args=args,
|
||||
wdir=wdir,
|
||||
current_namespace=current_namespace,
|
||||
exec_fun=debugger.run,
|
||||
stack_depth=1,
|
||||
)
|
||||
|
||||
|
||||
builtins.debugfile = debugfile
|
||||
|
||||
|
||||
def runcell(cellname, filename=None, post_mortem=False):
|
||||
"""
|
||||
Run a code cell from an editor as a file.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
cellname : str or int
|
||||
Cell name or index.
|
||||
filename : str
|
||||
Needed to allow for proper traceback links.
|
||||
post_mortem: bool
|
||||
Automatically enter post mortem on exception.
|
||||
"""
|
||||
# Tell IPython to hide this frame (>7.16)
|
||||
__tracebackhide__ = True
|
||||
return _exec_cell(cellname, filename, post_mortem, stack_depth=1)
|
||||
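Typical invocations from an attached console; the path and cell names are hypothetical:

```
runcell(0, '/home/user/project/analysis.py')        # first cell, by index
runcell('plots', '/home/user/project/analysis.py')  # cell named in a '# %%' comment
```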
|
||||
|
||||
def _exec_cell(cellname, filename=None, post_mortem=False, stack_depth=0,
|
||||
exec_fun=None, canonic_filename=None):
|
||||
"""
|
||||
Execute a code cell with a given exec function.
|
||||
"""
|
||||
# Tell IPython to hide this frame (>7.16)
|
||||
__tracebackhide__ = True
|
||||
if filename is None:
|
||||
filename = get_current_file_name()
|
||||
if filename is None:
|
||||
return
|
||||
try:
|
||||
filename = filename.decode('utf-8')
|
||||
except (UnicodeError, TypeError, AttributeError):
|
||||
# UnicodeError, TypeError --> eventually raised in Python 2
|
||||
# AttributeError --> systematically raised in Python 3
|
||||
pass
|
||||
ipython_shell = get_ipython()
|
||||
try:
|
||||
# Get code from spyder
|
||||
cell_code = frontend_request(
|
||||
blocking=True).run_cell(cellname, filename)
|
||||
except Exception:
|
||||
_print("This command failed to be executed because an error occurred"
|
||||
" while trying to get the cell code from Spyder's"
|
||||
" editor. The error was:\n\n")
|
||||
get_ipython().showtraceback(exception_only=True)
|
||||
return
|
||||
|
||||
if not cell_code or cell_code.strip() == '':
|
||||
_print("Nothing to execute, this cell is empty.\n")
|
||||
return
|
||||
|
||||
# Trigger `post_execute` to exit the additional pre-execution.
|
||||
# See Spyder PR #7310.
|
||||
ipython_shell.events.trigger('post_execute')
|
||||
file_code = get_file_code(filename, save_all=False)
|
||||
|
||||
# Here the remote filename has been used. It must now be valid locally.
|
||||
if canonic_filename is not None:
|
||||
filename = canonic_filename
|
||||
else:
|
||||
# Normalise the filename
|
||||
filename = canonic(filename)
|
||||
|
||||
with NamespaceManager(filename, current_namespace=True,
|
||||
file_code=file_code, stack_depth=stack_depth + 1
|
||||
) as (ns_globals, ns_locals):
|
||||
return exec_code(cell_code, filename, ns_globals, ns_locals,
|
||||
post_mortem=post_mortem, exec_fun=exec_fun,
|
||||
capture_last_expression=True)
|
||||
|
||||
|
||||
builtins.runcell = runcell
|
||||
|
||||
|
||||
def debugcell(cellname, filename=None, post_mortem=False):
|
||||
"""Debug a cell."""
|
||||
# Tell IPython to hide this frame (>7.16)
|
||||
__tracebackhide__ = True
|
||||
if filename is None:
|
||||
filename = get_current_file_name()
|
||||
if filename is None:
|
||||
return
|
||||
|
||||
shell = get_ipython()
|
||||
if shell.is_debugging():
|
||||
# Recursive
|
||||
code = (
|
||||
"runcell({}, ".format(repr(cellname)) +
|
||||
"{})".format(repr(filename))
|
||||
)
|
||||
shell.pdb_session.enter_recursive_debugger(
|
||||
code, filename, False,
|
||||
)
|
||||
else:
|
||||
debugger = get_new_debugger(filename, False)
|
||||
_exec_cell(
|
||||
cellname=cellname,
|
||||
filename=filename,
|
||||
canonic_filename=debugger.canonic(filename),
|
||||
exec_fun=debugger.run,
|
||||
stack_depth=1
|
||||
)
|
||||
|
||||
|
||||
builtins.debugcell = debugcell
|
||||
|
||||
|
||||
def cell_count(filename=None):
|
||||
"""
|
||||
Get the number of cells in a file.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
filename : str
|
||||
The file to get the cells from. If None, the currently opened file.
|
||||
"""
|
||||
if filename is None:
|
||||
filename = get_current_file_name()
|
||||
if filename is None:
|
||||
raise RuntimeError('Could not get cell count from frontend.')
|
||||
try:
|
||||
# Get code from spyder
|
||||
cell_count = frontend_request(blocking=True).cell_count(filename)
|
||||
return cell_count
|
||||
except Exception:
|
||||
etype, error, tb = sys.exc_info()
|
||||
raise etype(error)
|
||||
|
||||
|
||||
builtins.cell_count = cell_count
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# PYTHONPATH and sys.path Adjustments
|
||||
# =============================================================================
|
||||
# PYTHONPATH is not passed to kernel directly, see spyder-ide/spyder#13519
|
||||
# This allows the kernel to start without crashing if modules in PYTHONPATH
|
||||
# shadow standard library modules.
|
||||
def set_spyder_pythonpath():
|
||||
pypath = os.environ.get('SPY_PYTHONPATH')
|
||||
if pypath:
|
||||
sys.path.extend(pypath.split(os.pathsep))
|
||||
os.environ.update({'PYTHONPATH': pypath})
|
||||
|
||||
set_spyder_pythonpath()
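# Illustrative sketch (assumed paths, not executed by the kernel): with
# SPY_PYTHONPATH set by Spyder before the kernel starts, the call above just
# splits it on os.pathsep, appends the entries to sys.path and mirrors the
# value into PYTHONPATH for child processes, e.g.:
#
#     os.environ['SPY_PYTHONPATH'] = os.pathsep.join(['/projects/libA', '/projects/libB'])
#     set_spyder_pythonpath()
#     # sys.path now ends with '/projects/libA' and '/projects/libB'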
|
||||
@@ -0,0 +1,975 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
|
||||
"""Spyder debugger."""
|
||||
|
||||
import ast
|
||||
import bdb
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
import threading
|
||||
from collections import namedtuple
|
||||
|
||||
from IPython.core.autocall import ZMQExitAutocall
|
||||
from IPython.core.debugger import Pdb as ipyPdb
|
||||
from IPython.core.getipython import get_ipython
|
||||
|
||||
from spyder_kernels.comms.frontendcomm import CommError, frontend_request
|
||||
from spyder_kernels.customize.utils import path_is_library, capture_last_Expr
|
||||
from spyder_kernels.py3compat import (
|
||||
TimeoutError, PY2, _print, isidentifier, PY3, input)
|
||||
|
||||
if not PY2:
|
||||
from IPython.core.inputtransformer2 import TransformerManager
|
||||
import builtins
|
||||
basestring = (str,)
|
||||
else:
|
||||
import __builtin__ as builtins
|
||||
from IPython.core.inputsplitter import IPythonInputSplitter as TransformerManager
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DebugWrapper(object):
|
||||
"""
|
||||
Notifies the frontend when debugging starts/stops
|
||||
"""
|
||||
def __init__(self, pdb_obj):
|
||||
self.pdb_obj = pdb_obj
|
||||
|
||||
def __enter__(self):
|
||||
"""
|
||||
Debugging starts.
|
||||
"""
|
||||
self.pdb_obj._frontend_notified = True
|
||||
try:
|
||||
frontend_request(blocking=True).set_debug_state(True)
|
||||
except (CommError, TimeoutError):
|
||||
logger.debug("Could not send debugging state to the frontend.")
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
"""
|
||||
Debugging ends.
|
||||
"""
|
||||
self.pdb_obj._frontend_notified = False
|
||||
try:
|
||||
frontend_request(blocking=True).set_debug_state(False)
|
||||
except (CommError, TimeoutError):
|
||||
logger.debug("Could not send debugging state to the frontend.")
|
||||
|
||||
|
||||
class SpyderPdb(ipyPdb, object): # Inherits `object` to call super() in PY2
|
||||
"""
|
||||
Extends Pdb to add features:
|
||||
|
||||
- Process IPython magics.
|
||||
- Accepts multiline input.
|
||||
- Better interrupt signal handling.
|
||||
- Option to skip libraries while stepping.
|
||||
- Add completion to non-command code.
|
||||
"""
|
||||
|
||||
send_initial_notification = True
|
||||
starting = True
|
||||
|
||||
def __init__(self, completekey='tab', stdin=None, stdout=None,
|
||||
skip=None, nosigint=False):
|
||||
"""Init Pdb."""
|
||||
self.curframe_locals = None
|
||||
# Only set to true when calling debugfile
|
||||
self.continue_if_has_breakpoints = False
|
||||
self.pdb_ignore_lib = False
|
||||
self.pdb_execute_events = False
|
||||
self.pdb_use_exclamation_mark = False
|
||||
self._exclamation_warning_printed = False
|
||||
self.pdb_stop_first_line = True
|
||||
self._disable_next_stack_entry = False
|
||||
super(SpyderPdb, self).__init__()
|
||||
self._pdb_breaking = False
|
||||
self._frontend_notified = False
|
||||
|
||||
# content of tuple: (filename, line number)
|
||||
self._previous_step = None
|
||||
|
||||
# Don't report hidden frames for IPython 7.24+. This attribute
|
||||
# has no effect in previous versions.
|
||||
self.report_skipped = False
|
||||
|
||||
|
||||
# Keep track of remote filename
|
||||
self.remote_filename = None
|
||||
|
||||
# Line received from the frontend
|
||||
self._cmd_input_line = None
|
||||
|
||||
# This is not available in IPython 5
|
||||
if hasattr(self, '_predicates'):
|
||||
# Turn off IPython's debugger skip functionality by default because
|
||||
# it makes our debugger quite slow. It's also important to remark
|
||||
# that this functionality doesn't do anything on its own. Users
|
||||
# need to mark what frames they want to skip for it to be useful.
|
||||
# So, we hope that knowledgeable users will find that they need to
|
||||
# enable it in Spyder.
|
||||
# Fixes spyder-ide/spyder#20639.
|
||||
self._predicates["debuggerskip"] = False
|
||||
|
||||
# --- Methods overridden for code execution
|
||||
def print_exclamation_warning(self):
|
||||
"""Print pdb warning for exclamation mark."""
|
||||
if not self._exclamation_warning_printed:
|
||||
print("Warning: The exclamation mark option is enabled. "
|
||||
"Please use '!' as a prefix for Pdb commands.")
|
||||
self._exclamation_warning_printed = True
|
||||
|
||||
def default(self, line):
|
||||
"""
|
||||
Default way of running a pdb statement.
|
||||
"""
|
||||
execute_events = self.pdb_execute_events
|
||||
if line[:1] == '!':
|
||||
line = line[1:]
|
||||
elif self.pdb_use_exclamation_mark:
|
||||
self.print_exclamation_warning()
|
||||
self.error("Unknown command '" + line.split()[0] + "'")
|
||||
return
|
||||
# Disallow the use of %debug magic in the debugger
|
||||
if line.startswith("%debug"):
|
||||
self.error("Please don't use '%debug' in the debugger.\n"
|
||||
"For a recursive debugger, use the pdb 'debug'"
|
||||
" command instead")
|
||||
return
|
||||
locals = self.curframe_locals
|
||||
globals = self.curframe.f_globals
|
||||
|
||||
if self.pdb_use_exclamation_mark:
|
||||
# Find pdb commands executed without !
|
||||
cmd, arg, line = self.parseline(line)
|
||||
if cmd:
|
||||
cmd_in_namespace = (
|
||||
cmd in globals
|
||||
or cmd in locals
|
||||
or cmd in builtins.__dict__
|
||||
)
|
||||
# Special case for quit and exit
|
||||
if cmd in ("quit", "exit"):
|
||||
if cmd in globals and isinstance(
|
||||
globals[cmd], ZMQExitAutocall):
|
||||
# Use the pdb call
|
||||
cmd_in_namespace = False
|
||||
cmd_func = getattr(self, 'do_' + cmd, None)
|
||||
is_pdb_cmd = cmd_func is not None
|
||||
# Look for assignment
|
||||
is_assignment = False
|
||||
try:
|
||||
for node in ast.walk(ast.parse(line)):
|
||||
if isinstance(node, ast.Assign):
|
||||
is_assignment = True
|
||||
break
|
||||
except SyntaxError:
|
||||
pass
|
||||
|
||||
if is_pdb_cmd:
|
||||
if not cmd_in_namespace and not is_assignment:
|
||||
# This is a pdb command without the '!' prefix.
|
||||
self.lastcmd = line
|
||||
return cmd_func(arg)
|
||||
else:
|
||||
# The pdb command is masked by something
|
||||
self.print_exclamation_warning()
|
||||
try:
|
||||
line = TransformerManager().transform_cell(line)
|
||||
save_stdout = sys.stdout
|
||||
save_stdin = sys.stdin
|
||||
save_displayhook = sys.displayhook
|
||||
try:
|
||||
sys.stdin = self.stdin
|
||||
sys.stdout = self.stdout
|
||||
sys.displayhook = self.displayhook
|
||||
if execute_events:
|
||||
get_ipython().events.trigger('pre_execute')
|
||||
|
||||
code_ast = ast.parse(line)
|
||||
|
||||
if line.rstrip()[-1:] == ";":
|
||||
# Suppress output with ;
|
||||
capture_last_expression = False
|
||||
else:
|
||||
code_ast, capture_last_expression = capture_last_Expr(
|
||||
code_ast, "_spyderpdb_out")
|
||||
|
||||
if locals is not globals:
|
||||
# Mitigates a behaviour of CPython that makes it difficult
|
||||
# to work with exec and the local namespace
|
||||
# See:
|
||||
# - https://bugs.python.org/issue41918
|
||||
# - https://bugs.python.org/issue46153
|
||||
# - https://bugs.python.org/issue21161
|
||||
# - spyder-ide/spyder#13909
|
||||
# - spyder-ide/spyder-kernels#345
|
||||
#
|
||||
# The idea here is that the best way to emulate being in a
|
||||
# function is to actually execute the code in a function.
|
||||
# A function called `_spyderpdb_code` is created and
|
||||
# called. It will first load the locals, execute the code,
|
||||
# and then update the locals.
|
||||
#
|
||||
# One limitation of this approach is that locals() is only
|
||||
# a copy of the curframe locals. This means that closures
|
||||
# for example are early binding instead of late binding.
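# Illustrative sketch of the generated source (an assumption, shown here
# for two locals `x` and `y` and the user typing `z = x + y`):
#
#     def _spyderpdb_code():
#         x = _spyderpdb_locals['x']
#         y = _spyderpdb_locals['y']
#         z = x + y                    # user code injected via the AST below
#         _spyderpdb_locals.update(_spyderpdb_builtins_locals())
#     _spyderpdb_code()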
|
||||
|
||||
# Create a function
|
||||
indent = " "
|
||||
code = ["def _spyderpdb_code():"]
|
||||
|
||||
# Load the locals
|
||||
globals["_spyderpdb_builtins_locals"] = builtins.locals
|
||||
|
||||
# Save builtins locals in case it is shadowed
|
||||
globals["_spyderpdb_locals"] = locals
|
||||
|
||||
# Load locals if they have a valid name
|
||||
# In comprehensions, locals could contain ".0" for example
|
||||
code += [indent + "{k} = _spyderpdb_locals['{k}']".format(
|
||||
k=k) for k in locals if isidentifier(k)]
|
||||
|
||||
|
||||
# Update the locals
|
||||
code += [indent + "_spyderpdb_locals.update("
|
||||
"_spyderpdb_builtins_locals())"]
|
||||
|
||||
# Run the function
|
||||
code += ["_spyderpdb_code()"]
|
||||
|
||||
# Cleanup
|
||||
code += [
|
||||
"del _spyderpdb_code",
|
||||
"del _spyderpdb_locals",
|
||||
"del _spyderpdb_builtins_locals"
|
||||
]
|
||||
|
||||
# Parse the function
|
||||
fun_ast = ast.parse('\n'.join(code) + '\n')
|
||||
|
||||
# Inject code_ast in the function before the locals update
|
||||
fun_ast.body[0].body = (
|
||||
fun_ast.body[0].body[:-1] # The locals
|
||||
+ code_ast.body # Code to run
|
||||
+ fun_ast.body[0].body[-1:] # Locals update
|
||||
)
|
||||
code_ast = fun_ast
|
||||
|
||||
exec(compile(code_ast, "<stdin>", "exec"), globals)
|
||||
|
||||
if capture_last_expression:
|
||||
out = globals.pop("_spyderpdb_out", None)
|
||||
if out is not None:
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
try:
|
||||
frontend_request(blocking=False).show_pdb_output(
|
||||
repr(out))
|
||||
except (CommError, TimeoutError):
|
||||
# Fallback
|
||||
print("pdb out> ", repr(out))
|
||||
|
||||
finally:
|
||||
if execute_events:
|
||||
get_ipython().events.trigger('post_execute')
|
||||
sys.stdout = save_stdout
|
||||
sys.stdin = save_stdin
|
||||
sys.displayhook = save_displayhook
|
||||
except BaseException:
|
||||
if PY2:
|
||||
t, v = sys.exc_info()[:2]
|
||||
if type(t) == type(''):
|
||||
exc_type_name = t
|
||||
else: exc_type_name = t.__name__
|
||||
print >>self.stdout, '***', exc_type_name + ':', v
|
||||
else:
|
||||
exc_info = sys.exc_info()[:2]
|
||||
self.error(
|
||||
traceback.format_exception_only(*exc_info)[-1].strip())
|
||||
|
||||
# --- Methods overridden for signal handling
|
||||
def sigint_handler(self, signum, frame):
|
||||
"""
|
||||
Handle a sigint signal. Break on the frame above this one.
|
||||
|
||||
This method is not present in python2 so this won't be called there.
|
||||
"""
|
||||
if self.allow_kbdint:
|
||||
raise KeyboardInterrupt
|
||||
self.message("\nProgram interrupted. (Use 'cont' to resume).")
|
||||
# avoid stopping in set_trace
|
||||
sys.settrace(None)
|
||||
self._pdb_breaking = True
|
||||
self.set_step()
|
||||
self.set_trace(sys._getframe())
|
||||
|
||||
def interaction(self, frame, traceback):
|
||||
"""
|
||||
Called when a user interaction is required.
|
||||
|
||||
If this is from sigint, break on the upper frame.
|
||||
If the frame is in spydercustomize.py, quit.
|
||||
Notify Spyder and print the current code.
|
||||
"""
|
||||
if self._pdb_breaking:
|
||||
self._pdb_breaking = False
|
||||
if frame and frame.f_back:
|
||||
return self.interaction(frame.f_back, traceback)
|
||||
|
||||
# This is necessary to handle chained exceptions in Pdb, support for
|
||||
# which was added in IPython 8.15 and will be the default in Python
|
||||
# 3.13 (see ipython/ipython#14146).
|
||||
if isinstance(traceback, BaseException):
|
||||
_chained_exceptions, tb = self._get_tb_and_exceptions(traceback)
|
||||
|
||||
with self._hold_exceptions(_chained_exceptions):
|
||||
self.interaction(frame, tb)
|
||||
|
||||
return
|
||||
|
||||
self.setup(frame, traceback)
|
||||
self.print_stack_entry(self.stack[self.curindex])
|
||||
|
||||
if self._frontend_notified:
|
||||
self._cmdloop()
|
||||
else:
|
||||
with DebugWrapper(self):
|
||||
self._cmdloop()
|
||||
|
||||
self.forget()
|
||||
|
||||
def print_stack_entry(self, frame_lineno, prompt_prefix='\n-> ',
|
||||
context=None):
|
||||
"""Disable printing stack entry if requested."""
|
||||
if self._disable_next_stack_entry:
|
||||
self._disable_next_stack_entry = False
|
||||
return
|
||||
return super(SpyderPdb, self).print_stack_entry(
|
||||
frame_lineno, prompt_prefix, context)
|
||||
|
||||
# --- Methods overridden for skipping libraries
|
||||
def stop_here(self, frame):
|
||||
"""Check if pdb should stop here."""
|
||||
if (frame is not None
|
||||
and "__tracebackhide__" in frame.f_locals
|
||||
and frame.f_locals["__tracebackhide__"] == "__pdb_exit__"):
|
||||
self.onecmd('exit')
|
||||
return False
|
||||
|
||||
if not super(SpyderPdb, self).stop_here(frame):
|
||||
return False
|
||||
filename = frame.f_code.co_filename
|
||||
if filename.startswith('<'):
|
||||
# This is not a file
|
||||
return True
|
||||
if self.pdb_ignore_lib and path_is_library(filename):
|
||||
return False
|
||||
return True
|
||||
|
||||
def do_where(self, arg):
|
||||
"""w(here)
|
||||
Print a stack trace, with the most recent frame at the bottom.
|
||||
An arrow indicates the "current frame", which determines the
|
||||
context of most commands. 'bt' is an alias for this command.
|
||||
|
||||
Take a number as an (optional) argument for the number of context lines to
|
||||
print"""
|
||||
super(SpyderPdb, self).do_where(arg)
|
||||
try:
|
||||
frontend_request(blocking=False).do_where()
|
||||
except (CommError, TimeoutError):
|
||||
logger.debug("Could not send where request to the frontend.")
|
||||
|
||||
do_w = do_where
|
||||
|
||||
do_bt = do_where
|
||||
|
||||
# --- Method defined by us to respond to ipython complete protocol
|
||||
def do_complete(self, code, cursor_pos):
|
||||
"""
|
||||
Respond to a complete request.
|
||||
"""
|
||||
if self.pdb_use_exclamation_mark:
|
||||
return self._complete_exclamation(code, cursor_pos)
|
||||
else:
|
||||
return self._complete_default(code, cursor_pos)
|
||||
|
||||
def _complete_default(self, code, cursor_pos):
|
||||
"""
|
||||
Respond to a complete request if not pdb_use_exclamation_mark.
|
||||
"""
|
||||
if cursor_pos is None:
|
||||
cursor_pos = len(code)
|
||||
|
||||
# Get text to complete
|
||||
text = code[:cursor_pos].split(' ')[-1]
|
||||
# Choose Pdb function to complete, based on cmd.py
|
||||
origline = code
|
||||
line = origline.lstrip()
|
||||
if not line:
|
||||
# Nothing to complete
|
||||
return
|
||||
stripped = len(origline) - len(line)
|
||||
begidx = cursor_pos - len(text) - stripped
|
||||
endidx = cursor_pos - stripped
|
||||
|
||||
compfunc = None
|
||||
ipython_do_complete = True
|
||||
if begidx > 0:
|
||||
# This could be after a Pdb command
|
||||
cmd, args, _ = self.parseline(line)
|
||||
if cmd != '':
|
||||
try:
|
||||
# Function to complete Pdb command arguments
|
||||
compfunc = getattr(self, 'complete_' + cmd)
|
||||
# Don't call ipython do_complete for commands
|
||||
ipython_do_complete = False
|
||||
except AttributeError:
|
||||
pass
|
||||
elif line[0] != '!':
|
||||
# This could be a Pdb command
|
||||
compfunc = self.completenames
|
||||
|
||||
def is_name_or_composed(text):
|
||||
if not text or text[0] == '.':
|
||||
return False
|
||||
# We want to keep value.subvalue
|
||||
return isidentifier(text.replace('.', ''))
|
||||
|
||||
while text and not is_name_or_composed(text):
|
||||
text = text[1:]
|
||||
begidx += 1
|
||||
|
||||
matches = []
|
||||
if compfunc:
|
||||
matches = compfunc(text, line, begidx, endidx)
|
||||
|
||||
cursor_start = cursor_pos - len(text)
|
||||
|
||||
if ipython_do_complete:
|
||||
kernel = get_ipython().kernel
|
||||
# Make complete call with current frame
|
||||
if self.curframe:
|
||||
if self.curframe_locals:
|
||||
Frame = namedtuple("Frame", ["f_locals", "f_globals"])
|
||||
frame = Frame(self.curframe_locals,
|
||||
self.curframe.f_globals)
|
||||
else:
|
||||
frame = self.curframe
|
||||
kernel.shell.set_completer_frame(frame)
|
||||
result = kernel._do_complete(code, cursor_pos)
|
||||
# Reset frame
|
||||
kernel.shell.set_completer_frame()
|
||||
# If there is no Pdb results to merge, return the result
|
||||
if not compfunc:
|
||||
return result
|
||||
|
||||
ipy_matches = result['matches']
|
||||
# Make sure both match lists start at the same place
|
||||
if cursor_start < result['cursor_start']:
|
||||
# Fill IPython matches
|
||||
missing_txt = code[cursor_start:result['cursor_start']]
|
||||
ipy_matches = [missing_txt + m for m in ipy_matches]
|
||||
elif result['cursor_start'] < cursor_start:
|
||||
# Fill Pdb matches
|
||||
missing_txt = code[result['cursor_start']:cursor_start]
|
||||
matches = [missing_txt + m for m in matches]
|
||||
cursor_start = result['cursor_start']
|
||||
|
||||
# Add Pdb-specific matches
|
||||
matches += [match for match in ipy_matches if match not in matches]
|
||||
|
||||
return {'matches': matches,
|
||||
'cursor_end': cursor_pos,
|
||||
'cursor_start': cursor_start,
|
||||
'metadata': {},
|
||||
'status': 'ok'}
|
||||
|
||||
def _complete_exclamation(self, code, cursor_pos):
|
||||
"""
|
||||
Respond to a complete request if pdb_use_exclamation_mark.
|
||||
"""
|
||||
if cursor_pos is None:
|
||||
cursor_pos = len(code)
|
||||
|
||||
# Get text to complete
|
||||
text = code[:cursor_pos].split(' ')[-1]
|
||||
# Choose Pdb function to complete, based on cmd.py
|
||||
origline = code
|
||||
line = origline.lstrip()
|
||||
if not line:
|
||||
# Nothing to complete
|
||||
return
|
||||
is_pdb_command = line[0] == '!'
|
||||
is_pdb_command_name = False
|
||||
|
||||
stripped = len(origline) - len(line)
|
||||
begidx = cursor_pos - len(text) - stripped
|
||||
endidx = cursor_pos - stripped
|
||||
|
||||
compfunc = None
|
||||
|
||||
if is_pdb_command:
|
||||
line = line[1:]
|
||||
begidx -= 1
|
||||
endidx -= 1
|
||||
if begidx == -1:
|
||||
is_pdb_command_name = True
|
||||
text = text[1:]
|
||||
begidx += 1
|
||||
compfunc = self.completenames
|
||||
else:
|
||||
cmd, args, _ = self.parseline(line)
|
||||
if cmd != '':
|
||||
try:
|
||||
# Function to complete Pdb command arguments
|
||||
compfunc = getattr(self, 'complete_' + cmd)
|
||||
except AttributeError:
|
||||
# This command doesn't exist, nothing to complete
|
||||
return
|
||||
else:
|
||||
# We don't know this command
|
||||
return
|
||||
|
||||
if not is_pdb_command_name:
|
||||
# Remove e.g. a leading opening parenthesis
|
||||
def is_name_or_composed(text):
|
||||
if not text or text[0] == '.':
|
||||
return False
|
||||
# We want to keep value.subvalue
|
||||
return isidentifier(text.replace('.', ''))
|
||||
|
||||
while text and not is_name_or_composed(text):
|
||||
text = text[1:]
|
||||
begidx += 1
|
||||
|
||||
cursor_start = cursor_pos - len(text)
|
||||
matches = []
|
||||
if is_pdb_command:
|
||||
matches = compfunc(text, line, begidx, endidx)
|
||||
return {
|
||||
'matches': matches,
|
||||
'cursor_end': cursor_pos,
|
||||
'cursor_start': cursor_start,
|
||||
'metadata': {},
|
||||
'status': 'ok'
|
||||
}
|
||||
|
||||
kernel = get_ipython().kernel
|
||||
# Make complete call with current frame
|
||||
if self.curframe:
|
||||
if self.curframe_locals:
|
||||
Frame = namedtuple("Frame", ["f_locals", "f_globals"])
|
||||
frame = Frame(self.curframe_locals,
|
||||
self.curframe.f_globals)
|
||||
else:
|
||||
frame = self.curframe
|
||||
kernel.shell.set_completer_frame(frame)
|
||||
result = kernel._do_complete(code, cursor_pos)
|
||||
# Reset frame
|
||||
kernel.shell.set_completer_frame()
|
||||
return result
|
||||
|
||||
# --- Methods overridden by us for Spyder integration
|
||||
def postloop(self):
|
||||
# postloop() is called when the debugger's input prompt exits. Reset
|
||||
# _previous_step so that publish_pdb_state() actually notifies Spyder
|
||||
# about a changed frame the next time the input prompt is entered.
|
||||
self._previous_step = None
|
||||
|
||||
def preloop(self):
|
||||
"""Ask Spyder for breakpoints before the first prompt is created."""
|
||||
try:
|
||||
pdb_settings = frontend_request(blocking=True).get_pdb_settings()
|
||||
self.pdb_ignore_lib = pdb_settings['pdb_ignore_lib']
|
||||
self.pdb_execute_events = pdb_settings['pdb_execute_events']
|
||||
self.pdb_use_exclamation_mark = pdb_settings[
|
||||
'pdb_use_exclamation_mark']
|
||||
self.pdb_stop_first_line = pdb_settings['pdb_stop_first_line']
|
||||
if self.starting:
|
||||
self.set_spyder_breakpoints(pdb_settings['breakpoints'])
|
||||
if self.send_initial_notification:
|
||||
self.publish_pdb_state()
|
||||
except (CommError, TimeoutError):
|
||||
logger.debug("Could not get breakpoints from the frontend.")
|
||||
super(SpyderPdb, self).preloop()
|
||||
|
||||
def set_continue(self):
|
||||
"""
|
||||
Stop only at breakpoints or when finished.
|
||||
|
||||
Reimplemented to avoid stepping out of debugging if there are no
|
||||
breakpoints. We could add more later.
|
||||
"""
|
||||
# Don't stop except at breakpoints or when finished
|
||||
self._set_stopinfo(self.botframe, None, -1)
|
||||
|
||||
def reset(self):
|
||||
"""
|
||||
Register Pdb session after reset.
|
||||
"""
|
||||
super(SpyderPdb, self).reset()
|
||||
get_ipython().pdb_session = self
|
||||
|
||||
def do_debug(self, arg):
|
||||
"""
|
||||
Debug code
|
||||
|
||||
Enter a recursive debugger that steps through the code
|
||||
argument (which is an arbitrary expression or statement to be
|
||||
executed in the current environment).
|
||||
"""
|
||||
try:
|
||||
super(SpyderPdb, self).do_debug(arg)
|
||||
except Exception:
|
||||
if PY2:
|
||||
t, v = sys.exc_info()[:2]
|
||||
if type(t) == type(''):
|
||||
exc_type_name = t
|
||||
else: exc_type_name = t.__name__
|
||||
print >>self.stdout, '***', exc_type_name + ':', v
|
||||
else:
|
||||
exc_info = sys.exc_info()[:2]
|
||||
self.error(
|
||||
traceback.format_exception_only(*exc_info)[-1].strip())
|
||||
get_ipython().pdb_session = self
|
||||
|
||||
def user_return(self, frame, return_value):
|
||||
"""This function is called when a return trap is set here."""
|
||||
# This is useful when debugging in an active interpreter (otherwise,
|
||||
# the debugger will stop before reaching the target file)
|
||||
if self._wait_for_mainpyfile:
|
||||
if (self.mainpyfile != self.canonic(frame.f_code.co_filename)
|
||||
or frame.f_lineno <= 0):
|
||||
return
|
||||
self._wait_for_mainpyfile = False
|
||||
super(SpyderPdb, self).user_return(frame, return_value)
|
||||
|
||||
def _cmdloop(self):
|
||||
"""Modifies the error text."""
|
||||
while True:
|
||||
try:
|
||||
# keyboard interrupts allow for an easy way to cancel
|
||||
# the current command, so allow them during interactive input
|
||||
self.allow_kbdint = True
|
||||
self.cmdloop()
|
||||
self.allow_kbdint = False
|
||||
break
|
||||
except KeyboardInterrupt:
|
||||
_print("--KeyboardInterrupt--\n"
|
||||
"For copying text while debugging, use Ctrl+Shift+C",
|
||||
file=self.stdout)
|
||||
|
||||
|
||||
def cmdloop(self, intro=None):
|
||||
"""
|
||||
Repeatedly issue a prompt, accept input, parse an initial prefix
|
||||
off the received input, and dispatch to action methods, passing them
|
||||
the remainder of the line as argument.
|
||||
"""
|
||||
self.preloop()
|
||||
if intro is not None:
|
||||
self.intro = intro
|
||||
if self.intro:
|
||||
self.stdout.write(str(self.intro)+"\n")
|
||||
stop = None
|
||||
while not stop:
|
||||
if self.cmdqueue:
|
||||
line = self.cmdqueue.pop(0)
|
||||
else:
|
||||
try:
|
||||
line = self.cmd_input(self.prompt)
|
||||
except EOFError:
|
||||
line = 'EOF'
|
||||
line = self.precmd(line)
|
||||
stop = self.onecmd(line)
|
||||
stop = self.postcmd(stop, line)
|
||||
self.postloop()
|
||||
|
||||
def cmd_input(self, prompt=''):
|
||||
"""
|
||||
Get input from the frontend. Blocks until a line is received.
|
||||
"""
|
||||
kernel = get_ipython().kernel
|
||||
# Only works if the comm is open
|
||||
if not kernel.frontend_comm.is_open():
|
||||
return input(prompt)
|
||||
|
||||
# Flush output before making the request.
|
||||
sys.stderr.flush()
|
||||
sys.stdout.flush()
|
||||
sys.__stderr__.flush()
|
||||
sys.__stdout__.flush()
|
||||
|
||||
# Send the input request.
|
||||
self._cmd_input_line = None
|
||||
kernel.frontend_call().pdb_input(prompt)
|
||||
|
||||
# Allow GUI event loop to update
|
||||
if PY3:
|
||||
is_main_thread = (
|
||||
threading.current_thread() is threading.main_thread())
|
||||
else:
|
||||
is_main_thread = isinstance(
|
||||
threading.current_thread(), threading._MainThread)
|
||||
|
||||
# Get input by running eventloop
|
||||
if is_main_thread and kernel.eventloop:
|
||||
while self._cmd_input_line is None:
|
||||
eventloop = kernel.eventloop
|
||||
# Check if the current backend is Tk on Windows
|
||||
# to let GUI update.
|
||||
# See spyder-ide/spyder#17523
|
||||
if (eventloop and hasattr(kernel, "app_wrapper") and
|
||||
os.name == "nt"):
|
||||
kernel.app_wrapper.app.update()
|
||||
elif eventloop:
|
||||
eventloop(kernel)
|
||||
else:
|
||||
break
|
||||
|
||||
# Get input by blocking
|
||||
if self._cmd_input_line is None:
|
||||
kernel.frontend_comm.wait_until(
|
||||
lambda: self._cmd_input_line is not None)
|
||||
|
||||
return self._cmd_input_line
|
||||
|
||||
def precmd(self, line):
|
||||
"""
|
||||
Hook method executed just before the command line is
|
||||
interpreted, but after the input prompt is generated and issued.
|
||||
|
||||
Here we switch ! and non !
|
||||
"""
|
||||
if not self.pdb_use_exclamation_mark:
|
||||
return line
|
||||
if not line:
|
||||
return line
|
||||
if line[0] == '!':
|
||||
line = line[1:]
|
||||
else:
|
||||
line = '!' + line
|
||||
return line
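# Example of the switch when pdb_use_exclamation_mark is enabled (a sketch):
#
#     "!next"  -> "next"    # dispatched as the Pdb command
#     "x = 1"  -> "!x = 1"  # default() strips the '!' and runs it as Python
#                           # code in the current frame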
|
||||
|
||||
def postcmd(self, stop, line):
|
||||
"""Hook method executed just after a command dispatch is finished."""
|
||||
# Flush in case the command produced output on underlying outputs
|
||||
sys.__stderr__.flush()
|
||||
sys.__stdout__.flush()
|
||||
self.publish_pdb_state()
|
||||
return super(SpyderPdb, self).postcmd(stop, line)
|
||||
|
||||
if PY2:
|
||||
def break_here(self, frame):
|
||||
"""
|
||||
Breakpoints don't work for files with non-ascii chars in Python 2
|
||||
|
||||
Fixes Issue 1484
|
||||
"""
|
||||
from bdb import effective
|
||||
filename = self.canonic(frame.f_code.co_filename)
|
||||
try:
|
||||
filename = unicode(filename, "utf-8")
|
||||
except TypeError:
|
||||
pass
|
||||
if filename not in self.breaks:
|
||||
return False
|
||||
lineno = frame.f_lineno
|
||||
if lineno not in self.breaks[filename]:
|
||||
# The line itself has no breakpoint, but maybe the line is the
|
||||
# first line of a function with breakpoint set by function name
|
||||
lineno = frame.f_code.co_firstlineno
|
||||
if lineno not in self.breaks[filename]:
|
||||
return False
|
||||
|
||||
# flag says ok to delete temp. bp
|
||||
(bp, flag) = effective(filename, lineno, frame)
|
||||
if bp:
|
||||
self.currentbp = bp.number
|
||||
if (flag and bp.temporary):
|
||||
self.do_clear(str(bp.number))
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
# --- Methods defined by us for Spyder integration
|
||||
def set_spyder_breakpoints(self, breakpoints):
|
||||
"""Set Spyder breakpoints."""
|
||||
self.clear_all_breaks()
|
||||
# -----Really deleting all breakpoints:
|
||||
for bp in bdb.Breakpoint.bpbynumber:
|
||||
if bp:
|
||||
bp.deleteMe()
|
||||
bdb.Breakpoint.next = 1
|
||||
bdb.Breakpoint.bplist = {}
|
||||
bdb.Breakpoint.bpbynumber = [None]
|
||||
# -----
|
||||
for fname, data in list(breakpoints.items()):
|
||||
for linenumber, condition in data:
|
||||
try:
|
||||
self.set_break(self.canonic(fname), linenumber,
|
||||
cond=condition)
|
||||
except ValueError:
|
||||
# Fixes spyder/issues/15546
|
||||
# The file is not readable
|
||||
pass
|
||||
|
||||
# Jump to first breakpoint.
|
||||
# Fixes issue 2034
|
||||
if self.starting:
|
||||
# Only run this after a Pdb session is created
|
||||
self.starting = False
|
||||
|
||||
# Get all breakpoints for the file we're going to debug
|
||||
frame = self.curframe
|
||||
if not frame:
|
||||
# We are not debugging, return. Solves #10290
|
||||
return
|
||||
lineno = frame.f_lineno
|
||||
breaks = self.get_file_breaks(frame.f_code.co_filename)
|
||||
|
||||
# Do 'continue' if the first breakpoint is *not* placed
|
||||
# where the debugger is going to land.
|
||||
# Fixes issue 4681
|
||||
if self.pdb_stop_first_line:
|
||||
do_continue = (
|
||||
self.continue_if_has_breakpoints
|
||||
and breaks
|
||||
and lineno < breaks[0])
|
||||
else:
|
||||
# The breakpoint could be in another file.
|
||||
do_continue = (
|
||||
self.continue_if_has_breakpoints
|
||||
and not (breaks and lineno >= breaks[0]))
|
||||
|
||||
if do_continue:
|
||||
try:
|
||||
if self.pdb_use_exclamation_mark:
|
||||
cont_cmd = '!continue'
|
||||
else:
|
||||
cont_cmd = 'continue'
|
||||
frontend_request(blocking=False).pdb_execute(cont_cmd)
|
||||
except (CommError, TimeoutError):
|
||||
logger.debug(
|
||||
"Could not send a Pdb continue call to the frontend.")
|
||||
|
||||
def publish_pdb_state(self):
|
||||
"""
|
||||
Send debugger state (frame position) to the frontend.
|
||||
|
||||
The state is only sent if it has changed since the last update.
|
||||
"""
|
||||
|
||||
frame = self.curframe
|
||||
if frame is None:
|
||||
self._previous_step = None
|
||||
return
|
||||
|
||||
# Get filename and line number of the current frame
|
||||
fname = self.canonic(frame.f_code.co_filename)
|
||||
if PY2:
|
||||
try:
|
||||
fname = unicode(fname, "utf-8")
|
||||
except TypeError:
|
||||
pass
|
||||
if fname == self.mainpyfile and self.remote_filename is not None:
|
||||
fname = self.remote_filename
|
||||
lineno = frame.f_lineno
|
||||
|
||||
if self._previous_step == (fname, lineno):
|
||||
return
|
||||
|
||||
# Set step of the current frame (if any)
|
||||
step = {}
|
||||
self._previous_step = None
|
||||
if isinstance(fname, basestring) and isinstance(lineno, int):
|
||||
step = dict(fname=fname, lineno=lineno)
|
||||
self._previous_step = (fname, lineno)
|
||||
|
||||
try:
|
||||
frontend_request(blocking=False).pdb_state(dict(step=step))
|
||||
except (CommError, TimeoutError):
|
||||
logger.debug("Could not send Pdb state to the frontend.")
|
||||
|
||||
def run(self, cmd, globals=None, locals=None):
|
||||
"""Debug a statement executed via the exec() function.
|
||||
|
||||
globals defaults to __main__.__dict__; locals defaults to globals.
|
||||
"""
|
||||
self.starting = True
|
||||
with DebugWrapper(self):
|
||||
super(SpyderPdb, self).run(cmd, globals, locals)
|
||||
|
||||
def runeval(self, expr, globals=None, locals=None):
|
||||
"""Debug an expression executed via the eval() function.
|
||||
|
||||
globals defaults to __main__.__dict__; locals defaults to globals.
|
||||
"""
|
||||
self.starting = True
|
||||
with DebugWrapper(self):
|
||||
super(SpyderPdb, self).runeval(expr, globals, locals)
|
||||
|
||||
def runcall(self, *args, **kwds):
|
||||
"""Debug a single function call.
|
||||
|
||||
Return the result of the function call.
|
||||
"""
|
||||
self.starting = True
|
||||
with DebugWrapper(self):
|
||||
super(SpyderPdb, self).runcall(*args, **kwds)
|
||||
|
||||
def enter_recursive_debugger(self, code, filename,
|
||||
continue_if_has_breakpoints):
|
||||
"""
|
||||
Enter debugger recursively.
|
||||
"""
|
||||
sys.settrace(None)
|
||||
globals = self.curframe.f_globals
|
||||
locals = self.curframe_locals
|
||||
# Create child debugger
|
||||
debugger = SpyderPdb(
|
||||
completekey=self.completekey,
|
||||
stdin=self.stdin, stdout=self.stdout)
|
||||
debugger.use_rawinput = self.use_rawinput
|
||||
debugger.prompt = "(%s) " % self.prompt.strip()
|
||||
|
||||
debugger.set_remote_filename(filename)
|
||||
debugger.continue_if_has_breakpoints = continue_if_has_breakpoints
|
||||
|
||||
# Enter recursive debugger
|
||||
sys.call_tracing(debugger.run, (code, globals, locals))
|
||||
# Reset parent debugger
|
||||
sys.settrace(self.trace_dispatch)
|
||||
self.lastcmd = debugger.lastcmd
|
||||
get_ipython().pdb_session = self
|
||||
|
||||
# Reset _previous_step so that publish_pdb_state() called from within
|
||||
# postcmd() notifies Spyder about a changed debugger position. The reset
|
||||
# is required because the recursive debugger might change the position,
|
||||
# but the parent debugger (self) is not aware of this.
|
||||
self._previous_step = None
|
||||
|
||||
def set_remote_filename(self, filename):
|
||||
"""Set remote filename to signal Spyder on mainpyfile."""
|
||||
self.remote_filename = filename
|
||||
self.mainpyfile = self.canonic(filename)
|
||||
self._wait_for_mainpyfile = True
|
||||
|
||||
|
||||
def get_new_debugger(filename, continue_if_has_breakpoints):
|
||||
"""Get a new debugger."""
|
||||
debugger = SpyderPdb()
|
||||
debugger.set_remote_filename(filename)
|
||||
debugger.continue_if_has_breakpoints = continue_if_has_breakpoints
|
||||
return debugger
|
||||
@@ -0,0 +1,9 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""Tests for spydercustomize."""
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,121 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""Tests for the User Module Reloader."""
|
||||
|
||||
# Stdlib imports
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Third party imports
|
||||
import pytest
|
||||
|
||||
# Local imports
|
||||
from spyder_kernels.py3compat import to_text_string
|
||||
from spyder_kernels.customize.umr import UserModuleReloader
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def user_module(tmpdir):
|
||||
"""Create a simple module in tmpdir as an example of a user module."""
|
||||
if to_text_string(tmpdir) not in sys.path:
|
||||
sys.path.append(to_text_string(tmpdir))
|
||||
|
||||
def create_module(modname):
|
||||
modfile = tmpdir.mkdir(modname).join('bar.py')
|
||||
code = """
|
||||
def square(x):
|
||||
return x**2
|
||||
"""
|
||||
modfile.write(code)
|
||||
|
||||
init_file = tmpdir.join(modname).join('__init__.py')
|
||||
init_file.write('#')
|
||||
|
||||
return create_module
|
||||
|
||||
|
||||
def test_umr_skip_cython(user_module):
|
||||
"""
|
||||
Test that the UMR doesn't try to reload modules when Cython
|
||||
support is active.
|
||||
"""
|
||||
# Create user module
|
||||
user_module('foo')
|
||||
|
||||
# Activate Cython support
|
||||
os.environ['SPY_RUN_CYTHON'] = 'True'
|
||||
|
||||
# Create UMR
|
||||
umr = UserModuleReloader()
|
||||
|
||||
import foo
|
||||
assert not umr.is_module_reloadable(foo, 'foo')
|
||||
|
||||
# Deactivate Cython support
|
||||
os.environ['SPY_RUN_CYTHON'] = 'False'
|
||||
|
||||
|
||||
def test_umr_run(user_module):
|
||||
"""Test that UMR's run method is working correctly."""
|
||||
# Create user module
|
||||
user_module('foo1')
|
||||
|
||||
# Activate verbose mode in the UMR
|
||||
os.environ['SPY_UMR_VERBOSE'] = 'True'
|
||||
|
||||
# Create UMR
|
||||
umr = UserModuleReloader()
|
||||
|
||||
from foo1.bar import square
|
||||
umr.run()
|
||||
assert umr.modnames_to_reload == ['foo1', 'foo1.bar']
|
||||
|
||||
|
||||
def test_umr_previous_modules(user_module):
|
||||
"""Test that UMR's previos_modules is working as expected."""
|
||||
# Create user module
|
||||
user_module('foo2')
|
||||
|
||||
# Create UMR
|
||||
umr = UserModuleReloader()
|
||||
|
||||
import foo2
|
||||
assert 'IPython' in umr.previous_modules
|
||||
assert 'foo2' not in umr.previous_modules
|
||||
|
||||
|
||||
def test_umr_namelist():
|
||||
"""Test that the UMR skips modules according to its name."""
|
||||
umr = UserModuleReloader()
|
||||
|
||||
assert umr.is_module_in_namelist('tensorflow')
|
||||
assert umr.is_module_in_namelist('pytorch')
|
||||
assert umr.is_module_in_namelist('spyder_kernels')
|
||||
assert not umr.is_module_in_namelist('foo')
|
||||
|
||||
|
||||
def test_umr_reload_modules(user_module):
|
||||
"""Test that the UMR only tries to reload user modules."""
|
||||
# Create user module
|
||||
user_module('foo3')
|
||||
|
||||
# Create UMR
|
||||
umr = UserModuleReloader()
|
||||
|
||||
# Don't reload stdlib modules
|
||||
import xml
|
||||
assert not umr.is_module_reloadable(xml, 'xml')
|
||||
|
||||
# Don't reload third-party modules
|
||||
import numpy
|
||||
assert not umr.is_module_reloadable(numpy, 'numpy')
|
||||
|
||||
# Reload user modules
|
||||
import foo3
|
||||
assert umr.is_module_reloadable(foo3, 'foo3')
|
||||
@@ -0,0 +1,24 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from spyder_kernels.customize.utils import create_pathlist
|
||||
|
||||
|
||||
def test_user_sitepackages_in_pathlist():
|
||||
"""Test that we include user site-packages in pathlist."""
|
||||
if sys.platform.startswith('linux'):
|
||||
user_path = 'local'
|
||||
elif (sys.platform == 'darwin' or sys.platform.startswith('freebsd')):
|
||||
user_path = os.path.expanduser('~/.local')
|
||||
else:
|
||||
user_path = 'Roaming'
|
||||
|
||||
assert any([user_path in path for path in create_pathlist()])
|
||||
@@ -0,0 +1,143 @@
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
|
||||
"""User module reloader."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from spyder_kernels.customize.utils import path_is_library
|
||||
from spyder_kernels.py3compat import PY2, _print
|
||||
|
||||
|
||||
class UserModuleReloader(object):
|
||||
"""
|
||||
User Module Reloader (UMR) aims at deleting user modules
|
||||
to force Python to deeply reload them during import
|
||||
|
||||
pathlist [list]: blacklist in terms of module path
|
||||
namelist [list]: blacklist in terms of module name
|
||||
"""
|
||||
|
||||
def __init__(self, namelist=None, pathlist=None):
|
||||
if namelist is None:
|
||||
namelist = []
|
||||
else:
|
||||
try:
|
||||
namelist = namelist.split(',')
|
||||
except Exception:
|
||||
namelist = []
|
||||
|
||||
# Spyder modules
|
||||
spy_modules = ['spyder_kernels']
|
||||
|
||||
# Matplotlib modules
|
||||
mpl_modules = ['matplotlib', 'tkinter', 'Tkinter']
|
||||
|
||||
# Add other, necessary modules to the UMR blacklist
|
||||
# astropy: See spyder-ide/spyder#6962
|
||||
# pytorch: See spyder-ide/spyder#7041
|
||||
# fastmat: See spyder-ide/spyder#7190
|
||||
# pythoncom: See spyder-ide/spyder#7190
|
||||
# tensorflow: See spyder-ide/spyder#8697
|
||||
other_modules = ['pytorch', 'pythoncom', 'tensorflow']
|
||||
if PY2:
|
||||
py2_modules = ['astropy', 'fastmat']
|
||||
other_modules = other_modules + py2_modules
|
||||
self.namelist = namelist + spy_modules + mpl_modules + other_modules
|
||||
|
||||
self.pathlist = pathlist
|
||||
|
||||
# List of previously loaded modules
|
||||
self.previous_modules = list(sys.modules.keys())
|
||||
|
||||
# List of module names to reload
|
||||
self.modnames_to_reload = []
|
||||
|
||||
# Activate Cython support
|
||||
self.has_cython = False
|
||||
self.activate_cython()
|
||||
|
||||
# Check if the UMR is enabled or not
|
||||
enabled = os.environ.get("SPY_UMR_ENABLED", "")
|
||||
self.enabled = enabled.lower() == "true"
|
||||
|
||||
# Check if the UMR should print the list of reloaded modules or not
|
||||
verbose = os.environ.get("SPY_UMR_VERBOSE", "")
|
||||
self.verbose = verbose.lower() == "true"
|
||||
|
||||
def is_module_reloadable(self, module, modname):
|
||||
"""Decide if a module is reloadable or not."""
|
||||
if self.has_cython:
|
||||
# Don't reload cached inline-compiled .pyx files
|
||||
return False
|
||||
else:
|
||||
if (path_is_library(getattr(module, '__file__', None),
|
||||
self.pathlist) or
|
||||
self.is_module_in_namelist(modname)):
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def is_module_in_namelist(self, modname):
|
||||
"""Decide if a module can be reloaded or not according to its name."""
|
||||
return set(modname.split('.')) & set(self.namelist)
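# For instance (an illustrative sketch with the default namelist):
#
#     umr = UserModuleReloader()
#     bool(umr.is_module_in_namelist('tensorflow.python.ops'))  # True, skipped
#     bool(umr.is_module_in_namelist('foo.bar'))                # False, reloadable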
|
||||
|
||||
def activate_cython(self):
|
||||
"""
|
||||
Activate Cython support.
|
||||
|
||||
We need to run this here because if the support is
|
||||
active, we don't want to run the UMR at all.
|
||||
"""
|
||||
run_cython = os.environ.get("SPY_RUN_CYTHON") == "True"
|
||||
|
||||
if run_cython:
|
||||
try:
|
||||
__import__('Cython')
|
||||
self.has_cython = True
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if self.has_cython:
|
||||
# Import pyximport to enable Cython files support for
|
||||
# import statement
|
||||
import pyximport
|
||||
pyx_setup_args = {}
|
||||
|
||||
# Add Numpy include dir to pyximport/distutils
|
||||
try:
|
||||
import numpy
|
||||
pyx_setup_args['include_dirs'] = numpy.get_include()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Setup pyximport and enable Cython files reload
|
||||
pyximport.install(setup_args=pyx_setup_args,
|
||||
reload_support=True)
|
||||
|
||||
def run(self):
|
||||
"""
|
||||
Delete user modules to force Python to deeply reload them
|
||||
|
||||
Do not delete modules that are considered system modules, i.e.
|
||||
modules installed in subdirectories of the Python interpreter's binary.
|
||||
Do not delete C modules either.
|
||||
"""
|
||||
self.modnames_to_reload = []
|
||||
for modname, module in list(sys.modules.items()):
|
||||
if modname not in self.previous_modules:
|
||||
# Decide if a module can be reloaded or not
|
||||
if self.is_module_reloadable(module, modname):
|
||||
self.modnames_to_reload.append(modname)
|
||||
del sys.modules[modname]
|
||||
else:
|
||||
continue
|
||||
|
||||
# Report reloaded modules
|
||||
if self.verbose and self.modnames_to_reload:
|
||||
modnames = self.modnames_to_reload
|
||||
_print("\x1b[4;33m%s\x1b[24m%s\x1b[0m"
|
||||
% ("Reloaded modules", ": "+", ".join(modnames)))
|
||||
@@ -0,0 +1,140 @@
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
|
||||
"""Utility functions."""
|
||||
|
||||
import ast
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import sysconfig
|
||||
|
||||
|
||||
def create_pathlist():
|
||||
"""
|
||||
Create the list of Python library paths to be skipped by module
|
||||
reloading and Pdb steps.
|
||||
"""
|
||||
# Get standard installation paths
|
||||
try:
|
||||
paths = sysconfig.get_paths()
|
||||
standard_paths = [paths['stdlib'],
|
||||
paths['purelib'],
|
||||
paths['scripts'],
|
||||
paths['data']]
|
||||
except Exception:
|
||||
standard_paths = []
|
||||
|
||||
# Get user installation path
|
||||
# See spyder-ide/spyder#8776
|
||||
try:
|
||||
import site
|
||||
if getattr(site, 'getusersitepackages', False):
|
||||
# Virtualenvs don't have this function but
|
||||
# conda envs do
|
||||
user_path = [site.getusersitepackages()]
|
||||
elif getattr(site, 'USER_SITE', False):
|
||||
# However, it seems virtualenvs have this
|
||||
# constant
|
||||
user_path = [site.USER_SITE]
|
||||
else:
|
||||
user_path = []
|
||||
except Exception:
|
||||
user_path = []
|
||||
|
||||
return standard_paths + user_path
|
||||
|
||||
|
||||
def path_is_library(path, initial_pathlist=None):
|
||||
"""Decide if a path is in user code or a library according to its path."""
|
||||
# Compute DEFAULT_PATHLIST only once and make it global to reuse it
|
||||
# in any future call of this function.
|
||||
if 'DEFAULT_PATHLIST' not in globals():
|
||||
global DEFAULT_PATHLIST
|
||||
DEFAULT_PATHLIST = create_pathlist()
|
||||
|
||||
if initial_pathlist is None:
|
||||
initial_pathlist = []
|
||||
|
||||
pathlist = initial_pathlist + DEFAULT_PATHLIST
|
||||
|
||||
if path is None:
|
||||
# Path probably comes from a C module that is statically linked
|
||||
# into the interpreter. There is no way to know its path, so we
|
||||
# choose to ignore it.
|
||||
return True
|
||||
elif any([p in path for p in pathlist]):
|
||||
# We don't want to consider paths that belong to the standard
|
||||
# library or installed to site-packages.
|
||||
return True
|
||||
elif os.name == 'nt':
|
||||
if re.search(r'.*\\pkgs\\.*', path):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
elif not os.name == 'nt':
|
||||
# Paths containing the strings below can be part of the default
|
||||
# Linux installation, Homebrew or the user site-packages in a
|
||||
# virtualenv.
|
||||
patterns = [
|
||||
r'^/usr/lib.*',
|
||||
r'^/usr/local/lib.*',
|
||||
r'^/usr/.*/dist-packages/.*',
|
||||
r'^/home/.*/.local/lib.*',
|
||||
r'^/Library/.*',
|
||||
r'^/Users/.*/Library/.*',
|
||||
r'^/Users/.*/.local/.*',
|
||||
]
|
||||
|
||||
if [p for p in patterns if re.search(p, path)]:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
else:
|
||||
return False
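# Quick orientation (assumed paths, illustration only):
#
#     path_is_library('/usr/lib/python3.8/ast.py')       # True  (stdlib)
#     path_is_library(None)                               # True  (statically linked C module)
#     path_is_library('/home/user/project/module.py')     # False (user code)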
|
||||
|
||||
|
||||
def capture_last_Expr(code_ast, out_varname):
|
||||
"""Parse line and modify code to capture in globals the last expression."""
|
||||
# Modify ast code to capture the last expression
|
||||
capture_last_expression = False
|
||||
if (
|
||||
len(code_ast.body)
|
||||
and isinstance(code_ast.body[-1], ast.Expr)
|
||||
):
|
||||
capture_last_expression = True
|
||||
expr_node = code_ast.body[-1]
|
||||
# Create new assign node
|
||||
assign_node = ast.parse(
|
||||
'globals()[{}] = None'.format(repr(out_varname))).body[0]
|
||||
# Replace None by the value
|
||||
assign_node.value = expr_node.value
|
||||
# Fix line number and column offset
|
||||
assign_node.lineno = expr_node.lineno
|
||||
assign_node.col_offset = expr_node.col_offset
|
||||
if sys.version_info[:2] >= (3, 8):
|
||||
# Exists from 3.8, necessary from 3.11
|
||||
assign_node.end_lineno = expr_node.end_lineno
|
||||
if assign_node.lineno == assign_node.end_lineno:
|
||||
# Add 'globals()[{}] = ' and remove 'None'
|
||||
assign_node.end_col_offset += expr_node.end_col_offset - 4
|
||||
else:
|
||||
assign_node.end_col_offset = expr_node.end_col_offset
|
||||
code_ast.body[-1] = assign_node
|
||||
return code_ast, capture_last_expression
|
||||
|
||||
|
||||
def canonic(filename):
|
||||
"""
|
||||
Return canonical form of filename.
|
||||
|
||||
This is a copy of bdb.canonic, so that the debugger will process
|
||||
filenames in the same way
|
||||
"""
|
||||
if filename == "<" + filename[1:-1] + ">":
|
||||
return filename
|
||||
canonic = os.path.abspath(filename)
|
||||
canonic = os.path.normcase(canonic)
|
||||
return canonic
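# Minimal self-check (a sketch, not used by the kernel): demonstrate how
# capture_last_Expr rewrites the last expression of a block so that its value
# is stored in globals() under the requested name.
if __name__ == "__main__":
    demo_ast = ast.parse("a = 2\na + 3")
    demo_ast, captured = capture_last_Expr(demo_ast, "_demo_out")
    demo_ns = {}
    exec(compile(demo_ast, "<demo>", "exec"), demo_ns)
    print(captured, demo_ns["_demo_out"])  # prints: True 5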
|
||||
@@ -0,0 +1,360 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
spyder.py3compat
|
||||
----------------
|
||||
|
||||
Transitional module providing compatibility functions intended to help
|
||||
migrating from Python 2 to Python 3.
|
||||
|
||||
This module should be fully compatible with:
|
||||
* Python >=v2.6
|
||||
* Python 3
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import operator
|
||||
import os
|
||||
import sys
|
||||
|
||||
PY2 = sys.version[0] == '2'
|
||||
PY3 = sys.version[0] == '3'
|
||||
|
||||
if PY3:
|
||||
# keep reference to builtin_mod because the kernel overrides that value
|
||||
# to forward requests to a frontend.
|
||||
def input(prompt=''):
|
||||
return builtin_mod.input(prompt)
|
||||
builtin_mod_name = "builtins"
|
||||
import builtins as builtin_mod
|
||||
else:
|
||||
# keep reference to builtin_mod because the kernel overrides that value
|
||||
# to forward requests to a frontend.
|
||||
def input(prompt=''):
|
||||
return builtin_mod.raw_input(prompt)
|
||||
builtin_mod_name = "__builtin__"
|
||||
import __builtin__ as builtin_mod
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Data types
|
||||
#==============================================================================
|
||||
if PY2:
|
||||
# Python 2
|
||||
TEXT_TYPES = (str, unicode)
|
||||
INT_TYPES = (int, long)
|
||||
else:
|
||||
# Python 3
|
||||
TEXT_TYPES = (str,)
|
||||
INT_TYPES = (int,)
|
||||
NUMERIC_TYPES = tuple(list(INT_TYPES) + [float, complex])
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Renamed/Reorganized modules
|
||||
#==============================================================================
|
||||
if PY2:
|
||||
# Python 2
|
||||
import __builtin__ as builtins
|
||||
import ConfigParser as configparser
|
||||
try:
|
||||
import _winreg as winreg
|
||||
except ImportError:
|
||||
pass
|
||||
from sys import maxint as maxsize
|
||||
try:
|
||||
import CStringIO as io
|
||||
except ImportError:
|
||||
import StringIO as io
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
import pickle
|
||||
from UserDict import DictMixin as MutableMapping
|
||||
import thread as _thread
|
||||
import repr as reprlib
|
||||
import Queue
|
||||
else:
|
||||
# Python 3
|
||||
import builtins
|
||||
import configparser
|
||||
try:
|
||||
import winreg
|
||||
except ImportError:
|
||||
pass
|
||||
from sys import maxsize
|
||||
import io
|
||||
import pickle
|
||||
from collections.abc import MutableMapping
|
||||
import _thread
|
||||
import reprlib
|
||||
import queue as Queue
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Strings
|
||||
#==============================================================================
|
||||
def is_type_text_string(obj):
|
||||
"""Return True if `obj` is type text string, False if it is anything else,
|
||||
like an instance of a class that extends the basestring class."""
|
||||
if PY2:
|
||||
# Python 2
|
||||
return type(obj) in [str, unicode]
|
||||
else:
|
||||
# Python 3
|
||||
return type(obj) in [str, bytes]
|
||||
|
||||
def is_text_string(obj):
|
||||
"""Return True if `obj` is a text string, False if it is anything else,
|
||||
like binary data (Python 3) or QString (Python 2, PyQt API #1)"""
|
||||
if PY2:
|
||||
# Python 2
|
||||
return isinstance(obj, basestring)
|
||||
else:
|
||||
# Python 3
|
||||
return isinstance(obj, str)
|
||||
|
||||
def is_binary_string(obj):
|
||||
"""Return True if `obj` is a binary string, False if it is anything else"""
|
||||
if PY2:
|
||||
# Python 2
|
||||
return isinstance(obj, str)
|
||||
else:
|
||||
# Python 3
|
||||
return isinstance(obj, bytes)
|
||||
|
||||
def is_string(obj):
|
||||
"""Return True if `obj` is a text or binary Python string object,
|
||||
False if it is anything else, like a QString (Python 2, PyQt API #1)"""
|
||||
return is_text_string(obj) or is_binary_string(obj)
|
||||
|
||||
def is_unicode(obj):
|
||||
"""Return True if `obj` is unicode"""
|
||||
if PY2:
|
||||
# Python 2
|
||||
return isinstance(obj, unicode)
|
||||
else:
|
||||
# Python 3
|
||||
return isinstance(obj, str)
|
||||
|
||||
def to_text_string(obj, encoding=None):
|
||||
"""Convert `obj` to (unicode) text string"""
|
||||
if PY2:
|
||||
# Python 2
|
||||
if encoding is None:
|
||||
return unicode(obj)
|
||||
else:
|
||||
return unicode(obj, encoding)
|
||||
else:
|
||||
# Python 3
|
||||
if encoding is None:
|
||||
return str(obj)
|
||||
elif isinstance(obj, str):
|
||||
# In case this function is not used properly, this could happen
|
||||
return obj
|
||||
else:
|
||||
return str(obj, encoding)
|
||||
|
||||
def to_binary_string(obj, encoding=None):
|
||||
"""Convert `obj` to binary string (bytes in Python 3, str in Python 2)"""
|
||||
if PY2:
|
||||
# Python 2
|
||||
if encoding is None:
|
||||
return str(obj)
|
||||
else:
|
||||
return obj.encode(encoding)
|
||||
else:
|
||||
# Python 3
|
||||
return bytes(obj, 'utf-8' if encoding is None else encoding)
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Function attributes
|
||||
#==============================================================================
|
||||
def get_func_code(func):
|
||||
"""Return function code object"""
|
||||
if PY2:
|
||||
# Python 2
|
||||
return func.func_code
|
||||
else:
|
||||
# Python 3
|
||||
return func.__code__
|
||||
|
||||
def get_func_name(func):
|
||||
"""Return function name"""
|
||||
if PY2:
|
||||
# Python 2
|
||||
return func.func_name
|
||||
else:
|
||||
# Python 3
|
||||
return func.__name__
|
||||
|
||||
def get_func_defaults(func):
|
||||
"""Return function default argument values"""
|
||||
if PY2:
|
||||
# Python 2
|
||||
return func.func_defaults
|
||||
else:
|
||||
# Python 3
|
||||
return func.__defaults__
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Special method attributes
|
||||
#==============================================================================
|
||||
def get_meth_func(obj):
|
||||
"""Return method function object"""
|
||||
if PY2:
|
||||
# Python 2
|
||||
return obj.im_func
|
||||
else:
|
||||
# Python 3
|
||||
return obj.__func__
|
||||
|
||||
def get_meth_class_inst(obj):
|
||||
"""Return method class instance"""
|
||||
if PY2:
|
||||
# Python 2
|
||||
return obj.im_self
|
||||
else:
|
||||
# Python 3
|
||||
return obj.__self__
|
||||
|
||||
def get_meth_class(obj):
|
||||
"""Return method class"""
|
||||
if PY2:
|
||||
# Python 2
|
||||
return obj.im_class
|
||||
else:
|
||||
# Python 3
|
||||
return obj.__self__.__class__
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Misc.
|
||||
#==============================================================================
|
||||
if PY2:
|
||||
def _print(*objects, **options):
|
||||
end = options.get('end', '\n')
|
||||
file = options.get('file', sys.stdout)
|
||||
sep = options.get('sep', ' ')
|
||||
string = sep.join([str(obj) for obj in objects])
|
||||
print(string, file=file, end=end, sep=sep)
|
||||
else:
|
||||
_print = print
|
||||
|
||||
|
||||
if PY2:
|
||||
# Python 2
|
||||
getcwd = os.getcwdu
|
||||
cmp = cmp
|
||||
import string
|
||||
str_lower = string.lower
|
||||
from itertools import izip_longest as zip_longest
|
||||
from backports.functools_lru_cache import lru_cache
|
||||
else:
|
||||
# Python 3
|
||||
getcwd = os.getcwd
|
||||
def cmp(a, b):
|
||||
return (a > b) - (a < b)
|
||||
str_lower = str.lower
|
||||
from itertools import zip_longest
|
||||
from functools import lru_cache
|
||||
|
||||
def qbytearray_to_str(qba):
|
||||
"""Convert QByteArray object to str in a way compatible with Python 2/3"""
|
||||
return str(bytes(qba.toHex().data()).decode())
|
||||
|
||||
# =============================================================================
|
||||
# Dict funcs
|
||||
# =============================================================================
|
||||
if PY3:
|
||||
def iterkeys(d, **kw):
|
||||
return iter(d.keys(**kw))
|
||||
|
||||
def itervalues(d, **kw):
|
||||
return iter(d.values(**kw))
|
||||
|
||||
def iteritems(d, **kw):
|
||||
return iter(d.items(**kw))
|
||||
|
||||
def iterlists(d, **kw):
|
||||
return iter(d.lists(**kw))
|
||||
|
||||
viewkeys = operator.methodcaller("keys")
|
||||
|
||||
viewvalues = operator.methodcaller("values")
|
||||
|
||||
viewitems = operator.methodcaller("items")
|
||||
else:
|
||||
def iterkeys(d, **kw):
|
||||
return d.iterkeys(**kw)
|
||||
|
||||
def itervalues(d, **kw):
|
||||
return d.itervalues(**kw)
|
||||
|
||||
def iteritems(d, **kw):
|
||||
return d.iteritems(**kw)
|
||||
|
||||
def iterlists(d, **kw):
|
||||
return d.iterlists(**kw)
|
||||
|
||||
viewkeys = operator.methodcaller("viewkeys")
|
||||
|
||||
viewvalues = operator.methodcaller("viewvalues")
|
||||
|
||||
viewitems = operator.methodcaller("viewitems")
|
||||
|
||||
# =============================================================================
|
||||
# Exceptions
|
||||
# =============================================================================
|
||||
if PY2:
|
||||
TimeoutError = RuntimeError
|
||||
FileNotFoundError = IOError
|
||||
else:
|
||||
TimeoutError = TimeoutError
|
||||
FileNotFoundError = FileNotFoundError
|
||||
|
||||
if PY2:
|
||||
import re
|
||||
import tokenize
|
||||
def isidentifier(string):
|
||||
"""Check if string can be a variable name."""
|
||||
return re.match(tokenize.Name + r'\Z', string) is not None
|
||||
|
||||
if os.name == 'nt':
|
||||
def encode(u):
|
||||
"""Try encoding with utf8."""
|
||||
if isinstance(u, unicode):
|
||||
return u.encode('utf8', 'replace')
|
||||
return u
|
||||
else:
|
||||
def encode(u):
|
||||
"""Try encoding with file system encoding."""
|
||||
if isinstance(u, unicode):
|
||||
return u.encode(sys.getfilesystemencoding())
|
||||
return u
|
||||
else:
|
||||
def isidentifier(string):
|
||||
"""Check if string can be a variable name."""
|
||||
return string.isidentifier()
|
||||
|
||||
def encode(u):
|
||||
"""Encoding is not a problem in python 3."""
|
||||
return u
|
||||
|
||||
|
||||
def compat_exec(code, globals, locals):
|
||||
# Wrap exec in a function
|
||||
exec(code, globals, locals)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
pass
|
||||
@@ -0,0 +1,11 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
Utilities
|
||||
"""
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,376 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""Utilities and wrappers around inspect module"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import inspect
|
||||
import re
|
||||
|
||||
# Local imports:
|
||||
from spyder_kernels.py3compat import (is_text_string, builtins, get_meth_func,
|
||||
get_meth_class_inst, get_meth_class,
|
||||
get_func_defaults, to_text_string, PY2)
|
||||
|
||||
|
||||
SYMBOLS = r"[^\'\"a-zA-Z0-9_.]"
|
||||
|
||||
|
||||
def getobj(txt, last=False):
|
||||
"""Return the last valid object name in string"""
|
||||
txt_end = ""
|
||||
for startchar, endchar in ["[]", "()"]:
|
||||
if txt.endswith(endchar):
|
||||
pos = txt.rfind(startchar)
|
||||
if pos:
|
||||
txt_end = txt[pos:]
|
||||
txt = txt[:pos]
|
||||
tokens = re.split(SYMBOLS, txt)
|
||||
token = None
|
||||
try:
|
||||
while token is None or re.match(SYMBOLS, token):
|
||||
token = tokens.pop()
|
||||
if token.endswith('.'):
|
||||
token = token[:-1]
|
||||
if token.startswith('.'):
|
||||
# Invalid object name
|
||||
return None
|
||||
if last:
|
||||
#XXX: remove this statement as well as the "last" argument
|
||||
token += txt[txt.rfind(token) + len(token)]
|
||||
token += txt_end
|
||||
if token:
|
||||
return token
|
||||
except IndexError:
|
||||
return None
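# Illustrative sketch (hypothetical inputs, not part of the original module):
#
#     >>> getobj("print(np.arange")
#     'np.arange'
#     >>> getobj("df.loc[0]")   # trailing brackets are re-appended
#     'df.loc[0]'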
|
||||
|
||||
|
||||
def getobjdir(obj):
|
||||
"""
|
||||
For standard objects, will simply return dir(obj)
|
||||
In special cases (e.g. WrapITK package), will return only string elements
|
||||
of result returned by dir(obj)
|
||||
"""
|
||||
return [item for item in dir(obj) if is_text_string(item)]
|
||||
|
||||
|
||||
def getdoc(obj):
|
||||
"""
|
||||
Return text documentation from an object. This comes in a form of
|
||||
dictionary with four keys:
|
||||
|
||||
name:
|
||||
The name of the inspected object
|
||||
argspec:
|
||||
Its argspec
|
||||
note:
|
||||
A phrase describing the type of object (function or method) we are
|
||||
inspecting, and the module it belongs to.
|
||||
docstring:
|
||||
Its docstring
|
||||
"""
|
||||
|
||||
docstring = inspect.getdoc(obj) or inspect.getcomments(obj) or ''
|
||||
|
||||
# Most of the time doc will only contain ascii characters, but there are
|
||||
# some docstrings that contain non-ascii characters. Not all source files
|
||||
# declare their encoding in the first line, so querying for that might not
|
||||
# yield anything, either. So assume the most commonly used
|
||||
# multi-byte file encoding (which also covers ascii).
|
||||
try:
|
||||
docstring = to_text_string(docstring)
|
||||
except:
|
||||
pass
|
||||
|
||||
# Doc dict keys
|
||||
doc = {'name': '',
|
||||
'argspec': '',
|
||||
'note': '',
|
||||
'docstring': docstring}
|
||||
|
||||
if callable(obj):
|
||||
try:
|
||||
name = obj.__name__
|
||||
except AttributeError:
|
||||
doc['docstring'] = docstring
|
||||
return doc
|
||||
if inspect.ismethod(obj):
|
||||
imclass = get_meth_class(obj)
|
||||
if get_meth_class_inst(obj) is not None:
|
||||
doc['note'] = 'Method of %s instance' \
|
||||
% get_meth_class_inst(obj).__class__.__name__
|
||||
else:
|
||||
doc['note'] = 'Unbound %s method' % imclass.__name__
|
||||
obj = get_meth_func(obj)
|
||||
elif hasattr(obj, '__module__'):
|
||||
doc['note'] = 'Function of %s module' % obj.__module__
|
||||
else:
|
||||
doc['note'] = 'Function'
|
||||
doc['name'] = obj.__name__
|
||||
if inspect.isfunction(obj):
|
||||
if PY2:
|
||||
args, varargs, varkw, defaults = inspect.getargspec(obj)
|
||||
doc['argspec'] = inspect.formatargspec(
|
||||
args, varargs, varkw, defaults,
|
||||
formatvalue=lambda o:'='+repr(o))
|
||||
else:
|
||||
# This is necessary to catch errors for objects without a
|
||||
# signature, like numpy.where.
|
||||
# Fixes spyder-ide/spyder#21148
|
||||
try:
|
||||
sig = inspect.signature(obj)
|
||||
except ValueError:
|
||||
sig = getargspecfromtext(doc['docstring'])
|
||||
if not sig:
|
||||
sig = '(...)'
|
||||
doc['argspec'] = str(sig)
|
||||
if name == '<lambda>':
|
||||
doc['name'] = name + ' lambda '
|
||||
doc['argspec'] = doc['argspec'][1:-1] # remove parentheses
|
||||
else:
|
||||
argspec = getargspecfromtext(doc['docstring'])
|
||||
if argspec:
|
||||
doc['argspec'] = argspec
|
||||
# Many scipy and numpy docstrings begin with a function
|
||||
# signature on the first line. This ends up being redundant
|
||||
# when we are using title and argspec to create the
|
||||
# rich text "Definition:" field. We'll carefully remove this
|
||||
# redundancy but only under a strict set of conditions:
|
||||
# Remove the starting characters of the 'doc' portion *iff*
|
||||
# the non-whitespace characters on the first line
|
||||
# match *exactly* the combined function title
|
||||
# and argspec we determined above.
|
||||
signature = doc['name'] + doc['argspec']
|
||||
docstring_blocks = doc['docstring'].split("\n\n")
|
||||
first_block = docstring_blocks[0].strip()
|
||||
if first_block == signature:
|
||||
doc['docstring'] = doc['docstring'].replace(
|
||||
signature, '', 1).lstrip()
|
||||
else:
|
||||
doc['argspec'] = '(...)'
|
||||
|
||||
# Remove self from argspec
|
||||
argspec = doc['argspec']
|
||||
doc['argspec'] = argspec.replace('(self)', '()').replace('(self, ', '(')
|
||||
|
||||
return doc
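# Illustrative sketch (hypothetical function, not part of the original module):
#
#     >>> def f(x, y=1):
#     ...     """Add things."""
#     ...     return x + y
#     >>> d = getdoc(f)
#     >>> d['name'], d['argspec'], d['docstring']
#     ('f', '(x, y=1)', 'Add things.')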
|
||||
|
||||
|
||||
def getsource(obj):
|
||||
"""Wrapper around inspect.getsource"""
|
||||
try:
|
||||
try:
|
||||
src = to_text_string(inspect.getsource(obj))
|
||||
except TypeError:
|
||||
if hasattr(obj, '__class__'):
|
||||
src = to_text_string(inspect.getsource(obj.__class__))
|
||||
else:
|
||||
# Bindings like VTK or ITK require this case
|
||||
src = getdoc(obj)
|
||||
return src
|
||||
except (TypeError, IOError):
|
||||
return
|
||||
|
||||
|
||||
def getsignaturefromtext(text, objname):
|
||||
"""Get object signature from text (i.e. object documentation)."""
|
||||
if isinstance(text, dict):
|
||||
text = text.get('docstring', '')
|
||||
|
||||
# Regexps
|
||||
args_re = r'(\(.+?\))'
|
||||
if objname:
|
||||
signature_re = objname + args_re
|
||||
else:
|
||||
identifier_re = r'(\w+)'
|
||||
signature_re = identifier_re + args_re
|
||||
|
||||
# Grabbing signatures
|
||||
if not text:
|
||||
text = ''
|
||||
|
||||
sigs = re.findall(signature_re, text)
|
||||
|
||||
# The most relevant signature is usually the first one. There could be
|
||||
# others in doctests or other places, but those are not so important.
|
||||
sig = ''
|
||||
if sigs:
|
||||
if PY2:
|
||||
# We don't have an easy way to check if the identifier detected by
|
||||
# signature_re is a valid one in Python 2. So, we simply select the
|
||||
# first match.
|
||||
sig = sigs[0] if objname else sigs[0][1]
|
||||
else:
|
||||
# Default signatures returned by IPython.
|
||||
# Notes:
|
||||
# * These are not real signatures but only used to provide a
|
||||
# placeholder.
|
||||
# * We skip them if we can find other signatures in `text`.
|
||||
# * This is necessary because we also use this function in Spyder
|
||||
# to parse the content of inspect replies that come from the
|
||||
# kernel, which can include these signatures.
|
||||
default_ipy_sigs = [
|
||||
'(*args, **kwargs)',
|
||||
'(self, /, *args, **kwargs)'
|
||||
]
|
||||
|
||||
if objname:
|
||||
real_sigs = [s for s in sigs if s not in default_ipy_sigs]
|
||||
|
||||
if real_sigs:
|
||||
sig = real_sigs[0]
|
||||
else:
|
||||
sig = sigs[0]
|
||||
else:
|
||||
valid_sigs = [s for s in sigs if s[0].isidentifier()]
|
||||
|
||||
if valid_sigs:
|
||||
real_sigs = [
|
||||
s for s in valid_sigs if s[1] not in default_ipy_sigs
|
||||
]
|
||||
|
||||
if real_sigs:
|
||||
sig = real_sigs[0][1]
|
||||
else:
|
||||
sig = valid_sigs[0][1]
|
||||
|
||||
return sig
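# Illustrative sketch (hypothetical docstring text, not part of the original
# module): the first signature found in the text is returned.
#
#     >>> getsignaturefromtext("where(condition, x, y)", "where")
#     '(condition, x, y)'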
|
||||
|
||||
|
||||
def getargspecfromtext(text):
|
||||
"""
|
||||
Try to get the formatted argspec of a callable from the first block of its
|
||||
docstring.
|
||||
|
||||
This will return something like `(x, y, k=1)`.
|
||||
"""
|
||||
blocks = text.split("\n\n")
|
||||
first_block = blocks[0].strip().replace('\n', '')
|
||||
return getsignaturefromtext(first_block, '')
|
||||
|
||||
|
||||
def getargsfromtext(text, objname):
|
||||
"""Get arguments from text (object documentation)."""
|
||||
signature = getsignaturefromtext(text, objname)
|
||||
if signature:
|
||||
argtxt = signature[signature.find('(') + 1:-1]
|
||||
return argtxt.split(',')
|
||||
|
||||
|
||||
def getargsfromdoc(obj):
|
||||
"""Get arguments from object doc"""
|
||||
if obj.__doc__ is not None:
|
||||
return getargsfromtext(obj.__doc__, obj.__name__)
|
||||
|
||||
|
||||
def getargs(obj):
|
||||
"""Get the names and default values of a function's arguments"""
|
||||
if inspect.isfunction(obj) or inspect.isbuiltin(obj):
|
||||
func_obj = obj
|
||||
elif inspect.ismethod(obj):
|
||||
func_obj = get_meth_func(obj)
|
||||
elif inspect.isclass(obj) and hasattr(obj, '__init__'):
|
||||
func_obj = getattr(obj, '__init__')
|
||||
else:
|
||||
return []
|
||||
|
||||
if not hasattr(func_obj, '__code__'):
|
||||
# Builtin: try to extract info from doc
|
||||
args = getargsfromdoc(func_obj)
|
||||
if args is not None:
|
||||
return args
|
||||
else:
|
||||
# Example: PyQt5
|
||||
return getargsfromdoc(obj)
|
||||
|
||||
args, _, _ = inspect.getargs(func_obj.__code__)
|
||||
if not args:
|
||||
return getargsfromdoc(obj)
|
||||
|
||||
# Supporting tuple arguments in def statement:
|
||||
for i_arg, arg in enumerate(args):
|
||||
if isinstance(arg, list):
|
||||
args[i_arg] = "(%s)" % ", ".join(arg)
|
||||
|
||||
defaults = get_func_defaults(func_obj)
|
||||
if defaults is not None:
|
||||
for index, default in enumerate(defaults):
|
||||
args[index + len(args) - len(defaults)] += '=' + repr(default)
|
||||
|
||||
if inspect.isclass(obj) or inspect.ismethod(obj):
|
||||
if len(args) == 1:
|
||||
return None
|
||||
|
||||
# Remove 'self' from args
|
||||
if 'self' in args:
|
||||
args.remove('self')
|
||||
|
||||
return args
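# Illustrative sketch (hypothetical function, not part of the original module):
# default values are appended to the corresponding argument names.
#
#     >>> def g(a, b=2, *args):
#     ...     pass
#     >>> getargs(g)
#     ['a', 'b=2']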
|
||||
|
||||
|
||||
def getargtxt(obj, one_arg_per_line=True):
|
||||
"""
|
||||
Get the names and default values of a function's arguments
|
||||
Return list with separators (', ') formatted for calltips
|
||||
"""
|
||||
args = getargs(obj)
|
||||
if args:
|
||||
sep = ', '
|
||||
textlist = None
|
||||
for i_arg, arg in enumerate(args):
|
||||
if textlist is None:
|
||||
textlist = ['']
|
||||
textlist[-1] += arg
|
||||
if i_arg < len(args)-1:
|
||||
textlist[-1] += sep
|
||||
if len(textlist[-1]) >= 32 or one_arg_per_line:
|
||||
textlist.append('')
|
||||
if inspect.isclass(obj) or inspect.ismethod(obj):
|
||||
if len(textlist) == 1:
|
||||
return None
|
||||
if 'self'+sep in textlist:
|
||||
textlist.remove('self'+sep)
|
||||
return textlist
|
||||
|
||||
|
||||
def isdefined(obj, force_import=False, namespace=None):
|
||||
"""Return True if object is defined in namespace
|
||||
If namespace is None --> namespace = locals()"""
|
||||
if namespace is None:
|
||||
namespace = locals()
|
||||
attr_list = obj.split('.')
|
||||
base = attr_list.pop(0)
|
||||
if len(base) == 0:
|
||||
return False
|
||||
if base not in builtins.__dict__ and base not in namespace:
|
||||
if force_import:
|
||||
try:
|
||||
module = __import__(base, globals(), namespace)
|
||||
if base not in globals():
|
||||
globals()[base] = module
|
||||
namespace[base] = module
|
||||
except Exception:
|
||||
return False
|
||||
else:
|
||||
return False
|
||||
for attr in attr_list:
|
||||
try:
|
||||
attr_not_found = not hasattr(eval(base, namespace), attr)
|
||||
except (AttributeError, SyntaxError, TypeError):
|
||||
return False
|
||||
if attr_not_found:
|
||||
if force_import:
|
||||
try:
|
||||
__import__(base+'.'+attr, globals(), namespace)
|
||||
except (ImportError, SyntaxError):
|
||||
return False
|
||||
else:
|
||||
return False
|
||||
base += '.'+attr
|
||||
return True
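# Illustrative sketch (hypothetical namespace, not part of the original module):
#
#     >>> ns = {'os': __import__('os')}
#     >>> isdefined('os.path.join', namespace=ns)
#     True
#     >>> isdefined('os.not_an_attr', namespace=ns)
#     False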
|
||||
@@ -0,0 +1,557 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
Input/Output Utilities
|
||||
|
||||
Note: 'load' functions have to return a dictionary from which a globals()
|
||||
namespace may be updated
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
# Standard library imports
|
||||
import sys
|
||||
import os
|
||||
import os.path as osp
|
||||
import tarfile
|
||||
import tempfile
|
||||
import shutil
|
||||
import types
|
||||
import json
|
||||
import inspect
|
||||
import dis
|
||||
import copy
|
||||
import glob
|
||||
|
||||
# Local imports
|
||||
from spyder_kernels.py3compat import getcwd, pickle, PY2, to_text_string
|
||||
from spyder_kernels.utils.lazymodules import (
|
||||
FakeObject, numpy as np, pandas as pd, PIL, scipy as sp)
|
||||
|
||||
|
||||
class MatlabStruct(dict):
|
||||
"""
|
||||
Matlab style struct, enhanced.
|
||||
|
||||
Supports dictionary and attribute style access. Can be pickled,
|
||||
and supports code completion in a REPL.
|
||||
|
||||
Examples
|
||||
========
|
||||
>>> from spyder.utils.iofuncs import MatlabStruct
|
||||
>>> a = MatlabStruct()
|
||||
>>> a.b = 'spam' # a["b"] == 'spam'
|
||||
>>> a.c["d"] = 'eggs' # a.c.d == 'eggs'
|
||||
>>> print(a)
|
||||
{'c': {'d': 'eggs'}, 'b': 'spam'}
|
||||
|
||||
"""
|
||||
def __getattr__(self, attr):
|
||||
"""Access the dictionary keys for unknown attributes."""
|
||||
try:
|
||||
return self[attr]
|
||||
except KeyError:
|
||||
msg = "'MatlabStruct' object has no attribute %s" % attr
|
||||
raise AttributeError(msg)
|
||||
|
||||
def __getitem__(self, attr):
|
||||
"""
|
||||
Get a dict value; create a MatlabStruct if requesting a submember.
|
||||
|
||||
Do not create a key if the attribute starts with an underscore.
|
||||
"""
|
||||
if attr in self.keys() or attr.startswith('_'):
|
||||
return dict.__getitem__(self, attr)
|
||||
frame = inspect.currentframe()
|
||||
# step into the function that called us
|
||||
if frame.f_back.f_back and self._is_allowed(frame.f_back.f_back):
|
||||
dict.__setitem__(self, attr, MatlabStruct())
|
||||
elif self._is_allowed(frame.f_back):
|
||||
dict.__setitem__(self, attr, MatlabStruct())
|
||||
return dict.__getitem__(self, attr)
|
||||
|
||||
def _is_allowed(self, frame):
|
||||
"""Check for allowed op code in the calling frame"""
|
||||
allowed = [dis.opmap['STORE_ATTR'], dis.opmap['LOAD_CONST'],
|
||||
dis.opmap.get('STOP_CODE', 0)]
|
||||
bytecode = frame.f_code.co_code
|
||||
instruction = bytecode[frame.f_lasti + 3]
|
||||
instruction = ord(instruction) if PY2 else instruction
|
||||
return instruction in allowed
|
||||
|
||||
__setattr__ = dict.__setitem__
|
||||
__delattr__ = dict.__delitem__
|
||||
|
||||
@property
|
||||
def __dict__(self):
|
||||
"""Allow for code completion in a REPL"""
|
||||
return self.copy()
|
||||
|
||||
|
||||
def get_matlab_value(val):
|
||||
"""
|
||||
Extract a value from a Matlab file
|
||||
|
||||
From the oct2py project, see
|
||||
https://pythonhosted.org/oct2py/conversions.html
|
||||
"""
|
||||
# Extract each item of a list.
|
||||
if isinstance(val, list):
|
||||
return [get_matlab_value(v) for v in val]
|
||||
|
||||
# Ignore leaf objects.
|
||||
if not isinstance(val, np.ndarray):
|
||||
return val
|
||||
|
||||
# Convert user defined classes.
|
||||
if hasattr(val, 'classname'):
|
||||
out = dict()
|
||||
for name in val.dtype.names:
|
||||
out[name] = get_matlab_value(val[name].squeeze().tolist())
|
||||
cls = type(val.classname, (object,), out)
|
||||
return cls()
|
||||
|
||||
# Extract struct data.
|
||||
elif val.dtype.names:
|
||||
out = MatlabStruct()
|
||||
for name in val.dtype.names:
|
||||
out[name] = get_matlab_value(val[name].squeeze().tolist())
|
||||
val = out
|
||||
|
||||
# Extract cells.
|
||||
elif val.dtype.kind == 'O':
|
||||
val = val.squeeze().tolist()
|
||||
if not isinstance(val, list):
|
||||
val = [val]
|
||||
val = get_matlab_value(val)
|
||||
|
||||
# Compress singleton values.
|
||||
elif val.size == 1:
|
||||
val = val.item()
|
||||
|
||||
# Compress empty values.
|
||||
elif val.size == 0:
|
||||
if val.dtype.kind in 'US':
|
||||
val = ''
|
||||
else:
|
||||
val = []
|
||||
|
||||
return val
|
||||
|
||||
|
||||
def load_matlab(filename):
|
||||
if sp.io is FakeObject:
|
||||
return None, ''
|
||||
|
||||
try:
|
||||
out = sp.io.loadmat(filename, struct_as_record=True)
|
||||
data = dict()
|
||||
for (key, value) in out.items():
|
||||
data[key] = get_matlab_value(value)
|
||||
return data, None
|
||||
except Exception as error:
|
||||
return None, str(error)
|
||||
|
||||
|
||||
def save_matlab(data, filename):
|
||||
if sp.io is FakeObject:
|
||||
return
|
||||
|
||||
try:
|
||||
sp.io.savemat(filename, data, oned_as='row')
|
||||
except Exception as error:
|
||||
return str(error)
|
||||
|
||||
|
||||
def load_array(filename):
|
||||
if np.load is FakeObject:
|
||||
return None, ''
|
||||
|
||||
try:
|
||||
name = osp.splitext(osp.basename(filename))[0]
|
||||
data = np.load(filename)
|
||||
if isinstance(data, np.lib.npyio.NpzFile):
|
||||
return dict(data), None
|
||||
elif hasattr(data, 'keys'):
|
||||
return data, None
|
||||
else:
|
||||
return {name: data}, None
|
||||
except Exception as error:
|
||||
return None, str(error)
|
||||
|
||||
|
||||
def __save_array(data, basename, index):
|
||||
"""Save numpy array"""
|
||||
fname = basename + '_%04d.npy' % index
|
||||
np.save(fname, data)
|
||||
return fname
|
||||
|
||||
|
||||
if sys.byteorder == 'little':
|
||||
_ENDIAN = '<'
|
||||
else:
|
||||
_ENDIAN = '>'
|
||||
|
||||
DTYPES = {
|
||||
"1": ('|b1', None),
|
||||
"L": ('|u1', None),
|
||||
"I": ('%si4' % _ENDIAN, None),
|
||||
"F": ('%sf4' % _ENDIAN, None),
|
||||
"I;16": ('|u2', None),
|
||||
"I;16S": ('%si2' % _ENDIAN, None),
|
||||
"P": ('|u1', None),
|
||||
"RGB": ('|u1', 3),
|
||||
"RGBX": ('|u1', 4),
|
||||
"RGBA": ('|u1', 4),
|
||||
"CMYK": ('|u1', 4),
|
||||
"YCbCr": ('|u1', 4),
|
||||
}
|
||||
|
||||
|
||||
def __image_to_array(filename):
|
||||
img = PIL.Image.open(filename)
|
||||
try:
|
||||
dtype, extra = DTYPES[img.mode]
|
||||
except KeyError:
|
||||
raise RuntimeError("%s mode is not supported" % img.mode)
|
||||
shape = (img.size[1], img.size[0])
|
||||
if extra is not None:
|
||||
shape += (extra,)
|
||||
return np.array(img.getdata(), dtype=np.dtype(dtype)).reshape(shape)
|
||||
|
||||
|
||||
def load_image(filename):
|
||||
if PIL.Image is FakeObject or np.array is FakeObject:
|
||||
return None, ''
|
||||
|
||||
try:
|
||||
name = osp.splitext(osp.basename(filename))[0]
|
||||
return {name: __image_to_array(filename)}, None
|
||||
except Exception as error:
|
||||
return None, str(error)
|
||||
|
||||
|
||||
def load_pickle(filename):
|
||||
"""Load a pickle file as a dictionary"""
|
||||
try:
|
||||
if pd.read_pickle is not FakeObject:
|
||||
return pd.read_pickle(filename), None
|
||||
else:
|
||||
with open(filename, 'rb') as fid:
|
||||
data = pickle.load(fid)
|
||||
return data, None
|
||||
except Exception as err:
|
||||
return None, str(err)
|
||||
|
||||
|
||||
def load_json(filename):
|
||||
"""Load a json file as a dictionary"""
|
||||
try:
|
||||
if PY2:
|
||||
args = 'rb'
|
||||
else:
|
||||
args = 'r'
|
||||
with open(filename, args) as fid:
|
||||
data = json.load(fid)
|
||||
return data, None
|
||||
except Exception as err:
|
||||
return None, str(err)
|
||||
|
||||
|
||||
def save_dictionary(data, filename):
|
||||
"""Save dictionary in a single file .spydata file"""
|
||||
filename = osp.abspath(filename)
|
||||
old_cwd = getcwd()
|
||||
os.chdir(osp.dirname(filename))
|
||||
error_message = None
|
||||
skipped_keys = []
|
||||
data_copy = {}
|
||||
|
||||
try:
|
||||
# Copy dictionary before modifying it to fix #6689
|
||||
for obj_name, obj_value in data.items():
|
||||
# Skip modules, since they can't be pickled; users virtually never
|
||||
# want them saved anyway, so they don't show up in the skip list.
|
||||
# Skip callables, since they are only pickled by reference and thus
|
||||
# must already be present in the user's environment anyway.
|
||||
if not (callable(obj_value) or isinstance(obj_value,
|
||||
types.ModuleType)):
|
||||
# If an object cannot be deepcopied, then it cannot be pickled.
|
||||
# Ergo, we skip it and list it later.
|
||||
try:
|
||||
data_copy[obj_name] = copy.deepcopy(obj_value)
|
||||
except Exception:
|
||||
skipped_keys.append(obj_name)
|
||||
data = data_copy
|
||||
if not data:
|
||||
raise RuntimeError('No supported objects to save')
|
||||
|
||||
saved_arrays = {}
|
||||
if np.ndarray is not FakeObject:
|
||||
# Saving numpy arrays with np.save
|
||||
arr_fname = osp.splitext(filename)[0]
|
||||
for name in list(data.keys()):
|
||||
try:
|
||||
if (isinstance(data[name], np.ndarray) and
|
||||
data[name].size > 0):
|
||||
# Save arrays at data root
|
||||
fname = __save_array(data[name], arr_fname,
|
||||
len(saved_arrays))
|
||||
saved_arrays[(name, None)] = osp.basename(fname)
|
||||
data.pop(name)
|
||||
elif isinstance(data[name], (list, dict)):
|
||||
# Save arrays nested in lists or dictionaries
|
||||
if isinstance(data[name], list):
|
||||
iterator = enumerate(data[name])
|
||||
else:
|
||||
iterator = iter(list(data[name].items()))
|
||||
to_remove = []
|
||||
for index, value in iterator:
|
||||
if (isinstance(value, np.ndarray) and
|
||||
value.size > 0):
|
||||
fname = __save_array(value, arr_fname,
|
||||
len(saved_arrays))
|
||||
saved_arrays[(name, index)] = (
|
||||
osp.basename(fname))
|
||||
to_remove.append(index)
|
||||
for index in sorted(to_remove, reverse=True):
|
||||
data[name].pop(index)
|
||||
except (RuntimeError, pickle.PicklingError, TypeError,
|
||||
AttributeError, IndexError):
|
||||
# If an array can't be saved with numpy for some reason,
|
||||
# leave the object intact and try to save it normally.
|
||||
pass
|
||||
if saved_arrays:
|
||||
data['__saved_arrays__'] = saved_arrays
|
||||
|
||||
pickle_filename = osp.splitext(filename)[0] + '.pickle'
|
||||
# Attempt to pickle everything.
|
||||
# If pickling fails, iterate through to eliminate problem objs & retry.
|
||||
with open(pickle_filename, 'w+b') as fdesc:
|
||||
try:
|
||||
pickle.dump(data, fdesc, protocol=2)
|
||||
except (pickle.PicklingError, AttributeError, TypeError,
|
||||
ImportError, IndexError, RuntimeError):
|
||||
data_filtered = {}
|
||||
for obj_name, obj_value in data.items():
|
||||
try:
|
||||
pickle.dumps(obj_value, protocol=2)
|
||||
except Exception:
|
||||
skipped_keys.append(obj_name)
|
||||
else:
|
||||
data_filtered[obj_name] = obj_value
|
||||
if not data_filtered:
|
||||
raise RuntimeError('No supported objects to save')
|
||||
pickle.dump(data_filtered, fdesc, protocol=2)
|
||||
|
||||
# Use PAX (POSIX.1-2001) format instead of default GNU.
|
||||
# This improves interoperability and UTF-8/long variable name support.
|
||||
with tarfile.open(filename, "w", format=tarfile.PAX_FORMAT) as tar:
|
||||
for fname in ([pickle_filename]
|
||||
+ [fn for fn in list(saved_arrays.values())]):
|
||||
tar.add(osp.basename(fname))
|
||||
os.remove(fname)
|
||||
except (RuntimeError, pickle.PicklingError, TypeError) as error:
|
||||
error_message = to_text_string(error)
|
||||
else:
|
||||
if skipped_keys:
|
||||
skipped_keys.sort()
|
||||
error_message = ('Some objects could not be saved: '
|
||||
+ ', '.join(skipped_keys))
|
||||
finally:
|
||||
os.chdir(old_cwd)
|
||||
return error_message
|
||||
|
||||
|
||||
def is_within_directory(directory, target):
|
||||
"""Check if a file is within a directory."""
|
||||
abs_directory = os.path.abspath(directory)
|
||||
abs_target = os.path.abspath(target)
|
||||
prefix = os.path.commonprefix([abs_directory, abs_target])
|
||||
return prefix == abs_directory
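# Illustrative sketch (hypothetical POSIX paths, not part of the original
# module): the check rejects members that escape the extraction directory.
#
#     >>> is_within_directory('/tmp/extract', '/tmp/extract/data.pickle')
#     True
#     >>> is_within_directory('/tmp/extract', '/tmp/extract/../../etc/passwd')
#     False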
|
||||
|
||||
|
||||
def safe_extract(tar, path=".", members=None, numeric_owner=False):
|
||||
"""Safely extract a tar file."""
|
||||
for member in tar.getmembers():
|
||||
member_path = os.path.join(path, member.name)
|
||||
if not is_within_directory(path, member_path):
|
||||
raise Exception(
|
||||
"Attempted path traversal in tar file {}".format(
|
||||
repr(tar.name)
|
||||
)
|
||||
)
|
||||
tar.extractall(path, members, numeric_owner=numeric_owner)
|
||||
|
||||
|
||||
def load_dictionary(filename):
|
||||
"""Load dictionary from .spydata file"""
|
||||
filename = osp.abspath(filename)
|
||||
old_cwd = getcwd()
|
||||
tmp_folder = tempfile.mkdtemp()
|
||||
os.chdir(tmp_folder)
|
||||
data = None
|
||||
error_message = None
|
||||
try:
|
||||
with tarfile.open(filename, "r") as tar:
|
||||
if PY2:
|
||||
tar.extractall()
|
||||
else:
|
||||
safe_extract(tar)
|
||||
|
||||
pickle_filename = glob.glob('*.pickle')[0]
|
||||
# 'New' format (Spyder >=2.2 for Python 2 and Python 3)
|
||||
with open(pickle_filename, 'rb') as fdesc:
|
||||
data = pickle.loads(fdesc.read())
|
||||
saved_arrays = {}
|
||||
if np.load is not FakeObject:
|
||||
# Loading numpy arrays saved with np.save
|
||||
try:
|
||||
saved_arrays = data.pop('__saved_arrays__')
|
||||
for (name, index), fname in list(saved_arrays.items()):
|
||||
arr = np.load(osp.join(tmp_folder, fname), allow_pickle=True)
|
||||
if index is None:
|
||||
data[name] = arr
|
||||
elif isinstance(data[name], dict):
|
||||
data[name][index] = arr
|
||||
else:
|
||||
data[name].insert(index, arr)
|
||||
except KeyError:
|
||||
pass
|
||||
# Catch AttributeError from e.g. trying to load a function that is no longer present
|
||||
except (AttributeError, EOFError, ValueError) as error:
|
||||
error_message = to_text_string(error)
|
||||
# To ensure working dir gets changed back and temp dir wiped no matter what
|
||||
finally:
|
||||
os.chdir(old_cwd)
|
||||
try:
|
||||
shutil.rmtree(tmp_folder)
|
||||
except OSError as error:
|
||||
error_message = to_text_string(error)
|
||||
return data, error_message
|
||||
|
||||
|
||||
class IOFunctions(object):
|
||||
def __init__(self):
|
||||
self.load_extensions = None
|
||||
self.save_extensions = None
|
||||
self.load_filters = None
|
||||
self.save_filters = None
|
||||
self.load_funcs = None
|
||||
self.save_funcs = None
|
||||
|
||||
def setup(self):
|
||||
iofuncs = self.get_internal_funcs()+self.get_3rd_party_funcs()
|
||||
load_extensions = {}
|
||||
save_extensions = {}
|
||||
load_funcs = {}
|
||||
save_funcs = {}
|
||||
load_filters = []
|
||||
save_filters = []
|
||||
load_ext = []
|
||||
for ext, name, loadfunc, savefunc in iofuncs:
|
||||
filter_str = to_text_string(name + " (*%s)" % ext)
|
||||
if loadfunc is not None:
|
||||
load_filters.append(filter_str)
|
||||
load_extensions[filter_str] = ext
|
||||
load_funcs[ext] = loadfunc
|
||||
load_ext.append(ext)
|
||||
if savefunc is not None:
|
||||
save_extensions[filter_str] = ext
|
||||
save_filters.append(filter_str)
|
||||
save_funcs[ext] = savefunc
|
||||
load_filters.insert(0, to_text_string("Supported files"+" (*"+\
|
||||
" *".join(load_ext)+")"))
|
||||
load_filters.append(to_text_string("All files (*.*)"))
|
||||
self.load_filters = "\n".join(load_filters)
|
||||
self.save_filters = "\n".join(save_filters)
|
||||
self.load_funcs = load_funcs
|
||||
self.save_funcs = save_funcs
|
||||
self.load_extensions = load_extensions
|
||||
self.save_extensions = save_extensions
|
||||
|
||||
def get_internal_funcs(self):
|
||||
return [
|
||||
('.spydata', "Spyder data files",
|
||||
load_dictionary, save_dictionary),
|
||||
('.npy', "NumPy arrays", load_array, None),
|
||||
('.npz', "NumPy zip arrays", load_array, None),
|
||||
('.mat', "Matlab files", load_matlab, save_matlab),
|
||||
('.csv', "CSV text files", 'import_wizard', None),
|
||||
('.txt', "Text files", 'import_wizard', None),
|
||||
('.jpg', "JPEG images", load_image, None),
|
||||
('.png', "PNG images", load_image, None),
|
||||
('.gif', "GIF images", load_image, None),
|
||||
('.tif', "TIFF images", load_image, None),
|
||||
('.pkl', "Pickle files", load_pickle, None),
|
||||
('.pickle', "Pickle files", load_pickle, None),
|
||||
('.json', "JSON files", load_json, None),
|
||||
]
|
||||
|
||||
def get_3rd_party_funcs(self):
|
||||
other_funcs = []
|
||||
try:
|
||||
from spyder.otherplugins import get_spyderplugins_mods
|
||||
for mod in get_spyderplugins_mods(io=True):
|
||||
try:
|
||||
other_funcs.append((mod.FORMAT_EXT, mod.FORMAT_NAME,
|
||||
mod.FORMAT_LOAD, mod.FORMAT_SAVE))
|
||||
except AttributeError as error:
|
||||
print("%s: %s" % (mod, str(error)), file=sys.stderr)
|
||||
except ImportError:
|
||||
pass
|
||||
return other_funcs
|
||||
|
||||
def save(self, data, filename):
|
||||
ext = osp.splitext(filename)[1].lower()
|
||||
if ext in self.save_funcs:
|
||||
return self.save_funcs[ext](data, filename)
|
||||
else:
|
||||
return "<b>Unsupported file type '%s'</b>" % ext
|
||||
|
||||
def load(self, filename):
|
||||
ext = osp.splitext(filename)[1].lower()
|
||||
if ext in self.load_funcs:
|
||||
return self.load_funcs[ext](filename)
|
||||
else:
|
||||
return None, "<b>Unsupported file type '%s'</b>" % ext
|
||||
|
||||
iofunctions = IOFunctions()
|
||||
iofunctions.setup()
|
||||
|
||||
|
||||
def save_auto(data, filename):
|
||||
"""Save data into filename, depending on file extension"""
|
||||
pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import datetime
|
||||
testdict = {'d': 1, 'a': np.random.rand(10, 10), 'b': [1, 2]}
|
||||
testdate = datetime.date(1945, 5, 8)
|
||||
example = {'str': 'kjkj kj k j j kj k jkj',
|
||||
'unicode': u'éù',
|
||||
'list': [1, 3, [4, 5, 6], 'kjkj', None],
|
||||
'tuple': ([1, testdate, testdict], 'kjkj', None),
|
||||
'dict': testdict,
|
||||
'float': 1.2233,
|
||||
'array': np.random.rand(4000, 400),
|
||||
'empty_array': np.array([]),
|
||||
'date': testdate,
|
||||
'datetime': datetime.datetime(1945, 5, 8),
|
||||
}
|
||||
import time
|
||||
t0 = time.time()
|
||||
save_dictionary(example, "test.spydata")
|
||||
print(" Data saved in %.3f seconds" % (time.time()-t0)) # spyder: test-skip
|
||||
t0 = time.time()
|
||||
example2, ok = load_dictionary("test.spydata")
|
||||
os.remove("test.spydata")
|
||||
|
||||
print("Data loaded in %.3f seconds" % (time.time()-t0)) # spyder: test-skip
|
||||
@@ -0,0 +1,69 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
Lazy modules.
|
||||
|
||||
They are useful to avoid importing big modules until it's really necessary.
|
||||
"""
|
||||
|
||||
from spyder_kernels.utils.misc import is_module_installed
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Auxiliary classes
|
||||
# =============================================================================
|
||||
class FakeObject(object):
|
||||
"""Fake class used in replacement of missing objects"""
|
||||
pass
|
||||
|
||||
|
||||
class LazyModule(object):
|
||||
"""Lazy module loader class."""
|
||||
|
||||
def __init__(self, modname, second_level_attrs=None):
|
||||
"""
|
||||
Lazy module loader class.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
modname: str
|
||||
Module name to lazy load.
|
||||
second_level_attrs: list (optional)
|
||||
List of second level attributes to add to the FakeObject
|
||||
that stands for the module in case it's not found.
|
||||
"""
|
||||
self.__spy_modname__ = modname
|
||||
self.__spy_mod__ = FakeObject
|
||||
|
||||
# Set required second level attributes
|
||||
if second_level_attrs is not None:
|
||||
for attr in second_level_attrs:
|
||||
setattr(self.__spy_mod__, attr, FakeObject)
|
||||
|
||||
def __getattr__(self, name):
|
||||
if is_module_installed(self.__spy_modname__):
|
||||
self.__spy_mod__ = __import__(self.__spy_modname__)
|
||||
else:
|
||||
return self.__spy_mod__
|
||||
|
||||
return getattr(self.__spy_mod__, name)
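# Illustrative sketch (not part of the original module): the first attribute
# access triggers the real import, or returns FakeObject if the module is
# missing.
#
#     >>> lazy_np = LazyModule('numpy', ['MaskedArray'])
#     >>> lazy_np.ndarray is FakeObject   # False when numpy is installed
#     False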
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Lazy modules
|
||||
# =============================================================================
|
||||
numpy = LazyModule('numpy', ['MaskedArray'])
|
||||
|
||||
pandas = LazyModule('pandas')
|
||||
|
||||
PIL = LazyModule('PIL.Image', ['Image'])
|
||||
|
||||
bs4 = LazyModule('bs4', ['NavigableString'])
|
||||
|
||||
scipy = LazyModule('scipy.io')
|
||||
@@ -0,0 +1,50 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""Miscellaneous utilities"""
|
||||
|
||||
import re
|
||||
|
||||
from spyder_kernels.py3compat import lru_cache
|
||||
|
||||
|
||||
@lru_cache(maxsize=100)
|
||||
def is_module_installed(module_name):
|
||||
"""
|
||||
Simpler version of spyder.utils.programs.is_module_installed.
|
||||
"""
|
||||
try:
|
||||
mod = __import__(module_name)
|
||||
# This is necessary to avoid reporting that the module is installed
|
||||
# when only its __pycache__ directory is present.
|
||||
if getattr(mod, '__file__', None):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
except Exception:
|
||||
# Module is not installed
|
||||
return False
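# Illustrative sketch (not part of the original module): note that builtin
# modules such as `sys` have no __file__, so they are reported as missing.
#
#     >>> is_module_installed('json')
#     True
#     >>> is_module_installed('sys')
#     False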
|
||||
|
||||
|
||||
def fix_reference_name(name, blacklist=None):
|
||||
"""Return a syntax-valid Python reference name from an arbitrary name"""
|
||||
name = "".join(re.split(r'[^0-9a-zA-Z_]', name))
|
||||
while name and not re.match(r'([a-zA-Z]+[0-9a-zA-Z_]*)$', name):
|
||||
if not re.match(r'[a-zA-Z]', name[0]):
|
||||
name = name[1:]
|
||||
continue
|
||||
name = str(name)
|
||||
if not name:
|
||||
name = "data"
|
||||
if blacklist is not None and name in blacklist:
|
||||
get_new_name = lambda index: name+('_%03d' % index)
|
||||
index = 0
|
||||
while get_new_name(index) in blacklist:
|
||||
index += 1
|
||||
name = get_new_name(index)
|
||||
return name
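# Illustrative sketch (hypothetical names, not part of the original module):
#
#     >>> fix_reference_name('2nd value!')
#     'ndvalue'
#     >>> fix_reference_name('data', blacklist=['data'])
#     'data_000'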
|
||||
@@ -0,0 +1,56 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""Matplotlib utilities."""
|
||||
|
||||
from spyder_kernels.utils.misc import is_module_installed
|
||||
|
||||
|
||||
# Mapping of inline figure formats
|
||||
INLINE_FIGURE_FORMATS = {
|
||||
'0': 'png',
|
||||
'1': 'svg'
|
||||
}
|
||||
|
||||
|
||||
# Inline backend
|
||||
if is_module_installed('matplotlib_inline'):
|
||||
inline_backend = 'module://matplotlib_inline.backend_inline'
|
||||
else:
|
||||
inline_backend = 'module://ipykernel.pylab.backend_inline'
|
||||
|
||||
|
||||
# Mapping of matplotlib backend options to Spyder
|
||||
MPL_BACKENDS_TO_SPYDER = {
|
||||
inline_backend: 0,
|
||||
'Qt5Agg': 2,
|
||||
'QtAgg': 2, # For Matplotlib 3.5+
|
||||
'TkAgg': 3,
|
||||
'MacOSX': 4,
|
||||
}
|
||||
|
||||
|
||||
def automatic_backend():
|
||||
"""Get Matplolib automatic backend option."""
|
||||
if is_module_installed('PyQt5'):
|
||||
auto_backend = 'qt5'
|
||||
elif is_module_installed('_tkinter'):
|
||||
auto_backend = 'tk'
|
||||
else:
|
||||
auto_backend = 'inline'
|
||||
return auto_backend
|
||||
|
||||
|
||||
# Mapping of Spyder options to backends
|
||||
MPL_BACKENDS_FROM_SPYDER = {
|
||||
'0': 'inline',
|
||||
'1': automatic_backend(),
|
||||
'2': 'qt5',
|
||||
'3': 'tk',
|
||||
'4': 'osx'
|
||||
}
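# Illustrative sketch (not part of the original module): Spyder option '2'
# selects the qt5 backend, and Matplotlib's 'QtAgg' maps back to option 2.
#
#     >>> MPL_BACKENDS_FROM_SPYDER['2']
#     'qt5'
#     >>> MPL_BACKENDS_TO_SPYDER['QtAgg']
#     2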
|
||||
@@ -0,0 +1,716 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
Utilities to build a namespace view.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from itertools import islice
|
||||
import inspect
|
||||
import re
|
||||
|
||||
# Local imports
|
||||
from spyder_kernels.py3compat import (NUMERIC_TYPES, INT_TYPES, TEXT_TYPES,
|
||||
to_text_string, is_text_string,
|
||||
is_type_text_string,
|
||||
is_binary_string, PY2,
|
||||
to_binary_string, iteritems)
|
||||
from spyder_kernels.utils.lazymodules import (
|
||||
bs4, FakeObject, numpy as np, pandas as pd, PIL)
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Numpy support
|
||||
#==============================================================================
|
||||
def get_numeric_numpy_types():
|
||||
return (np.int64, np.int32, np.int16, np.int8, np.uint64, np.uint32,
|
||||
np.uint16, np.uint8, np.float64, np.float32, np.float16,
|
||||
np.complex64, np.complex128, np.bool_)
|
||||
|
||||
|
||||
def get_numpy_dtype(obj):
|
||||
"""
|
||||
Return Numpy data type associated to `obj`.
|
||||
|
||||
Return None if Numpy is not available, if we get errors or if `obj` is not
|
||||
a Numpy array or scalar.
|
||||
"""
|
||||
# Check if NumPy is available
|
||||
if np.ndarray is not FakeObject:
|
||||
# All Numpy scalars inherit from np.generic and all Numpy arrays
|
||||
# inherit from np.ndarray. If we check that we are certain we have one
|
||||
# of these types then we are less likely to generate an exception
|
||||
# below.
|
||||
# Note: The try/except is necessary to fix spyder-ide/spyder#19516.
|
||||
try:
|
||||
scalar_or_array = (
|
||||
isinstance(obj, np.generic) or isinstance(obj, np.ndarray)
|
||||
)
|
||||
except Exception:
|
||||
return
|
||||
|
||||
if scalar_or_array:
|
||||
try:
|
||||
return obj.dtype.type
|
||||
except (AttributeError, RuntimeError):
|
||||
# AttributeError: some NumPy objects have no dtype attribute
|
||||
# RuntimeError: happens with NetCDF objects (Issue 998)
|
||||
return
|
||||
|
||||
|
||||
def get_numpy_type_string(value):
|
||||
"""Get the type of a Numpy object as a string."""
|
||||
np_dtype = get_numpy_dtype(value)
|
||||
if np_dtype is None or not hasattr(value, 'size'):
|
||||
return 'Unknown'
|
||||
elif value.size == 1:
|
||||
return 'Scalar'
|
||||
else:
|
||||
return 'Array'
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Misc.
|
||||
#==============================================================================
|
||||
def address(obj):
|
||||
"""Return object address as a string: '<classname @ address>'"""
|
||||
return "<%s @ %s>" % (obj.__class__.__name__,
|
||||
hex(id(obj)).upper().replace('X', 'x'))
|
||||
|
||||
|
||||
def try_to_eval(value):
|
||||
"""Try to eval value"""
|
||||
try:
|
||||
return eval(value)
|
||||
except (NameError, SyntaxError, ImportError):
|
||||
return value
|
||||
|
||||
|
||||
def get_size(item):
|
||||
"""Return shape/size/len of an item of arbitrary type"""
|
||||
try:
|
||||
if (
|
||||
hasattr(item, 'size') and hasattr(item.size, 'compute') or
|
||||
hasattr(item, 'shape') and hasattr(item.shape, 'compute')
|
||||
):
|
||||
# This is necessary to avoid an error when trying to
|
||||
# get the size/shape of dask objects. We don't compute the
|
||||
# size/shape since such operation could be expensive.
|
||||
# Fixes spyder-ide/spyder#16844
|
||||
return 1
|
||||
elif (
|
||||
hasattr(item, 'shape') and
|
||||
isinstance(item.shape, (tuple, np.integer))
|
||||
):
|
||||
try:
|
||||
if item.shape:
|
||||
# This is needed since values could return as
|
||||
# `shape` an instance of a `tuple` subclass.
|
||||
# See spyder-ide/spyder#16348
|
||||
if isinstance(item.shape, tuple):
|
||||
return tuple(item.shape)
|
||||
return item.shape
|
||||
else:
|
||||
# Scalar value
|
||||
return 1
|
||||
except RecursionError:
|
||||
# This is necessary to avoid an error when trying to
|
||||
# get the shape of these objects.
|
||||
# Fixes spyder-ide/spyder-kernels#217
|
||||
return (-1, -1)
|
||||
elif (hasattr(item, 'size') and
|
||||
isinstance(item.size, (tuple, np.integer))):
|
||||
try:
|
||||
return item.size
|
||||
except RecursionError:
|
||||
return (-1, -1)
|
||||
elif hasattr(item, '__len__'):
|
||||
return len(item)
|
||||
else:
|
||||
return 1
|
||||
except Exception:
|
||||
# There is one item
|
||||
return 1
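# Illustrative sketch (assumes numpy is installed; not part of the original
# module):
#
#     >>> get_size([1, 2, 3])
#     3
#     >>> get_size(np.zeros((2, 4)))
#     (2, 4)
#     >>> get_size(10)
#     1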
|
||||
|
||||
|
||||
def get_object_attrs(obj):
|
||||
"""
|
||||
Get the attributes of an object using dir.
|
||||
|
||||
This filters protected attributes
|
||||
"""
|
||||
attrs = [k for k in dir(obj) if not k.startswith('__')]
|
||||
if not attrs:
|
||||
attrs = dir(obj)
|
||||
return attrs
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Date and datetime objects support
|
||||
#==============================================================================
|
||||
import datetime
|
||||
|
||||
try:
|
||||
from dateutil.parser import parse as dateparse
|
||||
except:
|
||||
def dateparse(datestr): # analysis:ignore
|
||||
"""Just for 'year, month, day' strings"""
|
||||
return datetime.datetime(*list(map(int, datestr.split(','))))
|
||||
|
||||
|
||||
def datestr_to_datetime(value):
|
||||
rp = value.rfind('(')+1
|
||||
v = dateparse(value[rp:-1])
|
||||
print(value, "-->", v) # spyder: test-skip
|
||||
return v
|
||||
|
||||
|
||||
def str_to_timedelta(value):
|
||||
"""Convert a string to a datetime.timedelta value.
|
||||
|
||||
The following strings are accepted:
|
||||
|
||||
- 'datetime.timedelta(1, 5, 12345)'
|
||||
- 'timedelta(1, 5, 12345)'
|
||||
- '(1, 5, 12345)'
|
||||
- '1, 5, 12345'
|
||||
- '1'
|
||||
|
||||
If there are fewer than three parameters, the missing parameters are
|
||||
assumed to be 0. Variations in the spacing of the parameters are allowed.
|
||||
|
||||
Raises:
|
||||
ValueError for strings not matching the above criterion.
|
||||
|
||||
"""
|
||||
m = re.match(r'^(?:(?:datetime\.)?timedelta)?'
|
||||
r'\(?'
|
||||
r'([^)]*)'
|
||||
r'\)?$', value)
|
||||
if not m:
|
||||
raise ValueError('Invalid string for datetime.timedelta')
|
||||
args = [int(a.strip()) for a in m.group(1).split(',')]
|
||||
return datetime.timedelta(*args)
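# Illustrative sketch (not part of the original module; repr shown as on
# Python 3.7+):
#
#     >>> str_to_timedelta('datetime.timedelta(1, 5, 12345)')
#     datetime.timedelta(days=1, seconds=5, microseconds=12345)
#     >>> str_to_timedelta('1, 5')
#     datetime.timedelta(days=1, seconds=5)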
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Supported types
|
||||
#==============================================================================
|
||||
def is_editable_type(value):
|
||||
"""
|
||||
Return True if data type is editable with a standard GUI-based editor,
|
||||
like CollectionsEditor, ArrayEditor, QDateEdit or a simple QLineEdit.
|
||||
"""
|
||||
if not is_known_type(value):
|
||||
return False
|
||||
else:
|
||||
supported_types = [
|
||||
'bool', 'int', 'long', 'float', 'complex', 'list', 'set', 'dict',
|
||||
'tuple', 'str', 'unicode', 'NDArray', 'MaskedArray', 'Matrix',
|
||||
'DataFrame', 'Series', 'PIL.Image.Image', 'datetime.date',
|
||||
'datetime.timedelta'
|
||||
]
|
||||
|
||||
if (get_type_string(value) not in supported_types and
|
||||
not isinstance(value, pd.Index)):
|
||||
np_dtype = get_numpy_dtype(value)
|
||||
if np_dtype is None or not hasattr(value, 'size'):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Sorting
|
||||
#==============================================================================
|
||||
def sort_against(list1, list2, reverse=False, sort_key=None):
|
||||
"""
|
||||
Arrange items of list1 in the same order as sorted(list2).
|
||||
|
||||
In other words, apply to list1 the permutation which takes list2
|
||||
to sorted(list2, reverse).
|
||||
"""
|
||||
if sort_key is None:
|
||||
key = lambda x: x[0]
|
||||
else:
|
||||
key = lambda x: sort_key(x[0])
|
||||
try:
|
||||
return [item for _, item in
|
||||
sorted(zip(list2, list1), key=key, reverse=reverse)]
|
||||
except:
|
||||
return list1
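# Illustrative sketch (hypothetical lists, not part of the original module):
# list1 is permuted the same way sorting would permute list2.
#
#     >>> sort_against(['a', 'b', 'c'], [3, 1, 2])
#     ['b', 'c', 'a']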
|
||||
|
||||
|
||||
def unsorted_unique(lista):
|
||||
"""Removes duplicates from lista neglecting its initial ordering"""
|
||||
return list(set(lista))
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Display <--> Value
|
||||
#==============================================================================
|
||||
def default_display(value, with_module=True):
|
||||
"""Default display for unknown objects."""
|
||||
object_type = type(value)
|
||||
try:
|
||||
name = object_type.__name__
|
||||
module = object_type.__module__
|
||||
|
||||
# Classes correspond to new types
|
||||
if name == 'type':
|
||||
name = 'class'
|
||||
|
||||
if with_module:
|
||||
if name == 'module':
|
||||
return value.__name__ + ' module'
|
||||
if module == 'builtins':
|
||||
return name + ' object'
|
||||
return name + ' object of ' + module + ' module'
|
||||
return name
|
||||
except Exception:
|
||||
type_str = to_text_string(object_type)
|
||||
return type_str[1:-1]
|
||||
|
||||
|
||||
def collections_display(value, level):
|
||||
"""Display for collections (i.e. list, set, tuple and dict)."""
|
||||
is_dict = isinstance(value, dict)
|
||||
is_set = isinstance(value, set)
|
||||
|
||||
# Get elements
|
||||
if is_dict:
|
||||
elements = iteritems(value)
|
||||
else:
|
||||
elements = value
|
||||
|
||||
# Truncate values
|
||||
truncate = False
|
||||
if level == 1 and len(value) > 10:
|
||||
elements = islice(elements, 10) if is_dict or is_set else value[:10]
|
||||
truncate = True
|
||||
elif level == 2 and len(value) > 5:
|
||||
elements = islice(elements, 5) if is_dict or is_set else value[:5]
|
||||
truncate = True
|
||||
|
||||
# Get display of each element
|
||||
if level <= 2:
|
||||
if is_dict:
|
||||
displays = [value_to_display(k, level=level) + ':' +
|
||||
value_to_display(v, level=level)
|
||||
for (k, v) in list(elements)]
|
||||
else:
|
||||
displays = [value_to_display(e, level=level)
|
||||
for e in elements]
|
||||
if truncate:
|
||||
displays.append('...')
|
||||
display = ', '.join(displays)
|
||||
else:
|
||||
display = '...'
|
||||
|
||||
# Return display
|
||||
if is_dict:
|
||||
display = '{' + display + '}'
|
||||
elif isinstance(value, list):
|
||||
display = '[' + display + ']'
|
||||
elif isinstance(value, set):
|
||||
display = '{' + display + '}'
|
||||
else:
|
||||
display = '(' + display + ')'
|
||||
|
||||
return display
|
||||
|
||||
|
||||
def value_to_display(value, minmax=False, level=0):
|
||||
"""Convert value for display purpose"""
|
||||
# To save current Numpy printoptions
|
||||
np_printoptions = FakeObject
|
||||
numeric_numpy_types = get_numeric_numpy_types()
|
||||
|
||||
try:
|
||||
if np.ndarray is not FakeObject:
|
||||
# Save printoptions
|
||||
np_printoptions = np.get_printoptions()
|
||||
# Set max number of elements to show for Numpy arrays
|
||||
# in our display
|
||||
np.set_printoptions(threshold=10)
|
||||
if isinstance(value, np.recarray):
|
||||
if level == 0:
|
||||
fields = value.names
|
||||
display = 'Field names: ' + ', '.join(fields)
|
||||
else:
|
||||
display = 'Recarray'
|
||||
elif isinstance(value, np.ma.MaskedArray):
|
||||
display = 'Masked array'
|
||||
elif isinstance(value, np.ndarray):
|
||||
if level == 0:
|
||||
if minmax:
|
||||
try:
|
||||
display = 'Min: %r\nMax: %r' % (value.min(), value.max())
|
||||
except (TypeError, ValueError):
|
||||
if value.dtype.type in numeric_numpy_types:
|
||||
display = str(value)
|
||||
else:
|
||||
display = default_display(value)
|
||||
elif value.dtype.type in numeric_numpy_types:
|
||||
display = str(value)
|
||||
else:
|
||||
display = default_display(value)
|
||||
else:
|
||||
display = 'Numpy array'
|
||||
elif any([type(value) == t for t in [list, set, tuple, dict]]):
|
||||
display = collections_display(value, level+1)
|
||||
elif isinstance(value, PIL.Image.Image):
|
||||
if level == 0:
|
||||
display = '%s Mode: %s' % (address(value), value.mode)
|
||||
else:
|
||||
display = 'Image'
|
||||
elif isinstance(value, pd.DataFrame):
|
||||
if level == 0:
|
||||
cols = value.columns
|
||||
if PY2 and len(cols) > 0:
|
||||
# Get rid of possible BOM utf-8 data present at the
|
||||
# beginning of a file, which gets attached to the first
|
||||
# column header when headers are present in the first
|
||||
# row.
|
||||
# Fixes Issue 2514
|
||||
try:
|
||||
ini_col = to_text_string(cols[0], encoding='utf-8-sig')
|
||||
except:
|
||||
ini_col = to_text_string(cols[0])
|
||||
cols = [ini_col] + [to_text_string(c) for c in cols[1:]]
|
||||
else:
|
||||
cols = [to_text_string(c) for c in cols]
|
||||
display = 'Column names: ' + ', '.join(list(cols))
|
||||
else:
|
||||
display = 'Dataframe'
|
||||
elif isinstance(value, bs4.element.NavigableString):
|
||||
# Fixes Issue 2448
|
||||
display = to_text_string(value)
|
||||
if level > 0:
|
||||
display = u"'" + display + u"'"
|
||||
elif isinstance(value, pd.Index):
|
||||
if level == 0:
|
||||
try:
|
||||
display = value._summary()
|
||||
except AttributeError:
|
||||
display = value.summary()
|
||||
else:
|
||||
display = 'Index'
|
||||
elif is_binary_string(value):
|
||||
# We don't apply this to classes that extend string types
|
||||
# See issue 5636
|
||||
if is_type_text_string(value):
|
||||
try:
|
||||
display = to_text_string(value, 'utf8')
|
||||
if level > 0:
|
||||
display = u"'" + display + u"'"
|
||||
except:
|
||||
display = value
|
||||
if level > 0:
|
||||
display = b"'" + display + b"'"
|
||||
else:
|
||||
display = default_display(value)
|
||||
elif is_text_string(value):
|
||||
# We don't apply this to classes that extend string types
|
||||
# See issue 5636
|
||||
if is_type_text_string(value):
|
||||
display = value
|
||||
if level > 0:
|
||||
display = u"'" + display + u"'"
|
||||
else:
|
||||
display = default_display(value)
|
||||
elif (isinstance(value, datetime.date) or
|
||||
isinstance(value, datetime.timedelta)):
|
||||
display = str(value)
|
||||
elif (isinstance(value, NUMERIC_TYPES) or
|
||||
isinstance(value, bool) or
|
||||
isinstance(value, numeric_numpy_types)):
|
||||
display = repr(value)
|
||||
else:
|
||||
if level == 0:
|
||||
display = default_display(value)
|
||||
else:
|
||||
display = default_display(value, with_module=False)
|
||||
except Exception:
|
||||
display = default_display(value)
|
||||
|
||||
# Truncate display at 70 chars to avoid freezing Spyder
|
||||
# because of large displays
|
||||
if len(display) > 70:
|
||||
if is_binary_string(display):
|
||||
ellipses = b' ...'
|
||||
else:
|
||||
ellipses = u' ...'
|
||||
display = display[:70].rstrip() + ellipses
|
||||
|
||||
# Restore Numpy printoptions
|
||||
if np_printoptions is not FakeObject:
|
||||
np.set_printoptions(**np_printoptions)
|
||||
|
||||
return display
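# Illustrative sketch (hypothetical value, not part of the original module):
# nested collections are rendered up to two levels deep.
#
#     >>> value_to_display([1, 'two', {3: 4}])
#     "[1, 'two', {3:4}]"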
|
||||
|
||||
|
||||
def display_to_value(value, default_value, ignore_errors=True):
|
||||
"""Convert back to value"""
|
||||
from qtpy.compat import from_qvariant
|
||||
value = from_qvariant(value, to_text_string)
|
||||
try:
|
||||
np_dtype = get_numpy_dtype(default_value)
|
||||
if isinstance(default_value, bool):
|
||||
# We must test for boolean before NumPy data types
|
||||
# because `bool` class derives from `int` class
|
||||
try:
|
||||
value = bool(float(value))
|
||||
except ValueError:
|
||||
value = value.lower() == "true"
|
||||
elif np_dtype is not None:
|
||||
if 'complex' in str(type(default_value)):
|
||||
value = np_dtype(complex(value))
|
||||
else:
|
||||
value = np_dtype(value)
|
||||
elif is_binary_string(default_value):
|
||||
value = to_binary_string(value, 'utf8')
|
||||
elif is_text_string(default_value):
|
||||
value = to_text_string(value)
|
||||
elif isinstance(default_value, complex):
|
||||
value = complex(value)
|
||||
elif isinstance(default_value, float):
|
||||
value = float(value)
|
||||
elif isinstance(default_value, int):
|
||||
try:
|
||||
value = int(value)
|
||||
except ValueError:
|
||||
value = float(value)
|
||||
elif isinstance(default_value, datetime.datetime):
|
||||
value = datestr_to_datetime(value)
|
||||
elif isinstance(default_value, datetime.date):
|
||||
value = datestr_to_datetime(value).date()
|
||||
elif isinstance(default_value, datetime.timedelta):
|
||||
value = str_to_timedelta(value)
|
||||
elif ignore_errors:
|
||||
value = try_to_eval(value)
|
||||
else:
|
||||
value = eval(value)
|
||||
except (ValueError, SyntaxError):
|
||||
if ignore_errors:
|
||||
value = try_to_eval(value)
|
||||
else:
|
||||
return default_value
|
||||
return value
|
||||
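The comment above about testing for booleans before NumPy types deserves a tiny standalone illustration (plain Python, nothing from this module is needed):

```
# bool is a subclass of int, so an isinstance(value, int) check would
# swallow booleans; that is why display_to_value() handles bool first.
assert isinstance(True, int)
# The two conversion paths used above for boolean text:
assert bool(float('1')) is True            # numeric strings like '1' or '1.0'
assert ('True'.lower() == 'true') is True  # everything else falls back here
```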
|
||||
|
||||
# =============================================================================
|
||||
# Types
|
||||
# =============================================================================
|
||||
def get_type_string(item):
|
||||
"""Return type string of an object."""
|
||||
# The try/except is necessary to fix spyder-ide/spyder#19516.
|
||||
try:
|
||||
# Numpy objects (don't change the order!)
|
||||
if isinstance(item, np.ma.MaskedArray):
|
||||
return "MaskedArray"
|
||||
if isinstance(item, np.matrix):
|
||||
return "Matrix"
|
||||
if isinstance(item, np.ndarray):
|
||||
return "NDArray"
|
||||
|
||||
# Pandas objects
|
||||
if isinstance(item, pd.DataFrame):
|
||||
return "DataFrame"
|
||||
if isinstance(item, pd.Index):
|
||||
return type(item).__name__
|
||||
if isinstance(item, pd.Series):
|
||||
return "Series"
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
found = re.findall(r"<(?:type|class) '(\S*)'>",
|
||||
to_text_string(type(item)))
|
||||
if found:
|
||||
if found[0] == 'type':
|
||||
return 'class'
|
||||
return found[0]
|
||||
else:
|
||||
return 'Unknown'
|
||||
|
||||
|
||||
def is_known_type(item):
|
||||
"""Return True if object has a known type"""
|
||||
# Unfortunately, the masked array case is specific
|
||||
return (isinstance(item, np.ma.MaskedArray) or
|
||||
get_type_string(item) != 'Unknown')
|
||||
|
||||
|
||||
def get_human_readable_type(item):
|
||||
"""Return human-readable type string of an item"""
|
||||
# The try/except is necessary to fix spyder-ide/spyder#19516.
|
||||
try:
|
||||
if isinstance(item, (np.ndarray, np.ma.MaskedArray)):
|
||||
return u'Array of ' + item.dtype.name
|
||||
elif isinstance(item, PIL.Image.Image):
|
||||
return "Image"
|
||||
else:
|
||||
text = get_type_string(item)
|
||||
return text[text.find('.')+1:]
|
||||
except Exception:
|
||||
return 'Unknown'
|
||||
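A hedged sketch of what the three helpers above return for a few common objects; the expected strings match the nsview tests added later in this commit, and the dtype name in the human-readable form can vary by platform:

```
import numpy as np
import pandas as pd
from spyder_kernels.utils.nsview import (
    get_type_string, get_human_readable_type, is_known_type)

assert get_type_string(np.array([1, 2, 3])) == 'NDArray'
assert get_type_string(pd.DataFrame([1, 2, 3])) == 'DataFrame'
assert is_known_type(np.ma.MaskedArray([1, 2, 3], mask=[False, True, False]))
# e.g. 'Array of int64' on most 64-bit platforms
print(get_human_readable_type(np.array([1, 2, 3])))
```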
|
||||
|
||||
#==============================================================================
|
||||
# Globals filter: filter namespace dictionaries (to be edited in
|
||||
# CollectionsEditor)
|
||||
#==============================================================================
|
||||
def is_supported(value, check_all=False, filters=None, iterate=False):
|
||||
"""Return True if value is supported, False otherwise."""
|
||||
assert filters is not None
|
||||
if value is None:
|
||||
return True
|
||||
if is_callable_or_module(value):
|
||||
return True
|
||||
elif not is_editable_type(value):
|
||||
return False
|
||||
elif not isinstance(value, filters):
|
||||
return False
|
||||
elif iterate:
|
||||
if isinstance(value, (list, tuple, set)):
|
||||
valid_count = 0
|
||||
for val in value:
|
||||
if is_supported(val, filters=filters, iterate=check_all):
|
||||
valid_count += 1
|
||||
if not check_all:
|
||||
break
|
||||
return valid_count > 0
|
||||
elif isinstance(value, dict):
|
||||
for key, val in list(value.items()):
|
||||
if not is_supported(key, filters=filters, iterate=check_all) \
|
||||
or not is_supported(val, filters=filters,
|
||||
iterate=check_all):
|
||||
return False
|
||||
if not check_all:
|
||||
break
|
||||
return True
|
||||
|
||||
|
||||
def is_callable_or_module(value):
|
||||
"""Return True if value is a callable or module, False otherwise."""
|
||||
try:
|
||||
callable_or_module = callable(value) or inspect.ismodule(value)
|
||||
except Exception:
|
||||
callable_or_module = False
|
||||
return callable_or_module
|
||||
|
||||
|
||||
def globalsfilter(input_dict, check_all=False, filters=None,
|
||||
exclude_private=None, exclude_capitalized=None,
|
||||
exclude_uppercase=None, exclude_unsupported=None,
|
||||
excluded_names=None, exclude_callables_and_modules=None):
|
||||
"""Keep objects in namespace view according to different criteria."""
|
||||
output_dict = {}
|
||||
_is_string = is_type_text_string
|
||||
|
||||
for key, value in list(input_dict.items()):
|
||||
excluded = (
|
||||
(exclude_private and _is_string(key) and key.startswith('_')) or
|
||||
(exclude_capitalized and _is_string(key) and key[0].isupper()) or
|
||||
(exclude_uppercase and _is_string(key) and key.isupper() and
|
||||
len(key) > 1 and not key[1:].isdigit()) or
|
||||
(key in excluded_names) or
|
||||
(exclude_callables_and_modules and is_callable_or_module(value)) or
|
||||
(exclude_unsupported and
|
||||
not is_supported(value, check_all=check_all, filters=filters))
|
||||
)
|
||||
if not excluded:
|
||||
output_dict[key] = value
|
||||
return output_dict
|
||||
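A hedged usage sketch of globalsfilter(); the namespace and the flag values below are illustrative only, not defaults shipped with this commit:

```
import os
from spyder_kernels.utils.nsview import globalsfilter, get_supported_types

ns = {'x': 1, '_hidden': 2, 'PI': 3.14, 'os': os}
filtered = globalsfilter(
    ns,
    check_all=False,
    filters=tuple(get_supported_types()['editable']),
    exclude_private=True,                 # drops '_hidden'
    exclude_capitalized=False,
    exclude_uppercase=True,               # drops 'PI'
    exclude_unsupported=False,
    excluded_names=[],
    exclude_callables_and_modules=True,   # drops the 'os' module
)
assert set(filtered) == {'x'}
```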
|
||||
|
||||
#==============================================================================
|
||||
# Create view to be displayed by NamespaceBrowser
|
||||
#==============================================================================
|
||||
REMOTE_SETTINGS = ('check_all', 'exclude_private', 'exclude_uppercase',
|
||||
'exclude_capitalized', 'exclude_unsupported',
|
||||
'excluded_names', 'minmax', 'show_callable_attributes',
|
||||
'show_special_attributes', 'exclude_callables_and_modules')
|
||||
|
||||
|
||||
def get_supported_types():
|
||||
"""
|
||||
Return a dictionary containing the lists of types supported by the
|
||||
namespace browser.
|
||||
|
||||
Note:
|
||||
If you update this list, don't forget to update variablexplorer.rst
|
||||
in spyder-docs
|
||||
"""
|
||||
from datetime import date, timedelta
|
||||
editable_types = [int, float, complex, list, set, dict, tuple, date,
|
||||
timedelta] + list(TEXT_TYPES) + list(INT_TYPES)
|
||||
try:
|
||||
from numpy import ndarray, matrix, generic
|
||||
editable_types += [ndarray, matrix, generic]
|
||||
except:
|
||||
pass
|
||||
try:
|
||||
from pandas import DataFrame, Series, Index
|
||||
editable_types += [DataFrame, Series, Index]
|
||||
except:
|
||||
pass
|
||||
picklable_types = editable_types[:]
|
||||
try:
|
||||
from PIL import Image
|
||||
editable_types.append(Image.Image)
|
||||
except:
|
||||
pass
|
||||
return dict(picklable=picklable_types, editable=editable_types)
|
||||
|
||||
|
||||
def get_remote_data(data, settings, mode, more_excluded_names=None):
|
||||
"""
|
||||
Return globals according to filter described in *settings*:
|
||||
* data: data to be filtered (dictionary)
|
||||
* settings: variable explorer settings (dictionary)
|
||||
* mode (string): 'editable' or 'picklable'
|
||||
* more_excluded_names: additional excluded names (list)
|
||||
"""
|
||||
supported_types = get_supported_types()
|
||||
assert mode in list(supported_types.keys())
|
||||
excluded_names = list(settings['excluded_names'])
|
||||
if more_excluded_names is not None:
|
||||
excluded_names += more_excluded_names
|
||||
return globalsfilter(
|
||||
data,
|
||||
check_all=settings['check_all'],
|
||||
filters=tuple(supported_types[mode]),
|
||||
exclude_private=settings['exclude_private'],
|
||||
exclude_uppercase=settings['exclude_uppercase'],
|
||||
exclude_capitalized=settings['exclude_capitalized'],
|
||||
exclude_unsupported=settings['exclude_unsupported'],
|
||||
exclude_callables_and_modules=settings['exclude_callables_and_modules'],
|
||||
excluded_names=excluded_names)
|
||||
|
||||
|
||||
def make_remote_view(data, settings, more_excluded_names=None):
|
||||
"""
|
||||
Make a remote view of dictionary *data*
|
||||
-> globals explorer
|
||||
"""
|
||||
data = get_remote_data(data, settings, mode='editable',
|
||||
more_excluded_names=more_excluded_names)
|
||||
remote = {}
|
||||
for key, value in list(data.items()):
|
||||
view = value_to_display(value, minmax=settings['minmax'])
|
||||
remote[key] = {
|
||||
'type': get_human_readable_type(value),
|
||||
'size': get_size(value),
|
||||
'view': view,
|
||||
'python_type': get_type_string(value),
|
||||
'numpy_type': get_numpy_type_string(value)
|
||||
}
|
||||
|
||||
return remote
|
||||
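A hedged example of how the frontend-facing helpers above fit together; the settings keys mirror REMOTE_SETTINGS, and the values are purely illustrative:

```
from spyder_kernels.utils.nsview import make_remote_view

settings = {
    'check_all': False,
    'exclude_private': True,
    'exclude_capitalized': False,
    'exclude_uppercase': True,
    'exclude_unsupported': False,
    'exclude_callables_and_modules': True,
    'excluded_names': [],
    'minmax': False,
}
view = make_remote_view({'answer': 42, 'word': 'spam'}, settings)
# Each entry carries the metadata shown by the Variable Explorer.
assert view['answer']['view'] == '42'
assert view['answer']['type'] == 'int'
assert view['word']['python_type'] == 'str'
```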
@@ -0,0 +1,47 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2018- Spyder Kernels Contributors
|
||||
# Taken from the tests utils in the Metakernel package
|
||||
# See utils.py at https://github.com/Calysto/metakernel/metakernel/tests
|
||||
# Licensed under the terms of the BSD License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
try:
|
||||
from jupyter_client import session as ss
|
||||
except ImportError:
|
||||
from IPython.kernel.zmq import session as ss
|
||||
import zmq
|
||||
import logging
|
||||
|
||||
try:
|
||||
from StringIO import StringIO
|
||||
except ImportError:
|
||||
from io import StringIO
|
||||
|
||||
from spyder_kernels.console.kernel import SpyderKernel
|
||||
|
||||
|
||||
def get_kernel(kernel_class=SpyderKernel):
|
||||
"""Get an instance of a kernel with the kernel class given."""
|
||||
log = logging.getLogger('test')
|
||||
log.setLevel(logging.DEBUG)
|
||||
|
||||
for hdlr in log.handlers:
|
||||
log.removeHandler(hdlr)
|
||||
|
||||
hdlr = logging.StreamHandler(StringIO())
|
||||
hdlr.setLevel(logging.DEBUG)
|
||||
log.addHandler(hdlr)
|
||||
|
||||
context = zmq.Context.instance()
|
||||
iopub_socket = context.socket(zmq.PUB)
|
||||
|
||||
kernel = kernel_class(session=ss.Session(), iopub_socket=iopub_socket,
|
||||
log=log)
|
||||
return kernel
|
||||
|
||||
|
||||
def get_log_text(kernel):
|
||||
"""Get the log of the given kernel."""
|
||||
return kernel.log.handlers[0].stream.getvalue()
|
||||
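A hedged example of how these two helpers are meant to be used inside a test, assuming SpyderKernel can be instantiated in the test environment; the logged message is arbitrary:

```
def test_kernel_log_is_captured():
    kernel = get_kernel()
    kernel.log.debug("hello from the test kernel")
    assert "hello from the test kernel" in get_log_text(kernel)
```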
@@ -0,0 +1,9 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""Tests."""
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,160 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
Tests for dochelpers.py
|
||||
"""
|
||||
|
||||
# Standard library imports
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Test library imports
|
||||
import pytest
|
||||
|
||||
# Local imports
|
||||
from spyder_kernels.utils.dochelpers import (
|
||||
getargtxt, getargspecfromtext, getdoc, getobj, getsignaturefromtext,
|
||||
isdefined)
|
||||
from spyder_kernels.py3compat import PY2
|
||||
|
||||
|
||||
class Test(object):
|
||||
def method(self, x, y=2):
|
||||
pass
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
PY2 or os.name == 'nt', reason="Only works on Linux and Mac")
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == 'darwin' and sys.version_info[:2] == (3, 8),
|
||||
reason="Fails on Mac with Python 3.8")
|
||||
def test_dochelpers():
|
||||
"""Test dochelpers."""
|
||||
assert getargtxt(Test.method) == ['x, ', 'y=2']
|
||||
assert not getargtxt(Test.__init__)
|
||||
|
||||
assert getdoc(sorted) == {
|
||||
'note': 'Function of builtins module',
|
||||
'argspec': '(...)',
|
||||
'docstring': 'Return a new list containing all items from the '
|
||||
'iterable in ascending order.\n\nA custom key function '
|
||||
'can be supplied to customize the sort order, and the\n'
|
||||
'reverse flag can be set to request the result in '
|
||||
'descending order.',
|
||||
'name': 'sorted'
|
||||
}
|
||||
assert not getargtxt(sorted)
|
||||
|
||||
assert isdefined('numpy.take', force_import=True)
|
||||
assert isdefined('__import__')
|
||||
assert not isdefined('zzz', force_import=True)
|
||||
|
||||
assert getobj('globals') == 'globals'
|
||||
assert not getobj('globals().keys')
|
||||
assert getobj('+scipy.signal.') == 'scipy.signal'
|
||||
assert getobj('4.') == '4'
|
||||
|
||||
|
||||
@pytest.mark.skipif(PY2, reason="Fails in Python 2")
|
||||
def test_no_signature():
|
||||
"""
|
||||
Test that we can get documentation for objects for which Python can't get a
|
||||
signature directly because doing so raises an error.
|
||||
|
||||
This is a regression test for issue spyder-ide/spyder#21148
|
||||
"""
|
||||
import numpy as np
|
||||
doc = getdoc(np.where)
|
||||
signature = doc['argspec']
|
||||
assert signature and signature != "(...)" and signature.startswith("(")
|
||||
assert doc['docstring']
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'text, name, expected',
|
||||
[
|
||||
('foo(x, y)', 'foo', '(x, y)'),
|
||||
('foo(x, y)', '', '(x, y)'),
|
||||
]
|
||||
)
|
||||
def test_getsignaturefromtext_py2(text, name, expected):
|
||||
assert getsignaturefromtext(text, name) == expected
|
||||
|
||||
|
||||
@pytest.mark.skipif(PY2, reason="Don't work in Python 2")
|
||||
@pytest.mark.parametrize(
|
||||
'text, name, expected',
|
||||
[
|
||||
# Simple text with and without name
|
||||
('foo(x, y)', 'foo', '(x, y)'),
|
||||
('foo(x, y)', '', '(x, y)'),
|
||||
# Single arg
|
||||
('foo(x)', '', '(x)'),
|
||||
('foo(x = {})', '', '(x = {})'),
|
||||
# Not a valid identifier
|
||||
('1a(x, y)', '', ''),
|
||||
# Valid identifier
|
||||
('a1(x, y=2)', '', '(x, y=2)'),
|
||||
# Unicode identifier with and without name
|
||||
('ΣΔ(x, y)', 'ΣΔ', '(x, y)'),
|
||||
('ΣΔ(x, y)', '', '(x, y)'),
|
||||
# Multiple signatures in a single line
|
||||
('ΣΔ(x, y) foo(a, b)', '', '(x, y)'),
|
||||
('1a(x, y) foo(a, b)', '', '(a, b)'),
|
||||
# Multiple signatures in multiple lines
|
||||
('foo(a, b = 1)\n\nΣΔ(x, y=2)', '', '(a, b = 1)'),
|
||||
('1a(a, b = 1)\n\nΣΔ(x, y=2)', '', '(x, y=2)'),
|
||||
# Signature after math operations
|
||||
('2(3 + 5) 3*(99) ΣΔ(x, y)', '', '(x, y)'),
|
||||
# No identifier
|
||||
('(x, y)', '', ''),
|
||||
('foo (a=1, b = 2)', '', ''),
|
||||
# Empty signature
|
||||
('foo()', '', ''),
|
||||
('foo()', 'foo', ''),
|
||||
]
|
||||
)
|
||||
def test_getsignaturefromtext(text, name, expected):
|
||||
assert getsignaturefromtext(text, name) == expected
|
||||
|
||||
|
||||
def test_multisignature():
|
||||
"""
|
||||
Test that we can get at least one signature from an object with multiple
|
||||
ones declared in its docstring.
|
||||
"""
|
||||
def foo():
|
||||
"""
|
||||
foo(x, y) foo(a, b)
|
||||
foo(c, d)
|
||||
"""
|
||||
|
||||
signature = getargspecfromtext(foo.__doc__)
|
||||
assert signature == "(x, y)"
|
||||
|
||||
|
||||
def test_multiline_signature():
|
||||
"""
|
||||
Test that we can get signatures split into multiple lines in a
|
||||
docstring.
|
||||
"""
|
||||
def foo():
|
||||
"""
|
||||
foo(x,
|
||||
y)
|
||||
|
||||
This is a docstring.
|
||||
"""
|
||||
|
||||
signature = getargspecfromtext(foo.__doc__)
|
||||
assert signature.startswith("(x, ")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pytest.main()
|
||||
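For readers skimming the parametrized cases above, a standalone rendering of two of them (taken verbatim from the table, not new behaviour):

```
from spyder_kernels.utils.dochelpers import getsignaturefromtext

# With an explicit name, the matching signature is returned.
assert getsignaturefromtext('foo(x, y)', 'foo') == '(x, y)'
# Without a name, the first signature attached to a valid identifier wins.
assert getsignaturefromtext('1a(x, y) foo(a, b)', '') == '(a, b)'
```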
@@ -0,0 +1,344 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
Tests for iofuncs.py.
|
||||
"""
|
||||
|
||||
# Standard library imports
|
||||
import io
|
||||
import os
|
||||
import copy
|
||||
|
||||
# Third party imports
|
||||
import pytest
|
||||
import numpy as np
|
||||
|
||||
# Local imports
|
||||
import spyder_kernels.utils.iofuncs as iofuncs
|
||||
from spyder_kernels.py3compat import is_text_string, PY2
|
||||
|
||||
|
||||
# Full path to this file's parent directory for loading data
|
||||
LOCATION = os.path.realpath(os.path.join(os.getcwd(),
|
||||
os.path.dirname(__file__)))
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# ---- Helper functions and classes
|
||||
# =============================================================================
|
||||
def are_namespaces_equal(actual, expected):
|
||||
if actual is None and expected is None:
|
||||
return True
|
||||
are_equal = True
|
||||
for var in sorted(expected.keys()):
|
||||
try:
|
||||
are_equal = are_equal and bool(np.mean(
|
||||
expected[var] == actual[var]))
|
||||
except ValueError:
|
||||
are_equal = are_equal and all(
|
||||
[np.all(obj1 == obj2) for obj1, obj2 in zip(expected[var],
|
||||
actual[var])])
|
||||
print(str(var) + ": " + str(are_equal))
|
||||
return are_equal
|
||||
|
||||
|
||||
class CustomObj(object):
|
||||
"""A custom class of objects for testing."""
|
||||
def __init__(self, data):
|
||||
self.data = None
|
||||
if data:
|
||||
self.data = data
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.__dict__ == other.__dict__
|
||||
|
||||
|
||||
class UnDeepCopyableObj(CustomObj):
|
||||
"""A class of objects that cannot be deepcopied."""
|
||||
def __getstate__(self):
|
||||
raise RuntimeError()
|
||||
|
||||
|
||||
class UnPickleableObj(UnDeepCopyableObj):
|
||||
"""A class of objects that can deepcopied, but not pickled."""
|
||||
def __deepcopy__(self, memo):
|
||||
new_one = self.__class__.__new__(self.__class__)
|
||||
new_one.__dict__.update(self.__dict__)
|
||||
return new_one
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# ---- Fixtures
|
||||
# =============================================================================
|
||||
@pytest.fixture
|
||||
def spydata_values():
|
||||
"""
|
||||
Define spydata file ground truth values.
|
||||
|
||||
The file export_data.spydata contains five variables to be loaded.
|
||||
This fixture declares those variables in a static way.
|
||||
"""
|
||||
A = 1
|
||||
B = 'ham'
|
||||
C = np.eye(3)
|
||||
D = {'a': True, 'b': np.eye(4, dtype=np.complex128)}
|
||||
E = [np.eye(2, dtype=np.int64), 42.0, np.eye(3, dtype=np.bool_), np.eye(4, dtype=object)]
|
||||
return {'A': A, 'B': B, 'C': C, 'D': D, 'E': E}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def real_values():
|
||||
"""
|
||||
Load a Numpy pickled file.
|
||||
|
||||
The file numpy_data.npz contains six variables; each one represents the
|
||||
expected test values after a manual conversion of the same variables
|
||||
defined and evaluated in MATLAB. The manual type conversion was done
|
||||
over several variable types, such as: Matrices/Vectors, Scalar and
|
||||
Complex numbers, Structs, Strings and Cell Arrays. The set of variables
|
||||
was defined to allow and test the deep conversion of a compound type,
|
||||
i.e., a struct that contains other types that need to be converted,
|
||||
like other structs, matrices and Cell Arrays.
|
||||
"""
|
||||
path = os.path.join(LOCATION, 'numpy_data.npz')
|
||||
file_s = np.load(path, allow_pickle=True)
|
||||
A = file_s['A'].item()
|
||||
B = file_s['B']
|
||||
C = file_s['C']
|
||||
D = file_s['D'].item()
|
||||
E = file_s['E']
|
||||
return {'A': A, 'B': B, 'C': C, 'D': D, 'E': E}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def namespace_objects_full(spydata_values):
|
||||
"""
|
||||
Define a dictionary of objects of a variety of different types to be saved.
|
||||
|
||||
This fixture represents the state of the namespace before saving and
|
||||
filtering out un-deep-copyable, un-pickleable, and uninteresting objects.
|
||||
"""
|
||||
namespace_dict = copy.deepcopy(spydata_values)
|
||||
namespace_dict['expected_error_string'] = (
|
||||
'Some objects could not be saved: '
|
||||
'undeepcopyable_instance, unpickleable_instance')
|
||||
namespace_dict['module_obj'] = io
|
||||
namespace_dict['class_obj'] = Exception
|
||||
namespace_dict['function_obj'] = os.path.join
|
||||
namespace_dict['unpickleable_instance'] = UnPickleableObj("spam")
|
||||
namespace_dict['undeepcopyable_instance'] = UnDeepCopyableObj("ham")
|
||||
namespace_dict['custom_instance'] = CustomObj("eggs")
|
||||
|
||||
return namespace_dict
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def namespace_objects_filtered(spydata_values):
|
||||
"""
|
||||
Define a dictionary of the objects from the namespace that can be saved.
|
||||
|
||||
This fixture represents the state of the namespace after saving and
|
||||
filtering out un-deep-copyable, un-pickleable, and uninteresting objects.
|
||||
"""
|
||||
namespace_dict = copy.deepcopy(spydata_values)
|
||||
namespace_dict['custom_instance'] = CustomObj("eggs")
|
||||
|
||||
return namespace_dict
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def namespace_objects_nocopyable():
|
||||
"""
|
||||
Define a dictionary of objects that cannot be deepcopied.
|
||||
"""
|
||||
namespace_dict = {}
|
||||
namespace_dict['expected_error_string'] = 'No supported objects to save'
|
||||
namespace_dict['class_obj'] = Exception
|
||||
namespace_dict['undeepcopyable_instance'] = UnDeepCopyableObj("ham")
|
||||
|
||||
return namespace_dict
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def namespace_objects_nopickleable():
|
||||
"""
|
||||
Define a dictionary of objects that cannot be pickled.
|
||||
"""
|
||||
namespace_dict = {}
|
||||
namespace_dict['expected_error_string'] = 'No supported objects to save'
|
||||
namespace_dict['function_obj'] = os.path.join
|
||||
namespace_dict['unpickleable_instance'] = UnPickleableObj("spam")
|
||||
|
||||
return namespace_dict
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def input_namespace(request):
|
||||
if request.param is None:
|
||||
return None
|
||||
else:
|
||||
return request.getfixturevalue(request.param)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def expected_namespace(request):
|
||||
if request.param is None:
|
||||
return None
|
||||
else:
|
||||
return request.getfixturevalue(request.param)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# ---- Tests
|
||||
# =============================================================================
|
||||
def test_npz_import():
|
||||
"""
|
||||
Test loading .npz files as dictionaries.
|
||||
"""
|
||||
filename = os.path.join(LOCATION, 'import_data.npz')
|
||||
data = iofuncs.load_array(filename)
|
||||
assert isinstance(data, tuple)
|
||||
variables, error = data
|
||||
assert variables['val1'] == np.array(1) and not error
|
||||
|
||||
|
||||
@pytest.mark.skipif(iofuncs.load_matlab is None, reason="SciPy required")
|
||||
def test_matlab_import(real_values):
|
||||
"""
|
||||
Test the automatic conversion and import of variables from MATLAB.
|
||||
|
||||
This test loads a file stored in MATLAB; the variables defined are
|
||||
equivalent to the manually converted values done over Numpy. This test
|
||||
makes it possible to evaluate the function that processes the conversion
|
||||
automatically, i.e., the automatic conversion results should be equal to the
|
||||
manual conversion of the variables.
|
||||
"""
|
||||
path = os.path.join(LOCATION, 'data.mat')
|
||||
inf, _ = iofuncs.load_matlab(path)
|
||||
valid = True
|
||||
for var in sorted(real_values.keys()):
|
||||
valid = valid and bool(np.mean(real_values[var] == inf[var]))
|
||||
assert valid
|
||||
|
||||
|
||||
@pytest.mark.skipif(PY2, reason="Fails on Python 2")
|
||||
@pytest.mark.parametrize('spydata_file_name', ['export_data.spydata',
|
||||
'export_data_renamed.spydata'])
|
||||
def test_spydata_import(spydata_file_name, spydata_values):
|
||||
"""
|
||||
Test spydata handling and variable importing.
|
||||
|
||||
This test loads all the variables contained inside a spydata tar
|
||||
container and compares them against their static values.
|
||||
It tests both a file with the original name, and one that has been renamed
|
||||
in order to catch Issue #9.
|
||||
"""
|
||||
path = os.path.join(LOCATION, spydata_file_name)
|
||||
data, error = iofuncs.load_dictionary(path)
|
||||
assert error is None
|
||||
assert are_namespaces_equal(data, spydata_values)
|
||||
|
||||
|
||||
def test_spydata_import_witherror():
|
||||
"""
|
||||
Test that import fails gracefully with a function not present in the namespace.
|
||||
|
||||
Checks that the error is caught, the message is passed back,
|
||||
and the current working directory is restored afterwards.
|
||||
"""
|
||||
original_cwd = os.getcwd()
|
||||
path = os.path.join(LOCATION, 'export_data_withfunction.spydata')
|
||||
data, error = iofuncs.load_dictionary(path)
|
||||
assert error and is_text_string(error)
|
||||
assert data is None
|
||||
assert os.getcwd() == original_cwd
|
||||
|
||||
|
||||
def test_spydata_import_missing_file():
|
||||
"""
|
||||
Test that import fails properly when file is missing, and resets the cwd.
|
||||
"""
|
||||
original_cwd = os.getcwd()
|
||||
path = os.path.join(LOCATION, 'non_existant_path_2019-01-23.spydata')
|
||||
try:
|
||||
iofuncs.load_dictionary(path)
|
||||
except IOError:
|
||||
pass
|
||||
else:
|
||||
# Fail if exception did not occur when it should
|
||||
assert False
|
||||
assert os.getcwd() == original_cwd
|
||||
|
||||
|
||||
@pytest.mark.skipif(iofuncs.load_matlab is None, reason="SciPy required")
|
||||
def test_matlabstruct():
|
||||
"""Test support for matlab stlye struct."""
|
||||
a = iofuncs.MatlabStruct()
|
||||
a.b = 'spam'
|
||||
assert a["b"] == 'spam'
|
||||
a.c["d"] = 'eggs'
|
||||
assert a.c.d == 'eggs'
|
||||
assert a == {'c': {'d': 'eggs'}, 'b': 'spam'}
|
||||
a['d'] = [1, 2, 3]
|
||||
|
||||
buf = io.BytesIO()
|
||||
iofuncs.save_matlab(a, buf)
|
||||
buf.seek(0)
|
||||
data, error = iofuncs.load_matlab(buf)
|
||||
|
||||
assert error is None
|
||||
assert data['b'] == 'spam'
|
||||
assert data['c'].d == 'eggs'
|
||||
assert data['d'].tolist() == [[1, 2, 3]]
|
||||
|
||||
|
||||
@pytest.mark.parametrize('input_namespace,expected_namespace,filename', [
|
||||
('spydata_values', 'spydata_values', 'export_data_copy'),
|
||||
('namespace_objects_full', 'namespace_objects_filtered', 'export_data_2'),
|
||||
('namespace_objects_nocopyable', None, 'export_data_none_1'),
|
||||
('namespace_objects_nopickleable', None, 'export_data_none_2'),
|
||||
], indirect=['input_namespace', 'expected_namespace'])
|
||||
def test_spydata_export(input_namespace, expected_namespace,
|
||||
filename):
|
||||
"""
|
||||
Test spydata export and re-import.
|
||||
|
||||
This test saves the variables in ``spydata`` format and then
|
||||
reloads and checks them to make sure they save/restore properly
|
||||
and no errors occur during the process.
|
||||
"""
|
||||
path = os.path.join(LOCATION, filename + '.spydata')
|
||||
expected_error = None
|
||||
if 'expected_error_string' in input_namespace:
|
||||
expected_error = input_namespace['expected_error_string']
|
||||
del input_namespace['expected_error_string']
|
||||
cwd_original = os.getcwd()
|
||||
|
||||
try:
|
||||
export_error = iofuncs.save_dictionary(input_namespace, path)
|
||||
assert export_error == expected_error
|
||||
if expected_namespace is None:
|
||||
assert not os.path.isfile(path)
|
||||
else:
|
||||
data_actual, import_error = iofuncs.load_dictionary(path)
|
||||
assert import_error is None
|
||||
print(data_actual.keys())
|
||||
print(expected_namespace.keys())
|
||||
assert are_namespaces_equal(data_actual, expected_namespace)
|
||||
assert cwd_original == os.getcwd()
|
||||
finally:
|
||||
if os.path.isfile(path):
|
||||
try:
|
||||
os.remove(path)
|
||||
except (IOError, OSError, PermissionError):
|
||||
pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pytest.main()
|
||||
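A hedged sketch of the spydata round trip these tests exercise, written against pytest's standard tmp_path fixture; this simple test is not part of the commit and assumes save_dictionary succeeds for plain picklable values:

```
import spyder_kernels.utils.iofuncs as iofuncs

def test_spydata_roundtrip(tmp_path):
    path = str(tmp_path / 'roundtrip.spydata')
    # save_dictionary returns None when no objects had to be skipped.
    assert iofuncs.save_dictionary({'a': 1, 'b': 'ham'}, path) is None
    data, error = iofuncs.load_dictionary(path)
    assert error is None
    assert data == {'a': 1, 'b': 'ham'}
```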
@@ -0,0 +1,40 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
import pytest
|
||||
|
||||
from spyder_kernels.utils.lazymodules import LazyModule, FakeObject
|
||||
|
||||
|
||||
def test_non_existent_module():
|
||||
"""Test that we retun FakeObject's for non-existing modules."""
|
||||
mod = LazyModule('no_module', second_level_attrs=['a'])
|
||||
|
||||
# First level attributes must return FakeObject
|
||||
assert mod.foo is FakeObject
|
||||
|
||||
# Second level attributes in second_level_attrs should return
|
||||
# FakeObject too.
|
||||
assert mod.foo.a is FakeObject
|
||||
|
||||
# Other second level attributes should raise an error.
|
||||
with pytest.raises(AttributeError):
|
||||
mod.foo.b
|
||||
|
||||
|
||||
def test_existing_modules():
|
||||
"""Test that lazy modules work for existing modules."""
|
||||
np = LazyModule('numpy')
|
||||
import numpy
|
||||
|
||||
# Both the lazy and actual modules should return the same.
|
||||
assert np.ndarray == numpy.ndarray
|
||||
|
||||
# The lazy module should have these extra attributes
|
||||
assert np.__spy_mod__
|
||||
assert np.__spy_modname__
|
||||
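A hedged sketch of the behaviour these two tests pin down: the real import is deferred until the first attribute access, and missing modules degrade to FakeObject instead of raising at import time:

```
from spyder_kernels.utils.lazymodules import LazyModule, FakeObject

numpy_lazy = LazyModule('numpy')   # nothing imported yet
array_cls = numpy_lazy.ndarray     # the real import happens here, if available
missing = LazyModule('not_a_real_module')
assert missing.anything is FakeObject
```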
@@ -0,0 +1,458 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2009- Spyder Kernels Contributors
|
||||
#
|
||||
# Licensed under the terms of the MIT License
|
||||
# (see spyder_kernels/__init__.py for details)
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
"""
|
||||
Tests for utils.py
|
||||
"""
|
||||
|
||||
# Standard library imports
|
||||
from collections import defaultdict
|
||||
import datetime
|
||||
import sys
|
||||
|
||||
# Third party imports
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
import pytest
|
||||
import xarray as xr
|
||||
import PIL.Image
|
||||
|
||||
# Local imports
|
||||
from spyder_kernels.py3compat import PY2
|
||||
from spyder_kernels.utils.nsview import (
|
||||
sort_against, is_supported, value_to_display, get_size,
|
||||
get_supported_types, get_type_string, get_numpy_type_string,
|
||||
is_editable_type)
|
||||
|
||||
|
||||
def generate_complex_object():
|
||||
"""Taken from issue #4221."""
|
||||
bug = defaultdict(list)
|
||||
for i in range(50000):
|
||||
a = {j:np.random.rand(10) for j in range(10)}
|
||||
bug[i] = a
|
||||
return bug
|
||||
|
||||
|
||||
COMPLEX_OBJECT = generate_complex_object()
|
||||
DF = pd.DataFrame([1,2,3])
|
||||
DATASET = xr.Dataset({0: pd.DataFrame([1,2]), 1:pd.DataFrame([3,4])})
|
||||
|
||||
|
||||
# --- Tests
|
||||
# -----------------------------------------------------------------------------
|
||||
def test_get_size():
|
||||
"""Test that the size of all values is returned correctly"""
|
||||
|
||||
class RecursionClassNoLen():
|
||||
def __getattr__(self, name):
|
||||
if name=='size': return self.name
|
||||
else:
|
||||
return super(object, self).__getattribute__(name)
|
||||
|
||||
|
||||
length = [list([1,2,3]), tuple([1,2,3]), set([1,2,3]), '123',
|
||||
{1:1, 2:2, 3:3}]
|
||||
for obj in length:
|
||||
assert get_size(obj) == 3
|
||||
|
||||
df = pd.DataFrame([[1,2,3], [1,2,3]])
|
||||
assert get_size(df) == (2, 3)
|
||||
|
||||
df = pd.Series([1,2,3])
|
||||
assert get_size(df) == (3,)
|
||||
|
||||
df = pd.Index([1,2,3])
|
||||
assert get_size(df) == (3,)
|
||||
|
||||
arr = np.array([[1,2,3], [1,2,3]], dtype=np.complex128)
|
||||
assert get_size(arr) == (2, 3)
|
||||
|
||||
img = PIL.Image.new('RGB', (256,256))
|
||||
assert get_size(img) == (256,256)
|
||||
|
||||
obj = RecursionClassNoLen()
|
||||
assert get_size(obj) == 1
|
||||
|
||||
|
||||
|
||||
def test_sort_against():
|
||||
lista = [5, 6, 7]
|
||||
listb = [2, 3, 1]
|
||||
res = sort_against(lista, listb)
|
||||
assert res == [7, 5, 6]
|
||||
|
||||
|
||||
def test_sort_against_is_stable():
|
||||
lista = [3, 0, 1]
|
||||
listb = [1, 1, 1]
|
||||
res = sort_against(lista, listb)
|
||||
assert res == lista
|
||||
|
||||
|
||||
def test_none_values_are_supported():
|
||||
"""Tests that None values are displayed by default"""
|
||||
supported_types = get_supported_types()
|
||||
mode = 'editable'
|
||||
none_var = None
|
||||
none_list = [2, None, 3, None]
|
||||
none_dict = {'a': None, 'b': 4}
|
||||
none_tuple = (None, [3, None, 4], 'eggs')
|
||||
assert is_supported(none_var, filters=tuple(supported_types[mode]))
|
||||
assert is_supported(none_list, filters=tuple(supported_types[mode]))
|
||||
assert is_supported(none_dict, filters=tuple(supported_types[mode]))
|
||||
assert is_supported(none_tuple, filters=tuple(supported_types[mode]))
|
||||
|
||||
|
||||
def test_str_subclass_display():
|
||||
"""Test for value_to_display of subclasses of str/basestring."""
|
||||
class Test(str):
|
||||
def __repr__(self):
|
||||
return 'test'
|
||||
value = Test()
|
||||
value_display = value_to_display(value)
|
||||
assert 'Test object' in value_display
|
||||
|
||||
|
||||
def test_default_display():
|
||||
"""Tests for default_display."""
|
||||
# Display of defaultdict
|
||||
assert (value_to_display(COMPLEX_OBJECT) ==
|
||||
'defaultdict object of collections module')
|
||||
|
||||
# Display of array of COMPLEX_OBJECT
|
||||
assert (value_to_display(np.array(COMPLEX_OBJECT)) ==
|
||||
'ndarray object of numpy module')
|
||||
|
||||
# Display of Dataset
|
||||
assert (value_to_display(DATASET) ==
|
||||
'Dataset object of xarray.core.dataset module')
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == 'darwin' and sys.version_info[:2] == (3, 8),
|
||||
reason="Fails on Mac with Python 3.8")
|
||||
def test_list_display():
|
||||
"""Tests for display of lists."""
|
||||
long_list = list(range(100))
|
||||
|
||||
# Simple list
|
||||
assert value_to_display([1, 2, 3]) == '[1, 2, 3]'
|
||||
|
||||
# Long list
|
||||
assert (value_to_display(long_list) ==
|
||||
'[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, ...]')
|
||||
|
||||
# Short list of lists
|
||||
assert (value_to_display([long_list] * 3) ==
|
||||
'[[0, 1, 2, 3, 4, ...], [0, 1, 2, 3, 4, ...], [0, 1, 2, 3, 4, ...]]')
|
||||
|
||||
# Long list of lists
|
||||
result = '[' + ''.join('[0, 1, 2, 3, 4, ...], '*10)[:-2] + ']'
|
||||
assert value_to_display([long_list] * 10) == result[:70] + ' ...'
|
||||
|
||||
# Multiple level lists
|
||||
assert (value_to_display([[1, 2, 3, [4], 5]] + long_list) ==
|
||||
'[[1, 2, 3, [...], 5], 0, 1, 2, 3, 4, 5, 6, 7, 8, ...]')
|
||||
assert value_to_display([1, 2, [DF]]) == '[1, 2, [Dataframe]]'
|
||||
assert value_to_display([1, 2, [[DF], DATASET]]) == '[1, 2, [[...], Dataset]]'
|
||||
|
||||
# List of complex object
|
||||
assert value_to_display([COMPLEX_OBJECT]) == '[defaultdict]'
|
||||
|
||||
# List of composed objects
|
||||
li = [COMPLEX_OBJECT, DATASET, 1, {1:2, 3:4}, DF]
|
||||
result = '[defaultdict, Dataset, 1, {1:2, 3:4}, Dataframe]'
|
||||
assert value_to_display(li) == result
|
||||
|
||||
# List starting with a non-supported object (#5313)
|
||||
supported_types = tuple(get_supported_types()['editable'])
|
||||
li = [len, 1]
|
||||
assert value_to_display(li) == '[builtin_function_or_method, 1]'
|
||||
assert is_supported(li, filters=supported_types)
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == 'darwin' and sys.version_info[:2] == (3, 8),
|
||||
reason="Fails on Mac with Python 3.8")
|
||||
def test_dict_display():
|
||||
"""Tests for display of dicts."""
|
||||
long_list = list(range(100))
|
||||
long_dict = dict(zip(list(range(100)), list(range(100))))
|
||||
|
||||
# Simple dict
|
||||
assert value_to_display({0:0, 'a':'b'}) == "{0:0, 'a':'b'}"
|
||||
|
||||
# Long dict
|
||||
assert (value_to_display(long_dict) ==
|
||||
'{0:0, 1:1, 2:2, 3:3, 4:4, 5:5, 6:6, 7:7, 8:8, 9:9, ...}')
|
||||
|
||||
# Short list of lists
|
||||
assert (value_to_display({1:long_dict, 2:long_dict}) ==
|
||||
'{1:{0:0, 1:1, 2:2, 3:3, 4:4, ...}, 2:{0:0, 1:1, 2:2, 3:3, 4:4, ...}}')
|
||||
|
||||
# Long dict of dicts
|
||||
result = ('{(0, 0, 0, 0, 0, ...):[0, 1, 2, 3, 4, ...], '
|
||||
'(1, 1, 1, 1, 1, ...):[0, 1, 2, 3, 4, ...]}')
|
||||
assert value_to_display({(0,)*100:long_list, (1,)*100:long_list}) == result[:70] + ' ...'
|
||||
|
||||
# Multiple level dicts
|
||||
assert (value_to_display({0: {1:1, 2:2, 3:3, 4:{0:0}, 5:5}, 1:1}) ==
|
||||
'{0:{1:1, 2:2, 3:3, 4:{...}, 5:5}, 1:1}')
|
||||
assert value_to_display({0:0, 1:1, 2:2, 3:DF}) == '{0:0, 1:1, 2:2, 3:Dataframe}'
|
||||
assert value_to_display({0:0, 1:1, 2:[[DF], DATASET]}) == '{0:0, 1:1, 2:[[...], Dataset]}'
|
||||
|
||||
# Dict of complex object
|
||||
assert value_to_display({0:COMPLEX_OBJECT}) == '{0:defaultdict}'
|
||||
|
||||
# Dict of composed objects
|
||||
li = {0:COMPLEX_OBJECT, 1:DATASET, 2:2, 3:{0:0, 1:1}, 4:DF}
|
||||
result = '{0:defaultdict, 1:Dataset, 2:2, 3:{0:0, 1:1}, 4:Dataframe}'
|
||||
assert value_to_display(li) == result
|
||||
|
||||
# Dict starting with a non-supported object (#5313)
|
||||
supported_types = tuple(get_supported_types()['editable'])
|
||||
di = {max: len, 1: 1}
|
||||
assert value_to_display(di) in (
|
||||
'{builtin_function_or_method:builtin_function_or_method, 1:1}',
|
||||
'{1:1, builtin_function_or_method:builtin_function_or_method}')
|
||||
assert is_supported(di, filters=supported_types)
|
||||
|
||||
|
||||
def test_set_display():
|
||||
"""Tests for display of sets."""
|
||||
long_set = {i for i in range(100)}
|
||||
|
||||
# Simple set
|
||||
assert value_to_display({1, 2, 3}) == '{1, 2, 3}'
|
||||
|
||||
# Long set
|
||||
disp = '{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, ...}'
|
||||
assert value_to_display(long_set) == disp
|
||||
|
||||
# Short list of sets
|
||||
disp = '[{0, 1, 2, 3, 4, ...}, {0, 1, 2, 3, 4, ...}, {0, 1, 2, 3, 4, ...}]'
|
||||
assert value_to_display([long_set] * 3) == disp
|
||||
|
||||
# Long list of sets
|
||||
disp = '[' + ''.join('{0, 1, 2, 3, 4, ...}, '*10)[:-2] + ']'
|
||||
assert value_to_display([long_set] * 10) == disp[:70] + ' ...'
|
||||
|
||||
|
||||
def test_datetime_display():
|
||||
"""Simple tests that dates, datetimes and timedeltas display correctly."""
|
||||
test_date = datetime.date(2017, 12, 18)
|
||||
test_date_2 = datetime.date(2017, 2, 2)
|
||||
|
||||
test_datetime = datetime.datetime(2017, 12, 18, 13, 43, 2)
|
||||
test_datetime_2 = datetime.datetime(2017, 8, 18, 0, 41, 27)
|
||||
|
||||
test_timedelta = datetime.timedelta(-1, 2000)
|
||||
test_timedelta_2 = datetime.timedelta(0, 3600)
|
||||
|
||||
# Simple dates/datetimes/timedeltas
|
||||
assert value_to_display(test_date) == '2017-12-18'
|
||||
assert value_to_display(test_datetime) == '2017-12-18 13:43:02'
|
||||
assert value_to_display(test_timedelta) == '-1 day, 0:33:20'
|
||||
|
||||
# Lists of dates/datetimes/timedeltas
|
||||
assert (value_to_display([test_date, test_date_2]) ==
|
||||
'[2017-12-18, 2017-02-02]')
|
||||
assert (value_to_display([test_datetime, test_datetime_2]) ==
|
||||
'[2017-12-18 13:43:02, 2017-08-18 00:41:27]')
|
||||
assert (value_to_display([test_timedelta, test_timedelta_2]) ==
|
||||
'[-1 day, 0:33:20, 1:00:00]')
|
||||
|
||||
# Tuple of dates/datetimes/timedeltas
|
||||
assert (value_to_display((test_date, test_datetime, test_timedelta)) ==
|
||||
'(2017-12-18, 2017-12-18 13:43:02, -1 day, 0:33:20)')
|
||||
|
||||
# Dict of dates/datetimes/timedeltas
|
||||
assert (value_to_display({0: test_date,
|
||||
1: test_datetime,
|
||||
2: test_timedelta_2}) ==
|
||||
("{0:2017-12-18, 1:2017-12-18 13:43:02, 2:1:00:00}"))
|
||||
|
||||
|
||||
def test_str_in_container_display():
|
||||
"""Test that strings are displayed correctly inside lists or dicts."""
|
||||
# Assert that both bytes and unicode return the right display
|
||||
assert value_to_display([b'a', u'b']) == "['a', 'b']"
|
||||
|
||||
# Encoded unicode gives bytes and it can't be transformed to
|
||||
# unicode again. So this tests the except part of
|
||||
# is_binary_string(value) in value_to_display
|
||||
if PY2:
|
||||
assert value_to_display([u'Э'.encode('cp1251')]) == "['\xdd']"
|
||||
|
||||
|
||||
def test_ellipses(tmpdir):
|
||||
"""
|
||||
Test that we're adding a binary ellipsis when value_to_display of
|
||||
a collection is too long and binary.
|
||||
|
||||
For issue 6942
|
||||
"""
|
||||
# Create binary file with all bytes
|
||||
file = tmpdir.new(basename='bytes.txt')
|
||||
file.write_binary(bytearray(list(range(255))))
|
||||
|
||||
# Read bytes back
|
||||
buffer = file.read(mode='rb')
|
||||
|
||||
# Assert that there's a binary ellipsis in the representation
|
||||
assert b' ...' in value_to_display(buffer)
|
||||
|
||||
|
||||
def test_get_type_string():
|
||||
"""Test for get_type_string."""
|
||||
# Bools
|
||||
assert get_type_string(True) == 'bool'
|
||||
|
||||
# Numeric types (PY2 has long, which disappeared in PY3)
|
||||
if not PY2:
|
||||
expected = ['int', 'float', 'complex']
|
||||
numeric_types = [1, 1.5, 1 + 2j]
|
||||
assert [get_type_string(t) for t in numeric_types] == expected
|
||||
|
||||
# Lists
|
||||
assert get_type_string([1, 2, 3]) == 'list'
|
||||
|
||||
# Sets
|
||||
assert get_type_string({1, 2, 3}) == 'set'
|
||||
|
||||
# Dictionaries
|
||||
assert get_type_string({'a': 1, 'b': 2}) == 'dict'
|
||||
|
||||
# Tuples
|
||||
assert get_type_string((1, 2, 3)) == 'tuple'
|
||||
|
||||
# Strings
|
||||
if not PY2:
|
||||
assert get_type_string('foo') == 'str'
|
||||
|
||||
# Numpy objects
|
||||
assert get_type_string(np.array([1, 2, 3])) == 'NDArray'
|
||||
|
||||
masked_array = np.ma.MaskedArray([1, 2, 3], mask=[True, False, True])
|
||||
assert get_type_string(masked_array) == 'MaskedArray'
|
||||
|
||||
matrix = np.matrix([[1, 2], [3, 4]])
|
||||
assert get_type_string(matrix) == 'Matrix'
|
||||
|
||||
# Pandas objects
|
||||
df = pd.DataFrame([1, 2, 3])
|
||||
assert get_type_string(df) == 'DataFrame'
|
||||
|
||||
series = pd.Series([1, 2, 3])
|
||||
assert get_type_string(series) == 'Series'
|
||||
|
||||
index = pd.Index([1, 2, 3])
|
||||
assert get_type_string(index) in ['Int64Index', 'Index']
|
||||
|
||||
# PIL images
|
||||
img = PIL.Image.new('RGB', (256,256))
|
||||
assert get_type_string(img) == 'PIL.Image.Image'
|
||||
|
||||
# Datetime objects
|
||||
date = datetime.date(2010, 10, 1)
|
||||
assert get_type_string(date) == 'datetime.date'
|
||||
|
||||
date = datetime.timedelta(-1, 2000)
|
||||
assert get_type_string(date) == 'datetime.timedelta'
|
||||
|
||||
|
||||
def test_is_editable_type():
|
||||
"""Test for get_type_string."""
|
||||
# Bools
|
||||
assert is_editable_type(True)
|
||||
|
||||
# Numeric type
|
||||
numeric_types = [1, 1.5, 1 + 2j]
|
||||
assert all([is_editable_type(t) for t in numeric_types])
|
||||
|
||||
# Lists
|
||||
assert is_editable_type([1, 2, 3])
|
||||
|
||||
# Sets
|
||||
assert is_editable_type({1, 2, 3})
|
||||
|
||||
# Dictionaries
|
||||
assert is_editable_type({'a': 1, 'b': 2})
|
||||
|
||||
# Tuples
|
||||
assert is_editable_type((1, 2, 3))
|
||||
|
||||
# Strings
|
||||
assert is_editable_type('foo')
|
||||
|
||||
# Numpy objects
|
||||
assert is_editable_type(np.array([1, 2, 3]))
|
||||
|
||||
masked_array = np.ma.MaskedArray([1, 2, 3], mask=[True, False, True])
|
||||
assert is_editable_type(masked_array)
|
||||
|
||||
matrix = np.matrix([[1, 2], [3, 4]])
|
||||
assert is_editable_type(matrix)
|
||||
|
||||
# Pandas objects
|
||||
df = pd.DataFrame([1, 2, 3])
|
||||
assert is_editable_type(df)
|
||||
|
||||
series = pd.Series([1, 2, 3])
|
||||
assert is_editable_type(series)
|
||||
|
||||
index = pd.Index([1, 2, 3])
|
||||
assert is_editable_type(index)
|
||||
|
||||
# PIL images
|
||||
img = PIL.Image.new('RGB', (256,256))
|
||||
assert is_editable_type(img)
|
||||
|
||||
# Datetime objects
|
||||
date = datetime.date(2010, 10, 1)
|
||||
assert is_editable_type(date)
|
||||
|
||||
date = datetime.timedelta(-1, 2000)
|
||||
assert is_editable_type(date)
|
||||
|
||||
# Other objects
|
||||
class MyClass:
|
||||
a = 1
|
||||
assert not is_editable_type(MyClass)
|
||||
|
||||
my_instance = MyClass()
|
||||
assert not is_editable_type(my_instance)
|
||||
|
||||
|
||||
def test_get_numpy_type():
|
||||
"""Test for get_numpy_type_string."""
|
||||
# Numpy objects
|
||||
assert get_numpy_type_string(np.array([1, 2, 3])) == 'Array'
|
||||
|
||||
matrix = np.matrix([[1, 2], [3, 4]])
|
||||
assert get_numpy_type_string(matrix) == 'Array'
|
||||
|
||||
assert get_numpy_type_string(np.int32(1)) == 'Scalar'
|
||||
|
||||
# Regular Python objects
|
||||
assert get_numpy_type_string(1.5) == 'Unknown'
|
||||
assert get_numpy_type_string([1, 2, 3]) == 'Unknown'
|
||||
assert get_numpy_type_string({1: 2}) == 'Unknown'
|
||||
|
||||
# PIL images
|
||||
img = PIL.Image.new('RGB', (256,256))
|
||||
assert get_numpy_type_string(img) == 'Unknown'
|
||||
|
||||
# Pandas objects
|
||||
df = pd.DataFrame([1, 2, 3])
|
||||
assert get_numpy_type_string(df) == 'Unknown'
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pytest.main()
|
||||