Regression
notebooks.IntroducingGensim.ipynb — /mnt/data/jenkins/workspace/DARIAH-Topics/notebooks/IntroducingGensim.ipynb (from pytest)
Error Message
nbconvert.preprocessors.execute.CellExecutionError: An error occurred while executing the following cell:
------------------
corpus = list(preprocessing.read_files(meta.index))
corpus[0][:255] # printing the first 255 characters of the first document
------------------
---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-7-1004dfe5ae0a> in <module>()
----> 1 corpus = list(preprocessing.read_files(meta.index))
      2 corpus[0][:255] # printing the first 255 characters of the first document

AttributeError: module 'cophi_toolbox.preprocessing' has no attribute 'read_files'
AttributeError: module 'cophi_toolbox.preprocessing' has no attribute 'read_files'
Stacktrace
self = <CallInfo when='call' exception: An error occurred while executing the following cell: ------------------ corpus = lis...ing' has no attribute 'read_files' AttributeError: module 'cophi_toolbox.preprocessing' has no attribute 'read_files' > func = <function call_runtest_hook.<locals>.<lambda> at 0x7f323a2fbea0> when = 'call', treat_keyboard_interrupt_as_exception = False def __init__(self, func, when, treat_keyboard_interrupt_as_exception=False): #: context of invocation: one of "setup", "call", #: "teardown", "memocollect" self.when = when self.start = time() try: > self.result = func() ../../shiningpanda/jobs/62c67c92/virtualenvs/d41d8cd9/lib/python3.5/site-packages/_pytest/runner.py:198: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ > lambda: ihook(item=item, **kwds), when=when, treat_keyboard_interrupt_as_exception=item.config.getvalue("usepdb"), ) ../../shiningpanda/jobs/62c67c92/virtualenvs/d41d8cd9/lib/python3.5/site-packages/_pytest/runner.py:181: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <_HookCaller 'pytest_runtest_call'>, args = () kwargs = {'item': <RunNb '/mnt/data/jenkins/workspace/DARIAH-Topics/notebooks/IntroducingGensim.ipynb'>} notincall = set() def __call__(self, *args, **kwargs): if args: raise TypeError("hook calling supports only keyword arguments") assert not self.is_historic() if self.argnames: notincall = set(self.argnames) - set(['__multicall__']) - set( kwargs.keys()) if notincall: warnings.warn( "Argument(s) {} which are declared in the hookspec " "can not be found in this hook call" .format(tuple(notincall)), stacklevel=2, ) > return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs) ../../shiningpanda/jobs/62c67c92/virtualenvs/d41d8cd9/lib/python3.5/site-packages/pluggy/__init__.py:617: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <_pytest.config.PytestPluginManager object at 0x7f3268364390> hook 
= <_HookCaller 'pytest_runtest_call'> methods = [<pluggy.HookImpl object at 0x7f32661d6e80>, <pluggy.HookImpl object at 0x7f32630c7358>, <pluggy.HookImpl object at 0x7f32630570f0>] kwargs = {'item': <RunNb '/mnt/data/jenkins/workspace/DARIAH-Topics/notebooks/IntroducingGensim.ipynb'>} def _hookexec(self, hook, methods, kwargs): # called from all hookcaller instances. # enable_tracing will set its own wrapping function at self._inner_hookexec > return self._inner_hookexec(hook, methods, kwargs) ../../shiningpanda/jobs/62c67c92/virtualenvs/d41d8cd9/lib/python3.5/site-packages/pluggy/__init__.py:222: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ hook = <_HookCaller 'pytest_runtest_call'> methods = [<pluggy.HookImpl object at 0x7f32661d6e80>, <pluggy.HookImpl object at 0x7f32630c7358>, <pluggy.HookImpl object at 0x7f32630570f0>] kwargs = {'item': <RunNb '/mnt/data/jenkins/workspace/DARIAH-Topics/notebooks/IntroducingGensim.ipynb'>} self._inner_hookexec = lambda hook, methods, kwargs: \ hook.multicall( methods, kwargs, > firstresult=hook.spec_opts.get('firstresult'), ) ../../shiningpanda/jobs/62c67c92/virtualenvs/d41d8cd9/lib/python3.5/site-packages/pluggy/__init__.py:216: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ hook_impls = [<pluggy.HookImpl object at 0x7f32661d6e80>, <pluggy.HookImpl object at 0x7f32630c7358>, <pluggy.HookImpl object at 0x7f32630570f0>] caller_kwargs = {'item': <RunNb '/mnt/data/jenkins/workspace/DARIAH-Topics/notebooks/IntroducingGensim.ipynb'>} firstresult = False def _multicall(hook_impls, caller_kwargs, firstresult=False): """Execute a call into multiple python functions/methods and return the result(s). ``caller_kwargs`` comes from _HookCaller.__call__(). 
""" __tracebackhide__ = True results = [] excinfo = None try: # run impl and wrapper setup functions in a loop teardowns = [] try: for hook_impl in reversed(hook_impls): try: args = [caller_kwargs[argname] for argname in hook_impl.argnames] except KeyError: for argname in hook_impl.argnames: if argname not in caller_kwargs: raise HookCallError( "hook call must provide argument %r" % (argname,)) if hook_impl.hookwrapper: try: gen = hook_impl.function(*args) next(gen) # first yield teardowns.append(gen) except StopIteration: _raise_wrapfail(gen, "did not yield") else: res = hook_impl.function(*args) if res is not None: results.append(res) if firstresult: # halt further impl calls break except BaseException: excinfo = sys.exc_info() finally: if firstresult: # first result hooks return a single value outcome = _Result(results[0] if results else None, excinfo) else: outcome = _Result(results, excinfo) # run all wrapper post-yield blocks for gen in reversed(teardowns): try: gen.send(outcome) _raise_wrapfail(gen, "has second yield") except StopIteration: pass > return outcome.get_result() ../../shiningpanda/jobs/62c67c92/virtualenvs/d41d8cd9/lib/python3.5/site-packages/pluggy/callers.py:201: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <pluggy.callers._Result object at 0x7f323a2d16d8> def get_result(self): """Get the result(s) for this hook call. If the hook was marked as a ``firstresult`` only a single value will be returned otherwise a list of results. 
""" __tracebackhide__ = True if self._excinfo is None: return self._result else: ex = self._excinfo if _py3: > raise ex[1].with_traceback(ex[2]) ../../shiningpanda/jobs/62c67c92/virtualenvs/d41d8cd9/lib/python3.5/site-packages/pluggy/callers.py:76: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ hook_impls = [<pluggy.HookImpl object at 0x7f32661d6e80>, <pluggy.HookImpl object at 0x7f32630c7358>, <pluggy.HookImpl object at 0x7f32630570f0>] caller_kwargs = {'item': <RunNb '/mnt/data/jenkins/workspace/DARIAH-Topics/notebooks/IntroducingGensim.ipynb'>} firstresult = False def _multicall(hook_impls, caller_kwargs, firstresult=False): """Execute a call into multiple python functions/methods and return the result(s). ``caller_kwargs`` comes from _HookCaller.__call__(). """ __tracebackhide__ = True results = [] excinfo = None try: # run impl and wrapper setup functions in a loop teardowns = [] try: for hook_impl in reversed(hook_impls): try: args = [caller_kwargs[argname] for argname in hook_impl.argnames] except KeyError: for argname in hook_impl.argnames: if argname not in caller_kwargs: raise HookCallError( "hook call must provide argument %r" % (argname,)) if hook_impl.hookwrapper: try: gen = hook_impl.function(*args) next(gen) # first yield teardowns.append(gen) except StopIteration: _raise_wrapfail(gen, "did not yield") else: > res = hook_impl.function(*args) ../../shiningpanda/jobs/62c67c92/virtualenvs/d41d8cd9/lib/python3.5/site-packages/pluggy/callers.py:180: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ item = <RunNb '/mnt/data/jenkins/workspace/DARIAH-Topics/notebooks/IntroducingGensim.ipynb'> def pytest_runtest_call(item): _update_current_test_var(item, "call") sys.last_type, sys.last_value, sys.last_traceback = (None, None, None) try: > item.runtest() ../../shiningpanda/jobs/62c67c92/virtualenvs/d41d8cd9/lib/python3.5/site-packages/_pytest/runner.py:109: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <RunNb '/mnt/data/jenkins/workspace/DARIAH-Topics/notebooks/IntroducingGensim.ipynb'> def runtest(self): self._skip() with io.open(self.name,encoding='utf8') as nb: notebook = nbformat.read(nb, as_version=4) # TODO: which kernel? run in pytest's or use new one (make it option) _timeout = self.parent.parent.config.getini('nbsmoke_cell_timeout') kwargs = dict(timeout=int(_timeout) if _timeout!='' else 300, allow_errors=False, # or sys.version_info[1] ? kernel_name='python') ep = ExecutePreprocessor(**kwargs) with cwd(os.path.dirname(self.name)): # jupyter notebook always does this, right? > ep.preprocess(notebook,{}) ../../shiningpanda/jobs/62c67c92/virtualenvs/d41d8cd9/lib/python3.5/site-packages/nbsmoke/__init__.py:274: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <nbconvert.preprocessors.execute.ExecutePreprocessor object at 0x7f32643c30b8> nb = {'metadata': {'git': {'suppress_outputs': True}, 'kernelspec': {'language': 'python', 'name': 'python3', 'display_name...: None, 'source': 'static_heatmap = PlotDocumentTopics.static_heatmap()\nstatic_heatmap.show()'}], 'nbformat_minor': 1} resources = {} def preprocess(self, nb, resources): """ Preprocess notebook executing each code cell. The input argument `nb` is modified in-place. Parameters ---------- nb : NotebookNode Notebook being executed. resources : dictionary Additional resources used in the conversion process. For example, passing ``{'metadata': {'path': run_path}}`` sets the execution path to ``run_path``. Returns ------- nb : NotebookNode The executed notebook. resources : dictionary Additional resources used in the conversion process. 
""" path = resources.get('metadata', {}).get('path', '') if path == '': path = None # clear display_id map self._display_id_map = {} # from jupyter_client.manager import start_new_kernel def start_new_kernel(startup_timeout=60, kernel_name='python', **kwargs): km = self.kernel_manager_class(kernel_name=kernel_name) km.start_kernel(**kwargs) kc = km.client() kc.start_channels() try: kc.wait_for_ready(timeout=startup_timeout) except RuntimeError: kc.stop_channels() km.shutdown_kernel() raise return km, kc kernel_name = nb.metadata.get('kernelspec', {}).get('name', 'python') if self.kernel_name: kernel_name = self.kernel_name self.log.info("Executing notebook with kernel: %s" % kernel_name) self.km, self.kc = start_new_kernel( startup_timeout=self.startup_timeout, kernel_name=kernel_name, extra_arguments=self.extra_arguments, cwd=path) self.kc.allow_stdin = False self.nb = nb try: > nb, resources = super(ExecutePreprocessor, self).preprocess(nb, resources) ../../shiningpanda/jobs/62c67c92/virtualenvs/d41d8cd9/lib/python3.5/site-packages/nbconvert/preprocessors/execute.py:262: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <nbconvert.preprocessors.execute.ExecutePreprocessor object at 0x7f32643c30b8> nb = {'metadata': {'git': {'suppress_outputs': True}, 'kernelspec': {'language': 'python', 'name': 'python3', 'display_name...: None, 'source': 'static_heatmap = PlotDocumentTopics.static_heatmap()\nstatic_heatmap.show()'}], 'nbformat_minor': 1} resources = {} def preprocess(self, nb, resources): """ Preprocessing to apply on each notebook. Must return modified nb, resources. If you wish to apply your preprocessing to each cell, you might want to override preprocess_cell method instead. Parameters ---------- nb : NotebookNode Notebook being converted resources : dictionary Additional resources used in the conversion process. Allows preprocessors to pass variables into the Jinja engine. 
""" for index, cell in enumerate(nb.cells): > nb.cells[index], resources = self.preprocess_cell(cell, resources, index) ../../shiningpanda/jobs/62c67c92/virtualenvs/d41d8cd9/lib/python3.5/site-packages/nbconvert/preprocessors/base.py:69: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <nbconvert.preprocessors.execute.ExecutePreprocessor object at 0x7f32643c30b8> cell = {'cell_type': 'code', 'metadata': {}, 'outputs': [{'output_type': 'error', 'ename': 'AttributeError', 'evalue': "modul...list(preprocessing.read_files(meta.index))\ncorpus[0][:255] # printing the first 255 characters of the first document'} resources = {}, cell_index = 17 def preprocess_cell(self, cell, resources, cell_index): """ Executes a single code cell. See base.py for details. To execute all cells see :meth:`preprocess`. """ if cell.cell_type != 'code': return cell, resources reply, outputs = self.run_cell(cell, cell_index) cell.outputs = outputs if not self.allow_errors: for out in outputs: if out.output_type == 'error': > raise CellExecutionError.from_cell_and_msg(cell, out) E nbconvert.preprocessors.execute.CellExecutionError: An error occurred while executing the following cell: E ------------------ E corpus = list(preprocessing.read_files(meta.index)) E corpus[0][:255] # printing the first 255 characters of the first document E ------------------ E E #x1B[0;31m---------------------------------------------------------------------------#x1B[0m E #x1B[0;31mAttributeError#x1B[0m Traceback (most recent call last) E #x1B[0;32m<ipython-input-7-1004dfe5ae0a>#x1B[0m in #x1B[0;36m<module>#x1B[0;34m()#x1B[0m E #x1B[0;32m----> 1#x1B[0;31m #x1B[0mcorpus#x1B[0m #x1B[0;34m=#x1B[0m #x1B[0mlist#x1B[0m#x1B[0;34m(#x1B[0m#x1B[0mpreprocessing#x1B[0m#x1B[0;34m.#x1B[0m#x1B[0mread_files#x1B[0m#x1B[0;34m(#x1B[0m#x1B[0mmeta#x1B[0m#x1B[0;34m.#x1B[0m#x1B[0mindex#x1B[0m#x1B[0;34m)#x1B[0m#x1B[0;34m)#x1B[0m#x1B[0;34m#x1B[0m#x1B[0m E #x1B[0m#x1B[1;32m 2#x1B[0m 
#x1B[0mcorpus#x1B[0m#x1B[0;34m[#x1B[0m#x1B[0;36m0#x1B[0m#x1B[0;34m]#x1B[0m#x1B[0;34m[#x1B[0m#x1B[0;34m:#x1B[0m#x1B[0;36m255#x1B[0m#x1B[0;34m]#x1B[0m #x1B[0;31m# printing the first 255 characters of the first document#x1B[0m#x1B[0;34m#x1B[0m#x1B[0m E E #x1B[0;31mAttributeError#x1B[0m: module 'cophi_toolbox.preprocessing' has no attribute 'read_files' E AttributeError: module 'cophi_toolbox.preprocessing' has no attribute 'read_files' ../../shiningpanda/jobs/62c67c92/virtualenvs/d41d8cd9/lib/python3.5/site-packages/nbconvert/preprocessors/execute.py:286: CellExecutionError