Test-Command: set -e
 ; export LC_ALL=C.UTF-8
 ; export LANG=C.UTF-8
 ; export PYTEST_OPTIONS="--verbose -r s --timeout-method=thread --timeout=300 --durations=20"
 ; export SKIP_NETWORK="test_defaults or test_dashboard_address or test_respect_host_listen_address or test_contact_listen_address or test_preload_file or test_preload_module or test_preload_command or test_preload_command_default or test_local_tls"
 ; export PYTEST_SKIP_OPTIONS="not (test_adapt or test_scale or test_collections_get or test_statistical_profiling_2 or test_memory_limit_auto or test_serialize_deserialize_sparse_large or test_duplicate_client or test_dont_hold_on_to_large_messages or test_logging_file_config or test_performance_report or test_remote_access or $SKIP_NETWORK)"
 ; cp conftest.py "$AUTOPKGTEST_TMP"
 ; cp distributed/tests/mytest.pyz "$AUTOPKGTEST_TMP"
 ; cp distributed/tests/testegg-1.0.0-py3.4.egg "$AUTOPKGTEST_TMP"
 ; for py in $(py3versions -r 2>/dev/null)
 ; do cd "$AUTOPKGTEST_TMP"
 ;   echo "Testing with $py:"
# Capture pytest's real exit status: "|| ERR=$?" records it without tripping
# set -e (the previous "|| true" made the subsequent $? always 0).
 ;   ERR=0
 ;   http_proxy= $py -m pytest -m "not avoid_travis" -k "$PYTEST_SKip_OPTIONS" $PYTEST_OPTIONS --pyargs distributed 2>&1 || ERR=$?
 ;   echo "Error code $ERR"
# Pass/fail is decided from the pytest cache: a cache directory with no
# "lastfailed" entry means every selected test passed.
 ;   if [ -e "$AUTOPKGTEST_TMP/.pytest_cache/v/cache/" ] && [ ! -e "$AUTOPKGTEST_TMP/.pytest_cache/v/cache/lastfailed" ]
 ;   then echo "Tests passed"; true
 ;   else echo "Tests failed"; false
 ;   fi
 ; done
Depends: @,
 python3-dill,
 python3-h5py,
 python3-ipykernel,
 python3-ipython,
# python3-ipywidgets,
 python3-joblib,
 python3-jupyter-client,
 python3-keras,
 python3-lz4,
 python3-mock,
 python3-netcdf4,
 python3-pandas,
 python3-paramiko,
 python3-pytest,
 python3-pytest-timeout,
 python3-requests,
 python3-scipy,
 python3-sklearn,
 python3-sparse,
 python3-tblib,
