diff --git a/.travis.yml b/.travis.yml
index d63575feb..0772bb781 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -8,6 +8,9 @@ matrix:
       env: TOX_ENV=py27
     - python: 3.5
       env: TOX_ENV=py35
+    - python: 3.7
+      env: TOX_ENV=py37
+      dist: xenial
     - python: 2.7
       env: TOX_ENV=flake8
     - python: 2.7
@@ -18,7 +21,7 @@ before_install:
   - tsc --module amd --noImplicitAny --outdir datalab/notebook/static datalab/notebook/static/*.ts
   # We use tox for actually running tests.
   - pip install --upgrade pip tox
-
+
 script:
   # tox reads its configuration from tox.ini.
   - tox -e $TOX_ENV
diff --git a/datalab/utils/__init__.py b/datalab/utils/__init__.py
index a68bad496..720a80127 100644
--- a/datalab/utils/__init__.py
+++ b/datalab/utils/__init__.py
@@ -12,7 +12,7 @@

 """Google Cloud Platform library - Internal Helpers."""

-from ._async import async, async_function, async_method
+from ._async import async_, async_function, async_method
 from ._gcp_job import GCPJob
 from ._http import Http, RequestException
 from ._iterator import Iterator
@@ -24,7 +24,7 @@
 from ._utils import print_exception_with_last_stack, get_item, compare_datetimes, \
     pick_unused_port, is_http_running_on, gcs_copy_file

-__all__ = ['async', 'async_function', 'async_method', 'GCPJob', 'Http', 'RequestException',
+__all__ = ['async_', 'async_function', 'async_method', 'GCPJob', 'Http', 'RequestException',
            'Iterator', 'Job', 'JobError', 'JSONEncoder', 'LRUCache', 'LambdaJob', 'DataflowJob',
            'print_exception_with_last_stack', 'get_item', 'compare_datetimes',
            'pick_unused_port', 'is_http_running_on', 'gcs_copy_file']
diff --git a/datalab/utils/_async.py b/datalab/utils/_async.py
index 510b68f93..b7559f009 100644
--- a/datalab/utils/_async.py
+++ b/datalab/utils/_async.py
@@ -23,7 +23,7 @@

 from future.utils import with_metaclass


-class async(with_metaclass(abc.ABCMeta, object)):
+class async_(with_metaclass(abc.ABCMeta, object)):
   """ Base class for async_function/async_method. Creates a wrapped function/method that will
       run the original function/method on a thread pool worker thread and return a Job instance
       for monitoring the status of the thread.
@@ -55,7 +55,7 @@ def __call__(self, *args, **kwargs):
     return _job.Job(future=self.executor.submit(self._call, *args, **kwargs))


-class async_function(async):
+class async_function(async_):
   """ This decorator can be applied to any static function that makes blocking calls to create
       a modified version that creates a Job and returns immediately; the original method will
       be called on a thread pool worker thread.
@@ -63,10 +63,10 @@ class async_function(async):

   def _call(self, *args, **kwargs):
     # Call the wrapped method.
-    return self._function(*async._preprocess_args(*args), **async._preprocess_kwargs(**kwargs))
+    return self._function(*async_._preprocess_args(*args), **async_._preprocess_kwargs(**kwargs))


-class async_method(async):
+class async_method(async_):
   """ This decorator can be applied to any class instance method that makes blocking calls to
       create a modified version that creates a Job and returns immediately; the original method
       will be called on a thread pool worker thread.
@@ -74,8 +74,8 @@ class async_method(async):

   def _call(self, *args, **kwargs):
     # Call the wrapped method.
-    return self._function(self.obj, *async._preprocess_args(*args),
-                          **async._preprocess_kwargs(**kwargs))
+    return self._function(self.obj, *async_._preprocess_args(*args),
+                          **async_._preprocess_kwargs(**kwargs))

   def __get__(self, instance, owner):
     # This is important for attribute inheritance and setting self.obj so it can be
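Note: the async -> async_ rename above is the core of the Python 3.7 fix. PEP 492's deprecation period ended with 3.7, which makes async and await reserved keywords, so any module that defines or imports a name spelled async fails at compile time with a SyntaxError before any code runs. The trailing underscore follows the PEP 8 convention for names that collide with keywords. A minimal sketch of the failure mode (the toy class here is illustrative only, not the Datalab implementation):

    # Under Python 3.7 the next definition is rejected at parse time
    # ("SyntaxError: invalid syntax"), because async is now a keyword:
    #
    #   class async(object):
    #     pass
    #
    # The underscore-suffixed spelling parses on 2.7, 3.5, and 3.7 alike:
    class async_(object):
      pass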
diff --git a/datalab/utils/_lambda_job.py b/datalab/utils/_lambda_job.py
index 740cd91f7..2fed3176e 100644
--- a/datalab/utils/_lambda_job.py
+++ b/datalab/utils/_lambda_job.py
@@ -30,7 +30,7 @@ def __init__(self, fn, job_id, *args, **kwargs):
       job_id: an optional ID for the job. If None, a UUID will be generated.
     """
     super(LambdaJob, self).__init__(job_id)
-    self._future = _async.async.executor.submit(fn, *args, **kwargs)
+    self._future = _async.async_.executor.submit(fn, *args, **kwargs)

   def __repr__(self):
     """Returns a representation for the job for showing in the notebook.
diff --git a/datalab/utils/commands/_utils.py b/datalab/utils/commands/_utils.py
index ba6b83e1e..e864b4903 100644
--- a/datalab/utils/commands/_utils.py
+++ b/datalab/utils/commands/_utils.py
@@ -30,6 +30,7 @@
   import pandas_profiling
 except ImportError:
   pass
+import six
 import sys
 import types
 import yaml
@@ -323,6 +324,8 @@ def parse_config(config, env, as_dict=True):
     config = {}
   elif stripped[0] == '{':
     config = json.loads(config)
+  elif six.PY3:
+    config = yaml.load(config, Loader=yaml.FullLoader)
   else:
     config = yaml.load(config)
   if as_dict:
diff --git a/google/datalab/utils/__init__.py b/google/datalab/utils/__init__.py
index 2ce0eb479..f4780c937 100644
--- a/google/datalab/utils/__init__.py
+++ b/google/datalab/utils/__init__.py
@@ -12,7 +12,7 @@

 """Google Cloud Platform library - Internal Helpers."""

-from ._async import async, async_function, async_method
+from ._async import async_, async_function, async_method
 from ._http import Http, RequestException
 from ._iterator import Iterator
 from ._json_encoder import JSONEncoder
@@ -23,7 +23,7 @@
     pick_unused_port, is_http_running_on, gcs_copy_file, python_portable_string

-__all__ = ['async', 'async_function', 'async_method', 'Http', 'RequestException', 'Iterator',
+__all__ = ['async_', 'async_function', 'async_method', 'Http', 'RequestException', 'Iterator',
            'JSONEncoder', 'LRUCache', 'LambdaJob', 'DataflowJob',
            'print_exception_with_last_stack', 'get_item', 'compare_datetimes',
            'pick_unused_port', 'is_http_running_on', 'gcs_copy_file', 'python_portable_string']
diff --git a/google/datalab/utils/_async.py b/google/datalab/utils/_async.py
index 60aa23e1e..b22db85d0 100644
--- a/google/datalab/utils/_async.py
+++ b/google/datalab/utils/_async.py
@@ -23,7 +23,7 @@

 from future.utils import with_metaclass


-class async(with_metaclass(abc.ABCMeta, object)):
+class async_(with_metaclass(abc.ABCMeta, object)):
   """ Base class for async_function/async_method. Creates a wrapped function/method that will
       run the original function/method on a thread pool worker thread and return a Job instance
       for monitoring the status of the thread.
@@ -55,7 +55,7 @@ def __call__(self, *args, **kwargs):
     return Job(future=self.executor.submit(self._call, *args, **kwargs))


-class async_function(async):
+class async_function(async_):
   """ This decorator can be applied to any static function that makes blocking calls to create
       a modified version that creates a Job and returns immediately; the original method will
       be called on a thread pool worker thread.
@@ -63,10 +63,10 @@ class async_function(async):

   def _call(self, *args, **kwargs):
     # Call the wrapped method.
-    return self._function(*async._preprocess_args(*args), **async._preprocess_kwargs(**kwargs))
+    return self._function(*async_._preprocess_args(*args), **async_._preprocess_kwargs(**kwargs))


-class async_method(async):
+class async_method(async_):
   """ This decorator can be applied to any class instance method that makes blocking calls to
       create a modified version that creates a Job and returns immediately; the original method
       will be called on a thread pool worker thread.
@@ -74,8 +74,8 @@ class async_method(async):

   def _call(self, *args, **kwargs):
     # Call the wrapped method.
-    return self._function(self.obj, *async._preprocess_args(*args),
-                          **async._preprocess_kwargs(**kwargs))
+    return self._function(self.obj, *async_._preprocess_args(*args),
+                          **async_._preprocess_kwargs(**kwargs))

   def __get__(self, instance, owner):
     # This is important for attribute inheritance and setting self.obj so it can be
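Note: the six.PY3 branches added to parse_config above track a PyYAML change rather than a language change. PyYAML 5.1 deprecated calling yaml.load() without an explicit Loader, since the legacy default can construct arbitrary Python objects from untrusted input, while yaml.FullLoader resolves standard tags only. The Python 2 fall-through presumably exists because the PyYAML release installed in the py27 environment predates FullLoader, where Loader=yaml.FullLoader would raise AttributeError. A small sketch of the intended behavior, assuming PyYAML >= 5.1:

    import yaml

    doc = 'a: 1\nitems: [2, 3]'

    # Legacy form: still works, but emits YAMLLoadWarning on PyYAML >= 5.1.
    # data = yaml.load(doc)

    # Explicit loader: same result for plain mappings and lists, no warning,
    # and no arbitrary object construction from untrusted input.
    data = yaml.load(doc, Loader=yaml.FullLoader)
    assert data == {'a': 1, 'items': [2, 3]}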
diff --git a/google/datalab/utils/_lambda_job.py b/google/datalab/utils/_lambda_job.py
index 74116d73d..2faa4e5ed 100644
--- a/google/datalab/utils/_lambda_job.py
+++ b/google/datalab/utils/_lambda_job.py
@@ -30,7 +30,7 @@ def __init__(self, fn, job_id, *args, **kwargs):
       job_id: an optional ID for the job. If None, a UUID will be generated.
     """
     super(LambdaJob, self).__init__(job_id)
-    self._future = _async.async.executor.submit(fn, *args, **kwargs)
+    self._future = _async.async_.executor.submit(fn, *args, **kwargs)

   def __repr__(self):
     """Returns a representation for the job for showing in the notebook.
diff --git a/google/datalab/utils/commands/_utils.py b/google/datalab/utils/commands/_utils.py
index 380da8401..2a42bf7fe 100644
--- a/google/datalab/utils/commands/_utils.py
+++ b/google/datalab/utils/commands/_utils.py
@@ -30,6 +30,7 @@
   import pandas_profiling
 except ImportError:
   pass
+import six
 import sys
 import yaml
@@ -328,6 +329,8 @@ def parse_config(config, env, as_dict=True):
     config = {}
   elif stripped[0] == '{':
     config = json.loads(config)
+  elif six.PY3:
+    config = yaml.load(config, Loader=yaml.FullLoader)
   else:
     config = yaml.load(config)
   if as_dict:
@@ -373,6 +376,8 @@ def parse_config_for_selected_keys(content, keys):
     return {}, None
   elif stripped[0] == '{':
     config = json.loads(content)
+  elif six.PY3:
+    config = yaml.load(content, Loader=yaml.FullLoader)
   else:
     config = yaml.load(content)
diff --git a/setup.py b/setup.py
index 8c524f7d7..6ad55fcb9 100644
--- a/setup.py
+++ b/setup.py
@@ -78,6 +78,7 @@
   classifiers=[
       "Programming Language :: Python",
       "Programming Language :: Python :: 2",
+      "Programming Language :: Python :: 3",
       "Development Status :: 4 - Beta",
       "Environment :: Other Environment",
       "Intended Audience :: Developers",
diff --git a/tests/_util/commands_tests.py b/tests/_util/commands_tests.py
index 0da13a035..7c1aea525 100644
--- a/tests/_util/commands_tests.py
+++ b/tests/_util/commands_tests.py
@@ -95,7 +95,13 @@ def test_subcommand_line_cell(self):
                              'string4: value4\nflag1: false')
     self.assertEqual(args, {'string1': 'value1', 'string2': 'value2', 'string3': 'value3',
                             'command': 'subcommand2', 'flag1': False})
-    self.assertEqual(yaml.load(cell), {'string3': 'value3', 'string4': 'value4'})
+    if six.PY3:
+      self.assertEqual(
+          yaml.load(cell, Loader=yaml.FullLoader),
+          {'string3': 'value3', 'string4': 'value4'}
+      )
+    else:
+      self.assertEqual(yaml.load(cell), {'string3': 'value3', 'string4': 'value4'})

     # Regular arg and cell arg cannot be the same name.
     with self.assertRaises(ValueError):
@@ -133,7 +139,10 @@ def test_subcommand_var_replacement(self):
     args, cell = parser.parse('subcommand1 --string1 $var1', 'a: b\nstring2: $var2', namespace)
     self.assertEqual(args, {'string1': 'value1', 'string2': 'value2', 'flag1': False,
                             'dict1': None})
-    self.assertEqual(yaml.load(cell), {'a': 'b', 'string2': '$var2'})
+    if six.PY3:
+      self.assertEqual(yaml.load(cell, Loader=yaml.FullLoader), {'a': 'b', 'string2': '$var2'})
+    else:
+      self.assertEqual(yaml.load(cell), {'a': 'b', 'string2': '$var2'})

     cell = """
 dict1:
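Note: the PY2/PY3 yaml.load branch is now repeated at every call site in the tests and in both copies of commands/_utils.py. A hypothetical helper, not part of this diff and named here purely for illustration, would keep each call site to a single line:

    import six
    import yaml

    def load_yaml(text):
      # FullLoader exists only on PyYAML >= 5.1 (what the py3 environments
      # install); fall back to the legacy default loader on Python 2.
      if six.PY3:
        return yaml.load(text, Loader=yaml.FullLoader)
      return yaml.load(text)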
diff --git a/tests/pipeline/pipeline_tests.py b/tests/pipeline/pipeline_tests.py
index 615468a1c..ff7f72908 100644
--- a/tests/pipeline/pipeline_tests.py
+++ b/tests/pipeline/pipeline_tests.py
@@ -16,6 +16,7 @@
 import datetime
 import mock
 import re
+import six
 import unittest
 import yaml
@@ -262,7 +263,10 @@
                      'catchup=True, default_args=default_args)\n\n')

   def test_get_datetime_expr(self):
-    dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec)
+    if six.PY3:
+      dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec, Loader=yaml.FullLoader)
+    else:
+      dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec)

     start = dag_dict.get('schedule').get('start')
     datetime_expr = pipeline.PipelineGenerator._get_datetime_expr_str(start)
@@ -277,7 +281,10 @@ def test_get_default_args(self):
     self.assertIn("'email': []", actual)
     self.assertIn("'owner': 'Google Cloud Datalab'", actual)

-    dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec)
+    if six.PY3:
+      dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec, Loader=yaml.FullLoader)
+    else:
+      dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec)
     dag_dict['schedule']['retries'] = 5
     dag_dict['schedule']['email_on_retry'] = False
     dag_dict['schedule']['email_on_failure'] = False
@@ -302,7 +309,10 @@ def test_get_default_args(self):
     self.assertIn("'max_retry_delay': timedelta(seconds=15)", actual)

   def test_get_airflow_spec_with_default_schedule(self):
-    dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec)
+    if six.PY3:
+      dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec, Loader=yaml.FullLoader)
+    else:
+      dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec)

     # We delete the schedule spec to test with defaults
     del dag_dict['schedule']
diff --git a/tox.ini b/tox.ini
index 99379028e..f3575df22 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,7 +1,7 @@
 [tox]
-# By default, we want to run tests for Python 2.7, Python 3.5, and run our
-# flake8 checks.
-envlist = py27,py35,flake8,coveralls
+# By default, we want to run tests for Python 2.7, Python 3.5, Python 3.7,
+# and run our flake8 checks.
+envlist = py27,py35,py37,flake8,coveralls
 # If an interpreter is missing locally, skip it.
 skip_missing_interpreters = true
@@ -10,9 +10,9 @@ skip_missing_interpreters = true
 # need them to run our tests suite.
 #
 # tox always installs the current package, so there's no need to list it here.
-deps = apache-airflow==1.9.0
+deps = apache-airflow==1.10.9
        dill==0.2.6
-       tensorflow==1.8.0
+       tensorflow==1.14.0
        lime==0.1.1.23
        xgboost==0.6a2
        # Dropping this seems to cause problems with conda in some cases.
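Note: the CI pieces above have to move together. Travis at the time offered Python 3.7 only on xenial images, hence the per-entry dist: xenial override; tox needs py37 in envlist so that TOX_ENV=py37 resolves to a real environment; and the apache-airflow and tensorflow pins move to releases that presumably added Python 3.7 support (the old 1.9.0 and 1.8.0 pins predate it). Assuming a local 3.7 interpreter, the new environment can be exercised the same way Travis runs it, using the commands already present in the diff:

    pip install --upgrade pip tox
    tox -e py37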