From 55b4a0732d36088a5c2c4d9076bb629d85d0714a Mon Sep 17 00:00:00 2001
From: Andrew Saintway
Date: Fri, 4 Oct 2019 03:18:53 +0000
Subject: [PATCH 01/10] #711 Fix Incompatibility with Python 3.7

---
 datalab/utils/__init__.py               |  4 ++--
 datalab/utils/_async.py                 | 12 ++++++------
 datalab/utils/_lambda_job.py            |  2 +-
 datalab/utils/commands/_utils.py        |  2 +-
 google/datalab/utils/__init__.py        |  4 ++--
 google/datalab/utils/_async.py          | 12 ++++++------
 google/datalab/utils/_lambda_job.py     |  2 +-
 google/datalab/utils/commands/_utils.py |  4 ++--
 8 files changed, 21 insertions(+), 21 deletions(-)

diff --git a/datalab/utils/__init__.py b/datalab/utils/__init__.py
index a68bad496..fd0e6e628 100644
--- a/datalab/utils/__init__.py
+++ b/datalab/utils/__init__.py
@@ -12,7 +12,7 @@
 
 """Google Cloud Platform library - Internal Helpers."""
 
-from ._async import async, async_function, async_method
+from ._async import asynchron, async_function, async_method
 from ._gcp_job import GCPJob
 from ._http import Http, RequestException
 from ._iterator import Iterator
@@ -24,7 +24,7 @@
 from ._utils import print_exception_with_last_stack, get_item, compare_datetimes, \
     pick_unused_port, is_http_running_on, gcs_copy_file
 
-__all__ = ['async', 'async_function', 'async_method', 'GCPJob', 'Http', 'RequestException',
+__all__ = ['asynchron', 'async_function', 'async_method', 'GCPJob', 'Http', 'RequestException',
            'Iterator', 'Job', 'JobError', 'JSONEncoder', 'LRUCache', 'LambdaJob', 'DataflowJob',
            'print_exception_with_last_stack', 'get_item', 'compare_datetimes', 'pick_unused_port',
            'is_http_running_on', 'gcs_copy_file']
diff --git a/datalab/utils/_async.py b/datalab/utils/_async.py
index 510b68f93..a3f5097db 100644
--- a/datalab/utils/_async.py
+++ b/datalab/utils/_async.py
@@ -23,7 +23,7 @@
 from future.utils import with_metaclass
 
 
-class async(with_metaclass(abc.ABCMeta, object)):
+class asynchron(with_metaclass(abc.ABCMeta, object)):
   """ Base class for async_function/async_method. Creates a wrapped function/method that will
       run the original function/method on a thread pool worker thread and return a Job instance
       for monitoring the status of the thread.
@@ -55,7 +55,7 @@ def __call__(self, *args, **kwargs):
     return _job.Job(future=self.executor.submit(self._call, *args, **kwargs))
 
 
-class async_function(async):
+class async_function(asynchron):
   """ This decorator can be applied to any static function that makes blocking calls to create
       a modified version that creates a Job and returns immediately; the original method will
       be called on a thread pool worker thread.
@@ -63,10 +63,10 @@ class async_function(async):
 
   def _call(self, *args, **kwargs):
     # Call the wrapped method.
-    return self._function(*async._preprocess_args(*args), **async._preprocess_kwargs(**kwargs))
+    return self._function(*asynchron._preprocess_args(*args), **asynchron._preprocess_kwargs(**kwargs))
 
 
-class async_method(async):
+class async_method(asynchron):
   """ This decorator can be applied to any class instance method that makes blocking calls to
       create a modified version that creates a Job and returns immediately; the original method
       will be called on a thread pool worker thread.
@@ -74,8 +74,8 @@ class async_method(async):
 
   def _call(self, *args, **kwargs):
     # Call the wrapped method.
-    return self._function(self.obj, *async._preprocess_args(*args),
-                          **async._preprocess_kwargs(**kwargs))
+    return self._function(self.obj, *asynchron._preprocess_args(*args),
+                          **asynchron._preprocess_kwargs(**kwargs))
 
   def __get__(self, instance, owner):
     # This is important for attribute inheritance and setting self.obj so it can be
diff --git a/datalab/utils/_lambda_job.py b/datalab/utils/_lambda_job.py
index 740cd91f7..b25b11768 100644
--- a/datalab/utils/_lambda_job.py
+++ b/datalab/utils/_lambda_job.py
@@ -30,7 +30,7 @@ def __init__(self, fn, job_id, *args, **kwargs):
       job_id: an optional ID for the job. If None, a UUID will be generated.
     """
     super(LambdaJob, self).__init__(job_id)
-    self._future = _async.async.executor.submit(fn, *args, **kwargs)
+    self._future = _async.asynchron.executor.submit(fn, *args, **kwargs)
 
   def __repr__(self):
     """Returns a representation for the job for showing in the notebook.
diff --git a/datalab/utils/commands/_utils.py b/datalab/utils/commands/_utils.py
index ba6b83e1e..2b4b45914 100644
--- a/datalab/utils/commands/_utils.py
+++ b/datalab/utils/commands/_utils.py
@@ -324,7 +324,7 @@ def parse_config(config, env, as_dict=True):
     elif stripped[0] == '{':
       config = json.loads(config)
     else:
-      config = yaml.load(config)
+      config = yaml.load(config, Loader=yaml.FullLoader)
 
   if as_dict:
     config = dict(config)
diff --git a/google/datalab/utils/__init__.py b/google/datalab/utils/__init__.py
index 2ce0eb479..b0b4af074 100644
--- a/google/datalab/utils/__init__.py
+++ b/google/datalab/utils/__init__.py
@@ -12,7 +12,7 @@
 
 """Google Cloud Platform library - Internal Helpers."""
 
-from ._async import async, async_function, async_method
+from ._async import asynchron, async_function, async_method
 from ._http import Http, RequestException
 from ._iterator import Iterator
 from ._json_encoder import JSONEncoder
@@ -23,7 +23,7 @@
     pick_unused_port, is_http_running_on, gcs_copy_file, python_portable_string
 
 
-__all__ = ['async', 'async_function', 'async_method', 'Http', 'RequestException', 'Iterator',
+__all__ = ['asynchron', 'async_function', 'async_method', 'Http', 'RequestException', 'Iterator',
            'JSONEncoder', 'LRUCache', 'LambdaJob', 'DataflowJob', 'print_exception_with_last_stack',
            'get_item', 'compare_datetimes', 'pick_unused_port', 'is_http_running_on',
            'gcs_copy_file', 'python_portable_string']
diff --git a/google/datalab/utils/_async.py b/google/datalab/utils/_async.py
index 60aa23e1e..8e7428659 100644
--- a/google/datalab/utils/_async.py
+++ b/google/datalab/utils/_async.py
@@ -23,7 +23,7 @@
 from future.utils import with_metaclass
 
 
-class async(with_metaclass(abc.ABCMeta, object)):
+class asynchron(with_metaclass(abc.ABCMeta, object)):
   """ Base class for async_function/async_method. Creates a wrapped function/method that will
       run the original function/method on a thread pool worker thread and return a Job instance
       for monitoring the status of the thread.
@@ -55,7 +55,7 @@ def __call__(self, *args, **kwargs):
     return Job(future=self.executor.submit(self._call, *args, **kwargs))
 
 
-class async_function(async):
+class async_function(asynchron):
   """ This decorator can be applied to any static function that makes blocking calls to create
       a modified version that creates a Job and returns immediately; the original method will
       be called on a thread pool worker thread.
@@ -63,10 +63,10 @@ class async_function(async):
 
   def _call(self, *args, **kwargs):
     # Call the wrapped method.
-    return self._function(*async._preprocess_args(*args), **async._preprocess_kwargs(**kwargs))
+    return self._function(*asynchron._preprocess_args(*args), **asynchron._preprocess_kwargs(**kwargs))
 
 
-class async_method(async):
+class async_method(asynchron):
   """ This decorator can be applied to any class instance method that makes blocking calls to
       create a modified version that creates a Job and returns immediately; the original method
       will be called on a thread pool worker thread.
@@ -74,8 +74,8 @@ class async_method(async):
 
   def _call(self, *args, **kwargs):
     # Call the wrapped method.
-    return self._function(self.obj, *async._preprocess_args(*args),
-                          **async._preprocess_kwargs(**kwargs))
+    return self._function(self.obj, *asynchron._preprocess_args(*args),
+                          **asynchron._preprocess_kwargs(**kwargs))
 
   def __get__(self, instance, owner):
     # This is important for attribute inheritance and setting self.obj so it can be
diff --git a/google/datalab/utils/_lambda_job.py b/google/datalab/utils/_lambda_job.py
index 74116d73d..b297b67c0 100644
--- a/google/datalab/utils/_lambda_job.py
+++ b/google/datalab/utils/_lambda_job.py
@@ -30,7 +30,7 @@ def __init__(self, fn, job_id, *args, **kwargs):
      job_id: an optional ID for the job. If None, a UUID will be generated.
    """
    super(LambdaJob, self).__init__(job_id)
-    self._future = _async.async.executor.submit(fn, *args, **kwargs)
+    self._future = _async.asynchron.executor.submit(fn, *args, **kwargs)
 
   def __repr__(self):
     """Returns a representation for the job for showing in the notebook.
diff --git a/google/datalab/utils/commands/_utils.py b/google/datalab/utils/commands/_utils.py
index 380da8401..cf4f08b47 100644
--- a/google/datalab/utils/commands/_utils.py
+++ b/google/datalab/utils/commands/_utils.py
@@ -329,7 +329,7 @@ def parse_config(config, env, as_dict=True):
     elif stripped[0] == '{':
       config = json.loads(config)
     else:
-      config = yaml.load(config)
+      config = yaml.load(config, Loader=yaml.FullLoader)
 
   if as_dict:
     config = dict(config)
@@ -374,7 +374,7 @@ def parse_config_for_selected_keys(content, keys):
   elif stripped[0] == '{':
     config = json.loads(content)
   else:
-    config = yaml.load(content)
+    config = yaml.load(content, Loader=yaml.FullLoader)
 
   if not isinstance(config, dict):
     raise ValueError('Invalid config.')

From 7c46758b1e5c85bd3558026ad44b98d301727f20 Mon Sep 17 00:00:00 2001
From: monotaro_sheng_wei
Date: Wed, 11 Mar 2020 14:31:59 +0900
Subject: [PATCH 02/10] fix some linting errors

---
 datalab/bigquery/_view.py           |  2 +-
 datalab/utils/__init__.py           |  4 ++--
 datalab/utils/_async.py             | 12 ++++++------
 datalab/utils/_lambda_job.py        |  2 +-
 google/datalab/utils/__init__.py    |  4 ++--
 google/datalab/utils/_async.py      | 12 ++++++------
 google/datalab/utils/_lambda_job.py |  2 +-
 tox.ini                             |  6 +++---
 8 files changed, 22 insertions(+), 22 deletions(-)

diff --git a/datalab/bigquery/_view.py b/datalab/bigquery/_view.py
index c0df7b726..4cb62987d 100644
--- a/datalab/bigquery/_view.py
+++ b/datalab/bigquery/_view.py
@@ -188,7 +188,7 @@ def results(self, use_cache=True, dialect=None, billing_tier=None):
 
   def execute_async(self, table_name=None, table_mode='create', use_cache=True, priority='high',
                     allow_large_results=False, dialect=None, billing_tier=None):
-    """Materialize the View asynchronously.
+    """Materialize the View async_ously.
 
     Args:
       table_name: the result table name; if None, then a temporary table will be used.
diff --git a/datalab/utils/__init__.py b/datalab/utils/__init__.py
index fd0e6e628..720a80127 100644
--- a/datalab/utils/__init__.py
+++ b/datalab/utils/__init__.py
@@ -12,7 +12,7 @@
 
 """Google Cloud Platform library - Internal Helpers."""
 
-from ._async import asynchron, async_function, async_method
+from ._async import async_, async_function, async_method
 from ._gcp_job import GCPJob
 from ._http import Http, RequestException
 from ._iterator import Iterator
@@ -24,7 +24,7 @@
 from ._utils import print_exception_with_last_stack, get_item, compare_datetimes, \
     pick_unused_port, is_http_running_on, gcs_copy_file
 
-__all__ = ['asynchron', 'async_function', 'async_method', 'GCPJob', 'Http', 'RequestException',
+__all__ = ['async_', 'async_function', 'async_method', 'GCPJob', 'Http', 'RequestException',
            'Iterator', 'Job', 'JobError', 'JSONEncoder', 'LRUCache', 'LambdaJob', 'DataflowJob',
            'print_exception_with_last_stack', 'get_item', 'compare_datetimes', 'pick_unused_port',
            'is_http_running_on', 'gcs_copy_file']
diff --git a/datalab/utils/_async.py b/datalab/utils/_async.py
index a3f5097db..b7559f009 100644
--- a/datalab/utils/_async.py
+++ b/datalab/utils/_async.py
@@ -23,7 +23,7 @@
 from future.utils import with_metaclass
 
 
-class asynchron(with_metaclass(abc.ABCMeta, object)):
+class async_(with_metaclass(abc.ABCMeta, object)):
   """ Base class for async_function/async_method. Creates a wrapped function/method that will
       run the original function/method on a thread pool worker thread and return a Job instance
       for monitoring the status of the thread.
@@ -55,7 +55,7 @@ def __call__(self, *args, **kwargs):
     return _job.Job(future=self.executor.submit(self._call, *args, **kwargs))
 
 
-class async_function(asynchron):
+class async_function(async_):
   """ This decorator can be applied to any static function that makes blocking calls to create
       a modified version that creates a Job and returns immediately; the original method will
       be called on a thread pool worker thread.
@@ -63,10 +63,10 @@ class async_function(asynchron):
 
   def _call(self, *args, **kwargs):
     # Call the wrapped method.
-    return self._function(*asynchron._preprocess_args(*args), **asynchron._preprocess_kwargs(**kwargs))
+    return self._function(*async_._preprocess_args(*args), **async_._preprocess_kwargs(**kwargs))
 
 
-class async_method(asynchron):
+class async_method(async_):
   """ This decorator can be applied to any class instance method that makes blocking calls to
       create a modified version that creates a Job and returns immediately; the original method
       will be called on a thread pool worker thread.
@@ -74,8 +74,8 @@ class async_method(asynchron):
 
   def _call(self, *args, **kwargs):
     # Call the wrapped method.
-    return self._function(self.obj, *asynchron._preprocess_args(*args),
-                          **asynchron._preprocess_kwargs(**kwargs))
+    return self._function(self.obj, *async_._preprocess_args(*args),
+                          **async_._preprocess_kwargs(**kwargs))
 
   def __get__(self, instance, owner):
     # This is important for attribute inheritance and setting self.obj so it can be
diff --git a/datalab/utils/_lambda_job.py b/datalab/utils/_lambda_job.py
index b25b11768..2fed3176e 100644
--- a/datalab/utils/_lambda_job.py
+++ b/datalab/utils/_lambda_job.py
@@ -30,7 +30,7 @@ def __init__(self, fn, job_id, *args, **kwargs):
       job_id: an optional ID for the job. If None, a UUID will be generated.
     """
     super(LambdaJob, self).__init__(job_id)
-    self._future = _async.asynchron.executor.submit(fn, *args, **kwargs)
+    self._future = _async.async_.executor.submit(fn, *args, **kwargs)
 
   def __repr__(self):
     """Returns a representation for the job for showing in the notebook.
diff --git a/google/datalab/utils/__init__.py b/google/datalab/utils/__init__.py
index b0b4af074..f4780c937 100644
--- a/google/datalab/utils/__init__.py
+++ b/google/datalab/utils/__init__.py
@@ -12,7 +12,7 @@
 
 """Google Cloud Platform library - Internal Helpers."""
 
-from ._async import asynchron, async_function, async_method
+from ._async import async_, async_function, async_method
 from ._http import Http, RequestException
 from ._iterator import Iterator
 from ._json_encoder import JSONEncoder
@@ -23,7 +23,7 @@
     pick_unused_port, is_http_running_on, gcs_copy_file, python_portable_string
 
 
-__all__ = ['asynchron', 'async_function', 'async_method', 'Http', 'RequestException', 'Iterator',
+__all__ = ['async_', 'async_function', 'async_method', 'Http', 'RequestException', 'Iterator',
            'JSONEncoder', 'LRUCache', 'LambdaJob', 'DataflowJob', 'print_exception_with_last_stack',
            'get_item', 'compare_datetimes', 'pick_unused_port', 'is_http_running_on',
            'gcs_copy_file', 'python_portable_string']
diff --git a/google/datalab/utils/_async.py b/google/datalab/utils/_async.py
index 8e7428659..b22db85d0 100644
--- a/google/datalab/utils/_async.py
+++ b/google/datalab/utils/_async.py
@@ -23,7 +23,7 @@
 from future.utils import with_metaclass
 
 
-class asynchron(with_metaclass(abc.ABCMeta, object)):
+class async_(with_metaclass(abc.ABCMeta, object)):
   """ Base class for async_function/async_method. Creates a wrapped function/method that will
       run the original function/method on a thread pool worker thread and return a Job instance
       for monitoring the status of the thread.
@@ -55,7 +55,7 @@ def __call__(self, *args, **kwargs):
     return Job(future=self.executor.submit(self._call, *args, **kwargs))
 
 
-class async_function(asynchron):
+class async_function(async_):
   """ This decorator can be applied to any static function that makes blocking calls to create
       a modified version that creates a Job and returns immediately; the original method will
       be called on a thread pool worker thread.
@@ -63,10 +63,10 @@ class async_function(asynchron):
 
   def _call(self, *args, **kwargs):
     # Call the wrapped method.
-    return self._function(*asynchron._preprocess_args(*args), **asynchron._preprocess_kwargs(**kwargs))
+    return self._function(*async_._preprocess_args(*args), **async_._preprocess_kwargs(**kwargs))
 
 
-class async_method(asynchron):
+class async_method(async_):
   """ This decorator can be applied to any class instance method that makes blocking calls to
       create a modified version that creates a Job and returns immediately; the original method
       will be called on a thread pool worker thread.
@@ -74,8 +74,8 @@ class async_method(asynchron):
 
   def _call(self, *args, **kwargs):
     # Call the wrapped method.
-    return self._function(self.obj, *asynchron._preprocess_args(*args),
-                          **asynchron._preprocess_kwargs(**kwargs))
+    return self._function(self.obj, *async_._preprocess_args(*args),
+                          **async_._preprocess_kwargs(**kwargs))
 
   def __get__(self, instance, owner):
     # This is important for attribute inheritance and setting self.obj so it can be
diff --git a/google/datalab/utils/_lambda_job.py b/google/datalab/utils/_lambda_job.py
index b297b67c0..2faa4e5ed 100644
--- a/google/datalab/utils/_lambda_job.py
+++ b/google/datalab/utils/_lambda_job.py
@@ -30,7 +30,7 @@ def __init__(self, fn, job_id, *args, **kwargs):
       job_id: an optional ID for the job. If None, a UUID will be generated.
     """
     super(LambdaJob, self).__init__(job_id)
-    self._future = _async.asynchron.executor.submit(fn, *args, **kwargs)
+    self._future = _async.async_.executor.submit(fn, *args, **kwargs)
 
   def __repr__(self):
     """Returns a representation for the job for showing in the notebook.
diff --git a/tox.ini b/tox.ini
index 99379028e..8b7c092fa 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,7 +1,7 @@
 [tox]
-# By default, we want to run tests for Python 2.7, Python 3.5, and run our
-# flake8 checks.
-envlist = py27,py35,flake8,coveralls
+# By default, we want to run tests for Python 2.7, Python 3.5, Python 3.7,
+# and run our flake8 checks.
+envlist = py27,py35,py37,flake8,coveralls
 
 # If an interpreter is missing locally, skip it.
 skip_missing_interpreters = true

From f453d80d01d927b220e0bf8436a03f765e562ac9 Mon Sep 17 00:00:00 2001
From: monotaro_sheng_wei
Date: Wed, 11 Mar 2020 14:53:48 +0900
Subject: [PATCH 03/10] rollback my mistake

---
 datalab/bigquery/_view.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/datalab/bigquery/_view.py b/datalab/bigquery/_view.py
index 4cb62987d..c0df7b726 100644
--- a/datalab/bigquery/_view.py
+++ b/datalab/bigquery/_view.py
@@ -188,7 +188,7 @@ def results(self, use_cache=True, dialect=None, billing_tier=None):
 
   def execute_async(self, table_name=None, table_mode='create', use_cache=True, priority='high',
                     allow_large_results=False, dialect=None, billing_tier=None):
-    """Materialize the View async_ously.
+    """Materialize the View asynchronously.
 
     Args:
       table_name: the result table name; if None, then a temporary table will be used.
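[Editor's note, not part of the patch series: patches 01-03 share one root cause. PEP 492 made "async" and "await" reserved words in Python 3.7 (they were only soft keywords in 3.5/3.6), so any module that uses "async" as an identifier, as the original "class async(...)" above does, stops compiling on 3.7; the SyntaxError fires at import time, before any code runs. A minimal, hypothetical check illustrating the failure (the compiled string mirrors the old class header):

    # Python <= 3.6 parses "async" as an ordinary identifier (3.6 emits a
    # DeprecationWarning); Python >= 3.7 treats it as a reserved word and
    # raises SyntaxError at compile time, before anything executes.
    try:
        compile("class async(object): pass", "<example>", "exec")
        print("parsed: 'async' is still a plain identifier here")
    except SyntaxError:
        print("SyntaxError: 'async' is reserved on this interpreter")

The two-step rename above is deliberate history: the first pass picked "asynchron", and the lint pass shortened it to "async_", PEP 8's trailing-underscore convention for keyword clashes. That blanket replace briefly turned the docstring word "asynchronously" into "async_ously", which patch 03 reverts.]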
From 2fabaa7703b240566028c84c801299713dfed2ba Mon Sep 17 00:00:00 2001
From: monotaro_sheng_wei
Date: Wed, 11 Mar 2020 15:07:14 +0900
Subject: [PATCH 04/10] try to fix incompatibility between py2 and py3

---
 datalab/utils/commands/_utils.py        | 5 ++++-
 google/datalab/utils/commands/_utils.py | 9 +++++++--
 2 files changed, 11 insertions(+), 3 deletions(-)

diff --git a/datalab/utils/commands/_utils.py b/datalab/utils/commands/_utils.py
index 2b4b45914..e864b4903 100644
--- a/datalab/utils/commands/_utils.py
+++ b/datalab/utils/commands/_utils.py
@@ -30,6 +30,7 @@
   import pandas_profiling
 except ImportError:
   pass
+import six
 import sys
 import types
 import yaml
@@ -323,8 +324,10 @@ def parse_config(config, env, as_dict=True):
     config = {}
   elif stripped[0] == '{':
     config = json.loads(config)
-  else:
+  elif six.PY3:
     config = yaml.load(config, Loader=yaml.FullLoader)
+  else:
+    config = yaml.load(config)
 
   if as_dict:
     config = dict(config)
diff --git a/google/datalab/utils/commands/_utils.py b/google/datalab/utils/commands/_utils.py
index cf4f08b47..2a42bf7fe 100644
--- a/google/datalab/utils/commands/_utils.py
+++ b/google/datalab/utils/commands/_utils.py
@@ -30,6 +30,7 @@
   import pandas_profiling
 except ImportError:
   pass
+import six
 import sys
 import yaml
 
@@ -328,8 +329,10 @@ def parse_config(config, env, as_dict=True):
     config = {}
   elif stripped[0] == '{':
     config = json.loads(config)
-  else:
+  elif six.PY3:
     config = yaml.load(config, Loader=yaml.FullLoader)
+  else:
+    config = yaml.load(config)
 
   if as_dict:
     config = dict(config)
@@ -373,8 +376,10 @@ def parse_config_for_selected_keys(content, keys):
     return {}, None
   elif stripped[0] == '{':
     config = json.loads(content)
-  else:
+  elif six.PY3:
     config = yaml.load(content, Loader=yaml.FullLoader)
+  else:
+    config = yaml.load(content)
 
   if not isinstance(config, dict):
     raise ValueError('Invalid config.')

From 4eb504393b69bf090074b5efefc9d6359cbf2d1b Mon Sep 17 00:00:00 2001
From: monotaro_sheng_wei
Date: Fri, 13 Mar 2020 15:44:46 +0900
Subject: [PATCH 05/10] Add Loader=yaml.FullLoader

Add trove for py3
---
 setup.py                         |  1 +
 tests/_util/commands_tests.py    | 10 ++++++++--
 tests/pipeline/pipeline_tests.py | 16 +++++++++++++---
 3 files changed, 22 insertions(+), 5 deletions(-)

diff --git a/setup.py b/setup.py
index 8c524f7d7..6ad55fcb9 100644
--- a/setup.py
+++ b/setup.py
@@ -78,6 +78,7 @@
   classifiers=[
     "Programming Language :: Python",
    "Programming Language :: Python :: 2",
+    "Programming Language :: Python :: 3",
    "Development Status :: 4 - Beta",
    "Environment :: Other Environment",
    "Intended Audience :: Developers",
diff --git a/tests/_util/commands_tests.py b/tests/_util/commands_tests.py
index 0da13a035..7cc9adaeb 100644
--- a/tests/_util/commands_tests.py
+++ b/tests/_util/commands_tests.py
@@ -95,7 +95,10 @@ def test_subcommand_line_cell(self):
                                 'string4: value4\nflag1: false')
     self.assertEqual(args, {'string1': 'value1', 'string2': 'value2', 'string3': 'value3',
                             'command': 'subcommand2', 'flag1': False})
-    self.assertEqual(yaml.load(cell), {'string3': 'value3', 'string4': 'value4'})
+    if six.PY3:
+      self.assertEqual(yaml.load(cell, Loader=yaml.FullLoader), {'string3': 'value3', 'string4': 'value4'})
+    else:
+      self.assertEqual(yaml.load(cell), {'string3': 'value3', 'string4': 'value4'})
 
     # Regular arg and cell arg cannot be the same name.
     with self.assertRaises(ValueError):
@@ -133,7 +136,10 @@ def test_subcommand_var_replacement(self):
     args, cell = parser.parse('subcommand1 --string1 $var1', 'a: b\nstring2: $var2', namespace)
     self.assertEqual(args, {'string1': 'value1', 'string2': 'value2', 'flag1': False,
                             'dict1': None})
-    self.assertEqual(yaml.load(cell), {'a': 'b', 'string2': '$var2'})
+    if six.PY3:
+      self.assertEqual(yaml.load(cell, Loader=yaml.FullLoader), {'a': 'b', 'string2': '$var2'})
+    else:
+      self.assertEqual(yaml.load(cell), {'a': 'b', 'string2': '$var2'})
 
     cell = """
 dict1:
diff --git a/tests/pipeline/pipeline_tests.py b/tests/pipeline/pipeline_tests.py
index 615468a1c..ff7f72908 100644
--- a/tests/pipeline/pipeline_tests.py
+++ b/tests/pipeline/pipeline_tests.py
@@ -16,6 +16,7 @@
 import datetime
 import mock
 import re
+import six
 import unittest
 import yaml
 
@@ -262,7 +263,10 @@ def test_get_dag_definition(self):
                      'catchup=True, default_args=default_args)\n\n')
 
   def test_get_datetime_expr(self):
-    dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec)
+    if six.PY3:
+      dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec, Loader=yaml.FullLoader)
+    else:
+      dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec)
     start = dag_dict.get('schedule').get('start')
 
     datetime_expr = pipeline.PipelineGenerator._get_datetime_expr_str(start)
@@ -277,7 +281,10 @@ def test_get_default_args(self):
     self.assertIn("'email': []", actual)
     self.assertIn("'owner': 'Google Cloud Datalab'", actual)
 
-    dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec)
+    if six.PY3:
+      dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec, Loader=yaml.FullLoader)
+    else:
+      dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec)
     dag_dict['schedule']['retries'] = 5
     dag_dict['schedule']['email_on_retry'] = False
     dag_dict['schedule']['email_on_failure'] = False
@@ -302,7 +309,10 @@ def test_get_default_args(self):
     self.assertIn("'max_retry_delay': timedelta(seconds=15)", actual)
 
   def test_get_airflow_spec_with_default_schedule(self):
-    dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec)
+    if six.PY3:
+      dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec, Loader=yaml.FullLoader)
+    else:
+      dag_dict = yaml.load(PipelineTest._test_pipeline_yaml_spec)
 
     # We delete the schedule spec to test with defaults
     del dag_dict['schedule']

From 5f0bfde9333a9420dba2ba4f143143dcd0a57a2f Mon Sep 17 00:00:00 2001
From: monotaro_sheng_wei
Date: Fri, 13 Mar 2020 15:49:07 +0900
Subject: [PATCH 06/10] Add py37 for travis

---
 .travis.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.travis.yml b/.travis.yml
index d63575feb..010a0dd91 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -8,6 +8,8 @@ matrix:
       env: TOX_ENV=py27
     - python: 3.5
       env: TOX_ENV=py35
+    - python: 3.7
+      env: TOX_ENV=py37
     - python: 2.7
       env: TOX_ENV=flake8
     - python: 2.7

From 43a6a3e58696c7465a18a3abf3425c4d1c905c1e Mon Sep 17 00:00:00 2001
From: monotaro_sheng_wei
Date: Tue, 17 Mar 2020 09:36:55 +0900
Subject: [PATCH 07/10] Add py37 for travis

---
 .travis.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.travis.yml b/.travis.yml
index 010a0dd91..d816cb70b 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -10,6 +10,7 @@ matrix:
       env: TOX_ENV=py35
     - python: 3.7
       env: TOX_ENV=py37
+      dist: xenial
    - python: 2.7
      env: TOX_ENV=flake8
    - python: 2.7

From 391ba87fd129889f98bec776f03b280f2983260b Mon Sep 17 00:00:00 2001
From: monotaro_sheng_wei
Date: Tue, 17 Mar 2020 09:57:08 +0900
Subject: [PATCH 08/10] Add py37 for travis

---
 tests/_util/commands_tests.py | 5 ++++-
 tox.ini                       | 2 +-
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/tests/_util/commands_tests.py b/tests/_util/commands_tests.py
index 7cc9adaeb..7c1aea525 100644
--- a/tests/_util/commands_tests.py
+++ b/tests/_util/commands_tests.py
@@ -96,7 +96,10 @@ def test_subcommand_line_cell(self):
     self.assertEqual(args, {'string1': 'value1', 'string2': 'value2', 'string3': 'value3',
                             'command': 'subcommand2', 'flag1': False})
     if six.PY3:
-      self.assertEqual(yaml.load(cell, Loader=yaml.FullLoader), {'string3': 'value3', 'string4': 'value4'})
+      self.assertEqual(
+          yaml.load(cell, Loader=yaml.FullLoader),
+          {'string3': 'value3', 'string4': 'value4'}
+      )
     else:
       self.assertEqual(yaml.load(cell), {'string3': 'value3', 'string4': 'value4'})
 
diff --git a/tox.ini b/tox.ini
index 8b7c092fa..d9684678d 100644
--- a/tox.ini
+++ b/tox.ini
@@ -10,7 +10,7 @@ skip_missing_interpreters = true
 # need them to run our tests suite.
 #
 # tox always installs the current package, so there's no need to list it here.
-deps = apache-airflow==1.9.0
+deps = apache-airflow==1.10.9
        dill==0.2.6
        tensorflow==1.8.0
        lime==0.1.1.23

From c1ea3398cae56390fe7f9bb0f5615cfdd1686e79 Mon Sep 17 00:00:00 2001
From: monotaro_sheng_wei
Date: Tue, 17 Mar 2020 13:06:24 +0900
Subject: [PATCH 09/10] Add py37 for travis

---
 .travis.yml | 2 +-
 tox.ini     | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index d816cb70b..0772bb781 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -21,7 +21,7 @@ before_install:
   - tsc --module amd --noImplicitAny --outdir datalab/notebook/static datalab/notebook/static/*.ts
   # We use tox for actually running tests.
   - pip install --upgrade pip tox
-  
+
 script:
   # tox reads its configuration from tox.ini.
   - tox -e $TOX_ENV
diff --git a/tox.ini b/tox.ini
index d9684678d..4dc3b40e1 100644
--- a/tox.ini
+++ b/tox.ini
@@ -12,7 +12,7 @@ skip_missing_interpreters = true
 # tox always installs the current package, so there's no need to list it here.
 deps = apache-airflow==1.10.9
        dill==0.2.6
-       tensorflow==1.8.0
+       tensorflow==1.15.2
        lime==0.1.1.23
        xgboost==0.6a2
 # Dropping this seems to cause problems with conda in some cases.

From 7934bcf60f93c3956bbcba93c4409d25b2232dbd Mon Sep 17 00:00:00 2001
From: monotaro_sheng_wei
Date: Tue, 17 Mar 2020 13:12:34 +0900
Subject: [PATCH 10/10] Add py37 for travis

---
 tox.ini | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tox.ini b/tox.ini
index 4dc3b40e1..f3575df22 100644
--- a/tox.ini
+++ b/tox.ini
@@ -12,7 +12,7 @@ skip_missing_interpreters = true
 # tox always installs the current package, so there's no need to list it here.
 deps = apache-airflow==1.10.9
        dill==0.2.6
-       tensorflow==1.15.2
+       tensorflow==1.14.0
        lime==0.1.1.23
        xgboost==0.6a2
 # Dropping this seems to cause problems with conda in some cases.
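[Editor's note, not part of the patch series: the yaml.load() edits in patches 01, 04, and 05 all apply one pattern. PyYAML 5.1+ emits a YAMLLoadWarning when yaml.load() is called without an explicit Loader, and yaml.FullLoader resolves standard YAML tags while refusing arbitrary Python object construction; the bare call survives only on the Python 2 side, where the installed PyYAML may predate FullLoader. A condensed sketch of that pattern, assuming six and PyYAML >= 5.1 on Python 3; the load_config helper name is illustrative, not taken from the patches:

    import six
    import yaml

    def load_config(text):
        if six.PY3:
            # PyYAML >= 5.1: name a Loader explicitly to avoid the
            # YAMLLoadWarning; FullLoader handles standard tags without
            # constructing arbitrary Python objects.
            return yaml.load(text, Loader=yaml.FullLoader)
        # Python 2 may carry an older PyYAML without FullLoader,
        # so keep the legacy call there.
        return yaml.load(text)

    # load_config('retries: 5\nemail_on_retry: false')
    # -> {'retries': 5, 'email_on_retry': False}

The remaining patches (06-10) are CI plumbing: a py37 tox environment run on Travis with dist: xenial (Travis's older default image shipped no Python 3.7), plus successive pins of apache-airflow (1.10.9) and tensorflow (1.15.2, then 1.14.0), apparently chosen to install cleanly under Python 3.7.]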