diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index aef45b60e47..8b018a5bdeb 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -154,11 +154,10 @@ jobs: with: python-version: ${{env.PYTHON_VERSION}} - - name: Run Black on Python code + - name: Run Ruff on Python code run: | - python -m pip install click==8.0.4 - python -m pip install -U black - python -m black . --check + python -m pip install -U ruff + python -m ruff check . working-directory: pythonFiles - name: Run gulp prePublishNonBundle diff --git a/.vscode/extensions.json b/.vscode/extensions.json index ac4743919a6..2ab13116972 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -6,7 +6,7 @@ "editorconfig.editorconfig", "esbenp.prettier-vscode", "hbenl.vscode-mocha-test-adapter", - "ms-python.black", + "charliermarsh.ruff", "ms-vscode.extension-test-runner" ] } diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6c1b61cc781..2b554cf7311 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -23,6 +23,7 @@ - [Prettier](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode) - [EditorConfig for VS Code](https://marketplace.visualstudio.com/items?itemName=EditorConfig.EditorConfig) - [Python Extension for VS Code](https://marketplace.visualstudio.com/items?itemName=ms-python.python) + - [Ruff](https://marketplace.visualstudio.com/items?itemName=charliermarsh.ruff) ### Setup @@ -37,7 +38,7 @@ python3 -m venv .venv source .venv/bin/activate # and in Windows cmd or PowerShell .venv\Scripts\activate -# The Python code in the extension is formatted using Black. +# The Python code in the extension is formatted using Ruff. 
-python -m pip install black +python -m pip install ruff ``` diff --git a/build/existingFiles.json b/build/existingFiles.json index 5f8df586379..8ddfceac212 100644 --- a/build/existingFiles.json +++ b/build/existingFiles.json @@ -153,7 +153,6 @@ "src/extension.ts", "src/platform/formatters/autoPep8Formatter.ts", "src/platform/formatters/baseFormatter.ts", - "src/platform/formatters/blackFormatter.ts", "src/platform/formatters/dummyFormatter.ts", "src/platform/formatters/helper.ts", "src/platform/formatters/lineFormatter.ts", diff --git a/build/unlocalizedFiles.json b/build/unlocalizedFiles.json index 6b6f453f009..179fc165f4f 100644 --- a/build/unlocalizedFiles.json +++ b/build/unlocalizedFiles.json @@ -5,7 +5,6 @@ "src/platform/common/installer/productInstaller.ts", "src/platform/debugger/extension/hooks/childProcessAttachService.ts", "src/platform/formatters/baseFormatter.ts", - "src/platform/formatters/blackFormatter.ts", "src/platform/interpreter/configuration/pythonPathUpdaterService.ts", "src/platform/linters/errorHandlers/notInstalled.ts", "src/platform/linters/errorHandlers/standard.ts", diff --git a/pythonFiles/.vscode/settings.json b/pythonFiles/.vscode/settings.json index 0f49d48f2e8..827237df115 100644 --- a/pythonFiles/.vscode/settings.json +++ b/pythonFiles/.vscode/settings.json @@ -3,5 +3,13 @@ "**/__pycache__/**": true, "**/**/*.pyc": true }, - "python.formatting.provider": "black" + "[python]": { + "editor.defaultFormatter": "charliermarsh.ruff", + "editor.codeActionsOnSave": { + "source.fixAll": "explicit" + }, + "notebook.codeActionsOnSave": { + "notebook.source.fixAll": "explicit" + } + } } diff --git a/pythonFiles/tests/ipython/getJupyterVariableList.py b/pythonFiles/tests/ipython/getJupyterVariableList.py index 2253e8e4107..126520b7ecf 100644 --- a/pythonFiles/tests/ipython/getJupyterVariableList.py +++ b/pythonFiles/tests/ipython/getJupyterVariableList.py @@ -1,3 +1,7 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
+ + # Query Jupyter server for defined variables list # Tested on 2.7 and 3.6 from sys import getsizeof as _VSCODE_getsizeof @@ -35,7 +39,7 @@ ) del _VSCode_type del _VSCode_var - except: + except: # noqa: E722 pass builtins.print(_VSCODE_json.dumps(_VSCode_output)) diff --git a/pythonFiles/tests/ipython/getJupyterVariableValue.py b/pythonFiles/tests/ipython/getJupyterVariableValue.py index d8835f64f58..477b8d8896e 100644 --- a/pythonFiles/tests/ipython/getJupyterVariableValue.py +++ b/pythonFiles/tests/ipython/getJupyterVariableValue.py @@ -1,3 +1,7 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + + import sys as VC_sys import locale as VC_locale @@ -9,11 +13,11 @@ class VC_SafeRepr(object): # Py3 compat - alias unicode to str, and xrange to range try: - unicode # noqa + unicode # type: ignore # noqa except NameError: unicode = str try: - xrange # noqa + xrange # type: ignore # noqa except NameError: xrange = range @@ -38,7 +42,7 @@ class VC_SafeRepr(object): string_types = (str, unicode) set_info = (set, "set([", "])", False) frozenset_info = (frozenset, "frozenset([", "])", False) - int_types = (int, long) # noqa + int_types = (int, long) # type: ignore # noqa long_iter_types = ( list, tuple, @@ -47,7 +51,7 @@ class VC_SafeRepr(object): dict, set, frozenset, - buffer, + buffer, # type: ignore # noqa: F821 ) # noqa # Collection types are recursively iterated for each limit in @@ -90,9 +94,9 @@ class VC_SafeRepr(object): def __call__(self, obj): try: - if VC_IS_PY2: + if VC_IS_PY2: # noqa: F821 return "".join( - (x.encode("utf-8") if isinstance(x, unicode) else x) + (x.encode("utf-8") if isinstance(x, unicode) else x) # noqa: F821 # type: ignore for x in self._repr(obj, 0) ) else: @@ -172,7 +176,7 @@ def _is_long_iter(self, obj, level=0): return False # xrange reprs fine regardless of length. 
- if isinstance(obj, xrange): + if isinstance(obj, xrange): # type: ignore # noqa: F821 return False # numpy and scipy collections (ndarray etc) have @@ -345,17 +349,17 @@ def _repr_obj(self, obj, level, limit_inner, limit_outer): max(1, int(limit / 3)), ) # noqa - if VC_IS_PY2 and isinstance(obj_repr, bytes): + if VC_IS_PY2 and isinstance(obj_repr, bytes): # noqa: F821 # If we can convert to unicode before slicing, that's better (but don't do # it if it's not possible as we may be dealing with actual binary data). obj_repr = self._bytes_as_unicode_if_possible(obj_repr) - if isinstance(obj_repr, unicode): + if isinstance(obj_repr, unicode): # type: ignore # noqa: F821 # Deal with high-surrogate leftovers on Python 2. try: - if left_count > 0 and unichr(0xD800) <= obj_repr[ + if left_count > 0 and unichr(0xD800) <= obj_repr[ # noqa: F821 # type: ignore left_count - 1 - ] <= unichr(0xDBFF): + ] <= unichr(0xDBFF): # type: ignore # noqa: F821 left_count -= 1 except ValueError: # On Jython unichr(0xD800) will throw an error: @@ -371,9 +375,9 @@ def _repr_obj(self, obj, level, limit_inner, limit_outer): # Deal with high-surrogate leftovers on Python 2. 
try: - if right_count > 0 and unichr(0xD800) <= obj_repr[ + if right_count > 0 and unichr(0xD800) <= obj_repr[ # noqa: F821 # type: ignore -right_count - 1 - ] <= unichr(0xDBFF): + ] <= unichr(0xDBFF): # type: ignore # noqa: F821 right_count -= 1 except ValueError: # On Jython unichr(0xD800) will throw an error: @@ -392,7 +396,7 @@ def _repr_obj(self, obj, level, limit_inner, limit_outer): yield obj_repr[-right_count:] def _convert_to_unicode_or_bytes_repr(self, obj_repr): - if VC_IS_PY2 and isinstance(obj_repr, bytes): + if VC_IS_PY2 and isinstance(obj_repr, bytes): # noqa: F821 obj_repr = self._bytes_as_unicode_if_possible(obj_repr) if isinstance(obj_repr, bytes): # If we haven't been able to decode it this means it's some binary data @@ -406,12 +410,12 @@ def _bytes_as_unicode_if_possible(self, obj_repr): # locale.getpreferredencoding() and 'utf-8). If no encoding can decode # the input, we return the original bytes. try_encodings = [] - encoding = self.sys_stdout_encoding or getattr(VC_sys.stdout, "encoding", "") + encoding = self.sys_stdout_encoding or getattr(VC_sys.stdout, "encoding", "") # noqa: F821 if encoding: try_encodings.append(encoding.lower()) preferred_encoding = ( - self.locale_preferred_encoding or VC_locale.getpreferredencoding() + self.locale_preferred_encoding or VC_locale.getpreferredencoding() # noqa: F821 ) if preferred_encoding: preferred_encoding = preferred_encoding.lower() @@ -431,7 +435,7 @@ def _bytes_as_unicode_if_possible(self, obj_repr): # Query Jupyter server for the value of a variable -import json as _VSCODE_json +import json as _VSCODE_json # noqa: E402 _VSCODE_max_len = 200 # In IJupyterVariables.getValue this '_VSCode_JupyterTestValue' will be replaced with the json stringified value of the target variable diff --git a/pythonFiles/tests/ipython/scripts.py b/pythonFiles/tests/ipython/scripts.py index 4d6bd8803b9..0a9fd3ffa96 100644 --- a/pythonFiles/tests/ipython/scripts.py +++ b/pythonFiles/tests/ipython/scripts.py @@ -11,7 
+11,7 @@ def check_for_ipython(): try: from IPython import get_ipython - return not get_ipython() == None + return not get_ipython() == None # noqa: E711 except ImportError: pass return False @@ -33,7 +33,7 @@ def execute_script(file, replace_dict=dict([])): # Replace the key value pairs contents += ( line - if regex == None + if regex is None else regex.sub(lambda m: replace_dict[m.group()], line) ) @@ -44,7 +44,7 @@ def execute_script(file, replace_dict=dict([])): def execute_code(code): # Execute this script as a cell - result = get_ipython().run_cell(code) + result = get_ipython().run_cell(code) # type: ignore # noqa: F821 return result diff --git a/pythonFiles/tests/ipython/test_variables.py b/pythonFiles/tests/ipython/test_variables.py index 1267f3ed880..2f3e8fb4392 100644 --- a/pythonFiles/tests/ipython/test_variables.py +++ b/pythonFiles/tests/ipython/test_variables.py @@ -82,7 +82,7 @@ def test_dataframe_info(capsys): df = get_variable_value(vars, "df", capsys) se = get_variable_value(vars, "se", capsys) np = get_variable_value(vars, "np1", capsys) - np2 = get_variable_value(vars, "np2", capsys) + np2 = get_variable_value(vars, "np2", capsys) # noqa: F841 ls = get_variable_value(vars, "ls", capsys) obj = get_variable_value(vars, "obj", capsys) df3 = get_variable_value(vars, "df3", capsys) diff --git a/pythonFiles/tests/run_all.py b/pythonFiles/tests/run_all.py index ce5a6264996..6095ec7d193 100644 --- a/pythonFiles/tests/run_all.py +++ b/pythonFiles/tests/run_all.py @@ -7,7 +7,7 @@ sys.path[0] = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -from tests.__main__ import main, parse_args +from tests.__main__ import main, parse_args # noqa: E402 if __name__ == "__main__": diff --git a/pythonFiles/tests/test_normalize_selection.py b/pythonFiles/tests/test_normalize_selection.py index dbb7ec8c271..679bc621c48 100644 --- a/pythonFiles/tests/test_normalize_selection.py +++ b/pythonFiles/tests/test_normalize_selection.py @@ -1,8 +1,6 @@ # Copyright 
(c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. -import pytest -import sys import textwrap import normalizeSelection diff --git a/pythonFiles/vscode_datascience_helpers/dataframes/vscodeDataFrame.py b/pythonFiles/vscode_datascience_helpers/dataframes/vscodeDataFrame.py index 59c9e2a1363..882b464c77a 100644 --- a/pythonFiles/vscode_datascience_helpers/dataframes/vscodeDataFrame.py +++ b/pythonFiles/vscode_datascience_helpers/dataframes/vscodeDataFrame.py @@ -1,3 +1,7 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + + def _VSCODE_getDataFrame(what_to_get, is_debugging, *args): import pandas as _VSCODE_pd import builtins as _VSCODE_builtins @@ -9,51 +13,51 @@ def _VSCODE_getDataFrame(what_to_get, is_debugging, *args): _VSCODE_allowedTensorTypes = ["Tensor", "EagerTensor"] def _VSCODE_stringifyElement(element): - if _VSCODE_builtins.isinstance(element, _VSCODE_np.ndarray): + if _VSCODE_builtins.isinstance(element, _VSCODE_np.ndarray): # noqa: F821 # Ensure no rjust or ljust padding is applied to stringified elements - stringified = _VSCODE_np.array2string( + stringified = _VSCODE_np.array2string( # noqa: F821 element, separator=", ", - formatter={"all": lambda x: _VSCODE_builtins.str(x)}, + formatter={"all": lambda x: _VSCODE_builtins.str(x)}, # noqa: F821 ) - elif _VSCODE_builtins.isinstance( - element, (_VSCODE_builtins.list, _VSCODE_builtins.tuple) + elif _VSCODE_builtins.isinstance( # noqa: F821 + element, (_VSCODE_builtins.list, _VSCODE_builtins.tuple) # noqa: F821 ): # We can't pass lists and tuples to array2string because it expects # the size attribute to be defined - stringified = _VSCODE_builtins.str(element) + stringified = _VSCODE_builtins.str(element) # noqa: F821 else: stringified = element return stringified def _VSCODE_convertNumpyArrayToDataFrame(ndarray, start=None, end=None): # Save the user's current setting - current_options = _VSCODE_np.get_printoptions() + 
current_options = _VSCODE_np.get_printoptions() # noqa: F821 # Ask for the full string. Without this numpy truncates to 3 leading and 3 trailing by default - _VSCODE_np.set_printoptions(threshold=99999) + _VSCODE_np.set_printoptions(threshold=99999) # noqa: F821 flattened = None try: if start is not None and end is not None: ndarray = ndarray[start:end] - if ndarray.ndim < 3 and _VSCODE_builtins.str(ndarray.dtype) != "object": + if ndarray.ndim < 3 and _VSCODE_builtins.str(ndarray.dtype) != "object": # noqa: F821 pass - elif ndarray.ndim == 1 and _VSCODE_builtins.str(ndarray.dtype) == "object": - flattened = _VSCODE_np.empty(ndarray.shape[:2], dtype="object") - for i in _VSCODE_builtins.range(_VSCODE_builtins.len(flattened)): + elif ndarray.ndim == 1 and _VSCODE_builtins.str(ndarray.dtype) == "object": # noqa: F821 + flattened = _VSCODE_np.empty(ndarray.shape[:2], dtype="object") # noqa: F821 + for i in _VSCODE_builtins.range(_VSCODE_builtins.len(flattened)): # noqa: F821 flattened[i] = _VSCODE_stringifyElement(ndarray[i]) ndarray = flattened else: - flattened = _VSCODE_np.empty(ndarray.shape[:2], dtype="object") - for i in _VSCODE_builtins.range(_VSCODE_builtins.len(flattened)): - for j in _VSCODE_builtins.range(_VSCODE_builtins.len(flattened[i])): + flattened = _VSCODE_np.empty(ndarray.shape[:2], dtype="object") # noqa: F821 + for i in _VSCODE_builtins.range(_VSCODE_builtins.len(flattened)): # noqa: F821 + for j in _VSCODE_builtins.range(_VSCODE_builtins.len(flattened[i])): # noqa: F821 flattened[i][j] = _VSCODE_stringifyElement(ndarray[i][j]) ndarray = flattened finally: # Restore the user's printoptions - _VSCODE_np.set_printoptions(threshold=current_options["threshold"]) + _VSCODE_np.set_printoptions(threshold=current_options["threshold"]) # noqa: F821 del flattened - return _VSCODE_pd.DataFrame(ndarray) + return _VSCODE_pd.DataFrame(ndarray) # noqa: F821 # Function that converts tensors to DataFrames def _VSCODE_convertTensorToDataFrame(tensor, start=None, 
end=None): @@ -65,12 +69,12 @@ def _VSCODE_convertTensorToDataFrame(tensor, start=None, end=None): temp = temp[start:end] # Can't directly convert sparse tensors to numpy arrays # so first convert them to dense tensors - if _VSCODE_builtins.hasattr(temp, "is_sparse") and temp.is_sparse: + if _VSCODE_builtins.hasattr(temp, "is_sparse") and temp.is_sparse: # noqa: F821 # This guard is needed because to_dense exists on all PyTorch # tensors and throws an error if the tensor is already strided temp = temp.to_dense() # See https://discuss.pytorch.org/t/should-it-really-be-necessary-to-do-var-detach-cpu-numpy/35489 - if _VSCODE_builtins.hasattr(temp, "data"): + if _VSCODE_builtins.hasattr(temp, "data"): # noqa: F821 # PyTorch tensors need to be explicitly detached # from the computation graph and copied to CPU temp = temp.data.detach().cpu() @@ -82,7 +86,7 @@ def _VSCODE_convertTensorToDataFrame(tensor, start=None, end=None): temp = _VSCODE_convertNumpyArrayToDataFrame(temp) tensor = temp del temp - except _VSCODE_builtins.AttributeError: + except _VSCODE_builtins.AttributeError: # noqa: F821 # TensorFlow EagerTensors and PyTorch Tensors support numpy() # but avoid a crash just in case the current variable doesn't pass @@ -90,31 +94,31 @@ def _VSCODE_convertTensorToDataFrame(tensor, start=None, end=None): # Function that converts the var passed in into a pandas data frame if possible def _VSCODE_convertToDataFrame(df, start=None, end=None): - vartype = _VSCODE_builtins.type(df) - if _VSCODE_builtins.isinstance(df, _VSCODE_builtins.list): - df = _VSCODE_pd.DataFrame(df).iloc[start:end] - elif _VSCODE_builtins.isinstance(df, _VSCODE_pd.Series): - df = _VSCODE_pd.Series.to_frame(df).iloc[start:end] - elif _VSCODE_builtins.isinstance(df, _VSCODE_builtins.dict): - df = _VSCODE_pd.Series(df) - df = _VSCODE_pd.Series.to_frame(df).iloc[start:end] - elif _VSCODE_builtins.hasattr(df, "toPandas"): + vartype = _VSCODE_builtins.type(df) # noqa: F821 + if 
_VSCODE_builtins.isinstance(df, _VSCODE_builtins.list): # noqa: F821 + df = _VSCODE_pd.DataFrame(df).iloc[start:end] # noqa: F821 + elif _VSCODE_builtins.isinstance(df, _VSCODE_pd.Series): # noqa: F821 + df = _VSCODE_pd.Series.to_frame(df).iloc[start:end] # noqa: F821 + elif _VSCODE_builtins.isinstance(df, _VSCODE_builtins.dict): # noqa: F821 + df = _VSCODE_pd.Series(df) # noqa: F821 + df = _VSCODE_pd.Series.to_frame(df).iloc[start:end] # noqa: F821 + elif _VSCODE_builtins.hasattr(df, "toPandas"): # noqa: F821 df = df.toPandas().iloc[start:end] - elif _VSCODE_builtins.hasattr(df, "to_pandas"): + elif _VSCODE_builtins.hasattr(df, "to_pandas"): # noqa: F821 df = df.to_pandas().iloc[start:end] elif ( - _VSCODE_builtins.hasattr(vartype, "__name__") + _VSCODE_builtins.hasattr(vartype, "__name__") # noqa: F821 and vartype.__name__ in _VSCODE_allowedTensorTypes ): df = _VSCODE_convertTensorToDataFrame(df, start, end) elif ( - _VSCODE_builtins.hasattr(vartype, "__name__") + _VSCODE_builtins.hasattr(vartype, "__name__") # noqa: F821 and vartype.__name__ == "ndarray" ): df = _VSCODE_convertNumpyArrayToDataFrame(df, start, end) elif ( - _VSCODE_builtins.hasattr(df, "__array__") - and _VSCODE_builtins.hasattr(vartype, "__name__") + _VSCODE_builtins.hasattr(df, "__array__") # noqa: F821 + and _VSCODE_builtins.hasattr(vartype, "__name__") # noqa: F821 and vartype.__name__ == "DataArray" ): df = _VSCODE_convertNumpyArrayToDataFrame( @@ -124,26 +128,26 @@ def _VSCODE_convertToDataFrame(df, start=None, end=None): """Disabling bandit warning for try, except, pass. 
We want to swallow all exceptions here to not crash on variable fetching""" try: - temp = _VSCODE_pd.DataFrame(df).iloc[start:end] + temp = _VSCODE_pd.DataFrame(df).iloc[start:end] # noqa: F821 df = temp - except: # nosec + except: # nosec # noqa: E722 pass del vartype return df # Function to compute row count for a value def _VSCODE_getRowCount(var): - if _VSCODE_builtins.hasattr(var, "shape"): + if _VSCODE_builtins.hasattr(var, "shape"): # noqa: F821 try: # Get a bit more restrictive with exactly what we want to count as a shape, since anything can define it - if _VSCODE_builtins.isinstance(var.shape, _VSCODE_builtins.tuple): + if _VSCODE_builtins.isinstance(var.shape, _VSCODE_builtins.tuple): # noqa: F821 return var.shape[0] - except _VSCODE_builtins.TypeError: + except _VSCODE_builtins.TypeError: # noqa: F821 return 0 - elif _VSCODE_builtins.hasattr(var, "__len__"): + elif _VSCODE_builtins.hasattr(var, "__len__"): # noqa: F821 try: - return _VSCODE_builtins.len(var) - except _VSCODE_builtins.TypeError: + return _VSCODE_builtins.len(var) # noqa: F821 + except _VSCODE_builtins.TypeError: # noqa: F821 return 0 # Function to retrieve a set of rows for a data frame @@ -153,19 +157,19 @@ def _VSCODE_getDataFrameRows(df, start, end): try: df = df.replace( { - _VSCODE_np.inf: "inf", - -_VSCODE_np.inf: "-inf", - _VSCODE_np.nan: "nan", + _VSCODE_np.inf: "inf", # noqa: F821 + -_VSCODE_np.inf: "-inf", # noqa: F821 + _VSCODE_np.nan: "nan", # noqa: F821 } ) - except: + except: # noqa: E722 pass if is_debugging: - return _VSCODE_pd_json.to_json(None, df, orient="split", date_format="iso") + return _VSCODE_pd_json.to_json(None, df, orient="split", date_format="iso") # noqa: F821 else: - return _VSCODE_builtins.print( - _VSCODE_pd_json.to_json(None, df, orient="split", date_format="iso") + return _VSCODE_builtins.print( # noqa: F821 + _VSCODE_pd_json.to_json(None, df, orient="split", date_format="iso") # noqa: F821 ) # Function to get info on the passed in data frame @@ -179,19 
+183,19 @@ def _VSCODE_getDataFrameInfo(df): if rowCount: try: row = df.iloc[0:1] - json_row = _VSCODE_pd_json.to_json(None, row, date_format="iso") - columnNames = _VSCODE_builtins.list(_VSCODE_json.loads(json_row)) - except: - columnNames = _VSCODE_builtins.list(df) + json_row = _VSCODE_pd_json.to_json(None, row, date_format="iso") # noqa: F821 + columnNames = _VSCODE_builtins.list(_VSCODE_json.loads(json_row)) # noqa: F821 + except: # noqa: E722 + columnNames = _VSCODE_builtins.list(df) # noqa: F821 else: - columnNames = _VSCODE_builtins.list(df) + columnNames = _VSCODE_builtins.list(df) # noqa: F821 - columnTypes = _VSCODE_builtins.list(df.dtypes) + columnTypes = _VSCODE_builtins.list(df.dtypes) # noqa: F821 # Compute the index column. It may have been renamed try: indexColumn = df.index.name if df.index.name else "index" - except _VSCODE_builtins.AttributeError: + except _VSCODE_builtins.AttributeError: # noqa: F821 indexColumn = "index" # Make sure the index column exists @@ -201,13 +205,13 @@ def _VSCODE_getDataFrameInfo(df): # Then loop and generate our output json columns = [] - for n in _VSCODE_builtins.range(0, _VSCODE_builtins.len(columnNames)): + for n in _VSCODE_builtins.range(0, _VSCODE_builtins.len(columnNames)): # noqa: F821 column_type = columnTypes[n] - column_name = _VSCODE_builtins.str(columnNames[n]) + column_name = _VSCODE_builtins.str(columnNames[n]) # noqa: F821 colobj = {} colobj["key"] = column_name colobj["name"] = column_name - colobj["type"] = _VSCODE_builtins.str(column_type) + colobj["type"] = _VSCODE_builtins.str(column_type) # noqa: F821 columns.append(colobj) # Save this in our target @@ -218,16 +222,16 @@ def _VSCODE_getDataFrameInfo(df): # return our json object as a string if is_debugging: - return _VSCODE_json.dumps(target) + return _VSCODE_json.dumps(target) # noqa: F821 else: - return _VSCODE_builtins.print(_VSCODE_json.dumps(target)) + return _VSCODE_builtins.print(_VSCODE_json.dumps(target)) # noqa: F821 try: if 
what_to_get == "rows": return _VSCODE_getDataFrameRows(*args) else: return _VSCODE_getDataFrameInfo(*args) - except: + except: # noqa: E722 del _VSCODE_pd del _VSCODE_json del _VSCODE_pd_json diff --git a/pythonFiles/vscode_datascience_helpers/dummyJupyter.py b/pythonFiles/vscode_datascience_helpers/dummyJupyter.py index 1a1a6cc07cb..d40c0b3795e 100644 --- a/pythonFiles/vscode_datascience_helpers/dummyJupyter.py +++ b/pythonFiles/vscode_datascience_helpers/dummyJupyter.py @@ -1,6 +1,5 @@ # This file can mimic juypter running. Useful for testing jupyter crash handling -import sys import argparse import time diff --git a/pythonFiles/vscode_datascience_helpers/getJupyterVariableDataFrameInfo.py b/pythonFiles/vscode_datascience_helpers/getJupyterVariableDataFrameInfo.py index eadd080c312..4f1d530c407 100644 --- a/pythonFiles/vscode_datascience_helpers/getJupyterVariableDataFrameInfo.py +++ b/pythonFiles/vscode_datascience_helpers/getJupyterVariableDataFrameInfo.py @@ -77,7 +77,7 @@ def _VSCODE_getRowCount(var): _VSCODE_columnNames = list(_VSCODE_json.loads(_VSCODE_json_row)) del _VSCODE_row del _VSCODE_json_row - except: + except: # noqa: E722 _VSCODE_columnNames = list(_VSCODE_df) else: _VSCODE_columnNames = list(_VSCODE_df) diff --git a/pythonFiles/vscode_datascience_helpers/getJupyterVariableDataFrameRows.py b/pythonFiles/vscode_datascience_helpers/getJupyterVariableDataFrameRows.py index f202f8ae4ef..d7932260b14 100644 --- a/pythonFiles/vscode_datascience_helpers/getJupyterVariableDataFrameRows.py +++ b/pythonFiles/vscode_datascience_helpers/getJupyterVariableDataFrameRows.py @@ -12,9 +12,9 @@ # _VSCode_JupyterStartRow and _VSCode_JupyterEndRow should be replaced dynamically with the literals # for our start and end rows -_VSCODE_startRow = _VSCODE_builtins.max(_VSCode_JupyterStartRow, 0) +_VSCODE_startRow = _VSCODE_builtins.max(_VSCode_JupyterStartRow, 0) # type: ignore # noqa: F821 _VSCODE_endRow = _VSCODE_builtins.min( - _VSCode_JupyterEndRow, 
_VSCODE_targetVariable["rowCount"] + _VSCode_JupyterEndRow, _VSCODE_targetVariable["rowCount"] # noqa: F821 # type: ignore ) # Assume we have a dataframe. If not, turn our eval result into a dataframe diff --git a/pythonFiles/vscode_datascience_helpers/getServerInfo.py b/pythonFiles/vscode_datascience_helpers/getServerInfo.py index 956df172212..7e1a395288e 100644 --- a/pythonFiles/vscode_datascience_helpers/getServerInfo.py +++ b/pythonFiles/vscode_datascience_helpers/getServerInfo.py @@ -9,7 +9,7 @@ from notebook.notebookapp import list_running_servers server_list = list_running_servers() - except: + except: # noqa: E722 from jupyter_server import serverapp server_list = serverapp.list_running_servers() @@ -38,7 +38,7 @@ """ import subprocess # nosec from subprocess import PIPE # nosec - import sys + import os result = subprocess.run( # nosec ["jupyter", "notebook", "list", "--jsonlist"], stdout=PIPE, stderr=PIPE diff --git a/pythonFiles/vscode_datascience_helpers/getVariableInfo/vscodeGetVariableInfo.py b/pythonFiles/vscode_datascience_helpers/getVariableInfo/vscodeGetVariableInfo.py index 90237961f5e..a69296dd644 100644 --- a/pythonFiles/vscode_datascience_helpers/getVariableInfo/vscodeGetVariableInfo.py +++ b/pythonFiles/vscode_datascience_helpers/getVariableInfo/vscodeGetVariableInfo.py @@ -10,11 +10,11 @@ def _VSCODE_getVariable(what_to_get, is_debugging, *args): arrayPageSize = 50 def truncateString(variable): - string = _VSCODE_builtins.repr(variable) - if _VSCODE_builtins.len(string) > maxStringLength: + string = _VSCODE_builtins.repr(variable) # noqa: F821 + if _VSCODE_builtins.len(string) > maxStringLength: # noqa: F821 sizeInfo = ( - "\n\nLength: " + str(_VSCODE_builtins.len(variable)) - if _VSCODE_builtins.type(variable) == _VSCODE_builtins.str + "\n\nLength: " + str(_VSCODE_builtins.len(variable)) # noqa: F821 + if _VSCODE_builtins.type(variable) == _VSCODE_builtins.str # noqa: F821 else "" ) return string[: maxStringLength - 1] + "..." 
+ sizeInfo @@ -24,7 +24,7 @@ def truncateString(variable): DisplayOptions = _VSCODE_namedtuple("DisplayOptions", ["width", "max_columns"]) def set_pandas_display_options(display_options=None): - if _VSCODE_importlib_util.find_spec("pandas") is not None: + if _VSCODE_importlib_util.find_spec("pandas") is not None: # noqa: F821 try: import pandas as _VSCODE_PD @@ -49,8 +49,8 @@ def set_pandas_display_options(display_options=None): def getValue(variable): original_display = None if ( - _VSCODE_builtins.type(variable).__name__ == "DataFrame" - and _VSCODE_importlib_util.find_spec("pandas") is not None + _VSCODE_builtins.type(variable).__name__ == "DataFrame" # noqa: F821 + and _VSCODE_importlib_util.find_spec("pandas") is not None # noqa: F821 ): original_display = set_pandas_display_options() @@ -62,7 +62,7 @@ def getValue(variable): def getPropertyNames(variable): props = [] - for prop in _VSCODE_builtins.dir(variable): + for prop in _VSCODE_builtins.dir(variable): # noqa: F821 if not prop.startswith("_"): props.append(prop) return props @@ -70,32 +70,32 @@ def getPropertyNames(variable): def getFullType(varType): module = "" if ( - _VSCODE_builtins.hasattr(varType, "__module__") + _VSCODE_builtins.hasattr(varType, "__module__") # noqa: F821 and varType.__module__ != "builtins" ): module = varType.__module__ + "." 
- if _VSCODE_builtins.hasattr(varType, "__qualname__"): + if _VSCODE_builtins.hasattr(varType, "__qualname__"): # noqa: F821 return module + varType.__qualname__ - elif _VSCODE_builtins.hasattr(varType, "__name__"): + elif _VSCODE_builtins.hasattr(varType, "__name__"): # noqa: F821 return module + varType.__name__ def getVariableDescription(variable): result = {} - varType = _VSCODE_builtins.type(variable) + varType = _VSCODE_builtins.type(variable) # noqa: F821 result["type"] = getFullType(varType) if hasattr(varType, "__mro__"): result["interfaces"] = [getFullType(t) for t in varType.__mro__] if ( - _VSCODE_builtins.hasattr(variable, "__len__") + _VSCODE_builtins.hasattr(variable, "__len__") # noqa: F821 and result["type"] in collectionTypes ): - result["count"] = _VSCODE_builtins.len(variable) + result["count"] = _VSCODE_builtins.len(variable) # noqa: F821 result["hasNamedChildren"] = ( - _VSCODE_builtins.hasattr(variable, "__dict__") - or _VSCODE_builtins.type(variable) == dict + _VSCODE_builtins.hasattr(variable, "__dict__") # noqa: F821 + or _VSCODE_builtins.type(variable) == dict # noqa: E721, F821 ) result["value"] = getValue(variable) @@ -105,17 +105,17 @@ def getChildProperty(root, propertyChain): try: variable = root for property in propertyChain: - if _VSCODE_builtins.type(property) == _VSCODE_builtins.int: - if _VSCODE_builtins.hasattr(variable, "__getitem__"): + if _VSCODE_builtins.type(property) == _VSCODE_builtins.int: # noqa: F821 + if _VSCODE_builtins.hasattr(variable, "__getitem__"): # noqa: F821 variable = variable[property] - elif _VSCODE_builtins.type(variable) == _VSCODE_builtins.set: - variable = _VSCODE_builtins.list(variable)[property] + elif _VSCODE_builtins.type(variable) == _VSCODE_builtins.set: # noqa: F821 + variable = _VSCODE_builtins.list(variable)[property] # noqa: F821 else: return None - elif _VSCODE_builtins.hasattr(variable, property): + elif _VSCODE_builtins.hasattr(variable, property): # noqa: F821 variable = 
getattr(variable, property) elif ( - _VSCODE_builtins.type(variable) == _VSCODE_builtins.dict + _VSCODE_builtins.type(variable) == _VSCODE_builtins.dict # noqa: F821 and property in variable ): variable = variable[property] @@ -141,9 +141,9 @@ def _VSCODE_getVariableDescriptions(varNames): ] if is_debugging: - return _VSCODE_json.dumps(variables) + return _VSCODE_json.dumps(variables) # noqa: F821 else: - return _VSCODE_builtins.print(_VSCODE_json.dumps(variables)) + return _VSCODE_builtins.print(_VSCODE_json.dumps(variables)) # noqa: F821 ### Get info on children of a variable reached through the given property chain def _VSCODE_getAllChildrenDescriptions(rootVarName, propertyChain, startIndex): @@ -152,17 +152,17 @@ def _VSCODE_getAllChildrenDescriptions(rootVarName, propertyChain, startIndex): return [] parent = root - if _VSCODE_builtins.len(propertyChain) > 0: + if _VSCODE_builtins.len(propertyChain) > 0: # noqa: F821 parent = getChildProperty(root, propertyChain) children = [] parentInfo = getVariableDescription(parent) if "count" in parentInfo: if parentInfo["count"] > 0: - lastItem = _VSCODE_builtins.min( + lastItem = _VSCODE_builtins.min( # noqa: F821 parentInfo["count"], startIndex + arrayPageSize ) - range = _VSCODE_builtins.range(startIndex, lastItem) + range = _VSCODE_builtins.range(startIndex, lastItem) # noqa: F821 children = [ { **getVariableDescription(getChildProperty(parent, [i])), @@ -175,17 +175,17 @@ def _VSCODE_getAllChildrenDescriptions(rootVarName, propertyChain, startIndex): ] elif parentInfo["hasNamedChildren"]: childrenNames = [] - if _VSCODE_builtins.hasattr(parent, "__dict__"): + if _VSCODE_builtins.hasattr(parent, "__dict__"): # noqa: F821 childrenNames = getPropertyNames(parent) - elif _VSCODE_builtins.type(parent) == _VSCODE_builtins.dict: - childrenNames = _VSCODE_builtins.list(parent.keys()) + elif _VSCODE_builtins.type(parent) == _VSCODE_builtins.dict: # noqa: F821 + childrenNames = _VSCODE_builtins.list(parent.keys()) # noqa: 
F821 children = [] for prop in childrenNames: child_property = getChildProperty(parent, [prop]) if ( child_property is not None - and _VSCODE_builtins.type(child_property).__name__ != "method" + and _VSCODE_builtins.type(child_property).__name__ != "method" # noqa: F821 ): child = { **getVariableDescription(child_property), @@ -196,9 +196,9 @@ def _VSCODE_getAllChildrenDescriptions(rootVarName, propertyChain, startIndex): children.append(child) if is_debugging: - return _VSCODE_json.dumps(children) + return _VSCODE_json.dumps(children) # noqa: F821 else: - return _VSCODE_builtins.print(_VSCODE_json.dumps(children)) + return _VSCODE_builtins.print(_VSCODE_json.dumps(children)) # noqa: F821 # Function to do our work. It will return the object def _VSCODE_getVariableInfo(var): @@ -210,25 +210,25 @@ def _VSCODE_getVariableInfo(var): typeName = None try: - vartype = _VSCODE_builtins.type(var) - if _VSCODE_builtins.hasattr(vartype, "__name__"): + vartype = _VSCODE_builtins.type(var) # noqa: F821 + if _VSCODE_builtins.hasattr(vartype, "__name__"): # noqa: F821 result["type"] = typeName = vartype.__name__ result["fullType"] = getFullType(vartype) except TypeError: pass # Find shape and count if available - if _VSCODE_builtins.hasattr(var, "shape"): + if _VSCODE_builtins.hasattr(var, "shape"): # noqa: F821 try: # Get a bit more restrictive with exactly what we want to count as a shape, since anything can define it if ( - _VSCODE_builtins.isinstance(var.shape, _VSCODE_builtins.tuple) + _VSCODE_builtins.isinstance(var.shape, _VSCODE_builtins.tuple) # noqa: F821 or typeName is not None and typeName == "EagerTensor" ): - _VSCODE_shapeStr = _VSCODE_builtins.str(var.shape) + _VSCODE_shapeStr = _VSCODE_builtins.str(var.shape) # noqa: F821 if ( - _VSCODE_builtins.len(_VSCODE_shapeStr) >= 3 + _VSCODE_builtins.len(_VSCODE_shapeStr) >= 3 # noqa: F821 and _VSCODE_shapeStr[0] == "(" and _VSCODE_shapeStr[-1] == ")" and "," in _VSCODE_shapeStr @@ -237,39 +237,39 @@ def 
_VSCODE_getVariableInfo(var): elif _VSCODE_shapeStr.startswith("torch.Size(["): result["shape"] = "(" + _VSCODE_shapeStr[12:-2] + ")" del _VSCODE_shapeStr - except _VSCODE_builtins.TypeError: + except _VSCODE_builtins.TypeError: # noqa: F821 pass - if _VSCODE_builtins.hasattr(var, "__len__"): + if _VSCODE_builtins.hasattr(var, "__len__"): # noqa: F821 try: - result["count"] = _VSCODE_builtins.len(var) - except _VSCODE_builtins.TypeError: + result["count"] = _VSCODE_builtins.len(var) # noqa: F821 + except _VSCODE_builtins.TypeError: # noqa: F821 pass # return our json object as a string if is_debugging: - return _VSCODE_json.dumps(result) + return _VSCODE_json.dumps(result) # noqa: F821 else: - return _VSCODE_builtins.print(_VSCODE_json.dumps(result)) + return _VSCODE_builtins.print(_VSCODE_json.dumps(result)) # noqa: F821 def _VSCODE_getVariableProperties(var, listOfAttributes): result = { - attr: _VSCODE_builtins.repr(_VSCODE_builtins.getattr(var, attr)) + attr: _VSCODE_builtins.repr(_VSCODE_builtins.getattr(var, attr)) # noqa: F821 for attr in listOfAttributes - if _VSCODE_builtins.hasattr(var, attr) + if _VSCODE_builtins.hasattr(var, attr) # noqa: F821 } if is_debugging: - return _VSCODE_json.dumps(result) + return _VSCODE_json.dumps(result) # noqa: F821 else: - return _VSCODE_builtins.print(_VSCODE_json.dumps(result)) + return _VSCODE_builtins.print(_VSCODE_json.dumps(result)) # noqa: F821 def _VSCODE_getVariableTypes(varnames): # Map with key: varname and value: vartype result = [] for name in varnames: try: - vartype = _VSCODE_builtins.type(globals()[name]) - if _VSCODE_builtins.hasattr(vartype, "__name__"): + vartype = _VSCODE_builtins.type(globals()[name]) # noqa: F821 + if _VSCODE_builtins.hasattr(vartype, "__name__"): # noqa: F821 result.append( { "name": name, @@ -277,22 +277,22 @@ def _VSCODE_getVariableTypes(varnames): "fullType": getFullType(vartype), } ) - except _VSCODE_builtins.TypeError: + except _VSCODE_builtins.TypeError: # noqa: F821 pass if 
is_debugging: - return _VSCODE_json.dumps(result) + return _VSCODE_json.dumps(result) # noqa: F821 else: - return _VSCODE_builtins.print(_VSCODE_json.dumps(result)) + return _VSCODE_builtins.print(_VSCODE_json.dumps(result)) # noqa: F821 def _VSCODE_getVariableSummary(variable): if variable is None: return None # check if the variable is a dataframe if ( - _VSCODE_builtins.type(variable).__name__ == "DataFrame" - and _VSCODE_importlib_util.find_spec("pandas") is not None + _VSCODE_builtins.type(variable).__name__ == "DataFrame" # noqa: F821 + and _VSCODE_importlib_util.find_spec("pandas") is not None # noqa: F821 ): - return _VSCODE_builtins.print(variable.info()) + return _VSCODE_builtins.print(variable.info()) # noqa: F821 return None diff --git a/pythonFiles/vscode_datascience_helpers/getVariableInfo/vscodeGetVariablesForProvider.py b/pythonFiles/vscode_datascience_helpers/getVariableInfo/vscodeGetVariablesForProvider.py index 8313b2d917d..c4463789bb9 100644 --- a/pythonFiles/vscode_datascience_helpers/getVariableInfo/vscodeGetVariablesForProvider.py +++ b/pythonFiles/vscode_datascience_helpers/getVariableInfo/vscodeGetVariablesForProvider.py @@ -325,7 +325,7 @@ def _repr_str(self, obj, level): yield part1 yield "..." yield part2 - except: + except: # noqa: E722 # This shouldn't really happen, but let's play it safe. # exception('Error getting string representation to show.') for part in self._repr_obj( diff --git a/pythonFiles/vscode_datascience_helpers/jupyter_nbInstalled.py b/pythonFiles/vscode_datascience_helpers/jupyter_nbInstalled.py index a56562c61be..e3ccf4a77df 100644 --- a/pythonFiles/vscode_datascience_helpers/jupyter_nbInstalled.py +++ b/pythonFiles/vscode_datascience_helpers/jupyter_nbInstalled.py @@ -2,7 +2,7 @@ # Licensed under the MIT License. 
try: - from notebook import notebookapp as app + from notebook import notebookapp as app # noqa: F401 print("Available") except Exception: diff --git a/pythonFiles/vscode_datascience_helpers/kernel/addRunCellHook.py b/pythonFiles/vscode_datascience_helpers/kernel/addRunCellHook.py index c7237da305a..2a78b2db89e 100644 --- a/pythonFiles/vscode_datascience_helpers/kernel/addRunCellHook.py +++ b/pythonFiles/vscode_datascience_helpers/kernel/addRunCellHook.py @@ -30,7 +30,7 @@ def wrapper(*args, **kwargs): if store_history: del os.environ["IPYKERNEL_CELL_NAME"] return result - except: + except: # noqa: E722 return old_func(*args, **kwargs) return _VSCODE_types.MethodType(wrapper, wrapped_func.__self__) diff --git a/pythonFiles/vscode_datascience_helpers/kernel_interrupt_daemon.py b/pythonFiles/vscode_datascience_helpers/kernel_interrupt_daemon.py index b653e4a54c1..b7f06d198dc 100644 --- a/pythonFiles/vscode_datascience_helpers/kernel_interrupt_daemon.py +++ b/pythonFiles/vscode_datascience_helpers/kernel_interrupt_daemon.py @@ -336,7 +336,7 @@ def handle_command(command, id, line): if command == "INITIALIZE_INTERRUPT": try: handle = interrupter.initialize_interrupt() - except: + except: # noqa: E722 - # If we fail to initilize the interrupt, then try again. + # If we fail to initialize the interrupt, then try again. handle = interrupter.initialize_interrupt() @@ -349,7 +349,7 @@ def handle_command(command, id, line): print(f"DISPOSE_INTERRUPT_HANDLE:{id}") else: logging.warning("Unknown command: '%s' for line '%s'", command, line) - except: + except: # noqa: E722 # Do not change the format of this message (used in parent process).
logging.exception(f"ERROR: handling command :{command}:{id}") @@ -357,7 +357,7 @@ try: line = line.strip() handle_command(line.split(":")[0], int(line.split(":")[1]), line) - except: + except: # noqa: E722 logging.exception(f"Error in line {line}") diff --git a/pythonFiles/vscode_datascience_helpers/tests/logParser.py b/pythonFiles/vscode_datascience_helpers/tests/logParser.py index 767f837c513..a05e3a6a485 100644 --- a/pythonFiles/vscode_datascience_helpers/tests/logParser.py +++ b/pythonFiles/vscode_datascience_helpers/tests/logParser.py @@ -1,11 +1,10 @@ from io import TextIOWrapper -import sys import argparse import os os.system("color") -from pathlib import Path -import re +from pathlib import Path # noqa: E402 +import re # noqa: E402 parser = argparse.ArgumentParser(description="Parse a test log into its parts") parser.add_argument("testlog", type=str, nargs=1, help="Log to parse") @@ -63,14 +62,14 @@ def splitByPid(testlog): pid = int(match.group(1)) # See if we've created a log for this pid or not - if not pid in pids: + if pid not in pids: pids.add(pid) logFile = "{}_{}.log".format(baseFile, pid) print("Writing to new log: " + logFile) logs[pid] = Path(logFile).open(mode="w") # Add this line to the log - if pid != None: + if pid is not None: logs[pid].write(line) # Close all of the open logs for key in logs: