diff --git a/.github/scripts/install_protoc.sh b/.github/scripts/install_protoc.sh index 090e8e4922c..f13e93b3ee2 100755 --- a/.github/scripts/install_protoc.sh +++ b/.github/scripts/install_protoc.sh @@ -71,4 +71,7 @@ install() { install +pip install mypy-protobuf +echo "mypy-protobuf version: $(protoc-gen-mypy --version)" + echo "${SCRIPT_NAME} done." diff --git a/.github/workflows/precommit.yml b/.github/workflows/precommit.yml index 8c550aac7ab..d8b630f32b5 100644 --- a/.github/workflows/precommit.yml +++ b/.github/workflows/precommit.yml @@ -593,19 +593,13 @@ jobs: python-version: '3.9' - name: Upgrade setuptools and pip run: python -m pip install -U setuptools pip + - name: Install tox + run: python -m pip install tox - name: Install protoc shell: bash run: .github/scripts/install_protoc.sh - - name: Install Snowpark - run: python -m pip install ".[modin-development]" - - name: Install Sphinx - run: python -m pip install sphinx - name: Build document - working-directory: docs - # treat warning as failure but complete the entire process - run: | - make clean - make html SPHINXOPTS="-W --keep-going" + run: python -m tox -e docs - name: Upload html files uses: actions/upload-artifact@v4 with: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7e6c6fe3645..f121cac2adc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -91,6 +91,7 @@ repos: - id: mypy files: > (?x)^( + src/snowflake/snowpark/_internal/ast/.*\.py| src/snowflake/snowpark/modin/pandas/indexing.py| src/snowflake/snowpark/modin/plugin/_internal/.*\.py| src/snowflake/snowpark/modin/pandas/snow_partition_iterator.py| @@ -103,9 +104,12 @@ repos: - --disallow-incomplete-defs additional_dependencies: - types-requests + - types-python-dateutil - types-urllib3 - types-setuptools - types-pyOpenSSL - types-setuptools + - mypy-protobuf + - typed-ast - pytest - numpy diff --git a/ci/test_fips.sh b/ci/test_fips.sh index 15de21d033f..4abd4ac537f 100755 --- a/ci/test_fips.sh +++ b/ci/test_fips.sh @@ -14,7 +14,7 @@ export PATH=/usr/local/bin:$PATH export LD_LIBRARY_PATH=/usr/local/lib64/:/usr/local/lib/:$LD_LIBRARY_PATH pip install -U setuptools pip -pip install protoc-wheel-0==21.1 +pip install protoc-wheel-0==21.1 #mypy-protobuf pip install "${SNOWPARK_WHL}[pandas,secure-local-storage,development,opentelemetry]" pip install "pytest-timeout" diff --git a/scripts/jenkins_regress.sh b/scripts/jenkins_regress.sh index 5c41717aa5b..81e835fd570 100644 --- a/scripts/jenkins_regress.sh +++ b/scripts/jenkins_regress.sh @@ -18,7 +18,7 @@ exit_code_decorator(){ gpg --quiet --batch --yes --decrypt --passphrase="$GPG_KEY" --output "tests/parameters.py" scripts/parameters.py.gpg # Install protoc -pip install protoc-wheel-0==21.1 +pip install protoc-wheel-0==21.1 #mypy-protobuf # Run linter, Python 3.8 test and code coverage jobs exit_code_decorator "python -m tox -c $WORKING_DIR" -e notdoctest diff --git a/scripts/jenkins_regress_snowpandas.sh b/scripts/jenkins_regress_snowpandas.sh index c7b5c1565e8..4fc68200fa7 100644 --- a/scripts/jenkins_regress_snowpandas.sh +++ b/scripts/jenkins_regress_snowpandas.sh @@ -15,7 +15,7 @@ gpg --quiet --batch --yes --decrypt --passphrase="$GPG_KEY" --output "tests/para python -m pip install tox # Install protoc -pip install protoc-wheel-0==21.1 +pip install protoc-wheel-0==21.1 #mypy-protobuf # Run snowpandas tests python -m tox -c $WORKING_DIR -e snowparkpandasjenkins-modin diff --git a/setup.py b/setup.py index 88740ec07a3..8a7d620bbd9 100644 --- a/setup.py +++ b/setup.py @@ -57,6 
+57,7 @@ "pytest-assume", # sql counter check "decorator", # sql counter check "protoc-wheel-0==21.1", # Protocol buffer compiler, for Snowpark IR + "mypy-protobuf", # used in generating typed Python code from protobuf for Snowpark IR "lxml", # used in read_xml tests ] @@ -81,7 +82,7 @@ if protoc is None: sys.stderr.write( - "protoc is not installed nor found. Please install the binary package, e.g., `pip install protoc-wheel-0==21.1`\n" + "protoc is not installed nor found. Please install the binary package, e.g., `pip install protoc-wheel-0==21.1 mypy-protobuf`\n" ) sys.exit(-1) @@ -115,7 +116,7 @@ def generate_proto(source): protoc, f"--proto_path={proto_dir}", f"--python_out={output_dir}", - f"--pyi_out={output_dir}", + f"--mypy_out={output_dir}", source, ] if subprocess.call(protoc_command) != 0: diff --git a/src/snowflake/snowpark/_internal/ast/batch.py b/src/snowflake/snowpark/_internal/ast/batch.py index de3c6a4a8db..a45a3a4d027 100644 --- a/src/snowflake/snowpark/_internal/ast/batch.py +++ b/src/snowflake/snowpark/_internal/ast/batch.py @@ -64,9 +64,9 @@ def __init__(self, session: Session) -> None: self._init_batch() # Track callables in this dict (memory id -> TrackedCallable). - self._callables = {} + self._callables: dict[int, TrackedCallable] = {} - def reset_id_gen(self): + def reset_id_gen(self) -> None: """Resets the ID generator.""" self._id_gen = itertools.count(start=1) @@ -84,7 +84,7 @@ def assign(self, symbol: Optional[str] = None) -> proto.Assign: stmt.assign.symbol.value = symbol if isinstance(symbol, str) else "" return stmt.assign - def eval(self, target: proto.Assign): + def eval(self, target: proto.Assign) -> None: """ Creates a new evaluation statement. @@ -102,7 +102,7 @@ def flush(self) -> SerializedBatch: self._init_batch() return SerializedBatch(req_id, batch) - def _init_batch(self): + def _init_batch(self) -> None: # Reset the AST batch by initializing a new request. self._request_id = AstBatch.generate_request_id() # Generate a new unique ID. self._request = proto.Request() diff --git a/src/snowflake/snowpark/_internal/ast/utils.py b/src/snowflake/snowpark/_internal/ast/utils.py index 980d794272e..58a2b312802 100644 --- a/src/snowflake/snowpark/_internal/ast/utils.py +++ b/src/snowflake/snowpark/_internal/ast/utils.py @@ -60,7 +60,7 @@ # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. -def debug_check_missing_ast(ast, container) -> None: # pragma: no cover +def debug_check_missing_ast(ast, container) -> None: # type: ignore[no-untyped-def] # pragma: no cover """ Debug check for missing AST. This is invoked with various arguments that are expected to be non-NULL if the AST is emitted correctly. @@ -80,14 +80,14 @@ def __init__(self) -> None: # pragma: no cover self.symbols: Optional[Union[str, List[str]]] = None # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. 
- def visit_Assign(self, node) -> None: # pragma: no cover + def visit_Assign(self, node: ast.Assign) -> None: # pragma: no cover assert len(node.targets) == 1 target = node.targets[0] if isinstance(target, ast.Name): self.symbols = target.id elif isinstance(target, ast.Tuple): - self.symbols = [name.id for name in target.elts] + self.symbols = [name.id for name in target.elts] # type: ignore[attr-defined] # TODO(SNOW-1491199) # "expr" has no attribute "id" else: raise ValueError(f"Unsupported target {ast.dump(target)}") @@ -136,10 +136,10 @@ def fill_timezone( if obj.tzinfo is not None: utc_offset = obj.tzinfo.utcoffset(datetime_val) if utc_offset is not None: - ast.tz.offset_seconds = int(utc_offset.total_seconds()) + ast.tz.offset_seconds = int(utc_offset.total_seconds()) # type: ignore[attr-defined] tz = obj.tzinfo.tzname(datetime_val) if tz is not None: - ast.tz.name.value = tz + ast.tz.name.value = tz # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "tz" else: # tzinfo=None means that the local timezone will be used. # Retrieve name of the local timezone and encode as part of the AST. @@ -148,21 +148,21 @@ def fill_timezone( # environment variable for test. Cf. override_time_zone in test_ast_driver.py for details. tz_env = os.environ.get("TZ") if tz_env: - tz = dateutil.tz.gettz(tz_env) - tz_name = tz.tzname(datetime.datetime.now()) - ast.tz.offset_seconds = int(tz.utcoffset(datetime_val).total_seconds()) + tz = dateutil.tz.gettz(tz_env) # type: ignore[assignment] # TODO(SNOW-1491199) # Incompatible types in assignment (expression has type "Optional[tzinfo]", variable has type "Optional[str]") + tz_name = tz.tzname(datetime.datetime.now()) # type: ignore[union-attr] # TODO(SNOW-1491199) # Item "str" of "Optional[str]" has no attribute "tzname", Item "None" of "Optional[str]" has no attribute "tzname" + ast.tz.offset_seconds = int(tz.utcoffset(datetime_val).total_seconds()) # type: ignore[attr-defined, union-attr] # TODO(SNOW-1491199) # "Expr" has no attribute "tz", Item "str" of "Optional[str]" has no attribute "utcoffset", Item "None" of "Optional[str]" has no attribute "utcoffset" else: logging.warn( "Assuming UTC timezone for Windows, but actual timezone may be different." ) - ast.tz.offset_seconds = int(tzlocal().utcoffset(obj).total_seconds()) + ast.tz.offset_seconds = int(tzlocal().utcoffset(obj).total_seconds()) # type: ignore[arg-type, attr-defined, union-attr] # TODO(SNOW-1491199) # "Expr" has no attribute "tz", Item "None" of "Optional[timedelta]" has no attribute "total_seconds", Argument 1 to "utcoffset" of "tzlocal" has incompatible type "Union[datetime, time]"; expected "Optional[datetime]" tz_name = datetime.datetime.now(tzlocal()).tzname() else: - ast.tz.offset_seconds = int( - tzlocal().utcoffset(datetime_val).total_seconds() + ast.tz.offset_seconds = int( # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "tz" + tzlocal().utcoffset(datetime_val).total_seconds() # type: ignore[union-attr] # TODO(SNOW-1491199) # Item "None" of "Optional[timedelta]" has no attribute "total_seconds" ) tz_name = datetime.datetime.now(tzlocal()).tzname() - ast.tz.name.value = tz_name + ast.tz.name.value = tz_name # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "tz" # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. 
@@ -182,7 +182,7 @@ def build_expr_from_python_val( from snowflake.snowpark.row import Row if obj is None: - with_src_position(expr_builder.null_val) + with_src_position(expr_builder.null_val) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "NullVal"; expected "Expr" # Keep objects most high up in the class hierarchy first, i.e. a Row is a tuple. elif isinstance(obj, Column): @@ -191,44 +191,44 @@ def build_expr_from_python_val( if isinstance(obj._expression, Literal): expr_builder.CopyFrom(snowpark_expression_to_ast(obj._expression)) else: - expr_builder.CopyFrom(obj._ast) + expr_builder.CopyFrom(obj._ast) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Column" has no attribute "_ast"; maybe "_cast"? elif isinstance(obj, Row): - ast = with_src_position(expr_builder.sp_row) + ast = with_src_position(expr_builder.sp_row) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "SpRow"; expected "Expr" if hasattr(obj, "_named_values") and obj._named_values is not None: for field in obj._fields: - ast.names.list.append(field) - build_expr_from_python_val(ast.vs.add(), obj._named_values[field]) + ast.names.list.append(field) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "names" + build_expr_from_python_val(ast.vs.add(), obj._named_values[field]) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "vs" else: for field in obj: - build_expr_from_python_val(ast.vs.add(), field) + build_expr_from_python_val(ast.vs.add(), field) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "vs" elif isinstance(obj, bool): - ast = with_src_position(expr_builder.bool_val) - ast.v = obj + ast = with_src_position(expr_builder.bool_val) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "BoolVal"; expected "Expr" + ast.v = obj # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "v" elif isinstance(obj, int): - ast = with_src_position(expr_builder.int64_val) - ast.v = obj + ast = with_src_position(expr_builder.int64_val) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "Int64Val"; expected "Expr" + ast.v = obj # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "v" elif isinstance(obj, float): - ast = with_src_position(expr_builder.float64_val) - ast.v = obj + ast = with_src_position(expr_builder.float64_val) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "Float64Val"; expected "Expr" + ast.v = obj # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "v" elif isinstance(obj, str): - ast = with_src_position(expr_builder.string_val) - ast.v = obj + ast = with_src_position(expr_builder.string_val) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "StringVal"; expected "Expr" + ast.v = obj # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "v" elif isinstance(obj, bytes): - ast = with_src_position(expr_builder.binary_val) - ast.v = obj + ast = with_src_position(expr_builder.binary_val) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "BinaryVal"; expected "Expr" + ast.v = obj # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "v" elif 
isinstance(obj, bytearray): - ast = with_src_position(expr_builder.binary_val) - ast.v = bytes(obj) + ast = with_src_position(expr_builder.binary_val) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "BinaryVal"; expected "Expr" + ast.v = bytes(obj) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "v" elif isinstance(obj, decimal.Decimal): - ast = with_src_position(expr_builder.big_decimal_val) + ast = with_src_position(expr_builder.big_decimal_val) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "BigDecimalVal"; expected "Expr" dec_tuple = obj.as_tuple() unscaled_val = reduce(lambda val, digit: val * 10 + digit, dec_tuple.digits) if dec_tuple.sign != 0: @@ -238,67 +238,67 @@ def build_expr_from_python_val( # in full bytes. Therefore, round up to fullest byte. To restore the sign, add another byte. req_bytes = unscaled_val.bit_length() // 8 + 1 - ast.unscaled_value = unscaled_val.to_bytes(req_bytes, "big", signed=True) - ast.scale = dec_tuple.exponent + ast.unscaled_value = unscaled_val.to_bytes(req_bytes, "big", signed=True) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "unscaled_value" + ast.scale = dec_tuple.exponent # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "scale" elif isinstance(obj, datetime.datetime): - ast = with_src_position(expr_builder.python_timestamp_val) + ast = with_src_position(expr_builder.python_timestamp_val) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "PythonTimestampVal"; expected "Expr" fill_timezone(ast, obj) - ast.year = obj.year - ast.month = obj.month - ast.day = obj.day - ast.hour = obj.hour - ast.minute = obj.minute - ast.second = obj.second - ast.microsecond = obj.microsecond + ast.year = obj.year # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "year" + ast.month = obj.month # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "month" + ast.day = obj.day # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "day" + ast.hour = obj.hour # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "hour" + ast.minute = obj.minute # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "minute" + ast.second = obj.second # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "second" + ast.microsecond = obj.microsecond # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "microsecond" elif isinstance(obj, datetime.date): - ast = with_src_position(expr_builder.python_date_val) - ast.year = obj.year - ast.month = obj.month - ast.day = obj.day + ast = with_src_position(expr_builder.python_date_val) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "PythonDateVal"; expected "Expr" + ast.year = obj.year # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "year" + ast.month = obj.month # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "month" + ast.day = obj.day # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "day" elif isinstance(obj, datetime.time): - ast = with_src_position(expr_builder.python_time_val) + ast = with_src_position(expr_builder.python_time_val) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to 
"with_src_position" has incompatible type "PythonTimeVal"; expected "Expr" fill_timezone(ast, obj) - ast.hour = obj.hour - ast.minute = obj.minute - ast.second = obj.second - ast.microsecond = obj.microsecond + ast.hour = obj.hour # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "hour" + ast.minute = obj.minute # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "minute" + ast.second = obj.second # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "second" + ast.microsecond = obj.microsecond # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "microsecond" elif isinstance(obj, dict): - ast = with_src_position(expr_builder.seq_map_val) + ast = with_src_position(expr_builder.seq_map_val) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "SeqMapVal"; expected "Expr" for key, value in obj.items(): - kv_ast = ast.kvs.add() + kv_ast = ast.kvs.add() # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "kvs" build_expr_from_python_val(kv_ast.vs.add(), key) build_expr_from_python_val(kv_ast.vs.add(), value) elif isinstance(obj, list): - ast = with_src_position(expr_builder.list_val) + ast = with_src_position(expr_builder.list_val) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "ListVal"; expected "Expr" for v in obj: - build_expr_from_python_val(ast.vs.add(), v) + build_expr_from_python_val(ast.vs.add(), v) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "vs" elif isinstance(obj, tuple): - ast = with_src_position(expr_builder.tuple_val) + ast = with_src_position(expr_builder.tuple_val) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "TupleVal"; expected "Expr" for v in obj: - build_expr_from_python_val(ast.vs.add(), v) + build_expr_from_python_val(ast.vs.add(), v) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "vs" elif isinstance(obj, snowflake.snowpark.dataframe.DataFrame): - ast = with_src_position(expr_builder.sp_dataframe_ref) + ast = with_src_position(expr_builder.sp_dataframe_ref) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "SpDataframeRef"; expected "Expr" assert ( obj._ast_id is not None ), "Dataframe object to encode as part of AST does not have an id assigned. Missing AST for object or previous operation?" - ast.id.bitfield1 = obj._ast_id + ast.id.bitfield1 = obj._ast_id # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "id" elif isinstance(obj, snowflake.snowpark.table_function.TableFunctionCall): raise NotImplementedError( "TODO SNOW-1629946: Implement TableFunctionCall with args." 
) elif isinstance(obj, snowflake.snowpark._internal.type_utils.DataType): - ast = with_src_position(expr_builder.sp_datatype_val) - obj._fill_ast(ast.datatype) + ast = with_src_position(expr_builder.sp_datatype_val) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "SpDatatypeVal"; expected "Expr" + obj._fill_ast(ast.datatype) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "DataType" has no attribute "_fill_ast", "Expr" has no attribute "datatype" elif isinstance(obj, snowflake.snowpark._internal.analyzer.expression.Literal): build_expr_from_python_val(expr_builder, obj.value) else: @@ -316,8 +316,8 @@ def build_proto_from_struct_type( expr.structured = schema.structured for field in schema.fields: ast_field = expr.fields.add() - field.column_identifier._fill_ast(ast_field.column_identifier) - field.datatype._fill_ast(ast_field.data_type) + field.column_identifier._fill_ast(ast_field.column_identifier) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "ColumnIdentifier" has no attribute "_fill_ast" + field.datatype._fill_ast(ast_field.data_type) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "DataType" has no attribute "_fill_ast" ast_field.nullable = field.nullable @@ -335,9 +335,9 @@ def _set_fn_name( ValueError: Raised if the function name is not a string or an iterable of strings. """ if isinstance(name, str): - fn.name.fn_name_flat.name = name + fn.name.fn_name_flat.name = name # type: ignore[attr-defined] # TODO(SNOW-1491199) # "FnNameRefExpr" has no attribute "name" elif isinstance(name, Iterable): - fn.name.fn_name_structured.name.extend(name) + fn.name.fn_name_structured.name.extend(name) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "FnNameRefExpr" has no attribute "name" else: raise ValueError( f"Invalid function name: {name}. The function name must be a string or an iterable of strings." @@ -345,7 +345,7 @@ def _set_fn_name( # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. -def build_sp_table_name( +def build_sp_table_name( # type: ignore[no-untyped-def] # TODO(SNOW-1491199) # Function is missing a return type annotation expr_builder: proto.SpTableName, name: Union[str, Iterable[str]] ): # pragma: no cover if isinstance(name, str): @@ -373,8 +373,8 @@ def build_builtin_fn_apply( **kwargs: Keyword arguments to pass to function. 
""" - expr = with_src_position(ast.apply_expr) - _set_fn_name(builtin_name, expr.fn.builtin_fn) + expr = with_src_position(ast.apply_expr) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "ApplyExpr"; expected "Expr" + _set_fn_name(builtin_name, expr.fn.builtin_fn) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "fn" build_fn_apply_args(ast, *args, **kwargs) @@ -384,8 +384,8 @@ def build_udf_apply( udf_id: int, *args: Tuple[Union[proto.Expr, Any]], ) -> None: # pragma: no cover - expr = with_src_position(ast.apply_expr) - expr.fn.sp_fn_ref.id.bitfield1 = udf_id + expr = with_src_position(ast.apply_expr) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "ApplyExpr"; expected "Expr" + expr.fn.sp_fn_ref.id.bitfield1 = udf_id # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "fn" build_fn_apply_args(ast, *args) @@ -395,23 +395,23 @@ def build_udaf_apply( udaf_id: int, *args: Tuple[Union[proto.Expr, Any]], ) -> None: # pragma: no cover - expr = with_src_position(ast.apply_expr) - expr.fn.sp_fn_ref.id.bitfield1 = udaf_id + expr = with_src_position(ast.apply_expr) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "ApplyExpr"; expected "Expr" + expr.fn.sp_fn_ref.id.bitfield1 = udaf_id # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "fn" build_fn_apply_args(ast, *args) # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. -def build_udtf_apply( +def build_udtf_apply( # type: ignore[no-untyped-def] # TODO(SNOW-1491199) # Function is missing a type annotation for one or more arguments ast: proto.Expr, udtf_id: int, *args: Tuple[Union[proto.Expr, Any]], **kwargs ) -> None: # pragma: no cover """Encodes a call to UDTF into ast as a Snowpark IR expression.""" - expr = with_src_position(ast.apply_expr) - expr.fn.sp_fn_ref.id.bitfield1 = udtf_id + expr = with_src_position(ast.apply_expr) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "ApplyExpr"; expected "Expr" + expr.fn.sp_fn_ref.id.bitfield1 = udtf_id # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "fn" build_fn_apply_args(ast, *args, **kwargs) # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. -def build_sproc_apply( +def build_sproc_apply( # type: ignore[no-untyped-def] # TODO(SNOW-1491199) # Function is missing a type annotation for one or more arguments ast: proto.Expr, sproc_id: int, statement_params: Optional[Dict[str, str]] = None, @@ -419,8 +419,8 @@ def build_sproc_apply( **kwargs, ) -> None: # pragma: no cover """Encodes a call to stored procedure into ast as a Snowpark IR expression.""" - expr = with_src_position(ast.apply_expr) - expr.fn.sp_fn_ref.id.bitfield1 = sproc_id + expr = with_src_position(ast.apply_expr) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "ApplyExpr"; expected "Expr" + expr.fn.sp_fn_ref.id.bitfield1 = sproc_id # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "fn" build_fn_apply_args(ast, *args, **kwargs) @@ -443,8 +443,8 @@ def build_call_table_function_apply( **kwargs: Keyword arguments to pass to function. 
""" - expr = with_src_position(ast.apply_expr) - _set_fn_name(name, expr.fn.call_table_function_expr) + expr = with_src_position(ast.apply_expr) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "ApplyExpr"; expected "Expr" + _set_fn_name(name, expr.fn.call_table_function_expr) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "fn" build_fn_apply_args(ast, *args, **kwargs) @@ -470,17 +470,17 @@ def build_indirect_table_fn_apply( **kwargs: Keyword arguments to pass to function. """ - expr = with_src_position(ast.apply_expr) + expr = with_src_position(ast.apply_expr) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "ApplyExpr"; expected "Expr" if isinstance( - func, (snowflake.snowpark.table_function.TableFunctionCall, Callable) + func, (snowflake.snowpark.table_function.TableFunctionCall, Callable) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 2 to "isinstance" has incompatible type "tuple[type[TableFunctionCall], ]"; expected "_ClassInfo" ): - stmt = func._ast_stmt - fn_expr = expr.fn.indirect_table_fn_id_ref + stmt = func._ast_stmt # type: ignore[union-attr] # TODO(SNOW-1491199) # Item "str" of "Union[str, list[str], TableFunctionCall, Callable[..., Any]]" has no attribute "_ast_stmt", Item "list[str]" of "Union[str, list[str], TableFunctionCall, Callable[..., Any]]" has no attribute "_ast_stmt", Item "TableFunctionCall" of "Union[str, list[str], TableFunctionCall, Callable[..., Any]]" has no attribute "_ast_stmt", Item "function" of "Union[str, list[str], TableFunctionCall, Callable[..., Any]]" has no attribute "_ast_stmt" + fn_expr = expr.fn.indirect_table_fn_id_ref # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "fn" fn_expr.id.bitfield1 = stmt.var_id.bitfield1 else: - fn_expr = expr.fn.indirect_table_fn_name_ref - _set_fn_name(func, fn_expr) - build_fn_apply_args(ast, *func_arguments, **func_named_arguments) + fn_expr = expr.fn.indirect_table_fn_name_ref # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "fn" + _set_fn_name(func, fn_expr) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "_set_fn_name" has incompatible type "Union[str, list[str], TableFunctionCall, Callable[..., Any]]"; expected "Union[str, Iterable[str]]" + build_fn_apply_args(ast, *func_arguments, **func_named_arguments) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 2 to "build_fn_apply_args" has incompatible type "*tuple[Union[Column, str], ...]"; expected "tuple[Union[Expr, Any]]", Argument 3 to "build_fn_apply_args" has incompatible type "**dict[str, Union[Column, str]]"; expected "dict[str, Union[Expr, Any]]" # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. @@ -505,7 +505,7 @@ def build_fn_apply_args( # Special case: _ast is None but arg is Column(LITERAL). 
if ( - arg._ast is None + arg._ast is None # type: ignore[attr-defined] # TODO(SNOW-1491199) # "tuple[Union[Expr, Any]]" has no attribute "_ast" and isinstance(arg, snowflake.snowpark.Column) and isinstance( arg._expression, @@ -513,19 +513,19 @@ def build_fn_apply_args( ) ): build_expr_from_python_val(expr.pos_args.add(), arg._expression.value) - elif arg._ast is None and isinstance(arg, snowflake.snowpark.Column): + elif arg._ast is None and isinstance(arg, snowflake.snowpark.Column): # type: ignore[attr-defined] # TODO(SNOW-1491199) # "tuple[Union[Expr, Any]]" has no attribute "_ast" expr.pos_args.append(snowpark_expression_to_ast(arg._expression)) else: assert ( - arg._ast + arg._ast # type: ignore[attr-defined] # TODO(SNOW-1491199) # "tuple[Union[Expr, Any]]" has no attribute "_ast" ), f"Object {arg} has member _ast=None set. Expected valid AST." - expr.pos_args.append(arg._ast) + expr.pos_args.append(arg._ast) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "tuple[Union[Expr, Any]]" has no attribute "_ast" else: pos_arg = proto.Expr() build_expr_from_python_val(pos_arg, arg) expr.pos_args.append(pos_arg) - for name, arg in kwargs.items(): + for name, arg in kwargs.items(): # type: ignore[assignment] # TODO(SNOW-1491199) # Incompatible types in assignment (expression has type "dict[str, Union[Expr, Any]]", variable has type "tuple[Union[Expr, Any]]") kwarg = proto.Tuple_String_Expr() kwarg._1 = name if isinstance(arg, proto.Expr): @@ -538,23 +538,25 @@ def build_fn_apply_args( expr.named_args.append(kwarg) -def set_builtin_fn_alias(ast: proto.Expr, alias: str) -> None: +# TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. +def set_builtin_fn_alias(ast: proto.Expr, alias: str) -> None: # pragma: no cover """ Set the alias for a builtin function call. Requires that the expression has an ApplyExpr with a BuiltinFn. Args: ast: Expr node to fill. alias: Alias to set for the builtin function. """ - _set_fn_name(alias, ast.apply_expr.fn.builtin_fn) + _set_fn_name(alias, ast.apply_expr.fn.builtin_fn) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 2 to "_set_fn_name" has incompatible type "BuiltinFn"; expected "FnNameRefExpr" +# TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. def with_src_position( expr_ast: proto.Expr, assign: Optional[proto.Assign] = None, caller_frame_depth: Optional[int] = None, debug: bool = False, target_idx: Optional[int] = None, -) -> proto.Expr: +) -> proto.Expr: # pragma: no cover """ Sets the src_position on the supplied Expr AST node and returns it. N.B. This function assumes it's always invoked from a public API, meaning that the caller's caller @@ -566,7 +568,7 @@ def with_src_position( If this is not provided, the filename for each frame is probed to find the code of interest. target_idx: If an integer, tries to extract from an assign statement the {target_idx}th symbol. If None, assumes a single target. 
""" - src = expr_ast.src + src = expr_ast.src # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "src" frame = inspect.currentframe() # Best practices for the inspect library are to remove references to frame objects once done with them @@ -591,7 +593,7 @@ def with_src_position( # If the frame is not None, one guarantee we have is that two frames back is the caller's caller, and this # frame contains the code of interest from the user if they are using a simple public API with no further # nesting or indirection. This is the most common case. - frame, prev_frame = frame.f_back.f_back, frame.f_back + frame, prev_frame = frame.f_back.f_back, frame.f_back # type: ignore[union-attr] # TODO(SNOW-1491199) # Item "None" of "Optional[FrameType]" has no attribute "f_back", Item "None" of "Union[FrameType, Any, None]" has no attribute "f_back" while ( frame is not None and SNOWPARK_LIB_PATH in Path(frame.f_code.co_filename).parents @@ -606,9 +608,9 @@ def with_src_position( curr_frame_depth += 1 if debug: - last_snowpark_file = prev_frame.f_code.co_filename + last_snowpark_file = prev_frame.f_code.co_filename # type: ignore[union-attr] # TODO(SNOW-1491199) # Item "None" of "Union[FrameType, Any, None]" has no attribute "f_code" assert SNOWPARK_LIB_PATH in Path(last_snowpark_file).parents - first_non_snowpark_file = frame.f_code.co_filename + first_non_snowpark_file = frame.f_code.co_filename # type: ignore[union-attr] # TODO(SNOW-1491199) # Item "None" of "Optional[FrameType]" has no attribute "f_code" assert SNOWPARK_LIB_PATH not in Path(first_non_snowpark_file).parents # Once we've stepped out of the snowpark package, we should be in the code of interest. @@ -642,7 +644,7 @@ def with_src_position( if assign is not None: if code := frame_info.code_context: - source_line = code[frame_info.index] + source_line = code[frame_info.index] # type: ignore[index] # TODO(SNOW-1491199) # Invalid index type "Optional[int]" for "list[str]"; expected type "SupportsIndex" symbols = extract_assign_targets(source_line) if symbols is not None: if target_idx is not None: @@ -669,12 +671,12 @@ def build_expr_from_snowpark_column( Raises: NotImplementedError: Raised if the Column object does not have an AST set and FAIL_ON_MISSING_AST is True. """ - if value._ast is None and FAIL_ON_MISSING_AST: + if value._ast is None and FAIL_ON_MISSING_AST: # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Column" has no attribute "_ast"; maybe "_cast"? raise NotImplementedError( f"Column({value._expression})._ast is None due to the use of a Snowpark API which does not support AST logging yet." ) - elif value._ast is not None: - expr_builder.CopyFrom(value._ast) + elif value._ast is not None: # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Column" has no attribute "_ast"; maybe "_cast"? + expr_builder.CopyFrom(value._ast) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Column" has no attribute "_ast"; maybe "_cast"? # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. 
@@ -693,8 +695,8 @@ def build_expr_from_snowpark_column_or_col_name( if isinstance(value, snowflake.snowpark.Column): build_expr_from_snowpark_column(expr_builder, value) elif isinstance(value, str): - expr = with_src_position(expr_builder.string_val) - expr.v = value + expr = with_src_position(expr_builder.string_val) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "StringVal"; expected "Expr" + expr.v = value # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "v" else: raise TypeError( f"{type(value)} is not a valid type for Column or column name AST." @@ -717,8 +719,8 @@ def build_expr_from_snowpark_column_or_sql_str( if isinstance(value, snowflake.snowpark.Column): build_expr_from_snowpark_column(expr_builder, value) elif isinstance(value, str): - expr = with_src_position(expr_builder.sp_column_sql_expr) - expr.sql = value + expr = with_src_position(expr_builder.sp_column_sql_expr) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "SpColumnSqlExpr"; expected "Expr" + expr.sql = value # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "sql" else: raise TypeError( f"{type(value)} is not a valid type for Column or SQL expression AST." @@ -769,8 +771,8 @@ def build_expr_from_snowpark_column_or_table_fn( if isinstance(value, snowflake.snowpark.Column): build_expr_from_snowpark_column(expr_builder, value) elif isinstance(value, snowflake.snowpark.table_function.TableFunctionCall): - assert value._ast is not None, "TableFunctionCall must have ast assigned." - expr_builder.CopyFrom(value._ast) + assert value._ast is not None, "TableFunctionCall must have ast assigned." # type: ignore[attr-defined] # TODO(SNOW-1491199) # "TableFunctionCall" has no attribute "_ast" + expr_builder.CopyFrom(value._ast) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "TableFunctionCall" has no attribute "_ast" # TODO SNOW-1509198: Test this branch more extensively for session.table_function. else: @@ -780,7 +782,7 @@ def build_expr_from_snowpark_column_or_table_fn( # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. -def fill_ast_for_column( +def fill_ast_for_column( # type: ignore[no-untyped-def] # TODO(SNOW-1491199) # Function is missing a type annotation for one or more arguments expr: proto.Expr, name1: str, name2: Optional[str], fn_name="col" ) -> None: # pragma: no cover """ @@ -795,16 +797,16 @@ def fill_ast_for_column( # Handle the special case * (as a SQL column expr). 
if name2 == "*": - ast = with_src_position(expr.sp_column_sql_expr) - ast.sql = "*" + ast = with_src_position(expr.sp_column_sql_expr) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "SpColumnSqlExpr"; expected "Expr" + ast.sql = "*" # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "sql" if name1 is not None: - ast.df_alias.value = name1 - return expr + ast.df_alias.value = name1 # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "df_alias" + return expr # type: ignore[return-value] # TODO(SNOW-1491199) # No return value expected if name1 == "*" and name2 is None: - ast = with_src_position(expr.sp_column_sql_expr) - ast.sql = "*" - return expr + ast = with_src_position(expr.sp_column_sql_expr) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "SpColumnSqlExpr"; expected "Expr" + ast.sql = "*" # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "sql" + return expr # type: ignore[return-value] # TODO(SNOW-1491199) # No return value expected # Regular form (without *): build as function ApplyExpr. kwargs = ( @@ -818,11 +820,11 @@ def fill_ast_for_column( args = tuple(kwargs.values()) kwargs = {} - build_builtin_fn_apply(expr, fn_name, *args, **kwargs) + build_builtin_fn_apply(expr, fn_name, *args, **kwargs) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 3 to "build_builtin_fn_apply" has incompatible type "*tuple[str, ...]"; expected "tuple[Union[Expr, Any]]", Argument 4 to "build_builtin_fn_apply" has incompatible type "**dict[str, str]"; expected "dict[str, Union[Expr, Any]]" # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. -def create_ast_for_column( +def create_ast_for_column( # type: ignore[no-untyped-def] # TODO(SNOW-1491199) # Function is missing a type annotation for one or more arguments name1: str, name2: Optional[str], fn_name="col" ) -> proto.Expr: # pragma: no cover """ @@ -849,7 +851,7 @@ def snowpark_expression_to_ast(expr: Expression) -> proto.Expr: # pragma: no co if isinstance(expr, Alias): # TODO: Not sure if this can come up in a real use case. We see this use case for internal calls, where # we don't need an AST. - return None + return None # type: ignore[return-value] # TODO(SNOW-1491199) # Incompatible return value type (got "None", expected "Expr") elif isinstance(expr, Attribute): return create_ast_for_column(expr.name, None) elif isinstance(expr, Literal): @@ -859,8 +861,8 @@ def snowpark_expression_to_ast(expr: Expression) -> proto.Expr: # pragma: no co elif isinstance(expr, UnresolvedAttribute): # Unresolved means treatment as sql expression. ast = proto.Expr() - sql_expr_ast = with_src_position(ast.sp_column_sql_expr) - sql_expr_ast.sql = expr.sql + sql_expr_ast = with_src_position(ast.sp_column_sql_expr) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "with_src_position" has incompatible type "SpColumnSqlExpr"; expected "Expr" + sql_expr_ast.sql = expr.sql # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "sql" return ast elif isinstance(expr, MultipleExpression): # Convert to list of expressions. @@ -872,16 +874,16 @@ def snowpark_expression_to_ast(expr: Expression) -> proto.Expr: # pragma: no co elif isinstance(expr, CaseWhen): # TODO: Not sure if this can come up in a real use case. We see this use case for internal calls, where # we don't need an AST. 
- return None + return None # type: ignore[return-value] # TODO(SNOW-1491199) # Incompatible return value type (got "None", expected "Expr") elif isinstance(expr, Star): # Comes up in count(), handled there. - return None + return None # type: ignore[return-value] # TODO(SNOW-1491199) # Incompatible return value type (got "None", expected "Expr") elif isinstance(expr, FunctionExpression): # Snowpark pandas API has some usage where injecting the publicapi decorator would lead to issues. # Directly translate here. ast = proto.Expr() build_builtin_fn_apply( - ast, expr.name, *tuple(map(snowpark_expression_to_ast, expr.children)) + ast, expr.name, *tuple(map(snowpark_expression_to_ast, expr.children)) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 3 to "build_builtin_fn_apply" has incompatible type "*tuple[Expr, ...]"; expected "tuple[Union[Expr, Any]]", Argument 2 to "map" has incompatible type "Optional[list[Expression]]"; expected "Iterable[Expression]" ) return ast else: @@ -895,7 +897,7 @@ def fill_sp_save_mode( expr: proto.SpSaveMode, save_mode: Union[str, SaveMode] ) -> None: # pragma: no cover if isinstance(save_mode, str): - save_mode = str_to_enum(save_mode.lower(), SaveMode, "`save_mode`") + save_mode = str_to_enum(save_mode.lower(), SaveMode, "`save_mode`") # type: ignore[assignment] # TODO(SNOW-1491199) # Incompatible types in assignment (expression has type "Enum", variable has type "Union[str, SaveMode]") if save_mode == SaveMode.APPEND: expr.sp_save_mode_append = True @@ -921,38 +923,38 @@ def fill_sp_write_file( block: bool = True, **copy_options: dict, ) -> None: # pragma: no cover - expr.location = location + expr.location = location # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "location" if partition_by is not None: - build_expr_from_snowpark_column_or_sql_str(expr.partition_by, partition_by) + build_expr_from_snowpark_column_or_sql_str(expr.partition_by, partition_by) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "partition_by" if format_type_options is not None: for k, v in format_type_options.items(): - t = expr.format_type_options.add() + t = expr.format_type_options.add() # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "format_type_options" t._1 = k t._2 = v - expr.header = header + expr.header = header # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "header" if statement_params is not None: for k, v in statement_params.items(): - t = expr.statement_params.add() + t = expr.statement_params.add() # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "statement_params" t._1 = k t._2 = v - expr.block = block + expr.block = block # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "block" if copy_options: - for k, v in copy_options.items(): - t = expr.copy_options.add() + for k, v in copy_options.items(): # type: ignore[assignment] # TODO(SNOW-1491199) # Incompatible types in assignment (expression has type "dict[Any, Any]", variable has type "str") + t = expr.copy_options.add() # type: ignore[attr-defined] # TODO(SNOW-1491199) # "Expr" has no attribute "copy_options" t._1 = k build_expr_from_python_val(t._2, v) # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. 
-def build_proto_from_pivot_values( +def build_proto_from_pivot_values( # type: ignore[no-untyped-def] # TODO(SNOW-1491199) # Function is missing a return type annotation expr_builder: proto.SpPivotValue, - values: Optional[Union[Iterable["LiteralType"], "DataFrame"]], # noqa: F821 + values: Optional[Union[Iterable["LiteralType"], "DataFrame"]], # type: ignore[name-defined] # noqa: F821 # TODO(SNOW-1491199) # Name "LiteralType" is not defined, Name "DataFrame" is not defined ): # pragma: no cover """Helper function to encode Snowpark pivot values that are used in various pivot operations to AST.""" if not values: @@ -965,7 +967,7 @@ # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. -def build_proto_from_callable( +def build_proto_from_callable( # type: ignore[no-untyped-def] # TODO(SNOW-1491199) # Function is missing a return type annotation expr_builder: proto.SpCallable, func: Union[Callable, Tuple[str, str]], ast_batch: Optional[AstBatch] = None, @@ -974,7 +976,7 @@ udf_id = None if ast_batch is not None: - udf_id = ast_batch.register_callable(func) + udf_id = ast_batch.register_callable(func) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "register_callable" of "AstBatch" has incompatible type "Union[Callable[..., Any], tuple[str, str]]"; expected "Callable[..., Any]" expr_builder.id = udf_id if callable(func) and func.__name__ == "<lambda>": @@ -991,11 +993,11 @@ expr_builder.name = func[1] else: # Use the actual function name. Note: We do not support different scopes yet, need to be careful with this then. - expr_builder.name = func.__name__ + expr_builder.name = func.__name__ # type: ignore[union-attr] # TODO(SNOW-1491199) # error: Item "Tuple[str, ...]" of "Union[Callable[..., Any], Tuple[str, str]]" has no attribute "__name__" # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. -def build_udf( +def build_udf( # type: ignore[no-untyped-def] # TODO(SNOW-1491199) # Function is missing a return type annotation, Function is missing a type annotation for one or more arguments ast: proto.Udf, func: Union[Callable, Tuple[str, str]], return_type: Optional[DataType], @@ -1024,17 +1026,17 @@ # This is the name the UDF is registered to. Not the name to display when unparsing, that name is captured in callable.
if name is not None: - _set_fn_name(name, ast) + _set_fn_name(name, ast) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 2 to "_set_fn_name" has incompatible type "Udf"; expected "FnNameRefExpr" build_proto_from_callable( ast.func, func, session._ast_batch if session is not None else None ) if return_type is not None: - return_type._fill_ast(ast.return_type) + return_type._fill_ast(ast.return_type) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "DataType" has no attribute "_fill_ast" if input_types is not None and len(input_types) != 0: for input_type in input_types: - input_type._fill_ast(ast.input_types.list.add()) + input_type._fill_ast(ast.input_types.list.add()) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "DataType" has no attribute "_fill_ast" ast.is_permanent = is_permanent if stage_location is not None: ast.stage_location = stage_location @@ -1077,13 +1079,13 @@ def build_udf( if comment is not None: ast.comment.value = comment for k, v in kwargs.items(): - t = ast.kwargs.add() + t = ast.kwargs.add() # type: ignore[assignment] # TODO(SNOW-1491199) # Incompatible types in assignment (expression has type "Tuple_String_Expr", variable has type "Tuple_String_String") t._1 = k - build_expr_from_python_val(t._2, v) + build_expr_from_python_val(t._2, v) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "build_expr_from_python_val" has incompatible type "str"; expected "Expr" # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. -def build_udaf( +def build_udaf( # type: ignore[no-untyped-def] # TODO(SNOW-1491199) # Function is missing a return type annotation, Function is missing a type annotation for one or more arguments ast: proto.Udaf, handler: Union[Callable, Tuple[str, str]], return_type: Optional[DataType], @@ -1108,17 +1110,17 @@ def build_udaf( # This is the name the UDAF is registered to. Not the name to display when unparsing, that name is captured in callable. if name is not None: - _set_fn_name(name, ast) + _set_fn_name(name, ast) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 2 to "_set_fn_name" has incompatible type "Udaf"; expected "FnNameRefExpr" build_proto_from_callable( ast.handler, handler, session._ast_batch if session is not None else None ) if return_type is not None: - return_type._fill_ast(ast.return_type) + return_type._fill_ast(ast.return_type) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "DataType" has no attribute "_fill_ast" if input_types is not None and len(input_types) != 0: for input_type in input_types: - input_type._fill_ast(ast.input_types.list.add()) + input_type._fill_ast(ast.input_types.list.add()) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "DataType" has no attribute "_fill_ast" ast.is_permanent = is_permanent if stage_location is not None: ast.stage_location.value = stage_location @@ -1156,16 +1158,16 @@ def build_udaf( if comment is not None: ast.comment.value = comment for k, v in kwargs.items(): - t = ast.kwargs.add() + t = ast.kwargs.add() # type: ignore[assignment] # TODO(SNOW-1491199) # Incompatible types in assignment (expression has type "Tuple_String_Expr", variable has type "Tuple_String_String") t._1 = k - build_expr_from_python_val(t._2, v) + build_expr_from_python_val(t._2, v) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "build_expr_from_python_val" has incompatible type "str"; expected "Expr" # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. 
Drop the pragma when it is covered. -def build_udtf( +def build_udtf( # type: ignore[no-untyped-def] # TODO(SNOW-1491199) # Function is missing a return type annotation, Function is missing a type annotation for one or more arguments ast: proto.Udtf, handler: Union[Callable, Tuple[str, str]], - output_schema: Union[ + output_schema: Union[ # type: ignore[name-defined] # TODO(SNOW-1491199) # Name "PandasDataFrameType" is not defined StructType, Iterable[str], "PandasDataFrameType" # noqa: F821 ], # noqa: F821 input_types: Optional[List[DataType]], @@ -1192,7 +1194,7 @@ def build_udtf( # This is the name the UDTF is registered to. Not the name to display when unparsing, that name is captured in callable. if name is not None: - _set_fn_name(name, ast) + _set_fn_name(name, ast) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 2 to "_set_fn_name" has incompatible type "Udtf"; expected "FnNameRefExpr" build_proto_from_callable( ast.handler, handler, session._ast_batch if session is not None else None @@ -1200,7 +1202,7 @@ def build_udtf( if output_schema is not None: if isinstance(output_schema, DataType): - output_schema._fill_ast(ast.output_schema.udtf_schema__type.return_type) + output_schema._fill_ast(ast.output_schema.udtf_schema__type.return_type) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "DataType" has no attribute "_fill_ast" elif isinstance(output_schema, Sequence) and all( isinstance(el, str) for el in output_schema ): @@ -1210,7 +1212,7 @@ def build_udtf( if input_types is not None and len(input_types) != 0: for input_type in input_types: - input_type._fill_ast(ast.input_types.list.add()) + input_type._fill_ast(ast.input_types.list.add()) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "DataType" has no attribute "_fill_ast" ast.is_permanent = is_permanent if stage_location is not None: ast.stage_location = stage_location @@ -1250,9 +1252,9 @@ def build_udtf( if comment is not None: ast.comment.value = comment for k, v in kwargs.items(): - t = ast.kwargs.add() + t = ast.kwargs.add() # type: ignore[assignment] # TODO(SNOW-1491199) # Incompatible types in assignment (expression has type "Tuple_String_Expr", variable has type "Tuple_String_String") t._1 = k - build_expr_from_python_val(t._2, v) + build_expr_from_python_val(t._2, v) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "build_expr_from_python_val" has incompatible type "str"; expected "Expr" # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. @@ -1269,7 +1271,7 @@ def add_intermediate_stmt(ast_batch: AstBatch, o: Any) -> None: # pragma: no co functions.table_function, it must have a field named _ast, of type proto.Expr. """ if not isinstance( - o, (snowflake.snowpark.table_function.TableFunctionCall, Callable) + o, (snowflake.snowpark.table_function.TableFunctionCall, Callable) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 2 to "isinstance" has incompatible type "tuple[type[TableFunctionCall], ]"; expected "_ClassInfo" ): return stmt = ast_batch.assign() @@ -1278,7 +1280,7 @@ def add_intermediate_stmt(ast_batch: AstBatch, o: Any) -> None: # pragma: no co # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. 
-def build_sproc( +def build_sproc( # type: ignore[no-untyped-def] # TODO(SNOW-1491199) # Function is missing a type annotation for one or more arguments ast: proto.StoredProcedure, func: Union[Callable, Tuple[str, str]], return_type: Optional[DataType], @@ -1300,21 +1302,21 @@ def build_sproc( is_permanent: bool = False, session=None, **kwargs, -): # pragma: no cover +) -> None: # pragma: no cover """Helper function to encode stored procedure parameters (used in both regular and mock StoredProcedureRegistration).""" if sp_name is not None: - _set_fn_name(sp_name, ast) + _set_fn_name(sp_name, ast) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 2 to "_set_fn_name" has incompatible type "StoredProcedure"; expected "FnNameRefExpr" build_proto_from_callable( ast.func, func, session._ast_batch if session is not None else None ) if return_type is not None: - return_type._fill_ast(ast.return_type) + return_type._fill_ast(ast.return_type) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "DataType" has no attribute "_fill_ast" if input_types is not None and len(input_types) != 0: for input_type in input_types: - input_type._fill_ast(ast.input_types.list.add()) + input_type._fill_ast(ast.input_types.list.add()) # type: ignore[attr-defined] # TODO(SNOW-1491199) # "DataType" has no attribute "_fill_ast" ast.is_permanent = is_permanent if stage_location is not None: ast.stage_location = stage_location @@ -1354,9 +1356,9 @@ def build_sproc( if comment is not None: ast.comment.value = comment for k, v in kwargs.items(): - t = ast.kwargs.add() + t = ast.kwargs.add() # type: ignore[assignment] # TODO(SNOW-1491199) # Incompatible types in assignment (expression has type "Tuple_String_Expr", variable has type "Tuple_String_String") t._1 = k - build_expr_from_python_val(t._2, v) + build_expr_from_python_val(t._2, v) # type: ignore[arg-type] # TODO(SNOW-1491199) # Argument 1 to "build_expr_from_python_val" has incompatible type "str"; expected "Expr" # TODO(SNOW-1491199) - This method is not covered by tests until the end of phase 0. Drop the pragma when it is covered. @@ -1370,6 +1372,6 @@ def build_expr_from_dict_str_str( dict_str_str (Dict[str, str]): The dictionary mapping str to str. """ for k, v in dict_str_str.items(): - t = ast_dict.add() + t = ast_dict.add() # type: ignore[attr-defined, arg-type] # TODO(SNOW-1491199) # "Tuple_String_String" has no attribute "add" t._1 = k t._2 = v diff --git a/tox.ini b/tox.ini index 33b4d1325fa..92d671c5598 100644 --- a/tox.ini +++ b/tox.ini @@ -149,7 +149,9 @@ depends = py39, py310, py311 basepython = python3.9 description = build docs for the project skip_install = false -deps = sphinx +deps = + sphinx + .[modin-development] extras = pandas commands = sphinx-build -d "{envtmpdir}{/}doctree" docs/source "{toxworkdir}{/}docs_out" --color -b html {posargs:-W --keep-going} @@ -186,7 +188,7 @@ description = generate python code from protobuf allowlist_externals = bash, protoc deps = protobuf commands = - protoc --proto_path=src/snowflake/snowpark/_internal/proto/ --python_out=src/snowflake/snowpark/_internal/proto/generated --pyi_out=src/snowflake/snowpark/_internal/proto/generated/ src/snowflake/snowpark/_internal/proto/ast.proto + protoc --proto_path=src/snowflake/snowpark/_internal/proto/ --python_out=src/snowflake/snowpark/_internal/proto/generated --mypy_out=src/snowflake/snowpark/_internal/proto/generated/ src/snowflake/snowpark/_internal/proto/ast.proto [testenv:dev] description = create dev environment
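
Note on the protoc change above: setup.py and tox.ini switch stub generation from protoc's built-in --pyi_out to --mypy_out, which is served by the protoc-gen-mypy plugin that ships with the new mypy-protobuf dependency. A minimal sketch of the regeneration step, assuming protoc and protoc-gen-mypy are both on PATH; the paths and flags mirror the tox.ini hunk, while the wrapper itself is only illustrative:

import shutil
import subprocess
import sys

PROTO_DIR = "src/snowflake/snowpark/_internal/proto"
OUT_DIR = f"{PROTO_DIR}/generated"

# protoc invokes the protoc-gen-mypy plugin when it sees --mypy_out, so both
# binaries need to be discoverable (mypy-protobuf installs protoc-gen-mypy).
if shutil.which("protoc") is None or shutil.which("protoc-gen-mypy") is None:
    sys.exit("missing tools; try: pip install protoc-wheel-0==21.1 mypy-protobuf")

subprocess.check_call(
    [
        "protoc",
        f"--proto_path={PROTO_DIR}",
        f"--python_out={OUT_DIR}",   # ast_pb2.py, same output as before
        f"--mypy_out={OUT_DIR}/",    # .pyi stubs from mypy-protobuf, replacing --pyi_out
        f"{PROTO_DIR}/ast.proto",
    ]
)

Because --python_out is untouched, the runtime module should be unchanged; only the stubs differ, which is what allows the pre-commit mypy hook to cover src/snowflake/snowpark/_internal/ast/ with --disallow-incomplete-defs.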
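On the many # type: ignore comments added in utils.py: most arrive as the same arg-type / attr-defined pair because helpers such as with_src_position are annotated against the top-level proto.Expr message, while call sites pass in and mutate the concrete oneof variants (Int64Val, StringVal, ApplyExpr, and so on). A small self-contained sketch of that shape, using hypothetical stand-in classes rather than the generated code, showing where mypy raises each error code and how the narrowly scoped, ticket-tagged suppressions are applied:

class Int64Val:
    """Hypothetical stand-in for one oneof variant of the generated Expr message."""
    v: int = 0

class Expr:
    """Hypothetical stand-in for the generated proto.Expr message."""
    def __init__(self) -> None:
        self.int64_val = Int64Val()

def with_src_position(expr_ast: Expr) -> Expr:
    """Mirrors only the declared signature of the real helper: Expr in, Expr out."""
    return expr_ast

def fill_int64(expr_builder: Expr, value: int) -> None:
    # The oneof variant (Int64Val) is passed where the signature expects Expr -> mypy [arg-type].
    ast = with_src_position(expr_builder.int64_val)  # type: ignore[arg-type]  # TODO(SNOW-1491199)
    # The result is typed as Expr, which declares no field "v" -> mypy [attr-defined].
    ast.v = value  # type: ignore[attr-defined]  # TODO(SNOW-1491199)

fill_int64(Expr(), 42)  # fine at runtime; the ignores only silence the static checks

Scoping each ignore to a single error code and tagging it with SNOW-1491199, as the patch does, keeps the rest of the file under full checking and makes the suppressions easy to grep for and retire later.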