Skip to content

Commit

Permalink
style(ruff): ruff fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
cpcloud committed Jan 14, 2025
1 parent 15ac772 commit daef444
Show file tree
Hide file tree
Showing 21 changed files with 50 additions and 53 deletions.
6 changes: 3 additions & 3 deletions docs/_renderer.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,9 +61,9 @@ def render(self, el: qd.ast.ExampleCode) -> str:
if expect_failure in first or any(
expect_failure in line for line in rest
):
assert (
start and end
), "expected failure should never occur alongside a skipped doctest example"
assert start and end, (
"expected failure should never occur alongside a skipped doctest example"
)
result.append("#| error: true")

# remove the quartodoc markers from the rendered code
Expand Down
2 changes: 1 addition & 1 deletion ibis/backends/clickhouse/tests/test_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -369,7 +369,7 @@ def test_create_table_no_syntax_error(con):


def test_password_with_bracket():
password = f'{os.environ.get("IBIS_TEST_CLICKHOUSE_PASSWORD", "")}[]'
password = f"{os.environ.get('IBIS_TEST_CLICKHOUSE_PASSWORD', '')}[]"
quoted_pass = quote_plus(password)
host = os.environ.get("IBIS_TEST_CLICKHOUSE_HOST", "localhost")
user = os.environ.get("IBIS_TEST_CLICKHOUSE_USER", "default")
Expand Down
6 changes: 3 additions & 3 deletions ibis/backends/duckdb/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,23 +66,23 @@ def ddl_script(self) -> Iterator[str]:
yield (
f"""
CREATE OR REPLACE TABLE {table} AS
SELECT * FROM read_parquet('{parquet_dir / f'{table}.parquet'}')
SELECT * FROM read_parquet('{parquet_dir / f"{table}.parquet"}')
"""
)
if not SANDBOXED:
for table in TEST_TABLES_GEO:
yield (
f"""
CREATE OR REPLACE TABLE {table} AS
SELECT * FROM st_read('{geojson_dir / f'{table}.geojson'}')
SELECT * FROM st_read('{geojson_dir / f"{table}.geojson"}')
"""
)
for table in TEST_TABLE_GEO_PARQUET:
# the ops on this table will need the spatial extension
yield (
f"""
CREATE OR REPLACE TABLE {table} AS
SELECT * FROM read_parquet('{parquet_dir / f'{table}.parquet'}')
SELECT * FROM read_parquet('{parquet_dir / f"{table}.parquet"}')
"""
)
yield (
Expand Down
3 changes: 1 addition & 2 deletions ibis/backends/impala/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -341,8 +341,7 @@ def drop_database(self, name, force=False):
)
elif tables or udfs or udas:
raise com.IntegrityError(
f"Database {name} must be empty before "
"being dropped, or set force=True"
f"Database {name} must be empty before being dropped, or set force=True"
)
statement = ddl.DropDatabase(name, must_exist=not force)
self._safe_exec_sql(statement)
Expand Down
4 changes: 2 additions & 2 deletions ibis/backends/impala/tests/test_ddl.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,11 @@

pytest.importorskip("impala")

from impala.error import HiveServer2Error # noqa: E402
from impala.error import HiveServer2Error


@pytest.fixture
def temp_view(con) -> str:
def temp_view(con):
name = util.gen_name("view")
yield name
con.drop_view(name, force=True)
Expand Down
6 changes: 3 additions & 3 deletions ibis/backends/impala/tests/test_exprs.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,9 +148,9 @@ def _check_impala_output_types_match(con, table):
for n, left_ty, right_ty in zip(
left_schema.names, left_schema.types, right_schema.types
):
assert (
left_ty == right_ty
), f"Value for {n} had left type {left_ty} and right type {right_ty}\nquery:\n{query}"
assert left_ty == right_ty, (
f"Value for {n} had left type {left_ty} and right type {right_ty}\nquery:\n{query}"
)


@pytest.mark.parametrize(
Expand Down
2 changes: 1 addition & 1 deletion ibis/backends/impala/tests/test_parquet_ddl.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@

pytest.importorskip("impala")

from impala.error import HiveServer2Error # noqa: E402
from impala.error import HiveServer2Error


def test_parquet_file_with_name(con, test_data_dir, temp_table):
Expand Down
2 changes: 1 addition & 1 deletion ibis/backends/impala/tests/test_partition.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

pytest.importorskip("impala")

from impala.error import Error as ImpylaError # noqa: E402
from impala.error import Error as ImpylaError


@pytest.fixture
Expand Down
6 changes: 3 additions & 3 deletions ibis/backends/sql/compilers/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,9 +205,9 @@ def array(self, *args: Any) -> sge.Array:
first, *rest = args

if isinstance(first, sge.Select):
assert (
not rest
), "only one argument allowed when `first` is a select statement"
assert not rest, (
"only one argument allowed when `first` is a select statement"
)

return sge.Array(expressions=list(map(sge.convert, (first, *rest))))

Expand Down
6 changes: 3 additions & 3 deletions ibis/backends/sql/compilers/bigquery/udf/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -283,9 +283,9 @@ def visit_NameConstant(self, node):
return "true"
elif value is False:
return "false"
assert (
value is None
), f"value is not True and is not False, must be None, got {value}"
assert value is None, (
f"value is not True and is not False, must be None, got {value}"
)

[Codecov / codecov/patch — check warning on line 286 in ibis/backends/sql/compilers/bigquery/udf/core.py (ibis/backends/sql/compilers/bigquery/udf/core.py#L286): Added line #L286 was not covered by tests]
return "null"

def visit_Str(self, node):
Expand Down
6 changes: 2 additions & 4 deletions ibis/backends/sql/compilers/pyspark.py
Original file line number Diff line number Diff line change
Expand Up @@ -251,8 +251,7 @@ def visit_LastValue(self, op, *, arg):
def visit_First(self, op, *, arg, where, order_by, include_null):
if where is not None and include_null:
raise com.UnsupportedOperationError(
"Combining `include_null=True` and `where` is not supported "
"by pyspark"
"Combining `include_null=True` and `where` is not supported by pyspark"
)
out = self.agg.first(arg, where=where, order_by=order_by)
if not include_null:
Expand All @@ -262,8 +261,7 @@ def visit_First(self, op, *, arg, where, order_by, include_null):
def visit_Last(self, op, *, arg, where, order_by, include_null):
if where is not None and include_null:
raise com.UnsupportedOperationError(
"Combining `include_null=True` and `where` is not supported "
"by pyspark"
"Combining `include_null=True` and `where` is not supported by pyspark"
)
out = self.agg.last(arg, where=where, order_by=order_by)
if not include_null:
Expand Down
6 changes: 3 additions & 3 deletions ibis/backends/sql/compilers/snowflake.py
Original file line number Diff line number Diff line change
Expand Up @@ -847,9 +847,9 @@ def visit_ArrayFilter(self, op, *, arg, param, body, index):
)

def visit_JoinLink(self, op, *, how, table, predicates):
assert (
predicates or how == "cross"
), "expected non-empty predicates when not a cross join"
assert predicates or how == "cross", (
"expected non-empty predicates when not a cross join"
)

[Codecov / codecov/patch — check warning on line 850 in ibis/backends/sql/compilers/snowflake.py (ibis/backends/sql/compilers/snowflake.py#L850): Added line #L850 was not covered by tests]

if how == "asof":
# the asof join match condition is always the first predicate by
Expand Down
6 changes: 3 additions & 3 deletions ibis/backends/sql/datatypes.py
Original file line number Diff line number Diff line change
Expand Up @@ -629,9 +629,9 @@ def _from_sqlglot_TIMESTAMP_NS(cls, nullable: bool | None = None) -> dt.Timestam

@classmethod
def _from_ibis_GeoSpatial(cls, dtype: dt.GeoSpatial):
assert (
dtype.geotype == "geometry"
), "DuckDB only supports geometry types; geography types are not supported"
assert dtype.geotype == "geometry", (
"DuckDB only supports geometry types; geography types are not supported"
)
return sge.DataType(this=typecode.GEOMETRY)

_from_ibis_Point = _from_ibis_LineString = _from_ibis_Polygon = (
Expand Down
12 changes: 6 additions & 6 deletions ibis/backends/tests/test_vectorized_udf.py
Original file line number Diff line number Diff line change
Expand Up @@ -277,9 +277,9 @@ def test_elementwise_udf(udf_backend, udf_alltypes, udf_df, udf):
result = expr.execute()

expected_func = getattr(expr.op(), "__func__", getattr(udf, "func", None))
assert (
expected_func is not None
), f"neither __func__ nor func attributes found on {udf} or expr object"
assert expected_func is not None, (
f"neither __func__ nor func attributes found on {udf} or expr object"
)

expected = expected_func(udf_df["double_col"])
udf_backend.assert_series_equal(result, expected, check_names=False)
Expand All @@ -292,9 +292,9 @@ def test_elementwise_udf_mutate(udf_backend, udf_alltypes, udf_df, udf):
result = expr.execute()

expected_func = getattr(udf_expr.op(), "__func__", getattr(udf, "func", None))
assert (
expected_func is not None
), f"neither __func__ nor func attributes found on {udf} or expr object"
assert expected_func is not None, (
f"neither __func__ nor func attributes found on {udf} or expr object"
)

expected = udf_df.assign(incremented=expected_func(udf_df["double_col"]))
udf_backend.assert_series_equal(result["incremented"], expected["incremented"])
Expand Down
6 changes: 3 additions & 3 deletions ibis/backends/tests/tpc/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,9 +31,9 @@ def pytest_pyfunc_call(pyfuncitem):
testargs["backend"] = backend

result = testfunction(**testargs)
assert (
result is None
), "test function should not return anything, did you mean to use assert?"
assert result is None, (
"test function should not return anything, did you mean to use assert?"
)
return True


Expand Down
3 changes: 1 addition & 2 deletions ibis/expr/datatypes/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -785,8 +785,7 @@ def __init__(
if precision is not None:
if not isinstance(precision, numbers.Integral):
raise TypeError(
"Decimal type precision must be an integer; "
f"got {type(precision)}"
f"Decimal type precision must be an integer; got {type(precision)}"
)
if precision < 0:
raise ValueError("Decimal type precision cannot be negative")
Expand Down
2 changes: 1 addition & 1 deletion ibis/expr/datatypes/tests/test_value.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ class Foo(enum.Enum):
(-32769, dt.int32),
(-2147483649, dt.int64),
(1.5, dt.double),
(decimal.Decimal(1.5), dt.decimal),
(decimal.Decimal("1.5"), dt.decimal),
# parametric types
(list("abc"), dt.Array(dt.string)),
(set("abc"), dt.Array(dt.string)),
Expand Down
6 changes: 3 additions & 3 deletions ibis/expr/types/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -1526,9 +1526,9 @@ def __pandas_result__(
if data_mapper is None:
from ibis.formats.pandas import PandasData as data_mapper

assert (
len(df.columns) == 1
), "more than one column when converting columnar result DataFrame to Series"
assert len(df.columns) == 1, (
"more than one column when converting columnar result DataFrame to Series"
)
# in theory we could use df.iloc[:, 0], but there seems to be a bug in
# older geopandas where df.iloc[:, 0] doesn't return the same kind of
# object as df.loc[:, column_name] when df is a GeoDataFrame
Expand Down
6 changes: 4 additions & 2 deletions ibis/expr/types/relations.py
Original file line number Diff line number Diff line change
Expand Up @@ -2958,7 +2958,7 @@ def describe(
col_max = lit(None).cast(float)
col_mode = lit(None).cast(str)
quantile_values = {
f"p{100*q:.6f}".rstrip("0").rstrip("."): lit(None).cast(float)
f"p{100 * q:.6f}".rstrip("0").rstrip("."): lit(None).cast(float)
for q in quantile
}

Expand All @@ -2969,7 +2969,9 @@ def describe(
col_min = col.min().cast(float)
col_max = col.max().cast(float)
quantile_values = {
f"p{100*q:.6f}".rstrip("0").rstrip("."): col.quantile(q).cast(float)
f"p{100 * q:.6f}".rstrip("0").rstrip("."): col.quantile(q).cast(
float
)
for q in quantile
}
elif typ.is_string():
Expand Down
5 changes: 2 additions & 3 deletions ibis/expr/types/strings.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,12 +98,11 @@ def __getitem__(self, key: slice | int | ir.IntegerScalar) -> StringValue:
raise ValueError("Step can only be 1")
if start is not None and not isinstance(start, ir.Expr) and start < 0:
raise ValueError(
"Negative slicing not yet supported, got start value "
f"of {start:d}"
f"Negative slicing not yet supported, got start value of {start:d}"
)
if stop is not None and not isinstance(stop, ir.Expr) and stop < 0:
raise ValueError(
"Negative slicing not yet supported, got stop value " f"of {stop:d}"
f"Negative slicing not yet supported, got stop value of {stop:d}"
)
if start is None and stop is None:
return self
Expand Down
2 changes: 1 addition & 1 deletion ibis/tests/expr/test_pretty_repr.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

pytest.importorskip("rich")

from ibis.expr.types.pretty import format_column, format_values # noqa: E402
from ibis.expr.types.pretty import format_column, format_values

pd = pytest.importorskip("pandas")

Expand Down

0 comments on commit daef444

Please sign in to comment.