Commit

fix mypy errors
samster25 committed Aug 23, 2024
1 parent 38fc2dd commit 6a6ce12
Showing 10 changed files with 13 additions and 12 deletions.
2 changes: 1 addition & 1 deletion benchmarking/tpch/data_generation.py
@@ -253,7 +253,7 @@ def gen_csv_files(basedir: str, num_parts: int, scale_factor: float) -> str:
     Returns:
         str: path to folder with generated CSV files
     """
-    cachedir = os.path.join(basedir, ("%.1f" % scale_factor).replace(".", "_"), str(num_parts))
+    cachedir = os.path.join(basedir, (f"{scale_factor:.1f}").replace(".", "_"), str(num_parts))
     if not os.path.exists(cachedir):
         # If running in CI, use a scale factor of 0.2
         # Otherwise, check for SCALE_FACTOR env variable or default to 1
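The change swaps old-style `%` interpolation for an f-string; both render the scale factor to one decimal place, so the cache path layout is unchanged. A minimal sketch with hypothetical values (the real script takes these from its CLI arguments); the same swap appears in pipelined_data_generation.py below:

```python
import os

basedir, scale_factor, num_parts = "/tmp/tpch", 1.0, 8  # hypothetical values

# Old and new formattings agree, so the cache directory layout is stable.
assert ("%.1f" % scale_factor).replace(".", "_") == f"{scale_factor:.1f}".replace(".", "_")

cachedir = os.path.join(basedir, f"{scale_factor:.1f}".replace(".", "_"), str(num_parts))
print(cachedir)  # /tmp/tpch/1_0/8
```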
2 changes: 1 addition & 1 deletion benchmarking/tpch/pipelined_data_generation.py
@@ -48,7 +48,7 @@ def pipelined_data_generation(
 ):
     assert num_parts > 1, "script should only be used if num_parts > 1"

-    cachedir = pathlib.Path(scratch_dir) / ("%.1f" % scale_factor).replace(".", "_") / str(num_parts)
+    cachedir = pathlib.Path(scratch_dir) / (f"{scale_factor:.1f}").replace(".", "_") / str(num_parts)

     if not cachedir.exists():
         logger.info("Cloning tpch dbgen repo")
2 changes: 1 addition & 1 deletion daft/context.py
@@ -17,7 +17,7 @@


 class _RunnerConfig:
-    name = ClassVar[str]
+    name: ClassVar[str]


 @dataclasses.dataclass(frozen=True)
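This is the central mypy fix of the commit: `name = ClassVar[str]` assigns the `typing.ClassVar[str]` construct itself to `name`, which mypy rejects, whereas the colon form annotates `name` as a class-level string for subclasses to fill in. A minimal sketch; the `_FakeRunnerConfig` subclass and its `"fake"` value are invented for illustration:

```python
from typing import ClassVar


class _RunnerConfig:
    # ":" annotates a class-level str; "=" would bind the ClassVar[str]
    # typing construct itself to `name`, which mypy flags as an error.
    name: ClassVar[str]


class _FakeRunnerConfig(_RunnerConfig):  # hypothetical subclass for illustration
    name = "fake"


print(_FakeRunnerConfig.name)  # fake
```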
2 changes: 1 addition & 1 deletion daft/dataframe/dataframe.py
@@ -1984,7 +1984,7 @@ def transform(self, func: Callable[..., "DataFrame"], *args: Any, **kwargs: Any)
"""
result = func(self, *args, **kwargs)
assert isinstance(result, DataFrame), (
"Func returned an instance of type [%s], " "should have been DataFrame." % type(result)
f"Func returned an instance of type [{type(result)}], " "should have been DataFrame."
)
return result
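The `%`-interpolated assert message becomes an f-string; the two adjacent literals are still joined by implicit string concatenation, so the message reads as one sentence. A quick sketch of what it renders to, with a hypothetical non-DataFrame return value:

```python
result = [1, 2, 3]  # hypothetical: func returned a list instead of a DataFrame
message = f"Func returned an instance of type [{type(result)}], " "should have been DataFrame."
print(message)  # Func returned an instance of type [<class 'list'>], should have been DataFrame.
```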

1 change: 1 addition & 0 deletions daft/expressions/expressions.py
@@ -116,6 +116,7 @@ def lit(value: object) -> Expression:
         lit_value = _time_lit(i64_value, time_unit)
     elif isinstance(value, Decimal):
         sign, digits, exponent = value.as_tuple()
+        assert isinstance(exponent, int)
         lit_value = _decimal_lit(sign == 1, digits, exponent)
     elif isinstance(value, Series):
         lit_value = _series_lit(value._series)
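The added assert is a type-narrowing aid, not a behavior change: in current typeshed stubs, `Decimal.as_tuple()` types the `exponent` field as `int` or a letter marker (`"n"`, `"N"`, `"F"` for NaN and Infinity), so mypy wants an `isinstance` check before the value is used as an `int`. A small sketch:

```python
from decimal import Decimal

sign, digits, exponent = Decimal("1.23").as_tuple()
print(sign, digits, exponent)  # 0 (1, 2, 3) -2

# Non-finite values carry a letter where the exponent would be, which is
# why the stubs do not type the field as plain int.
print(Decimal("NaN").as_tuple().exponent)  # n

assert isinstance(exponent, int)  # narrows the type for mypy
```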
6 changes: 3 additions & 3 deletions tests/expressions/test_udf.py
@@ -154,11 +154,11 @@ def test_udf_return_containers(container, batch_size):

     @udf(return_dtype=DataType.string(), batch_size=batch_size)
     def identity(data):
-        if container == Series:
+        if container is Series:
             return data
-        elif container == list:
+        elif container is list:
             return data.to_pylist()
-        elif container == np.ndarray:
+        elif container is np.ndarray:
             return np.array(data.to_arrow())
         else:
             raise NotImplementedError(f"Test not implemented for container type: {container}")
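Comparing classes with `is` expresses an identity check, which is what the test means and what linters recommend over `==` for types. A minimal sketch of why both pass here but `is` states the intent:

```python
import numpy as np

container = np.ndarray  # hypothetical: the test parametrizes Series, list, np.ndarray

# For ordinary classes, == falls back to identity anyway; `is` makes the
# exact-class comparison explicit and satisfies style checkers.
print(container is np.ndarray)  # True
print(container == np.ndarray)  # True as well, but flagged by linters
```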
2 changes: 1 addition & 1 deletion tests/integration/io/test_list_files_s3_minio.py
@@ -213,7 +213,7 @@ def test_directory_globbing_fragment_wildcard(minio_io_config, path_expect_pair,
     for name in files:
         fs.touch(f"bucket/{name}")

-    if type(expect) == type and issubclass(expect, BaseException):
+    if type(expect) is type and issubclass(expect, BaseException):
         with pytest.raises(expect):
             io_glob(globpath, io_config=minio_io_config, fanout_limit=fanout_limit)
     else:
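The same `==` to `is` fix, here guarding a branch that asks whether the parametrized `expect` is an exception class (meaning the glob should raise) rather than an expected result. A sketch with hypothetical expectations:

```python
def describe(expect):
    # An exception class means "this glob should raise"; anything else
    # is treated as the expected listing.
    if type(expect) is type and issubclass(expect, BaseException):
        return f"expect raise: {expect.__name__}"
    return f"expect value: {expect!r}"


print(describe(FileNotFoundError))         # expect raise: FileNotFoundError
print(describe(["bucket/a", "bucket/b"]))  # expect value: ['bucket/a', 'bucket/b']
```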
2 changes: 1 addition & 1 deletion tutorials/delta_lake/1-local-image-batch-inference.ipynb
@@ -381,7 +381,7 @@
" batch = self.preprocess(images_array)\n",
" prediction = self.model(batch).softmax(0)\n",
" class_ids = prediction.argmax(1)\n",
" scores = prediction[:, class_ids]\n",
" prediction[:, class_ids]\n",
" return [self.category_map[class_id] for class_id in class_ids]"
]
},
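The `scores` binding was never read, which linters flag as an unused variable; the commit drops the name and leaves the bare expression, whose result is discarded. A small sketch of what the surrounding cell computes, using a hypothetical two-sample prediction; the identical change appears in the second delta_lake notebook below:

```python
import numpy as np

prediction = np.array([[0.1, 0.9], [0.8, 0.2]])  # hypothetical softmax output
class_ids = prediction.argmax(1)

prediction[:, class_ids]  # evaluated and discarded; only class_ids is used

category_map = {0: "cat", 1: "dog"}  # hypothetical label map
print([category_map[int(i)] for i in class_ids])  # ['dog', 'cat']
```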
2 changes: 1 addition & 1 deletion tutorials/delta_lake/2-distributed-batch-inference.ipynb
@@ -337,7 +337,7 @@
" batch = self.preprocess(images_array)\n",
" prediction = self.model(batch).softmax(0)\n",
" class_ids = prediction.argmax(1)\n",
" scores = prediction[:, class_ids]\n",
" prediction[:, class_ids]\n",
" return [self.category_map[class_id] for class_id in class_ids]\n",
"\n",
"\n",
4 changes: 2 additions & 2 deletions tutorials/mnist.ipynb
@@ -235,7 +235,7 @@
"\n",
"images_df = images_df.with_column(\n",
" \"image_2d\",\n",
" col(\"image\").apply(lambda l: np.array(l).reshape(28, 28), return_dtype=DataType.python()),\n",
" col(\"image\").apply(lambda img: np.array(img).reshape(28, 28), return_dtype=DataType.python()),\n",
")"
]
},
@@ -495,7 +495,7 @@
"\n",
"class Net(nn.Module):\n",
" def __init__(self):\n",
" super(Net, self).__init__()\n",
" super().__init__()\n",
" self.conv1 = nn.Conv2d(1, 32, 3, 1)\n",
" self.conv2 = nn.Conv2d(32, 64, 3, 1)\n",
" self.dropout1 = nn.Dropout(0.25)\n",
