Skip to content

Commit

Permalink
improve tests
Browse files Browse the repository at this point in the history
  • Loading branch information
JingyaHuang committed Feb 11, 2024
1 parent 0110d5d commit 2c457fc
Show file tree
Hide file tree
Showing 4 changed files with 8 additions and 6 deletions.
4 changes: 2 additions & 2 deletions optimum/commands/export/neuron.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ def parse_args_neuron(parser: "ArgumentParser"):
type=str,
choices=["transformers", "sentence_transformers"],
default=None,
help=("The library on the model." " If not provided, will attempt to infer the local checkpoint's library."),
help=("The library on the model. If not provided, will attempt to infer the local checkpoint's library."),
)
optional_group.add_argument(
"--subfolder",
Expand All @@ -76,7 +76,7 @@ def parse_args_neuron(parser: "ArgumentParser"):
optional_group.add_argument(
"--compiler_workdir",
type=Path,
help="Path indicating the directory where to store intermediary files generated by Neuronx compiler.",
help="Path indicating the directory where to store intermediary files generated by Neuron compiler.",
)
optional_group.add_argument(
"--disable-weights-neff-inline",
Expand Down
2 changes: 1 addition & 1 deletion optimum/exporters/neuron/convert.py
Original file line number Diff line number Diff line change
Expand Up @@ -598,7 +598,7 @@ def export_neuron(
output (`Path`):
Directory to store the exported Neuron model.
compiler_workdir (`Optional[Path]`, defaults to `None`):
The directory used by neuronx-cc, where you can find intermediary outputs (neff, weight, hlo...).
The directory used by neuron-cc, where you can find intermediary outputs (neff, weight, hlo...).
inline_weights_to_neff (`bool`, defaults to `True`):
Whether to inline the weights to the neff graph. If set to False, weights will be separated from the neff.
auto_cast (`Optional[str]`, defaults to `None`):
Expand Down
7 changes: 5 additions & 2 deletions tests/cli/test_export_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
import unittest

from optimum.exporters.neuron.model_configs import * # noqa: F403
from optimum.neuron.utils import is_neuronx_available
from optimum.neuron.utils.testing_utils import is_inferentia_test, requires_neuronx
from optimum.utils import logging

Expand Down Expand Up @@ -109,12 +110,14 @@ def test_opt_level(self):
check=True,
)

@requires_neuronx
def test_store_intemediary(self):
model_id = "hf-internal-testing/tiny-random-BertModel"
with tempfile.TemporaryDirectory() as tempdir:
save_path = f"{tempdir}/neff"
neff_path = os.path.join(save_path, model_id.split("/")[-1], "graph.neff")
if is_neuronx_available():
neff_path = os.path.join(save_path, model_id.split("/")[-1], "graph.neff")
else:
neff_path = os.path.join(save_path, model_id.split("/")[-1], "32", "neff.json")
subprocess.run(
[
"optimum-cli",
Expand Down
1 change: 0 additions & 1 deletion tests/exporters/test_export.py
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,6 @@ def test_export(self, test_name, name, model_name, task, neuron_config_construct
_get_models_to_test(EXPORT_MODELS_TINY, exclude_model_types=WEIGHTS_NEFF_SEPARATION_UNSUPPORTED_ARCH)
)
@is_inferentia_test
@requires_neuronx
def test_export_separated_weights(self, test_name, name, model_name, task, neuron_config_constructor):
self._neuronx_export(
test_name, name, model_name, task, neuron_config_constructor, inline_weights_to_neff=False
Expand Down

0 comments on commit 2c457fc

Please sign in to comment.