Skip to content

Commit

Permalink
e2e-test for %load_node magic (#3528)
Browse files Browse the repository at this point in the history
* e2e draft

Signed-off-by: Nok <[email protected]>

* Fix name collision

Signed-off-by: Nok Lam Chan <[email protected]>

* Add e2e test

Signed-off-by: Nok Lam Chan <[email protected]>

* protect script under __name__

Signed-off-by: Nok Lam Chan <[email protected]>

* Fix test

Signed-off-by: Nok Lam Chan <[email protected]>

* remove test notebook

Signed-off-by: Nok Lam Chan <[email protected]>

* remove debug statement

Signed-off-by: Nok Lam Chan <[email protected]>

* revert changes

Signed-off-by: Nok Lam Chan <[email protected]>

* fix test

Signed-off-by: Nok Lam Chan <[email protected]>

* clean up notebook e2etest

Signed-off-by: Nok <[email protected]>

* fix test after node rename

Signed-off-by: Nok <[email protected]>

---------

Signed-off-by: Nok <[email protected]>
Signed-off-by: Nok Lam Chan <[email protected]>
  • Loading branch information
noklam authored Jul 10, 2024
1 parent ff4bbb5 commit 12f0986
Show file tree
Hide file tree
Showing 7 changed files with 51 additions and 4 deletions.
9 changes: 9 additions & 0 deletions features/load_node.feature
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# E2E coverage for the %load_node IPython magic (see features/steps/cli_steps.py
# for the matching step implementations — step text below must stay in sync).
Feature: load_node in new project

  Background:
    Given I have prepared a config file
    And I have run a non-interactive kedro new with starter "default"

  Scenario: Execute ipython load_node magic
    # Runs ipython_script.py in the generated project; the node body prints a
    # sentinel line that the Then-step searches for in stdout.
    When I execute the load_node magic command
    Then the logs should show that load_node executed successfully
22 changes: 22 additions & 0 deletions features/steps/cli_steps.py
Original file line number Diff line number Diff line change
Expand Up @@ -302,6 +302,7 @@ def create_project_with_starter(context, starter):
env=context.env,
cwd=context.temp_dir,
)

assert res.returncode == OK_EXIT_CODE, res


Expand Down Expand Up @@ -561,6 +562,17 @@ def check_correct_nodes_run(context, node):
)


@then("the logs should show that load_node executed successfully")
def check_load_node_run(context):
    """Assert that the %load_node sentinel message appeared on stdout.

    The node executed by ``ipython_script.py`` prints this line; its presence
    in the (colour-code-stripped) logs proves the magic ran end to end.
    """
    success_marker = "load_node executed successfully"
    raw_stdout = context.result.stdout
    sanitised_logs = util.clean_up_log(raw_stdout)
    failure_detail = (
        "Expected the following message segment to be printed on stdout: "
        f"{success_marker},\nbut got {raw_stdout}"
    )
    assert success_marker in sanitised_logs, failure_detail


@then("I should get a successful exit code")
def check_status_code(context):
if context.result.returncode != OK_EXIT_CODE:
Expand Down Expand Up @@ -724,6 +736,16 @@ def add_micropkg_to_pyproject_toml(context: behave.runner.Context):
file.write(project_toml_str)


@given("I have executed the load_node magic command")
@when("I execute the load_node magic command")
def exec_magic_command(context):
    """Run the helper script that exercises the ``%load_node`` IPython magic.

    ``ipython_script.py`` lives in the generated project root and is executed
    with the test venv's Python so the magic resolves the correct project.
    The subprocess result is stored on ``context.result`` for later Then-steps.
    """
    cmd = [context.python, "ipython_script.py"]
    context.result = run(
        cmd, env=context.env, cwd=str(context.root_project_dir), print_output=True
    )


@given('I have changed the current working directory to "{dir}"')
def change_dir(context, dir):
"""Execute Kedro target."""
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
if __name__ == "__main__":
    # Guarded so importing this module (e.g. by test collection) has no side effects.
    from IPython.testing.globalipapp import get_ipython

    shell = get_ipython()
    shell.run_line_magic("load_ext", "kedro.ipython")
    # Assume cwd is project root
    shell.run_line_magic("reload_kedro", "")
    shell.run_line_magic("load_node", "split_data_node")
    # shell.rl_next_input is what you see in the terminal input
    shell.run_cell(shell.rl_next_input)
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ def split_data(data: pd.DataFrame, example_test_data_ratio: float) -> dict[str,
The data and the parameters will be loaded and provided to your function
automatically when the pipeline is executed and it is time to run this node.
"""
print("load_node executed successfully")
data.columns = [
"sepal_length",
"sepal_width",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ def create_pipeline(**kwargs):
test_x="example_test_x",
test_y="example_test_y",
),
name="split_data_node"
)
]
)
9 changes: 7 additions & 2 deletions kedro/ipython/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,11 @@ def _resolve_project_path(
if path:
project_path = Path(path).expanduser().resolve()
else:
if local_namespace and "context" in local_namespace:
if (
local_namespace
and local_namespace.get("context")
and hasattr(local_namespace["context"], "project_path")
):
project_path = local_namespace["context"].project_path
else:
project_path = _find_kedro_project(Path.cwd())
Expand All @@ -177,7 +181,8 @@ def _resolve_project_path(
if (
project_path
and local_namespace
and "context" in local_namespace
and local_namespace.get("context")
and hasattr(local_namespace["context"], "project_path") # Avoid name collision
and project_path != local_namespace["context"].project_path
):
logger.info("Updating path to Kedro project: %s...", project_path)
Expand Down
4 changes: 2 additions & 2 deletions tests/framework/cli/test_registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,13 @@ def yaml_dump_mock(mocker):
@pytest.fixture
def pipelines_dict():
    """Fixture mapping registry pipeline names to their rendered node labels.

    ``__default__`` mirrors the project template, where the default pipeline
    is the data-engineering nodes followed by the data-science nodes.
    """
    data_engineering = ["split_data_node (split_data)"]
    data_science = [
        "train_model (train_model)",
        "predict (predict)",
        "report_accuracy (report_accuracy)",
    ]
    return {
        "data_engineering": data_engineering,
        "data_science": data_science,
        "data_processing": ["data_processing.split_data_node (split_data)"],
        "__default__": data_engineering + data_science,
    }
Expand Down

0 comments on commit 12f0986

Please sign in to comment.