Skip to content

Commit

Permalink
align with core changes
Browse files — browse the repository at this point in the history
  • Loading branch information
avishniakov committed Jan 22, 2024
1 parent 6dbf10f commit 3897c2e
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 8 deletions.
8 changes: 5 additions & 3 deletions template/pipelines/training.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@
from uuid import UUID

from steps import model_evaluator, model_promoter, model_trainer
from zenml import ExternalArtifact, pipeline
from zenml import pipeline
from zenml.client import Client
from zenml.logger import get_logger

from pipelines import (
Expand Down Expand Up @@ -43,8 +44,9 @@ def training(
if train_dataset_id is None or test_dataset_id is None:
dataset_trn, dataset_tst = feature_engineering()
else:
dataset_trn = ExternalArtifact(id=train_dataset_id)
dataset_tst = ExternalArtifact(id=test_dataset_id)
client = Client()
dataset_trn = client.get_artifact_version(name_id_or_prefix=train_dataset_id)
dataset_tst = client.get_artifact_version(name_id_or_prefix=test_dataset_id)

model = model_trainer(dataset_trn=dataset_trn, target=target, model_type=model_type)

Expand Down
10 changes: 5 additions & 5 deletions template/quickstart.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@
"\n",
"import random\n",
"import pandas as pd\n",
"from zenml import step, ExternalArtifact, pipeline, Model, get_step_context\n",
"from zenml import step, pipeline, Model, get_step_context\n",
"from zenml.client import Client\n",
"from zenml.logger import get_logger\n",
"from uuid import UUID\n",
Expand Down Expand Up @@ -602,7 +602,7 @@
"metadata": {},
"source": [
"ZenML allows you to load any version of any dataset that is tracked by the framework\n",
"directly into a pipeline using the `ExternalArtifact` interface. This is very convenient\n",
"directly into a pipeline using the `Client().get_artifact_version` interface. This is very convenient\n",
"in this case, as we'd like to send our preprocessed dataset from the older pipeline directly\n",
"into the training pipeline."
]
Expand All @@ -628,8 +628,8 @@
" dataset_trn, dataset_tst = feature_engineering()\n",
" else:\n",
" # Load the datasets from an older pipeline\n",
" dataset_trn = ExternalArtifact(id=train_dataset_id)\n",
" dataset_tst = ExternalArtifact(id=test_dataset_id) \n",
" dataset_trn = client.get_artifact_version(id=train_dataset_id)\n",
" dataset_tst = client.get_artifact_version(id=test_dataset_id) \n",
"\n",
" trained_model = model_trainer(\n",
" dataset_trn=dataset_trn,\n",
Expand Down Expand Up @@ -981,7 +981,7 @@
" df_inference = inference_preprocessor(\n",
" dataset_inf=df_inference,\n",
" # We use the preprocess pipeline from the feature engineering pipeline\n",
" preprocess_pipeline=ExternalArtifact(id=preprocess_pipeline_id),\n",
" preprocess_pipeline=client.get_artifact_version(id=preprocess_pipeline_id),\n",
" target=target,\n",
" )\n",
" inference_predict(\n",
Expand Down

0 comments on commit 3897c2e

Please sign in to comment.