Add DAG repo to airflow submission response messages #2103

Merged 1 commit on Sep 3, 2021
24 changes: 16 additions & 8 deletions elyra/cli/pipeline_app.py
@@ -32,6 +32,7 @@
 from elyra.metadata.schema import SchemaManager
 from elyra.pipeline.parser import PipelineParser
 from elyra.pipeline.processor import PipelineProcessorManager
+from elyra.pipeline.processor import PipelineProcessorResponse
 from elyra.pipeline.validation import PipelineValidationManager
 from elyra.pipeline.validation import ValidationSeverity

@@ -192,7 +193,7 @@ def _validate_pipeline_definition(pipeline_definition):
         raise click.ClickException("Pipeline validation FAILED. The pipeline was not submitted for execution.")


-def _execute_pipeline(pipeline_definition):
+def _execute_pipeline(pipeline_definition) -> PipelineProcessorResponse:
     try:
         # parse pipeline
         pipeline_object = PipelineParser().parse(pipeline_definition)

@@ -265,15 +266,22 @@ def submit(pipeline_path, runtime_config):
     _validate_pipeline_definition(pipeline_definition)

     with yaspin(text="Submitting pipeline..."):
-        response = _execute_pipeline(pipeline_definition)
+        response: PipelineProcessorResponse = _execute_pipeline(pipeline_definition)

     if response:
-        print_info("Job submission succeeded",
-                   [
-                       f"Check the status of your job at: {response._run_url}",
-                       f"The results and outputs are in the {response._object_storage_path} ",
-                       f"working directory in {response._object_storage_url}"
-                   ])
+        msg = []
+        # If there's a git_url attr, assume Apache Airflow DAG repo.
+        # TODO: this will need to be revisited once front-end is decoupled from runtime platforms.
+        if hasattr(response, 'git_url'):
+            msg.append(f"Apache Airflow DAG has been pushed to: {response.git_url}")
+        msg.extend(
+            [
+                f"Check the status of your job at: {response.run_url}",
+                f"The results and outputs are in the {response.object_storage_path} ",
+                f"working directory in {response.object_storage_url}"
+            ]
+        )
+        print_info("Job submission succeeded", msg)

     click.echo()
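
For context, here is a minimal, self-contained sketch of why the hasattr(response, 'git_url') check is enough to tell the Airflow case apart from other runtimes: only an Airflow-style response object carries a git_url attribute, so other responses simply fall through to the run-URL and object-storage lines. The DemoProcessorResponse / DemoAirflowResponse classes and the example URLs below are hypothetical stand-ins, not Elyra's actual response classes.

# Sketch only: demo stand-ins for the runtime-specific response objects
# (not Elyra's real classes). build_submission_messages() mirrors the logic
# added in this PR and only emits the DAG-repo line when the response
# actually exposes a git_url attribute.

class DemoProcessorResponse:
    """Stand-in for elyra.pipeline.processor.PipelineProcessorResponse."""
    def __init__(self, run_url, object_storage_url, object_storage_path):
        self.run_url = run_url
        self.object_storage_url = object_storage_url
        self.object_storage_path = object_storage_path


class DemoAirflowResponse(DemoProcessorResponse):
    """Airflow-flavoured stand-in: additionally carries the DAG repository URL."""
    def __init__(self, git_url, **kwargs):
        super().__init__(**kwargs)
        self.git_url = git_url


def build_submission_messages(response):
    """Build the message list printed after a successful submission."""
    msg = []
    # Only Airflow-style responses have a git_url, so this line is skipped
    # for other runtimes (e.g. a Kubeflow Pipelines response).
    if hasattr(response, 'git_url'):
        msg.append(f"Apache Airflow DAG has been pushed to: {response.git_url}")
    msg.extend([
        f"Check the status of your job at: {response.run_url}",
        f"The results and outputs are in the {response.object_storage_path} "
        f"working directory in {response.object_storage_url}"
    ])
    return msg


# Hypothetical example values, for illustration only.
airflow_response = DemoAirflowResponse(
    git_url="https://github.com/example-org/dags/tree/main",
    run_url="https://airflow.example.com/home",
    object_storage_url="http://minio.example.com:9000",
    object_storage_path="/pipelines/demo-0401",
)
print("\n".join(build_submission_messages(airflow_response)))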