-
Notifications
You must be signed in to change notification settings - Fork 1
feat: APIs for stack jobs #383
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: develop
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||||
|---|---|---|---|---|---|---|
|
|
@@ -15,7 +15,7 @@ | |||||
| from extensions.business.mixins.node_tags_mixin import _NodeTagsMixin | ||||||
| from extensions.business.mixins.request_tracking_mixin import _RequestTrackingMixin | ||||||
| from .deeploy_const import ( | ||||||
| DEEPLOY_CREATE_REQUEST, DEEPLOY_CREATE_REQUEST_MULTI_PLUGIN, DEEPLOY_GET_APPS_REQUEST, DEEPLOY_DELETE_REQUEST, | ||||||
| DEEPLOY_CREATE_REQUEST, DEEPLOY_CREATE_REQUEST_MULTI_PLUGIN, DEEPLOY_CREATE_BATCH_REQUEST, DEEPLOY_GET_APPS_REQUEST, DEEPLOY_DELETE_REQUEST, | ||||||
| DEEPLOY_ERRORS, DEEPLOY_KEYS, DEEPLOY_SCALE_UP_JOB_WORKERS_REQUEST, DEEPLOY_STATUS, DEEPLOY_INSTANCE_COMMAND_REQUEST, | ||||||
| DEEPLOY_APP_COMMAND_REQUEST, DEEPLOY_GET_ORACLE_JOB_DETAILS_REQUEST, DEEPLOY_GET_R1FS_JOB_PIPELINE_REQUEST, | ||||||
| DEEPLOY_PLUGIN_DATA, JOB_APP_TYPES, JOB_APP_TYPES_ALL, | ||||||
|
|
@@ -521,6 +521,7 @@ def _process_pipeline_request( | |||||
| deeploy_specs_payload = self._ensure_deeploy_specs_job_config( | ||||||
| deeploy_specs_payload, | ||||||
| pipeline_params=pipeline_params, | ||||||
| stack_job_config=inputs.get(DEEPLOY_KEYS.STACK_JOB_CONFIG), | ||||||
| ) | ||||||
|
|
||||||
| dct_status, str_status, response_keys, pipeline_to_persist = self.check_and_deploy_pipelines( | ||||||
|
|
@@ -1032,6 +1033,76 @@ def create_pipeline( | |||||
| return self._register_pending_deploy_request(result['__pending__']) | ||||||
| return result | ||||||
|
|
||||||
| @BasePlugin.endpoint(method="post") | ||||||
| # /create_pipelines_batch | ||||||
| def create_pipelines_batch( | ||||||
| self, | ||||||
| request: dict = DEEPLOY_CREATE_BATCH_REQUEST | ||||||
| ): | ||||||
| """ | ||||||
| Create multiple pipelines in one API call. | ||||||
|
|
||||||
| Expects `request.requests` to be a list of normal create_pipeline payloads. | ||||||
| Returns per-item results plus an aggregate status: | ||||||
| - success: all items succeeded/command_delivered | ||||||
| - fail: all items failed/timeout/error | ||||||
| - partial: mixed outcomes | ||||||
| """ | ||||||
| self.Pd(f"Called Deeploy create_pipelines_batch endpoint") | ||||||
|
|
||||||
| try: | ||||||
| requests = request.get(DEEPLOY_KEYS.REQUESTS, []) | ||||||
| if not isinstance(requests, list) or len(requests) == 0: | ||||||
| raise ValueError(f"{DEEPLOY_ERRORS.REQUEST3}: '{DEEPLOY_KEYS.REQUESTS}' must be a non-empty list.") | ||||||
|
|
||||||
|
Comment on lines
+1054
to
+1057
|
||||||
| results = [] | ||||||
| statuses = [] | ||||||
|
|
||||||
| for idx, request_item in enumerate(requests): | ||||||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. IMO, this is a no-go. |
||||||
| if not isinstance(request_item, dict): | ||||||
| item_result = { | ||||||
| DEEPLOY_KEYS.STATUS: DEEPLOY_STATUS.FAIL, | ||||||
| DEEPLOY_KEYS.ERROR: f"{DEEPLOY_ERRORS.REQUEST3}: request[{idx}] must be an object.", | ||||||
| } | ||||||
|
Comment on lines
+1054
to
+1066
|
||||||
| else: | ||||||
| item_result = self._process_pipeline_request( | ||||||
| request=request_item, | ||||||
| is_create=True, | ||||||
| async_mode=False, | ||||||
|
||||||
| async_mode=False, | |
| async_mode=True, |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I don't think we would like to have the stack jobs in multiple pipelines.
The main idea was to have multiple CARs in the same pipeline and to bill it as the sum of the per-CAR prices (not as a native job, as we are doing now).
For such apps it's really important to keep the CARs in the same pipeline, so they can share the same semaphoring mechanism and ENV variable injection.