optimization: JOB plugins strip leading and trailing whitespace from file path and similar inputs
hanshuaikang committed Oct 17, 2023
1 parent 097fbb5 commit 6d91be3
Showing 7 changed files with 49 additions and 57 deletions.
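
The substantive change in all seven files is the same: user-supplied values such as the target account and target path are passed through str.strip() before being packed into the JOB API call, so stray spaces pasted around paths or account names no longer reach the API (the remaining diff lines are import reordering and line-wrapping). A minimal, self-contained sketch of that pattern follows; it is illustrative only, not code from this commit, and FakeData is a hypothetical stand-in for the plugin framework's data object:

```python
# Illustrative sketch only, not code from this commit: it mimics the
# strip-on-read pattern the diff applies to inputs such as "job_account"
# and "job_target_path". Only get_one_of_inputs is assumed on the data object.

class FakeData:
    def __init__(self, inputs):
        self._inputs = inputs

    def get_one_of_inputs(self, key):
        # Assumed here to return the raw string the user entered.
        return self._inputs[key]


def build_target_fields(data):
    # Strip both ends so values pasted with surrounding spaces do not
    # leak into the JOB API payload (the core change of this commit).
    return {
        "account_alias": data.get_one_of_inputs("job_account").strip(),
        "file_target_path": data.get_one_of_inputs("job_target_path").strip(),
    }


if __name__ == "__main__":
    data = FakeData({"job_account": " root ", "job_target_path": " /data/upload/ \n"})
    print(build_target_fields(data))
    # {'account_alias': 'root', 'file_target_path': '/data/upload/'}
```
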
Changed file 1 of 7:
@@ -12,29 +12,20 @@
"""

import logging
from functools import partial
from copy import deepcopy
from functools import partial

from django.utils import translation
from django.utils.translation import ugettext_lazy as _

from pipeline.core.flow.io import (
StringItemSchema,
ArrayItemSchema,
ObjectItemSchema,
)
from pipeline.component_framework.component import Component
from pipeline.core.flow.io import ArrayItemSchema, ObjectItemSchema, StringItemSchema

from pipeline_plugins.components.collections.sites.open.job.ipv6_base import GetJobTargetServerMixin
from pipeline_plugins.components.utils import (
get_job_instance_url,
get_node_callback_url,
loose_strip,
)
from pipeline_plugins.components.collections.sites.open.job import JobService
from gcloud.conf import settings
from gcloud.constants import JobBizScopeType
from gcloud.utils.handlers import handle_api_error
from pipeline_plugins.components.collections.sites.open.job import JobService
from pipeline_plugins.components.collections.sites.open.job.ipv6_base import GetJobTargetServerMixin
from pipeline_plugins.components.utils import get_job_instance_url, get_node_callback_url, loose_strip

logger = logging.getLogger("celery")
__group_name__ = _("作业平台(JOB)")
@@ -134,8 +125,8 @@ def execute(self, data, parent_data):
"bk_biz_id": biz_cc_id,
"file_source_list": file_source,
"target_server": target_server,
"account_alias": data.get_one_of_inputs("job_account"),
"file_target_path": data.get_one_of_inputs("job_target_path"),
"account_alias": data.get_one_of_inputs("job_account").strip(),
"file_target_path": data.get_one_of_inputs("job_target_path").strip(),
"callback_url": get_node_callback_url(self.root_pipeline_id, self.id, getattr(self, "version", "")),
}
if job_timeout:
Changed file 2 of 7:
@@ -65,7 +65,10 @@ def inputs_format(self):
schema=StringItemSchema(description=_("文件分发目标机器 IP,多个用英文逗号 `,` 分隔")),
),
self.InputItem(
name=_("目标账户"), key="job_account", type="string", schema=StringItemSchema(description=_("文件分发目标机器账户")),
name=_("目标账户"),
key="job_account",
type="string",
schema=StringItemSchema(description=_("文件分发目标机器账户")),
),
self.InputItem(
name=_("目标路径"),
@@ -127,8 +130,8 @@ def execute(self, data, parent_data):
"bk_biz_id": biz_cc_id,
"file_source_list": file_source,
"target_server": target_server,
"account_alias": data.get_one_of_inputs("job_account"),
"file_target_path": data.get_one_of_inputs("job_target_path"),
"account_alias": data.get_one_of_inputs("job_account").strip(),
"file_target_path": data.get_one_of_inputs("job_target_path").strip(),
"callback_url": get_node_callback_url(self.root_pipeline_id, self.id, getattr(self, "version", "")),
}

Changed file 3 of 7:
@@ -60,7 +60,10 @@ def inputs_format(self):
),
),
self.InputItem(
name=_("上传限速"), key="upload_speed_limit", type="string", schema=StringItemSchema(description=_("MB/s")),
name=_("上传限速"),
key="upload_speed_limit",
type="string",
schema=StringItemSchema(description=_("MB/s")),
),
self.InputItem(
name=_("下载限速"),
@@ -75,7 +78,10 @@ def inputs_format(self):
schema=StringItemSchema(description=_("文件分发目标机器 IP,多个用英文逗号 `,` 分隔")),
),
self.InputItem(
name=_("目标账户"), key="job_account", type="string", schema=StringItemSchema(description=_("文件分发目标机器账户")),
name=_("目标账户"),
key="job_account",
type="string",
schema=StringItemSchema(description=_("文件分发目标机器账户")),
),
self.InputItem(
name=_("目标路径"),
@@ -153,8 +159,8 @@ def execute(self, data, parent_data):
"bk_biz_id": biz_cc_id,
"file_source_list": [source],
"target_server": target_server,
"account_alias": attr["job_account"],
"file_target_path": attr["job_target_path"],
"account_alias": attr["job_account"].strip(),
"file_target_path": attr["job_target_path"].strip(),
}
if upload_speed_limit:
job_kwargs["upload_speed_limit"] = int(upload_speed_limit)
Changed file 4 of 7:
@@ -11,27 +11,21 @@
specific language governing permissions and limitations under the License.
"""

from functools import partial
from copy import deepcopy
from functools import partial

from django.utils import translation
from django.utils.translation import ugettext_lazy as _

from pipeline.core.flow.io import StringItemSchema, ArrayItemSchema, ObjectItemSchema, BooleanItemSchema
from pipeline.component_framework.component import Component

from pipeline_plugins.components.collections.sites.open.job.ipv6_base import GetJobTargetServerMixin
from pipeline_plugins.components.collections.sites.open.job.base import JobScheduleService
from pipeline_plugins.components.utils.common import batch_execute_func
from pipeline_plugins.components.utils import (
get_job_instance_url,
loose_strip,
chunk_table_data,
)
from pipeline.core.flow.io import ArrayItemSchema, BooleanItemSchema, ObjectItemSchema, StringItemSchema

from gcloud.conf import settings
from gcloud.constants import JobBizScopeType
from gcloud.utils.handlers import handle_api_error
from pipeline_plugins.components.collections.sites.open.job.base import JobScheduleService
from pipeline_plugins.components.collections.sites.open.job.ipv6_base import GetJobTargetServerMixin
from pipeline_plugins.components.utils import chunk_table_data, get_job_instance_url, loose_strip
from pipeline_plugins.components.utils.common import batch_execute_func

__group_name__ = _("作业平台(JOB)")

@@ -186,8 +180,8 @@ def execute(self, data, parent_data):
"bk_biz_id": biz_cc_id,
"file_source_list": file_source,
"target_server": target_server,
"account_alias": attr["job_account"],
"file_target_path": attr["job_target_path"],
"account_alias": attr["job_account"].strip(),
"file_target_path": attr["job_target_path"].strip(),
}
if upload_speed_limit:
job_kwargs["upload_speed_limit"] = int(upload_speed_limit)
Changed file 5 of 7:
@@ -15,18 +15,17 @@
from functools import partial

from django.utils.translation import ugettext_lazy as _

from pipeline.core.flow.io import StringItemSchema, ArrayItemSchema, ObjectItemSchema
from pipeline.core.flow.activity import StaticIntervalGenerator
from pipeline.core.flow.io import ArrayItemSchema, ObjectItemSchema, StringItemSchema

from pipeline_plugins.components.collections.sites.open.job.ipv6_base import GetJobTargetServerMixin
from pipeline_plugins.components.collections.sites.open.job.base import JobScheduleService
from pipeline_plugins.components.utils.common import batch_execute_func
from pipeline_plugins.components.utils import get_job_instance_url
from files.factory import ManagerFactory
from gcloud.conf import settings
from gcloud.utils.handlers import handle_api_error
from gcloud.core.models import EnvironmentVariables
from gcloud.utils.handlers import handle_api_error
from pipeline_plugins.components.collections.sites.open.job.base import JobScheduleService
from pipeline_plugins.components.collections.sites.open.job.ipv6_base import GetJobTargetServerMixin
from pipeline_plugins.components.utils import get_job_instance_url
from pipeline_plugins.components.utils.common import batch_execute_func

__group_name__ = _("作业平台(JOB)")

@@ -144,9 +143,9 @@ def get_params_list(self, client, data, target_server, local_files_and_target_pa
for _file in push_files_info["file_info"]
if _file["response"]["result"] is True
],
"target_path": push_files_info["target_path"],
"target_path": push_files_info["target_path"].strip(),
"ips": None,
"account": target_account,
"account": target_account.strip(),
"target_server": target_server,
}
for push_files_info in local_files_and_target_path
Changed file 6 of 7:
@@ -15,15 +15,15 @@
from functools import partial

from django.utils.translation import ugettext_lazy as _

from pipeline.component_framework.component import Component
from pipeline_plugins.components.collections.sites.open.job import JobService
from pipeline_plugins.components.collections.sites.open.job.ipv6_base import GetJobTargetServerMixin
from pipeline_plugins.components.utils import get_job_instance_url, get_node_callback_url

from files.factory import ManagerFactory
from gcloud.conf import settings
from gcloud.utils.handlers import handle_api_error
from gcloud.core.models import EnvironmentVariables
from gcloud.utils.handlers import handle_api_error
from pipeline_plugins.components.collections.sites.open.job import JobService
from pipeline_plugins.components.collections.sites.open.job.ipv6_base import GetJobTargetServerMixin
from pipeline_plugins.components.utils import get_job_instance_url, get_node_callback_url

__group_name__ = _("作业平台(JOB)")

@@ -82,9 +82,9 @@ def execute(self, data, parent_data):
esb_client=client,
bk_biz_id=biz_cc_id,
file_tags=file_tags,
target_path=target_path,
target_path=target_path.strip(),
ips=None,
account=target_account,
account=target_account.strip(),
callback_url=get_node_callback_url(self.root_pipeline_id, self.id, getattr(self, "version", "")),
target_server=target_server,
)
Changed file 7 of 7:
@@ -12,14 +12,13 @@
"""

from django.utils.translation import ugettext_lazy as _

from pipeline.component_framework.component import Component
from pipeline.core.flow.io import BooleanItemSchema, StringItemSchema

from gcloud.conf import settings
from pipeline_plugins.components.collections.sites.open.job.push_local_files.base_service import (
BaseJobPushLocalFilesService,
)
from gcloud.conf import settings

__group_name__ = _("作业平台(JOB)")

@@ -77,10 +76,10 @@ def get_params_list(self, client, data, target_server, local_files_and_target_pa
for _file in push_files_info["file_info"]
if _file["response"]["result"] is True
],
"target_path": push_files_info["target_path"],
"target_path": push_files_info["target_path"].strip(),
"ips": None,
"target_server": target_server,
"account": target_account,
"account": target_account.strip(),
"rolling_config": rolling_config,
}
for push_files_info in local_files_and_target_path
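
As a quick check of the behavior these changes rely on, this is plain Python str.strip() with no plugin code involved:

```python
# Leading and trailing whitespace pasted around paths or account names is
# removed before the value reaches the JOB API payload.
assert "  /data/upload/  ".strip() == "/data/upload/"
assert "\troot \n".strip() == "root"
```
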