test_spark.py

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
"""Test Task Spark."""

from unittest.mock import patch

from pydolphinscheduler.tasks.spark import DeployMode, ProgramType, Spark, SparkVersion


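# Engine.get_resource_info is patched so the test never touches a live
# resource center; the mocked payload's ``id`` is what surfaces as
# ``mainJar.id`` in the task definition asserted below.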
@patch(
    "pydolphinscheduler.core.engine.Engine.get_resource_info",
    return_value=({"id": 1, "name": "test"}),
)
def test_spark_get_define(mock_resource):
    """Test task spark function get_define."""
    code = 123
    version = 1
    name = "test_spark_get_define"
    main_class = "org.apache.spark.test_main_class"
    main_package = "test_main_package"
    program_type = ProgramType.JAVA
    deploy_mode = DeployMode.LOCAL

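    # ``expect`` is the serialized definition the scheduler API consumes.
    # Keys not set through the constructor above (driver/executor sizing,
    # SparkVersion.SPARK2, priority, worker group, ...) assert the task's
    # built-in defaults.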
    expect = {
        "code": code,
        "name": name,
        "version": 1,
        "description": None,
        "delayTime": 0,
        "taskType": "SPARK",
        "taskParams": {
            "mainClass": main_class,
            "mainJar": {
                "id": 1,
            },
            "programType": program_type,
            "deployMode": deploy_mode,
            "sparkVersion": SparkVersion.SPARK2,
            "driverCores": 1,
            "driverMemory": "512M",
            "numExecutors": 2,
            "executorMemory": "2G",
            "executorCores": 2,
            "appName": None,
            "mainArgs": None,
            "others": None,
            "localParams": [],
            "resourceList": [],
            "dependence": {},
            "conditionResult": {"successNode": [""], "failedNode": [""]},
            "waitStartTimeout": {},
        },
        "flag": "YES",
        "taskPriority": "MEDIUM",
        "workerGroup": "default",
        "failRetryTimes": 0,
        "failRetryInterval": 1,
        "timeoutFlag": "CLOSE",
        "timeoutNotifyStrategy": None,
        "timeout": 0,
    }
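    # ``gen_code_and_version`` is patched as well, so no running scheduler
    # backend is needed and the generated code/version pair is deterministic.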
    with patch(
        "pydolphinscheduler.core.task.Task.gen_code_and_version",
        return_value=(code, version),
    ):
        task = Spark(name, main_class, main_package, program_type, deploy_mode)
        assert task.get_define() == expect
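
# A minimal sketch (not in the original file) of running just this test,
# assuming a conventional pytest layout -- the path below is illustrative:
#
#     python -m pytest tests/tasks/test_spark.py::test_spark_get_define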