pipeline-homo-sbt-binary-with-predict.py

#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse

from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.homo_secureboost import HomoSecureBoost
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.component.evaluation import Evaluation
from pipeline.interface.model import Model
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
    # obtain config
    if isinstance(config, str):
        config = load_job_config(config)
    parties = config.parties
    guest = parties.guest[0]
    host = parties.host[0]
    arbiter = parties.arbiter[0]
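
    # training and validation tables for each party (assumed to have been uploaded to FATE storage beforehand)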
    guest_train_data = {"name": "breast_homo_guest", "namespace": f"experiment{namespace}"}
    guest_validate_data = {"name": "breast_homo_test", "namespace": f"experiment{namespace}"}
    host_train_data = {"name": "breast_homo_host", "namespace": f"experiment{namespace}"}
    host_validate_data = {"name": "breast_homo_test", "namespace": f"experiment{namespace}"}
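
    # build the training pipeline: guest initiates the job; guest, host and arbiter all take part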
    pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host,
                                                                                arbiter=arbiter)
    data_transform_0, data_transform_1 = DataTransform(name="data_transform_0"), DataTransform(name='data_transform_1')
    reader_0, reader_1 = Reader(name="reader_0"), Reader(name='reader_1')
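
    # point reader_0 at each party's training table; data_transform_0 parses it as dense, labelled data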
    reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
    reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
    data_transform_0.get_party_instance(role='guest', party_id=guest).component_param(
        with_label=True, output_format="dense")
    data_transform_0.get_party_instance(role='host', party_id=host).component_param(
        with_label=True, output_format="dense")

    reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_validate_data)
    reader_1.get_party_instance(role='host', party_id=host).component_param(table=host_validate_data)
    data_transform_1.get_party_instance(role='guest', party_id=guest).component_param(
        with_label=True, output_format="dense")
    data_transform_1.get_party_instance(role='host', party_id=host).component_param(
        with_label=True, output_format="dense")
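
    # homogeneous SecureBoost: 3 boosting rounds of depth-3 trees with a cross-entropy objective,
    # evaluated against the validation set every round (validation_freqs=1)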
    homo_secureboost_0 = HomoSecureBoost(name="homo_secureboost_0",
                                         num_trees=3,
                                         task_type='classification',
                                         objective_param={"objective": "cross_entropy"},
                                         tree_param={"max_depth": 3},
                                         validation_freqs=1)
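
    # evaluation component computes binary-classification metrics on the secureboost output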
    evaluation_0 = Evaluation(name='evaluation_0', eval_type='binary')

    pipeline.add_component(reader_0)
    pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
    pipeline.add_component(reader_1)
    pipeline.add_component(data_transform_1, data=Data(data=reader_1.output.data),
                           model=Model(data_transform_0.output.model))
    pipeline.add_component(homo_secureboost_0, data=Data(train_data=data_transform_0.output.data,
                                                         validate_data=data_transform_1.output.data))
    pipeline.add_component(evaluation_0, data=Data(homo_secureboost_0.output.data))

    pipeline.compile()
    pipeline.fit()

    # predict
    # deploy required components
    pipeline.deploy_component([data_transform_0, homo_secureboost_0])
    predict_pipeline = PipeLine()
    # add data reader onto predict pipeline
    predict_pipeline.add_component(reader_1)
    # add selected components from train pipeline onto predict pipeline
    # specify data source
    predict_pipeline.add_component(pipeline,
                                   data=Data(predict_input={pipeline.data_transform_0.input.data: reader_1.output.data}))
    # run predict model
    predict_pipeline.predict()
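
# example invocation (assumes the party configuration lives at ../../config.yaml):
#   python pipeline-homo-sbt-binary-with-predict.py -config ../../config.yaml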

if __name__ == "__main__":
    parser = argparse.ArgumentParser("PIPELINE DEMO")
    parser.add_argument("-config", type=str,
                        help="config file")
    args = parser.parse_args()
    if args.config is not None:
        main(args.config)
    else:
        main()