import argparse
import json

from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component import Evaluation
from pipeline.component import HeteroSSHELR
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.utils.tools import load_job_config


def prettify(response, verbose=True):
    if verbose:
        print(json.dumps(response, indent=4, ensure_ascii=False))
        print()
    return response


def main(config="../../config.yaml", namespace=""):
    # load job configuration (party ids, etc.)
    if isinstance(config, str):
        config = load_job_config(config)
    parties = config.parties
    guest = parties.guest[0]
    hosts = parties.host[0]

    guest_train_data = {"name": "vehicle_scale_hetero_guest", "namespace": f"experiment{namespace}"}
    host_train_data = {"name": "vehicle_scale_hetero_host", "namespace": f"experiment{namespace}"}

    # initialize pipeline
    pipeline = PipeLine()
    # set job initiator
    pipeline.set_initiator(role='guest', party_id=guest)
    # set participants
    pipeline.set_roles(guest=guest, host=hosts)

    # define Reader component to load the uploaded tables
    reader_0 = Reader(name="reader_0")
    # configure Reader for guest
    reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
    # configure Reader for host
    reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)

    # define DataTransform component to parse raw data into dense instances
    data_transform_0 = DataTransform(name="data_transform_0", output_format='dense')
    # get DataTransform party instance of guest
    data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
    # guest data carries labels
    data_transform_0_guest_party_instance.component_param(with_label=True)
    # host data carries features only
    data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)

    # define Intersection component
    intersection_0 = Intersection(name="intersection_0")

    # wire up the preprocessing components
    pipeline.add_component(reader_0)
    pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
    pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
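
    # Note: the Intersection step performs a private set intersection (PSI) on
    # sample IDs, so downstream training only uses records held by both the
    # guest and the host.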

    lr_param = {
        "name": "hetero_sshe_lr_0",
        "penalty": "L2",
        "optimizer": "adam",
        "tol": 0.0001,
        "alpha": 0.001,
        "max_iter": 30,
        "early_stop": "diff",
        "batch_size": -1,
        "learning_rate": 0.15,
        "init_param": {
            "init_method": "zeros",
            "fit_intercept": False
        },
        "encrypt_param": {
            "key_length": 1024
        },
        "reveal_every_iter": True,
        "reveal_strategy": "respectively"
    }
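
    # A few notes on the parameters above, as described in FATE's HeteroSSHELR docs:
    # - "encrypt_param.key_length" is the key size for the homomorphic encryption
    #   used alongside secret sharing in the SSHE protocol.
    # - "reveal_every_iter": True reconstructs the secret-shared weights in
    #   plaintext at every iteration; "reveal_strategy": "respectively" means each
    #   party only learns the weights belonging to its own features.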

    # define HeteroSSHELR component and train it on the intersected data
    hetero_sshe_lr_0 = HeteroSSHELR(**lr_param)
    pipeline.add_component(hetero_sshe_lr_0, data=Data(train_data=intersection_0.output.data))

    # evaluate predictions as a multi-class task
    evaluation_0 = Evaluation(name="evaluation_0", eval_type="multi")
    pipeline.add_component(evaluation_0, data=Data(data=hetero_sshe_lr_0.output.data))

    # compile the pipeline once all components are added, then run the training job
    pipeline.compile()
    pipeline.fit()

    # query and print component summaries
    prettify(pipeline.get_component("hetero_sshe_lr_0").get_summary())
    prettify(pipeline.get_component("evaluation_0").get_summary())

    # deploy the components needed for prediction
    pipeline.deploy_component([data_transform_0, intersection_0, hetero_sshe_lr_0])

    # build a predict pipeline: reuse reader_0 and feed its output into the
    # deployed training components
    predict_pipeline = PipeLine()
    predict_pipeline.add_component(reader_0)
    predict_pipeline.add_component(
        pipeline, data=Data(
            predict_input={
                pipeline.data_transform_0.input.data: reader_0.output.data}))

    # run prediction with the trained model
    predict_pipeline.predict()

    return pipeline


if __name__ == "__main__":
    parser = argparse.ArgumentParser("PIPELINE DEMO")
    parser.add_argument("-config", type=str,
                        help="config file")
    args = parser.parse_args()
    if args.config is not None:
        main(args.config)
    else:
        main()
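
# Example invocation (a sketch: it assumes a deployed FATE cluster with the
# vehicle_scale_hetero_guest/host datasets already uploaded, and the script
# filename below is a placeholder for wherever this file lives):
#   python hetero_sshe_lr_multi_class.py -config ../../config.yaml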