#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
import functools

import numpy as np

from federatedml.protobuf.generated.feature_scale_meta_pb2 import ScaleMeta
from federatedml.protobuf.generated.feature_scale_param_pb2 import ScaleParam
from federatedml.protobuf.generated.feature_scale_param_pb2 import ColumnScaleParam
from federatedml.feature.feature_scale.base_scale import BaseScale


class MinMaxScale(BaseScale):
    """
    Transforms features by scaling each feature to a given range, e.g. between the minimum and maximum.
    The transformation is given by:
        X_scale = (X - X.min) / (X.max - X.min),
    where X.min is the minimum value of the feature and X.max is the maximum value.
    """

    def __init__(self, params):
        super().__init__(params)
        self.mode = params.mode
        self.column_range = None

    @staticmethod
    def __scale(data, max_value_list, min_value_list, scale_value_list, process_cols_list):
        """
        Scale operator applied to each selected column. The input data type is data_instance.
        """
        features = np.array(data.features, dtype=float)
        for i in process_cols_list:
            # Clip the value to the fitted [min, max] range before scaling
            value = features[i]
            if value > max_value_list[i]:
                value = max_value_list[i]
            elif value < min_value_list[i]:
                value = min_value_list[i]

            features[i] = (value - min_value_list[i]) / scale_value_list[i]

        _data = copy.deepcopy(data)
        _data.features = features

        return _data
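
    # For a single row with features [1.0, 8.0], fitted mins [2.0, 0.0], maxes [6.0, 10.0],
    # ranges [4.0, 10.0] and process_cols_list [0, 1], the result is [0.0, 0.8]:
    # 1.0 is clipped up to 2.0 -> (2.0 - 2.0) / 4.0 = 0.0, and (8.0 - 0.0) / 10.0 = 0.8.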

    def fit(self, data):
        """
        Apply min-max scaling to the input data

        Parameters
        ----------
        data: data_instance, input data

        Returns
        ----------
        fit_data: data_instance, data after scaling
        """
        self.column_min_value, self.column_max_value = self._get_min_max_value(data)
        self.scale_column_idx = self._get_scale_column_idx(data)
        self.header = self._get_header(data)

        self.column_range = []
        for i in range(len(self.column_max_value)):
            scale = self.column_max_value[i] - self.column_min_value[i]
            if scale < 0:
                raise ValueError("scale value should not be less than 0")
            elif np.abs(scale - 0) < 1e-6:
                # A constant column has zero range; use 1 to avoid division by zero
                scale = 1
            self.column_range.append(scale)

        f = functools.partial(MinMaxScale.__scale,
                              max_value_list=self.column_max_value,
                              min_value_list=self.column_min_value,
                              scale_value_list=self.column_range,
                              process_cols_list=self.scale_column_idx)
        fit_data = data.mapValues(f)

        return fit_data

    def transform(self, data):
        """
        Transform the input data with min-max scaling, using the fitted results

        Parameters
        ----------
        data: data_instance, input data

        Returns
        ----------
        transform_data: data_instance, data after transform
        """
        self.column_range = []
        for i in range(len(self.column_max_value)):
            scale = self.column_max_value[i] - self.column_min_value[i]
            if scale < 0:
                raise ValueError("scale value should not be less than 0")
            elif np.abs(scale - 0) < 1e-6:
                scale = 1
            self.column_range.append(scale)

        f = functools.partial(MinMaxScale.__scale,
                              max_value_list=self.column_max_value,
                              min_value_list=self.column_min_value,
                              scale_value_list=self.column_range,
                              process_cols_list=self.scale_column_idx)
        transform_data = data.mapValues(f)

        return transform_data

    def _get_meta(self, need_run):
        if self.header:
            scale_column = [self.header[i] for i in self.scale_column_idx]
        else:
            scale_column = ["_".join(["col", str(i)]) for i in self.scale_column_idx]

        if not self.data_shape:
            self.data_shape = -1

        meta_proto_obj = ScaleMeta(method="min_max_scale",
                                   mode=self.mode,
                                   area="null",
                                   scale_column=scale_column,
                                   feat_upper=self._get_upper(self.data_shape),
                                   feat_lower=self._get_lower(self.data_shape),
                                   need_run=need_run)
        return meta_proto_obj

    def _get_param(self):
        min_max_scale_param_dict = {}
        if self.header:
            scale_column_idx_set = set(self.scale_column_idx)
            for i, header in enumerate(self.header):
                if i in scale_column_idx_set:
                    param_obj = ColumnScaleParam(column_upper=self.column_max_value[i],
                                                 column_lower=self.column_min_value[i])
                    min_max_scale_param_dict[header] = param_obj

        param_proto_obj = ScaleParam(col_scale_param=min_max_scale_param_dict,
                                     header=self.header)
        return param_proto_obj
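

# The sketch below is an illustrative, self-contained restatement of the fit arithmetic
# implemented above, using plain NumPy arrays instead of FATE data_instance objects and
# DTable.mapValues. The array values and column layout are assumptions made up for
# demonstration only; they are not part of this module's API.
if __name__ == "__main__":
    X = np.array([[2.0, 10.0],
                  [4.0, 10.0],
                  [6.0, 10.0]])
    col_min = X.min(axis=0)    # per-column minimum, as _get_min_max_value would provide
    col_max = X.max(axis=0)    # per-column maximum
    # Zero range (constant column) is replaced by 1, mirroring the check in fit()
    col_range = np.where(col_max - col_min < 1e-6, 1.0, col_max - col_min)
    clipped = np.clip(X, col_min, col_max)    # out-of-range values are clipped, as in __scale
    X_scaled = (clipped - col_min) / col_range
    print(X_scaled)    # column 0 -> [0.0, 0.5, 1.0]; the constant column stays 0.0 throughout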