#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import os
import time
import unittest

import numpy as np
import pandas as pd

from federatedml.util import fate_operator


def go_fast(a):  # Function is compiled and runs in machine code
    sum = 0
    for j in range(100000):
        trace = 0
        for i in range(a.shape[0]):
            trace += np.tanh(a[i, i])
        sum += trace
    print(sum)
    return sum
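

# A minimal, hedged sketch (not part of the original test) of the JIT-compiled
# variant that the "with jit" / "without jit" timing comments below refer to.
# It assumes numba is installed; when it is not, go_fast_jit simply falls back
# to the plain-Python go_fast so this module still imports cleanly.
try:
    from numba import njit

    @njit
    def go_fast_jit(a):  # same numerical kernel as go_fast (without the print)
        total = 0.0
        for _ in range(100000):
            trace = 0.0
            for i in range(a.shape[0]):
                trace += np.tanh(a[i, i])
            total += trace
        return total
except ImportError:
    go_fast_jit = go_fast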


class TestHomoLRGradient(unittest.TestCase):
    def setUp(self):
        # home_dir = os.path.split(os.path.realpath(__file__))[0]
        # data_dir = home_dir + '/../../../../../examples/data/breast_hetero_guest.csv'
        # data_df = pd.read_csv(data_dir)
        # self.X = np.array(data_df.iloc[:, 2:])
        # self.Y = np.array(data_df.iloc[:, 1])
        # self.Y = self.Y.reshape([-1, 1])
        self.X = np.random.random((569, 30))
        self.Y = np.random.randint(low=0, high=2, size=(569, 1))
        self.coef = np.zeros(self.X.shape[1])
        self.intercept = 0
        self.fit_intercept = True

    def test_compute_time(self):
        x = np.arange(10000).reshape(100, 100)
        start_time = time.time()
        grad = self._test_compute(self.X, self.Y, self.coef, self.intercept, self.fit_intercept)
        # go_fast(x)
        end_time = time.time()
        print("compute time: {}".format(end_time - start_time))  # without jit: 6.935, with jit: 6.684
        # add jit in dot 7.271
        # add jit in dot only: 7.616
        pass
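
    # A hedged sketch (not part of the original test) of how the "with jit" /
    # "without jit" timings quoted above could be reproduced side by side.
    # go_fast_jit is the illustrative helper defined next to go_fast; it falls
    # back to the plain-Python version when numba is unavailable.
    def test_jit_comparison_sketch(self):
        x = np.arange(10000).reshape(100, 100)
        for label, fn in (("plain", go_fast), ("jit (or fallback)", go_fast_jit)):
            start_time = time.time()
            fn(x)
            print("go_fast [{}]: {:.3f}s".format(label, time.time() - start_time))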

    def _test_compute(self, X, Y, coef, intercept, fit_intercept):
        batch_size = len(X)
        if batch_size == 0:
            return None, None

        # Taylor approximation of the sigmoid used for the LR gradient:
        # d_i = 0.25 * (x_i . w + b) - 0.5 * y_i
        one_d_y = Y.reshape([-1, ])
        d = (0.25 * np.array(fate_operator.dot(X, coef) + intercept).transpose() + 0.5 * one_d_y * -1)

        # Per-sample gradient contributions: column j is d_j * x_j
        grad_batch = X.transpose() * d

        # Logistic (log) loss, summed over the batch and then averaged
        tot_loss = np.log(1 + np.exp(np.multiply(-Y.transpose(), X.dot(coef) + intercept))).sum()
        avg_loss = tot_loss / Y.shape[0]

        # grad_batch = grad_batch.transpose()
        # if fit_intercept:
        #     grad_batch = np.c_[grad_batch, d]
        # grad = sum(grad_batch) / batch_size
        return 0
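

# A minimal numpy-only sketch of the full batch gradient that the commented-out
# lines in _test_compute hint at: the per-sample Taylor-approximated gradients
# are averaged, with an extra intercept column when fit_intercept is True.
# The helper name and the use of X.dot in place of fate_operator.dot are
# illustrative assumptions, not part of the original test.
def reference_gradient(X, Y, coef, intercept, fit_intercept):
    one_d_y = Y.reshape([-1, ])
    d = 0.25 * (X.dot(coef) + intercept) - 0.5 * one_d_y  # residual per sample
    grad_batch = X * d[:, np.newaxis]                      # one gradient row per sample
    if fit_intercept:
        grad_batch = np.c_[grad_batch, d]                  # intercept term appended
    return grad_batch.sum(axis=0) / len(X)                 # average over the batch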


if __name__ == '__main__':
    unittest.main()