# log.py
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
  16. import inspect
  17. import traceback
  18. import logging
  19. import os
  20. from logging.handlers import TimedRotatingFileHandler
  21. from threading import RLock
  22. from fate_arch.common import file_utils
  23. class LoggerFactory(object):
  24. TYPE = "FILE"
  25. LOG_FORMAT = "[%(levelname)s] [%(asctime)s] [jobId] [%(process)s:%(thread)s] - [%(module)s.%(funcName)s] [line:%(lineno)d]: %(message)s"
  26. LEVEL = logging.DEBUG
  27. logger_dict = {}
  28. global_handler_dict = {}
  29. LOG_DIR = None
  30. PARENT_LOG_DIR = None
  31. log_share = True
  32. append_to_parent_log = None
  33. lock = RLock()
  34. # CRITICAL = 50
  35. # FATAL = CRITICAL
  36. # ERROR = 40
  37. # WARNING = 30
  38. # WARN = WARNING
  39. # INFO = 20
  40. # DEBUG = 10
  41. # NOTSET = 0
  42. levels = (10, 20, 30, 40)
  43. schedule_logger_dict = {}
  44. @staticmethod
  45. def set_directory(directory=None, parent_log_dir=None, append_to_parent_log=None, force=False):
  46. if parent_log_dir:
  47. LoggerFactory.PARENT_LOG_DIR = parent_log_dir
  48. if append_to_parent_log:
  49. LoggerFactory.append_to_parent_log = append_to_parent_log
  50. with LoggerFactory.lock:
  51. if not directory:
  52. directory = file_utils.get_project_base_directory("logs")
  53. if not LoggerFactory.LOG_DIR or force:
  54. LoggerFactory.LOG_DIR = directory
  55. if LoggerFactory.log_share:
  56. oldmask = os.umask(000)
  57. os.makedirs(LoggerFactory.LOG_DIR, exist_ok=True)
  58. os.umask(oldmask)
  59. else:
  60. os.makedirs(LoggerFactory.LOG_DIR, exist_ok=True)
  61. for loggerName, ghandler in LoggerFactory.global_handler_dict.items():
  62. for className, (logger, handler) in LoggerFactory.logger_dict.items():
  63. logger.removeHandler(ghandler)
  64. ghandler.close()
  65. LoggerFactory.global_handler_dict = {}
  66. for className, (logger, handler) in LoggerFactory.logger_dict.items():
  67. logger.removeHandler(handler)
  68. _handler = None
  69. if handler:
  70. handler.close()
  71. if className != "default":
  72. _handler = LoggerFactory.get_handler(className)
  73. logger.addHandler(_handler)
  74. LoggerFactory.assemble_global_handler(logger)
  75. LoggerFactory.logger_dict[className] = logger, _handler
  76. @staticmethod
  77. def new_logger(name):
  78. logger = logging.getLogger(name)
  79. logger.propagate = False
  80. logger.setLevel(LoggerFactory.LEVEL)
  81. return logger
  82. @staticmethod
  83. def get_logger(class_name=None):
  84. with LoggerFactory.lock:
  85. if class_name in LoggerFactory.logger_dict.keys():
  86. logger, handler = LoggerFactory.logger_dict[class_name]
  87. if not logger:
  88. logger, handler = LoggerFactory.init_logger(class_name)
  89. else:
  90. logger, handler = LoggerFactory.init_logger(class_name)
  91. return logger
  92. @staticmethod
  93. def get_global_handler(logger_name, level=None, log_dir=None):
  94. if not LoggerFactory.LOG_DIR:
  95. return logging.StreamHandler()
  96. if log_dir:
  97. logger_name_key = logger_name + "_" + log_dir
  98. else:
  99. logger_name_key = logger_name + "_" + LoggerFactory.LOG_DIR
  100. # if loggerName not in LoggerFactory.globalHandlerDict:
  101. if logger_name_key not in LoggerFactory.global_handler_dict:
  102. with LoggerFactory.lock:
  103. if logger_name_key not in LoggerFactory.global_handler_dict:
  104. handler = LoggerFactory.get_handler(logger_name, level, log_dir)
  105. LoggerFactory.global_handler_dict[logger_name_key] = handler
  106. return LoggerFactory.global_handler_dict[logger_name_key]
  107. @staticmethod
  108. def get_handler(class_name, level=None, log_dir=None, log_type=None, job_id=None):
  109. if not log_type:
  110. if not LoggerFactory.LOG_DIR or not class_name:
  111. return logging.StreamHandler()
  112. if not log_dir:
  113. log_file = os.path.join(LoggerFactory.LOG_DIR, "{}.log".format(class_name))
  114. else:
  115. log_file = os.path.join(log_dir, "{}.log".format(class_name))
  116. else:
  117. log_file = os.path.join(log_dir, "fate_flow_{}.log".format(
  118. log_type) if level == LoggerFactory.LEVEL else 'fate_flow_{}_error.log'.format(log_type))
  119. job_id = job_id or os.getenv("FATE_JOB_ID")
  120. if job_id:
  121. formatter = logging.Formatter(LoggerFactory.LOG_FORMAT.replace("jobId", job_id))
  122. else:
  123. formatter = logging.Formatter(LoggerFactory.LOG_FORMAT.replace("jobId", "Server"))
  124. os.makedirs(os.path.dirname(log_file), exist_ok=True)
  125. if LoggerFactory.log_share:
  126. handler = ROpenHandler(log_file,
  127. when='D',
  128. interval=1,
  129. backupCount=14,
  130. delay=True)
  131. else:
  132. handler = TimedRotatingFileHandler(log_file,
  133. when='D',
  134. interval=1,
  135. backupCount=14,
  136. delay=True)
  137. if level:
  138. handler.level = level
  139. handler.setFormatter(formatter)
  140. return handler
  141. @staticmethod
  142. def init_logger(class_name):
  143. with LoggerFactory.lock:
  144. logger = LoggerFactory.new_logger(class_name)
  145. handler = None
  146. if class_name:
  147. handler = LoggerFactory.get_handler(class_name)
  148. logger.addHandler(handler)
  149. LoggerFactory.logger_dict[class_name] = logger, handler
  150. else:
  151. LoggerFactory.logger_dict["default"] = logger, handler
  152. LoggerFactory.assemble_global_handler(logger)
  153. return logger, handler
  154. @staticmethod
  155. def assemble_global_handler(logger):
  156. if LoggerFactory.LOG_DIR:
  157. for level in LoggerFactory.levels:
  158. if level >= LoggerFactory.LEVEL:
  159. level_logger_name = logging._levelToName[level]
  160. logger.addHandler(LoggerFactory.get_global_handler(level_logger_name, level))
  161. if LoggerFactory.append_to_parent_log and LoggerFactory.PARENT_LOG_DIR:
  162. for level in LoggerFactory.levels:
  163. if level >= LoggerFactory.LEVEL:
  164. level_logger_name = logging._levelToName[level]
  165. logger.addHandler(
  166. LoggerFactory.get_global_handler(level_logger_name, level, LoggerFactory.PARENT_LOG_DIR))
  167. def setDirectory(directory=None):
  168. LoggerFactory.set_directory(directory)
  169. def setLevel(level):
  170. LoggerFactory.LEVEL = level
  171. def getLogger(className=None, useLevelFile=False):
  172. if className is None:
  173. frame = inspect.stack()[1]
  174. module = inspect.getmodule(frame[0])
  175. className = 'stat'
  176. return LoggerFactory.get_logger(className)
  177. def exception_to_trace_string(ex):
  178. return "".join(traceback.TracebackException.from_exception(ex).format())
  179. class ROpenHandler(TimedRotatingFileHandler):
  180. def _open(self):
  181. prevumask = os.umask(000)
  182. rtv = TimedRotatingFileHandler._open(self)
  183. os.umask(prevumask)
  184. return rtv