source file: /opt/devel/celery/celery/log.py
file stats: 33 lines, 31 executed: 93.9% covered
1. import multiprocessing
2. import os
3. import time
4. import logging
5. from celery.conf import LOG_FORMAT, DAEMON_LOG_LEVEL
6.
7.
def setup_logger(loglevel=DAEMON_LOG_LEVEL, logfile=None, format=LOG_FORMAT,
        **kwargs):
    """Configure and return the shared ``multiprocessing`` logger.

    When ``logfile`` is given it may be either a path or an open
    file-like object; otherwise output goes to ``stderr``.

    Returns the logger object.
    """
    logger = multiprocessing.get_logger()
    if not logfile:
        # No destination given: let multiprocessing wire up stderr itself.
        multiprocessing.log_to_stderr()
    else:
        # A file-like object gets a StreamHandler; a path gets a FileHandler.
        if hasattr(logfile, "write"):
            handler = logging.StreamHandler(logfile)
        else:
            handler = logging.FileHandler(logfile)
        handler.setFormatter(logging.Formatter(format))
        logger.addHandler(handler)
    logger.setLevel(loglevel)
    return logger
28.
29.
def emergency_error(logfile, message):
    """Emergency error logging, for when there's no standard file
    descriptors open because the process has been daemonized or for
    some other reason.

    ``logfile`` is either an open file-like object (anything with a
    ``write`` method) or a path to append to. ``message`` is the text
    to log; a timestamp and the current pid are prepended.
    """
    if hasattr(logfile, "write"):
        logfh = logfile
        logfh_needs_to_close = False
    else:
        logfh = open(logfile, "a")
        logfh_needs_to_close = True
    try:
        logfh.write("[%(asctime)s: FATAL/%(pid)d]: %(message)s\n" % {
            "asctime": time.asctime(),
            "pid": os.getpid(),
            "message": message})
        # The process may be about to die: push the message through to
        # the underlying descriptor even when the caller owns the handle.
        logfh.flush()
    finally:
        # Close only handles we opened ourselves, and do so even if the
        # write raised, so the descriptor is never leaked.
        if logfh_needs_to_close:
            logfh.close()