Coverage for muutils/logger/loggingstream.py: 70%

40 statements  

coverage.py v7.6.1, created at 2025-04-04 03:33 -0600

from __future__ import annotations

import time
from dataclasses import dataclass, field
from typing import Any, Callable

from muutils.logger.simplelogger import AnyIO, NullIO
from muutils.misc import sanitize_fname


@dataclass
class LoggingStream:
    """properties of a logging stream

    - `name: str` name of the stream
    - `aliases: set[str]` aliases for the stream
      (calls to these names will be redirected to this stream. duplicate aliases will result in errors)
      TODO: perhaps duplicate aliases should result in duplicate writes?
    - `file: str|bool|AnyIO|None` file to write to
      - if `None`, will write to the standard log
      - if `True`, will write to `name + ".log.jsonl"`
      - if `False`, will "write" to `NullIO` (throw it away)
      - if a string, will write to that file
      - if a fileIO type object, will write to that object
    - `default_level: int|None` default level for this stream
    - `default_contents: dict[str, Callable[[], Any]]` default contents for this stream
    - `last_msg: tuple[float, Any]|None` last message written to this stream (timestamp, message)
    """

    name: str | None
    aliases: set[str | None] = field(default_factory=set)
    file: str | bool | AnyIO | None = None
    default_level: int | None = None
    default_contents: dict[str, Callable[[], Any]] = field(default_factory=dict)
    handler: AnyIO | None = None

    # TODO: implement last-message caching
    # last_msg: tuple[float, Any]|None = None

    def make_handler(self) -> AnyIO | None:
        if self.file is None:
            return None
        elif isinstance(self.file, str):
            # if it's a string, open that file
            return open(
                self.file,
                "w",
                encoding="utf-8",
            )
        elif isinstance(self.file, bool):
            # if it's a bool and True, open a file named after the stream (in the current dir)
            # TODO: make this happen in the same dir as the main logfile?
            if self.file:
                return open(  # type: ignore[return-value]
                    f"{sanitize_fname(self.name)}.log.jsonl",
                    "w",
                    encoding="utf-8",
                )
            else:
                return NullIO()
        else:
            # otherwise, check it has `.write()`, `.flush()`, and `.close()` methods
            if (
                (
                    not hasattr(self.file, "write")
                    or (not callable(self.file.write))
                    or (not hasattr(self.file, "flush"))
                    or (not callable(self.file.flush))
                )
                or (not hasattr(self.file, "close"))
                or (not callable(self.file.close))
            ):
                raise ValueError(f"stream {self.name} has invalid handler {self.file}")
            # ignore type check because we know it has a .write() method,
            # assume the user knows what they're doing
            return self.file  # type: ignore

    def __post_init__(self):
        # normalize aliases to a set and forbid reserved (underscore-prefixed) names
        self.aliases = set(self.aliases)
        if any(x.startswith("_") for x in self.aliases if x is not None):
            raise ValueError(
                "stream names or aliases cannot start with an underscore, sorry"
            )
        # the stream's own name is always a valid alias
        self.aliases.add(self.name)
        # every message gets a timestamp and the stream name by default
        self.default_contents["_timestamp"] = time.time
        self.default_contents["_stream"] = lambda: self.name
        self.handler = self.make_handler()

    def __del__(self):
        # flush and close the handler when the stream is garbage-collected
        if self.handler is not None:
            self.handler.flush()
            self.handler.close()

    def __str__(self):
        return f"LoggingStream(name={self.name}, aliases={self.aliases}, file={self.file}, default_level={self.default_level}, default_contents={self.default_contents})"
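
A minimal usage sketch (not part of the module above, and not exercised by this coverage report) showing how the `file` field selects the handler that `make_handler` returns: a string path opens that file, `False` routes writes to `NullIO`, and any object with `.write()`, `.flush()`, and `.close()` is passed through unchanged. The stream names, the `train.log.jsonl` path, and the `io.StringIO` buffer are illustrative assumptions, not part of muutils.

# usage sketch -- illustrative only; names and paths here are assumptions
import io

from muutils.logger.loggingstream import LoggingStream

# a string `file` opens that path for writing;
# __post_init__ also registers the stream's own name as an alias
# and adds "_timestamp" / "_stream" to default_contents
train_stream = LoggingStream(name="train", file="train.log.jsonl")
print(sorted(train_stream.aliases))  # ['train']
print(sorted(train_stream.default_contents))  # ['_stream', '_timestamp']

# `file=False` discards everything via NullIO
null_stream = LoggingStream(name="debug", file=False)
print(type(null_stream.handler).__name__)  # NullIO

# any file-like object with .write(), .flush(), and .close() is used as-is
buf = io.StringIO()
mem_stream = LoggingStream(name="mem", aliases={"memory"}, file=buf)
mem_stream.handler.write("hello\n")
print(buf.getvalue())  # hello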