initial import

gmega 2023-10-17 16:35:02 -03:00
commit 5a855a97e7
No known key found for this signature in database
GPG Key ID: FFD8DAF00660270F
31 changed files with 1293 additions and 0 deletions

4
.gitignore vendored Normal file
View File

@ -0,0 +1,4 @@
.idea
data
dist
**/__pycache__

28
README.md Normal file
View File

@ -0,0 +1,28 @@
# Simple Log Visualization Tools
## Installation
```
pip install logtools
```
## Usage
### Merge by Timestamp
```
log-merge log1.log log2.log
```
### Merge by Timestamp, Showing Aliases Instead of File Names
```
log-merge log1.log log2.log --aliases bootstrap codex21
```
### Merge and Filter by Timestamp
```
# If no timezone is provided, assumes UTC
log-merge log1.log log2.log --from 2021-01-01T00:00:00 --to 2021-01-02T00:00:00
```
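### Expected Log Line Format
The merger expects chronicles-style lines with a level, an ISO-8601 timestamp with offset, a free-form message, and a trailing `count=` field (the format is defined by `LOG_LINE` in `logtools/log/log_line.py`). A minimal sketch of parsing one such line programmatically, assuming the package is installed; the sample line is adapted from the sample log in this commit:
```
from logtools.log.log_line import LogLine

# One line in the format the parser accepts: level, timestamp with offset,
# message, and a mandatory trailing count field.
raw = 'TRC 2023-10-17 13:52:23.876+00:00 Got wantList for peer topics="codex blockexcengine" tid=1 count=1'

line = LogLine.from_str(raw, parse_datetime=True)
print(line.level, line.timestamp, line.count)
```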

24
bin/pull_pod_logs.sh Executable file
View File

@ -0,0 +1,24 @@
#!/bin/bash
NAMESPACE=${1:-"codex-continuous-tests"}

# List all pods in the namespace.
pods=$(kubectl get pods -n "$NAMESPACE" -o jsonpath='{.items[*].metadata.name}')

for pod in $pods; do
  echo "Fetching logs for $pod..."

  # Handle pods with multiple containers.
  containers=$(kubectl get pod "$pod" -n "$NAMESPACE" -o jsonpath='{.spec.containers[*].name}')
  n_containers=$(echo "$containers" | wc -w)

  for container in $containers; do
    if [ "$n_containers" -eq 1 ]; then
      # If there's only one container, name the log file after the pod.
      kubectl logs "$pod" -n "$NAMESPACE" > "${pod}.log"
    else
      # If there are multiple containers, name the log file after the pod and container.
      kubectl logs "$pod" -c "$container" -n "$NAMESPACE" > "${pod}_${container}.log"
    fi
  done
done

echo "Done fetching logs."

0
logtools/__init__.py Normal file
View File

0
logtools/cli/__init__.py Normal file
View File

86
logtools/cli/merge.py Normal file
View File

@ -0,0 +1,86 @@
"""Merges two log files by timestamp. Accepts aliases for log files. Can filter by timestamp."""
import argparse
import random
from datetime import datetime
from pathlib import Path
from random import shuffle
from typing import Dict
import pytz
from colored import Fore, Style
from dateutil import parser as tsparser
from logtools.log.sources.file_log_source import FileLogSource
from logtools.log.sources.filtered_source import FilteredSource, timestamp_range
from logtools.log.sources.merged_source import MergedSource
from logtools.log.sources.ordered_source import OrderedSource
def merge(args):
names = _assign_aliases(args)
palette = _assign_colors(names)
logs = MergedSource(*[
OrderedSource(
FilteredSource(
FileLogSource(path),
predicate=_filtering_predicate(args)
)
)
for path in args.files
])
for line in logs:
log_id = names[line.location.path.name]
print(f'{getattr(Fore, palette[log_id])}{log_id}: {line.raw}{Style.reset}', end='')
def _assign_aliases(args):
names = {path.name: path.name for path in args.files}
for i, alias in enumerate(args.aliases):
if i >= len(args.files): # excess aliases are just ignored
break
names[args.files[i].name] = alias
max_len = max([len(alias) for alias in names.values()])
return {name: alias.rjust(max_len) for name, alias in names.items()}
def _assign_colors(names: Dict[str, str]) -> Dict[str, str]:
random.seed(4)
colors = list(Fore._COLORS.keys())
shuffle(colors)
return {names[key]: colors[i] for i, key in enumerate(names.keys())}
def _filtering_predicate(args):
    if args.from_ or args.to:
        # argparse has already parsed --from/--to into datetimes; a missing bound
        # defaults to the widest possible range.
        return timestamp_range(
            _ensure_utc(args.from_) if args.from_ else datetime.min.replace(tzinfo=pytz.UTC),
            _ensure_utc(args.to) if args.to else datetime.max.replace(tzinfo=pytz.UTC)
        )
    return lambda x: True


def _ensure_utc(ts: datetime) -> datetime:
    # Naive timestamps are assumed to be UTC; aware timestamps are converted to UTC.
    if ts.tzinfo is None:
        return ts.replace(tzinfo=pytz.UTC)
    return ts.astimezone(pytz.UTC)
def main():
parser = argparse.ArgumentParser(
description='Merges logs chronologically and outputs colored, interleaved content.')
parser.add_argument("files", nargs="+", help='Log files to merge.', type=Path)
parser.add_argument('--aliases', nargs="*",
help='Optional aliases to print instead of the log file name in merged output',
type=str, default=[])
parser.add_argument('--from', dest='from_', type=tsparser.parse,
help='Show entries from date/time (multiple formats accepted)')
parser.add_argument('--to', type=tsparser.parse,
help='Show entries to date/time (multiple formats accepted)')
merge(parser.parse_args())
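
For readers skimming the diff: the CLI above is a thin wrapper over the source combinators added below. A minimal sketch of the same composition used programmatically, assuming the package is installed; `node1.log` and `node2.log` are hypothetical files:
```
from pathlib import Path

from logtools.log.sources.file_log_source import FileLogSource
from logtools.log.sources.filtered_source import FilteredSource
from logtools.log.sources.merged_source import MergedSource
from logtools.log.sources.ordered_source import OrderedSource

# Hypothetical log files: each one is parsed, filtered (here with a pass-through
# predicate), wrapped for timestamp ordering, and merged chronologically,
# mirroring what merge() does for the CLI.
paths = [Path('node1.log'), Path('node2.log')]
merged = MergedSource(*[
    OrderedSource(FilteredSource(FileLogSource(path), predicate=lambda line: True))
    for path in paths
])

for line in merged:
    print(line.location.path.name, line.raw, end='')
```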

0
logtools/log/__init__.py Normal file
View File

165
logtools/log/composers.py Normal file
View File

@ -0,0 +1,165 @@
# import abc
# import re
# from abc import abstractmethod
# from datetime import datetime
# from heapq import heapify, heappop, heappush
# from pathlib import Path
# from typing import TypedDict, Iterable, Union, Generator, Optional, Iterator, List
#
# from parse.utils import group_match
#
# class LogIterator(abc.ABC, Iterator[LogLine]):
# @abstractmethod
# def peek(self) -> Optional[LogLine]:
# ...
#
# def context(self) -> str:
# ...
#
#
# class SingleLogIterator(LogIterator):
#
# def __init__(
# self,
# path: Path,
# alias: str,
# from_ts: Optional[datetime] = None,
# to_ts: Optional[datetime] = None,
# parse_datetime=False
# ):
# self.path = path
# self.line_number = 0
# self.parse_datetime = parse_datetime
# self.alias = alias
#
# # If from_ts or to_ts is specified, then timestamp parsing is mandatory.
# self.parse_datetime = self.parse_datetime or (from_ts is not None or to_ts is not None)
# self.from_ts = from_ts
# self.to_ts = to_ts
#
# self.inner_iterator = self._iterator()
# self.look_ahead = next(self.inner_iterator, None)
#
# def __next__(self) -> LogLine:
# next_element = self.look_ahead if self.look_ahead is not None else next(self.inner_iterator)
# self.look_ahead = next(self.inner_iterator, None)
# return next_element
#
# def __iter__(self):
# return self
#
# def __lt__(self, other):
# return self.latest_timestamp() < other.latest_timestamp()
#
# def __le__(self, other):
# return self.latest_timestamp() <= other.latest_timestamp()
#
# def _iterator(self) -> Generator[LogLine, None, None]:
# with self.path.open() as f:
# for line in f:
# self.line_number += 1
# contents = group_match(line, LOG_LINE)
# if not contents:
# continue
#
# line = LogLine(
# parent=self,
# log=self.alias,
# raw=line,
# line_number=self.line_number,
# timestamp=(datetime.fromisoformat(contents['timestamp']) if self.parse_datetime
# else contents['timestamp']),
# message=contents['message'],
# )
#
# if self.should_accept(line):
# yield line
#
# def should_accept(self, line: LogLine) -> bool:
# timestamp = line['timestamp']
# if self.from_ts is not None and timestamp <= self.from_ts:
# return False
#
# if self.to_ts is not None and timestamp >= self.to_ts:
# return False
#
# return True
#
# def peek(self) -> Optional[LogLine]:
# return self.look_ahead
#
# def latest_timestamp(self) -> Optional[datetime]:
# return self.peek()['timestamp'] if self.peek() is not None else None
#
# def context(self) -> str:
# return f'{self.path}:{self.line_number}'
#
#
# def _exclude_empty(logs: Iterable[LogIterator]):
# return [log for log in logs if log.peek() is not None]
#
#
# class CollatingLogIterator(LogIterator):
#
# def __init__(self, logs: List[SingleLogIterator]):
# self.logs = _exclude_empty(logs)
#
# def __iter__(self):
# return self
#
# def __next__(self):
# if not self.logs:
# raise StopIteration()
#
# log = self.logs[0]
# value = next(log)
# if log.peek() is None:
# self.logs.pop(0)
# return value
#
# def peek(self) -> Optional[LogLine]:
# if not self.logs:
# return None
#
# return self.logs[0].peek()
#
# def context(self) -> str:
# if not self.logs:
# raise Exception('Undefined context.')
#
# return self.logs[0].context()
#
#
# class MergingLogIterator(LogIterator):
# def __init__(self, logs: List[SingleLogIterator]):
# self.logs = _exclude_empty(logs)
# heapify(self.logs)
#
# def __iter__(self):
# return self
#
# def __next__(self) -> LogLine:
# if not self.logs:
# raise StopIteration()
#
# # by construction, we can't have any empty iterators at this point, so the call to next always succeeds.
# log = heappop(self.logs)
# value = next(log)
#
# # if the iterator still has stuff in it...
# if log.peek() is not None:
# heappush(self.logs, log)
#
# return value
#
# def peek(self) -> Optional[LogLine]:
# if not self.logs:
# return None
#
# return self.logs[0].peek()
#
# def context(self) -> str:
# if not self.logs:
# raise Exception('Undefined context.')
#
# return self.logs[0].context()

44
logtools/log/log_line.py Normal file
View File

@ -0,0 +1,44 @@
import re
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import Union, Self, Optional
class LogLevel(Enum):
trace = 'TRC'
debug = 'DBG'
info = 'INF'
error = 'ERR'
LOG_LINE = re.compile(
    r'(?P<line_type>\w{3}) (?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3}\+\d{2}:\d{2}) (?P<message>.*) '
r'count=(?P<count>\d+)$'
)
@dataclass
class LogLine:
raw: str
level: LogLevel
line_number: int
timestamp: Union[str, datetime]
message: str
count: Optional[int]
@classmethod
def from_str(cls, source: str, parse_datetime: bool = False) -> Self:
parsed = LOG_LINE.search(source)
if not parsed:
raise ValueError(f'Could not parse log line: {source}')
return cls(
raw=source,
level=LogLevel(parsed['line_type'].upper()),
line_number=0,
timestamp=(datetime.fromisoformat(parsed['timestamp']) if parse_datetime
else parsed['timestamp']),
message=parsed['message'],
count=int(parsed['count']) if parsed['count'] else None,
)

0
logtools/log/sources/__init__.py Normal file
View File

13
logtools/log/sources/collating_source.py Normal file
View File

@ -0,0 +1,13 @@
from typing import Iterator
from logtools.log.sources.log_source import LogSource, TLocation, TrackedLogLine
class CollatingSource(LogSource[TLocation]):
def __init__(self, *sources: LogSource[TLocation]):
self.sources = sources
def __iter__(self) -> Iterator[TrackedLogLine[TLocation]]:
for source in self.sources:
for line in source:
yield line

30
logtools/log/sources/file_log_source.py Normal file
View File

@ -0,0 +1,30 @@
import sys
from dataclasses import dataclass
from pathlib import Path
from logtools.log.sources.log_source import TrackedLogLine, LogSource
@dataclass
class FileLineLocation:
path: Path
line_number: int
class FileLogSource(LogSource[TrackedLogLine[FileLineLocation]]):
    def __init__(self, path: Path, parse_datetime=True):
        self.path = path
        self.parse_datetime = parse_datetime

    def __iter__(self):
        with self.path.open(encoding='utf-8') as f:
            for line_number, line in enumerate(f, start=1):
                try:
                    parsed = TrackedLogLine.from_str(line, parse_datetime=self.parse_datetime)
                    parsed.location = FileLineLocation(self.path, line_number)
                    yield parsed
                except ValueError:
                    # FIXME we should probably relax parsing restrictions and output
                    #   these too, but for now just skip them.
                    print(f'Skipping unparseable line: {line}', file=sys.stderr)

22
logtools/log/sources/filtered_source.py Normal file
View File

@ -0,0 +1,22 @@
from datetime import datetime
from typing import Callable
from logtools.log.sources.log_source import LogSource, TLocation, TrackedLogLine
class FilteredSource(LogSource[TrackedLogLine[TLocation]]):
def __init__(self, source: LogSource, predicate: Callable[[TrackedLogLine[TLocation]], bool]):
self.source = source
self.predicate = predicate
def __iter__(self):
for line in self.source:
if self.predicate(line):
yield line
def timestamp_range(start: datetime, end: datetime):
def predicate(line: TrackedLogLine[TLocation]):
return start <= line.timestamp <= end
return predicate

13
logtools/log/sources/log_source.py Normal file
View File

@ -0,0 +1,13 @@
from collections.abc import Iterable
from typing import TypeVar, Generic
from logtools.log.log_line import LogLine
TLocation = TypeVar('TLocation')
class TrackedLogLine(LogLine, Generic[TLocation]):
location: TLocation
LogSource = Iterable[TrackedLogLine[TLocation]]
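
`LogSource` is just an alias for an iterable of `TrackedLogLine`, so any generator of parsed lines can feed the combinators. A minimal sketch of a hypothetical in-memory source (essentially what `StringLogSource` in the tests below does):
```
from typing import Iterator

from logtools.log.sources.log_source import TrackedLogLine


def in_memory_source(raw_lines: list[str]) -> Iterator[TrackedLogLine[int]]:
    # Hypothetical source: yields parsed lines, using the line index as the location.
    for index, raw in enumerate(raw_lines, start=1):
        line = TrackedLogLine.from_str(raw, parse_datetime=True)
        line.location = index
        yield line
```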

24
logtools/log/sources/lookahead_source.py Normal file
View File

@ -0,0 +1,24 @@
from typing import Optional
from logtools.log.sources.log_source import TLocation, LogSource, TrackedLogLine
class LookAheadSource(LogSource[TLocation]):
def __init__(self, source: LogSource[TLocation]):
self.source = iter(source)
self._lookahead = next(self.source, None)
@property
def peek(self) -> Optional[TrackedLogLine[TLocation]]:
return self._lookahead
def __iter__(self):
return self
def __next__(self):
if self._lookahead is None:
raise StopIteration()
value = self._lookahead
self._lookahead = next(self.source, None)
return value

27
logtools/log/sources/merged_source.py Normal file
View File

@ -0,0 +1,27 @@
from heapq import heapify, heappop, heappush
from logtools.log.sources.log_source import LogSource, TLocation
from logtools.log.sources.ordered_source import OrderedSource
class MergedSource(LogSource[TLocation]):
def __init__(self, *sources: OrderedSource[TLocation]):
self.sources = [source for source in sources if source.peek is not None]
heapify(self.sources)
def __iter__(self):
return self
def __next__(self):
if not self.sources:
raise StopIteration()
# by construction, we can't have any empty iterators at this point, so the call to next always succeeds.
log = heappop(self.sources)
value = next(log)
# if the iterator still has stuff in it...
if log.peek is not None:
heappush(self.sources, log)
return value

31
logtools/log/sources/ordered_source.py Normal file
View File

@ -0,0 +1,31 @@
import datetime
from typing import Self
from logtools.log.sources.log_source import TrackedLogLine, TLocation
from logtools.log.sources.lookahead_source import LookAheadSource
class OrderedSource(LookAheadSource[TLocation]):
def __lt__(self, other: Self) -> bool:
return self._peek.timestamp < other._peek.timestamp # type: ignore
def __le__(self, other: Self) -> bool:
return self._peek.timestamp <= other._peek.timestamp # type: ignore
def __gt__(self, other: Self) -> bool:
return self._peek.timestamp > other._peek.timestamp # type: ignore
def __ge__(self, other: Self) -> bool:
return self._peek.timestamp >= other._peek.timestamp # type: ignore
@property
def _peek(self) -> TrackedLogLine[TLocation]:
value = self.peek
if value is None:
raise ValueError('Cannot order sources that ran out of elements')
# FIXME too hacky, need to use a proper generic which mypy can track
if not isinstance(value.timestamp, datetime.datetime):
raise ValueError('Cannot order sources that do not have parsed timestamps')
return value

0
logtools/log/sources/tests/__init__.py Normal file
View File

10
logtools/log/sources/tests/sample.log Normal file
View File

@ -0,0 +1,10 @@
TRC 2023-10-17 13:52:23.876+00:00 Got wantList for peer topics="codex blockexcengine" tid=1 peer=16U*JMNiva items=1 count=1
TRC 2023-10-17 13:52:23.876+00:00 Retrieving peer from peer context store topics="codex peerctxstore" tid=1 peer=16U*JMNiva count=2
TRC 2023-10-17 13:52:23.876+00:00 Processing new want list entry topics="codex blockexcengine" tid=1 cid=zb2*3CyMa3 peer=16U*JMNiva wantType=WantBlock count=3
TRC 2023-10-17 13:52:23.876+00:00 Added entry to peer's want blocks list topics="codex blockexcengine" tid=1 cid=zb2*3CyMa3 peer=16U*JMNiva wantType=WantBlock count=4
TRC 2023-10-17 13:52:23.876+00:00 Scheduling a task for this peer, to look over their want-list topics="codex blockexcengine" tid=1 peer=16U*JMNiva count=5
TRC 2023-10-17 13:52:23.879+00:00 Got new task from queue topics="codex blockexcengine" tid=1 peerId=16U*JMNiva count=6
TRC 2023-10-17 13:52:23.879+00:00 Handling task for peer topics="codex blockexcengine" tid=1 peer=16U*JMNiva count=7
TRC 2023-10-17 13:52:23.880+00:00 wantsBlocks topics="codex blockexcengine" tid=1 peer=16U*JMNiva n=3850 count=8
TRC 2023-10-17 13:52:23.880+00:00 Got peer want blocks list topics="codex blockexcengine" tid=1 items=3850 count=9
TRC 2023-10-17 13:52:23.950+00:00 Got block for cid topics="codex repostore" tid=1 cid=zb2*3CyMa3 count=10

22
logtools/log/sources/tests/string_log_source.py Normal file
View File

@ -0,0 +1,22 @@
from dataclasses import dataclass
from logtools.log.sources.log_source import LogSource, TrackedLogLine
@dataclass
class ParseLocation:
name: str
number: int
class StringLogSource(LogSource[TrackedLogLine[ParseLocation]]):
def __init__(self, name: str, lines: str):
self.name = name
self.lines = lines
def __iter__(self):
for line_number, line in enumerate(self.lines.splitlines(), start=1):
parsed = TrackedLogLine.from_str(line, parse_datetime=True)
parsed.location = ParseLocation(self.name, line_number)
yield parsed

37
logtools/log/sources/tests/test_collating_source.py Normal file
View File

@ -0,0 +1,37 @@
from logtools.log.sources.collating_source import CollatingSource
from logtools.log.sources.tests.string_log_source import StringLogSource
def test_should_collate_lines_from_log_sources():
log1 = StringLogSource(
name='log1',
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=5"""
)
log2 = StringLogSource(
name='log2',
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=6
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=7
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=8
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=9
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=10"""
)
collated = CollatingSource(log1, log2)
entries = [(line.location.name, line.location.number, line.count) for line in collated]
assert entries == [
('log1', 1, 1),
('log1', 2, 2),
('log1', 3, 3),
('log1', 4, 4),
('log1', 5, 5),
('log2', 1, 6),
('log2', 2, 7),
('log2', 3, 8),
('log2', 4, 9),
('log2', 5, 10),
]

22
logtools/log/sources/tests/test_file_log_source.py Normal file
View File

@ -0,0 +1,22 @@
from pathlib import Path
from logtools.log.sources.file_log_source import FileLogSource
SAMPLE_LOG = Path(__file__).parent / 'sample.log'
def test_should_read_lines_from_file():
log = FileLogSource(SAMPLE_LOG)
assert [line.count for line in log] == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
def test_should_provide_location_context_for_lines():
log = iter(FileLogSource(SAMPLE_LOG))
line1 = next(log)
line2 = next(log)
assert line1.location.path == SAMPLE_LOG
assert line2.location.path == SAMPLE_LOG
assert line1.location.line_number == 1
assert line2.location.line_number == 2

38
logtools/log/sources/tests/test_filtered_source.py Normal file
View File

@ -0,0 +1,38 @@
from dateutil import parser
from logtools.log.log_line import LogLine
from logtools.log.sources.filtered_source import FilteredSource, timestamp_range
from logtools.log.sources.tests.string_log_source import StringLogSource
def test_should_filter_by_matching_predicate():
log1 = StringLogSource(
name='log1',
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=5"""
)
def predicate(line):
return line.count % 2 == 0
assert [line.count for line in FilteredSource(log1, predicate)] == [2, 4]
def test_should_generate_correct_datetime_range_predicate():
raw_lines = [
'TRC 2023-10-16 20:29:24.595+00:00 one topics="codex discoveryengine" count=1',
'TRC 2023-10-17 20:29:24.597+00:00 two topics="codex discoveryengine" count=2',
'TRC 2023-10-18 20:29:24.597+00:00 three topics="codex discoveryengine" count=3',
'TRC 2023-10-18 21:29:24.597+00:00 four little indians topics="codex discoveryengine" count=4',
]
matches = timestamp_range(start=parser.parse('2023-10-16 22:29:24.597+00:00'),
end=parser.parse('2023-10-18 20:29:25.597+00:00'))
lines = [LogLine.from_str(line, parse_datetime=True) for line in raw_lines]
filtered = [line.count for line in lines if matches(line)]
assert filtered == [2, 3]

46
logtools/log/sources/tests/test_lookahead_source.py Normal file
View File

@ -0,0 +1,46 @@
from logtools.log.sources.lookahead_source import LookAheadSource
from logtools.log.sources.tests.string_log_source import StringLogSource
def test_should_allow_peeking_at_the_head_of_log():
log1 = LookAheadSource(StringLogSource(
name='log1',
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=5"""
))
assert log1.peek.count == 1
assert next(log1).count == 1
assert log1.peek.count == 2
def test_should_return_all_elements():
log1 = LookAheadSource(StringLogSource(
name='log1',
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=5"""
))
assert [entry.count for entry in log1] == [1, 2, 3, 4, 5]
def test_should_return_none_when_nothing_to_peek():
log1 = LookAheadSource(StringLogSource(
name='log1',
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=5"""
))
for _ in log1:
...
assert log1.peek is None

26
logtools/log/sources/tests/test_merged_source.py Normal file
View File

@ -0,0 +1,26 @@
from logtools.log.sources.merged_source import MergedSource
from logtools.log.sources.ordered_source import OrderedSource
from logtools.log.sources.tests.string_log_source import StringLogSource
def test_should_merge_logs_by_timestamp():
log1 = OrderedSource(StringLogSource(
name='log1',
lines="""TRC 2023-10-16 20:29:24.594+00:00 Advertising block topics="codex discoveryengine" count=1
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
TRC 2023-10-16 20:29:24.647+00:00 Providing block topics="codex discovery" count=5"""
))
log2 = OrderedSource(StringLogSource(
name='log2',
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=6
TRC 2023-10-16 20:29:24.596+00:00 Provided to nodes topics="codex discovery" tid=1 count=7
TRC 2023-10-16 20:29:24.596+00:00 Advertised block topics="codex discoveryengine" count=8
TRC 2023-10-16 20:29:24.645+00:00 Retrieved record from repo topics="codex repostore" count=9
TRC 2023-10-16 20:29:24.649+00:00 Providing block topics="codex discovery" count=10"""
))
merged = MergedSource(log1, log2)
assert [line.count for line in merged] == [1, 6, 7, 8, 2, 3, 9, 4, 5, 10]

42
logtools/log/sources/tests/test_ordered_source.py Normal file
View File

@ -0,0 +1,42 @@
import pytest
from logtools.log.sources.ordered_source import OrderedSource
from logtools.log.sources.tests.string_log_source import StringLogSource
def test_should_order_sources_by_lookahead_timestamp():
contents = StringLogSource(
name='log1',
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=3"""
)
log1 = OrderedSource(contents)
log2 = OrderedSource(contents)
next(log1)
assert log2 < log1
next(log2)
    assert (log2 <= log1) and (log1 <= log2)
next(log2)
assert log1 <= log2
def test_should_raise_error_if_comparing_empty_sources():
contents = StringLogSource(
name='log1',
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=3"""
)
log1 = OrderedSource(contents)
log2 = OrderedSource(contents)
for _ in log1:
...
with pytest.raises(ValueError):
_ = log1 < log2

0
logtools/log/tests/__init__.py Normal file
View File

16
logtools/log/tests/test_log_line.py Normal file
View File

@ -0,0 +1,16 @@
from datetime import datetime
import pytz
from logtools.log.log_line import LogLine, LogLevel
def test_should_parse_logline_from_string():
line = LogLine.from_str('TRC 2023-10-16 17:28:46.579+00:00 Sending want list to peer '
'topics="codex blockexcnetwork" tid=1 peer=16U*7mogoM '
'type=WantBlock items=1 count=870781', parse_datetime=True)
assert line.level == LogLevel.trace
assert line.timestamp == datetime(2023, 10, 16, 17, 28, 46,
579000, tzinfo=pytz.utc)
assert line.count == 870781

2
mypy.ini Normal file
View File

@ -0,0 +1,2 @@
[mypy-colored.*]
ignore_missing_imports = true

494
poetry.lock generated Normal file
View File

@ -0,0 +1,494 @@
# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
[[package]]
name = "appnope"
version = "0.1.3"
description = "Disable App Nap on macOS >= 10.9"
optional = false
python-versions = "*"
files = [
{file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"},
{file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"},
]
[[package]]
name = "asttokens"
version = "2.4.0"
description = "Annotate AST trees with source code positions"
optional = false
python-versions = "*"
files = [
{file = "asttokens-2.4.0-py2.py3-none-any.whl", hash = "sha256:cf8fc9e61a86461aa9fb161a14a0841a03c405fa829ac6b202670b3495d2ce69"},
{file = "asttokens-2.4.0.tar.gz", hash = "sha256:2e0171b991b2c959acc6c49318049236844a5da1d65ba2672c4880c1c894834e"},
]
[package.dependencies]
six = ">=1.12.0"
[package.extras]
test = ["astroid", "pytest"]
[[package]]
name = "backcall"
version = "0.2.0"
description = "Specifications for callback functions passed in to an API"
optional = false
python-versions = "*"
files = [
{file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"},
{file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"},
]
[[package]]
name = "colorama"
version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
[[package]]
name = "colored"
version = "2.2.3"
description = "Simple python library for color and formatting to terminal"
optional = false
python-versions = ">=3.9"
files = [
{file = "colored-2.2.3-py3-none-any.whl", hash = "sha256:1318b2fb8e0313d39724b8ab0707af79d1e2c0e60710b608a00e70fe0f84ff5d"},
{file = "colored-2.2.3.tar.gz", hash = "sha256:1905ae45fa2b7fd63a8b4776586e63aeaba4df8db225b72b78fd167408558983"},
]
[[package]]
name = "decorator"
version = "5.1.1"
description = "Decorators for Humans"
optional = false
python-versions = ">=3.5"
files = [
{file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
]
[[package]]
name = "executing"
version = "2.0.0"
description = "Get the currently executing AST node of a frame, and other information"
optional = false
python-versions = "*"
files = [
{file = "executing-2.0.0-py2.py3-none-any.whl", hash = "sha256:06df6183df67389625f4e763921c6cf978944721abf3e714000200aab95b0657"},
{file = "executing-2.0.0.tar.gz", hash = "sha256:0ff053696fdeef426cda5bd18eacd94f82c91f49823a2e9090124212ceea9b08"},
]
[package.extras]
tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"]
[[package]]
name = "iniconfig"
version = "2.0.0"
description = "brain-dead simple config-ini parsing"
optional = false
python-versions = ">=3.7"
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
[[package]]
name = "install"
version = "1.3.5"
description = "Install packages from within code"
optional = false
python-versions = ">=2.7, >=3.5"
files = [
{file = "install-1.3.5-py3-none-any.whl", hash = "sha256:0d3fadf4aa62c95efe8d34757c8507eb46177f86c016c21c6551eafc6a53d5a9"},
{file = "install-1.3.5.tar.gz", hash = "sha256:e67c8a0be5ccf8cb4ffa17d090f3a61b6e820e6a7e21cd1d2c0f7bc59b18e647"},
]
[[package]]
name = "ipython"
version = "8.16.1"
description = "IPython: Productive Interactive Computing"
optional = false
python-versions = ">=3.9"
files = [
{file = "ipython-8.16.1-py3-none-any.whl", hash = "sha256:0852469d4d579d9cd613c220af7bf0c9cc251813e12be647cb9d463939db9b1e"},
{file = "ipython-8.16.1.tar.gz", hash = "sha256:ad52f58fca8f9f848e256c629eff888efc0528c12fe0f8ec14f33205f23ef938"},
]
[package.dependencies]
appnope = {version = "*", markers = "sys_platform == \"darwin\""}
backcall = "*"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
decorator = "*"
jedi = ">=0.16"
matplotlib-inline = "*"
pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""}
pickleshare = "*"
prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0"
pygments = ">=2.4.0"
stack-data = "*"
traitlets = ">=5"
[package.extras]
all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"]
black = ["black"]
doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"]
kernel = ["ipykernel"]
nbconvert = ["nbconvert"]
nbformat = ["nbformat"]
notebook = ["ipywidgets", "notebook"]
parallel = ["ipyparallel"]
qtconsole = ["qtconsole"]
test = ["pytest (<7.1)", "pytest-asyncio", "testpath"]
test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"]
[[package]]
name = "jedi"
version = "0.19.1"
description = "An autocompletion tool for Python that can be used for text editors."
optional = false
python-versions = ">=3.6"
files = [
{file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"},
{file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"},
]
[package.dependencies]
parso = ">=0.8.3,<0.9.0"
[package.extras]
docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"]
qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]
[[package]]
name = "matplotlib-inline"
version = "0.1.6"
description = "Inline Matplotlib backend for Jupyter"
optional = false
python-versions = ">=3.5"
files = [
{file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"},
{file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"},
]
[package.dependencies]
traitlets = "*"
[[package]]
name = "mypy"
version = "1.6.0"
description = "Optional static typing for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "mypy-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:091f53ff88cb093dcc33c29eee522c087a438df65eb92acd371161c1f4380ff0"},
{file = "mypy-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb7ff4007865833c470a601498ba30462b7374342580e2346bf7884557e40531"},
{file = "mypy-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49499cf1e464f533fc45be54d20a6351a312f96ae7892d8e9f1708140e27ce41"},
{file = "mypy-1.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4c192445899c69f07874dabda7e931b0cc811ea055bf82c1ababf358b9b2a72c"},
{file = "mypy-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:3df87094028e52766b0a59a3e46481bb98b27986ed6ded6a6cc35ecc75bb9182"},
{file = "mypy-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c8835a07b8442da900db47ccfda76c92c69c3a575872a5b764332c4bacb5a0a"},
{file = "mypy-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:24f3de8b9e7021cd794ad9dfbf2e9fe3f069ff5e28cb57af6f873ffec1cb0425"},
{file = "mypy-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:856bad61ebc7d21dbc019b719e98303dc6256cec6dcc9ebb0b214b81d6901bd8"},
{file = "mypy-1.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89513ddfda06b5c8ebd64f026d20a61ef264e89125dc82633f3c34eeb50e7d60"},
{file = "mypy-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:9f8464ed410ada641c29f5de3e6716cbdd4f460b31cf755b2af52f2d5ea79ead"},
{file = "mypy-1.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:971104bcb180e4fed0d7bd85504c9036346ab44b7416c75dd93b5c8c6bb7e28f"},
{file = "mypy-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab98b8f6fdf669711f3abe83a745f67f50e3cbaea3998b90e8608d2b459fd566"},
{file = "mypy-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a69db3018b87b3e6e9dd28970f983ea6c933800c9edf8c503c3135b3274d5ad"},
{file = "mypy-1.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dccd850a2e3863891871c9e16c54c742dba5470f5120ffed8152956e9e0a5e13"},
{file = "mypy-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:f8598307150b5722854f035d2e70a1ad9cc3c72d392c34fffd8c66d888c90f17"},
{file = "mypy-1.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fea451a3125bf0bfe716e5d7ad4b92033c471e4b5b3e154c67525539d14dc15a"},
{file = "mypy-1.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e28d7b221898c401494f3b77db3bac78a03ad0a0fff29a950317d87885c655d2"},
{file = "mypy-1.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4b7a99275a61aa22256bab5839c35fe8a6887781862471df82afb4b445daae6"},
{file = "mypy-1.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7469545380dddce5719e3656b80bdfbb217cfe8dbb1438532d6abc754b828fed"},
{file = "mypy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:7807a2a61e636af9ca247ba8494031fb060a0a744b9fee7de3a54bed8a753323"},
{file = "mypy-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2dad072e01764823d4b2f06bc7365bb1d4b6c2f38c4d42fade3c8d45b0b4b67"},
{file = "mypy-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b19006055dde8a5425baa5f3b57a19fa79df621606540493e5e893500148c72f"},
{file = "mypy-1.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31eba8a7a71f0071f55227a8057468b8d2eb5bf578c8502c7f01abaec8141b2f"},
{file = "mypy-1.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e0db37ac4ebb2fee7702767dfc1b773c7365731c22787cb99f507285014fcaf"},
{file = "mypy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:c69051274762cccd13498b568ed2430f8d22baa4b179911ad0c1577d336ed849"},
{file = "mypy-1.6.0-py3-none-any.whl", hash = "sha256:9e1589ca150a51d9d00bb839bfeca2f7a04f32cd62fad87a847bc0818e15d7dc"},
{file = "mypy-1.6.0.tar.gz", hash = "sha256:4f3d27537abde1be6d5f2c96c29a454da333a2a271ae7d5bc7110e6d4b7beb3f"},
]
[package.dependencies]
mypy-extensions = ">=1.0.0"
typing-extensions = ">=4.1.0"
[package.extras]
dmypy = ["psutil (>=4.0)"]
install-types = ["pip"]
reports = ["lxml"]
[[package]]
name = "mypy-extensions"
version = "1.0.0"
description = "Type system extensions for programs checked with the mypy type checker."
optional = false
python-versions = ">=3.5"
files = [
{file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
[[package]]
name = "packaging"
version = "23.2"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
files = [
{file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
{file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
]
[[package]]
name = "parso"
version = "0.8.3"
description = "A Python Parser"
optional = false
python-versions = ">=3.6"
files = [
{file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"},
{file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"},
]
[package.extras]
qa = ["flake8 (==3.8.3)", "mypy (==0.782)"]
testing = ["docopt", "pytest (<6.0.0)"]
[[package]]
name = "pexpect"
version = "4.8.0"
description = "Pexpect allows easy control of interactive console applications."
optional = false
python-versions = "*"
files = [
{file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
{file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"},
]
[package.dependencies]
ptyprocess = ">=0.5"
[[package]]
name = "pickleshare"
version = "0.7.5"
description = "Tiny 'shelve'-like database with concurrency support"
optional = false
python-versions = "*"
files = [
{file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"},
{file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"},
]
[[package]]
name = "pluggy"
version = "1.3.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.8"
files = [
{file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"},
{file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"},
]
[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "prompt-toolkit"
version = "3.0.39"
description = "Library for building powerful interactive command lines in Python"
optional = false
python-versions = ">=3.7.0"
files = [
{file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"},
{file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"},
]
[package.dependencies]
wcwidth = "*"
[[package]]
name = "ptyprocess"
version = "0.7.0"
description = "Run a subprocess in a pseudo terminal"
optional = false
python-versions = "*"
files = [
{file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"},
{file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"},
]
[[package]]
name = "pure-eval"
version = "0.2.2"
description = "Safely evaluate AST nodes without side effects"
optional = false
python-versions = "*"
files = [
{file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"},
{file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"},
]
[package.extras]
tests = ["pytest"]
[[package]]
name = "pygments"
version = "2.16.1"
description = "Pygments is a syntax highlighting package written in Python."
optional = false
python-versions = ">=3.7"
files = [
{file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"},
{file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"},
]
[package.extras]
plugins = ["importlib-metadata"]
[[package]]
name = "pytest"
version = "7.4.2"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"},
{file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"},
]
[package.dependencies]
colorama = {version = "*", markers = "sys_platform == \"win32\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
[package.extras]
testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
files = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
]
[package.dependencies]
six = ">=1.5"
[[package]]
name = "pytz"
version = "2023.3.post1"
description = "World timezone definitions, modern and historical"
optional = false
python-versions = "*"
files = [
{file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"},
{file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"},
]
[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
[[package]]
name = "stack-data"
version = "0.6.3"
description = "Extract data from python stack frames and tracebacks for informative displays"
optional = false
python-versions = "*"
files = [
{file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"},
{file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"},
]
[package.dependencies]
asttokens = ">=2.1.0"
executing = ">=1.2.0"
pure-eval = "*"
[package.extras]
tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"]
[[package]]
name = "traitlets"
version = "5.11.2"
description = "Traitlets Python configuration system"
optional = false
python-versions = ">=3.8"
files = [
{file = "traitlets-5.11.2-py3-none-any.whl", hash = "sha256:98277f247f18b2c5cabaf4af369187754f4fb0e85911d473f72329db8a7f4fae"},
{file = "traitlets-5.11.2.tar.gz", hash = "sha256:7564b5bf8d38c40fa45498072bf4dc5e8346eb087bbf1e2ae2d8774f6a0f078e"},
]
[package.extras]
docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
test = ["argcomplete (>=3.0.3)", "mypy (>=1.5.1)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"]
[[package]]
name = "types-python-dateutil"
version = "2.8.19.14"
description = "Typing stubs for python-dateutil"
optional = false
python-versions = "*"
files = [
{file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"},
{file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"},
]
[[package]]
name = "types-pytz"
version = "2023.3.1.1"
description = "Typing stubs for pytz"
optional = false
python-versions = "*"
files = [
{file = "types-pytz-2023.3.1.1.tar.gz", hash = "sha256:cc23d0192cd49c8f6bba44ee0c81e4586a8f30204970fc0894d209a6b08dab9a"},
{file = "types_pytz-2023.3.1.1-py3-none-any.whl", hash = "sha256:1999a123a3dc0e39a2ef6d19f3f8584211de9e6a77fe7a0259f04a524e90a5cf"},
]
[[package]]
name = "typing-extensions"
version = "4.8.0"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
{file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"},
{file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"},
]
[[package]]
name = "wcwidth"
version = "0.2.8"
description = "Measures the displayed width of unicode strings in a terminal"
optional = false
python-versions = "*"
files = [
{file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"},
{file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"},
]
[metadata]
lock-version = "2.0"
python-versions = "~3.11"
content-hash = "e8258c66a10f3359660ef60eb8f960c71b2e699da61ec162575456f864de8b58"

27
pyproject.toml Normal file
View File

@ -0,0 +1,27 @@
[tool.poetry]
name = "logtools"
version = "0.1.0"
description = ""
authors = ["Your Name <you@example.com>"]
readme = "README.md"
[tool.poetry.dependencies]
python = "~3.11"
pytz = "^2023.3.post1"
colored = "^2.2.3"
python-dateutil = "^2.8.2"
[tool.poetry.group.dev.dependencies]
pytest = "^7.4.2"
ipython = "^8.15.0"
mypy = "^1.6.0"
install = "^1.3.5"
types-pytz = "^2023.3.1.1"
types-python-dateutil = "^2.8.19.14"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
[tool.poetry.scripts]
log-merge = 'logtools.cli.merge:main'