add elasticsearch log source, overhaul of log tooling structure
This commit is contained in:
parent
613f35cdfc
commit
b316e458c9
|
@ -0,0 +1,70 @@
|
||||||
|
from enum import Enum
|
||||||
|
from typing import List, Iterator, Iterable
|
||||||
|
|
||||||
|
from elasticsearch import Elasticsearch
|
||||||
|
from prettytable import PrettyTable
|
||||||
|
from traitlets.config.loader import ArgumentParser
|
||||||
|
|
||||||
|
from logtools.log.sources.input.elastic_search.elastic_search_log_repo import ElasticSearchLogRepo
|
||||||
|
|
||||||
|
|
||||||
|
class ResourceType(Enum):
    """Kinds of resources that the ``get`` command can list from the log store."""
    pods = 'pods'
    namespaces = 'namespaces'


def _get_pods(repo, args):
    # Pods can be narrowed both by name prefix and by test-run id.
    return repo.pods(prefix=args.prefix, run_id=args.run_id)


def _get_namespaces(repo, args):
    # Namespaces only support prefix filtering.
    return repo.namespaces(prefix=args.prefix)


# Dispatch table mapping each resource type onto its repository query.
GETTERS = {
    ResourceType.pods: _get_pods,
    ResourceType.namespaces: _get_namespaces,
}
|
||||||
|
|
||||||
|
|
||||||
|
def format_table(objects: List) -> str:
    """Render *objects* (dataclass-like, exposing ``__annotations__``) as an ASCII table.

    Column names are taken from the annotations of the first object seen; every
    object then contributes one row, with iterable fields collapsed into a
    single comma-separated cell by :func:`_format_field`.
    """
    table = PrettyTable()
    for entry in objects:
        # Initialize the header lazily, from the first object's declared fields.
        if not table.field_names:
            table.field_names = entry.__annotations__.keys()
        table.add_row([_format_field(getattr(entry, name)) for name in table.field_names])

    return table.get_string()
|
||||||
|
|
||||||
|
|
||||||
|
def _format_field(field: str | Iterable[object]):
|
||||||
|
if isinstance(field, str):
|
||||||
|
return field
|
||||||
|
return ', '.join([str(item) for item in field])
|
||||||
|
|
||||||
|
|
||||||
|
def get_object(args, repo: ElasticSearchLogRepo):
    """Fetch the resource type requested on the CLI from *repo* and print it as a table."""
    getter = GETTERS[ResourceType[args.resource_type]]
    print(format_table(getter(repo, args)))
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Entry point for the ES log-tooling CLI.

    Currently exposes a single ``get`` command with ``pods`` and ``namespaces``
    subcommands; the selected handler is dispatched through ``args.main``.
    """
    parser = ArgumentParser()
    parser.add_argument(
        '--es-host',
        help='ElasticSearch URL (defaults to http://localhost:9200)',
        default='http://localhost:9200'
    )

    commands = parser.add_subparsers(title='Command', required=True)

    get_cmd = commands.add_parser('get', help='Display existing resources')
    get_cmd.set_defaults(main=get_object)

    resource_parsers = get_cmd.add_subparsers(title='Resource type', dest='resource_type', required=True)

    pods_parser = resource_parsers.add_parser('pods', help='Display existing pods')
    pods_parser.add_argument('--prefix', help='Filter pods by prefix')
    pods_parser.add_argument('--run-id', help='Show pods for a given run')

    namespaces_parser = resource_parsers.add_parser('namespaces', help='Display existing namespaces')
    namespaces_parser.add_argument('--prefix', help='Filter namespaces by prefix')

    args = parser.parse_args()

    # Each subcommand registered its handler via set_defaults(main=...).
    args.main(args, ElasticSearchLogRepo(client=Elasticsearch(args.es_host)))


if __name__ == '__main__':
    main()
|
|
@ -10,10 +10,11 @@ import pytz
|
||||||
from colored import Fore, Style
|
from colored import Fore, Style
|
||||||
from dateutil import parser as tsparser
|
from dateutil import parser as tsparser
|
||||||
|
|
||||||
from logtools.log.sources.file_log_source import FileLogSource
|
from logtools.log.sources.input.file_log_source import FileLogSource
|
||||||
from logtools.log.sources.filtered_source import FilteredSource, timestamp_range
|
from logtools.log.sources.parse.chronicles_raw_source import ChroniclesRawSource
|
||||||
from logtools.log.sources.merged_source import MergedSource
|
from logtools.log.sources.transform.filtered_source import FilteredSource, timestamp_range
|
||||||
from logtools.log.sources.ordered_source import OrderedSource
|
from logtools.log.sources.transform.merged_source import MergedSource
|
||||||
|
from logtools.log.sources.transform.ordered_source import OrderedSource
|
||||||
|
|
||||||
|
|
||||||
def merge(args):
|
def merge(args):
|
||||||
|
@ -23,7 +24,9 @@ def merge(args):
|
||||||
logs = MergedSource(*[
|
logs = MergedSource(*[
|
||||||
OrderedSource(
|
OrderedSource(
|
||||||
FilteredSource(
|
FilteredSource(
|
||||||
FileLogSource(path),
|
ChroniclesRawSource(
|
||||||
|
FileLogSource(path)
|
||||||
|
),
|
||||||
predicate=_filtering_predicate(args)
|
predicate=_filtering_predicate(args)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
@ -73,7 +76,7 @@ def _ensure_utc(ts: datetime) -> datetime:
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
parser = argparse.ArgumentParser(
|
parser = argparse.ArgumentParser(
|
||||||
description='Merges logs chronologically and outputs colored, interleaved content.')
|
description='Merges Chronicles logs chronologically and outputs colored, interleaved content.')
|
||||||
|
|
||||||
parser.add_argument("files", nargs="+", help='Log files to merge.', type=Path)
|
parser.add_argument("files", nargs="+", help='Log files to merge.', type=Path)
|
||||||
parser.add_argument('--aliases', nargs="*",
|
parser.add_argument('--aliases', nargs="*",
|
||||||
|
@ -85,3 +88,7 @@ def main():
|
||||||
help='Show entries to date/time (multiple formats accepted)')
|
help='Show entries to date/time (multiple formats accepted)')
|
||||||
|
|
||||||
merge(parser.parse_args())
|
merge(parser.parse_args())
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
||||||
|
|
|
@ -3,9 +3,11 @@ extracted into their own columns."""
|
||||||
import sys
|
import sys
|
||||||
from argparse import ArgumentParser
|
from argparse import ArgumentParser
|
||||||
from csv import DictWriter
|
from csv import DictWriter
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
from logtools.cli.utils import kv_pair
|
from logtools.cli.utils import kv_pair
|
||||||
from logtools.log.sources.stream_log_source import StreamLogSource
|
from logtools.log.sources.input.file_log_source import FileLogSource
|
||||||
|
from logtools.log.sources.parse.chronicles_raw_source import ChroniclesRawSource
|
||||||
|
|
||||||
|
|
||||||
def to_csv(args):
|
def to_csv(args):
|
||||||
|
@ -18,7 +20,8 @@ def to_csv(args):
|
||||||
)
|
)
|
||||||
|
|
||||||
writer.writeheader()
|
writer.writeheader()
|
||||||
for line in StreamLogSource(sys.stdin):
|
# FIXME '/dev/stdin' is a non-portable hack.
|
||||||
|
for line in ChroniclesRawSource(FileLogSource(Path('/dev/stdin'))):
|
||||||
line_fields = {field: line.fields.get(field, 'NA') for field in fields}
|
line_fields = {field: line.fields.get(field, 'NA') for field in fields}
|
||||||
writer.writerow({
|
writer.writerow({
|
||||||
'timestamp': line.timestamp.isoformat(),
|
'timestamp': line.timestamp.isoformat(),
|
||||||
|
|
|
@ -0,0 +1,42 @@
|
||||||
|
import abc
|
||||||
|
import re
|
||||||
|
from abc import ABC
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Optional, TypeVar, Generic, Iterable, Iterator
|
||||||
|
|
||||||
|
# Type of the "location" metadata a source attaches to its lines (file offset,
# ES index/ordinal, ...); each input source declares its own.
TLocation = TypeVar('TLocation')


@dataclass
class LineNumberLocation:
    """Commonly used location type which tracks the line number of a log line with respect to a given source."""
    line_number: int


@dataclass
class RawLogLine(Generic[TLocation]):
    """An unparsed log line.

    Carries the raw text of the line plus an optional location, when the input
    source can meaningfully establish one.
    """
    location: Optional[TLocation]
    raw: str


# Bound type variable so sources can be generic over the *kind* of line they yield.
TLogLine = TypeVar('TLogLine', bound=RawLogLine)


@dataclass
class TimestampedLogLine(RawLogLine[TLocation]):
    """A log line whose timestamp is known."""
    timestamp: datetime


class LogSource(ABC, Generic[TLogLine]):
    """Abstract base for anything that can be iterated to produce log lines."""

    @abc.abstractmethod
    def __iter__(self) -> Iterator[TLogLine]:
        ...
|
|
@ -1,31 +0,0 @@
|
||||||
import re
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from datetime import datetime
|
|
||||||
from enum import Enum
|
|
||||||
from typing import Union, Optional
|
|
||||||
|
|
||||||
TOPICS = re.compile(r'(?P<key>\w+)=(?P<value>"[\w\s]+"|\S+)')
|
|
||||||
|
|
||||||
|
|
||||||
class LogLevel(Enum):
|
|
||||||
trace = 'TRC'
|
|
||||||
debug = 'DBG'
|
|
||||||
info = 'INF'
|
|
||||||
error = 'ERR'
|
|
||||||
warning = 'WRN'
|
|
||||||
note = 'NOT'
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class LogLine:
|
|
||||||
raw: str
|
|
||||||
level: LogLevel
|
|
||||||
timestamp: Union[str, datetime]
|
|
||||||
message: str
|
|
||||||
topics: str
|
|
||||||
count: Optional[int]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def fields(self):
|
|
||||||
fields = TOPICS.findall(self.topics)
|
|
||||||
return {key: value for key, value in fields} if fields else {}
|
|
|
@ -1,13 +0,0 @@
|
||||||
from typing import Iterator
|
|
||||||
|
|
||||||
from logtools.log.sources.log_source import LogSource, TLocation, TrackedLogLine
|
|
||||||
|
|
||||||
|
|
||||||
class CollatingSource(LogSource[TLocation]):
|
|
||||||
def __init__(self, *sources: LogSource[TLocation]):
|
|
||||||
self.sources = sources
|
|
||||||
|
|
||||||
def __iter__(self) -> Iterator[TrackedLogLine[TLocation]]:
|
|
||||||
for source in self.sources:
|
|
||||||
for line in source:
|
|
||||||
yield line
|
|
|
@ -1,25 +0,0 @@
|
||||||
from dataclasses import dataclass
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from logtools.log.sources.log_parsers import LineNumberLocation
|
|
||||||
from logtools.log.sources.stream_log_source import StreamLogSource
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class FileLineLocation(LineNumberLocation):
|
|
||||||
path: Path
|
|
||||||
|
|
||||||
|
|
||||||
class FileLogSource(StreamLogSource):
|
|
||||||
def __init__(self, path: Path, parse_datetime=True):
|
|
||||||
self.path = path
|
|
||||||
super().__init__(self.path.open(encoding='utf-8'), parse_datetime=parse_datetime)
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
try:
|
|
||||||
yield from super().__iter__()
|
|
||||||
finally:
|
|
||||||
self.stream.close()
|
|
||||||
|
|
||||||
def _location(self, line_number: int) -> LineNumberLocation:
|
|
||||||
return FileLineLocation(path=self.path, line_number=line_number)
|
|
|
@ -1,22 +0,0 @@
|
||||||
from datetime import datetime
|
|
||||||
from typing import Callable
|
|
||||||
|
|
||||||
from logtools.log.sources.log_source import LogSource, TLocation, TrackedLogLine
|
|
||||||
|
|
||||||
|
|
||||||
class FilteredSource(LogSource[TrackedLogLine[TLocation]]):
|
|
||||||
def __init__(self, source: LogSource, predicate: Callable[[TrackedLogLine[TLocation]], bool]):
|
|
||||||
self.source = source
|
|
||||||
self.predicate = predicate
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
for line in self.source:
|
|
||||||
if self.predicate(line):
|
|
||||||
yield line
|
|
||||||
|
|
||||||
|
|
||||||
def timestamp_range(start: datetime, end: datetime):
|
|
||||||
def predicate(line: TrackedLogLine[TLocation]):
|
|
||||||
return start <= line.timestamp <= end # type: ignore
|
|
||||||
|
|
||||||
return predicate
|
|
|
@ -0,0 +1,99 @@
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
import logging
|
||||||
|
from typing import Optional, Iterator
|
||||||
|
|
||||||
|
from elasticsearch import Elasticsearch
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class Namespace:
    """A Kubernetes namespace as seen in the log store.

    ``run_id`` and ``indices`` are frozensets because a namespace's log lines
    may span several test runs and several daily ES indices.
    """
    name: str
    run_id: frozenset[str]
    indices: frozenset[str]


@dataclass(frozen=True)
class Pod:
    """A pod as seen in the log store.

    Unlike a namespace, a pod belongs to exactly one namespace and one run,
    though its log lines may still span several daily ES indices.
    """
    name: str
    namespace: str
    run_id: str
    indices: frozenset[str]
|
||||||
|
|
||||||
|
|
||||||
|
class ElasticSearchLogRepo:
    """Read-only catalog queries over the pod log indices in ElasticSearch.

    Answers "what is in the store?" questions (which namespaces, which pods)
    by means of aggregation queries -- no actual log lines are fetched.
    """

    def __init__(
            self,
            client: Optional[Elasticsearch] = None,
            indices: str = 'continuous-tests-pods-*',
    ):
        if client is None:
            logger.warning('No client provided, defaulting to localhost')
            client = Elasticsearch(hosts='http://localhost:9200', request_timeout=60)

        self.client = client
        self.indices = indices

    def namespaces(self, prefix: Optional[str] = None) -> Iterator[Namespace]:
        """Yield the distinct namespaces in the store, optionally limited to
        those whose name starts with *prefix*."""
        query = {
            'size': 0,  # aggregations only -- suppress document hits
            'aggs': {
                'distinct_namespaces': {
                    'terms': {'field': 'pod_namespace.keyword'},
                    'aggs': {
                        'indices': {'terms': {'field': '_index'}},
                        'runid': {'terms': {'field': 'pod_labels.runid.keyword'}},
                    }
                }
            }
        }

        if prefix is not None:
            query['aggs']['distinct_namespaces']['terms']['include'] = f'{prefix}.*'

        result = self.client.search(index=self.indices, body=query)

        for bucket in result['aggregations']['distinct_namespaces']['buckets']:
            yield Namespace(
                name=bucket['key'],
                run_id=frozenset(entry['key'] for entry in bucket['runid']['buckets']),
                indices=frozenset(entry['key'] for entry in bucket['indices']['buckets'])
            )

    def pods(self, prefix: Optional[str] = None, run_id: Optional[str] = None):
        """Yield the distinct pods in the store, optionally filtered by name
        *prefix* and/or restricted to a single *run_id*."""
        query = {
            'size': 0,  # aggregations only -- suppress document hits
            'aggs': {
                'distinct_pods': {
                    'terms': {'field': 'pod_name.keyword'},
                    'aggs': {
                        'indices': {'terms': {'field': '_index'}},
                        'namespace': {'terms': {'field': 'pod_namespace.keyword'}},
                        'runid': {'terms': {'field': 'pod_labels.runid.keyword'}},
                    }
                }
            }
        }

        if prefix is not None:
            query['aggs']['distinct_pods']['terms']['include'] = f'{prefix}.*'

        if run_id is not None:
            query['query'] = {
                'bool': {
                    'filter': [{'term': {'pod_labels.runid.keyword': run_id}}]
                }
            }

        buckets = self.client.search(index=self.indices, body=query)['aggregations']['distinct_pods']['buckets']
        for bucket in buckets:
            # A pod lives in exactly one namespace/run; multiple buckets would
            # mean the data (or the aggregation) is broken.
            assert len(bucket['namespace']['buckets']) == 1, 'Pods should only have one namespace'
            assert len(bucket['runid']['buckets']) == 1, 'Pods should only have one run_id'

            yield Pod(
                name=bucket['key'],
                namespace=bucket['namespace']['buckets'][0]['key'],
                run_id=bucket['runid']['buckets'][0]['key'],
                indices=frozenset(entry['key'] for entry in bucket['indices']['buckets'])
            )
|
|
@ -0,0 +1,112 @@
|
||||||
|
import logging
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from typing import Optional, List, Dict, Any, Iterator, Set
|
||||||
|
|
||||||
|
from elasticsearch import Elasticsearch
|
||||||
|
|
||||||
|
from logtools.log.base import TimestampedLogLine, LogSource
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
INDEX_PREFIX = 'continuous-tests-pods'
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class ElasticSearchLocation:
    """Provenance of a log line: the ES index it was read from, its ordinal
    within the query results, and the pod/run it belongs to."""
    index: str
    result_number: int
    pod_name: str
    run_id: str
|
||||||
|
|
||||||
|
|
||||||
|
class ElasticSearchSource(LogSource[TimestampedLogLine[ElasticSearchLocation]]):
    """Log source which streams timestamped log lines out of ElasticSearch.

    Lines are fetched one daily index at a time and, within an index, in
    ascending timestamp order, paging through results with the scroll API.
    """

    def __init__(
            self,
            pods: Optional[Set[str]] = None,
            run_id: Optional[str] = None,
            client: Optional[Elasticsearch] = None,
            start_date: Optional[datetime] = datetime.min,
            end_date: Optional[datetime] = datetime.max,
    ):
        if client is None:
            logger.warning('No client provided, defaulting to localhost')
            client = Elasticsearch(hosts='http://localhost:9200')

        self.run_id = run_id
        self.pods = pods

        self.client = client
        # The date parameters are declared Optional: honor an explicit None as
        # "unbounded" instead of crashing later in _indices()/_get_logs().
        self.start_date = datetime.min if start_date is None else start_date
        self.end_date = datetime.max if end_date is None else end_date

    def __iter__(self) -> Iterator[TimestampedLogLine[ElasticSearchLocation]]:
        for index in self._indices():
            for i, document in enumerate(self._get_logs(index)):
                yield self._format_log_line(i, index, document)

    def _indices(self) -> Iterator[str]:
        """Yield the names of the daily indices that actually exist within the
        configured date range. (Generator -- not a List, as previously annotated.)"""
        start_day = self.start_date.date()
        end_day = self.end_date.date()
        increment = timedelta(days=1)

        while start_day <= end_day:
            index = f'{INDEX_PREFIX}-{start_day:%Y.%m.%d}'
            if self.client.indices.exists(index=index):
                yield index
            start_day += increment

    def _get_logs(self, index: str):
        """Build the filtered, timestamp-sorted query for *index* and run it as a scan."""
        query = {
            'sort': [{'@timestamp': 'asc'}],
            'query': {
                'bool': {
                    'filter': [
                        {
                            'range': {
                                '@timestamp': {
                                    'gte': self.start_date.isoformat(),
                                    'lte': self.end_date.isoformat(),
                                }
                            }
                        }
                    ]
                }
            }
        }

        if self.pods is not None:
            query['query']['bool']['filter'].append({"terms": {"pod_name.keyword": list(self.pods)}})

        if self.run_id is not None:
            query['query']['bool']['filter'].append({"term": {"pod_labels.runid.keyword": self.run_id}})

        return self._run_scan(query, index)

    def _run_scan(self, query: Dict[str, Any], index: str):
        """Page through all hits for *query* with the scroll API, yielding raw documents.

        The scroll context is always cleared on exit, even if the consumer
        abandons the generator mid-stream.
        """
        results = self.client.search(index=index, body=query, size=5_000, scroll='2m')
        scroll_id = results['_scroll_id']

        try:
            while True:
                documents = results['hits']['hits']
                if not documents:
                    break

                for doc in documents:
                    yield doc

                results = self.client.scroll(scroll_id=scroll_id, scroll='2m')
                # Per the ES scroll API, each response may return a fresh scroll
                # id; subsequent requests must always use the most recent one.
                if '_scroll_id' in results:
                    scroll_id = results['_scroll_id']
        finally:
            self.client.clear_scroll(scroll_id=scroll_id)

    def _format_log_line(self, result_number: int, index: str, document: Dict[str, Any]):
        """Convert a raw ES hit into a :class:`TimestampedLogLine` with an
        :class:`ElasticSearchLocation` attached."""
        contents = document['_source']

        return TimestampedLogLine(
            location=ElasticSearchLocation(index=index, result_number=result_number, run_id=self.run_id,
                                           pod_name=contents['pod_name']),
            timestamp=datetime.fromisoformat(contents['@timestamp']),
            raw=contents['message'],
        )
|
|
@ -0,0 +1,30 @@
|
||||||
|
interactions:
|
||||||
|
- request:
|
||||||
|
body: '{"aggs":{"distinct_namespaces":{"terms":{"field":"pod_namespace.keyword","include":"codex-continuous-tests-profiling.*"},"aggs":{"indices":{"terms":{"field":"_index"}},"runid":{"terms":{"field":"pod_labels.runid.keyword"}}}}},"size":0}'
|
||||||
|
headers:
|
||||||
|
accept:
|
||||||
|
- application/vnd.elasticsearch+json; compatible-with=8
|
||||||
|
connection:
|
||||||
|
- keep-alive
|
||||||
|
content-type:
|
||||||
|
- application/vnd.elasticsearch+json; compatible-with=8
|
||||||
|
user-agent:
|
||||||
|
- elasticsearch-py/8.10.1 (Python/3.11.5; elastic-transport/8.10.0)
|
||||||
|
x-elastic-client-meta:
|
||||||
|
- es=8.10.1,py=3.11.5,t=8.10.0,ur=2.0.7
|
||||||
|
method: POST
|
||||||
|
uri: http://localhost:9200/continuous-tests-pods-*/_search
|
||||||
|
response:
|
||||||
|
body:
|
||||||
|
string: '{"took":24,"timed_out":false,"_shards":{"total":14,"successful":14,"skipped":0,"failed":0},"hits":{"total":{"value":10000,"relation":"gte"},"max_score":null,"hits":[]},"aggregations":{"distinct_namespaces":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"codex-continuous-tests-profiling-two-client-tests-0","doc_count":32258774,"indices":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"continuous-tests-pods-2023.11.10","doc_count":23279858},{"key":"continuous-tests-pods-2023.11.09","doc_count":8405729},{"key":"continuous-tests-pods-2023.11.07","doc_count":573187}]},"runid":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"20231109-085853","doc_count":31604533},{"key":"20231107-074743","doc_count":517515},{"key":"20231109-043100","doc_count":77259},{"key":"20231107-065930","doc_count":50350},{"key":"20231107-064223","doc_count":5322},{"key":"20231109-055106","doc_count":3795}]}},{"key":"codex-continuous-tests-profiling-two-client-tests-sched-0","doc_count":22615739,"indices":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"continuous-tests-pods-2023.11.10","doc_count":22415082},{"key":"continuous-tests-pods-2023.11.09","doc_count":200657}]},"runid":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"20231109-101554","doc_count":22615739}]}}]}}}'
|
||||||
|
headers:
|
||||||
|
Transfer-Encoding:
|
||||||
|
- chunked
|
||||||
|
X-elastic-product:
|
||||||
|
- Elasticsearch
|
||||||
|
content-type:
|
||||||
|
- application/vnd.elasticsearch+json;compatible-with=8
|
||||||
|
status:
|
||||||
|
code: 200
|
||||||
|
message: OK
|
||||||
|
version: 1
|
|
@ -0,0 +1,30 @@
|
||||||
|
interactions:
|
||||||
|
- request:
|
||||||
|
body: '{"aggs":{"distinct_pods":{"terms":{"field":"pod_name.keyword"},"aggs":{"indices":{"terms":{"field":"_index"}},"namespace":{"terms":{"field":"pod_namespace.keyword"}},"runid":{"terms":{"field":"pod_labels.runid.keyword"}}}}},"query":{"bool":{"filter":[{"term":{"pod_labels.runid.keyword":"20231109-101554"}}]}},"size":0}'
|
||||||
|
headers:
|
||||||
|
accept:
|
||||||
|
- application/vnd.elasticsearch+json; compatible-with=8
|
||||||
|
connection:
|
||||||
|
- keep-alive
|
||||||
|
content-type:
|
||||||
|
- application/vnd.elasticsearch+json; compatible-with=8
|
||||||
|
user-agent:
|
||||||
|
- elasticsearch-py/8.10.1 (Python/3.11.5; elastic-transport/8.10.0)
|
||||||
|
x-elastic-client-meta:
|
||||||
|
- es=8.10.1,py=3.11.5,t=8.10.0,ur=2.0.7
|
||||||
|
method: POST
|
||||||
|
uri: http://localhost:9200/continuous-tests-pods-*/_search
|
||||||
|
response:
|
||||||
|
body:
|
||||||
|
string: '{"took":17,"timed_out":false,"_shards":{"total":14,"successful":14,"skipped":0,"failed":0},"hits":{"total":{"value":10000,"relation":"gte"},"max_score":null,"hits":[]},"aggregations":{"distinct_pods":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"bootstrap-2-58b69484bc-88msf","doc_count":11145047,"indices":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"continuous-tests-pods-2023.11.10","doc_count":11060058},{"key":"continuous-tests-pods-2023.11.09","doc_count":84989}]},"namespace":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"codex-continuous-tests-profiling-two-client-tests-sched-0","doc_count":11145047}]},"runid":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"20231109-101554","doc_count":11145047}]}},{"key":"codex1-3-b558568cf-tvcsc","doc_count":11138278,"indices":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"continuous-tests-pods-2023.11.10","doc_count":11053946},{"key":"continuous-tests-pods-2023.11.09","doc_count":84332}]},"namespace":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"codex-continuous-tests-profiling-two-client-tests-sched-0","doc_count":11138278}]},"runid":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"20231109-101554","doc_count":11138278}]}},{"key":"geth-0-7d8bc9dd5b-8wx95","doc_count":332341,"indices":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"continuous-tests-pods-2023.11.10","doc_count":301052},{"key":"continuous-tests-pods-2023.11.09","doc_count":31289}]},"namespace":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"codex-continuous-tests-profiling-two-client-tests-sched-0","doc_count":332341}]},"runid":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"20231109-101554","doc_count":332341}]}},{"key":"ctnr4-d8f8d6d8-rtqrp","doc_count":60,"indices":{"
doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"continuous-tests-pods-2023.11.09","doc_count":34},{"key":"continuous-tests-pods-2023.11.10","doc_count":26}]},"namespace":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"codex-continuous-tests-profiling-two-client-tests-sched-0","doc_count":60}]},"runid":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"20231109-101554","doc_count":60}]}},{"key":"codex-contracts-1-b98d98877-bqd5x","doc_count":13,"indices":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"continuous-tests-pods-2023.11.09","doc_count":13}]},"namespace":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"codex-continuous-tests-profiling-two-client-tests-sched-0","doc_count":13}]},"runid":{"doc_count_error_upper_bound":0,"sum_other_doc_count":0,"buckets":[{"key":"20231109-101554","doc_count":13}]}}]}}}'
|
||||||
|
headers:
|
||||||
|
Transfer-Encoding:
|
||||||
|
- chunked
|
||||||
|
X-elastic-product:
|
||||||
|
- Elasticsearch
|
||||||
|
content-type:
|
||||||
|
- application/vnd.elasticsearch+json;compatible-with=8
|
||||||
|
status:
|
||||||
|
code: 200
|
||||||
|
message: OK
|
||||||
|
version: 1
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,64 @@
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from logtools.log.sources.input.elastic_search.elastic_search_log_repo import ElasticSearchLogRepo, Namespace, Pod
|
||||||
|
|
||||||
|
|
||||||
|
# XXX these are not good quality tests as they are overly complex and either tightly coupled to specific data or very
|
||||||
|
# weak in terms of what they assert. Ideally we should build simpler fixtures and test smaller bits at a time, but
|
||||||
|
# that requires a lot of setup, so we go with this.
|
||||||
|
|
||||||
|
@pytest.mark.vcr
def test_should_retrieve_existing_namespaces():
    repo = ElasticSearchLogRepo()

    expected = {
        Namespace(
            name='codex-continuous-tests-profiling-two-client-tests-0',
            run_id=frozenset({
                '20231109-085853',
                '20231107-074743',
                '20231109-043100',
                '20231107-065930',
                '20231107-064223',
                '20231109-055106'
            }),
            indices=frozenset({
                'continuous-tests-pods-2023.11.07',
                'continuous-tests-pods-2023.11.09',
                'continuous-tests-pods-2023.11.10',
            }),
        ),
        Namespace(
            name='codex-continuous-tests-profiling-two-client-tests-sched-0',
            run_id=frozenset({'20231109-101554'}),
            indices=frozenset({
                'continuous-tests-pods-2023.11.09',
                'continuous-tests-pods-2023.11.10'
            }),
        )
    }

    assert set(repo.namespaces('codex-continuous-tests-profiling')) == expected
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.vcr
def test_should_retrieve_existing_pods_for_namespace():
    repo = ElasticSearchLogRepo()
    pods = set(repo.pods(run_id='20231109-101554'))

    expected_names = {
        'bootstrap-2-58b69484bc-88msf',
        'codex1-3-b558568cf-tvcsc',
        'geth-0-7d8bc9dd5b-8wx95',
        'ctnr4-d8f8d6d8-rtqrp',
        'codex-contracts-1-b98d98877-bqd5x',
    }
    assert {pod.name for pod in pods} == expected_names

    expected_pod = Pod(
        name='bootstrap-2-58b69484bc-88msf',
        namespace='codex-continuous-tests-profiling-two-client-tests-sched-0',
        run_id='20231109-101554',
        indices=frozenset({
            'continuous-tests-pods-2023.11.09',
            'continuous-tests-pods-2023.11.10'
        })
    )
    assert expected_pod in pods
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,29 @@
|
||||||
|
from datetime import timedelta
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from dateutil import parser
|
||||||
|
|
||||||
|
from logtools.log.sources.input.elastic_search.elastic_search_source import ElasticSearchSource
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.vcr
def test_should_fetch_logs_by_date():
    start_date = parser.parse('2023-11-10T05:14:46.9842511Z')
    end_date = parser.parse('2023-11-10T05:15:47.0842511Z')
    pod_names = {'codex1-3-b558568cf-tvcsc', 'bootstrap-2-58b69484bc-88msf'}

    log = ElasticSearchSource(
        start_date=start_date,
        end_date=end_date,
        run_id='20231109-101554',
        pods=pod_names
    )

    lines = list(log)
    assert len(lines) > 0

    # ES resolution is 1ms, so we may get some results that are off up to 1ms
    lower = start_date - timedelta(milliseconds=1)
    upper = end_date + timedelta(milliseconds=1)
    assert all(lower <= line.timestamp <= upper for line in lines)

    assert {line.location.pod_name for line in lines} == pod_names
    assert {line.location.run_id for line in lines} == {'20231109-101554'}
|
|
@ -0,0 +1,23 @@
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Iterator
|
||||||
|
|
||||||
|
from logtools.log.base import LogSource, RawLogLine, LineNumberLocation
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class FileLineLocation(LineNumberLocation):
    """Location of a log line within a file on disk."""
    path: Path


class FileLogSource(LogSource[RawLogLine[FileLineLocation]]):
    """Log source which reads raw lines from a UTF-8 text file.

    The file is opened lazily on each iteration and closed when the iteration
    finishes (or is abandoned), courtesy of the ``with`` block.
    """

    def __init__(self, path: Path):
        self.path = path

    def __iter__(self) -> Iterator[RawLogLine[FileLineLocation]]:
        with self.path.open(encoding='utf-8') as infile:
            for line_number, content in enumerate(infile, start=1):
                yield RawLogLine(
                    location=FileLineLocation(path=self.path, line_number=line_number),
                    raw=content
                )
|
|
@ -0,0 +1,23 @@
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from io import StringIO
|
||||||
|
from typing import Iterator
|
||||||
|
|
||||||
|
from logtools.log.base import LogSource, RawLogLine, LineNumberLocation
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class ParseLocation(LineNumberLocation):
    """Location of a log line within a named, in-memory string source."""
    name: str


class StringLogSource(LogSource[RawLogLine[ParseLocation]]):
    """Log source which serves lines from an in-memory string.

    Note: the backing :class:`StringIO` is consumed by iteration, so this
    source can only be iterated once per instance.
    """

    def __init__(self, lines: str, name: str = 'unnamed'):
        self.name = name
        self.stream = StringIO(lines)

    def __iter__(self) -> Iterator[RawLogLine[ParseLocation]]:
        for line_number, content in enumerate(self.stream, start=1):
            yield RawLogLine(
                location=ParseLocation(name=self.name, line_number=line_number),
                raw=content
            )
|
|
@ -1,17 +1,18 @@
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from logtools.log.sources.file_log_source import FileLogSource
|
from logtools.log.sources.parse.chronicles_raw_source import ChroniclesRawSource
|
||||||
|
from logtools.log.sources.input.file_log_source import FileLogSource
|
||||||
|
|
||||||
SAMPLE_LOG = Path(__file__).parent / 'sample.log'
|
SAMPLE_LOG = Path(__file__).parent / 'sample.log'
|
||||||
|
|
||||||
|
|
||||||
def test_should_read_lines_from_file():
|
def test_should_read_lines_from_file():
|
||||||
log = FileLogSource(SAMPLE_LOG)
|
log = ChroniclesRawSource(FileLogSource(SAMPLE_LOG))
|
||||||
assert [line.count for line in log] == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
|
assert [line.count for line in log] == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
|
||||||
|
|
||||||
|
|
||||||
def test_should_provide_location_context_for_lines():
|
def test_should_provide_location_context_for_lines():
|
||||||
log = iter(FileLogSource(SAMPLE_LOG))
|
log = iter(ChroniclesRawSource(FileLogSource(SAMPLE_LOG)))
|
||||||
line1 = next(log)
|
line1 = next(log)
|
||||||
line2 = next(log)
|
line2 = next(log)
|
||||||
|
|
|
@ -1,79 +0,0 @@
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from csv import DictReader
|
|
||||||
from dataclasses import dataclass
|
|
||||||
|
|
||||||
from typing import Callable, TextIO, Optional, cast
|
|
||||||
|
|
||||||
from dateutil import parser as tsparser
|
|
||||||
|
|
||||||
from logtools.log.log_line import LogLevel
|
|
||||||
from logtools.log.sources.log_source import TrackedLogLine, LogSource
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class LineNumberLocation:
|
|
||||||
line_number: int
|
|
||||||
|
|
||||||
|
|
||||||
"""A :class:`LogParser` is a function that takes a raw text stream and returns a :class:`LogSource`, which in turn
|
|
||||||
is an iterable of parsed lines."""
|
|
||||||
LogParser = Callable[[TextIO], LogSource[LineNumberLocation]]
|
|
||||||
|
|
||||||
LOG_LINE = re.compile(
|
|
||||||
r'(?P<line_type>\w{3}) (?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3}\+\d{2}:\d{2}) (?P<message>.*) '
|
|
||||||
r'count=(?P<count>\d+)$'
|
|
||||||
)
|
|
||||||
|
|
||||||
TOPICS = re.compile(r'((\w+=("[\w\s]+"|\S+) )+)?\w+=("[\w\s]+"|\S+)$')
|
|
||||||
|
|
||||||
|
|
||||||
def parse_raw(line: str, parse_datetime: bool = True) -> Optional[TrackedLogLine[LineNumberLocation]]:
|
|
||||||
parsed = LOG_LINE.search(line)
|
|
||||||
if not parsed:
|
|
||||||
return None
|
|
||||||
|
|
||||||
topics = TOPICS.search(parsed['message'])
|
|
||||||
if not topics:
|
|
||||||
return None
|
|
||||||
|
|
||||||
return TrackedLogLine(
|
|
||||||
raw=line,
|
|
||||||
level=LogLevel(parsed['line_type'].upper()),
|
|
||||||
timestamp=(tsparser.parse(parsed['timestamp']) if parse_datetime
|
|
||||||
else parsed['timestamp']),
|
|
||||||
message=parsed['message'][:topics.start() - 1].strip(),
|
|
||||||
count=int(parsed['count']) if parsed['count'] else None,
|
|
||||||
topics=topics.group()
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def raw_parser(stream: TextIO, parse_datetime=True) -> LogSource:
|
|
||||||
for line_number, line in enumerate(stream, start=1):
|
|
||||||
parsed = parse_raw(line, parse_datetime=parse_datetime)
|
|
||||||
if not parsed:
|
|
||||||
# FIXME we should probably relax parsing restrictions and output
|
|
||||||
# these too but for now just skip it.
|
|
||||||
print(f'Skip unparseable line: {line}', file=sys.stderr)
|
|
||||||
continue
|
|
||||||
|
|
||||||
yield parsed
|
|
||||||
|
|
||||||
|
|
||||||
def csv_parser(stream: TextIO, parse_datetime=True) -> LogSource:
|
|
||||||
for line_number, line in enumerate(DictReader(stream), start=1):
|
|
||||||
try:
|
|
||||||
parsed_line: TrackedLogLine = TrackedLogLine(
|
|
||||||
raw=line['message'], # FIXME this is NOT the raw line...
|
|
||||||
timestamp=line['timestamp'],
|
|
||||||
message=line['message'],
|
|
||||||
count=int(line['count']) if line['count'] else None,
|
|
||||||
topics=line['topics'],
|
|
||||||
level=LogLevel[line['level']],
|
|
||||||
)
|
|
||||||
|
|
||||||
if parse_datetime:
|
|
||||||
parsed_line.timestamp = tsparser.parse(cast(str, parsed_line.timestamp))
|
|
||||||
yield parsed_line
|
|
||||||
except ValueError:
|
|
||||||
print(f'Skip unparseable line: {line}', file=sys.stderr)
|
|
|
@ -1,13 +0,0 @@
|
||||||
from collections.abc import Iterable
|
|
||||||
from typing import TypeVar, Generic
|
|
||||||
|
|
||||||
from logtools.log.log_line import LogLine
|
|
||||||
|
|
||||||
TLocation = TypeVar('TLocation')
|
|
||||||
|
|
||||||
|
|
||||||
class TrackedLogLine(LogLine, Generic[TLocation]):
|
|
||||||
location: TLocation
|
|
||||||
|
|
||||||
|
|
||||||
LogSource = Iterable[TrackedLogLine[TLocation]]
|
|
|
@ -1,24 +0,0 @@
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from logtools.log.sources.log_source import TLocation, LogSource, TrackedLogLine
|
|
||||||
|
|
||||||
|
|
||||||
class LookAheadSource(LogSource[TLocation]):
|
|
||||||
def __init__(self, source: LogSource[TLocation]):
|
|
||||||
self.source = iter(source)
|
|
||||||
self._lookahead = next(self.source, None)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def peek(self) -> Optional[TrackedLogLine[TLocation]]:
|
|
||||||
return self._lookahead
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __next__(self):
|
|
||||||
if self._lookahead is None:
|
|
||||||
raise StopIteration()
|
|
||||||
|
|
||||||
value = self._lookahead
|
|
||||||
self._lookahead = next(self.source, None)
|
|
||||||
return value
|
|
|
@ -0,0 +1,81 @@
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Iterator, Optional
|
||||||
|
|
||||||
|
from dateutil import parser
|
||||||
|
|
||||||
|
from logtools.log.base import LogSource, TLocation, RawLogLine, TimestampedLogLine
|
||||||
|
|
||||||
|
# Matches one raw Chronicles line: "<LVL> <timestamp> <message> count=<n>".
# Named groups: line_type (3-letter level code), timestamp, message, count.
_LOG_LINE = re.compile(
    r'(?P<line_type>\w{3}) (?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3}\+\d{2}:\d{2}) (?P<message>.*) '
    r'count=(?P<count>\d+)$'
)

# Matches the trailing run of key=value pairs (values may be double-quoted) at the end of the message.
_TOPICS = re.compile(r'((\w+=("[\w\s]+"|\S+) )+)?\w+=("[\w\s]+"|\S+)$')

# Extracts the individual key=value pairs out of a topics string.
_TOPICS_KV = re.compile(r'(?P<key>\w+)=(?P<value>"[\w\s]+"|\S+)')
|
||||||
|
|
||||||
|
|
||||||
|
class LogLevel(Enum):
    """Chronicles log severity levels, keyed by their three-letter wire codes."""
    trace = 'TRC'
    debug = 'DBG'
    info = 'INF'
    error = 'ERR'
    warning = 'WRN'
    note = 'NOT'
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class ChroniclesLogLine(TimestampedLogLine[TLocation]):
    """
    A :class:`ChroniclesLogLine` is a log line coming from [Chronicles](https://github.com/status-im/nim-chronicles).
    """
    timestamp: datetime
    level: LogLevel
    message: str
    topics: str
    count: Optional[int]

    @property
    def fields(self):
        """The topics string parsed into a key -> value dict (empty when nothing matches)."""
        # _TOPICS_KV has exactly two groups, so findall yields (key, value) tuples.
        return dict(_TOPICS_KV.findall(self.topics))
|
||||||
|
|
||||||
|
|
||||||
|
class ChroniclesRawSource(LogSource[ChroniclesLogLine[TLocation]]):
    """Parses a Chronicles log from raw text. Other variants could parse from JSON or CSV."""

    def __init__(self, stream: LogSource[RawLogLine[TLocation]]):
        self.stream = stream

    def __iter__(self) -> Iterator[ChroniclesLogLine[TLocation]]:
        for line in self.stream:
            entry = self._parse_raw(line)
            if entry is None:
                # Best-effort: report and move on rather than aborting the whole stream.
                print(f'Skip unparseable line: {line}', file=sys.stderr)
                continue
            entry.location = line.location
            yield entry

    @staticmethod
    def _parse_raw(line: RawLogLine[TLocation]) -> Optional[ChroniclesLogLine[TLocation]]:
        """Parse one raw line into a ChroniclesLogLine; None when line or topics don't match."""
        header = _LOG_LINE.search(line.raw)
        if header is None:
            return None

        topics_match = _TOPICS.search(header['message'])
        if topics_match is None:
            return None

        count = header['count']
        return ChroniclesLogLine(
            location=line.location,
            raw=line.raw,
            level=LogLevel(header['line_type'].upper()),
            timestamp=parser.parse(header['timestamp']),
            # Message is everything before the trailing topics block (minus the separator space).
            message=header['message'][:topics_match.start() - 1].strip(),
            count=int(count) if count else None,
            topics=topics_match.group(),
        )
|
|
@ -0,0 +1,52 @@
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import pytz
|
||||||
|
|
||||||
|
from logtools.log.sources.parse.chronicles_raw_source import ChroniclesRawSource, ChroniclesLogLine, LogLevel
|
||||||
|
from logtools.log.sources.input.string_log_source import StringLogSource
|
||||||
|
|
||||||
|
|
||||||
|
def test_should_parse_raw_chronicles_logs():
    # Feed a single well-formed Chronicles line through the parser and check every parsed field.
    source = ChroniclesRawSource(
        StringLogSource(
            lines='TRC 2023-10-16 17:28:46.579+00:00 Sending want list to peer '
                  'topics="codex blockexcnetwork" tid=1 peer=16U*7mogoM type=WantBlock items=1 count=870781'
        )
    ).__iter__()

    line = next(source)

    assert line.level == LogLevel.trace
    # Timestamp must come back timezone-aware (UTC offset in the raw line).
    assert line.timestamp == datetime(2023, 10, 16, 17, 28, 46,
                                      579000, tzinfo=pytz.utc)
    assert line.message == 'Sending want list to peer'
    assert line.topics == 'topics="codex blockexcnetwork" tid=1 peer=16U*7mogoM type=WantBlock items=1'
    assert line.count == 870781
|
||||||
|
|
||||||
|
|
||||||
|
def test_should_skip_unparseable_lines():
    # A line that matches neither regex is dropped, so the source is simply empty.
    source = ChroniclesRawSource(StringLogSource(lines='This is not a log line')).__iter__()
    with pytest.raises(StopIteration):
        next(source)
|
||||||
|
|
||||||
|
|
||||||
|
def test_should_parse_chronicles_fields():
    # Construct a line directly and check the topics string decomposes into key/value pairs.
    line = ChroniclesLogLine(
        location=None,
        message='Sending want list to peer',
        topics='topics="codex blockexcnetwork" tid=1 peer=16U*7mogoM '
               'type=WantBlock items=1',
        timestamp=datetime(2020, 1, 1, 0, 0, 0, 0),
        count=0,
        raw='',
        level=LogLevel.trace
    )

    # Quoted values keep their surrounding double quotes.
    assert line.fields == {
        'topics': '"codex blockexcnetwork"',
        'tid': '1',
        'peer': '16U*7mogoM',
        'type': 'WantBlock',
        'items': '1',
    }
|
|
@ -1,19 +0,0 @@
|
||||||
from typing import TextIO
|
|
||||||
|
|
||||||
from logtools.log.sources.log_parsers import raw_parser, LineNumberLocation, LogParser
|
|
||||||
from logtools.log.sources.log_source import LogSource, TrackedLogLine
|
|
||||||
|
|
||||||
|
|
||||||
class StreamLogSource(LogSource[TrackedLogLine[LineNumberLocation]]):
|
|
||||||
def __init__(self, stream: TextIO, parse_datetime=True, log_format: LogParser = raw_parser):
|
|
||||||
self.stream = stream
|
|
||||||
self.format = log_format
|
|
||||||
self.parse_datetime = parse_datetime
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
for line_number, line in enumerate(self.format(self.stream), start=1):
|
|
||||||
line.location = self._location(line_number)
|
|
||||||
yield line
|
|
||||||
|
|
||||||
def _location(self, line_number: int) -> LineNumberLocation:
|
|
||||||
return LineNumberLocation(line_number)
|
|
|
@ -1,10 +0,0 @@
|
||||||
TRC 2023-10-17 13:52:23.876+00:00 Got wantList for peer topics="codex blockexcengine" tid=1 peer=16U*JMNiva items=1 count=1
|
|
||||||
TRC 2023-10-17 13:52:23.876+00:00 Retrieving peer from peer context store topics="codex peerctxstore" tid=1 peer=16U*JMNiva count=2
|
|
||||||
TRC 2023-10-17 13:52:23.876+00:00 Processing new want list entry topics="codex blockexcengine" tid=1 cid=zb2*3CyMa3 peer=16U*JMNiva wantType=WantBlock count=3
|
|
||||||
TRC 2023-10-17 13:52:23.876+00:00 Added entry to peer's want blocks list topics="codex blockexcengine" tid=1 cid=zb2*3CyMa3 peer=16U*JMNiva wantType=WantBlock count=4
|
|
||||||
TRC 2023-10-17 13:52:23.876+00:00 Scheduling a task for this peer, to look over their want-list topics="codex blockexcengine" tid=1 peer=16U*JMNiva count=5
|
|
||||||
TRC 2023-10-17 13:52:23.879+00:00 Got new task from queue topics="codex blockexcengine" tid=1 peerId=16U*JMNiva count=6
|
|
||||||
TRC 2023-10-17 13:52:23.879+00:00 Handling task for peer topics="codex blockexcengine" tid=1 peer=16U*JMNiva count=7
|
|
||||||
TRC 2023-10-17 13:52:23.880+00:00 wantsBlocks topics="codex blockexcengine" tid=1 peer=16U*JMNiva n=3850 count=8
|
|
||||||
TRC 2023-10-17 13:52:23.880+00:00 Got peer want blocks list topics="codex blockexcengine" tid=1 items=3850 count=9
|
|
||||||
TRC 2023-10-17 13:52:23.950+00:00 Got block for cid topics="codex repostore" tid=1 cid=zb2*3CyMa3 count=10
|
|
|
@ -1,19 +0,0 @@
|
||||||
from dataclasses import dataclass
|
|
||||||
from io import StringIO
|
|
||||||
|
|
||||||
from logtools.log.sources.log_parsers import LogParser
|
|
||||||
from logtools.log.sources.stream_log_source import StreamLogSource, LineNumberLocation, raw_parser
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ParseLocation(LineNumberLocation):
|
|
||||||
name: str
|
|
||||||
|
|
||||||
|
|
||||||
class StringLogSource(StreamLogSource):
|
|
||||||
def __init__(self, name: str, lines: str, log_format: LogParser = raw_parser):
|
|
||||||
self.name = name
|
|
||||||
super().__init__(stream=StringIO(lines), log_format=log_format)
|
|
||||||
|
|
||||||
def _location(self, line_number: int) -> LineNumberLocation:
|
|
||||||
return ParseLocation(name=self.name, line_number=line_number)
|
|
|
@ -1,38 +0,0 @@
|
||||||
from dateutil import parser
|
|
||||||
|
|
||||||
from logtools.log.sources.filtered_source import FilteredSource, timestamp_range
|
|
||||||
from logtools.log.sources.log_parsers import parse_raw
|
|
||||||
from logtools.log.sources.tests.string_log_source import StringLogSource
|
|
||||||
|
|
||||||
|
|
||||||
def test_should_filter_by_matching_predicate():
|
|
||||||
log1 = StringLogSource(
|
|
||||||
name='log1',
|
|
||||||
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
|
|
||||||
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
|
|
||||||
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
|
|
||||||
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
|
|
||||||
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=5"""
|
|
||||||
)
|
|
||||||
|
|
||||||
def predicate(line):
|
|
||||||
return line.count % 2 == 0
|
|
||||||
|
|
||||||
assert [line.count for line in FilteredSource(log1, predicate)] == [2, 4]
|
|
||||||
|
|
||||||
|
|
||||||
def test_should_generate_correct_datetime_range_predicate():
|
|
||||||
raw_lines = [
|
|
||||||
'TRC 2023-10-16 20:29:24.595+00:00 one topics="codex discoveryengine" count=1',
|
|
||||||
'TRC 2023-10-17 20:29:24.597+00:00 two topics="codex discoveryengine" count=2',
|
|
||||||
'TRC 2023-10-18 20:29:24.597+00:00 three topics="codex discoveryengine" count=3',
|
|
||||||
'TRC 2023-10-18 21:29:24.597+00:00 four little indians topics="codex discoveryengine" count=4',
|
|
||||||
]
|
|
||||||
|
|
||||||
matches = timestamp_range(start=parser.parse('2023-10-16 22:29:24.597+00:00'),
|
|
||||||
end=parser.parse('2023-10-18 20:29:25.597+00:00'))
|
|
||||||
|
|
||||||
lines = [parse_raw(line, parse_datetime=True) for line in raw_lines]
|
|
||||||
filtered = [line.count for line in lines if matches(line)]
|
|
||||||
|
|
||||||
assert filtered == [2, 3]
|
|
|
@ -1,24 +0,0 @@
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
import pytz
|
|
||||||
|
|
||||||
from logtools.log.log_line import LogLevel
|
|
||||||
from logtools.log.sources.log_parsers import parse_raw
|
|
||||||
|
|
||||||
|
|
||||||
def test_raw_parser_should_parse_logline_from_string():
|
|
||||||
line = parse_raw('TRC 2023-10-16 17:28:46.579+00:00 Sending want list to peer '
|
|
||||||
'topics="codex blockexcnetwork" tid=1 peer=16U*7mogoM '
|
|
||||||
'type=WantBlock items=1 count=870781', parse_datetime=True)
|
|
||||||
|
|
||||||
assert line.level == LogLevel.trace
|
|
||||||
assert line.timestamp == datetime(2023, 10, 16, 17, 28, 46,
|
|
||||||
579000, tzinfo=pytz.utc)
|
|
||||||
assert line.message == 'Sending want list to peer'
|
|
||||||
assert line.topics == 'topics="codex blockexcnetwork" tid=1 peer=16U*7mogoM type=WantBlock items=1'
|
|
||||||
assert line.count == 870781
|
|
||||||
|
|
||||||
|
|
||||||
def test_raw_parser_should_return_none_if_line_is_not_parseable():
|
|
||||||
line = parse_raw('This is not a log line', parse_datetime=True)
|
|
||||||
assert line is None
|
|
|
@ -0,0 +1,13 @@
|
||||||
|
from typing import Iterator
|
||||||
|
|
||||||
|
from logtools.log.base import LogSource, TLogLine
|
||||||
|
|
||||||
|
|
||||||
|
class CollatingSource(LogSource[TLogLine]):
    """Concatenates several log sources, yielding every line of each source in turn."""

    def __init__(self, *sources: LogSource[TLogLine]):
        self.sources = sources

    def __iter__(self) -> Iterator[TLogLine]:
        for source in self.sources:
            # Delegate to each underlying source in the order they were given.
            yield from source
|
|
@ -0,0 +1,22 @@
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Callable, Iterator
|
||||||
|
|
||||||
|
from logtools.log.base import LogSource, TLogLine, TimestampedLogLine, TLocation
|
||||||
|
|
||||||
|
|
||||||
|
class FilteredSource(LogSource[TLogLine]):
    """Wraps a log source and yields only the lines accepted by `predicate`."""

    def __init__(self, source: LogSource[TLogLine], predicate: Callable[[TLogLine], bool]):
        self.source = source
        self.predicate = predicate

    def __iter__(self) -> Iterator[TLogLine]:
        # Lazy: lines are pulled from the underlying source only as the caller iterates.
        return (entry for entry in self.source if self.predicate(entry))
|
||||||
|
|
||||||
|
|
||||||
|
def timestamp_range(start: datetime, end: datetime) -> Callable[[TimestampedLogLine[TLocation]], bool]:
    """Build a predicate that accepts lines whose timestamp lies in [start, end] (inclusive)."""
    def _within(line: TimestampedLogLine[TLocation]):
        return start <= line.timestamp <= end

    return _within
|
|
@ -0,0 +1,24 @@
|
||||||
|
from typing import Optional, Iterator
|
||||||
|
|
||||||
|
from logtools.log.base import LogSource, TLocation, RawLogLine, TLogLine
|
||||||
|
|
||||||
|
|
||||||
|
class LookAheadSource(LogSource[TLogLine]):
    """Iterator wrapper that exposes the upcoming element via `peek` without consuming it."""

    def __init__(self, source: LogSource[TLogLine]):
        self.source = iter(source)
        # Prime the buffer so `peek` works before the first __next__ call.
        self._lookahead = next(self.source, None)

    @property
    def peek(self) -> Optional[TLogLine]:
        """The element the next __next__ call will return, or None when exhausted."""
        return self._lookahead

    def __iter__(self) -> Iterator[TLogLine]:
        return self

    def __next__(self) -> TLogLine:
        current = self._lookahead
        if current is None:
            raise StopIteration()
        # Refill the buffer before handing out the current element.
        self._lookahead = next(self.source, None)
        return current
|
|
@ -1,18 +1,19 @@
|
||||||
from heapq import heapify, heappop, heappush
|
from heapq import heapify, heappop, heappush
|
||||||
|
from typing import Iterator
|
||||||
|
|
||||||
from logtools.log.sources.log_source import LogSource, TLocation
|
from logtools.log.base import LogSource, TLogLine
|
||||||
from logtools.log.sources.ordered_source import OrderedSource
|
from logtools.log.sources.transform.ordered_source import OrderedSource
|
||||||
|
|
||||||
|
|
||||||
class MergedSource(LogSource[TLocation]):
|
class MergedSource(LogSource[TLogLine]):
|
||||||
def __init__(self, *sources: OrderedSource[TLocation]):
|
def __init__(self, *sources: OrderedSource[TLogLine]):
|
||||||
self.sources = [source for source in sources if source.peek is not None]
|
self.sources = [source for source in sources if source.peek is not None]
|
||||||
heapify(self.sources)
|
heapify(self.sources)
|
||||||
|
|
||||||
def __iter__(self):
|
def __iter__(self) -> Iterator[TLogLine]:
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def __next__(self):
|
def __next__(self) -> TLogLine:
|
||||||
if not self.sources:
|
if not self.sources:
|
||||||
raise StopIteration()
|
raise StopIteration()
|
||||||
|
|
|
@ -1,11 +1,11 @@
|
||||||
import datetime
|
import datetime
|
||||||
from typing import Self
|
from typing import Self
|
||||||
|
|
||||||
from logtools.log.sources.log_source import TrackedLogLine, TLocation
|
from logtools.log.base import TimestampedLogLine, TLocation
|
||||||
from logtools.log.sources.lookahead_source import LookAheadSource
|
from logtools.log.sources.transform.lookahead_source import LookAheadSource
|
||||||
|
|
||||||
|
|
||||||
class OrderedSource(LookAheadSource[TLocation]):
|
class OrderedSource(LookAheadSource[TimestampedLogLine[TLocation]]):
|
||||||
def __lt__(self, other: Self) -> bool:
|
def __lt__(self, other: Self) -> bool:
|
||||||
return self._peek.timestamp < other._peek.timestamp # type: ignore
|
return self._peek.timestamp < other._peek.timestamp # type: ignore
|
||||||
|
|
||||||
|
@ -19,7 +19,7 @@ class OrderedSource(LookAheadSource[TLocation]):
|
||||||
return self._peek.timestamp >= other._peek.timestamp # type: ignore
|
return self._peek.timestamp >= other._peek.timestamp # type: ignore
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def _peek(self) -> TrackedLogLine[TLocation]:
|
def _peek(self) -> TimestampedLogLine[TLocation]:
|
||||||
value = self.peek
|
value = self.peek
|
||||||
if value is None:
|
if value is None:
|
||||||
raise ValueError('Cannot order sources that ran out of elements')
|
raise ValueError('Cannot order sources that ran out of elements')
|
|
@ -1,25 +1,26 @@
|
||||||
from logtools.log.sources.collating_source import CollatingSource
|
from logtools.log.sources.parse.chronicles_raw_source import ChroniclesRawSource
|
||||||
from logtools.log.sources.tests.string_log_source import StringLogSource
|
from logtools.log.sources.transform.collating_source import CollatingSource
|
||||||
|
from logtools.log.sources.input.string_log_source import StringLogSource
|
||||||
|
|
||||||
|
|
||||||
def test_should_collate_lines_from_log_sources():
|
def test_should_collate_lines_from_log_sources():
|
||||||
log1 = StringLogSource(
|
log1 = ChroniclesRawSource(StringLogSource(
|
||||||
name='log1',
|
name='log1',
|
||||||
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
|
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
|
||||||
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
|
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
|
||||||
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
|
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
|
||||||
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
|
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
|
||||||
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=5"""
|
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=5"""
|
||||||
)
|
))
|
||||||
|
|
||||||
log2 = StringLogSource(
|
log2 = ChroniclesRawSource(StringLogSource(
|
||||||
name='log2',
|
name='log2',
|
||||||
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=6
|
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=6
|
||||||
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=7
|
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=7
|
||||||
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=8
|
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=8
|
||||||
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=9
|
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=9
|
||||||
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=10"""
|
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=10"""
|
||||||
)
|
))
|
||||||
|
|
||||||
collated = CollatingSource(log1, log2)
|
collated = CollatingSource(log1, log2)
|
||||||
entries = [(line.location.name, line.location.line_number, line.count) for line in collated]
|
entries = [(line.location.name, line.location.line_number, line.count) for line in collated]
|
|
@ -0,0 +1,39 @@
|
||||||
|
from dateutil import parser
|
||||||
|
|
||||||
|
from logtools.log.sources.parse.chronicles_raw_source import ChroniclesRawSource
|
||||||
|
from logtools.log.sources.transform.filtered_source import FilteredSource, timestamp_range
|
||||||
|
from logtools.log.sources.input.string_log_source import StringLogSource
|
||||||
|
|
||||||
|
|
||||||
|
def test_should_filter_by_matching_predicate():
    # Five parsed lines, counts 1..5; keep only the even counts.
    log1 = ChroniclesRawSource(StringLogSource(
        name='log1',
        lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=5"""
    ))

    def predicate(line):
        return line.count % 2 == 0

    assert [line.count for line in FilteredSource(log1, predicate)] == [2, 4]
|
||||||
|
|
||||||
|
|
||||||
|
def test_should_generate_correct_datetime_range_predicate():
    # Four lines spanning three days; the range below covers only lines 2 and 3 (inclusive bounds).
    log1 = ChroniclesRawSource(StringLogSource(
        """TRC 2023-10-16 20:29:24.595+00:00 one topics="codex discoveryengine" count=1
TRC 2023-10-17 20:29:24.597+00:00 two topics="codex discoveryengine" count=2
TRC 2023-10-18 20:29:24.597+00:00 three topics="codex discoveryengine" count=3
TRC 2023-10-18 21:29:24.597+00:00 four little indians topics="codex discoveryengine" count=4"""
    ))

    line_numbers = [
        line.location.line_number for line in FilteredSource(
            log1, timestamp_range(start=parser.parse('2023-10-17 20:29:24.597+00:00'),
                                  end=parser.parse('2023-10-18 20:29:24.597+00:00'))
        )
    ]

    assert line_numbers == [2, 3]
|
|
@ -1,16 +1,17 @@
|
||||||
from logtools.log.sources.lookahead_source import LookAheadSource
|
from logtools.log.sources.parse.chronicles_raw_source import ChroniclesRawSource
|
||||||
from logtools.log.sources.tests.string_log_source import StringLogSource
|
from logtools.log.sources.transform.lookahead_source import LookAheadSource
|
||||||
|
from logtools.log.sources.input.string_log_source import StringLogSource
|
||||||
|
|
||||||
|
|
||||||
def test_should_allow_peeking_at_the_head_of_log():
|
def test_should_allow_peeking_at_the_head_of_log():
|
||||||
log1 = LookAheadSource(StringLogSource(
|
log1 = LookAheadSource(ChroniclesRawSource(StringLogSource(
|
||||||
name='log1',
|
name='log1',
|
||||||
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
|
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
|
||||||
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
|
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
|
||||||
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
|
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
|
||||||
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
|
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
|
||||||
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=5"""
|
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=5"""
|
||||||
))
|
)))
|
||||||
|
|
||||||
assert log1.peek.count == 1
|
assert log1.peek.count == 1
|
||||||
assert next(log1).count == 1
|
assert next(log1).count == 1
|
||||||
|
@ -18,27 +19,27 @@ def test_should_allow_peeking_at_the_head_of_log():
|
||||||
|
|
||||||
|
|
||||||
def test_should_return_all_elements():
|
def test_should_return_all_elements():
|
||||||
log1 = LookAheadSource(StringLogSource(
|
log1 = LookAheadSource(ChroniclesRawSource(StringLogSource(
|
||||||
name='log1',
|
name='log1',
|
||||||
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
|
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
|
||||||
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
|
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
|
||||||
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
|
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
|
||||||
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
|
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
|
||||||
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=5"""
|
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=5"""
|
||||||
))
|
)))
|
||||||
|
|
||||||
assert [entry.count for entry in log1] == [1, 2, 3, 4, 5]
|
assert [entry.count for entry in log1] == [1, 2, 3, 4, 5]
|
||||||
|
|
||||||
|
|
||||||
def test_should_raise_exception_when_nothing_to_peek():
|
def test_should_raise_exception_when_nothing_to_peek():
|
||||||
log1 = LookAheadSource(StringLogSource(
|
log1 = LookAheadSource(ChroniclesRawSource(StringLogSource(
|
||||||
name='log1',
|
name='log1',
|
||||||
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
|
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=1
|
||||||
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
|
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
|
||||||
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
|
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
|
||||||
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
|
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
|
||||||
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=5"""
|
TRC 2023-10-16 20:29:24.646+00:00 Providing block topics="codex discovery" count=5"""
|
||||||
))
|
)))
|
||||||
|
|
||||||
for _ in log1:
|
for _ in log1:
|
||||||
...
|
...
|
|
@ -1,26 +1,27 @@
|
||||||
from logtools.log.sources.merged_source import MergedSource
|
from logtools.log.sources.parse.chronicles_raw_source import ChroniclesRawSource
|
||||||
from logtools.log.sources.ordered_source import OrderedSource
|
from logtools.log.sources.transform.merged_source import MergedSource
|
||||||
from logtools.log.sources.tests.string_log_source import StringLogSource
|
from logtools.log.sources.transform.ordered_source import OrderedSource
|
||||||
|
from logtools.log.sources.input.string_log_source import StringLogSource
|
||||||
|
|
||||||
|
|
||||||
def test_should_merge_logs_by_timestamp():
|
def test_should_merge_logs_by_timestamp():
|
||||||
log1 = OrderedSource(StringLogSource(
|
log1 = OrderedSource(ChroniclesRawSource(StringLogSource(
|
||||||
name='log1',
|
name='log1',
|
||||||
lines="""TRC 2023-10-16 20:29:24.594+00:00 Advertising block topics="codex discoveryengine" count=1
|
lines="""TRC 2023-10-16 20:29:24.594+00:00 Advertising block topics="codex discoveryengine" count=1
|
||||||
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
|
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
|
||||||
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
|
TRC 2023-10-16 20:29:24.597+00:00 Advertised block topics="codex discoveryengine" count=3
|
||||||
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
|
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=4
|
||||||
TRC 2023-10-16 20:29:24.647+00:00 Providing block topics="codex discovery" count=5"""
|
TRC 2023-10-16 20:29:24.647+00:00 Providing block topics="codex discovery" count=5"""
|
||||||
))
|
)))
|
||||||
|
|
||||||
log2 = OrderedSource(StringLogSource(
|
log2 = OrderedSource(ChroniclesRawSource(StringLogSource(
|
||||||
name='log2',
|
name='log2',
|
||||||
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=6
|
lines="""TRC 2023-10-16 20:29:24.595+00:00 Advertising block topics="codex discoveryengine" count=6
|
||||||
TRC 2023-10-16 20:29:24.596+00:00 Provided to nodes topics="codex discovery" tid=1 count=7
|
TRC 2023-10-16 20:29:24.596+00:00 Provided to nodes topics="codex discovery" tid=1 count=7
|
||||||
TRC 2023-10-16 20:29:24.596+00:00 Advertised block topics="codex discoveryengine" count=8
|
TRC 2023-10-16 20:29:24.596+00:00 Advertised block topics="codex discoveryengine" count=8
|
||||||
TRC 2023-10-16 20:29:24.645+00:00 Retrieved record from repo topics="codex repostore" count=9
|
TRC 2023-10-16 20:29:24.645+00:00 Retrieved record from repo topics="codex repostore" count=9
|
||||||
TRC 2023-10-16 20:29:24.649+00:00 Providing block topics="codex discovery" count=10"""
|
TRC 2023-10-16 20:29:24.649+00:00 Providing block topics="codex discovery" count=10"""
|
||||||
))
|
)))
|
||||||
|
|
||||||
merged = MergedSource(log1, log2)
|
merged = MergedSource(log1, log2)
|
||||||
assert [line.count for line in merged] == [1, 6, 7, 8, 2, 3, 9, 4, 5, 10]
|
assert [line.count for line in merged] == [1, 6, 7, 8, 2, 3, 9, 4, 5, 10]
|
|
@ -1,7 +1,8 @@
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from logtools.log.sources.ordered_source import OrderedSource
|
from logtools.log.sources.parse.chronicles_raw_source import ChroniclesRawSource
|
||||||
from logtools.log.sources.tests.string_log_source import StringLogSource
|
from logtools.log.sources.transform.ordered_source import OrderedSource
|
||||||
|
from logtools.log.sources.input.string_log_source import StringLogSource
|
||||||
|
|
||||||
|
|
||||||
def test_should_order_sources_by_lookahead_timestamp():
|
def test_should_order_sources_by_lookahead_timestamp():
|
||||||
|
@ -9,8 +10,8 @@ def test_should_order_sources_by_lookahead_timestamp():
|
||||||
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
|
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
|
||||||
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=3"""
|
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=3"""
|
||||||
|
|
||||||
log1 = OrderedSource(StringLogSource(name='log1', lines=lines))
|
log1 = OrderedSource(ChroniclesRawSource(StringLogSource(name='log1', lines=lines)))
|
||||||
log2 = OrderedSource(StringLogSource(name='log2', lines=lines))
|
log2 = OrderedSource(ChroniclesRawSource(StringLogSource(name='log2', lines=lines)))
|
||||||
|
|
||||||
next(log1)
|
next(log1)
|
||||||
assert log2 < log1
|
assert log2 < log1
|
||||||
|
@ -25,8 +26,8 @@ def test_should_raise_error_if_comparing_empty_sources():
|
||||||
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
|
TRC 2023-10-16 20:29:24.597+00:00 Provided to nodes topics="codex discovery" tid=1 count=2
|
||||||
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=3"""
|
TRC 2023-10-16 20:29:24.646+00:00 Retrieved record from repo topics="codex repostore" count=3"""
|
||||||
|
|
||||||
log1 = OrderedSource(StringLogSource(name='log1', lines=lines))
|
log1 = OrderedSource(ChroniclesRawSource(StringLogSource(name='log1', lines=lines)))
|
||||||
log2 = OrderedSource(StringLogSource(name='log2', lines=lines))
|
log2 = OrderedSource(ChroniclesRawSource(StringLogSource(name='log2', lines=lines)))
|
||||||
|
|
||||||
for _ in log1:
|
for _ in log1:
|
||||||
...
|
...
|
|
@ -1,19 +0,0 @@
|
||||||
from logtools.log.log_line import LogLine, LogLevel
|
|
||||||
|
|
||||||
|
|
||||||
def test_should_parse_chronicles_fields():
|
|
||||||
line = LogLine(message='Sending want list to peer',
|
|
||||||
topics='topics="codex blockexcnetwork" tid=1 peer=16U*7mogoM '
|
|
||||||
'type=WantBlock items=1',
|
|
||||||
timestamp='',
|
|
||||||
count=0,
|
|
||||||
raw='',
|
|
||||||
level=LogLevel.trace)
|
|
||||||
|
|
||||||
assert line.fields == {
|
|
||||||
'topics': '"codex blockexcnetwork"',
|
|
||||||
'tid': '1',
|
|
||||||
'peer': '16U*7mogoM',
|
|
||||||
'type': 'WantBlock',
|
|
||||||
'items': '1',
|
|
||||||
}
|
|
|
@ -39,6 +39,17 @@ files = [
|
||||||
{file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"},
|
{file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "certifi"
|
||||||
|
version = "2023.7.22"
|
||||||
|
description = "Python package for providing Mozilla's CA Bundle."
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.6"
|
||||||
|
files = [
|
||||||
|
{file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"},
|
||||||
|
{file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"},
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "colorama"
|
name = "colorama"
|
||||||
version = "0.4.6"
|
version = "0.4.6"
|
||||||
|
@ -72,6 +83,42 @@ files = [
|
||||||
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
|
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "elastic-transport"
|
||||||
|
version = "8.10.0"
|
||||||
|
description = "Transport classes and utilities shared among Python Elastic client libraries"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.6"
|
||||||
|
files = [
|
||||||
|
{file = "elastic-transport-8.10.0.tar.gz", hash = "sha256:ca51d08a4d16611701a57fb70592dbc7cb68c40fef4ac1becfe4aea100fe82ef"},
|
||||||
|
{file = "elastic_transport-8.10.0-py3-none-any.whl", hash = "sha256:e73ac3c7ad4e9209436207143d797d3f6b62a399a34d2729e069e44c9ea2cadc"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
certifi = "*"
|
||||||
|
urllib3 = ">=1.26.2,<3"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
develop = ["aiohttp", "furo", "mock", "pytest", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests", "sphinx (>2)", "sphinx-autodoc-typehints", "trustme"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "elasticsearch"
|
||||||
|
version = "8.10.1"
|
||||||
|
description = "Python client for Elasticsearch"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.6, <4"
|
||||||
|
files = [
|
||||||
|
{file = "elasticsearch-8.10.1-py3-none-any.whl", hash = "sha256:68141d42d10c7f67ac466ca00496830d3b81a7e9476c3baa5585060832c60c69"},
|
||||||
|
{file = "elasticsearch-8.10.1.tar.gz", hash = "sha256:2cb56b433daa2d3ef1aaa2e5a5eacd36ba1d66884722f3d7759a4f9d16190059"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
elastic-transport = ">=8,<9"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
async = ["aiohttp (>=3,<4)"]
|
||||||
|
requests = ["requests (>=2.4.0,<3.0.0)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "executing"
|
name = "executing"
|
||||||
version = "2.0.0"
|
version = "2.0.0"
|
||||||
|
@ -86,6 +133,17 @@ files = [
|
||||||
[package.extras]
|
[package.extras]
|
||||||
tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"]
|
tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "idna"
|
||||||
|
version = "3.4"
|
||||||
|
description = "Internationalized Domain Names in Applications (IDNA)"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.5"
|
||||||
|
files = [
|
||||||
|
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
|
||||||
|
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "iniconfig"
|
name = "iniconfig"
|
||||||
version = "2.0.0"
|
version = "2.0.0"
|
||||||
|
@ -179,6 +237,89 @@ files = [
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
traitlets = "*"
|
traitlets = "*"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "multidict"
|
||||||
|
version = "6.0.4"
|
||||||
|
description = "multidict implementation"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.7"
|
||||||
|
files = [
|
||||||
|
{file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"},
|
||||||
|
{file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"},
|
||||||
|
{file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"},
|
||||||
|
{file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"},
|
||||||
|
{file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"},
|
||||||
|
{file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"},
|
||||||
|
{file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"},
|
||||||
|
{file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"},
|
||||||
|
{file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"},
|
||||||
|
{file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"},
|
||||||
|
{file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"},
|
||||||
|
{file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"},
|
||||||
|
{file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"},
|
||||||
|
{file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"},
|
||||||
|
{file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"},
|
||||||
|
{file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"},
|
||||||
|
{file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"},
|
||||||
|
{file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"},
|
||||||
|
{file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"},
|
||||||
|
{file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"},
|
||||||
|
{file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"},
|
||||||
|
{file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"},
|
||||||
|
{file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"},
|
||||||
|
{file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"},
|
||||||
|
{file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"},
|
||||||
|
{file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"},
|
||||||
|
{file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"},
|
||||||
|
{file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"},
|
||||||
|
{file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"},
|
||||||
|
{file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"},
|
||||||
|
{file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"},
|
||||||
|
{file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"},
|
||||||
|
{file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"},
|
||||||
|
{file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"},
|
||||||
|
{file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"},
|
||||||
|
{file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"},
|
||||||
|
{file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"},
|
||||||
|
{file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"},
|
||||||
|
{file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"},
|
||||||
|
{file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"},
|
||||||
|
{file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"},
|
||||||
|
{file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"},
|
||||||
|
{file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"},
|
||||||
|
{file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"},
|
||||||
|
{file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"},
|
||||||
|
{file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"},
|
||||||
|
{file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"},
|
||||||
|
{file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"},
|
||||||
|
{file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"},
|
||||||
|
{file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"},
|
||||||
|
{file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"},
|
||||||
|
{file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"},
|
||||||
|
{file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"},
|
||||||
|
{file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"},
|
||||||
|
{file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"},
|
||||||
|
{file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"},
|
||||||
|
{file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"},
|
||||||
|
{file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"},
|
||||||
|
{file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"},
|
||||||
|
{file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"},
|
||||||
|
{file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"},
|
||||||
|
{file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"},
|
||||||
|
{file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"},
|
||||||
|
{file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"},
|
||||||
|
{file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"},
|
||||||
|
{file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"},
|
||||||
|
{file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"},
|
||||||
|
{file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"},
|
||||||
|
{file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"},
|
||||||
|
{file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"},
|
||||||
|
{file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"},
|
||||||
|
{file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"},
|
||||||
|
{file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"},
|
||||||
|
{file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"},
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "mypy"
|
name = "mypy"
|
||||||
version = "1.6.0"
|
version = "1.6.0"
|
||||||
|
@ -301,6 +442,23 @@ files = [
|
||||||
dev = ["pre-commit", "tox"]
|
dev = ["pre-commit", "tox"]
|
||||||
testing = ["pytest", "pytest-benchmark"]
|
testing = ["pytest", "pytest-benchmark"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "prettytable"
|
||||||
|
version = "3.9.0"
|
||||||
|
description = "A simple Python library for easily displaying tabular data in a visually appealing ASCII table format"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.8"
|
||||||
|
files = [
|
||||||
|
{file = "prettytable-3.9.0-py3-none-any.whl", hash = "sha256:a71292ab7769a5de274b146b276ce938786f56c31cf7cea88b6f3775d82fe8c8"},
|
||||||
|
{file = "prettytable-3.9.0.tar.gz", hash = "sha256:f4ed94803c23073a90620b201965e5dc0bccf1760b7a7eaf3158cab8aaffdf34"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
wcwidth = "*"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
tests = ["pytest", "pytest-cov", "pytest-lazy-fixture"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "prompt-toolkit"
|
name = "prompt-toolkit"
|
||||||
version = "3.0.39"
|
version = "3.0.39"
|
||||||
|
@ -374,6 +532,25 @@ pluggy = ">=0.12,<2.0"
|
||||||
[package.extras]
|
[package.extras]
|
||||||
testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
|
testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pytest-recording"
|
||||||
|
version = "0.13.0"
|
||||||
|
description = "A pytest plugin that allows you recording of network interactions via VCR.py"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.7"
|
||||||
|
files = [
|
||||||
|
{file = "pytest_recording-0.13.0-py3-none-any.whl", hash = "sha256:679b0ae6eb3337b981f6a4d590a30c28c43855bfac5b9ad87070ad6d08b05dbc"},
|
||||||
|
{file = "pytest_recording-0.13.0.tar.gz", hash = "sha256:b24b707af843341457d9d340328f361eceb0efe980e388341941b4fada3745ca"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
pytest = ">=3.5.0"
|
||||||
|
vcrpy = ">=2.0.1"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
dev = ["pytest-recording[tests]"]
|
||||||
|
tests = ["pytest-httpbin", "pytest-mock", "requests", "werkzeug (==2.3.6)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "python-dateutil"
|
name = "python-dateutil"
|
||||||
version = "2.8.2"
|
version = "2.8.2"
|
||||||
|
@ -399,6 +576,65 @@ files = [
|
||||||
{file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"},
|
{file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pyyaml"
|
||||||
|
version = "6.0.1"
|
||||||
|
description = "YAML parser and emitter for Python"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.6"
|
||||||
|
files = [
|
||||||
|
{file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
|
||||||
|
{file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
|
||||||
|
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
|
||||||
|
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
|
||||||
|
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
|
||||||
|
{file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
|
||||||
|
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
|
||||||
|
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
|
||||||
|
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
|
||||||
|
{file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
|
||||||
|
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
|
||||||
|
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
|
||||||
|
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
|
||||||
|
{file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
|
||||||
|
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
|
||||||
|
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
|
||||||
|
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
|
||||||
|
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
|
||||||
|
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
|
||||||
|
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
|
||||||
|
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
|
||||||
|
{file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
|
||||||
|
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
|
||||||
|
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
|
||||||
|
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
|
||||||
|
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
|
||||||
|
{file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
|
||||||
|
{file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
|
||||||
|
{file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
|
||||||
|
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
|
||||||
|
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
|
||||||
|
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
|
||||||
|
{file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
|
||||||
|
{file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
|
||||||
|
{file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
|
||||||
|
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
|
||||||
|
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
|
||||||
|
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
|
||||||
|
{file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
|
||||||
|
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
|
||||||
|
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
|
||||||
|
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
|
||||||
|
{file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
|
||||||
|
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
|
||||||
|
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
|
||||||
|
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
|
||||||
|
{file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
|
||||||
|
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
|
||||||
|
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
|
||||||
|
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "six"
|
name = "six"
|
||||||
version = "1.16.0"
|
version = "1.16.0"
|
||||||
|
@ -477,6 +713,39 @@ files = [
|
||||||
{file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"},
|
{file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "urllib3"
|
||||||
|
version = "2.0.7"
|
||||||
|
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.7"
|
||||||
|
files = [
|
||||||
|
{file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"},
|
||||||
|
{file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
|
||||||
|
secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
|
||||||
|
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||||
|
zstd = ["zstandard (>=0.18.0)"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "vcrpy"
|
||||||
|
version = "5.1.0"
|
||||||
|
description = "Automatically mock your HTTP interactions to simplify and speed up testing"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.8"
|
||||||
|
files = [
|
||||||
|
{file = "vcrpy-5.1.0-py2.py3-none-any.whl", hash = "sha256:605e7b7a63dcd940db1df3ab2697ca7faf0e835c0852882142bafb19649d599e"},
|
||||||
|
{file = "vcrpy-5.1.0.tar.gz", hash = "sha256:bbf1532f2618a04f11bce2a99af3a9647a32c880957293ff91e0a5f187b6b3d2"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
PyYAML = "*"
|
||||||
|
wrapt = "*"
|
||||||
|
yarl = "*"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wcwidth"
|
name = "wcwidth"
|
||||||
version = "0.2.8"
|
version = "0.2.8"
|
||||||
|
@ -488,7 +757,173 @@ files = [
|
||||||
{file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"},
|
{file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wrapt"
|
||||||
|
version = "1.16.0"
|
||||||
|
description = "Module for decorators, wrappers and monkey patching."
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.6"
|
||||||
|
files = [
|
||||||
|
{file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
|
||||||
|
{file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
|
||||||
|
{file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"},
|
||||||
|
{file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"},
|
||||||
|
{file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"},
|
||||||
|
{file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"},
|
||||||
|
{file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"},
|
||||||
|
{file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"},
|
||||||
|
{file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"},
|
||||||
|
{file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"},
|
||||||
|
{file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"},
|
||||||
|
{file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"},
|
||||||
|
{file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"},
|
||||||
|
{file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"},
|
||||||
|
{file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"},
|
||||||
|
{file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"},
|
||||||
|
{file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"},
|
||||||
|
{file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"},
|
||||||
|
{file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"},
|
||||||
|
{file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"},
|
||||||
|
{file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"},
|
||||||
|
{file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"},
|
||||||
|
{file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"},
|
||||||
|
{file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"},
|
||||||
|
{file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"},
|
||||||
|
{file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"},
|
||||||
|
{file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"},
|
||||||
|
{file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"},
|
||||||
|
{file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"},
|
||||||
|
{file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"},
|
||||||
|
{file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"},
|
||||||
|
{file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"},
|
||||||
|
{file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"},
|
||||||
|
{file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"},
|
||||||
|
{file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"},
|
||||||
|
{file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"},
|
||||||
|
{file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"},
|
||||||
|
{file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"},
|
||||||
|
{file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"},
|
||||||
|
{file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"},
|
||||||
|
{file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"},
|
||||||
|
{file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"},
|
||||||
|
{file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"},
|
||||||
|
{file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"},
|
||||||
|
{file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"},
|
||||||
|
{file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"},
|
||||||
|
{file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"},
|
||||||
|
{file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"},
|
||||||
|
{file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"},
|
||||||
|
{file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"},
|
||||||
|
{file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"},
|
||||||
|
{file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"},
|
||||||
|
{file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"},
|
||||||
|
{file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"},
|
||||||
|
{file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"},
|
||||||
|
{file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"},
|
||||||
|
{file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"},
|
||||||
|
{file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"},
|
||||||
|
{file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"},
|
||||||
|
{file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"},
|
||||||
|
{file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"},
|
||||||
|
{file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"},
|
||||||
|
{file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"},
|
||||||
|
{file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"},
|
||||||
|
{file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"},
|
||||||
|
{file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"},
|
||||||
|
{file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"},
|
||||||
|
{file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"},
|
||||||
|
{file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"},
|
||||||
|
{file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "yarl"
|
||||||
|
version = "1.9.2"
|
||||||
|
description = "Yet another URL library"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.7"
|
||||||
|
files = [
|
||||||
|
{file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"},
|
||||||
|
{file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"},
|
||||||
|
{file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"},
|
||||||
|
{file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"},
|
||||||
|
{file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"},
|
||||||
|
{file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"},
|
||||||
|
{file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"},
|
||||||
|
{file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"},
|
||||||
|
{file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"},
|
||||||
|
{file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"},
|
||||||
|
{file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"},
|
||||||
|
{file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"},
|
||||||
|
{file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"},
|
||||||
|
{file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"},
|
||||||
|
{file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"},
|
||||||
|
{file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"},
|
||||||
|
{file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"},
|
||||||
|
{file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"},
|
||||||
|
{file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"},
|
||||||
|
{file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"},
|
||||||
|
{file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"},
|
||||||
|
{file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"},
|
||||||
|
{file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"},
|
||||||
|
{file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"},
|
||||||
|
{file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"},
|
||||||
|
{file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"},
|
||||||
|
{file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"},
|
||||||
|
{file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"},
|
||||||
|
{file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"},
|
||||||
|
{file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"},
|
||||||
|
{file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"},
|
||||||
|
{file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"},
|
||||||
|
{file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"},
|
||||||
|
{file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"},
|
||||||
|
{file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"},
|
||||||
|
{file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"},
|
||||||
|
{file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"},
|
||||||
|
{file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"},
|
||||||
|
{file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"},
|
||||||
|
{file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"},
|
||||||
|
{file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"},
|
||||||
|
{file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"},
|
||||||
|
{file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"},
|
||||||
|
{file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"},
|
||||||
|
{file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"},
|
||||||
|
{file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"},
|
||||||
|
{file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"},
|
||||||
|
{file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"},
|
||||||
|
{file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"},
|
||||||
|
{file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"},
|
||||||
|
{file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"},
|
||||||
|
{file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"},
|
||||||
|
{file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"},
|
||||||
|
{file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"},
|
||||||
|
{file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"},
|
||||||
|
{file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"},
|
||||||
|
{file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"},
|
||||||
|
{file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"},
|
||||||
|
{file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"},
|
||||||
|
{file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"},
|
||||||
|
{file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"},
|
||||||
|
{file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"},
|
||||||
|
{file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"},
|
||||||
|
{file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"},
|
||||||
|
{file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"},
|
||||||
|
{file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"},
|
||||||
|
{file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"},
|
||||||
|
{file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"},
|
||||||
|
{file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"},
|
||||||
|
{file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"},
|
||||||
|
{file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"},
|
||||||
|
{file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"},
|
||||||
|
{file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"},
|
||||||
|
{file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
idna = ">=2.0"
|
||||||
|
multidict = ">=4.0"
|
||||||
|
|
||||||
[metadata]
|
[metadata]
|
||||||
lock-version = "2.0"
|
lock-version = "2.0"
|
||||||
python-versions = "~3.11"
|
python-versions = "~3.11"
|
||||||
content-hash = "e8258c66a10f3359660ef60eb8f960c71b2e699da61ec162575456f864de8b58"
|
content-hash = "ed54352d543c77435a48e65debaf1005c58f524f18ba627e9a9c956ea77e2b91"
|
||||||
|
|
|
@ -10,6 +10,8 @@ python = "~3.11"
|
||||||
pytz = "^2023.3.post1"
|
pytz = "^2023.3.post1"
|
||||||
colored = "^2.2.3"
|
colored = "^2.2.3"
|
||||||
python-dateutil = "^2.8.2"
|
python-dateutil = "^2.8.2"
|
||||||
|
elasticsearch = "^8.10.1"
|
||||||
|
prettytable = "^3.9.0"
|
||||||
|
|
||||||
[tool.poetry.group.dev.dependencies]
|
[tool.poetry.group.dev.dependencies]
|
||||||
pytest = "^7.4.2"
|
pytest = "^7.4.2"
|
||||||
|
@ -18,6 +20,8 @@ mypy = "^1.6.0"
|
||||||
install = "^1.3.5"
|
install = "^1.3.5"
|
||||||
types-pytz = "^2023.3.1.1"
|
types-pytz = "^2023.3.1.1"
|
||||||
types-python-dateutil = "^2.8.19.14"
|
types-python-dateutil = "^2.8.19.14"
|
||||||
|
vcrpy = "^5.1.0"
|
||||||
|
pytest-recording = "^0.13.0"
|
||||||
|
|
||||||
[build-system]
|
[build-system]
|
||||||
requires = ["poetry-core"]
|
requires = ["poetry-core"]
|
||||||
|
|
Loading…
Reference in New Issue