nwo
stringlengths 10
28
| sha
stringlengths 40
40
| path
stringlengths 11
97
| identifier
stringlengths 1
64
| parameters
stringlengths 2
2.24k
| return_statement
stringlengths 0
2.17k
| docstring
stringlengths 0
5.45k
| docstring_summary
stringlengths 0
3.83k
| func_begin
int64 1
13.4k
| func_end
int64 2
13.4k
| function
stringlengths 28
56.4k
| url
stringlengths 106
209
| project
int64 1
48
| executed_lines
list | executed_lines_pc
float64 0
153
| missing_lines
list | missing_lines_pc
float64 0
100
| covered
bool 2
classes | filecoverage
float64 2.53
100
| function_lines
int64 2
1.46k
| mccabe
int64 1
253
| coverage
float64 0
100
| docstring_lines
int64 0
112
| function_nodoc
stringlengths 9
56.4k
| id
int64 0
29.8k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/task.py
|
Task.__lt__
|
(self, other: Task)
|
return self.priority < other.priority
| 179 | 182 |
def __lt__(self, other: Task) -> bool:
if self.priority is None or other.priority is None:
return False
return self.priority < other.priority
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/task.py#L179-L182
| 1 |
[
0,
1,
3
] | 75 |
[
2
] | 25 | false | 88.505747 | 4 | 3 | 75 | 0 |
def __lt__(self, other: Task) -> bool:
if self.priority is None or other.priority is None:
return False
return self.priority < other.priority
| 100 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/task.py
|
Task.__eq__
|
(self, other: object)
|
return self.priority == other.priority
| 184 | 190 |
def __eq__(self, other: object) -> bool:
if not isinstance(other, Task):
return NotImplemented
if not self.priority or not other.priority:
# WTF???
return True
return self.priority == other.priority
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/task.py#L184-L190
| 1 |
[
0,
1,
3,
4,
5,
6
] | 85.714286 |
[
2
] | 14.285714 | false | 88.505747 | 7 | 4 | 85.714286 | 0 |
def __eq__(self, other: object) -> bool:
if not isinstance(other, Task):
return NotImplemented
if not self.priority or not other.priority:
# WTF???
return True
return self.priority == other.priority
| 101 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.__init__
|
(
self,
task_queue: None | BaseTaskQueue = None,
thread_number: None | int = None,
network_try_limit: None | int = None,
task_try_limit: None | int = None,
priority_mode: str = "random",
meta: None | dict[str, Any] = None,
config: None | dict[str, Any] = None,
parser_requests_per_process: int = 10000,
parser_pool_size: int = 1,
network_service: None | BaseNetworkService = None,
grab_transport: None
| BaseTransport[HttpRequest, Document]
| type[BaseTransport[HttpRequest, Document]] = None,
)
|
Create Spider instance, duh.
Arguments:
* thread-number - Number of concurrent network streams
* network_try_limit - How many times try to send request
again if network error was occurred, use 0 to disable
* task_try_limit - Limit of tries to execute some task
this is not the same as network_try_limit
network try limit limits the number of tries which
are performed automatically in case of network timeout
of some other physical error
but task_try_limit limits the number of attempts which
are scheduled manually in the spider business logic
* priority_mode - could be "random" or "const"
* meta - arbitrary user data
|
Create Spider instance, duh.
| 72 | 171 |
def __init__(
self,
task_queue: None | BaseTaskQueue = None,
thread_number: None | int = None,
network_try_limit: None | int = None,
task_try_limit: None | int = None,
priority_mode: str = "random",
meta: None | dict[str, Any] = None,
config: None | dict[str, Any] = None,
parser_requests_per_process: int = 10000,
parser_pool_size: int = 1,
network_service: None | BaseNetworkService = None,
grab_transport: None
| BaseTransport[HttpRequest, Document]
| type[BaseTransport[HttpRequest, Document]] = None,
) -> None:
"""Create Spider instance, duh.
Arguments:
* thread-number - Number of concurrent network streams
* network_try_limit - How many times try to send request
again if network error was occurred, use 0 to disable
* task_try_limit - Limit of tries to execute some task
this is not the same as network_try_limit
network try limit limits the number of tries which
are performed automatically in case of network timeout
of some other physical error
but task_try_limit limits the number of attempts which
are scheduled manually in the spider business logic
* priority_mode - could be "random" or "const"
* meta - arbitrary user data
"""
self.fatal_error_queue: Queue[FatalErrorQueueItem] = Queue()
self._started: None | float = None
self.grab_transport = grab_transport
self.parser_requests_per_process = parser_requests_per_process
self.stat = Stat()
self.runtime_events: dict[str, list[None | str]] = {}
self.task_queue: BaseTaskQueue = task_queue if task_queue else MemoryTaskQueue()
if config is not None:
self.config = config
else:
self.config = {}
if meta:
self.meta = meta
else:
self.meta = {}
self.thread_number = thread_number or int(
self.config.get("thread_number", DEFAULT_NETWORK_STREAM_NUMBER)
)
self.task_try_limit = task_try_limit or int(
self.config.get("task_try_limit", DEFAULT_TASK_TRY_LIMIT)
)
self.network_try_limit = network_try_limit or int(
self.config.get("network_try_limit", DEFAULT_NETWORK_TRY_LIMIT)
)
if priority_mode not in ["random", "const"]:
raise SpiderMisuseError(
'Value of priority_mode option should be "random" or "const"'
)
self.priority_mode = priority_mode
self.work_allowed = True
self.proxylist_enabled: None | bool = None
self.proxylist: None | ProxyList = None
self.proxy: None | ProxyServer = None
self.proxy_auto_change = False
self.parser_pool_size = parser_pool_size
assert network_service is None or isinstance(
network_service, BaseNetworkService
)
self.network_service = (
network_service
if network_service is not None
else NetworkServiceThreaded(
self.fatal_error_queue,
self.thread_number,
process_task=self.srv_process_task,
get_task_from_queue=self.get_task_from_queue,
)
)
self.task_dispatcher = TaskDispatcherService(
self.fatal_error_queue,
process_service_result=self.srv_process_service_result,
)
self.parser_service = ParserService(
fatal_error_queue=self.fatal_error_queue,
pool_size=self.parser_pool_size,
task_dispatcher=self.task_dispatcher,
stat=self.stat,
parser_requests_per_process=self.parser_requests_per_process,
find_task_handler=self.find_task_handler,
)
self.task_generator_service = TaskGeneratorService(
self.fatal_error_queue,
self.task_generator(),
thread_number=self.thread_number,
get_task_queue=self.get_task_queue,
parser_service=self.parser_service,
task_dispatcher=self.task_dispatcher,
)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L72-L171
| 1 |
[
0,
31,
32,
33,
34,
35,
36,
37,
38,
39,
41,
42,
43,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
64,
65,
66,
67,
68,
69,
70,
71,
72,
73,
74,
75,
76,
77,
78,
79,
80,
81,
82,
83,
84,
85,
86,
87,
88,
89,
90,
91,
92,
93,
94,
95,
96,
97,
98,
99
] | 68 |
[
40,
44
] | 2 | false | 92.476489 | 100 | 9 | 98 | 15 |
def __init__(
self,
task_queue: None | BaseTaskQueue = None,
thread_number: None | int = None,
network_try_limit: None | int = None,
task_try_limit: None | int = None,
priority_mode: str = "random",
meta: None | dict[str, Any] = None,
config: None | dict[str, Any] = None,
parser_requests_per_process: int = 10000,
parser_pool_size: int = 1,
network_service: None | BaseNetworkService = None,
grab_transport: None
| BaseTransport[HttpRequest, Document]
| type[BaseTransport[HttpRequest, Document]] = None,
) -> None:
self.fatal_error_queue: Queue[FatalErrorQueueItem] = Queue()
self._started: None | float = None
self.grab_transport = grab_transport
self.parser_requests_per_process = parser_requests_per_process
self.stat = Stat()
self.runtime_events: dict[str, list[None | str]] = {}
self.task_queue: BaseTaskQueue = task_queue if task_queue else MemoryTaskQueue()
if config is not None:
self.config = config
else:
self.config = {}
if meta:
self.meta = meta
else:
self.meta = {}
self.thread_number = thread_number or int(
self.config.get("thread_number", DEFAULT_NETWORK_STREAM_NUMBER)
)
self.task_try_limit = task_try_limit or int(
self.config.get("task_try_limit", DEFAULT_TASK_TRY_LIMIT)
)
self.network_try_limit = network_try_limit or int(
self.config.get("network_try_limit", DEFAULT_NETWORK_TRY_LIMIT)
)
if priority_mode not in ["random", "const"]:
raise SpiderMisuseError(
'Value of priority_mode option should be "random" or "const"'
)
self.priority_mode = priority_mode
self.work_allowed = True
self.proxylist_enabled: None | bool = None
self.proxylist: None | ProxyList = None
self.proxy: None | ProxyServer = None
self.proxy_auto_change = False
self.parser_pool_size = parser_pool_size
assert network_service is None or isinstance(
network_service, BaseNetworkService
)
self.network_service = (
network_service
if network_service is not None
else NetworkServiceThreaded(
self.fatal_error_queue,
self.thread_number,
process_task=self.srv_process_task,
get_task_from_queue=self.get_task_from_queue,
)
)
self.task_dispatcher = TaskDispatcherService(
self.fatal_error_queue,
process_service_result=self.srv_process_service_result,
)
self.parser_service = ParserService(
fatal_error_queue=self.fatal_error_queue,
pool_size=self.parser_pool_size,
task_dispatcher=self.task_dispatcher,
stat=self.stat,
parser_requests_per_process=self.parser_requests_per_process,
find_task_handler=self.find_task_handler,
)
self.task_generator_service = TaskGeneratorService(
self.fatal_error_queue,
self.task_generator(),
thread_number=self.thread_number,
get_task_queue=self.get_task_queue,
parser_service=self.parser_service,
task_dispatcher=self.task_dispatcher,
)
| 102 |
|
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.collect_runtime_event
|
(self, name: str, value: None | str)
| 173 | 174 |
def collect_runtime_event(self, name: str, value: None | str) -> None:
self.runtime_events.setdefault(name, []).append(value)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L173-L174
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 92.476489 | 2 | 1 | 100 | 0 |
def collect_runtime_event(self, name: str, value: None | str) -> None:
self.runtime_events.setdefault(name, []).append(value)
| 103 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.setup_queue
|
(self, *_args: Any, **_kwargs: Any)
|
Set up queue.
|
Set up queue.
| 178 | 184 |
def setup_queue(self, *_args: Any, **_kwargs: Any) -> None:
"""Set up queue."""
raise GrabFeatureIsDeprecated(
"""Method Spider.setup_queue is deprecated. Now MemoryTaskQueue is used
by default. If you need custom task queue pass instance of queue class
in task_queue parameter in constructor of Spider class."""
)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L178-L184
| 1 |
[
0,
1
] | 28.571429 |
[
2
] | 14.285714 | false | 92.476489 | 7 | 1 | 85.714286 | 1 |
def setup_queue(self, *_args: Any, **_kwargs: Any) -> None:
raise GrabFeatureIsDeprecated(
)
| 104 |
|
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.add_task
|
(
self,
task: Task,
queue: None | BaseTaskQueue = None,
raise_error: bool = False,
)
|
return True
|
Add task to the task queue.
|
Add task to the task queue.
| 186 | 217 |
def add_task(
self,
task: Task,
queue: None | BaseTaskQueue = None,
raise_error: bool = False,
) -> bool:
"""Add task to the task queue."""
if queue is None:
queue = self.task_queue
if task.priority is None or not task.priority_set_explicitly:
task.priority = self.generate_task_priority()
task.priority_set_explicitly = False
else:
task.priority_set_explicitly = True
if not task.request.url or not task.request.url.startswith(
("http://", "https://", "ftp://", "file://", "feed://")
):
self.collect_runtime_event("task-with-invalid-url", task.request.url)
msg = "Invalid task URL: %s" % task.request.url
if raise_error:
raise SpiderError(msg)
logger.error(
"%s\nTraceback:\n%s",
msg,
"".join(format_stack()),
)
return False
# TODO: keep original task priority if it was set explicitly
# WTF the previous comment means?
queue.put(task, priority=task.priority, schedule_time=task.schedule_time)
return True
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L186-L217
| 1 |
[
0,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31
] | 84.375 |
[] | 0 | false | 92.476489 | 32 | 7 | 100 | 1 |
def add_task(
self,
task: Task,
queue: None | BaseTaskQueue = None,
raise_error: bool = False,
) -> bool:
if queue is None:
queue = self.task_queue
if task.priority is None or not task.priority_set_explicitly:
task.priority = self.generate_task_priority()
task.priority_set_explicitly = False
else:
task.priority_set_explicitly = True
if not task.request.url or not task.request.url.startswith(
("http://", "https://", "ftp://", "file://", "feed://")
):
self.collect_runtime_event("task-with-invalid-url", task.request.url)
msg = "Invalid task URL: %s" % task.request.url
if raise_error:
raise SpiderError(msg)
logger.error(
"%s\nTraceback:\n%s",
msg,
"".join(format_stack()),
)
return False
# TODO: keep original task priority if it was set explicitly
# WTF the previous comment means?
queue.put(task, priority=task.priority, schedule_time=task.schedule_time)
return True
| 105 |
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.stop
|
(self)
|
Instruct spider to stop processing new tasks and start shutting down.
|
Instruct spider to stop processing new tasks and start shutting down.
| 219 | 221 |
def stop(self) -> None:
"""Instruct spider to stop processing new tasks and start shutting down."""
self.work_allowed = False
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L219-L221
| 1 |
[
0,
1,
2
] | 100 |
[] | 0 | true | 92.476489 | 3 | 1 | 100 | 1 |
def stop(self) -> None:
self.work_allowed = False
| 106 |
|
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.load_proxylist
|
(
self,
source: str | BaseProxySource,
source_type: None | str = None,
proxy_type: str = "http",
auto_init: bool = True,
auto_change: bool = True,
)
|
Load proxy list.
:param source: Proxy source.
Accepts string (file path, url) or ``BaseProxySource`` instance.
:param source_type: The type of the specified source.
Should be one of the following: 'text_file' or 'url'.
:param proxy_type:
Should be one of the following: 'socks4', 'socks5' or'http'.
:param auto_change:
If set to `True` then automatically random proxy rotation
will be used.
Proxy source format should be one of the following (for each line):
- ip:port
- ip:port:login:password
|
Load proxy list.
| 223 | 275 |
def load_proxylist(
self,
source: str | BaseProxySource,
source_type: None | str = None,
proxy_type: str = "http",
auto_init: bool = True,
auto_change: bool = True,
) -> None:
"""Load proxy list.
:param source: Proxy source.
Accepts string (file path, url) or ``BaseProxySource`` instance.
:param source_type: The type of the specified source.
Should be one of the following: 'text_file' or 'url'.
:param proxy_type:
Should be one of the following: 'socks4', 'socks5' or'http'.
:param auto_change:
If set to `True` then automatically random proxy rotation
will be used.
Proxy source format should be one of the following (for each line):
- ip:port
- ip:port:login:password
"""
if isinstance(source, BaseProxySource):
self.proxylist = ProxyList(source)
elif isinstance(source, str):
if source_type == "text_file":
self.proxylist = ProxyList.from_local_file(
source, proxy_type=proxy_type
)
elif source_type == "url":
self.proxylist = ProxyList.from_network_file(
source, proxy_type=proxy_type
)
else:
raise SpiderMisuseError(
"Method `load_proxylist` received "
"invalid `source_type` argument: %s" % source_type
)
else:
raise SpiderMisuseError(
"Method `load_proxylist` received "
"invalid `source` argument: %s" % source
)
self.proxylist_enabled = True
self.proxy = None
if not auto_change and auto_init:
self.proxy = self.proxylist.get_random_server()
if not self.proxy.proxy_type:
raise GrabMisuseError("Could not use proxy without defined proxy type")
self.proxy_auto_change = auto_change
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L223-L275
| 1 |
[
0,
23,
24,
26,
27,
28,
29,
30,
45,
46,
47,
48,
52
] | 24.528302 |
[
25,
31,
32,
36,
41,
49,
50,
51
] | 15.09434 | false | 92.476489 | 53 | 8 | 84.90566 | 15 |
def load_proxylist(
self,
source: str | BaseProxySource,
source_type: None | str = None,
proxy_type: str = "http",
auto_init: bool = True,
auto_change: bool = True,
) -> None:
if isinstance(source, BaseProxySource):
self.proxylist = ProxyList(source)
elif isinstance(source, str):
if source_type == "text_file":
self.proxylist = ProxyList.from_local_file(
source, proxy_type=proxy_type
)
elif source_type == "url":
self.proxylist = ProxyList.from_network_file(
source, proxy_type=proxy_type
)
else:
raise SpiderMisuseError(
"Method `load_proxylist` received "
"invalid `source_type` argument: %s" % source_type
)
else:
raise SpiderMisuseError(
"Method `load_proxylist` received "
"invalid `source` argument: %s" % source
)
self.proxylist_enabled = True
self.proxy = None
if not auto_change and auto_init:
self.proxy = self.proxylist.get_random_server()
if not self.proxy.proxy_type:
raise GrabMisuseError("Could not use proxy without defined proxy type")
self.proxy_auto_change = auto_change
| 107 |
|
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.render_stats
|
(self)
|
return "\n".join(out) + "\n"
| 277 | 314 |
def render_stats(self) -> str:
out = [
"------------ Stats: ------------",
"Counters:",
]
# Process counters
items = sorted(self.stat.counters.items(), key=lambda x: x[0], reverse=True)
for item in items:
out.append(" %s: %s" % item)
out.append("")
out.append("Lists:")
# Process event lists sorted by size in descendant order
col_sizes = [(x, len(y)) for x, y in self.runtime_events.items()]
col_sizes = sorted(col_sizes, key=lambda x: x[1], reverse=True)
for col_size in col_sizes:
out.append(" %s: %d" % col_size)
out.append("")
# Process extra metrics
if "download-size" in self.stat.counters:
out.append(
"Network download: %s"
% format_traffic_value(self.stat.counters["download-size"])
)
out.append(
"Queue size: %d" % self.task_queue.size() if self.task_queue else "NA"
)
out.append("Network streams: %d" % self.thread_number)
elapsed = (time.time() - self._started) if self._started else 0
hours, seconds = divmod(elapsed, 3600)
minutes, seconds = divmod(seconds, 60)
out.append("Time elapsed: %d:%d:%d (H:M:S)" % (hours, minutes, seconds))
out.append(
"End time: %s" % datetime.utcnow().strftime("%d %b %Y, %H:%M:%S UTC")
)
return "\n".join(out) + "\n"
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L277-L314
| 1 |
[
0,
1,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
18,
19,
20,
21,
26,
29,
30,
31,
32,
33,
34,
37
] | 65.789474 |
[
17,
22
] | 5.263158 | false | 92.476489 | 38 | 5 | 94.736842 | 0 |
def render_stats(self) -> str:
out = [
"------------ Stats: ------------",
"Counters:",
]
# Process counters
items = sorted(self.stat.counters.items(), key=lambda x: x[0], reverse=True)
for item in items:
out.append(" %s: %s" % item)
out.append("")
out.append("Lists:")
# Process event lists sorted by size in descendant order
col_sizes = [(x, len(y)) for x, y in self.runtime_events.items()]
col_sizes = sorted(col_sizes, key=lambda x: x[1], reverse=True)
for col_size in col_sizes:
out.append(" %s: %d" % col_size)
out.append("")
# Process extra metrics
if "download-size" in self.stat.counters:
out.append(
"Network download: %s"
% format_traffic_value(self.stat.counters["download-size"])
)
out.append(
"Queue size: %d" % self.task_queue.size() if self.task_queue else "NA"
)
out.append("Network streams: %d" % self.thread_number)
elapsed = (time.time() - self._started) if self._started else 0
hours, seconds = divmod(elapsed, 3600)
minutes, seconds = divmod(seconds, 60)
out.append("Time elapsed: %d:%d:%d (H:M:S)" % (hours, minutes, seconds))
out.append(
"End time: %s" % datetime.utcnow().strftime("%d %b %Y, %H:%M:%S UTC")
)
return "\n".join(out) + "\n"
| 108 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.prepare
|
(self)
|
Do additional spider customization here.
This method runs before spider has started working.
|
Do additional spider customization here.
| 320 | 324 |
def prepare(self) -> None:
"""Do additional spider customization here.
This method runs before spider has started working.
"""
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L320-L324
| 1 |
[
0,
1,
2,
3,
4
] | 100 |
[] | 0 | true | 92.476489 | 5 | 1 | 100 | 3 |
def prepare(self) -> None:
| 109 |
|
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.shutdown
|
(self)
|
Override this method to do some final actions after parsing has been done.
|
Override this method to do some final actions after parsing has been done.
| 326 | 327 |
def shutdown(self) -> None:
"""Override this method to do some final actions after parsing has been done."""
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L326-L327
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 92.476489 | 2 | 1 | 100 | 1 |
def shutdown(self) -> None:
| 110 |
|
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.create_grab_instance
|
(self, **kwargs: Any)
|
return Grab(transport=self.grab_transport, **kwargs)
| 329 | 330 |
def create_grab_instance(self, **kwargs: Any) -> Grab:
return Grab(transport=self.grab_transport, **kwargs)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L329-L330
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 92.476489 | 2 | 1 | 100 | 0 |
def create_grab_instance(self, **kwargs: Any) -> Grab:
return Grab(transport=self.grab_transport, **kwargs)
| 111 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.task_generator
|
(self)
|
You can override this method to load new tasks.
It will be used each time as number of tasks
in task queue is less then number of threads multiplied on 2
This allows you to not overload all free memory if total number of
tasks is big.
|
You can override this method to load new tasks.
| 332 | 340 |
def task_generator(self) -> Iterator[Task]:
"""You can override this method to load new tasks.
It will be used each time as number of tasks
in task queue is less then number of threads multiplied on 2
This allows you to not overload all free memory if total number of
tasks is big.
"""
yield from ()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L332-L340
| 1 |
[
0,
1,
2,
3,
4,
5,
6,
7,
8
] | 100 |
[] | 0 | true | 92.476489 | 9 | 1 | 100 | 6 |
def task_generator(self) -> Iterator[Task]:
yield from ()
| 112 |
|
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.check_task_limits
|
(self, task: Task)
|
return True, "ok"
|
Check that task's network & try counters do not exceed limits.
Returns:
* if success: (True, None)
* if error: (False, reason)
|
Check that task's network & try counters do not exceed limits.
| 346 | 360 |
def check_task_limits(self, task: Task) -> tuple[bool, str]:
"""Check that task's network & try counters do not exceed limits.
Returns:
* if success: (True, None)
* if error: (False, reason)
"""
if task.task_try_count > self.task_try_limit:
return False, "task-try-count"
if task.network_try_count > self.network_try_limit:
return False, "network-try-count"
return True, "ok"
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L346-L360
| 1 |
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14
] | 100 |
[] | 0 | true | 92.476489 | 15 | 3 | 100 | 5 |
def check_task_limits(self, task: Task) -> tuple[bool, str]:
if task.task_try_count > self.task_try_limit:
return False, "task-try-count"
if task.network_try_count > self.network_try_limit:
return False, "network-try-count"
return True, "ok"
| 113 |
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.generate_task_priority
|
(self)
|
return system_random.randint(*RANDOM_TASK_PRIORITY_RANGE)
| 362 | 365 |
def generate_task_priority(self) -> int:
if self.priority_mode == "const":
return DEFAULT_TASK_PRIORITY
return system_random.randint(*RANDOM_TASK_PRIORITY_RANGE)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L362-L365
| 1 |
[
0,
1,
2,
3
] | 100 |
[] | 0 | true | 92.476489 | 4 | 2 | 100 | 0 |
def generate_task_priority(self) -> int:
if self.priority_mode == "const":
return DEFAULT_TASK_PRIORITY
return system_random.randint(*RANDOM_TASK_PRIORITY_RANGE)
| 114 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.process_initial_urls
|
(self)
| 367 | 370 |
def process_initial_urls(self) -> None:
if self.initial_urls:
for url in self.initial_urls:
self.add_task(Task(name="initial", request=HttpRequest(url)))
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L367-L370
| 1 |
[
0,
1,
2,
3
] | 100 |
[] | 0 | true | 92.476489 | 4 | 3 | 100 | 0 |
def process_initial_urls(self) -> None:
if self.initial_urls:
for url in self.initial_urls:
self.add_task(Task(name="initial", request=HttpRequest(url)))
| 115 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.get_task_from_queue
|
(self)
| 372 | 379 |
def get_task_from_queue(self) -> None | Literal[True] | Task:
try:
return self.task_queue.get()
except Empty:
size = self.task_queue.size()
if size:
return True
return None
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L372-L379
| 1 |
[
0,
1,
2,
3,
4,
5,
7
] | 87.5 |
[
6
] | 12.5 | false | 92.476489 | 8 | 3 | 87.5 | 0 |
def get_task_from_queue(self) -> None | Literal[True] | Task:
try:
return self.task_queue.get()
except Empty:
size = self.task_queue.size()
if size:
return True
return None
| 116 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.is_valid_network_response_code
|
(self, code: int, task: Task)
|
return code < 400 or code == 404 or code in task.valid_status
|
Test if response is valid.
Valid response is handled with associated task handler.
Failed respoosne is processed with error handler.
|
Test if response is valid.
| 381 | 387 |
def is_valid_network_response_code(self, code: int, task: Task) -> bool:
"""Test if response is valid.
Valid response is handled with associated task handler.
Failed respoosne is processed with error handler.
"""
return code < 400 or code == 404 or code in task.valid_status
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L381-L387
| 1 |
[
0,
1,
2,
3,
4,
5,
6
] | 100 |
[] | 0 | true | 92.476489 | 7 | 3 | 100 | 4 |
def is_valid_network_response_code(self, code: int, task: Task) -> bool:
return code < 400 or code == 404 or code in task.valid_status
| 117 |
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.process_parser_error
|
(
self,
func_name: str,
task: Task,
exc_info: tuple[type[Exception], Exception, TracebackType],
)
| 389 | 408 |
def process_parser_error(
self,
func_name: str,
task: Task,
exc_info: tuple[type[Exception], Exception, TracebackType],
) -> None:
_, ex, _ = exc_info
self.stat.inc("spider:error-%s" % ex.__class__.__name__.lower())
logger.error(
"Task handler [%s] error\n%s",
func_name,
"".join(format_exception(*exc_info)),
)
task_url = task.request.url if task else None
self.collect_runtime_event(
"fatal",
"%s|%s|%s|%s" % (func_name, ex.__class__.__name__, str(ex), task_url),
)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L389-L408
| 1 |
[
0,
6,
7,
8,
9,
14,
15,
16
] | 40 |
[] | 0 | false | 92.476489 | 20 | 1 | 100 | 0 |
def process_parser_error(
self,
func_name: str,
task: Task,
exc_info: tuple[type[Exception], Exception, TracebackType],
) -> None:
_, ex, _ = exc_info
self.stat.inc("spider:error-%s" % ex.__class__.__name__.lower())
logger.error(
"Task handler [%s] error\n%s",
func_name,
"".join(format_exception(*exc_info)),
)
task_url = task.request.url if task else None
self.collect_runtime_event(
"fatal",
"%s|%s|%s|%s" % (func_name, ex.__class__.__name__, str(ex), task_url),
)
| 118 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.find_task_handler
|
(self, task: Task)
| 410 | 423 |
def find_task_handler(self, task: Task) -> Callable[..., Any]:
callback = task.get("callback")
if callback:
# pylint: disable=deprecated-typing-alias
return cast(typing.Callable[..., Any], callback)
# pylint: enable=deprecated-typing-alias
try:
# pylint: disable=deprecated-typing-alias
return cast(typing.Callable[..., Any], getattr(self, "task_%s" % task.name))
# pylint: enable=deprecated-typing-alias
except AttributeError as ex:
raise NoTaskHandler(
"No handler or callback defined for " "task %s" % task.name
) from ex
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L410-L423
| 1 |
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11
] | 85.714286 |
[] | 0 | false | 92.476489 | 14 | 3 | 100 | 0 |
def find_task_handler(self, task: Task) -> Callable[..., Any]:
callback = task.get("callback")
if callback:
# pylint: disable=deprecated-typing-alias
return cast(typing.Callable[..., Any], callback)
# pylint: enable=deprecated-typing-alias
try:
# pylint: disable=deprecated-typing-alias
return cast(typing.Callable[..., Any], getattr(self, "task_%s" % task.name))
# pylint: enable=deprecated-typing-alias
except AttributeError as ex:
raise NoTaskHandler(
"No handler or callback defined for " "task %s" % task.name
) from ex
| 119 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.log_network_result_stats
|
(self, res: NetworkResult, task: Task)
| 425 | 437 |
def log_network_result_stats(self, res: NetworkResult, task: Task) -> None:
# Increase stat counters
self.stat.inc("spider:request-processed")
self.stat.inc("spider:task")
self.stat.inc("spider:task-%s" % task.name)
if task.network_try_count == 1 and task.task_try_count == 1:
self.stat.inc("spider:task-%s-initial" % task.name)
# Update traffic statistics
if res["grab"] and res["doc"]:
doc = res["doc"]
self.stat.inc("spider:download-size", doc.download_size)
self.stat.inc("spider:upload-size", doc.upload_size)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L425-L437
| 1 |
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12
] | 100 |
[] | 0 | true | 92.476489 | 13 | 5 | 100 | 0 |
def log_network_result_stats(self, res: NetworkResult, task: Task) -> None:
# Increase stat counters
self.stat.inc("spider:request-processed")
self.stat.inc("spider:task")
self.stat.inc("spider:task-%s" % task.name)
if task.network_try_count == 1 and task.task_try_count == 1:
self.stat.inc("spider:task-%s-initial" % task.name)
# Update traffic statistics
if res["grab"] and res["doc"]:
doc = res["doc"]
self.stat.inc("spider:download-size", doc.download_size)
self.stat.inc("spider:upload-size", doc.upload_size)
| 120 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.process_grab_proxy
|
(self, task: Task, grab: Grab)
|
Assign new proxy from proxylist to the task.
|
Assign new proxy from proxylist to the task.
| 439 | 445 |
def process_grab_proxy(self, task: Task, grab: Grab) -> None:
"""Assign new proxy from proxylist to the task."""
if task.use_proxylist and self.proxylist_enabled:
if self.proxy_auto_change:
self.change_active_proxy(task, grab)
if self.proxy:
raise Exception("Look like it is not called from tests")
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L439-L445
| 1 |
[
0,
1,
2,
3,
5
] | 71.428571 |
[
4,
6
] | 28.571429 | false | 92.476489 | 7 | 5 | 71.428571 | 1 |
def process_grab_proxy(self, task: Task, grab: Grab) -> None:
if task.use_proxylist and self.proxylist_enabled:
if self.proxy_auto_change:
self.change_active_proxy(task, grab)
if self.proxy:
raise Exception("Look like it is not called from tests")
| 121 |
|
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.change_active_proxy
|
(self, task: Task, grab: Grab)
| 452 | 458 |
def change_active_proxy(self, task: Task, grab: Grab) -> None:
# pylint: disable=unused-argument
self.proxy = cast(ProxyList, self.proxylist).get_random_server()
if not self.proxy.proxy_type:
raise SpiderMisuseError(
'Value of priority_mode option should be "random" or "const"'
)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L452-L458
| 1 |
[
0,
1
] | 28.571429 |
[
2,
3,
4
] | 42.857143 | false | 92.476489 | 7 | 2 | 57.142857 | 0 |
def change_active_proxy(self, task: Task, grab: Grab) -> None:
# pylint: disable=unused-argument
self.proxy = cast(ProxyList, self.proxylist).get_random_server()
if not self.proxy.proxy_type:
raise SpiderMisuseError(
'Value of priority_mode option should be "random" or "const"'
)
| 122 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.get_task_queue
|
(self)
|
return self.task_queue
| 460 | 464 |
def get_task_queue(self) -> BaseTaskQueue:
# this method is expected to be called
# after "spider.run()" is called
# i.e. the "self.task_queue" is set
return self.task_queue
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L460-L464
| 1 |
[
0,
1,
2,
3,
4
] | 100 |
[] | 0 | true | 92.476489 | 5 | 1 | 100 | 0 |
def get_task_queue(self) -> BaseTaskQueue:
# this method is expected to be called
# after "spider.run()" is called
# i.e. the "self.task_queue" is set
return self.task_queue
| 123 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.is_idle_estimated
|
(self)
|
return (
not self.task_generator_service.is_alive()
and not self.task_queue.size()
and not self.task_dispatcher.input_queue.qsize()
and not self.parser_service.input_queue.qsize()
and not self.parser_service.is_busy()
and not self.network_service.get_active_threads_number()
and not self.network_service.is_busy()
)
| 466 | 475 |
def is_idle_estimated(self) -> bool:
return (
not self.task_generator_service.is_alive()
and not self.task_queue.size()
and not self.task_dispatcher.input_queue.qsize()
and not self.parser_service.input_queue.qsize()
and not self.parser_service.is_busy()
and not self.network_service.get_active_threads_number()
and not self.network_service.is_busy()
)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L466-L475
| 1 |
[
0,
1
] | 20 |
[] | 0 | false | 92.476489 | 10 | 7 | 100 | 0 |
def is_idle_estimated(self) -> bool:
return (
not self.task_generator_service.is_alive()
and not self.task_queue.size()
and not self.task_dispatcher.input_queue.qsize()
and not self.parser_service.input_queue.qsize()
and not self.parser_service.is_busy()
and not self.network_service.get_active_threads_number()
and not self.network_service.is_busy()
)
| 124 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.is_idle_confirmed
|
(self, services: list[BaseService])
|
return False
|
Test if spider is fully idle.
WARNING: As side effect it stops all services to get state of queues
anaffected by sercies.
Spider is full idle when all conditions are met:
* all services are paused i.e. the do not change queues
* all queues are empty
* task generator is completed
|
Test if spider is fully idle.
| 477 | 495 |
def is_idle_confirmed(self, services: list[BaseService]) -> bool:
"""Test if spider is fully idle.
WARNING: As side effect it stops all services to get state of queues
anaffected by sercies.
Spider is full idle when all conditions are met:
* all services are paused i.e. the do not change queues
* all queues are empty
* task generator is completed
"""
if self.is_idle_estimated():
for srv in services:
srv.pause()
if self.is_idle_estimated():
return True
for srv in services:
srv.resume()
return False
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L477-L495
| 1 |
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
18
] | 89.473684 |
[
16,
17
] | 10.526316 | false | 92.476489 | 19 | 5 | 89.473684 | 9 |
def is_idle_confirmed(self, services: list[BaseService]) -> bool:
if self.is_idle_estimated():
for srv in services:
srv.pause()
if self.is_idle_estimated():
return True
for srv in services:
srv.resume()
return False
| 125 |
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.run
|
(self)
| 497 | 524 |
def run(self) -> None:
self._started = time.time()
services = []
try:
self.prepare()
self.process_initial_urls()
services = [
self.task_dispatcher,
self.task_generator_service,
self.parser_service,
self.network_service,
]
for srv in services:
srv.start()
while self.work_allowed:
try:
exc_info = self.fatal_error_queue.get(True, 0.5)
except Empty:
pass
else:
# WTF: why? (see below)
# The trackeback of fatal error MUST BE rendered by the sender
raise exc_info[1]
if self.is_idle_confirmed(services):
break
finally:
self.shutdown_services(services)
self.stat.shutdown(join_threads=True)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L497-L524
| 1 |
[
0,
1,
2,
3,
4,
5,
6,
12,
13,
14,
15,
16,
17,
18,
21,
22,
23,
24,
26,
27
] | 71.428571 |
[] | 0 | false | 92.476489 | 28 | 5 | 100 | 0 |
def run(self) -> None:
self._started = time.time()
services = []
try:
self.prepare()
self.process_initial_urls()
services = [
self.task_dispatcher,
self.task_generator_service,
self.parser_service,
self.network_service,
]
for srv in services:
srv.start()
while self.work_allowed:
try:
exc_info = self.fatal_error_queue.get(True, 0.5)
except Empty:
pass
else:
# WTF: why? (see below)
# The trackeback of fatal error MUST BE rendered by the sender
raise exc_info[1]
if self.is_idle_confirmed(services):
break
finally:
self.shutdown_services(services)
self.stat.shutdown(join_threads=True)
| 126 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.shutdown_services
|
(self, services: list[BaseService])
| 526 | 545 |
def shutdown_services(self, services: list[BaseService]) -> None:
# TODO:
for srv in services:
# Resume service if it has been paused
# to allow service to process stop signal
srv.resume()
srv.stop()
start = time.time()
while any(x.is_alive() for x in services):
time.sleep(0.1)
if time.time() - start > 10:
break
for srv in services:
if srv.is_alive():
logger.error("The %s has not stopped :(", srv)
self.stat.render_moment()
self.shutdown()
self.task_queue.clear()
self.task_queue.close()
logger.debug("Work done")
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L526-L545
| 1 |
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
12,
13,
15,
16,
17,
18,
19
] | 90 |
[
11,
14
] | 10 | false | 92.476489 | 20 | 6 | 90 | 0 |
def shutdown_services(self, services: list[BaseService]) -> None:
# TODO:
for srv in services:
# Resume service if it has been paused
# to allow service to process stop signal
srv.resume()
srv.stop()
start = time.time()
while any(x.is_alive() for x in services):
time.sleep(0.1)
if time.time() - start > 10:
break
for srv in services:
if srv.is_alive():
logger.error("The %s has not stopped :(", srv)
self.stat.render_moment()
self.shutdown()
self.task_queue.clear()
self.task_queue.close()
logger.debug("Work done")
| 127 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.log_failed_network_result
|
(self, res: NetworkResult)
| 547 | 557 |
def log_failed_network_result(self, res: NetworkResult) -> None:
orig_exc = (
res["exc"].original_exc
if isinstance(res["exc"], OriginalExceptionGrabError)
else res["exc"]
)
msg = (
("http-%s" % res["doc"].code) if res["ok"] else orig_exc.__class__.__name__
)
self.stat.inc("error:%s" % msg)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L547-L557
| 1 |
[
0,
1,
6,
9,
10
] | 45.454545 |
[] | 0 | false | 92.476489 | 11 | 1 | 100 | 0 |
def log_failed_network_result(self, res: NetworkResult) -> None:
orig_exc = (
res["exc"].original_exc
if isinstance(res["exc"], OriginalExceptionGrabError)
else res["exc"]
)
msg = (
("http-%s" % res["doc"].code) if res["ok"] else orig_exc.__class__.__name__
)
self.stat.inc("error:%s" % msg)
| 128 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.log_rejected_task
|
(self, task: Task, reason: str)
| 559 | 565 |
def log_rejected_task(self, task: Task, reason: str) -> None:
if reason == "task-try-count":
self.collect_runtime_event("task-count-rejected", task.request.url)
elif reason == "network-try-count":
self.collect_runtime_event("network-count-rejected", task.request.url)
else:
raise SpiderError("Unknown response from check_task_limits: %s" % reason)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L559-L565
| 1 |
[
0,
1,
2,
3,
4,
6
] | 85.714286 |
[] | 0 | false | 92.476489 | 7 | 3 | 100 | 0 |
def log_rejected_task(self, task: Task, reason: str) -> None:
if reason == "task-try-count":
self.collect_runtime_event("task-count-rejected", task.request.url)
elif reason == "network-try-count":
self.collect_runtime_event("network-count-rejected", task.request.url)
else:
raise SpiderError("Unknown response from check_task_limits: %s" % reason)
| 129 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.get_fallback_handler
|
(self, task: Task)
|
return None
| 567 | 578 |
def get_fallback_handler(self, task: Task) -> None | Callable[..., Any]:
if task.fallback_name:
# pylint: disable=deprecated-typing-alias
return cast(typing.Callable[..., Any], getattr(self, task.fallback_name))
# pylint: enable=deprecated-typing-alias
if task.name:
fb_name = "task_%s_fallback" % task.name
if hasattr(self, fb_name):
# pylint: disable=deprecated-typing-alias
return cast(typing.Callable[..., Any], getattr(self, fb_name))
# pylint: enable=deprecated-typing-alias
return None
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L567-L578
| 1 |
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11
] | 100 |
[] | 0 | true | 92.476489 | 12 | 4 | 100 | 0 |
def get_fallback_handler(self, task: Task) -> None | Callable[..., Any]:
if task.fallback_name:
# pylint: disable=deprecated-typing-alias
return cast(typing.Callable[..., Any], getattr(self, task.fallback_name))
# pylint: enable=deprecated-typing-alias
if task.name:
fb_name = "task_%s_fallback" % task.name
if hasattr(self, fb_name):
# pylint: disable=deprecated-typing-alias
return cast(typing.Callable[..., Any], getattr(self, fb_name))
# pylint: enable=deprecated-typing-alias
return None
| 130 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.srv_process_service_result
|
(
self,
result: Task | None | Exception | dict[str, Any],
task: Task,
meta: None | dict[str, Any] = None,
)
|
Process result submitted from any service to task dispatcher service.
Result could be:
* Task
* None
* Task instance
* ResponseNotValid-based exception
* Arbitrary exception
* Network response:
{ok, ecode, emsg, exc, grab, grab_config_backup}
Exception can come only from parser_service and it always has
meta {"from": "parser", "exc_info": <...>}
|
Process result submitted from any service to task dispatcher service.
| 583 | 629 |
def srv_process_service_result(
self,
result: Task | None | Exception | dict[str, Any],
task: Task,
meta: None | dict[str, Any] = None,
) -> None:
"""Process result submitted from any service to task dispatcher service.
Result could be:
* Task
* None
* Task instance
* ResponseNotValid-based exception
* Arbitrary exception
* Network response:
{ok, ecode, emsg, exc, grab, grab_config_backup}
Exception can come only from parser_service and it always has
meta {"from": "parser", "exc_info": <...>}
"""
if meta is None:
meta = {}
if isinstance(result, Task):
self.add_task(result)
elif result is None:
pass
elif isinstance(result, ResponseNotValid):
self.add_task(task.clone())
error_code = result.__class__.__name__.replace("_", "-")
self.stat.inc("integrity:%s" % error_code)
elif isinstance(result, Exception):
if task:
handler = self.find_task_handler(task)
handler_name = getattr(handler, "__name__", "NONE")
else:
handler_name = "NA"
self.process_parser_error(
handler_name,
task,
meta["exc_info"],
)
if isinstance(result, FatalError):
self.fatal_error_queue.put(meta["exc_info"])
elif isinstance(result, dict) and "grab" in result:
self.srv_process_network_result(result, task)
else:
raise SpiderError("Unknown result received from a service: %s" % result)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L583-L629
| 1 |
[
0,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46
] | 59.574468 |
[
35
] | 2.12766 | false | 92.476489 | 47 | 10 | 97.87234 | 13 |
def srv_process_service_result(
self,
result: Task | None | Exception | dict[str, Any],
task: Task,
meta: None | dict[str, Any] = None,
) -> None:
if meta is None:
meta = {}
if isinstance(result, Task):
self.add_task(result)
elif result is None:
pass
elif isinstance(result, ResponseNotValid):
self.add_task(task.clone())
error_code = result.__class__.__name__.replace("_", "-")
self.stat.inc("integrity:%s" % error_code)
elif isinstance(result, Exception):
if task:
handler = self.find_task_handler(task)
handler_name = getattr(handler, "__name__", "NONE")
else:
handler_name = "NA"
self.process_parser_error(
handler_name,
task,
meta["exc_info"],
)
if isinstance(result, FatalError):
self.fatal_error_queue.put(meta["exc_info"])
elif isinstance(result, dict) and "grab" in result:
self.srv_process_network_result(result, task)
else:
raise SpiderError("Unknown result received from a service: %s" % result)
| 131 |
|
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.srv_process_network_result
|
(self, result: NetworkResult, task: Task)
| 631 | 649 |
def srv_process_network_result(self, result: NetworkResult, task: Task) -> None:
# TODO: Move to network service
# starts
self.log_network_result_stats(result, task)
# ends
is_valid = False
if task.get("raw"):
is_valid = True
elif result["ok"]:
res_code = result["doc"].code
is_valid = self.is_valid_network_response_code(res_code, task)
if is_valid:
self.parser_service.input_queue.put((result, task))
else:
self.log_failed_network_result(result)
# Try to do network request one more time
if self.network_try_limit > 0:
self.add_task(task)
self.stat.inc("spider:request")
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L631-L649
| 1 |
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
14,
15,
16,
17,
18
] | 94.736842 |
[] | 0 | false | 92.476489 | 19 | 5 | 100 | 0 |
def srv_process_network_result(self, result: NetworkResult, task: Task) -> None:
# TODO: Move to network service
# starts
self.log_network_result_stats(result, task)
# ends
is_valid = False
if task.get("raw"):
is_valid = True
elif result["ok"]:
res_code = result["doc"].code
is_valid = self.is_valid_network_response_code(res_code, task)
if is_valid:
self.parser_service.input_queue.put((result, task))
else:
self.log_failed_network_result(result)
# Try to do network request one more time
if self.network_try_limit > 0:
self.add_task(task)
self.stat.inc("spider:request")
| 132 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/base.py
|
Spider.srv_process_task
|
(self, task: Task)
| 651 | 686 |
def srv_process_task(self, task: Task) -> None:
task.network_try_count += 1
is_valid, reason = self.check_task_limits(task)
if is_valid:
grab = self.create_grab_instance()
self.process_grab_proxy(task, grab)
self.stat.inc("spider:request-network")
self.stat.inc("spider:task-%s-network" % task.name)
try:
result: dict[str, Any] = {
"ok": True,
"ecode": None,
"emsg": None,
"grab": grab,
"task": task,
"exc": None,
"doc": None,
}
try:
result["doc"] = grab.request(task.request)
except (
GrabNetworkError,
GrabInvalidUrl,
GrabInvalidResponse,
GrabTooManyRedirectsError,
) as ex:
result.update({"ok": False, "exc": ex})
self.task_dispatcher.input_queue.put((result, task, None))
finally:
pass
else:
self.log_rejected_task(task, reason)
handler = self.get_fallback_handler(task)
if handler:
handler(task)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/base.py#L651-L686
| 1 |
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
19,
20,
21,
27,
28,
30,
32,
33,
34,
35
] | 58.333333 |
[] | 0 | false | 92.476489 | 36 | 4 | 100 | 0 |
def srv_process_task(self, task: Task) -> None:
task.network_try_count += 1
is_valid, reason = self.check_task_limits(task)
if is_valid:
grab = self.create_grab_instance()
self.process_grab_proxy(task, grab)
self.stat.inc("spider:request-network")
self.stat.inc("spider:task-%s-network" % task.name)
try:
result: dict[str, Any] = {
"ok": True,
"ecode": None,
"emsg": None,
"grab": grab,
"task": task,
"exc": None,
"doc": None,
}
try:
result["doc"] = grab.request(task.request)
except (
GrabNetworkError,
GrabInvalidUrl,
GrabInvalidResponse,
GrabTooManyRedirectsError,
) as ex:
result.update({"ok": False, "exc": ex})
self.task_dispatcher.input_queue.put((result, task, None))
finally:
pass
else:
self.log_rejected_task(task, reason)
handler = self.get_fallback_handler(task)
if handler:
handler(task)
| 133 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/queue_backend/memory.py
|
MemoryTaskQueue.__init__
|
(self)
| 12 | 15 |
def __init__(self) -> None:
super().__init__()
self.queue_object: PriorityQueue[tuple[int, Task]] = PriorityQueue()
self.schedule_list: list[tuple[datetime, Task]] = []
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/queue_backend/memory.py#L12-L15
| 1 |
[
0,
1,
2,
3
] | 100 |
[] | 0 | true | 85.714286 | 4 | 1 | 100 | 0 |
def __init__(self) -> None:
super().__init__()
self.queue_object: PriorityQueue[tuple[int, Task]] = PriorityQueue()
self.schedule_list: list[tuple[datetime, Task]] = []
| 134 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/queue_backend/memory.py
|
MemoryTaskQueue.put
|
(
self, task: Task, priority: int, schedule_time: None | datetime = None
)
| 17 | 23 |
def put(
self, task: Task, priority: int, schedule_time: None | datetime = None
) -> None:
if schedule_time is None:
self.queue_object.put((priority, task))
else:
self.schedule_list.append((schedule_time, task))
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/queue_backend/memory.py#L17-L23
| 1 |
[
0,
3,
4
] | 42.857143 |
[
6
] | 14.285714 | false | 85.714286 | 7 | 2 | 85.714286 | 0 |
def put(
self, task: Task, priority: int, schedule_time: None | datetime = None
) -> None:
if schedule_time is None:
self.queue_object.put((priority, task))
else:
self.schedule_list.append((schedule_time, task))
| 135 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/queue_backend/memory.py
|
MemoryTaskQueue.get
|
(self)
|
return task
| 25 | 40 |
def get(self) -> Task:
now = datetime.utcnow()
removed_indexes = []
for idx, item in enumerate(self.schedule_list):
schedule_time, task = item
if schedule_time <= now:
self.put(task, 1)
removed_indexes.append(idx)
self.schedule_list = [
x for idx, x in enumerate(self.schedule_list) if idx not in removed_indexes
]
_, task = self.queue_object.get(block=False)
return task
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/queue_backend/memory.py#L25-L40
| 1 |
[
0,
1,
2,
3,
4,
9,
10,
13,
14,
15
] | 62.5 |
[
5,
6,
7,
8
] | 25 | false | 85.714286 | 16 | 4 | 75 | 0 |
def get(self) -> Task:
now = datetime.utcnow()
removed_indexes = []
for idx, item in enumerate(self.schedule_list):
schedule_time, task = item
if schedule_time <= now:
self.put(task, 1)
removed_indexes.append(idx)
self.schedule_list = [
x for idx, x in enumerate(self.schedule_list) if idx not in removed_indexes
]
_, task = self.queue_object.get(block=False)
return task
| 136 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/queue_backend/memory.py
|
MemoryTaskQueue.size
|
(self)
|
return self.queue_object.qsize() + len(self.schedule_list)
| 42 | 43 |
def size(self) -> int:
return self.queue_object.qsize() + len(self.schedule_list)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/queue_backend/memory.py#L42-L43
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 85.714286 | 2 | 1 | 100 | 0 |
def size(self) -> int:
return self.queue_object.qsize() + len(self.schedule_list)
| 137 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/queue_backend/memory.py
|
MemoryTaskQueue.clear
|
(self)
| 45 | 49 |
def clear(self) -> None:
with suppress(Empty):
while True:
self.queue_object.get(False)
self.schedule_list = []
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/queue_backend/memory.py#L45-L49
| 1 |
[
0,
1,
2,
3,
4
] | 100 |
[] | 0 | true | 85.714286 | 5 | 3 | 100 | 0 |
def clear(self) -> None:
with suppress(Empty):
while True:
self.queue_object.get(False)
self.schedule_list = []
| 138 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/queue_backend/base.py
|
BaseTaskQueue.random_queue_name
|
(self)
|
return "task_queue_{}".format(str(uuid4()).replace("-", "_"))
| 15 | 16 |
def random_queue_name(self) -> str:
return "task_queue_{}".format(str(uuid4()).replace("-", "_"))
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/queue_backend/base.py#L15-L16
| 1 |
[
0
] | 50 |
[
1
] | 50 | false | 91.666667 | 2 | 1 | 50 | 0 |
def random_queue_name(self) -> str:
return "task_queue_{}".format(str(uuid4()).replace("-", "_"))
| 139 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/queue_backend/base.py
|
BaseTaskQueue.put
|
(
self,
task: Task,
priority: int,
schedule_time: None | datetime = None,
)
| 18 | 24 |
def put(
self,
task: Task,
priority: int,
schedule_time: None | datetime = None,
) -> None: # pragma: no cover
raise NotImplementedError
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/queue_backend/base.py#L18-L24
| 1 |
[] | 0 |
[] | 0 | false | 91.666667 | 7 | 1 | 100 | 0 |
def put(
self,
task: Task,
priority: int,
schedule_time: None | datetime = None,
) -> None: # pragma: no cover
raise NotImplementedError
| 140 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/queue_backend/base.py
|
BaseTaskQueue.get
|
(self)
|
Return `Task` object or raise `Queue.Empty` exception.
@returns: `grab.spider.task.Task` object
@raises: `Queue.Empty` exception
|
Return `Task` object or raise `Queue.Empty` exception.
| 26 | 32 |
def get(self) -> Task: # pragma: no cover
"""Return `Task` object or raise `Queue.Empty` exception.
@returns: `grab.spider.task.Task` object
@raises: `Queue.Empty` exception
"""
raise NotImplementedError
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/queue_backend/base.py#L26-L32
| 1 |
[] | 0 |
[] | 0 | false | 91.666667 | 7 | 1 | 100 | 4 |
def get(self) -> Task: # pragma: no cover
raise NotImplementedError
| 141 |
|
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/queue_backend/base.py
|
BaseTaskQueue.size
|
(self)
| 35 | 36 |
def size(self) -> int: # pragma: no cover
raise NotImplementedError
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/queue_backend/base.py#L35-L36
| 1 |
[] | 0 |
[] | 0 | false | 91.666667 | 2 | 1 | 100 | 0 |
def size(self) -> int: # pragma: no cover
raise NotImplementedError
| 142 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/queue_backend/base.py
|
BaseTaskQueue.clear
|
(self)
|
Remove all tasks from the queue.
|
Remove all tasks from the queue.
| 38 | 40 |
def clear(self) -> None: # pragma: no cover
"""Remove all tasks from the queue."""
raise NotImplementedError
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/queue_backend/base.py#L38-L40
| 1 |
[] | 0 |
[] | 0 | false | 91.666667 | 3 | 1 | 100 | 1 |
def clear(self) -> None: # pragma: no cover
raise NotImplementedError
| 143 |
|
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/queue_backend/base.py
|
BaseTaskQueue.close
|
(self)
| 42 | 43 |
def close(self) -> None: # pragma: no cover
raise NotImplementedError
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/queue_backend/base.py#L42-L43
| 1 |
[] | 0 |
[] | 0 | false | 91.666667 | 2 | 1 | 100 | 0 |
def close(self) -> None: # pragma: no cover
raise NotImplementedError
| 144 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/network.py
|
BaseNetworkService.get_active_threads_number
|
(self)
| 18 | 19 |
def get_active_threads_number(self) -> int: # pragma: no cover
raise NotImplementedError
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/network.py#L18-L19
| 1 |
[] | 0 |
[] | 0 | false | 94.594595 | 2 | 1 | 100 | 0 |
def get_active_threads_number(self) -> int: # pragma: no cover
raise NotImplementedError
| 160 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/network.py
|
NetworkServiceThreaded.__init__
|
(
self,
fatal_error_queue: Queue[FatalErrorQueueItem],
thread_number: int,
process_task: Callable[[Task], None],
get_task_from_queue: Callable[[], None | Literal[True] | Task],
)
| 23 | 37 |
def __init__(
self,
fatal_error_queue: Queue[FatalErrorQueueItem],
thread_number: int,
process_task: Callable[[Task], None],
get_task_from_queue: Callable[[], None | Literal[True] | Task],
) -> None:
super().__init__(fatal_error_queue)
self.thread_number = thread_number
self.process_task = process_task
self.get_task_from_queue = get_task_from_queue
self.worker_pool = []
for _ in range(self.thread_number):
self.worker_pool.append(self.create_worker(self.worker_callback))
self.register_workers(self.worker_pool)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/network.py#L23-L37
| 1 |
[
0,
7,
8,
9,
10,
11,
12,
13,
14
] | 60 |
[] | 0 | false | 94.594595 | 15 | 2 | 100 | 0 |
def __init__(
self,
fatal_error_queue: Queue[FatalErrorQueueItem],
thread_number: int,
process_task: Callable[[Task], None],
get_task_from_queue: Callable[[], None | Literal[True] | Task],
) -> None:
super().__init__(fatal_error_queue)
self.thread_number = thread_number
self.process_task = process_task
self.get_task_from_queue = get_task_from_queue
self.worker_pool = []
for _ in range(self.thread_number):
self.worker_pool.append(self.create_worker(self.worker_callback))
self.register_workers(self.worker_pool)
| 161 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/network.py
|
NetworkServiceThreaded.get_active_threads_number
|
(self)
|
return sum(
1
for x in self.iterate_workers(self.worker_registry)
if x.is_busy_event.is_set()
)
| 39 | 44 |
def get_active_threads_number(self) -> int:
return sum(
1
for x in self.iterate_workers(self.worker_registry)
if x.is_busy_event.is_set()
)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/network.py#L39-L44
| 1 |
[
0,
1
] | 33.333333 |
[] | 0 | false | 94.594595 | 6 | 1 | 100 | 0 |
def get_active_threads_number(self) -> int:
return sum(
1
for x in self.iterate_workers(self.worker_registry)
if x.is_busy_event.is_set()
)
| 162 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/network.py
|
NetworkServiceThreaded.worker_callback
|
(self, worker: ServiceWorker)
| 47 | 62 |
def worker_callback(self, worker: ServiceWorker) -> None:
while not worker.stop_event.is_set():
worker.process_pause_signal()
try:
task = self.get_task_from_queue()
except Empty:
time.sleep(0.1)
else:
if task is None or task is True:
time.sleep(0.1)
else:
worker.is_busy_event.set()
try:
self.process_task(task)
finally:
worker.is_busy_event.clear()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/network.py#L47-L62
| 1 |
[
0,
1,
2,
3,
4,
8,
9,
11,
12,
13,
15
] | 68.75 |
[
5,
6
] | 12.5 | false | 94.594595 | 16 | 5 | 87.5 | 0 |
def worker_callback(self, worker: ServiceWorker) -> None:
while not worker.stop_event.is_set():
worker.process_pause_signal()
try:
task = self.get_task_from_queue()
except Empty:
time.sleep(0.1)
else:
if task is None or task is True:
time.sleep(0.1)
else:
worker.is_busy_event.set()
try:
self.process_task(task)
finally:
worker.is_busy_event.clear()
| 163 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/parser.py
|
ParserService.__init__
|
(
self,
fatal_error_queue: Queue[FatalErrorQueueItem],
pool_size: int,
task_dispatcher: TaskDispatcherService,
stat: Stat,
parser_requests_per_process: int,
find_task_handler: Callable[[Task], Callable[..., None]],
)
| 22 | 42 |
def __init__(
self,
fatal_error_queue: Queue[FatalErrorQueueItem],
pool_size: int,
task_dispatcher: TaskDispatcherService,
stat: Stat,
parser_requests_per_process: int,
find_task_handler: Callable[[Task], Callable[..., None]],
) -> None:
super().__init__(fatal_error_queue)
self.task_dispatcher = task_dispatcher
self.stat = stat
self.parser_requests_per_process = parser_requests_per_process
self.find_task_handler = find_task_handler
self.input_queue: Queue[Any] = Queue()
self.pool_size = pool_size
self.workers_pool = []
for _ in range(self.pool_size):
self.workers_pool.append(self.create_worker(self.worker_callback))
self.supervisor = self.create_worker(self.supervisor_callback)
self.register_workers(self.workers_pool, self.supervisor)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/parser.py#L22-L42
| 1 |
[
0,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20
] | 61.904762 |
[] | 0 | false | 100 | 21 | 2 | 100 | 0 |
def __init__(
self,
fatal_error_queue: Queue[FatalErrorQueueItem],
pool_size: int,
task_dispatcher: TaskDispatcherService,
stat: Stat,
parser_requests_per_process: int,
find_task_handler: Callable[[Task], Callable[..., None]],
) -> None:
super().__init__(fatal_error_queue)
self.task_dispatcher = task_dispatcher
self.stat = stat
self.parser_requests_per_process = parser_requests_per_process
self.find_task_handler = find_task_handler
self.input_queue: Queue[Any] = Queue()
self.pool_size = pool_size
self.workers_pool = []
for _ in range(self.pool_size):
self.workers_pool.append(self.create_worker(self.worker_callback))
self.supervisor = self.create_worker(self.supervisor_callback)
self.register_workers(self.workers_pool, self.supervisor)
| 164 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/parser.py
|
ParserService.check_pool_health
|
(self)
| 44 | 54 |
def check_pool_health(self) -> None:
    """Replace dead parser workers with freshly started ones."""
    dead_workers = [wrk for wrk in self.workers_pool if not wrk.is_alive()]
    for wrk in dead_workers:
        self.stat.inc("parser:worker-restarted")
        replacement = self.create_worker(self.worker_callback)
        self.workers_pool.append(replacement)
        replacement.start()
    # Drop the dead workers only after all replacements are in place.
    for wrk in dead_workers:
        self.workers_pool.remove(wrk)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/parser.py#L44-L54
| 1 |
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10
] | 100 |
[] | 0 | true | 100 | 11 | 4 | 100 | 0 |
def check_pool_health(self) -> None:
to_remove = []
for worker in self.workers_pool:
if not worker.is_alive():
self.stat.inc("parser:worker-restarted")
new_worker = self.create_worker(self.worker_callback)
self.workers_pool.append(new_worker)
new_worker.start()
to_remove.append(worker)
for worker in to_remove:
self.workers_pool.remove(worker)
| 165 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/parser.py
|
ParserService.supervisor_callback
|
(self, worker: ServiceWorker)
| 56 | 60 |
def supervisor_callback(self, worker: ServiceWorker) -> None:
    """Once a second, restart any dead parser workers until stopped."""
    while not worker.stop_event.is_set():
        worker.process_pause_signal()  # block here while the service is paused
        self.check_pool_health()
        time.sleep(1)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/parser.py#L56-L60
| 1 |
[
0,
1,
2,
3,
4
] | 100 |
[] | 0 | true | 100 | 5 | 2 | 100 | 0 |
def supervisor_callback(self, worker: ServiceWorker) -> None:
while not worker.stop_event.is_set():
worker.process_pause_signal()
self.check_pool_health()
time.sleep(1)
| 166 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/parser.py
|
ParserService.worker_callback
|
(self, worker: ServiceWorker)
| 62 | 92 |
def worker_callback(self, worker: ServiceWorker) -> None:
    """Parse (result, task) pairs from the input queue until stopped.

    For each pair the task's handler is resolved and executed; a missing
    handler (NoTaskHandler) and any handler error are forwarded to the
    task dispatcher instead of crashing the worker.  When
    parser_requests_per_process is truthy the worker returns after that
    many requests, relying on the supervisor to start a replacement.
    """
    process_request_count = 0
    while not worker.stop_event.is_set():
        worker.process_pause_signal()
        try:
            result, task = self.input_queue.get(True, 0.1)
        except Empty:
            pass
        else:
            worker.is_busy_event.set()
            try:
                process_request_count += 1
                try:
                    handler = self.find_task_handler(task)
                except NoTaskHandler as ex:
                    # No handler: report the error with its traceback.
                    self.task_dispatcher.input_queue.put(
                        (ex, task, {"exc_info": sys.exc_info()})
                    )
                    self.stat.inc("parser:handler-not-found")
                else:
                    self.execute_task_handler(handler, result, task)
                    self.stat.inc("parser:handler-processed")
                if self.parser_requests_per_process and (
                    process_request_count >= self.parser_requests_per_process
                ):
                    self.stat.inc(
                        "parser:handler-req-limit",
                    )
                    return
            finally:
                # Always clear the busy flag, even if a handler blew up.
                worker.is_busy_event.clear()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/parser.py#L62-L92
| 1 |
[
0,
1,
2,
3,
4,
5,
6,
7,
9,
10,
11,
12,
13,
14,
15,
18,
20,
21,
22,
25,
28,
30
] | 70.967742 |
[] | 0 | false | 100 | 31 | 6 | 100 | 0 |
def worker_callback(self, worker: ServiceWorker) -> None:
process_request_count = 0
while not worker.stop_event.is_set():
worker.process_pause_signal()
try:
result, task = self.input_queue.get(True, 0.1)
except Empty:
pass
else:
worker.is_busy_event.set()
try:
process_request_count += 1
try:
handler = self.find_task_handler(task)
except NoTaskHandler as ex:
self.task_dispatcher.input_queue.put(
(ex, task, {"exc_info": sys.exc_info()})
)
self.stat.inc("parser:handler-not-found")
else:
self.execute_task_handler(handler, result, task)
self.stat.inc("parser:handler-processed")
if self.parser_requests_per_process and (
process_request_count >= self.parser_requests_per_process
):
self.stat.inc(
"parser:handler-req-limit",
)
return
finally:
worker.is_busy_event.clear()
| 167 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/parser.py
|
ParserService.execute_task_handler
|
(
self, handler: Callable[[Grab, Task], None], result: NetworkResult, task: Task
)
| 94 | 116 |
def execute_task_handler(
    self, handler: Callable[[Grab, Task], None], result: NetworkResult, task: Task
) -> None:
    """Run a task handler and forward whatever it yields to the dispatcher.

    A handler may return None (nothing to forward) or an iterable of
    items.  Any exception raised by the handler is not propagated: it is
    queued for the task dispatcher together with its exc_info triple.
    """
    try:
        produced = handler(result["doc"], task)
        if produced is not None:
            for item in produced:
                self.task_dispatcher.input_queue.put((item, task, None))
    except Exception as ex:
        self.task_dispatcher.input_queue.put(
            (
                ex,
                task,
                {"exc_info": sys.exc_info(), "from": "parser"},
            )
        )
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/parser.py#L94-L116
| 1 |
[
0,
3,
4,
5,
6,
8,
9,
12,
13
] | 39.130435 |
[] | 0 | false | 100 | 23 | 4 | 100 | 0 |
def execute_task_handler(
self, handler: Callable[[Grab, Task], None], result: NetworkResult, task: Task
) -> None:
try:
handler_result = handler(result["doc"], task)
if handler_result is None:
pass
else:
for item in handler_result:
self.task_dispatcher.input_queue.put(
(item, task, None),
)
except Exception as ex:
self.task_dispatcher.input_queue.put(
(
ex,
task,
{
"exc_info": sys.exc_info(),
"from": "parser",
},
)
)
| 168 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/task_dispatcher.py
|
TaskDispatcherService.__init__
|
(
self,
fatal_error_queue: Queue[FatalErrorQueueItem],
process_service_result: Callable[[Any, Task, None | dict[str, Any]], Any],
)
| 13 | 22 |
def __init__(
    self,
    fatal_error_queue: Queue[FatalErrorQueueItem],
    process_service_result: Callable[[Any, Task, None | dict[str, Any]], Any],
):
    """Single-worker service funnelling (result, task, meta) triples
    from other services into the *process_service_result* callback.
    """
    super().__init__(fatal_error_queue)
    self.process_service_result = process_service_result
    # Other services push (result, task, meta) triples here.
    self.input_queue: Queue[Any] = Queue()
    self.worker = self.create_worker(self.worker_callback)
    self.register_workers(self.worker)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/task_dispatcher.py#L13-L22
| 1 |
[
0,
5,
6,
7,
8,
9
] | 60 |
[] | 0 | false | 100 | 10 | 1 | 100 | 0 |
def __init__(
self,
fatal_error_queue: Queue[FatalErrorQueueItem],
process_service_result: Callable[[Any, Task, None | dict[str, Any]], Any],
):
super().__init__(fatal_error_queue)
self.process_service_result = process_service_result
self.input_queue: Queue[Any] = Queue()
self.worker = self.create_worker(self.worker_callback)
self.register_workers(self.worker)
| 169 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/task_dispatcher.py
|
TaskDispatcherService.start
|
(self)
| 24 | 25 |
def start(self) -> None:
self.worker.start()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/task_dispatcher.py#L24-L25
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 100 | 2 | 1 | 100 | 0 |
def start(self) -> None:
self.worker.start()
| 170 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/task_dispatcher.py
|
TaskDispatcherService.worker_callback
|
(self, worker: ServiceWorker)
| 27 | 35 |
def worker_callback(self, worker: ServiceWorker) -> None:
    """Pump (result, task, meta) triples into process_service_result.

    Runs until the worker's stop event is set, honouring pause requests
    each iteration.  The short get() timeout keeps the loop responsive
    to stop/pause signals while the queue is idle.
    """
    while not worker.stop_event.is_set():
        worker.process_pause_signal()
        try:
            payload = self.input_queue.get(True, 0.1)
        except Empty:
            continue
        self.process_service_result(*payload)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/task_dispatcher.py#L27-L35
| 1 |
[
0,
1,
2,
3,
4,
5,
6,
8
] | 88.888889 |
[] | 0 | false | 100 | 9 | 3 | 100 | 0 |
def worker_callback(self, worker: ServiceWorker) -> None:
while not worker.stop_event.is_set():
worker.process_pause_signal()
try:
result, task, meta = self.input_queue.get(True, 0.1)
except Empty:
pass
else:
self.process_service_result(result, task, meta)
| 171 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
ServiceWorker.__init__
|
(
self,
fatal_error_queue: Queue[FatalErrorQueueItem],
worker_callback: Callable[..., Any],
)
| 17 | 32 |
def __init__(
    self,
    fatal_error_queue: Queue[FatalErrorQueueItem],
    worker_callback: Callable[..., Any],
) -> None:
    """Wrap *worker_callback* in a daemon thread with pause/stop controls.

    The callback receives this ServiceWorker as its only argument and is
    expected to poll ``stop_event`` and call ``process_pause_signal`` in
    its loop.
    """
    self.fatal_error_queue = fatal_error_queue
    # Exceptions escaping the callback are routed to fatal_error_queue
    # by worker_callback_wrapper instead of dying silently.
    self.thread = Thread(
        target=self.worker_callback_wrapper(worker_callback), args=[self]
    )
    self.thread.daemon = True
    self.thread.name = self.build_thread_name(worker_callback)
    # pause/resume handshake: pause() sets pause_event and waits for
    # activity_paused; resume() clears both and sets resume_event.
    self.pause_event = Event()
    self.stop_event = Event()
    self.resume_event = Event()
    self.activity_paused = Event()
    # Set while the worker is actively processing an item.
    self.is_busy_event = Event()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L17-L32
| 1 |
[
0,
5,
6,
9,
10,
11,
12,
13,
14,
15
] | 62.5 |
[] | 0 | false | 100 | 16 | 1 | 100 | 0 |
def __init__(
self,
fatal_error_queue: Queue[FatalErrorQueueItem],
worker_callback: Callable[..., Any],
) -> None:
self.fatal_error_queue = fatal_error_queue
self.thread = Thread(
target=self.worker_callback_wrapper(worker_callback), args=[self]
)
self.thread.daemon = True
self.thread.name = self.build_thread_name(worker_callback)
self.pause_event = Event()
self.stop_event = Event()
self.resume_event = Event()
self.activity_paused = Event()
self.is_busy_event = Event()
| 172 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
ServiceWorker.build_thread_name
|
(self, worker_callback: Callable[..., Any])
|
return "worker:%s:%s" % (cls_name, worker_callback.__name__)
| 34 | 40 |
def build_thread_name(self, worker_callback: Callable[..., Any]) -> str:
    """Return a descriptive thread name like ``worker:<Class>:<method>``.

    For a bound method the owning class name is used; callables without
    a ``__self__`` (plain functions, lambdas) get the "NA" placeholder.
    """
    if hasattr(worker_callback, "__self__"):
        owner = type(worker_callback.__self__).__name__
    else:
        owner = "NA"
    return "worker:{}:{}".format(owner, worker_callback.__name__)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L34-L40
| 1 |
[
0,
1,
6
] | 42.857143 |
[] | 0 | false | 100 | 7 | 1 | 100 | 0 |
def build_thread_name(self, worker_callback: Callable[..., Any]) -> str:
cls_name = (
worker_callback.__self__.__class__.__name__
if hasattr(worker_callback, "__self__")
else "NA"
)
return "worker:%s:%s" % (cls_name, worker_callback.__name__)
| 173 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
ServiceWorker.worker_callback_wrapper
|
(
self, callback: Callable[..., Any]
)
|
return wrapper
| 42 | 57 |
def worker_callback_wrapper(
    self, callback: Callable[..., Any]
) -> Callable[..., None]:
    """Wrap *callback* so an unhandled exception becomes a fatal error.

    The returned function forwards all arguments to *callback*; if the
    callback raises, the error is logged and its exc_info triple pushed
    onto the fatal error queue instead of killing the thread silently.
    """

    def wrapper(*args: Any, **kwargs: Any) -> None:
        try:
            callback(*args, **kwargs)
        except Exception as ex:
            logger.error("Spider Service Fatal Error", exc_info=ex)
            # pylint: disable=deprecated-typing-alias
            exc_triple = cast(
                Tuple[Type[Exception], Exception, TracebackType], sys.exc_info()
            )
            self.fatal_error_queue.put(exc_triple)

    return wrapper
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L42-L57
| 1 |
[
0,
3,
4,
5,
6,
7,
8,
9,
14,
15
] | 62.5 |
[] | 0 | false | 100 | 16 | 3 | 100 | 0 |
def worker_callback_wrapper(
self, callback: Callable[..., Any]
) -> Callable[..., None]:
def wrapper(*args: Any, **kwargs: Any) -> None:
try:
callback(*args, **kwargs)
except Exception as ex:
logger.error("Spider Service Fatal Error", exc_info=ex)
# pylint: disable=deprecated-typing-alias
self.fatal_error_queue.put(
cast(
Tuple[Type[Exception], Exception, TracebackType], sys.exc_info()
)
)
return wrapper
| 174 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
ServiceWorker.start
|
(self)
| 59 | 60 |
def start(self) -> None:
self.thread.start()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L59-L60
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 100 | 2 | 1 | 100 | 0 |
def start(self) -> None:
self.thread.start()
| 175 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
ServiceWorker.stop
|
(self)
| 62 | 63 |
def stop(self) -> None:
self.stop_event.set()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L62-L63
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 100 | 2 | 1 | 100 | 0 |
def stop(self) -> None:
self.stop_event.set()
| 176 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
ServiceWorker.process_pause_signal
|
(self)
| 65 | 68 |
def process_pause_signal(self) -> None:
    """Block until resumed if a pause was requested for this worker.

    Sets ``activity_paused`` so the pausing thread knows the worker has
    actually stopped working, then sleeps on ``resume_event``.
    """
    if not self.pause_event.is_set():
        return
    self.activity_paused.set()
    self.resume_event.wait()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L65-L68
| 1 |
[
0,
1,
2,
3
] | 100 |
[] | 0 | true | 100 | 4 | 2 | 100 | 0 |
def process_pause_signal(self) -> None:
if self.pause_event.is_set():
self.activity_paused.set()
self.resume_event.wait()
| 177 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
ServiceWorker.pause
|
(self)
| 70 | 77 |
def pause(self) -> None:
    """Request a pause and wait until the worker acknowledges it.

    Polls in short intervals so a worker that dies before acknowledging
    the pause does not block the caller forever.
    """
    self.resume_event.clear()
    self.pause_event.set()
    while not self.activity_paused.wait(0.1):
        if not self.is_alive():
            break
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L70-L77
| 1 |
[
0,
1,
2,
3,
4,
5,
6,
7
] | 100 |
[] | 0 | true | 100 | 8 | 4 | 100 | 0 |
def pause(self) -> None:
self.resume_event.clear()
self.pause_event.set()
while True:
if self.activity_paused.wait(0.1):
break
if not self.is_alive():
break
| 178 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
ServiceWorker.resume
|
(self)
| 79 | 82 |
def resume(self) -> None:
    """Lift a previously requested pause and wake the sleeping worker."""
    for pending in (self.pause_event, self.activity_paused):
        pending.clear()
    self.resume_event.set()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L79-L82
| 1 |
[
0,
1,
2,
3
] | 100 |
[] | 0 | true | 100 | 4 | 1 | 100 | 0 |
def resume(self) -> None:
self.pause_event.clear()
self.activity_paused.clear()
self.resume_event.set()
| 179 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
ServiceWorker.is_alive
|
(self)
|
return self.thread.is_alive()
| 84 | 85 |
def is_alive(self) -> bool:
return self.thread.is_alive()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L84-L85
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 100 | 2 | 1 | 100 | 0 |
def is_alive(self) -> bool:
return self.thread.is_alive()
| 180 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
BaseService.__init__
|
(self, fatal_error_queue: Queue[FatalErrorQueueItem])
| 89 | 91 |
def __init__(self, fatal_error_queue: Queue[FatalErrorQueueItem]) -> None:
self.fatal_error_queue = fatal_error_queue
self.worker_registry: list[ServiceWorker] = []
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L89-L91
| 1 |
[
0,
1,
2
] | 100 |
[] | 0 | true | 100 | 3 | 1 | 100 | 0 |
def __init__(self, fatal_error_queue: Queue[FatalErrorQueueItem]) -> None:
self.fatal_error_queue = fatal_error_queue
self.worker_registry: list[ServiceWorker] = []
| 181 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
BaseService.create_worker
|
(self, worker_action: Callable[..., None])
|
return ServiceWorker(self.fatal_error_queue, worker_action)
| 93 | 94 |
def create_worker(self, worker_action: Callable[..., None]) -> ServiceWorker:
return ServiceWorker(self.fatal_error_queue, worker_action)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L93-L94
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 100 | 2 | 1 | 100 | 0 |
def create_worker(self, worker_action: Callable[..., None]) -> ServiceWorker:
return ServiceWorker(self.fatal_error_queue, worker_action)
| 182 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
BaseService.iterate_workers
|
(self, objects: list[ServiceWorker])
| 96 | 102 |
def iterate_workers(self, objects: list[ServiceWorker]) -> Iterable[ServiceWorker]:
    """Flatten a registry that mixes single workers and lists of workers."""
    for entry in objects:
        assert isinstance(entry, (ServiceWorker, list))
        if isinstance(entry, list):
            yield from entry
        else:
            yield entry
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L96-L102
| 1 |
[
0,
1,
2,
3,
4,
5,
6
] | 100 |
[] | 0 | true | 100 | 7 | 5 | 100 | 0 |
def iterate_workers(self, objects: list[ServiceWorker]) -> Iterable[ServiceWorker]:
for obj in objects:
assert isinstance(obj, (ServiceWorker, list))
if isinstance(obj, ServiceWorker):
yield obj
elif isinstance(obj, list):
yield from obj
| 183 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
BaseService.start
|
(self)
| 104 | 106 |
def start(self) -> None:
for worker in self.iterate_workers(self.worker_registry):
worker.start()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L104-L106
| 1 |
[
0,
1,
2
] | 100 |
[] | 0 | true | 100 | 3 | 2 | 100 | 0 |
def start(self) -> None:
for worker in self.iterate_workers(self.worker_registry):
worker.start()
| 184 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
BaseService.stop
|
(self)
| 108 | 110 |
def stop(self) -> None:
for worker in self.iterate_workers(self.worker_registry):
worker.stop()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L108-L110
| 1 |
[
0,
1,
2
] | 100 |
[] | 0 | true | 100 | 3 | 2 | 100 | 0 |
def stop(self) -> None:
for worker in self.iterate_workers(self.worker_registry):
worker.stop()
| 185 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
BaseService.pause
|
(self)
| 112 | 114 |
def pause(self) -> None:
for worker in self.iterate_workers(self.worker_registry):
worker.pause()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L112-L114
| 1 |
[
0,
1,
2
] | 100 |
[] | 0 | true | 100 | 3 | 2 | 100 | 0 |
def pause(self) -> None:
for worker in self.iterate_workers(self.worker_registry):
worker.pause()
| 186 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
BaseService.resume
|
(self)
| 116 | 118 |
def resume(self) -> None:
for worker in self.iterate_workers(self.worker_registry):
worker.resume()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L116-L118
| 1 |
[
0,
1,
2
] | 100 |
[] | 0 | true | 100 | 3 | 2 | 100 | 0 |
def resume(self) -> None:
for worker in self.iterate_workers(self.worker_registry):
worker.resume()
| 187 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
BaseService.register_workers
|
(self, *args: Any)
| 120 | 121 |
def register_workers(self, *args: Any) -> None:
self.worker_registry = list(args)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L120-L121
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 100 | 2 | 1 | 100 | 0 |
def register_workers(self, *args: Any) -> None:
self.worker_registry = list(args)
| 188 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
BaseService.is_busy
|
(self)
|
return any(
x.is_busy_event.is_set() for x in self.iterate_workers(self.worker_registry)
)
| 123 | 126 |
def is_busy(self) -> bool:
    """Return True if any registered worker is currently processing work."""
    return any(
        x.is_busy_event.is_set() for x in self.iterate_workers(self.worker_registry)
    )
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L123-L126
| 1 |
[
0,
1
] | 50 |
[] | 0 | false | 100 | 4 | 1 | 100 | 0 |
def is_busy(self) -> bool:
return any(
x.is_busy_event.is_set() for x in self.iterate_workers(self.worker_registry)
)
| 189 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/base.py
|
BaseService.is_alive
|
(self)
|
return any(x.is_alive() for x in self.iterate_workers(self.worker_registry))
| 128 | 129 |
def is_alive(self) -> bool:
return any(x.is_alive() for x in self.iterate_workers(self.worker_registry))
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/base.py#L128-L129
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 100 | 2 | 1 | 100 | 0 |
def is_alive(self) -> bool:
return any(x.is_alive() for x in self.iterate_workers(self.worker_registry))
| 190 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/task_generator.py
|
TaskGeneratorService.__init__
|
(
self,
fatal_error_queue: Queue[FatalErrorQueueItem],
real_generator: Iterator[Task],
thread_number: int,
get_task_queue: Callable[[], BaseTaskQueue],
parser_service: ParserService,
task_dispatcher: TaskDispatcherService,
)
| 16 | 32 |
def __init__(
    self,
    fatal_error_queue: Queue[FatalErrorQueueItem],
    real_generator: Iterator[Task],
    thread_number: int,
    get_task_queue: Callable[[], BaseTaskQueue],
    parser_service: ParserService,
    task_dispatcher: TaskDispatcherService,
) -> None:
    """Service that feeds tasks from a user generator into the dispatcher.

    :param real_generator: iterator yielding Task objects on demand
    :param thread_number: number of network threads; used to size the
        refill threshold (at least 200, or twice the thread count)
    :param get_task_queue: callable returning the task queue; resolved
        lazily inside the worker, not here
    :param parser_service: its input queue size also counts as pending work
    :param task_dispatcher: destination queue for generated tasks
    """
    super().__init__(fatal_error_queue)
    self.real_generator = real_generator
    # Stop refilling once this many items are already pending.
    self.task_queue_threshold = max(200, thread_number * 2)
    self.get_task_queue = get_task_queue
    self.parser_service = parser_service
    self.task_dispatcher = task_dispatcher
    self.worker = self.create_worker(self.worker_callback)
    self.register_workers(self.worker)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/task_generator.py#L16-L32
| 1 |
[
0,
9,
10,
11,
12,
13,
14,
15,
16
] | 52.941176 |
[] | 0 | false | 94.285714 | 17 | 1 | 100 | 0 |
def __init__(
self,
fatal_error_queue: Queue[FatalErrorQueueItem],
real_generator: Iterator[Task],
thread_number: int,
get_task_queue: Callable[[], BaseTaskQueue],
parser_service: ParserService,
task_dispatcher: TaskDispatcherService,
) -> None:
super().__init__(fatal_error_queue)
self.real_generator = real_generator
self.task_queue_threshold = max(200, thread_number * 2)
self.get_task_queue = get_task_queue
self.parser_service = parser_service
self.task_dispatcher = task_dispatcher
self.worker = self.create_worker(self.worker_callback)
self.register_workers(self.worker)
| 191 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/spider/service/task_generator.py
|
TaskGeneratorService.worker_callback
|
(self, worker: ServiceWorker)
| 34 | 56 |
def worker_callback(self, worker: ServiceWorker) -> None:
    """Top up the task queue from the generator while demand exists.

    Refills only while pending work (the larger of the task queue size
    and the parser input queue size) is below the threshold.  Returns
    permanently when the generator is exhausted, or early if a pause is
    requested mid-refill.
    """
    # at this point I guess the task queue is set
    # i.e. "spider.run()" is called
    task_queue = self.get_task_queue()
    while not worker.stop_event.is_set():
        worker.process_pause_signal()
        queue_size = max(
            task_queue.size(),
            self.parser_service.input_queue.qsize(),
        )
        if queue_size < self.task_queue_threshold:
            try:
                for _ in range(self.task_queue_threshold - queue_size):
                    if worker.pause_event.is_set():
                        return
                    task = next(self.real_generator)
                    self.task_dispatcher.input_queue.put(
                        (task, None, {"source": "task_generator"})
                    )
            except StopIteration:
                # Generator exhausted: this service is done for good.
                return
        else:
            time.sleep(0.1)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/spider/service/task_generator.py#L34-L56
| 1 |
[
0,
1,
2,
3,
4,
5,
6,
10,
11,
12,
13,
15,
16,
19,
20
] | 65.217391 |
[
14,
22
] | 8.695652 | false | 94.285714 | 23 | 6 | 91.304348 | 0 |
def worker_callback(self, worker: ServiceWorker) -> None:
# at this point I guess the task queue is set
# i.e. "spider.run()" is called
task_queue = self.get_task_queue()
while not worker.stop_event.is_set():
worker.process_pause_signal()
queue_size = max(
task_queue.size(),
self.parser_service.input_queue.qsize(),
)
if queue_size < self.task_queue_threshold:
try:
for _ in range(self.task_queue_threshold - queue_size):
if worker.pause_event.is_set():
return
task = next(self.real_generator)
self.task_dispatcher.input_queue.put(
(task, None, {"source": "task_generator"})
)
except StopIteration:
return
else:
time.sleep(0.1)
| 192 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/metrics.py
|
in_unit
|
(num: int, unit: str)
|
return num
| 8 | 17 |
def in_unit(num: int, unit: str) -> int | float:
if unit == "b":
return num
if unit == "kb":
return round(num / float(KB), 2)
if unit == "mb":
return round(num / float(MB), 2)
if unit == "gb":
return round(num / float(GB), 2)
return num
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/metrics.py#L8-L17
| 1 |
[
0
] | 10 |
[
1,
2,
3,
4,
5,
6,
7,
8,
9
] | 90 | false | 27.272727 | 10 | 5 | 10 | 0 |
def in_unit(num: int, unit: str) -> int | float:
if unit == "b":
return num
if unit == "kb":
return round(num / float(KB), 2)
if unit == "mb":
return round(num / float(MB), 2)
if unit == "gb":
return round(num / float(GB), 2)
return num
| 193 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/metrics.py
|
format_traffic_value
|
(num: int)
|
return "%s GB" % in_unit(num, "gb")
| 20 | 27 |
def format_traffic_value(num: int) -> str:
    """Render a byte count as a human readable string, e.g. ``1.5 MB``."""
    for limit, unit, label in ((KB, "b", "B"), (MB, "kb", "KB"), (GB, "mb", "MB")):
        if num < limit:
            return "%s %s" % (in_unit(num, unit), label)
    return "%s GB" % in_unit(num, "gb")
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/metrics.py#L20-L27
| 1 |
[
0
] | 12.5 |
[
1,
2,
3,
4,
5,
6,
7
] | 87.5 | false | 27.272727 | 8 | 4 | 12.5 | 0 |
def format_traffic_value(num: int) -> str:
if num < KB:
return "%s B" % in_unit(num, "b")
if num < MB:
return "%s KB" % in_unit(num, "kb")
if num < GB:
return "%s MB" % in_unit(num, "mb")
return "%s GB" % in_unit(num, "gb")
| 194 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/structures.py
|
merge_with_dict
|
(
hdr1: MutableMapping[str, Any],
hdr2: Mapping[str, Any],
replace: bool,
)
|
return hdr1
| 7 | 15 |
def merge_with_dict(
    hdr1: MutableMapping[str, Any],
    hdr2: Mapping[str, Any],
    replace: bool,
) -> MutableMapping[str, Any]:
    """Merge *hdr2* into *hdr1* in place and return *hdr1*.

    When *replace* is true existing keys are overwritten; otherwise only
    keys missing from *hdr1* are copied over.
    """
    if replace:
        hdr1.update(hdr2)
    else:
        for key in hdr2:
            if key not in hdr1:
                hdr1[key] = hdr2[key]
    return hdr1
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/structures.py#L7-L15
| 1 |
[
0,
5,
6,
7,
8
] | 55.555556 |
[] | 0 | false | 100 | 9 | 4 | 100 | 0 |
def merge_with_dict(
hdr1: MutableMapping[str, Any],
hdr2: Mapping[str, Any],
replace: bool,
) -> MutableMapping[str, Any]:
for key, val in hdr2.items():
if replace or key not in hdr1:
hdr1[key] = val
return hdr1
| 196 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/types.py
|
resolve_entity
|
(
base_type: type[T],
entity: None | T | type[T],
default: type[T],
)
|
return cast(typing.Type[T], entity)()
| 16 | 32 |
def resolve_entity(
    base_type: type[T],
    entity: None | T | type[T],
    default: type[T],
) -> T:
    """Normalize *entity* into an instance of *base_type*.

    Accepts an instance (returned as-is), a subclass of *base_type*
    (instantiated), or None (the *default* class is instantiated).
    Anything else raises TypeError.
    """
    is_instance = isinstance(entity, base_type)
    is_subclass = inspect.isclass(entity) and issubclass(entity, base_type)
    if entity and not is_instance and not is_subclass:
        raise TypeError("Invalid {} entity: {}".format(base_type, entity))
    if entity is None:
        assert issubclass(default, base_type)
        return default()
    if is_instance:
        return entity
    # pylint: disable=deprecated-typing-alias
    return cast(typing.Type[T], entity)()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/types.py#L16-L32
| 1 |
[
0,
5,
10,
11,
12,
13,
14,
15,
16
] | 52.941176 |
[
9
] | 5.882353 | false | 92.857143 | 17 | 8 | 94.117647 | 0 |
def resolve_entity(
base_type: type[T],
entity: None | T | type[T],
default: type[T],
) -> T:
if entity and (
not isinstance(entity, base_type)
and (not inspect.isclass(entity) or not issubclass(entity, base_type))
):
raise TypeError("Invalid {} entity: {}".format(base_type, entity))
if entity is None:
assert issubclass(default, base_type)
return default()
if isinstance(entity, base_type):
return entity
# pylint: disable=deprecated-typing-alias
return cast(typing.Type[T], entity)()
| 197 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/timeout.py
|
Timeout.__init__
|
(
self,
total: None | float | UndefinedParam = UNDEFINED_PARAM,
connect: None | float | UndefinedParam = UNDEFINED_PARAM,
read: None | float | UndefinedParam = UNDEFINED_PARAM,
)
|
Timeout constructor.
Unspecified total timeout is set to None.
Unspecified connect timeout is set to total timeout.
Unspecified read timeout is set to total timeout.
|
Timeout constructor.
| 17 | 31 |
def __init__(
    self,
    total: None | float | UndefinedParam = UNDEFINED_PARAM,
    connect: None | float | UndefinedParam = UNDEFINED_PARAM,
    read: None | float | UndefinedParam = UNDEFINED_PARAM,
):
    """Timeout constructor.

    The UNDEFINED_PARAM sentinel distinguishes "argument not passed"
    from an explicit None (meaning "no timeout").  An unspecified total
    timeout falls back to DEFAULT_TOTAL_TIMEOUT; unspecified connect and
    read timeouts inherit the resolved total timeout.
    """
    self.total = total if total is not UNDEFINED_PARAM else DEFAULT_TOTAL_TIMEOUT
    self.connect = connect if connect is not UNDEFINED_PARAM else self.total
    self.read = read if read is not UNDEFINED_PARAM else self.total
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/timeout.py#L17-L31
| 1 |
[
0,
11,
12,
13,
14
] | 33.333333 |
[] | 0 | false | 92.857143 | 15 | 1 | 100 | 5 |
def __init__(
self,
total: None | float | UndefinedParam = UNDEFINED_PARAM,
connect: None | float | UndefinedParam = UNDEFINED_PARAM,
read: None | float | UndefinedParam = UNDEFINED_PARAM,
):
self.total = total if total is not UNDEFINED_PARAM else DEFAULT_TOTAL_TIMEOUT
self.connect = connect if connect is not UNDEFINED_PARAM else self.total
self.read = read if read is not UNDEFINED_PARAM else self.total
| 198 |
|
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/timeout.py
|
Timeout.__repr__
|
(self)
|
return "{}(connect={!r}, read={!r}, total={!r})".format(
type(self).__name__, self.connect, self.read, self.total
)
| 33 | 36 |
def __repr__(self) -> str:
    # Mirror the constructor arguments for easy debugging; uses the
    # runtime class name so subclasses render correctly.
    return "{}(connect={!r}, read={!r}, total={!r})".format(
        type(self).__name__, self.connect, self.read, self.total
    )
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/timeout.py#L33-L36
| 1 |
[
0
] | 25 |
[
1
] | 25 | false | 92.857143 | 4 | 1 | 75 | 0 |
def __repr__(self) -> str:
return "{}(connect={!r}, read={!r}, total={!r})".format(
type(self).__name__, self.connect, self.read, self.total
)
| 199 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/cookies.py
|
create_cookie
|
( # pylint: disable=too-many-arguments, too-many-locals
*,
name: str,
value: str,
domain: str,
comment: None | str = None,
comment_url: None | str = None,
discard: bool = True,
domain_initial_dot: None | bool = None,
domain_specified: None | bool = None,
expires: None | int = None,
path: str = "/",
path_specified: None | bool = None,
port: None | int = None,
port_specified: None | bool = None,
rest: None | dict[str, Any] = None,
rfc2109: bool = False,
secure: bool = False,
version: int = 0,
httponly: None | bool = None,
)
|
return Cookie(
# from required scope
name=name,
value=value,
domain=domain,
# from non required scope
comment=comment,
comment_url=comment_url,
discard=discard,
domain_initial_dot=domain_initial_dot,
domain_specified=domain_specified,
expires=expires,
path=path,
path_specified=path_specified,
port=str(port) if port else None, # typeshed bundled with mypy wants str type
port_specified=port_specified,
rest=new_rest,
rfc2109=rfc2109,
secure=secure,
version=version,
)
|
Create cookielib.Cookie instance.
|
Create cookielib.Cookie instance.
| 129 | 189 |
def create_cookie( # pylint: disable=too-many-arguments, too-many-locals
*,
name: str,
value: str,
domain: str,
comment: None | str = None,
comment_url: None | str = None,
discard: bool = True,
domain_initial_dot: None | bool = None,
domain_specified: None | bool = None,
expires: None | int = None,
path: str = "/",
path_specified: None | bool = None,
port: None | int = None,
port_specified: None | bool = None,
rest: None | dict[str, Any] = None,
rfc2109: bool = False,
secure: bool = False,
version: int = 0,
httponly: None | bool = None,
) -> Cookie:
"""Create cookielib.Cookie instance."""
# See also type hints for Cookie at
# https://github.com/python/typeshed/blob/main/stdlib/http/cookiejar.pyi
if domain == "localhost":
domain = ""
if rest is None:
new_rest = {}
else:
new_rest = copy(rest)
if "HttpOnly" not in new_rest:
new_rest["HttpOnly"] = httponly
if port_specified is None:
port_specified = port is not None
if domain_specified is None:
domain_specified = domain is not None
if domain_initial_dot is None:
domain_initial_dot = domain.startswith(".")
if path_specified is None:
path_specified = path is not None
return Cookie(
# from required scope
name=name,
value=value,
domain=domain,
# from non required scope
comment=comment,
comment_url=comment_url,
discard=discard,
domain_initial_dot=domain_initial_dot,
domain_specified=domain_specified,
expires=expires,
path=path,
path_specified=path_specified,
port=str(port) if port else None, # typeshed bundled with mypy wants str type
port_specified=port_specified,
rest=new_rest,
rfc2109=rfc2109,
secure=secure,
version=version,
)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/cookies.py#L129-L189
| 1 |
[
0,
23,
24,
26,
27,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41
] | 24.590164 |
[
25,
29,
30,
31
] | 6.557377 | false | 81.927711 | 61 | 8 | 93.442623 | 1 |
def create_cookie( # pylint: disable=too-many-arguments, too-many-locals
*,
name: str,
value: str,
domain: str,
comment: None | str = None,
comment_url: None | str = None,
discard: bool = True,
domain_initial_dot: None | bool = None,
domain_specified: None | bool = None,
expires: None | int = None,
path: str = "/",
path_specified: None | bool = None,
port: None | int = None,
port_specified: None | bool = None,
rest: None | dict[str, Any] = None,
rfc2109: bool = False,
secure: bool = False,
version: int = 0,
httponly: None | bool = None,
) -> Cookie:
# See also type hints for Cookie at
# https://github.com/python/typeshed/blob/main/stdlib/http/cookiejar.pyi
if domain == "localhost":
domain = ""
if rest is None:
new_rest = {}
else:
new_rest = copy(rest)
if "HttpOnly" not in new_rest:
new_rest["HttpOnly"] = httponly
if port_specified is None:
port_specified = port is not None
if domain_specified is None:
domain_specified = domain is not None
if domain_initial_dot is None:
domain_initial_dot = domain.startswith(".")
if path_specified is None:
path_specified = path is not None
return Cookie(
# from required scope
name=name,
value=value,
domain=domain,
# from non required scope
comment=comment,
comment_url=comment_url,
discard=discard,
domain_initial_dot=domain_initial_dot,
domain_specified=domain_specified,
expires=expires,
path=path,
path_specified=path_specified,
port=str(port) if port else None, # typeshed bundled with mypy wants str type
port_specified=port_specified,
rest=new_rest,
rfc2109=rfc2109,
secure=secure,
version=version,
)
| 200 |
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/cookies.py
|
build_cookie_header
|
(
cookiejar: CookieJar, url: str, headers: Mapping[str, str]
)
|
return mocked_req.get_new_headers().get("Cookie")
|
Build HTTP Cookie header value for given cookies.
|
Build HTTP Cookie header value for given cookies.
| 192 | 198 |
def build_cookie_header(
cookiejar: CookieJar, url: str, headers: Mapping[str, str]
) -> None | str:
"""Build HTTP Cookie header value for given cookies."""
mocked_req = MockRequest(url, dict(headers))
cookiejar.add_cookie_header(cast(urllib.request.Request, mocked_req))
return mocked_req.get_new_headers().get("Cookie")
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/cookies.py#L192-L198
| 1 |
[
0,
3,
4,
5,
6
] | 71.428571 |
[] | 0 | false | 81.927711 | 7 | 1 | 100 | 1 |
def build_cookie_header(
cookiejar: CookieJar, url: str, headers: Mapping[str, str]
) -> None | str:
mocked_req = MockRequest(url, dict(headers))
cookiejar.add_cookie_header(cast(urllib.request.Request, mocked_req))
return mocked_req.get_new_headers().get("Cookie")
| 201 |
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/cookies.py
|
build_jar
|
(cookies: Sequence[Cookie])
|
return jar
| 201 | 205 |
def build_jar(cookies: Sequence[Cookie]) -> CookieJar:
jar = CookieJar()
for item in cookies:
jar.set_cookie(item)
return jar
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/cookies.py#L201-L205
| 1 |
[
0
] | 20 |
[
1,
2,
3,
4
] | 80 | false | 81.927711 | 5 | 2 | 20 | 0 |
def build_jar(cookies: Sequence[Cookie]) -> CookieJar:
jar = CookieJar()
for item in cookies:
jar.set_cookie(item)
return jar
| 202 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/cookies.py
|
extract_response_cookies
|
(
req_url: str,
req_headers: Mapping[str, Any] | HTTPMessage | HTTPHeaderDict,
response_headers: HTTPMessage | HTTPHeaderDict,
)
|
return list(jar)
| 208 | 218 |
def extract_response_cookies(
req_url: str,
req_headers: Mapping[str, Any] | HTTPMessage | HTTPHeaderDict,
response_headers: HTTPMessage | HTTPHeaderDict,
) -> Sequence[Cookie]:
jar = CookieJar()
jar.extract_cookies(
cast(HTTPResponse, MockResponse(response_headers)),
cast(urllib.request.Request, MockRequest(req_url, dict(req_headers))),
)
return list(jar)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/cookies.py#L208-L218
| 1 |
[
0,
5,
6,
10
] | 36.363636 |
[] | 0 | false | 81.927711 | 11 | 1 | 100 | 0 |
def extract_response_cookies(
req_url: str,
req_headers: Mapping[str, Any] | HTTPMessage | HTTPHeaderDict,
response_headers: HTTPMessage | HTTPHeaderDict,
) -> Sequence[Cookie]:
jar = CookieJar()
jar.extract_cookies(
cast(HTTPResponse, MockResponse(response_headers)),
cast(urllib.request.Request, MockRequest(req_url, dict(req_headers))),
)
return list(jar)
| 203 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/cookies.py
|
MockRequest.__init__
|
(self, url: str, headers: dict[str, str])
| 38 | 42 |
def __init__(self, url: str, headers: dict[str, str]):
self._url = url
self._headers = headers
self._new_headers: dict[str, Any] = {}
self.type = urlparse(self._url).scheme
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/cookies.py#L38-L42
| 1 |
[
0,
1,
2,
3,
4
] | 100 |
[] | 0 | true | 81.927711 | 5 | 1 | 100 | 0 |
def __init__(self, url: str, headers: dict[str, str]):
self._url = url
self._headers = headers
self._new_headers: dict[str, Any] = {}
self.type = urlparse(self._url).scheme
| 204 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/cookies.py
|
MockRequest.get_type
|
(self)
|
return self.type
| 44 | 45 |
def get_type(self) -> str:
return self.type
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/cookies.py#L44-L45
| 1 |
[
0
] | 50 |
[
1
] | 50 | false | 81.927711 | 2 | 1 | 50 | 0 |
def get_type(self) -> str:
return self.type
| 205 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/cookies.py
|
MockRequest.get_host
|
(self)
|
return urlparse(self._url).netloc
| 47 | 48 |
def get_host(self) -> str:
return urlparse(self._url).netloc
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/cookies.py#L47-L48
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 81.927711 | 2 | 1 | 100 | 0 |
def get_host(self) -> str:
return urlparse(self._url).netloc
| 206 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/cookies.py
|
MockRequest.get_origin_req_host
|
(self)
|
return self.get_host()
| 50 | 51 |
def get_origin_req_host(self) -> str:
return self.get_host()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/cookies.py#L50-L51
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 81.927711 | 2 | 1 | 100 | 0 |
def get_origin_req_host(self) -> str:
return self.get_host()
| 207 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/cookies.py
|
MockRequest.get_full_url
|
(self)
|
return urlunparse(
[
parsed.scheme,
host,
parsed.path,
parsed.params,
parsed.query,
parsed.fragment,
]
)
| 53 | 71 |
def get_full_url(self) -> str:
# Only return the response's URL if the user hadn't set the Host
# header
if not self._headers.get("Host"):
return self._url
# If they did set it, retrieve it and reconstruct the expected domain
host = self._headers["Host"]
parsed = urlparse(self._url)
# Reconstruct the URL as we expect it
return urlunparse(
[
parsed.scheme,
host,
parsed.path,
parsed.params,
parsed.query,
parsed.fragment,
]
)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/cookies.py#L53-L71
| 1 |
[
0,
1,
2,
3,
4,
5
] | 31.578947 |
[
6,
7,
9
] | 15.789474 | false | 81.927711 | 19 | 2 | 84.210526 | 0 |
def get_full_url(self) -> str:
# Only return the response's URL if the user hadn't set the Host
# header
if not self._headers.get("Host"):
return self._url
# If they did set it, retrieve it and reconstruct the expected domain
host = self._headers["Host"]
parsed = urlparse(self._url)
# Reconstruct the URL as we expect it
return urlunparse(
[
parsed.scheme,
host,
parsed.path,
parsed.params,
parsed.query,
parsed.fragment,
]
)
| 208 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/cookies.py
|
MockRequest.is_unverifiable
|
(self)
|
return True
| 73 | 74 |
def is_unverifiable(self) -> bool:
return True
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/cookies.py#L73-L74
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 81.927711 | 2 | 1 | 100 | 0 |
def is_unverifiable(self) -> bool:
return True
| 209 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/cookies.py
|
MockRequest.has_header
|
(self, name: str)
|
return name in self._headers or name in self._new_headers
| 76 | 77 |
def has_header(self, name: str) -> bool:
return name in self._headers or name in self._new_headers
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/cookies.py#L76-L77
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 81.927711 | 2 | 2 | 100 | 0 |
def has_header(self, name: str) -> bool:
return name in self._headers or name in self._new_headers
| 210 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/cookies.py
|
MockRequest.get_header
|
(self, name: str, default: Any = None)
|
return self._headers.get(name, self._new_headers.get(name, default))
| 79 | 80 |
def get_header(self, name: str, default: Any = None) -> str:
return self._headers.get(name, self._new_headers.get(name, default))
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/cookies.py#L79-L80
| 1 |
[
0
] | 50 |
[
1
] | 50 | false | 81.927711 | 2 | 1 | 50 | 0 |
def get_header(self, name: str, default: Any = None) -> str:
return self._headers.get(name, self._new_headers.get(name, default))
| 211 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/cookies.py
|
MockRequest.add_header
|
(self, key: str, val: str)
|
Cookielib has no legitimate use for this method.
Add it back if you find one.
|
Cookielib has no legitimate use for this method.
| 82 | 89 |
def add_header(self, key: str, val: str) -> None:
"""Cookielib has no legitimate use for this method.
Add it back if you find one.
"""
raise NotImplementedError(
"Cookie headers should be added with add_unredirected_header()"
)
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/cookies.py#L82-L89
| 1 |
[
0,
1,
2,
3,
4
] | 62.5 |
[
5
] | 12.5 | false | 81.927711 | 8 | 1 | 87.5 | 3 |
def add_header(self, key: str, val: str) -> None:
raise NotImplementedError(
"Cookie headers should be added with add_unredirected_header()"
)
| 212 |
|
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/cookies.py
|
MockRequest.add_unredirected_header
|
(self, name: str, value: str)
| 91 | 92 |
def add_unredirected_header(self, name: str, value: str) -> None:
self._new_headers[name] = value
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/cookies.py#L91-L92
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 81.927711 | 2 | 1 | 100 | 0 |
def add_unredirected_header(self, name: str, value: str) -> None:
self._new_headers[name] = value
| 213 |
|||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/cookies.py
|
MockRequest.get_new_headers
|
(self)
|
return self._new_headers
| 94 | 95 |
def get_new_headers(self) -> dict[str, str]:
return self._new_headers
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/cookies.py#L94-L95
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 81.927711 | 2 | 1 | 100 | 0 |
def get_new_headers(self) -> dict[str, str]:
return self._new_headers
| 214 |
||
lorien/grab
|
2d170c31a3335c2e29578b42a5d62ef3efc5d7ee
|
grab/util/cookies.py
|
MockRequest.unverifiable
|
(self)
|
return self.is_unverifiable()
| 98 | 99 |
def unverifiable(self) -> bool:
return self.is_unverifiable()
|
https://github.com/lorien/grab/blob/2d170c31a3335c2e29578b42a5d62ef3efc5d7ee/project1/grab/util/cookies.py#L98-L99
| 1 |
[
0,
1
] | 100 |
[] | 0 | true | 81.927711 | 2 | 1 | 100 | 0 |
def unverifiable(self) -> bool:
return self.is_unverifiable()
| 215 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.