Code | Summary
---|---|
Please provide a description of the function:
def _is_domain_match(domain: str, hostname: str) -> bool:
    if hostname == domain:
        return True
    if not hostname.endswith(domain):
        return False
    non_matching = hostname[:-len(domain)]
    if not non_matching.endswith("."):
        return False
    return not is_ip_address(hostname)
|
[
"Implements domain matching adhering to RFC 6265."
] |
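A quick sanity check of the matching rules above (illustrative calls, not from the dataset; is_ip_address is assumed to come from the same module):

    _is_domain_match("example.com", "example.com")      # True: exact match
    _is_domain_match("example.com", "www.example.com")  # True: prefix "www." ends with a dot
    _is_domain_match("example.com", "badexample.com")   # False: prefix "bad" lacks the dot separator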
Please provide a description of the function:
def _is_path_match(req_path: str, cookie_path: str) -> bool:
    if not req_path.startswith("/"):
        req_path = "/"
    if req_path == cookie_path:
        return True
    if not req_path.startswith(cookie_path):
        return False
    if cookie_path.endswith("/"):
        return True
    non_matching = req_path[len(cookie_path):]
    return non_matching.startswith("/")
|
[
"Implements path matching adhering to RFC 6265."
] |
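Illustrative calls showing the path-matching rules (a sketch, not part of the dataset):

    _is_path_match("/one/two", "/one")   # True: remainder "/two" starts with "/"
    _is_path_match("/one/two", "/one/")  # True: cookie path ends with "/"
    _is_path_match("/onetwo", "/one")    # False: remainder "two" lacks the "/" separator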
Please provide a description of the function:
def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]:
    if not date_str:
        return None
    found_time = False
    found_day = False
    found_month = False
    found_year = False
    hour = minute = second = 0
    day = 0
    month = 0
    year = 0
    for token_match in cls.DATE_TOKENS_RE.finditer(date_str):
        token = token_match.group("token")
        if not found_time:
            time_match = cls.DATE_HMS_TIME_RE.match(token)
            if time_match:
                found_time = True
                hour, minute, second = [
                    int(s) for s in time_match.groups()]
                continue
        if not found_day:
            day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
            if day_match:
                found_day = True
                day = int(day_match.group())
                continue
        if not found_month:
            month_match = cls.DATE_MONTH_RE.match(token)
            if month_match:
                found_month = True
                month = month_match.lastindex
                continue
        if not found_year:
            year_match = cls.DATE_YEAR_RE.match(token)
            if year_match:
                found_year = True
                year = int(year_match.group())
    if 70 <= year <= 99:
        year += 1900
    elif 0 <= year <= 69:
        year += 2000
    if False in (found_day, found_month, found_year, found_time):
        return None
    if not 1 <= day <= 31:
        return None
    if year < 1601 or hour > 23 or minute > 59 or second > 59:
        return None
    return datetime.datetime(year, month, day,
                             hour, minute, second,
                             tzinfo=datetime.timezone.utc)
|
[
"Implements date string parsing adhering to RFC 6265."
] |
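A hedged usage sketch, assuming this is the classmethod on aiohttp's CookieJar with its DATE_* regexes in place:

    # hypothetical invocation on the owning class
    CookieJar._parse_date("Tue, 15-Nov-1994 08:12:31 GMT")
    # -> datetime.datetime(1994, 11, 15, 8, 12, 31, tzinfo=datetime.timezone.utc)
    # two-digit years are widened: 94 -> 1994, 21 -> 2021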
Please provide a description of the function:
def my_protocol_parser(out, buf):
    while True:
        tp = yield from buf.read(5)
        if tp in (MSG_PING, MSG_PONG):
            # skip line
            yield from buf.skipuntil(b'\r\n')
            out.feed_data(Message(tp, None))
        elif tp == MSG_STOP:
            out.feed_data(Message(tp, None))
        elif tp == MSG_TEXT:
            # read text
            text = yield from buf.readuntil(b'\r\n')
            out.feed_data(Message(tp, text.strip().decode('utf-8')))
        else:
            raise ValueError('Unknown protocol prefix.')
|
[
"Parser is used with StreamParser for incremental protocol parsing.\n Parser is a generator function, but it is not a coroutine. Usually\n parsers are implemented as a state machine.\n\n more details in asyncio/parsers.py\n existing parsers:\n * HTTP protocol parsers asyncio/http/protocol.py\n * websocket parser asyncio/http/websocket.py\n "
] |
Please provide a description of the function:
def set_content_disposition(self,
                            disptype: str,
                            quote_fields: bool=True,
                            **params: Any) -> None:
    self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
        disptype, quote_fields=quote_fields, **params)
|
[
"Sets ``Content-Disposition`` header."
] |
Please provide a description of the function:
def clone(self, *, method: str=sentinel, rel_url: StrOrURL=sentinel,
          headers: LooseHeaders=sentinel, scheme: str=sentinel,
          host: str=sentinel,
          remote: str=sentinel) -> 'BaseRequest':
    if self._read_bytes:
        raise RuntimeError("Cannot clone request "
                           "after reading its content")

    dct = {}  # type: Dict[str, Any]
    if method is not sentinel:
        dct['method'] = method
    if rel_url is not sentinel:
        new_url = URL(rel_url)
        dct['url'] = new_url
        dct['path'] = str(new_url)
    if headers is not sentinel:
        # a copy semantic
        dct['headers'] = CIMultiDictProxy(CIMultiDict(headers))
        dct['raw_headers'] = tuple((k.encode('utf-8'), v.encode('utf-8'))
                                   for k, v in headers.items())

    message = self._message._replace(**dct)

    kwargs = {}
    if scheme is not sentinel:
        kwargs['scheme'] = scheme
    if host is not sentinel:
        kwargs['host'] = host
    if remote is not sentinel:
        kwargs['remote'] = remote

    return self.__class__(
        message,
        self._payload,
        self._protocol,
        self._payload_writer,
        self._task,
        self._loop,
        client_max_size=self._client_max_size,
        state=self._state.copy(),
        **kwargs)
|
[
"Clone itself with replacement some attributes.\n\n Creates and returns a new instance of Request object. If no parameters\n are given, an exact copy is returned. If a parameter is not passed, it\n will reuse the one from the current request object.\n\n "
] |
Please provide a description of the function:
def forwarded(self) -> Tuple[Mapping[str, str], ...]:
    elems = []
    for field_value in self._message.headers.getall(hdrs.FORWARDED, ()):
        length = len(field_value)
        pos = 0
        need_separator = False
        elem = {}  # type: Dict[str, str]
        elems.append(types.MappingProxyType(elem))
        while 0 <= pos < length:
            match = _FORWARDED_PAIR_RE.match(field_value, pos)
            if match is not None:  # got a valid forwarded-pair
                if need_separator:
                    # bad syntax here, skip to next comma
                    pos = field_value.find(',', pos)
                else:
                    name, value, port = match.groups()
                    if value[0] == '"':
                        # quoted string: remove quotes and unescape
                        value = _QUOTED_PAIR_REPLACE_RE.sub(r'\1',
                                                            value[1:-1])
                    if port:
                        value += port
                    elem[name.lower()] = value
                    pos += len(match.group(0))
                    need_separator = True
            elif field_value[pos] == ',':  # next forwarded-element
                need_separator = False
                elem = {}
                elems.append(types.MappingProxyType(elem))
                pos += 1
            elif field_value[pos] == ';':  # next forwarded-pair
                need_separator = False
                pos += 1
            elif field_value[pos] in ' \t':
                # Allow whitespace even between forwarded-pairs, though
                # RFC 7239 doesn't. This simplifies code and is in line
                # with Postel's law.
                pos += 1
            else:
                # bad syntax here, skip to next comma
                pos = field_value.find(',', pos)
    return tuple(elems)
|
[
"A tuple containing all parsed Forwarded header(s).\n\n Makes an effort to parse Forwarded headers as specified by RFC 7239:\n\n - It adds one (immutable) dictionary per Forwarded 'field-value', ie\n per proxy. The element corresponds to the data in the Forwarded\n field-value added by the first proxy encountered by the client. Each\n subsequent item corresponds to those added by later proxies.\n - It checks that every value has valid syntax in general as specified\n in section 4: either a 'token' or a 'quoted-string'.\n - It un-escapes found escape sequences.\n - It does NOT validate 'by' and 'for' contents as specified in section\n 6.\n - It does NOT validate 'host' contents (Host ABNF).\n - It does NOT validate 'proto' contents for valid URI scheme names.\n\n Returns a tuple containing one or more immutable dicts\n "
] |
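For orientation, a sketch of the structure this property returns for a typical header (values illustrative):

    # Forwarded: for=192.0.2.60;proto=http;by=203.0.113.43, for=198.51.100.17
    # request.forwarded would then be, roughly:
    ({'for': '192.0.2.60', 'proto': 'http', 'by': '203.0.113.43'},
     {'for': '198.51.100.17'})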
Please provide a description of the function:
def host(self) -> str:
    host = self._message.headers.get(hdrs.HOST)
    if host is not None:
        return host
    else:
        return socket.getfqdn()
|
[
"Hostname of the request.\n\n Hostname is resolved in this order:\n\n - overridden value by .clone(host=new_host) call.\n - HOST HTTP header\n - socket.getfqdn() value\n "
] |
Please provide a description of the function:
def remote(self) -> Optional[str]:
    if isinstance(self._transport_peername, (list, tuple)):
        return self._transport_peername[0]
    else:
        return self._transport_peername
|
[
"Remote IP of client initiated HTTP request.\n\n The IP is resolved in this order:\n\n - overridden value by .clone(remote=new_remote) call.\n - peername of opened socket\n "
] |
Please provide a description of the function:
def _http_date(_date_str: str) -> Optional[datetime.datetime]:
    if _date_str is not None:
        timetuple = parsedate(_date_str)
        if timetuple is not None:
            return datetime.datetime(*timetuple[:6],
                                     tzinfo=datetime.timezone.utc)
    return None
|
[
"Process a date string, return a datetime object\n "
] |
Please provide a description of the function:
def if_modified_since(self) -> Optional[datetime.datetime]:
    return self._http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))
|
[
"The value of If-Modified-Since HTTP header, or None.\n\n This header is represented as a `datetime` object.\n "
] |
Please provide a description of the function:
def if_unmodified_since(self) -> Optional[datetime.datetime]:
    return self._http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))
|
[
"The value of If-Unmodified-Since HTTP header, or None.\n\n This header is represented as a `datetime` object.\n "
] |
Please provide a description of the function:
def if_range(self) -> Optional[datetime.datetime]:
    return self._http_date(self.headers.get(hdrs.IF_RANGE))
|
[
"The value of If-Range HTTP header, or None.\n\n This header is represented as a `datetime` object.\n "
] |
Please provide a description of the function:
def cookies(self) -> Mapping[str, str]:
    raw = self.headers.get(hdrs.COOKIE, '')
    parsed = SimpleCookie(raw)
    return MappingProxyType(
        {key: val.value for key, val in parsed.items()})
|
[
"Return request cookies.\n\n A read-only dictionary-like object.\n "
] |
Please provide a description of the function:
def http_range(self) -> slice:
    rng = self._headers.get(hdrs.RANGE)
    start, end = None, None
    if rng is not None:
        try:
            pattern = r'^bytes=(\d*)-(\d*)$'
            start, end = re.findall(pattern, rng)[0]
        except IndexError:  # pattern was not found in header
            raise ValueError("range not in acceptable format")

        end = int(end) if end else None
        start = int(start) if start else None

        if start is None and end is not None:
            # end with no start is to return tail of content
            start = -end
            end = None

        if start is not None and end is not None:
            # end is inclusive in range header, exclusive for slice
            end += 1

            if start >= end:
                raise ValueError('start cannot be after end')

        if start is end is None:  # No valid range supplied
            raise ValueError('No start or end of range specified')

    return slice(start, end, 1)
|
[
"The content of Range HTTP header.\n\n Return a slice instance.\n\n "
] |
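Illustrative header-to-slice translations implied by the code above (a sketch):

    # Range: bytes=0-499  -> slice(0, 500, 1)     inclusive end becomes exclusive
    # Range: bytes=500-   -> slice(500, None, 1)  open-ended range
    # Range: bytes=-500   -> slice(-500, None, 1) tail of the content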
Please provide a description of the function:
def has_body(self) -> bool:
    warnings.warn(
        "Deprecated, use .can_read_body #2005",
        DeprecationWarning, stacklevel=2)
    return not self._payload.at_eof()
|
[
"Return True if request's HTTP BODY can be read, False otherwise."
] |
Please provide a description of the function:
async def read(self) -> bytes:
    if self._read_bytes is None:
        body = bytearray()
        while True:
            chunk = await self._payload.readany()
            body.extend(chunk)
            if self._client_max_size:
                body_size = len(body)
                if body_size >= self._client_max_size:
                    raise HTTPRequestEntityTooLarge(
                        max_size=self._client_max_size,
                        actual_size=body_size
                    )
            if not chunk:
                break
        self._read_bytes = bytes(body)
    return self._read_bytes
|
[
"Read request body if present.\n\n Returns bytes object with full request content.\n "
] |
Please provide a description of the function:
async def text(self) -> str:
    bytes_body = await self.read()
    encoding = self.charset or 'utf-8'
    return bytes_body.decode(encoding)
|
[
"Return BODY as text using encoding from .charset."
] |
Please provide a description of the function:
async def json(self, *, loads: JSONDecoder=DEFAULT_JSON_DECODER) -> Any:
    body = await self.text()
    return loads(body)
|
[
"Return BODY as JSON."
] |
Please provide a description of the function:
async def post(self) -> 'MultiDictProxy[Union[str, bytes, FileField]]':
    if self._post is not None:
        return self._post
    if self._method not in self.POST_METHODS:
        self._post = MultiDictProxy(MultiDict())
        return self._post

    content_type = self.content_type
    if (content_type not in ('',
                             'application/x-www-form-urlencoded',
                             'multipart/form-data')):
        self._post = MultiDictProxy(MultiDict())
        return self._post

    out = MultiDict()  # type: MultiDict[Union[str, bytes, FileField]]

    if content_type == 'multipart/form-data':
        multipart = await self.multipart()
        max_size = self._client_max_size

        field = await multipart.next()
        while field is not None:
            size = 0
            content_type = field.headers.get(hdrs.CONTENT_TYPE)

            if field.filename:
                # store file in temp file
                tmp = tempfile.TemporaryFile()
                chunk = await field.read_chunk(size=2**16)
                while chunk:
                    chunk = field.decode(chunk)
                    tmp.write(chunk)
                    size += len(chunk)
                    if 0 < max_size < size:
                        raise HTTPRequestEntityTooLarge(
                            max_size=max_size,
                            actual_size=size
                        )
                    chunk = await field.read_chunk(size=2**16)
                tmp.seek(0)

                ff = FileField(field.name, field.filename,
                               cast(io.BufferedReader, tmp),
                               content_type, field.headers)
                out.add(field.name, ff)
            else:
                value = await field.read(decode=True)
                if content_type is None or \
                        content_type.startswith('text/'):
                    charset = field.get_charset(default='utf-8')
                    value = value.decode(charset)
                out.add(field.name, value)
                size += len(value)
                if 0 < max_size < size:
                    raise HTTPRequestEntityTooLarge(
                        max_size=max_size,
                        actual_size=size
                    )

            field = await multipart.next()
    else:
        data = await self.read()
        if data:
            charset = self.charset or 'utf-8'
            out.extend(
                parse_qsl(
                    data.rstrip().decode(charset),
                    keep_blank_values=True,
                    encoding=charset))

    self._post = MultiDictProxy(out)
    return self._post
|
[
"Return POST parameters."
] |
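A minimal handler sketch using this coroutine (handler and field names are assumptions, not from the dataset):

    async def handler(request):
        data = await request.post()   # MultiDictProxy of str/bytes/FileField
        username = data['username']   # text field -> str
        avatar = data['avatar']       # file upload -> FileField
        return web.Response(text='ok')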
Please provide a description of the function:
async def shutdown(self, timeout: Optional[float]=15.0) -> None:
    self._force_close = True

    if self._keepalive_handle is not None:
        self._keepalive_handle.cancel()

    if self._waiter:
        self._waiter.cancel()

    # wait for handlers
    with suppress(asyncio.CancelledError, asyncio.TimeoutError):
        with CeilTimeout(timeout, loop=self._loop):
            if (self._error_handler is not None and
                    not self._error_handler.done()):
                await self._error_handler

            if (self._task_handler is not None and
                    not self._task_handler.done()):
                await self._task_handler

    # force-close non-idle handler
    if self._task_handler is not None:
        self._task_handler.cancel()

    if self.transport is not None:
        self.transport.close()
        self.transport = None
|
[
"Worker process is about to exit, we need cleanup everything and\n stop accepting requests. It is especially important for keep-alive\n connections."
] |
Please provide a description of the function:
def keep_alive(self, val: bool) -> None:
    self._keepalive = val
    if self._keepalive_handle:
        self._keepalive_handle.cancel()
        self._keepalive_handle = None
|
[
"Set keep-alive connection mode.\n\n :param bool val: new state.\n "
] |
Please provide a description of the function:
def close(self) -> None:
    self._close = True
    if self._waiter:
        self._waiter.cancel()
|
[
"Stop accepting new pipelinig messages and close\n connection when handlers done processing messages"
] |
Please provide a description of the function:
def force_close(self) -> None:
    self._force_close = True
    if self._waiter:
        self._waiter.cancel()
    if self.transport is not None:
        self.transport.close()
        self.transport = None
|
[
"Force close connection"
] |
Please provide a description of the function:
async def start(self) -> None:
    loop = self._loop
    handler = self._task_handler
    assert handler is not None
    manager = self._manager
    assert manager is not None
    keepalive_timeout = self._keepalive_timeout
    resp = None
    assert self._request_factory is not None
    assert self._request_handler is not None

    while not self._force_close:
        if not self._messages:
            try:
                # wait for next request
                self._waiter = loop.create_future()
                await self._waiter
            except asyncio.CancelledError:
                break
            finally:
                self._waiter = None

        message, payload = self._messages.popleft()

        if self.access_log:
            now = loop.time()

        manager.requests_count += 1
        writer = StreamWriter(self, loop)
        request = self._request_factory(
            message, payload, self, writer, handler)
        try:
            try:
                # a new task is used for copy context vars (#3406)
                task = self._loop.create_task(
                    self._request_handler(request))
                resp = await task
            except HTTPException as exc:
                resp = Response(status=exc.status,
                                reason=exc.reason,
                                text=exc.text,
                                headers=exc.headers)
            except asyncio.CancelledError:
                self.log_debug('Ignored premature client disconnection')
                break
            except asyncio.TimeoutError as exc:
                self.log_debug('Request handler timed out.', exc_info=exc)
                resp = self.handle_error(request, 504)
            except Exception as exc:
                resp = self.handle_error(request, 500, exc)

            try:
                prepare_meth = resp.prepare
            except AttributeError:
                if resp is None:
                    raise RuntimeError("Missing return "
                                       "statement on request handler")
                else:
                    raise RuntimeError("Web-handler should return "
                                       "a response instance, "
                                       "got {!r}".format(resp))
            await prepare_meth(request)
            await resp.write_eof()

            # notify server about keep-alive
            self._keepalive = bool(resp.keep_alive)

            # log access
            if self.access_log:
                self.log_access(request, resp, loop.time() - now)

            # check payload
            if not payload.is_eof():
                lingering_time = self._lingering_time
                if not self._force_close and lingering_time:
                    self.log_debug(
                        'Start lingering close timer for %s sec.',
                        lingering_time)

                    now = loop.time()
                    end_t = now + lingering_time

                    with suppress(
                            asyncio.TimeoutError, asyncio.CancelledError):
                        while not payload.is_eof() and now < end_t:
                            with CeilTimeout(end_t - now, loop=loop):
                                # read and ignore
                                await payload.readany()
                            now = loop.time()

                # if payload still uncompleted
                if not payload.is_eof() and not self._force_close:
                    self.log_debug('Uncompleted request.')
                    self.close()

                payload.set_exception(PayloadAccessError())

        except asyncio.CancelledError:
            self.log_debug('Ignored premature client disconnection ')
            break
        except RuntimeError as exc:
            if self.debug:
                self.log_exception(
                    'Unhandled runtime exception', exc_info=exc)
            self.force_close()
        except Exception as exc:
            self.log_exception('Unhandled exception', exc_info=exc)
            self.force_close()
        finally:
            if self.transport is None and resp is not None:
                self.log_debug('Ignored premature client disconnection.')
            elif not self._force_close:
                if self._keepalive and not self._close:
                    # start keep-alive timer
                    if keepalive_timeout is not None:
                        now = self._loop.time()
                        self._keepalive_time = now
                        if self._keepalive_handle is None:
                            self._keepalive_handle = loop.call_at(
                                now + keepalive_timeout,
                                self._process_keepalive)
                else:
                    break

    # remove handler, close transport if no handlers left
    if not self._force_close:
        self._task_handler = None
        if self.transport is not None and self._error_handler is None:
            self.transport.close()
|
[
"Process incoming request.\n\n It reads request line, request headers and request payload, then\n calls handle_request() method. Subclass has to override\n handle_request(). start() handles various exceptions in request\n or response handling. Connection is being closed always unless\n keep_alive(True) specified.\n "
] |
Please provide a description of the function:
def handle_error(self,
                 request: BaseRequest,
                 status: int=500,
                 exc: Optional[BaseException]=None,
                 message: Optional[str]=None) -> StreamResponse:
    self.log_exception("Error handling request", exc_info=exc)

    ct = 'text/plain'
    if status == HTTPStatus.INTERNAL_SERVER_ERROR:
        title = '{0.value} {0.phrase}'.format(
            HTTPStatus.INTERNAL_SERVER_ERROR
        )
        msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
        tb = None
        if self.debug:
            with suppress(Exception):
                tb = traceback.format_exc()

        if 'text/html' in request.headers.get('Accept', ''):
            if tb:
                tb = html_escape(tb)
                msg = '<h2>Traceback:</h2>\n<pre>{}</pre>'.format(tb)
            message = (
                "<html><head>"
                "<title>{title}</title>"
                "</head><body>\n<h1>{title}</h1>"
                "\n{msg}\n</body></html>\n"
            ).format(title=title, msg=msg)
            ct = 'text/html'
        else:
            if tb:
                msg = tb
            message = title + '\n\n' + msg

    resp = Response(status=status, text=message, content_type=ct)
    resp.force_close()

    # some data already got sent, connection is broken
    if request.writer.output_size > 0 or self.transport is None:
        self.force_close()

    return resp
|
[
"Handle errors.\n\n Returns HTTP response with specific status code. Logs additional\n information. It always closes current connection."
] |
Please provide a description of the function:
def run_app(app: Union[Application, Awaitable[Application]], *,
            host: Optional[str]=None,
            port: Optional[int]=None,
            path: Optional[str]=None,
            sock: Optional[socket.socket]=None,
            shutdown_timeout: float=60.0,
            ssl_context: Optional[SSLContext]=None,
            print: Optional[Callable[..., None]]=print,
            backlog: int=128,
            access_log_class: Type[AbstractAccessLogger]=AccessLogger,
            access_log_format: str=AccessLogger.LOG_FORMAT,
            access_log: Optional[logging.Logger]=access_logger,
            handle_signals: bool=True,
            reuse_address: Optional[bool]=None,
            reuse_port: Optional[bool]=None) -> None:
    loop = asyncio.get_event_loop()

    # Configure if and only if in debugging mode and using the default logger
    if loop.get_debug() and access_log and access_log.name == 'aiohttp.access':
        if access_log.level == logging.NOTSET:
            access_log.setLevel(logging.DEBUG)
        if not access_log.hasHandlers():
            access_log.addHandler(logging.StreamHandler())

    try:
        loop.run_until_complete(_run_app(app,
                                         host=host,
                                         port=port,
                                         path=path,
                                         sock=sock,
                                         shutdown_timeout=shutdown_timeout,
                                         ssl_context=ssl_context,
                                         print=print,
                                         backlog=backlog,
                                         access_log_class=access_log_class,
                                         access_log_format=access_log_format,
                                         access_log=access_log,
                                         handle_signals=handle_signals,
                                         reuse_address=reuse_address,
                                         reuse_port=reuse_port))
    except (GracefulExit, KeyboardInterrupt):  # pragma: no cover
        pass
    finally:
        _cancel_all_tasks(loop)
        if sys.version_info >= (3, 6):  # don't use PY_36 to pass mypy
            loop.run_until_complete(loop.shutdown_asyncgens())
        loop.close()
|
[
"Run an app locally"
] |
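A minimal sketch of running an application with this helper, assuming the usual `from aiohttp import web` import:

    from aiohttp import web

    async def hello(request):
        return web.Response(text='Hello, world')

    app = web.Application()
    app.router.add_get('/', hello)
    web.run_app(app, host='127.0.0.1', port=8080)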
Please provide a description of the function:
def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
    return AsyncStreamIterator(lambda: self.read(n))
|
[
"Returns an asynchronous iterator that yields chunks of size n.\n\n Python-3.5 available for Python 3.5+ only\n "
] |
Please provide a description of the function:
def unread_data(self, data: bytes) -> None:
    warnings.warn("unread_data() is deprecated "
                  "and will be removed in future releases (#3260)",
                  DeprecationWarning,
                  stacklevel=2)
    if not data:
        return

    if self._buffer_offset:
        self._buffer[0] = self._buffer[0][self._buffer_offset:]
        self._buffer_offset = 0
    self._size += len(data)
    self._cursor -= len(data)
    self._buffer.appendleft(data)
    self._eof_counter = 0
|
[
" rollback reading some data from stream, inserting it to buffer head.\n "
] |
Please provide a description of the function:
async def readchunk(self) -> Tuple[bytes, bool]:
    while True:
        if self._exception is not None:
            raise self._exception

        while self._http_chunk_splits:
            pos = self._http_chunk_splits.pop(0)
            if pos == self._cursor:
                return (b"", True)
            if pos > self._cursor:
                return (self._read_nowait(pos-self._cursor), True)
            internal_logger.warning('Skipping HTTP chunk end due to data '
                                    'consumption beyond chunk boundary')

        if self._buffer:
            return (self._read_nowait_chunk(-1), False)
            # return (self._read_nowait(-1), False)

        if self._eof:
            # Special case for signifying EOF.
            # (b'', True) is not a final return value actually.
            return (b'', False)

        await self._wait('readchunk')
|
[
"Returns a tuple of (data, end_of_http_chunk). When chunked transfer\n encoding is used, end_of_http_chunk is a boolean indicating if the end\n of the data corresponds to the end of a HTTP chunk , otherwise it is\n always False.\n "
] |
Please provide a description of the function:
def _read_nowait(self, n: int) -> bytes:
    chunks = []

    while self._buffer:
        chunk = self._read_nowait_chunk(n)
        chunks.append(chunk)
        if n != -1:
            n -= len(chunk)
            if n == 0:
                break

    return b''.join(chunks) if chunks else b''
|
[
" Read not more than n bytes, or whole buffer is n == -1 "
] |
Please provide a description of the function:
async def send(self, *args, **kwargs):
    if not self.frozen:
        raise RuntimeError("Cannot send non-frozen signal.")

    for receiver in self:
        await receiver(*args, **kwargs)
|
[
"\n Sends data to all registered receivers.\n "
] |
Please provide a description of the function:
def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]:
    # list of (key, method) tuples, we don't use an OrderedDict as users
    # can repeat the same key more than once
    methods = list()

    for atom in self.FORMAT_RE.findall(log_format):
        if atom[1] == '':
            format_key1 = self.LOG_FORMAT_MAP[atom[0]]
            m = getattr(AccessLogger, '_format_%s' % atom[0])
            key_method = KeyMethod(format_key1, m)
        else:
            format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1])
            m = getattr(AccessLogger, '_format_%s' % atom[2])
            key_method = KeyMethod(format_key2,
                                   functools.partial(m, atom[1]))

        methods.append(key_method)

    log_format = self.FORMAT_RE.sub(r'%s', log_format)
    log_format = self.CLEANUP_RE.sub(r'%\1', log_format)
    return log_format, methods
|
[
"Translate log_format into form usable by modulo formatting\n\n All known atoms will be replaced with %s\n Also methods for formatting of those atoms will be added to\n _methods in appropriate order\n\n For example we have log_format = \"%a %t\"\n This format will be translated to \"%s %s\"\n Also contents of _methods will be\n [self._format_a, self._format_t]\n These method will be called and results will be passed\n to translated string format.\n\n Each _format_* method receive 'args' which is list of arguments\n given to self.log\n\n Exceptions are _format_e, _format_i and _format_o methods which\n also receive key name (by functools.partial)\n\n "
] |
Please provide a description of the function:
def normalize_path_middleware(
        *, append_slash: bool=True, remove_slash: bool=False,
        merge_slashes: bool=True,
        redirect_class: Type[HTTPMove]=HTTPMovedPermanently) -> _Middleware:
    correct_configuration = not (append_slash and remove_slash)
    assert correct_configuration, "Cannot both remove and append slash"

    @middleware
    async def impl(request: Request, handler: _Handler) -> StreamResponse:
        if isinstance(request.match_info.route, SystemRoute):
            paths_to_check = []
            if '?' in request.raw_path:
                path, query = request.raw_path.split('?', 1)
                query = '?' + query
            else:
                query = ''
                path = request.raw_path

            if merge_slashes:
                paths_to_check.append(re.sub('//+', '/', path))
            if append_slash and not request.path.endswith('/'):
                paths_to_check.append(path + '/')
            if remove_slash and request.path.endswith('/'):
                paths_to_check.append(path[:-1])
            if merge_slashes and append_slash:
                paths_to_check.append(
                    re.sub('//+', '/', path + '/'))
            if merge_slashes and remove_slash and path.endswith('/'):
                merged_slashes = re.sub('//+', '/', path)
                paths_to_check.append(merged_slashes[:-1])

            for path in paths_to_check:
                resolves, request = await _check_request_resolves(
                    request, path)
                if resolves:
                    raise redirect_class(request.raw_path + query)

        return await handler(request)

    return impl
|
[
"\n Middleware factory which produces a middleware that normalizes\n the path of a request. By normalizing it means:\n\n - Add or remove a trailing slash to the path.\n - Double slashes are replaced by one.\n\n The middleware returns as soon as it finds a path that resolves\n correctly. The order if both merge and append/remove are enabled is\n 1) merge slashes\n 2) append/remove slash\n 3) both merge slashes and append/remove slash.\n If the path resolves with at least one of those conditions, it will\n redirect to the new path.\n\n Only one of `append_slash` and `remove_slash` can be enabled. If both\n are `True` the factory will raise an assertion error\n\n If `append_slash` is `True` the middleware will append a slash when\n needed. If a resource is defined with trailing slash and the request\n comes without it, it will append it automatically.\n\n If `remove_slash` is `True`, `append_slash` must be `False`. When enabled\n the middleware will remove trailing slashes and redirect if the resource\n is defined\n\n If merge_slashes is True, merge multiple consecutive slashes in the\n path into one.\n "
] |
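A short wiring sketch, assuming the factory is exposed as web.normalize_path_middleware:

    app = web.Application(
        middlewares=[web.normalize_path_middleware(append_slash=True)])
    # a request for /users can now redirect to /users/ when only /users/ resolves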
Please provide a description of the function:
def _gen_form_data(self) -> multipart.MultipartWriter:
    for dispparams, headers, value in self._fields:
        try:
            if hdrs.CONTENT_TYPE in headers:
                part = payload.get_payload(
                    value, content_type=headers[hdrs.CONTENT_TYPE],
                    headers=headers, encoding=self._charset)
            else:
                part = payload.get_payload(
                    value, headers=headers, encoding=self._charset)
        except Exception as exc:
            raise TypeError(
                'Can not serialize value type: %r\n '
                'headers: %r\n value: %r' % (
                    type(value), headers, value)) from exc

        if dispparams:
            part.set_content_disposition(
                'form-data', quote_fields=self._quote_fields, **dispparams
            )
            # FIXME cgi.FieldStorage doesn't like body parts with
            # Content-Length which were sent via chunked transfer encoding
            assert part.headers is not None
            part.headers.popall(hdrs.CONTENT_LENGTH, None)

        self._writer.append_payload(part)

    return self._writer
|
[
"Encode a list of fields using the multipart/form-data MIME format"
] |
Please provide a description of the function:
async def write(self, chunk: bytes,
                *, drain: bool=True, LIMIT: int=0x10000) -> None:
    if self._on_chunk_sent is not None:
        await self._on_chunk_sent(chunk)

    if self._compress is not None:
        chunk = self._compress.compress(chunk)
        if not chunk:
            return

    if self.length is not None:
        chunk_len = len(chunk)
        if self.length >= chunk_len:
            self.length = self.length - chunk_len
        else:
            chunk = chunk[:self.length]
            self.length = 0
            if not chunk:
                return

    if chunk:
        if self.chunked:
            chunk_len_pre = ('%x\r\n' % len(chunk)).encode('ascii')
            chunk = chunk_len_pre + chunk + b'\r\n'

        self._write(chunk)

        if self.buffer_size > LIMIT and drain:
            self.buffer_size = 0
            await self.drain()
|
[
"Writes chunk of data to a stream.\n\n write_eof() indicates end of stream.\n writer can't be used after write_eof() method being called.\n write() return drain future.\n "
] |
Please provide a description of the function:
async def write_headers(self, status_line: str,
                        headers: 'CIMultiDict[str]') -> None:
    # status + headers
    buf = _serialize_headers(status_line, headers)
    self._write(buf)
|
[
"Write request/response status and headers."
] |
Please provide a description of the function:
def netrc_from_env() -> Optional[netrc.netrc]:
    netrc_env = os.environ.get('NETRC')

    if netrc_env is not None:
        netrc_path = Path(netrc_env)
    else:
        try:
            home_dir = Path.home()
        except RuntimeError as e:  # pragma: no cover
            # if pathlib can't resolve home, it may raise a RuntimeError
            client_logger.debug('Could not resolve home directory when '
                                'trying to look for .netrc file: %s', e)
            return None

        netrc_path = home_dir / (
            '_netrc' if platform.system() == 'Windows' else '.netrc')

    try:
        return netrc.netrc(str(netrc_path))
    except netrc.NetrcParseError as e:
        client_logger.warning('Could not parse .netrc file: %s', e)
    except OSError as e:
        # we couldn't read the file (doesn't exist, permissions, etc.)
        if netrc_env or netrc_path.is_file():
            # only warn if the environment wanted us to load it,
            # or it appears like the default file does actually exist
            client_logger.warning('Could not read .netrc file: %s', e)

    return None
|
[
"Attempt to load the netrc file from the path specified by the env-var\n NETRC or in the default location in the user's home directory.\n\n Returns None if it couldn't be found or fails to parse.\n "
] |
Please provide a description of the function:
def parse_mimetype(mimetype: str) -> MimeType:
    if not mimetype:
        return MimeType(type='', subtype='', suffix='',
                        parameters=MultiDictProxy(MultiDict()))

    parts = mimetype.split(';')
    params = MultiDict()  # type: MultiDict[str]
    for item in parts[1:]:
        if not item:
            continue
        key, value = cast(Tuple[str, str],
                          item.split('=', 1) if '=' in item else (item, ''))
        params.add(key.lower().strip(), value.strip(' "'))

    fulltype = parts[0].strip().lower()
    if fulltype == '*':
        fulltype = '*/*'

    mtype, stype = (cast(Tuple[str, str], fulltype.split('/', 1))
                    if '/' in fulltype else (fulltype, ''))
    stype, suffix = (cast(Tuple[str, str], stype.split('+', 1))
                     if '+' in stype else (stype, ''))

    return MimeType(type=mtype, subtype=stype, suffix=suffix,
                    parameters=MultiDictProxy(params))
|
[
"Parses a MIME type into its components.\n\n mimetype is a MIME type string.\n\n Returns a MimeType object.\n\n Example:\n\n >>> parse_mimetype('text/html; charset=utf-8')\n MimeType(type='text', subtype='html', suffix='',\n parameters={'charset': 'utf-8'})\n\n "
] |
Please provide a description of the function:
def content_disposition_header(disptype: str,
                               quote_fields: bool=True,
                               **params: str) -> str:
    if not disptype or not (TOKEN > set(disptype)):
        raise ValueError('bad content disposition type {!r}'
                         ''.format(disptype))

    value = disptype
    if params:
        lparams = []
        for key, val in params.items():
            if not key or not (TOKEN > set(key)):
                raise ValueError('bad content disposition parameter'
                                 ' {!r}={!r}'.format(key, val))
            qval = quote(val, '') if quote_fields else val
            lparams.append((key, '"%s"' % qval))
            if key == 'filename':
                lparams.append(('filename*', "utf-8''" + qval))
        sparams = '; '.join('='.join(pair) for pair in lparams)
        value = '; '.join((value, sparams))
    return value
|
[
"Sets ``Content-Disposition`` header.\n\n disptype is a disposition type: inline, attachment, form-data.\n Should be valid extension token (see RFC 2183)\n\n params is a dict with disposition params.\n "
] |
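An illustrative call (output reconstructed from the logic above):

    content_disposition_header('attachment', filename='report.pdf')
    # -> 'attachment; filename="report.pdf"; filename*=utf-8\'\'report.pdf'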
Please provide a description of the function:
def decode(cls, auth_header: str, encoding: str='latin1') -> 'BasicAuth':
    try:
        auth_type, encoded_credentials = auth_header.split(' ', 1)
    except ValueError:
        raise ValueError('Could not parse authorization header.')

    if auth_type.lower() != 'basic':
        raise ValueError('Unknown authorization method %s' % auth_type)

    try:
        decoded = base64.b64decode(
            encoded_credentials.encode('ascii'), validate=True
        ).decode(encoding)
    except binascii.Error:
        raise ValueError('Invalid base64 encoding.')

    try:
        # RFC 2617 HTTP Authentication
        # https://www.ietf.org/rfc/rfc2617.txt
        # the colon must be present, but the username and password may be
        # otherwise blank.
        username, password = decoded.split(':', 1)
    except ValueError:
        raise ValueError('Invalid credentials.')

    return cls(username, password, encoding=encoding)
|
[
"Create a BasicAuth object from an Authorization HTTP header."
] |
Please provide a description of the function:
def from_url(cls, url: URL,
             *, encoding: str='latin1') -> Optional['BasicAuth']:
    if not isinstance(url, URL):
        raise TypeError("url should be yarl.URL instance")
    if url.user is None:
        return None
    return cls(url.user, url.password or '', encoding=encoding)
|
[
"Create BasicAuth from url."
] |
Please provide a description of the function:
def encode(self) -> str:
    creds = ('%s:%s' % (self.login, self.password)).encode(self.encoding)
    return 'Basic %s' % base64.b64encode(creds).decode(self.encoding)
|
[
"Encode credentials."
] |
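A round-trip sketch tying encode() to the decode() classmethod above (values illustrative):

    auth = BasicAuth('user', 'pass')
    header = auth.encode()        # 'Basic dXNlcjpwYXNz'
    BasicAuth.decode(header)      # BasicAuth(login='user', password='pass', ...)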
Please provide a description of the function:
def content_type(self) -> str:
    raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore
    if self._stored_content_type != raw:
        self._parse_content_type(raw)
    return self._content_type
|
[
"The value of content part for Content-Type HTTP header."
] |
Please provide a description of the function:
def charset(self) -> Optional[str]:
    raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore
    if self._stored_content_type != raw:
        self._parse_content_type(raw)
    return self._content_dict.get('charset')
|
[
"The value of charset part for Content-Type HTTP header."
] |
Please provide a description of the function:
def content_length(self) -> Optional[int]:
    content_length = self._headers.get(hdrs.CONTENT_LENGTH)  # type: ignore
    if content_length is not None:
        return int(content_length)
    else:
        return None
|
[
"The value of Content-Length HTTP header."
] |
Please provide a description of the function:
def request(
        method: str,
        url: StrOrURL, *,
        params: Optional[Mapping[str, str]]=None,
        data: Any=None,
        json: Any=None,
        headers: LooseHeaders=None,
        skip_auto_headers: Optional[Iterable[str]]=None,
        auth: Optional[BasicAuth]=None,
        allow_redirects: bool=True,
        max_redirects: int=10,
        compress: Optional[str]=None,
        chunked: Optional[bool]=None,
        expect100: bool=False,
        raise_for_status: Optional[bool]=None,
        read_until_eof: bool=True,
        proxy: Optional[StrOrURL]=None,
        proxy_auth: Optional[BasicAuth]=None,
        timeout: Union[ClientTimeout, object]=sentinel,
        cookies: Optional[LooseCookies]=None,
        version: HttpVersion=http.HttpVersion11,
        connector: Optional[BaseConnector]=None,
        loop: Optional[asyncio.AbstractEventLoop]=None
) -> _SessionRequestContextManager:
    connector_owner = False
    if connector is None:
        connector_owner = True
        connector = TCPConnector(loop=loop, force_close=True)

    session = ClientSession(
        loop=loop, cookies=cookies, version=version, timeout=timeout,
        connector=connector, connector_owner=connector_owner)

    return _SessionRequestContextManager(
        session._request(method, url,
                         params=params,
                         data=data,
                         json=json,
                         headers=headers,
                         skip_auto_headers=skip_auto_headers,
                         auth=auth,
                         allow_redirects=allow_redirects,
                         max_redirects=max_redirects,
                         compress=compress,
                         chunked=chunked,
                         expect100=expect100,
                         raise_for_status=raise_for_status,
                         read_until_eof=read_until_eof,
                         proxy=proxy,
                         proxy_auth=proxy_auth,),
        session)
|
[
"Constructs and sends a request. Returns response object.\n method - HTTP method\n url - request url\n params - (optional) Dictionary or bytes to be sent in the query\n string of the new request\n data - (optional) Dictionary, bytes, or file-like object to\n send in the body of the request\n json - (optional) Any json compatible python object\n headers - (optional) Dictionary of HTTP Headers to send with\n the request\n cookies - (optional) Dict object to send with the request\n auth - (optional) BasicAuth named tuple represent HTTP Basic Auth\n auth - aiohttp.helpers.BasicAuth\n allow_redirects - (optional) If set to False, do not follow\n redirects\n version - Request HTTP version.\n compress - Set to True if request has to be compressed\n with deflate encoding.\n chunked - Set to chunk size for chunked transfer encoding.\n expect100 - Expect 100-continue response from server.\n connector - BaseConnector sub-class instance to support\n connection pooling.\n read_until_eof - Read response until eof if response\n does not have Content-Length header.\n loop - Optional event loop.\n timeout - Optional ClientTimeout settings structure, 5min\n total timeout by default.\n Usage::\n >>> import aiohttp\n >>> resp = await aiohttp.request('GET', 'http://python.org/')\n >>> resp\n <ClientResponse(python.org/) [200]>\n >>> data = await resp.read()\n "
] |
Please provide a description of the function:
def request(self,
            method: str,
            url: StrOrURL,
            **kwargs: Any) -> '_RequestContextManager':
    return _RequestContextManager(self._request(method, url, **kwargs))
|
[
"Perform HTTP request."
] |
Please provide a description of the function:
def ws_connect(
        self,
        url: StrOrURL, *,
        method: str=hdrs.METH_GET,
        protocols: Iterable[str]=(),
        timeout: float=10.0,
        receive_timeout: Optional[float]=None,
        autoclose: bool=True,
        autoping: bool=True,
        heartbeat: Optional[float]=None,
        auth: Optional[BasicAuth]=None,
        origin: Optional[str]=None,
        headers: Optional[LooseHeaders]=None,
        proxy: Optional[StrOrURL]=None,
        proxy_auth: Optional[BasicAuth]=None,
        ssl: Union[SSLContext, bool, None, Fingerprint]=None,
        proxy_headers: Optional[LooseHeaders]=None,
        compress: int=0,
        max_msg_size: int=4*1024*1024) -> '_WSRequestContextManager':
    return _WSRequestContextManager(
        self._ws_connect(url,
                         method=method,
                         protocols=protocols,
                         timeout=timeout,
                         receive_timeout=receive_timeout,
                         autoclose=autoclose,
                         autoping=autoping,
                         heartbeat=heartbeat,
                         auth=auth,
                         origin=origin,
                         headers=headers,
                         proxy=proxy,
                         proxy_auth=proxy_auth,
                         ssl=ssl,
                         proxy_headers=proxy_headers,
                         compress=compress,
                         max_msg_size=max_msg_size))
|
[
"Initiate websocket connection."
] |
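A minimal client sketch (URL and message are illustrative):

    async with session.ws_connect('http://example.org/ws') as ws:
        await ws.send_str('hello')
        msg = await ws.receive()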
Please provide a description of the function:
def _prepare_headers(
        self,
        headers: Optional[LooseHeaders]) -> 'CIMultiDict[str]':
    # Convert headers to MultiDict
    result = CIMultiDict(self._default_headers)
    if headers:
        if not isinstance(headers, (MultiDictProxy, MultiDict)):
            headers = CIMultiDict(headers)
        added_names = set()  # type: Set[str]
        for key, value in headers.items():
            if key in added_names:
                result.add(key, value)
            else:
                result[key] = value
                added_names.add(key)
    return result
|
[
" Add default headers and transform it to CIMultiDict\n "
] |
Please provide a description of the function:
def get(self, url: StrOrURL, *, allow_redirects: bool=True,
        **kwargs: Any) -> '_RequestContextManager':
    return _RequestContextManager(
        self._request(hdrs.METH_GET, url,
                      allow_redirects=allow_redirects,
                      **kwargs))
|
[
"Perform HTTP GET request."
] |
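A usage sketch for the GET helper (URL is illustrative):

    async with aiohttp.ClientSession() as session:
        async with session.get('http://httpbin.org/get') as resp:
            print(resp.status)
            body = await resp.text()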
Please provide a description of the function:
def options(self, url: StrOrURL, *, allow_redirects: bool=True,
            **kwargs: Any) -> '_RequestContextManager':
    return _RequestContextManager(
        self._request(hdrs.METH_OPTIONS, url,
                      allow_redirects=allow_redirects,
                      **kwargs))
|
[
"Perform HTTP OPTIONS request."
] |
Please provide a description of the function:
def head(self, url: StrOrURL, *, allow_redirects: bool=False,
         **kwargs: Any) -> '_RequestContextManager':
    return _RequestContextManager(
        self._request(hdrs.METH_HEAD, url,
                      allow_redirects=allow_redirects,
                      **kwargs))
|
[
"Perform HTTP HEAD request."
] |
Please provide a description of the function:
def post(self, url: StrOrURL,
         *, data: Any=None, **kwargs: Any) -> '_RequestContextManager':
    return _RequestContextManager(
        self._request(hdrs.METH_POST, url,
                      data=data,
                      **kwargs))
|
[
"Perform HTTP POST request."
] |
Please provide a description of the function:
def put(self, url: StrOrURL,
        *, data: Any=None, **kwargs: Any) -> '_RequestContextManager':
    return _RequestContextManager(
        self._request(hdrs.METH_PUT, url,
                      data=data,
                      **kwargs))
|
[
"Perform HTTP PUT request."
] |
Please provide a description of the function:
def patch(self, url: StrOrURL,
          *, data: Any=None, **kwargs: Any) -> '_RequestContextManager':
    return _RequestContextManager(
        self._request(hdrs.METH_PATCH, url,
                      data=data,
                      **kwargs))
|
[
"Perform HTTP PATCH request."
] |
Please provide a description of the function:
def delete(self, url: StrOrURL, **kwargs: Any) -> '_RequestContextManager':
    return _RequestContextManager(
        self._request(hdrs.METH_DELETE, url,
                      **kwargs))
|
[
"Perform HTTP DELETE request."
] |
Please provide a description of the function:
async def close(self) -> None:
    if not self.closed:
        if self._connector is not None and self._connector_owner:
            await self._connector.close()
        self._connector = None
|
[
"Close underlying connector.\n\n Release all acquired resources.\n "
] |
Please provide a description of the function:
def requote_redirect_url(self, val: bool) -> None:
    warnings.warn("session.requote_redirect_url modification "
                  "is deprecated #2778",
                  DeprecationWarning,
                  stacklevel=2)
    self._requote_redirect_url = val
|
[
"Do URL requoting on redirection handling."
] |
Please provide a description of the function:
async def resolve(self, host: str,
                  port: int, family: int) -> List[Dict[str, Any]]:
|
[
"Return IP address for given hostname"
] |
Please provide a description of the function:
async def next(self) -> Any:
    item = await self.stream.next()
    if self.stream.at_eof():
        await self.release()
    return item
|
[
"Emits next multipart reader object."
] |
Please provide a description of the function:
async def read(self, *, decode: bool=False) -> Any:
    if self._at_eof:
        return b''
    data = bytearray()
    while not self._at_eof:
        data.extend((await self.read_chunk(self.chunk_size)))
    if decode:
        return self.decode(data)
    return data
|
[
"Reads body part data.\n\n decode: Decodes data following by encoding\n method from Content-Encoding header. If it missed\n data remains untouched\n "
] |
Please provide a description of the function:
async def read_chunk(self, size: int=chunk_size) -> bytes:
    if self._at_eof:
        return b''
    if self._length:
        chunk = await self._read_chunk_from_length(size)
    else:
        chunk = await self._read_chunk_from_stream(size)

    self._read_bytes += len(chunk)
    if self._read_bytes == self._length:
        self._at_eof = True
    if self._at_eof:
        newline = await self._content.readline()
        assert newline == self._newline, \
            'reader did not read all the data or it is malformed'
    return chunk
|
[
"Reads body part content chunk of the specified size.\n\n size: chunk size\n "
] |
Please provide a description of the function:
async def readline(self) -> bytes:
    if self._at_eof:
        return b''

    if self._unread:
        line = self._unread.popleft()
    else:
        line = await self._content.readline()

    if line.startswith(self._boundary):
        # the very last boundary may not come with \r\n,
        # so set single rules for everyone
        sline = line.rstrip(b'\r\n')
        boundary = self._boundary
        last_boundary = self._boundary + b'--'
        # ensure that we read exactly the boundary, not something alike
        if sline == boundary or sline == last_boundary:
            self._at_eof = True
            self._unread.append(line)
            return b''
    else:
        next_line = await self._content.readline()
        if next_line.startswith(self._boundary):
            # strip newline but only once
            line = line[:-len(self._newline)]
        self._unread.append(next_line)

    return line
|
[
"Reads body part by line by line."
] |
Please provide a description of the function:
async def release(self) -> None:
    if self._at_eof:
        return
    while not self._at_eof:
        await self.read_chunk(self.chunk_size)
|
[
"Like read(), but reads all the data to the void."
] |
Please provide a description of the function:
async def text(self, *, encoding: Optional[str]=None) -> str:
    data = await self.read(decode=True)
    # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm  # NOQA
    # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send  # NOQA
    encoding = encoding or self.get_charset(default='utf-8')
    return data.decode(encoding)
|
[
"Like read(), but assumes that body part contains text data."
] |
Please provide a description of the function:
async def json(self, *, encoding: Optional[str]=None) -> Any:
    data = await self.read(decode=True)
    if not data:
        return None
    encoding = encoding or self.get_charset(default='utf-8')
    return json.loads(data.decode(encoding))
|
[
"Like read(), but assumes that body parts contains JSON data."
] |
Please provide a description of the function:
async def form(self, *,
               encoding: Optional[str]=None) -> List[Tuple[str, str]]:
    data = await self.read(decode=True)
    if not data:
        return []
    if encoding is not None:
        real_encoding = encoding
    else:
        real_encoding = self.get_charset(default='utf-8')
    return parse_qsl(data.rstrip().decode(real_encoding),
                     keep_blank_values=True,
                     encoding=real_encoding)
|
[
"Like read(), but assumes that body parts contains form\n urlencoded data.\n "
] |
Please provide a description of the function:
def decode(self, data: bytes) -> bytes:
    if CONTENT_TRANSFER_ENCODING in self.headers:
        data = self._decode_content_transfer(data)
    if CONTENT_ENCODING in self.headers:
        return self._decode_content(data)
    return data
|
[
"Decodes data according the specified Content-Encoding\n or Content-Transfer-Encoding headers value.\n "
] |
Please provide a description of the function:
def get_charset(self, default: str) -> str:
    ctype = self.headers.get(CONTENT_TYPE, '')
    mimetype = parse_mimetype(ctype)
    return mimetype.parameters.get('charset', default)
|
[
"Returns charset parameter from Content-Type header or default."
] |
Please provide a description of the function:
def name(self) -> Optional[str]:
    _, params = parse_content_disposition(
        self.headers.get(CONTENT_DISPOSITION))
    return content_disposition_filename(params, 'name')
|
[
"Returns name specified in Content-Disposition header or None\n if missed or header is malformed.\n "
] |
Please provide a description of the function:
def from_response(cls, response: 'ClientResponse') -> Any:
    obj = cls.response_wrapper_cls(response, cls(response.headers,
                                                 response.content))
    return obj
|
[
"Constructs reader instance from HTTP response.\n\n :param response: :class:`~aiohttp.client.ClientResponse` instance\n "
] |
Please provide a description of the function:
async def next(self) -> Any:
    # So, if we're at BOF, we need to skip till the boundary.
    if self._at_eof:
        return
    await self._maybe_release_last_part()
    if self._at_bof:
        await self._read_until_first_boundary()
        self._at_bof = False
    else:
        await self._read_boundary()
    if self._at_eof:  # we just read the last boundary, nothing to do there
        return
    self._last_part = await self.fetch_next_part()
    return self._last_part
|
[
"Emits the next multipart body part."
] |
Please provide a description of the function:
async def release(self) -> None:
    while not self._at_eof:
        item = await self.next()
        if item is None:
            break
        await item.release()
|
[
"Reads all the body parts to the void till the final boundary."
] |
Please provide a description of the function:
def _get_part_reader(self, headers: 'CIMultiDictProxy[str]') -> Any:
    ctype = headers.get(CONTENT_TYPE, '')
    mimetype = parse_mimetype(ctype)

    if mimetype.type == 'multipart':
        if self.multipart_reader_cls is None:
            return type(self)(headers, self._content)
        return self.multipart_reader_cls(
            headers, self._content, _newline=self._newline
        )
    else:
        return self.part_reader_cls(
            self._boundary, headers, self._content, _newline=self._newline
        )
|
[
"Dispatches the response by the `Content-Type` header, returning\n suitable reader instance.\n\n :param dict headers: Response headers\n "
] |
Please provide a description of the function:
async def _maybe_release_last_part(self) -> None:
    if self._last_part is not None:
        if not self._last_part.at_eof():
            await self._last_part.release()
        self._unread.extend(self._last_part._unread)
        self._last_part = None
|
[
"Ensures that the last read body part is read completely."
] |
Please provide a description of the function:
def _boundary_value(self) -> str:
    # Refer to RFCs 7231, 7230, 5234.
    #
    # parameter      = token "=" ( token / quoted-string )
    # token          = 1*tchar
    # quoted-string  = DQUOTE *( qdtext / quoted-pair ) DQUOTE
    # qdtext         = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
    # obs-text       = %x80-FF
    # quoted-pair    = "\" ( HTAB / SP / VCHAR / obs-text )
    # tchar          = "!" / "#" / "$" / "%" / "&" / "'" / "*"
    #                / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
    #                / DIGIT / ALPHA
    #                ; any VCHAR, except delimiters
    # VCHAR          = %x21-7E
    value = self._boundary
    if re.match(self._valid_tchar_regex, value):
        return value.decode('ascii')  # cannot fail

    if re.search(self._invalid_qdtext_char_regex, value):
        raise ValueError("boundary value contains invalid characters")

    # escape %x5C and %x22
    quoted_value_content = value.replace(b'\\', b'\\\\')
    quoted_value_content = quoted_value_content.replace(b'"', b'\\"')

    return '"' + quoted_value_content.decode('ascii') + '"'
|
[
"Wrap boundary parameter value in quotes, if necessary.\n\n Reads self.boundary and returns a unicode sting.\n "
] |
Please provide a description of the function:
def append_payload(self, payload: Payload) -> Payload:
    # compression
    encoding = payload.headers.get(CONTENT_ENCODING, '').lower()  # type: Optional[str]  # noqa
    if encoding and encoding not in ('deflate', 'gzip', 'identity'):
        raise RuntimeError('unknown content encoding: {}'.format(encoding))
    if encoding == 'identity':
        encoding = None

    # te encoding
    te_encoding = payload.headers.get(
        CONTENT_TRANSFER_ENCODING, '').lower()  # type: Optional[str]  # noqa
    if te_encoding not in ('', 'base64', 'quoted-printable', 'binary'):
        raise RuntimeError('unknown content transfer encoding: {}'
                           ''.format(te_encoding))
    if te_encoding == 'binary':
        te_encoding = None

    # size
    size = payload.size
    if size is not None and not (encoding or te_encoding):
        payload.headers[CONTENT_LENGTH] = str(size)

    self._parts.append((payload, encoding, te_encoding))  # type: ignore
    return payload
|
[
"Adds a new body part to multipart writer."
] |
Please provide a description of the function:
def append_json(
        self,
        obj: Any,
        headers: Optional['MultiMapping[str]']=None
) -> Payload:
    if headers is None:
        headers = CIMultiDict()

    return self.append_payload(JsonPayload(obj, headers=headers))
|
[
"Helper to append JSON part."
] |
Please provide a description of the function:
def append_form(
        self,
        obj: Union[Sequence[Tuple[str, str]],
                   Mapping[str, str]],
        headers: Optional['MultiMapping[str]']=None
) -> Payload:
    assert isinstance(obj, (Sequence, Mapping))

    if headers is None:
        headers = CIMultiDict()

    if isinstance(obj, Mapping):
        obj = list(obj.items())
    data = urlencode(obj, doseq=True)

    return self.append_payload(
        StringPayload(data, headers=headers,
                      content_type='application/x-www-form-urlencoded'))
|
[
"Helper to append form urlencoded part."
] |
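A combined sketch of the append helpers, assuming the writer is constructed as aiohttp.MultipartWriter:

    with aiohttp.MultipartWriter('form-data') as mpwriter:
        mpwriter.append('plain text')
        mpwriter.append_json({'key': 'value'})
        mpwriter.append_form([('field', 'value')])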
Please provide a description of the function:
def size(self) -> Optional[int]:
    if not self._parts:
        return 0

    total = 0
    for part, encoding, te_encoding in self._parts:
        if encoding or te_encoding or part.size is None:
            return None

        total += int(
            2 + len(self._boundary) + 2 +  # b'--'+self._boundary+b'\r\n'
            part.size + len(part._binary_headers) +
            2  # b'\r\n'
        )

    total += 2 + len(self._boundary) + 4  # b'--'+self._boundary+b'--\r\n'
    return total
|
[
"Size of the payload."
] |
Please provide a description of the function:
async def write(self, writer: Any,
                close_boundary: bool=True) -> None:
    if not self._parts:
        return

    for part, encoding, te_encoding in self._parts:
        await writer.write(b'--' + self._boundary + b'\r\n')
        await writer.write(part._binary_headers)

        if encoding or te_encoding:
            w = MultipartPayloadWriter(writer)
            if encoding:
                w.enable_compression(encoding)
            if te_encoding:
                w.enable_encoding(te_encoding)
            await part.write(w)  # type: ignore
            await w.write_eof()
        else:
            await part.write(writer)

        await writer.write(b'\r\n')

    if close_boundary:
        await writer.write(b'--' + self._boundary + b'--\r\n')
|
[
"Write body."
] |
Please provide a description of the function:
def update_host(self, url: URL) -> None:
    # get host/port
    if not url.host:
        raise InvalidURL(url)

    # basic auth info
    username, password = url.user, url.password
    if username:
        self.auth = helpers.BasicAuth(username, password or '')
|
[
"Update destination host, port and connection type (ssl)."
] |
Please provide a description of the function:
def update_version(self, version: Union[http.HttpVersion, str]) -> None:
    if isinstance(version, str):
        v = [l.strip() for l in version.split('.', 1)]
        try:
            version = http.HttpVersion(int(v[0]), int(v[1]))
        except ValueError:
            raise ValueError(
                'Can not parse http version number: {}'
                .format(version)) from None
    self.version = version
|
[
"Convert request version to two elements tuple.\n\n parser HTTP version '1.1' => (1, 1)\n "
] |
Please provide a description of the function:
def update_headers(self, headers: Optional[LooseHeaders]) -> None:
    self.headers = CIMultiDict()  # type: CIMultiDict[str]

    # add host
    netloc = cast(str, self.url.raw_host)
    if helpers.is_ipv6_address(netloc):
        netloc = '[{}]'.format(netloc)
    if not self.url.is_default_port():
        netloc += ':' + str(self.url.port)
    self.headers[hdrs.HOST] = netloc

    if headers:
        if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
            headers = headers.items()  # type: ignore

        for key, value in headers:
            # A special case for Host header
            if key.lower() == 'host':
                self.headers[key] = value
            else:
                self.headers.add(key, value)
|
[
"Update request headers."
] |
Please provide a description of the function:
def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
    if not cookies:
        return

    c = SimpleCookie()
    if hdrs.COOKIE in self.headers:
        c.load(self.headers.get(hdrs.COOKIE, ''))
        del self.headers[hdrs.COOKIE]

    if isinstance(cookies, Mapping):
        iter_cookies = cookies.items()
    else:
        iter_cookies = cookies  # type: ignore
    for name, value in iter_cookies:
        if isinstance(value, Morsel):
            # Preserve coded_value
            mrsl_val = value.get(value.key, Morsel())
            mrsl_val.set(value.key, value.value, value.coded_value)  # type: ignore  # noqa
            c[name] = mrsl_val
        else:
            c[name] = value  # type: ignore

    self.headers[hdrs.COOKIE] = c.output(header='', sep=';').strip()
|
[
"Update request cookies header."
] |
Please provide a description of the function:
def update_content_encoding(self, data: Any) -> None:
    if not data:
        return

    enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower()
    if enc:
        if self.compress:
            raise ValueError(
                'compress can not be set '
                'if Content-Encoding header is set')
    elif self.compress:
        if not isinstance(self.compress, str):
            self.compress = 'deflate'
        self.headers[hdrs.CONTENT_ENCODING] = self.compress
        self.chunked = True
|
[
"Set request content encoding."
] |
Please provide a description of the function:
def update_transfer_encoding(self) -> None:
    te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower()

    if 'chunked' in te:
        if self.chunked:
            raise ValueError(
                'chunked can not be set '
                'if "Transfer-Encoding: chunked" header is set')
    elif self.chunked:
        if hdrs.CONTENT_LENGTH in self.headers:
            raise ValueError(
                'chunked can not be set '
                'if Content-Length header is set')
        self.headers[hdrs.TRANSFER_ENCODING] = 'chunked'
    else:
        if hdrs.CONTENT_LENGTH not in self.headers:
            self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
|
[
"Analyze transfer-encoding header."
] |
Please provide a description of the function:
def update_auth(self, auth: Optional[BasicAuth]) -> None:
    if auth is None:
        auth = self.auth
    if auth is None:
        return

    if not isinstance(auth, helpers.BasicAuth):
        raise TypeError('BasicAuth() tuple is required instead')

    self.headers[hdrs.AUTHORIZATION] = auth.encode()
|
[
"Set basic auth."
] |
Please provide a description of the function:
async def write_bytes(self, writer: AbstractStreamWriter,
                      conn: 'Connection') -> None:
    # 100 response
    if self._continue is not None:
        await writer.drain()
        await self._continue

    protocol = conn.protocol
    assert protocol is not None
    try:
        if isinstance(self.body, payload.Payload):
            await self.body.write(writer)
        else:
            if isinstance(self.body, (bytes, bytearray)):
                self.body = (self.body,)  # type: ignore

            for chunk in self.body:
                await writer.write(chunk)  # type: ignore

        await writer.write_eof()
    except OSError as exc:
        new_exc = ClientOSError(
            exc.errno,
            'Can not write request body for %s' % self.url)
        new_exc.__context__ = exc
        new_exc.__cause__ = exc
        protocol.set_exception(new_exc)
    except asyncio.CancelledError as exc:
        if not conn.closed:
            protocol.set_exception(exc)
    except Exception as exc:
        protocol.set_exception(exc)
    finally:
        self._writer = None
|
[
"Support coroutines that yields bytes objects."
] |
Please provide a description of the function:
async def start(self, connection: 'Connection') -> 'ClientResponse':
    self._closed = False
    self._protocol = connection.protocol
    self._connection = connection

    with self._timer:
        while True:
            # read response
            try:
                message, payload = await self._protocol.read()  # type: ignore  # noqa
            except http.HttpProcessingError as exc:
                raise ClientResponseError(
                    self.request_info, self.history,
                    status=exc.code,
                    message=exc.message, headers=exc.headers) from exc

            if (message.code < 100 or
                    message.code > 199 or message.code == 101):
                break

            if self._continue is not None:
                set_result(self._continue, True)
                self._continue = None

    # payload eof handler
    payload.on_eof(self._response_eof)

    # response status
    self.version = message.version
    self.status = message.code
    self.reason = message.reason

    # headers
    self._headers = message.headers  # type is CIMultiDictProxy
    self._raw_headers = message.raw_headers  # type is Tuple[bytes, bytes]

    # payload
    self.content = payload

    # cookies
    for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
        try:
            self.cookies.load(hdr)
        except CookieError as exc:
            client_logger.warning(
                'Can not load response cookies: %s', exc)

    return self
|
[
"Start response processing."
] |
Please provide a description of the function:
async def read(self) -> bytes:
    if self._body is None:
        try:
            self._body = await self.content.read()
            for trace in self._traces:
                await trace.send_response_chunk_received(self._body)
        except BaseException:
            self.close()
            raise
    elif self._released:
        raise ClientConnectionError('Connection closed')
    return self._body
|
[
"Read response payload."
] |
Please provide a description of the function:
async def text(self,
               encoding: Optional[str]=None, errors: str='strict') -> str:
    if self._body is None:
        await self.read()

    if encoding is None:
        encoding = self.get_encoding()

    return self._body.decode(encoding, errors=errors)
|
[
"Read response payload and decode."
] |
Please provide a description of the function:
async def json(self, *, encoding: str=None,
               loads: JSONDecoder=DEFAULT_JSON_DECODER,
               content_type: Optional[str]='application/json') -> Any:
    if self._body is None:
        await self.read()

    if content_type:
        ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower()
        if not _is_expected_content_type(ctype, content_type):
            raise ContentTypeError(
                self.request_info,
                self.history,
                message=('Attempt to decode JSON with '
                         'unexpected mimetype: %s' % ctype),
                headers=self.headers)

    if encoding is None:
        encoding = self.get_encoding()

    return loads(self._body.decode(encoding))
|
[
"Read and decodes JSON response."
] |
Please provide a description of the function:
def enable_chunked_encoding(self, chunk_size: Optional[int]=None) -> None:
    self._chunked = True

    if hdrs.CONTENT_LENGTH in self._headers:
        raise RuntimeError("You can't enable chunked encoding when "
                           "a content length is set")
    if chunk_size is not None:
        warnings.warn('Chunk size is deprecated #1615', DeprecationWarning)
|
[
"Enables automatic chunked transfer encoding."
] |
Please provide a description of the function:
def enable_compression(self,
                       force: Optional[Union[bool, ContentCoding]]=None
                       ) -> None:
    # Backwards compatibility for when force was a bool <0.17.
    if type(force) == bool:
        force = ContentCoding.deflate if force else ContentCoding.identity
        warnings.warn("Using boolean for force is deprecated #3318",
                      DeprecationWarning)
    elif force is not None:
        assert isinstance(force, ContentCoding), ("force should be one of "
                                                  "None, bool or "
                                                  "ContentEncoding")

    self._compression = True
    self._compression_force = force
|
[
"Enables response compression encoding."
] |
Please provide a description of the function:
def set_cookie(self, name: str, value: str, *,
               expires: Optional[str]=None,
               domain: Optional[str]=None,
               max_age: Optional[Union[int, str]]=None,
               path: str='/',
               secure: Optional[str]=None,
               httponly: Optional[str]=None,
               version: Optional[str]=None) -> None:
    old = self._cookies.get(name)
    if old is not None and old.coded_value == '':
        # deleted cookie
        self._cookies.pop(name, None)

    self._cookies[name] = value
    c = self._cookies[name]

    if expires is not None:
        c['expires'] = expires
    elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT':
        del c['expires']

    if domain is not None:
        c['domain'] = domain

    if max_age is not None:
        c['max-age'] = str(max_age)
    elif 'max-age' in c:
        del c['max-age']

    c['path'] = path

    if secure is not None:
        c['secure'] = secure
    if httponly is not None:
        c['httponly'] = httponly
    if version is not None:
        c['version'] = version
|
[
"Set or update response cookie.\n\n Sets new cookie or updates existent with new value.\n Also updates only those params which are not None.\n "
] |
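A short usage sketch (cookie name and value are illustrative):

    resp = web.Response(text='ok')
    resp.set_cookie('session', 'abc123', max_age=3600, httponly=True)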
Please provide a description of the function:
def del_cookie(self, name: str, *,
               domain: Optional[str]=None,
               path: str='/') -> None:
    # TODO: do we need domain/path here?
    self._cookies.pop(name, None)
    self.set_cookie(name, '', max_age=0,
                    expires="Thu, 01 Jan 1970 00:00:00 GMT",
                    domain=domain, path=path)
|
[
"Delete cookie.\n\n Creates new empty expired cookie.\n "
] |
Please provide a description of the function:
def last_modified(self) -> Optional[datetime.datetime]:
    httpdate = self._headers.get(hdrs.LAST_MODIFIED)
    if httpdate is not None:
        timetuple = parsedate(httpdate)
        if timetuple is not None:
            return datetime.datetime(*timetuple[:6],
                                     tzinfo=datetime.timezone.utc)
    return None
|
[
"The value of Last-Modified HTTP header, or None.\n\n This header is represented as a `datetime` object.\n "
] |
Please provide a description of the function:
async def _default_expect_handler(request: Request) -> None:
    expect = request.headers.get(hdrs.EXPECT)
    if request.version == HttpVersion11:
        if expect.lower() == "100-continue":
            await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n")
        else:
            raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)
|
[
"Default handler for Expect header.\n\n Just send \"100 Continue\" to client.\n raise HTTPExpectationFailed if value of header is not \"100-continue\"\n "
] |