repo_id
stringclasses
208 values
file_path
stringlengths
31
190
content
stringlengths
1
2.65M
__index_level_0__
int64
0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/wrappers/cors.py
from ..http import dump_header from ..http import parse_set_header from ..utils import environ_property from ..utils import header_property class CORSRequestMixin(object): """A mixin for :class:`~werkzeug.wrappers.BaseRequest` subclasses that adds descriptors for Cross Origin Resource Sharing (CORS) headers. .. versionadded:: 1.0 """ origin = environ_property( "HTTP_ORIGIN", doc=( "The host that the request originated from. Set" " :attr:`~CORSResponseMixin.access_control_allow_origin` on" " the response to indicate which origins are allowed." ), ) access_control_request_headers = environ_property( "HTTP_ACCESS_CONTROL_REQUEST_HEADERS", load_func=parse_set_header, doc=( "Sent with a preflight request to indicate which headers" " will be sent with the cross origin request. Set" " :attr:`~CORSResponseMixin.access_control_allow_headers`" " on the response to indicate which headers are allowed." ), ) access_control_request_method = environ_property( "HTTP_ACCESS_CONTROL_REQUEST_METHOD", doc=( "Sent with a preflight request to indicate which method" " will be used for the cross origin request. Set" " :attr:`~CORSResponseMixin.access_control_allow_methods`" " on the response to indicate which methods are allowed." ), ) class CORSResponseMixin(object): """A mixin for :class:`~werkzeug.wrappers.BaseResponse` subclasses that adds descriptors for Cross Origin Resource Sharing (CORS) headers. .. versionadded:: 1.0 """ @property def access_control_allow_credentials(self): """Whether credentials can be shared by the browser to JavaScript code. As part of the preflight request it indicates whether credentials can be used on the cross origin request. 
""" return "Access-Control-Allow-Credentials" in self.headers @access_control_allow_credentials.setter def access_control_allow_credentials(self, value): if value is True: self.headers["Access-Control-Allow-Credentials"] = "true" else: self.headers.pop("Access-Control-Allow-Credentials", None) access_control_allow_headers = header_property( "Access-Control-Allow-Headers", load_func=parse_set_header, dump_func=dump_header, doc="Which headers can be sent with the cross origin request.", ) access_control_allow_methods = header_property( "Access-Control-Allow-Methods", load_func=parse_set_header, dump_func=dump_header, doc="Which methods can be used for the cross origin request.", ) access_control_allow_origin = header_property( "Access-Control-Allow-Origin", doc="The origin or '*' for any origin that may make cross origin requests.", ) access_control_expose_headers = header_property( "Access-Control-Expose-Headers", load_func=parse_set_header, dump_func=dump_header, doc="Which headers can be shared by the browser to JavaScript code.", ) access_control_max_age = header_property( "Access-Control-Max-Age", load_func=int, dump_func=str, doc="The maximum age in seconds the access control settings can be cached for.", )
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/wrappers/base_response.py
import warnings from .._compat import integer_types from .._compat import string_types from .._compat import text_type from .._compat import to_bytes from .._compat import to_native from ..datastructures import Headers from ..http import dump_cookie from ..http import HTTP_STATUS_CODES from ..http import remove_entity_headers from ..urls import iri_to_uri from ..urls import url_join from ..utils import get_content_type from ..wsgi import ClosingIterator from ..wsgi import get_current_url def _run_wsgi_app(*args): """This function replaces itself to ensure that the test module is not imported unless required. DO NOT USE! """ global _run_wsgi_app from ..test import run_wsgi_app as _run_wsgi_app return _run_wsgi_app(*args) def _warn_if_string(iterable): """Helper for the response objects to check if the iterable returned to the WSGI server is not a string. """ if isinstance(iterable, string_types): warnings.warn( "Response iterable was set to a string. This will appear to" " work but means that the server will send the data to the" " client one character at a time. This is almost never" " intended behavior, use 'response.data' to assign strings" " to the response object.", stacklevel=2, ) def _iter_encoded(iterable, charset): for item in iterable: if isinstance(item, text_type): yield item.encode(charset) else: yield item def _clean_accept_ranges(accept_ranges): if accept_ranges is True: return "bytes" elif accept_ranges is False: return "none" elif isinstance(accept_ranges, text_type): return to_native(accept_ranges) raise ValueError("Invalid accept_ranges value") class BaseResponse(object): """Base response class. The most important fact about a response object is that it's a regular WSGI application. It's initialized with a couple of response parameters (headers, body, status code etc.) and will start a valid WSGI response when called with the environ and start response callable. 
Because it's a WSGI application itself processing usually ends before the actual response is sent to the server. This helps debugging systems because they can catch all the exceptions before responses are started. Here a small example WSGI application that takes advantage of the response objects:: from werkzeug.wrappers import BaseResponse as Response def index(): return Response('Index page') def application(environ, start_response): path = environ.get('PATH_INFO') or '/' if path == '/': response = index() else: response = Response('Not Found', status=404) return response(environ, start_response) Like :class:`BaseRequest` which object is lacking a lot of functionality implemented in mixins. This gives you a better control about the actual API of your response objects, so you can create subclasses and add custom functionality. A full featured response object is available as :class:`Response` which implements a couple of useful mixins. To enforce a new type of already existing responses you can use the :meth:`force_type` method. This is useful if you're working with different subclasses of response objects and you want to post process them with a known interface. Per default the response object will assume all the text data is `utf-8` encoded. Please refer to :doc:`the unicode chapter </unicode>` for more details about customizing the behavior. Response can be any kind of iterable or string. If it's a string it's considered being an iterable with one item which is the string passed. Headers can be a list of tuples or a :class:`~werkzeug.datastructures.Headers` object. Special note for `mimetype` and `content_type`: For most mime types `mimetype` and `content_type` work the same, the difference affects only 'text' mimetypes. If the mimetype passed with `mimetype` is a mimetype starting with `text/`, the charset parameter of the response object is appended to it. In contrast the `content_type` parameter is always added as header unmodified. .. 
versionchanged:: 0.5 the `direct_passthrough` parameter was added. :param response: a string or response iterable. :param status: a string with a status or an integer with the status code. :param headers: a list of headers or a :class:`~werkzeug.datastructures.Headers` object. :param mimetype: the mimetype for the response. See notice above. :param content_type: the content type for the response. See notice above. :param direct_passthrough: if set to `True` :meth:`iter_encoded` is not called before iteration which makes it possible to pass special iterators through unchanged (see :func:`wrap_file` for more details.) """ #: the charset of the response. charset = "utf-8" #: the default status if none is provided. default_status = 200 #: the default mimetype if none is provided. default_mimetype = "text/plain" #: if set to `False` accessing properties on the response object will #: not try to consume the response iterator and convert it into a list. #: #: .. versionadded:: 0.6.2 #: #: That attribute was previously called `implicit_seqence_conversion`. #: (Notice the typo). If you did use this feature, you have to adapt #: your code to the name change. implicit_sequence_conversion = True #: Should this response object correct the location header to be RFC #: conformant? This is true by default. #: #: .. versionadded:: 0.8 autocorrect_location_header = True #: Should this response object automatically set the content-length #: header if possible? This is true by default. #: #: .. versionadded:: 0.8 automatically_set_content_length = True #: Warn if a cookie header exceeds this size. The default, 4093, should be #: safely `supported by most browsers <cookie_>`_. A cookie larger than #: this size will still be sent, but it may be ignored or handled #: incorrectly by some browsers. Set to 0 to disable this check. #: #: .. versionadded:: 0.13 #: #: .. 
_`cookie`: http://browsercookielimits.squawky.net/ max_cookie_size = 4093 def __init__( self, response=None, status=None, headers=None, mimetype=None, content_type=None, direct_passthrough=False, ): if isinstance(headers, Headers): self.headers = headers elif not headers: self.headers = Headers() else: self.headers = Headers(headers) if content_type is None: if mimetype is None and "content-type" not in self.headers: mimetype = self.default_mimetype if mimetype is not None: mimetype = get_content_type(mimetype, self.charset) content_type = mimetype if content_type is not None: self.headers["Content-Type"] = content_type if status is None: status = self.default_status if isinstance(status, integer_types): self.status_code = status else: self.status = status self.direct_passthrough = direct_passthrough self._on_close = [] # we set the response after the headers so that if a class changes # the charset attribute, the data is set in the correct charset. if response is None: self.response = [] elif isinstance(response, (text_type, bytes, bytearray)): self.set_data(response) else: self.response = response def call_on_close(self, func): """Adds a function to the internal list of functions that should be called as part of closing down the response. Since 0.7 this function also returns the function that was passed so that this can be used as a decorator. .. versionadded:: 0.6 """ self._on_close.append(func) return func def __repr__(self): if self.is_sequence: body_info = "%d bytes" % sum(map(len, self.iter_encoded())) else: body_info = "streamed" if self.is_streamed else "likely-streamed" return "<%s %s [%s]>" % (self.__class__.__name__, body_info, self.status) @classmethod def force_type(cls, response, environ=None): """Enforce that the WSGI response is a response object of the current type. Werkzeug will use the :class:`BaseResponse` internally in many situations like the exceptions. 
If you call :meth:`get_response` on an exception you will get back a regular :class:`BaseResponse` object, even if you are using a custom subclass. This method can enforce a given response type, and it will also convert arbitrary WSGI callables into response objects if an environ is provided:: # convert a Werkzeug response object into an instance of the # MyResponseClass subclass. response = MyResponseClass.force_type(response) # convert any WSGI application into a response object response = MyResponseClass.force_type(response, environ) This is especially useful if you want to post-process responses in the main dispatcher and use functionality provided by your subclass. Keep in mind that this will modify response objects in place if possible! :param response: a response object or wsgi application. :param environ: a WSGI environment object. :return: a response object. """ if not isinstance(response, BaseResponse): if environ is None: raise TypeError( "cannot convert WSGI application into response" " objects without an environ" ) response = BaseResponse(*_run_wsgi_app(response, environ)) response.__class__ = cls return response @classmethod def from_app(cls, app, environ, buffered=False): """Create a new response object from an application output. This works best if you pass it an application that returns a generator all the time. Sometimes applications may use the `write()` callable returned by the `start_response` function. This tries to resolve such edge cases automatically. But if you don't get the expected output you should set `buffered` to `True` which enforces buffering. :param app: the WSGI application to execute. :param environ: the WSGI environment to execute against. :param buffered: set to `True` to enforce buffering. :return: a response object. 
""" return cls(*_run_wsgi_app(app, environ, buffered)) @property def status_code(self): """The HTTP status code as a number.""" return self._status_code @status_code.setter def status_code(self, code): self._status_code = code try: self._status = "%d %s" % (code, HTTP_STATUS_CODES[code].upper()) except KeyError: self._status = "%d UNKNOWN" % code @property def status(self): """The HTTP status code as a string.""" return self._status @status.setter def status(self, value): try: self._status = to_native(value) except AttributeError: raise TypeError("Invalid status argument") try: self._status_code = int(self._status.split(None, 1)[0]) except ValueError: self._status_code = 0 self._status = "0 %s" % self._status except IndexError: raise ValueError("Empty status argument") def get_data(self, as_text=False): """The string representation of the request body. Whenever you call this property the request iterable is encoded and flattened. This can lead to unwanted behavior if you stream big data. This behavior can be disabled by setting :attr:`implicit_sequence_conversion` to `False`. If `as_text` is set to `True` the return value will be a decoded unicode string. .. versionadded:: 0.9 """ self._ensure_sequence() rv = b"".join(self.iter_encoded()) if as_text: rv = rv.decode(self.charset) return rv def set_data(self, value): """Sets a new string as response. The value set must be either a unicode or bytestring. If a unicode string is set it's encoded automatically to the charset of the response (utf-8 by default). .. 
versionadded:: 0.9 """ # if an unicode string is set, it's encoded directly so that we # can set the content length if isinstance(value, text_type): value = value.encode(self.charset) else: value = bytes(value) self.response = [value] if self.automatically_set_content_length: self.headers["Content-Length"] = str(len(value)) data = property( get_data, set_data, doc="A descriptor that calls :meth:`get_data` and :meth:`set_data`.", ) def calculate_content_length(self): """Returns the content length if available or `None` otherwise.""" try: self._ensure_sequence() except RuntimeError: return None return sum(len(x) for x in self.iter_encoded()) def _ensure_sequence(self, mutable=False): """This method can be called by methods that need a sequence. If `mutable` is true, it will also ensure that the response sequence is a standard Python list. .. versionadded:: 0.6 """ if self.is_sequence: # if we need a mutable object, we ensure it's a list. if mutable and not isinstance(self.response, list): self.response = list(self.response) return if self.direct_passthrough: raise RuntimeError( "Attempted implicit sequence conversion but the" " response object is in direct passthrough mode." ) if not self.implicit_sequence_conversion: raise RuntimeError( "The response object required the iterable to be a" " sequence, but the implicit conversion was disabled." " Call make_sequence() yourself." ) self.make_sequence() def make_sequence(self): """Converts the response iterator in a list. By default this happens automatically if required. If `implicit_sequence_conversion` is disabled, this method is not automatically called and some properties might raise exceptions. This also encodes all the items. .. 
versionadded:: 0.6 """ if not self.is_sequence: # if we consume an iterable we have to ensure that the close # method of the iterable is called if available when we tear # down the response close = getattr(self.response, "close", None) self.response = list(self.iter_encoded()) if close is not None: self.call_on_close(close) def iter_encoded(self): """Iter the response encoded with the encoding of the response. If the response object is invoked as WSGI application the return value of this method is used as application iterator unless :attr:`direct_passthrough` was activated. """ if __debug__: _warn_if_string(self.response) # Encode in a separate function so that self.response is fetched # early. This allows us to wrap the response with the return # value from get_app_iter or iter_encoded. return _iter_encoded(self.response, self.charset) def set_cookie( self, key, value="", max_age=None, expires=None, path="/", domain=None, secure=False, httponly=False, samesite=None, ): """Sets a cookie. The parameters are the same as in the cookie `Morsel` object in the Python standard library but it accepts unicode data, too. A warning is raised if the size of the cookie header exceeds :attr:`max_cookie_size`, but the header will still be set. :param key: the key (name) of the cookie to be set. :param value: the value of the cookie. :param max_age: should be a number of seconds, or `None` (default) if the cookie should last only as long as the client's browser session. :param expires: should be a `datetime` object or UNIX timestamp. :param path: limits the cookie to a given path, per default it will span the whole domain. :param domain: if you want to set a cross-domain cookie. For example, ``domain=".example.com"`` will set a cookie that is readable by the domain ``www.example.com``, ``foo.example.com`` etc. Otherwise, a cookie will only be readable by the domain that set it. 
:param secure: If `True`, the cookie will only be available via HTTPS :param httponly: disallow JavaScript to access the cookie. This is an extension to the cookie standard and probably not supported by all browsers. :param samesite: Limits the scope of the cookie such that it will only be attached to requests if those requests are "same-site". """ self.headers.add( "Set-Cookie", dump_cookie( key, value=value, max_age=max_age, expires=expires, path=path, domain=domain, secure=secure, httponly=httponly, charset=self.charset, max_size=self.max_cookie_size, samesite=samesite, ), ) def delete_cookie(self, key, path="/", domain=None): """Delete a cookie. Fails silently if key doesn't exist. :param key: the key (name) of the cookie to be deleted. :param path: if the cookie that should be deleted was limited to a path, the path has to be defined here. :param domain: if the cookie that should be deleted was limited to a domain, that domain has to be defined here. """ self.set_cookie(key, expires=0, max_age=0, path=path, domain=domain) @property def is_streamed(self): """If the response is streamed (the response is not an iterable with a length information) this property is `True`. In this case streamed means that there is no information about the number of iterations. This is usually `True` if a generator is passed to the response object. This is useful for checking before applying some sort of post filtering that should not take place for streamed responses. """ try: len(self.response) except (TypeError, AttributeError): return True return False @property def is_sequence(self): """If the iterator is buffered, this property will be `True`. A response object will consider an iterator to be buffered if the response attribute is a list or tuple. .. versionadded:: 0.6 """ return isinstance(self.response, (tuple, list)) def close(self): """Close the wrapped response if possible. You can also use the object in a with statement which will automatically close it. .. 
versionadded:: 0.9 Can now be used in a with statement. """ if hasattr(self.response, "close"): self.response.close() for func in self._on_close: func() def __enter__(self): return self def __exit__(self, exc_type, exc_value, tb): self.close() def freeze(self): """Call this method if you want to make your response object ready for being pickled. This buffers the generator if there is one. It will also set the `Content-Length` header to the length of the body. .. versionchanged:: 0.6 The `Content-Length` header is now set. """ # we explicitly set the length to a list of the *encoded* response # iterator. Even if the implicit sequence conversion is disabled. self.response = list(self.iter_encoded()) self.headers["Content-Length"] = str(sum(map(len, self.response))) def get_wsgi_headers(self, environ): """This is automatically called right before the response is started and returns headers modified for the given environment. It returns a copy of the headers from the response with some modifications applied if necessary. For example the location header (if present) is joined with the root URL of the environment. Also the content length is automatically set to zero here for certain status codes. .. versionchanged:: 0.6 Previously that function was called `fix_headers` and modified the response object in place. Also since 0.6, IRIs in location and content-location headers are handled properly. Also starting with 0.6, Werkzeug will attempt to set the content length if it is able to figure it out on its own. This is the case if all the strings in the response iterable are already encoded and the iterable is buffered. :param environ: the WSGI environment of the request. :return: returns a new :class:`~werkzeug.datastructures.Headers` object. """ headers = Headers(self.headers) location = None content_location = None content_length = None status = self.status_code # iterate over the headers to find all values in one go. 
Because # get_wsgi_headers is used each response that gives us a tiny # speedup. for key, value in headers: ikey = key.lower() if ikey == u"location": location = value elif ikey == u"content-location": content_location = value elif ikey == u"content-length": content_length = value # make sure the location header is an absolute URL if location is not None: old_location = location if isinstance(location, text_type): # Safe conversion is necessary here as we might redirect # to a broken URI scheme (for instance itms-services). location = iri_to_uri(location, safe_conversion=True) if self.autocorrect_location_header: current_url = get_current_url(environ, strip_querystring=True) if isinstance(current_url, text_type): current_url = iri_to_uri(current_url) location = url_join(current_url, location) if location != old_location: headers["Location"] = location # make sure the content location is a URL if content_location is not None and isinstance(content_location, text_type): headers["Content-Location"] = iri_to_uri(content_location) if 100 <= status < 200 or status == 204: # Per section 3.3.2 of RFC 7230, "a server MUST NOT send a # Content-Length header field in any response with a status # code of 1xx (Informational) or 204 (No Content)." headers.remove("Content-Length") elif status == 304: remove_entity_headers(headers) # if we can determine the content length automatically, we # should try to do that. But only if this does not involve # flattening the iterator or encoding of unicode strings in # the response. We however should not do that if we have a 304 # response. if ( self.automatically_set_content_length and self.is_sequence and content_length is None and status not in (204, 304) and not (100 <= status < 200) ): try: content_length = sum(len(to_bytes(x, "ascii")) for x in self.response) except UnicodeError: # aha, something non-bytestringy in there, too bad, we # can't safely figure out the length of the response. 
pass else: headers["Content-Length"] = str(content_length) return headers def get_app_iter(self, environ): """Returns the application iterator for the given environ. Depending on the request method and the current status code the return value might be an empty response rather than the one from the response. If the request method is `HEAD` or the status code is in a range where the HTTP specification requires an empty response, an empty iterable is returned. .. versionadded:: 0.6 :param environ: the WSGI environment of the request. :return: a response iterable. """ status = self.status_code if ( environ["REQUEST_METHOD"] == "HEAD" or 100 <= status < 200 or status in (204, 304) ): iterable = () elif self.direct_passthrough: if __debug__: _warn_if_string(self.response) return self.response else: iterable = self.iter_encoded() return ClosingIterator(iterable, self.close) def get_wsgi_response(self, environ): """Returns the final WSGI response as tuple. The first item in the tuple is the application iterator, the second the status and the third the list of headers. The response returned is created specially for the given environment. For example if the request method in the WSGI environment is ``'HEAD'`` the response will be empty and only the headers and status code will be present. .. versionadded:: 0.6 :param environ: the WSGI environment of the request. :return: an ``(app_iter, status, headers)`` tuple. """ headers = self.get_wsgi_headers(environ) app_iter = self.get_app_iter(environ) return app_iter, self.status, headers.to_wsgi_list() def __call__(self, environ, start_response): """Process this response as WSGI application. :param environ: the WSGI environment. :param start_response: the response callable provided by the WSGI server. :return: an application iterator """ app_iter, status, headers = self.get_wsgi_response(environ) start_response(status, headers) return app_iter
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/wrappers/request.py
from .accept import AcceptMixin from .auth import AuthorizationMixin from .base_request import BaseRequest from .common_descriptors import CommonRequestDescriptorsMixin from .cors import CORSRequestMixin from .etag import ETagRequestMixin from .user_agent import UserAgentMixin class Request( BaseRequest, AcceptMixin, ETagRequestMixin, UserAgentMixin, AuthorizationMixin, CORSRequestMixin, CommonRequestDescriptorsMixin, ): """Full featured request object implementing the following mixins: - :class:`AcceptMixin` for accept header parsing - :class:`ETagRequestMixin` for etag and cache control handling - :class:`UserAgentMixin` for user agent introspection - :class:`AuthorizationMixin` for http auth handling - :class:`~werkzeug.wrappers.cors.CORSRequestMixin` for Cross Origin Resource Sharing headers - :class:`CommonRequestDescriptorsMixin` for common headers """ class StreamOnlyMixin(object): """If mixed in before the request object this will change the behavior of it to disable handling of form parsing. This disables the :attr:`files`, :attr:`form` attributes and will just provide a :attr:`stream` attribute that however is always available. .. versionadded:: 0.9 """ disable_data_descriptor = True want_form_data_parsed = False class PlainRequest(StreamOnlyMixin, Request): """A request object without special form parsing capabilities. .. versionadded:: 0.9 """
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/wrappers/__init__.py
""" werkzeug.wrappers ~~~~~~~~~~~~~~~~~ The wrappers are simple request and response objects which you can subclass to do whatever you want them to do. The request object contains the information transmitted by the client (webbrowser) and the response object contains all the information sent back to the browser. An important detail is that the request object is created with the WSGI environ and will act as high-level proxy whereas the response object is an actual WSGI application. Like everything else in Werkzeug these objects will work correctly with unicode data. Incoming form data parsed by the response object will be decoded into an unicode object if possible and if it makes sense. :copyright: 2007 Pallets :license: BSD-3-Clause """ from .accept import AcceptMixin from .auth import AuthorizationMixin from .auth import WWWAuthenticateMixin from .base_request import BaseRequest from .base_response import BaseResponse from .common_descriptors import CommonRequestDescriptorsMixin from .common_descriptors import CommonResponseDescriptorsMixin from .etag import ETagRequestMixin from .etag import ETagResponseMixin from .request import PlainRequest from .request import Request from .request import StreamOnlyMixin from .response import Response from .response import ResponseStream from .response import ResponseStreamMixin from .user_agent import UserAgentMixin
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/wrappers/response.py
from ..utils import cached_property from .auth import WWWAuthenticateMixin from .base_response import BaseResponse from .common_descriptors import CommonResponseDescriptorsMixin from .cors import CORSResponseMixin from .etag import ETagResponseMixin class ResponseStream(object): """A file descriptor like object used by the :class:`ResponseStreamMixin` to represent the body of the stream. It directly pushes into the response iterable of the response object. """ mode = "wb+" def __init__(self, response): self.response = response self.closed = False def write(self, value): if self.closed: raise ValueError("I/O operation on closed file") self.response._ensure_sequence(mutable=True) self.response.response.append(value) self.response.headers.pop("Content-Length", None) return len(value) def writelines(self, seq): for item in seq: self.write(item) def close(self): self.closed = True def flush(self): if self.closed: raise ValueError("I/O operation on closed file") def isatty(self): if self.closed: raise ValueError("I/O operation on closed file") return False def tell(self): self.response._ensure_sequence() return sum(map(len, self.response.response)) @property def encoding(self): return self.response.charset class ResponseStreamMixin(object): """Mixin for :class:`BaseResponse` subclasses. Classes that inherit from this mixin will automatically get a :attr:`stream` property that provides a write-only interface to the response iterable. 
""" @cached_property def stream(self): """The response iterable as write-only stream.""" return ResponseStream(self) class Response( BaseResponse, ETagResponseMixin, WWWAuthenticateMixin, CORSResponseMixin, ResponseStreamMixin, CommonResponseDescriptorsMixin, ): """Full featured response object implementing the following mixins: - :class:`ETagResponseMixin` for etag and cache control handling - :class:`WWWAuthenticateMixin` for HTTP authentication support - :class:`~werkzeug.wrappers.cors.CORSResponseMixin` for Cross Origin Resource Sharing headers - :class:`ResponseStreamMixin` to add support for the ``stream`` property - :class:`CommonResponseDescriptorsMixin` for various HTTP descriptors """
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/wrappers/etag.py
from .._compat import string_types from .._internal import _get_environ from ..datastructures import ContentRange from ..datastructures import RequestCacheControl from ..datastructures import ResponseCacheControl from ..http import generate_etag from ..http import http_date from ..http import is_resource_modified from ..http import parse_cache_control_header from ..http import parse_content_range_header from ..http import parse_date from ..http import parse_etags from ..http import parse_if_range_header from ..http import parse_range_header from ..http import quote_etag from ..http import unquote_etag from ..utils import cached_property from ..utils import header_property from ..wrappers.base_response import _clean_accept_ranges from ..wsgi import _RangeWrapper class ETagRequestMixin(object): """Add entity tag and cache descriptors to a request object or object with a WSGI environment available as :attr:`~BaseRequest.environ`. This not only provides access to etags but also to the cache control header. """ @cached_property def cache_control(self): """A :class:`~werkzeug.datastructures.RequestCacheControl` object for the incoming cache control headers. """ cache_control = self.environ.get("HTTP_CACHE_CONTROL") return parse_cache_control_header(cache_control, None, RequestCacheControl) @cached_property def if_match(self): """An object containing all the etags in the `If-Match` header. :rtype: :class:`~werkzeug.datastructures.ETags` """ return parse_etags(self.environ.get("HTTP_IF_MATCH")) @cached_property def if_none_match(self): """An object containing all the etags in the `If-None-Match` header. 
:rtype: :class:`~werkzeug.datastructures.ETags` """ return parse_etags(self.environ.get("HTTP_IF_NONE_MATCH")) @cached_property def if_modified_since(self): """The parsed `If-Modified-Since` header as datetime object.""" return parse_date(self.environ.get("HTTP_IF_MODIFIED_SINCE")) @cached_property def if_unmodified_since(self): """The parsed `If-Unmodified-Since` header as datetime object.""" return parse_date(self.environ.get("HTTP_IF_UNMODIFIED_SINCE")) @cached_property def if_range(self): """The parsed `If-Range` header. .. versionadded:: 0.7 :rtype: :class:`~werkzeug.datastructures.IfRange` """ return parse_if_range_header(self.environ.get("HTTP_IF_RANGE")) @cached_property def range(self): """The parsed `Range` header. .. versionadded:: 0.7 :rtype: :class:`~werkzeug.datastructures.Range` """ return parse_range_header(self.environ.get("HTTP_RANGE")) class ETagResponseMixin(object): """Adds extra functionality to a response object for etag and cache handling. This mixin requires an object with at least a `headers` object that implements a dict like interface similar to :class:`~werkzeug.datastructures.Headers`. If you want the :meth:`freeze` method to automatically add an etag, you have to mixin this method before the response base class. The default response class does not do that. """ @property def cache_control(self): """The Cache-Control general-header field is used to specify directives that MUST be obeyed by all caching mechanisms along the request/response chain. 
""" def on_update(cache_control): if not cache_control and "cache-control" in self.headers: del self.headers["cache-control"] elif cache_control: self.headers["Cache-Control"] = cache_control.to_header() return parse_cache_control_header( self.headers.get("cache-control"), on_update, ResponseCacheControl ) def _wrap_response(self, start, length): """Wrap existing Response in case of Range Request context.""" if self.status_code == 206: self.response = _RangeWrapper(self.response, start, length) def _is_range_request_processable(self, environ): """Return ``True`` if `Range` header is present and if underlying resource is considered unchanged when compared with `If-Range` header. """ return ( "HTTP_IF_RANGE" not in environ or not is_resource_modified( environ, self.headers.get("etag"), None, self.headers.get("last-modified"), ignore_if_range=False, ) ) and "HTTP_RANGE" in environ def _process_range_request(self, environ, complete_length=None, accept_ranges=None): """Handle Range Request related headers (RFC7233). If `Accept-Ranges` header is valid, and Range Request is processable, we set the headers as described by the RFC, and wrap the underlying response in a RangeWrapper. Returns ``True`` if Range Request can be fulfilled, ``False`` otherwise. :raises: :class:`~werkzeug.exceptions.RequestedRangeNotSatisfiable` if `Range` header could not be parsed or satisfied. 
""" from ..exceptions import RequestedRangeNotSatisfiable if ( accept_ranges is None or complete_length is None or not self._is_range_request_processable(environ) ): return False parsed_range = parse_range_header(environ.get("HTTP_RANGE")) if parsed_range is None: raise RequestedRangeNotSatisfiable(complete_length) range_tuple = parsed_range.range_for_length(complete_length) content_range_header = parsed_range.to_content_range_header(complete_length) if range_tuple is None or content_range_header is None: raise RequestedRangeNotSatisfiable(complete_length) content_length = range_tuple[1] - range_tuple[0] self.headers["Content-Length"] = content_length self.headers["Accept-Ranges"] = accept_ranges self.content_range = content_range_header self.status_code = 206 self._wrap_response(range_tuple[0], content_length) return True def make_conditional( self, request_or_environ, accept_ranges=False, complete_length=None ): """Make the response conditional to the request. This method works best if an etag was defined for the response already. The `add_etag` method can be used to do that. If called without etag just the date header is set. This does nothing if the request method in the request or environ is anything but GET or HEAD. For optimal performance when handling range requests, it's recommended that your response data object implements `seekable`, `seek` and `tell` methods as described by :py:class:`io.IOBase`. Objects returned by :meth:`~werkzeug.wsgi.wrap_file` automatically implement those methods. It does not remove the body of the response because that's something the :meth:`__call__` function does for us automatically. Returns self so that you can do ``return resp.make_conditional(req)`` but modifies the object in-place. :param request_or_environ: a request object or WSGI environment to be used to make the response conditional against. :param accept_ranges: This parameter dictates the value of `Accept-Ranges` header. 
If ``False`` (default), the header is not set. If ``True``, it will be set to ``"bytes"``. If ``None``, it will be set to ``"none"``. If it's a string, it will use this value. :param complete_length: Will be used only in valid Range Requests. It will set `Content-Range` complete length value and compute `Content-Length` real value. This parameter is mandatory for successful Range Requests completion. :raises: :class:`~werkzeug.exceptions.RequestedRangeNotSatisfiable` if `Range` header could not be parsed or satisfied. """ environ = _get_environ(request_or_environ) if environ["REQUEST_METHOD"] in ("GET", "HEAD"): # if the date is not in the headers, add it now. We however # will not override an already existing header. Unfortunately # this header will be overriden by many WSGI servers including # wsgiref. if "date" not in self.headers: self.headers["Date"] = http_date() accept_ranges = _clean_accept_ranges(accept_ranges) is206 = self._process_range_request(environ, complete_length, accept_ranges) if not is206 and not is_resource_modified( environ, self.headers.get("etag"), None, self.headers.get("last-modified"), ): if parse_etags(environ.get("HTTP_IF_MATCH")): self.status_code = 412 else: self.status_code = 304 if ( self.automatically_set_content_length and "content-length" not in self.headers ): length = self.calculate_content_length() if length is not None: self.headers["Content-Length"] = length return self def add_etag(self, overwrite=False, weak=False): """Add an etag for the current response if there is none yet.""" if overwrite or "etag" not in self.headers: self.set_etag(generate_etag(self.get_data()), weak) def set_etag(self, etag, weak=False): """Set the etag, and override the old one if there was one.""" self.headers["ETag"] = quote_etag(etag, weak) def get_etag(self): """Return a tuple in the form ``(etag, is_weak)``. If there is no ETag the return value is ``(None, None)``. 
""" return unquote_etag(self.headers.get("ETag")) def freeze(self, no_etag=False): """Call this method if you want to make your response object ready for pickeling. This buffers the generator if there is one. This also sets the etag unless `no_etag` is set to `True`. """ if not no_etag: self.add_etag() super(ETagResponseMixin, self).freeze() accept_ranges = header_property( "Accept-Ranges", doc="""The `Accept-Ranges` header. Even though the name would indicate that multiple values are supported, it must be one string token only. The values ``'bytes'`` and ``'none'`` are common. .. versionadded:: 0.7""", ) @property def content_range(self): """The ``Content-Range`` header as a :class:`~werkzeug.datastructures.ContentRange` object. Available even if the header is not set. .. versionadded:: 0.7 """ def on_update(rng): if not rng: del self.headers["content-range"] else: self.headers["Content-Range"] = rng.to_header() rv = parse_content_range_header(self.headers.get("content-range"), on_update) # always provide a content range object to make the descriptor # more user friendly. It provides an unset() method that can be # used to remove the header quickly. if rv is None: rv = ContentRange(None, None, None, on_update=on_update) return rv @content_range.setter def content_range(self, value): if not value: del self.headers["content-range"] elif isinstance(value, string_types): self.headers["Content-Range"] = value else: self.headers["Content-Range"] = value.to_header()
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/wrappers/common_descriptors.py
from datetime import datetime from datetime import timedelta from .._compat import string_types from ..datastructures import CallbackDict from ..http import dump_age from ..http import dump_csp_header from ..http import dump_header from ..http import dump_options_header from ..http import http_date from ..http import parse_age from ..http import parse_csp_header from ..http import parse_date from ..http import parse_options_header from ..http import parse_set_header from ..utils import cached_property from ..utils import environ_property from ..utils import get_content_type from ..utils import header_property from ..wsgi import get_content_length class CommonRequestDescriptorsMixin(object): """A mixin for :class:`BaseRequest` subclasses. Request objects that mix this class in will automatically get descriptors for a couple of HTTP headers with automatic type conversion. .. versionadded:: 0.5 """ content_type = environ_property( "CONTENT_TYPE", doc="""The Content-Type entity-header field indicates the media type of the entity-body sent to the recipient or, in the case of the HEAD method, the media type that would have been sent had the request been a GET.""", ) @cached_property def content_length(self): """The Content-Length entity-header field indicates the size of the entity-body in bytes or, in the case of the HEAD method, the size of the entity-body that would have been sent had the request been a GET. """ return get_content_length(self.environ) content_encoding = environ_property( "HTTP_CONTENT_ENCODING", doc="""The Content-Encoding entity-header field is used as a modifier to the media-type. When present, its value indicates what additional content codings have been applied to the entity-body, and thus what decoding mechanisms must be applied in order to obtain the media-type referenced by the Content-Type header field. .. 
versionadded:: 0.9""", ) content_md5 = environ_property( "HTTP_CONTENT_MD5", doc="""The Content-MD5 entity-header field, as defined in RFC 1864, is an MD5 digest of the entity-body for the purpose of providing an end-to-end message integrity check (MIC) of the entity-body. (Note: a MIC is good for detecting accidental modification of the entity-body in transit, but is not proof against malicious attacks.) .. versionadded:: 0.9""", ) referrer = environ_property( "HTTP_REFERER", doc="""The Referer[sic] request-header field allows the client to specify, for the server's benefit, the address (URI) of the resource from which the Request-URI was obtained (the "referrer", although the header field is misspelled).""", ) date = environ_property( "HTTP_DATE", None, parse_date, doc="""The Date general-header field represents the date and time at which the message was originated, having the same semantics as orig-date in RFC 822.""", ) max_forwards = environ_property( "HTTP_MAX_FORWARDS", None, int, doc="""The Max-Forwards request-header field provides a mechanism with the TRACE and OPTIONS methods to limit the number of proxies or gateways that can forward the request to the next inbound server.""", ) def _parse_content_type(self): if not hasattr(self, "_parsed_content_type"): self._parsed_content_type = parse_options_header( self.environ.get("CONTENT_TYPE", "") ) @property def mimetype(self): """Like :attr:`content_type`, but without parameters (eg, without charset, type etc.) and always lowercase. For example if the content type is ``text/HTML; charset=utf-8`` the mimetype would be ``'text/html'``. """ self._parse_content_type() return self._parsed_content_type[0].lower() @property def mimetype_params(self): """The mimetype parameters as dict. For example if the content type is ``text/html; charset=utf-8`` the params would be ``{'charset': 'utf-8'}``. 
""" self._parse_content_type() return self._parsed_content_type[1] @cached_property def pragma(self): """The Pragma general-header field is used to include implementation-specific directives that might apply to any recipient along the request/response chain. All pragma directives specify optional behavior from the viewpoint of the protocol; however, some systems MAY require that behavior be consistent with the directives. """ return parse_set_header(self.environ.get("HTTP_PRAGMA", "")) class CommonResponseDescriptorsMixin(object): """A mixin for :class:`BaseResponse` subclasses. Response objects that mix this class in will automatically get descriptors for a couple of HTTP headers with automatic type conversion. """ @property def mimetype(self): """The mimetype (content type without charset etc.)""" ct = self.headers.get("content-type") if ct: return ct.split(";")[0].strip() @mimetype.setter def mimetype(self, value): self.headers["Content-Type"] = get_content_type(value, self.charset) @property def mimetype_params(self): """The mimetype parameters as dict. For example if the content type is ``text/html; charset=utf-8`` the params would be ``{'charset': 'utf-8'}``. .. versionadded:: 0.5 """ def on_update(d): self.headers["Content-Type"] = dump_options_header(self.mimetype, d) d = parse_options_header(self.headers.get("content-type", ""))[1] return CallbackDict(d, on_update) location = header_property( "Location", doc="""The Location response-header field is used to redirect the recipient to a location other than the Request-URI for completion of the request or identification of a new resource.""", ) age = header_property( "Age", None, parse_age, dump_age, doc="""The Age response-header field conveys the sender's estimate of the amount of time since the response (or its revalidation) was generated at the origin server. 
Age values are non-negative decimal integers, representing time in seconds.""", ) content_type = header_property( "Content-Type", doc="""The Content-Type entity-header field indicates the media type of the entity-body sent to the recipient or, in the case of the HEAD method, the media type that would have been sent had the request been a GET.""", ) content_length = header_property( "Content-Length", None, int, str, doc="""The Content-Length entity-header field indicates the size of the entity-body, in decimal number of OCTETs, sent to the recipient or, in the case of the HEAD method, the size of the entity-body that would have been sent had the request been a GET.""", ) content_location = header_property( "Content-Location", doc="""The Content-Location entity-header field MAY be used to supply the resource location for the entity enclosed in the message when that entity is accessible from a location separate from the requested resource's URI.""", ) content_encoding = header_property( "Content-Encoding", doc="""The Content-Encoding entity-header field is used as a modifier to the media-type. When present, its value indicates what additional content codings have been applied to the entity-body, and thus what decoding mechanisms must be applied in order to obtain the media-type referenced by the Content-Type header field.""", ) content_md5 = header_property( "Content-MD5", doc="""The Content-MD5 entity-header field, as defined in RFC 1864, is an MD5 digest of the entity-body for the purpose of providing an end-to-end message integrity check (MIC) of the entity-body. 
(Note: a MIC is good for detecting accidental modification of the entity-body in transit, but is not proof against malicious attacks.)""", ) content_security_policy = header_property( "Content-Security-Policy", None, parse_csp_header, dump_csp_header, doc="""The Content-Security-Policy header adds an additional layer of security to help detect and mitigate certain types of attacks.""", ) content_security_policy_report_only = header_property( "Content-Security-Policy-Report-Only", None, parse_csp_header, dump_csp_header, doc="""The Content-Security-Policy-Report-Only header adds a csp policy that is not enforced but is reported thereby helping detect certain types of attacks.""", ) date = header_property( "Date", None, parse_date, http_date, doc="""The Date general-header field represents the date and time at which the message was originated, having the same semantics as orig-date in RFC 822.""", ) expires = header_property( "Expires", None, parse_date, http_date, doc="""The Expires entity-header field gives the date/time after which the response is considered stale. A stale cache entry may not normally be returned by a cache.""", ) last_modified = header_property( "Last-Modified", None, parse_date, http_date, doc="""The Last-Modified entity-header field indicates the date and time at which the origin server believes the variant was last modified.""", ) @property def retry_after(self): """The Retry-After response-header field can be used with a 503 (Service Unavailable) response to indicate how long the service is expected to be unavailable to the requesting client. Time in seconds until expiration or date. 
""" value = self.headers.get("retry-after") if value is None: return elif value.isdigit(): return datetime.utcnow() + timedelta(seconds=int(value)) return parse_date(value) @retry_after.setter def retry_after(self, value): if value is None: if "retry-after" in self.headers: del self.headers["retry-after"] return elif isinstance(value, datetime): value = http_date(value) else: value = str(value) self.headers["Retry-After"] = value def _set_property(name, doc=None): # noqa: B902 def fget(self): def on_update(header_set): if not header_set and name in self.headers: del self.headers[name] elif header_set: self.headers[name] = header_set.to_header() return parse_set_header(self.headers.get(name), on_update) def fset(self, value): if not value: del self.headers[name] elif isinstance(value, string_types): self.headers[name] = value else: self.headers[name] = dump_header(value) return property(fget, fset, doc=doc) vary = _set_property( "Vary", doc="""The Vary field value indicates the set of request-header fields that fully determines, while the response is fresh, whether a cache is permitted to use the response to reply to a subsequent request without revalidation.""", ) content_language = _set_property( "Content-Language", doc="""The Content-Language entity-header field describes the natural language(s) of the intended audience for the enclosed entity. Note that this might not be equivalent to all the languages used within the entity-body.""", ) allow = _set_property( "Allow", doc="""The Allow entity-header field lists the set of methods supported by the resource identified by the Request-URI. The purpose of this field is strictly to inform the recipient of valid methods associated with the resource. An Allow header field MUST be present in a 405 (Method Not Allowed) response.""", ) del _set_property
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/wrappers/accept.py
from ..datastructures import CharsetAccept from ..datastructures import LanguageAccept from ..datastructures import MIMEAccept from ..http import parse_accept_header from ..utils import cached_property class AcceptMixin(object): """A mixin for classes with an :attr:`~BaseResponse.environ` attribute to get all the HTTP accept headers as :class:`~werkzeug.datastructures.Accept` objects (or subclasses thereof). """ @cached_property def accept_mimetypes(self): """List of mimetypes this client supports as :class:`~werkzeug.datastructures.MIMEAccept` object. """ return parse_accept_header(self.environ.get("HTTP_ACCEPT"), MIMEAccept) @cached_property def accept_charsets(self): """List of charsets this client supports as :class:`~werkzeug.datastructures.CharsetAccept` object. """ return parse_accept_header( self.environ.get("HTTP_ACCEPT_CHARSET"), CharsetAccept ) @cached_property def accept_encodings(self): """List of encodings this client accepts. Encodings in a HTTP term are compression encodings such as gzip. For charsets have a look at :attr:`accept_charset`. """ return parse_accept_header(self.environ.get("HTTP_ACCEPT_ENCODING")) @cached_property def accept_languages(self): """List of languages this client accepts as :class:`~werkzeug.datastructures.LanguageAccept` object. .. versionchanged 0.5 In previous versions this was a regular :class:`~werkzeug.datastructures.Accept` object. """ return parse_accept_header( self.environ.get("HTTP_ACCEPT_LANGUAGE"), LanguageAccept )
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/wrappers/json.py
from __future__ import absolute_import import datetime import uuid from .._compat import text_type from ..exceptions import BadRequest from ..utils import detect_utf_encoding try: import simplejson as _json except ImportError: import json as _json class _JSONModule(object): @staticmethod def _default(o): if isinstance(o, datetime.date): return o.isoformat() if isinstance(o, uuid.UUID): return str(o) if hasattr(o, "__html__"): return text_type(o.__html__()) raise TypeError() @classmethod def dumps(cls, obj, **kw): kw.setdefault("separators", (",", ":")) kw.setdefault("default", cls._default) kw.setdefault("sort_keys", True) return _json.dumps(obj, **kw) @staticmethod def loads(s, **kw): if isinstance(s, bytes): # Needed for Python < 3.6 encoding = detect_utf_encoding(s) s = s.decode(encoding) return _json.loads(s, **kw) class JSONMixin(object): """Mixin to parse :attr:`data` as JSON. Can be mixed in for both :class:`~werkzeug.wrappers.Request` and :class:`~werkzeug.wrappers.Response` classes. If `simplejson`_ is installed it is preferred over Python's built-in :mod:`json` module. .. _simplejson: https://simplejson.readthedocs.io/en/latest/ """ #: A module or other object that has ``dumps`` and ``loads`` #: functions that match the API of the built-in :mod:`json` module. json_module = _JSONModule @property def json(self): """The parsed JSON data if :attr:`mimetype` indicates JSON (:mimetype:`application/json`, see :meth:`is_json`). Calls :meth:`get_json` with default arguments. """ return self.get_json() @property def is_json(self): """Check if the mimetype indicates JSON data, either :mimetype:`application/json` or :mimetype:`application/*+json`. """ mt = self.mimetype return ( mt == "application/json" or mt.startswith("application/") and mt.endswith("+json") ) def _get_data_for_json(self, cache): try: return self.get_data(cache=cache) except TypeError: # Response doesn't have cache param. return self.get_data() # Cached values for ``(silent=False, silent=True)``. 
Initialized # with sentinel values. _cached_json = (Ellipsis, Ellipsis) def get_json(self, force=False, silent=False, cache=True): """Parse :attr:`data` as JSON. If the mimetype does not indicate JSON (:mimetype:`application/json`, see :meth:`is_json`), this returns ``None``. If parsing fails, :meth:`on_json_loading_failed` is called and its return value is used as the return value. :param force: Ignore the mimetype and always try to parse JSON. :param silent: Silence parsing errors and return ``None`` instead. :param cache: Store the parsed JSON to return for subsequent calls. """ if cache and self._cached_json[silent] is not Ellipsis: return self._cached_json[silent] if not (force or self.is_json): return None data = self._get_data_for_json(cache=cache) try: rv = self.json_module.loads(data) except ValueError as e: if silent: rv = None if cache: normal_rv, _ = self._cached_json self._cached_json = (normal_rv, rv) else: rv = self.on_json_loading_failed(e) if cache: _, silent_rv = self._cached_json self._cached_json = (rv, silent_rv) else: if cache: self._cached_json = (rv, rv) return rv def on_json_loading_failed(self, e): """Called if :meth:`get_json` parsing fails and isn't silenced. If this method returns a value, it is used as the return value for :meth:`get_json`. The default implementation raises :exc:`~werkzeug.exceptions.BadRequest`. """ raise BadRequest("Failed to decode JSON object: {0}".format(e))
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/wrappers/base_request.py
from functools import update_wrapper from io import BytesIO from .._compat import to_native from .._compat import to_unicode from .._compat import wsgi_decoding_dance from .._compat import wsgi_get_bytes from ..datastructures import CombinedMultiDict from ..datastructures import EnvironHeaders from ..datastructures import ImmutableList from ..datastructures import ImmutableMultiDict from ..datastructures import iter_multi_items from ..datastructures import MultiDict from ..formparser import default_stream_factory from ..formparser import FormDataParser from ..http import parse_cookie from ..http import parse_list_header from ..http import parse_options_header from ..urls import url_decode from ..utils import cached_property from ..utils import environ_property from ..wsgi import get_content_length from ..wsgi import get_current_url from ..wsgi import get_host from ..wsgi import get_input_stream class BaseRequest(object): """Very basic request object. This does not implement advanced stuff like entity tag parsing or cache controls. The request object is created with the WSGI environment as first argument and will add itself to the WSGI environment as ``'werkzeug.request'`` unless it's created with `populate_request` set to False. There are a couple of mixins available that add additional functionality to the request object, there is also a class called `Request` which subclasses `BaseRequest` and all the important mixins. It's a good idea to create a custom subclass of the :class:`BaseRequest` and add missing functionality either via mixins or direct implementation. Here an example for such subclasses:: from werkzeug.wrappers import BaseRequest, ETagRequestMixin class Request(BaseRequest, ETagRequestMixin): pass Request objects are **read only**. As of 0.5 modifications are not allowed in any place. Unlike the lower level parsing functions the request object will use immutable objects everywhere possible. 
Per default the request object will assume all the text data is `utf-8` encoded. Please refer to :doc:`the unicode chapter </unicode>` for more details about customizing the behavior. Per default the request object will be added to the WSGI environment as `werkzeug.request` to support the debugging system. If you don't want that, set `populate_request` to `False`. If `shallow` is `True` the environment is initialized as shallow object around the environ. Every operation that would modify the environ in any way (such as consuming form data) raises an exception unless the `shallow` attribute is explicitly set to `False`. This is useful for middlewares where you don't want to consume the form data by accident. A shallow request is not populated to the WSGI environment. .. versionchanged:: 0.5 read-only mode was enforced by using immutables classes for all data. """ #: the charset for the request, defaults to utf-8 charset = "utf-8" #: the error handling procedure for errors, defaults to 'replace' encoding_errors = "replace" #: the maximum content length. This is forwarded to the form data #: parsing function (:func:`parse_form_data`). When set and the #: :attr:`form` or :attr:`files` attribute is accessed and the #: parsing fails because more than the specified value is transmitted #: a :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised. #: #: Have a look at :ref:`dealing-with-request-data` for more details. #: #: .. versionadded:: 0.5 max_content_length = None #: the maximum form field size. This is forwarded to the form data #: parsing function (:func:`parse_form_data`). When set and the #: :attr:`form` or :attr:`files` attribute is accessed and the #: data in memory for post data is longer than the specified value a #: :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised. #: #: Have a look at :ref:`dealing-with-request-data` for more details. #: #: .. 
versionadded:: 0.5 max_form_memory_size = None #: the class to use for `args` and `form`. The default is an #: :class:`~werkzeug.datastructures.ImmutableMultiDict` which supports #: multiple values per key. alternatively it makes sense to use an #: :class:`~werkzeug.datastructures.ImmutableOrderedMultiDict` which #: preserves order or a :class:`~werkzeug.datastructures.ImmutableDict` #: which is the fastest but only remembers the last key. It is also #: possible to use mutable structures, but this is not recommended. #: #: .. versionadded:: 0.6 parameter_storage_class = ImmutableMultiDict #: the type to be used for list values from the incoming WSGI environment. #: By default an :class:`~werkzeug.datastructures.ImmutableList` is used #: (for example for :attr:`access_list`). #: #: .. versionadded:: 0.6 list_storage_class = ImmutableList #: The type to be used for dict values from the incoming WSGI #: environment. (For example for :attr:`cookies`.) By default an #: :class:`~werkzeug.datastructures.ImmutableMultiDict` is used. #: #: .. versionchanged:: 1.0.0 #: Changed to ``ImmutableMultiDict`` to support multiple values. #: #: .. versionadded:: 0.6 dict_storage_class = ImmutableMultiDict #: The form data parser that shoud be used. Can be replaced to customize #: the form date parsing. form_data_parser_class = FormDataParser #: Optionally a list of hosts that is trusted by this request. By default #: all hosts are trusted which means that whatever the client sends the #: host is will be accepted. #: #: Because `Host` and `X-Forwarded-Host` headers can be set to any value by #: a malicious client, it is recommended to either set this property or #: implement similar validation in the proxy (if application is being run #: behind one). #: #: .. versionadded:: 0.9 trusted_hosts = None #: Indicates whether the data descriptor should be allowed to read and #: buffer up the input stream. By default it's enabled. #: #: .. 
versionadded:: 0.9 disable_data_descriptor = False def __init__(self, environ, populate_request=True, shallow=False): self.environ = environ if populate_request and not shallow: self.environ["werkzeug.request"] = self self.shallow = shallow def __repr__(self): # make sure the __repr__ even works if the request was created # from an invalid WSGI environment. If we display the request # in a debug session we don't want the repr to blow up. args = [] try: args.append("'%s'" % to_native(self.url, self.url_charset)) args.append("[%s]" % self.method) except Exception: args.append("(invalid WSGI environ)") return "<%s %s>" % (self.__class__.__name__, " ".join(args)) @property def url_charset(self): """The charset that is assumed for URLs. Defaults to the value of :attr:`charset`. .. versionadded:: 0.6 """ return self.charset @classmethod def from_values(cls, *args, **kwargs): """Create a new request object based on the values provided. If environ is given missing values are filled from there. This method is useful for small scripts when you need to simulate a request from an URL. Do not use this method for unittesting, there is a full featured client object (:class:`Client`) that allows to create multipart requests, support for cookies etc. This accepts the same options as the :class:`~werkzeug.test.EnvironBuilder`. .. versionchanged:: 0.5 This method now accepts the same arguments as :class:`~werkzeug.test.EnvironBuilder`. Because of this the `environ` parameter is now called `environ_overrides`. :return: request object """ from ..test import EnvironBuilder charset = kwargs.pop("charset", cls.charset) kwargs["charset"] = charset builder = EnvironBuilder(*args, **kwargs) try: return builder.get_request(cls) finally: builder.close() @classmethod def application(cls, f): """Decorate a function as responder that accepts the request as the last argument. 
This works like the :func:`responder` decorator but the function is passed the request object as the last argument and the request object will be closed automatically:: @Request.application def my_wsgi_app(request): return Response('Hello World!') As of Werkzeug 0.14 HTTP exceptions are automatically caught and converted to responses instead of failing. :param f: the WSGI callable to decorate :return: a new WSGI callable """ #: return a callable that wraps the -2nd argument with the request #: and calls the function with all the arguments up to that one and #: the request. The return value is then called with the latest #: two arguments. This makes it possible to use this decorator for #: both standalone WSGI functions as well as bound methods and #: partially applied functions. from ..exceptions import HTTPException def application(*args): request = cls(args[-2]) with request: try: resp = f(*args[:-2] + (request,)) except HTTPException as e: resp = e.get_response(args[-2]) return resp(*args[-2:]) return update_wrapper(application, f) def _get_file_stream( self, total_content_length, content_type, filename=None, content_length=None ): """Called to get a stream for the file upload. This must provide a file-like class with `read()`, `readline()` and `seek()` methods that is both writeable and readable. The default implementation returns a temporary file if the total content length is higher than 500KB. Because many browsers do not provide a content length for the files only the total content length matters. :param total_content_length: the total content length of all the data in the request combined. This value is guaranteed to be there. :param content_type: the mimetype of the uploaded file. :param filename: the filename of the uploaded file. May be `None`. :param content_length: the length of this file. This value is usually not provided because webbrowsers do not provide this value. 
""" return default_stream_factory( total_content_length=total_content_length, filename=filename, content_type=content_type, content_length=content_length, ) @property def want_form_data_parsed(self): """Returns True if the request method carries content. As of Werkzeug 0.9 this will be the case if a content type is transmitted. .. versionadded:: 0.8 """ return bool(self.environ.get("CONTENT_TYPE")) def make_form_data_parser(self): """Creates the form data parser. Instantiates the :attr:`form_data_parser_class` with some parameters. .. versionadded:: 0.8 """ return self.form_data_parser_class( self._get_file_stream, self.charset, self.encoding_errors, self.max_form_memory_size, self.max_content_length, self.parameter_storage_class, ) def _load_form_data(self): """Method used internally to retrieve submitted data. After calling this sets `form` and `files` on the request object to multi dicts filled with the incoming form data. As a matter of fact the input stream will be empty afterwards. You can also call this method to force the parsing of the form data. .. versionadded:: 0.8 """ # abort early if we have already consumed the stream if "form" in self.__dict__: return _assert_not_shallow(self) if self.want_form_data_parsed: content_type = self.environ.get("CONTENT_TYPE", "") content_length = get_content_length(self.environ) mimetype, options = parse_options_header(content_type) parser = self.make_form_data_parser() data = parser.parse( self._get_stream_for_parsing(), mimetype, content_length, options ) else: data = ( self.stream, self.parameter_storage_class(), self.parameter_storage_class(), ) # inject the values into the instance dict so that we bypass # our cached_property non-data descriptor. 
d = self.__dict__ d["stream"], d["form"], d["files"] = data def _get_stream_for_parsing(self): """This is the same as accessing :attr:`stream` with the difference that if it finds cached data from calling :meth:`get_data` first it will create a new stream out of the cached data. .. versionadded:: 0.9.3 """ cached_data = getattr(self, "_cached_data", None) if cached_data is not None: return BytesIO(cached_data) return self.stream def close(self): """Closes associated resources of this request object. This closes all file handles explicitly. You can also use the request object in a with statement which will automatically close it. .. versionadded:: 0.9 """ files = self.__dict__.get("files") for _key, value in iter_multi_items(files or ()): value.close() def __enter__(self): return self def __exit__(self, exc_type, exc_value, tb): self.close() @cached_property def stream(self): """ If the incoming form data was not encoded with a known mimetype the data is stored unmodified in this stream for consumption. Most of the time it is a better idea to use :attr:`data` which will give you that data as a string. The stream only returns the data once. Unlike :attr:`input_stream` this stream is properly guarded that you can't accidentally read past the length of the input. Werkzeug will internally always refer to this stream to read data which makes it possible to wrap this object with a stream that does filtering. .. versionchanged:: 0.9 This stream is now always available but might be consumed by the form parser later on. Previously the stream was only set if no parsing happened. """ _assert_not_shallow(self) return get_input_stream(self.environ) input_stream = environ_property( "wsgi.input", """The WSGI input stream. In general it's a bad idea to use this one because you can easily read past the boundary. Use the :attr:`stream` instead.""", ) @cached_property def args(self): """The parsed URL parameters (the part in the URL after the question mark). 
By default an :class:`~werkzeug.datastructures.ImmutableMultiDict` is returned from this function. This can be changed by setting :attr:`parameter_storage_class` to a different type. This might be necessary if the order of the form data is important. """ return url_decode( wsgi_get_bytes(self.environ.get("QUERY_STRING", "")), self.url_charset, errors=self.encoding_errors, cls=self.parameter_storage_class, ) @cached_property def data(self): """ Contains the incoming request data as string in case it came with a mimetype Werkzeug does not handle. """ if self.disable_data_descriptor: raise AttributeError("data descriptor is disabled") # XXX: this should eventually be deprecated. # We trigger form data parsing first which means that the descriptor # will not cache the data that would otherwise be .form or .files # data. This restores the behavior that was there in Werkzeug # before 0.9. New code should use :meth:`get_data` explicitly as # this will make behavior explicit. return self.get_data(parse_form_data=True) def get_data(self, cache=True, as_text=False, parse_form_data=False): """This reads the buffered incoming data from the client into one bytestring. By default this is cached but that behavior can be changed by setting `cache` to `False`. Usually it's a bad idea to call this method without checking the content length first as a client could send dozens of megabytes or more to cause memory problems on the server. Note that if the form data was already parsed this method will not return anything as form data parsing does not cache the data like this method does. To implicitly invoke form data parsing function set `parse_form_data` to `True`. When this is done the return value of this method will be an empty string if the form parser handles the data. This generally is not necessary as if the whole data is cached (which is the default) the form parser will used the cached data to parse the form data. 
Please be generally aware of checking the content length first in any case before calling this method to avoid exhausting server memory. If `as_text` is set to `True` the return value will be a decoded unicode string. .. versionadded:: 0.9 """ rv = getattr(self, "_cached_data", None) if rv is None: if parse_form_data: self._load_form_data() rv = self.stream.read() if cache: self._cached_data = rv if as_text: rv = rv.decode(self.charset, self.encoding_errors) return rv @cached_property def form(self): """The form parameters. By default an :class:`~werkzeug.datastructures.ImmutableMultiDict` is returned from this function. This can be changed by setting :attr:`parameter_storage_class` to a different type. This might be necessary if the order of the form data is important. Please keep in mind that file uploads will not end up here, but instead in the :attr:`files` attribute. .. versionchanged:: 0.9 Previous to Werkzeug 0.9 this would only contain form data for POST and PUT requests. """ self._load_form_data() return self.form @cached_property def values(self): """A :class:`werkzeug.datastructures.CombinedMultiDict` that combines :attr:`args` and :attr:`form`.""" args = [] for d in self.args, self.form: if not isinstance(d, MultiDict): d = MultiDict(d) args.append(d) return CombinedMultiDict(args) @cached_property def files(self): """:class:`~werkzeug.datastructures.MultiDict` object containing all uploaded files. Each key in :attr:`files` is the name from the ``<input type="file" name="">``. Each value in :attr:`files` is a Werkzeug :class:`~werkzeug.datastructures.FileStorage` object. It basically behaves like a standard file object you know from Python, with the difference that it also has a :meth:`~werkzeug.datastructures.FileStorage.save` function that can store the file on the filesystem. Note that :attr:`files` will only contain data if the request method was POST, PUT or PATCH and the ``<form>`` that posted to the request had ``enctype="multipart/form-data"``. 
It will be empty otherwise. See the :class:`~werkzeug.datastructures.MultiDict` / :class:`~werkzeug.datastructures.FileStorage` documentation for more details about the used data structure. """ self._load_form_data() return self.files @cached_property def cookies(self): """A :class:`dict` with the contents of all cookies transmitted with the request.""" return parse_cookie( self.environ, self.charset, self.encoding_errors, cls=self.dict_storage_class, ) @cached_property def headers(self): """The headers from the WSGI environ as immutable :class:`~werkzeug.datastructures.EnvironHeaders`. """ return EnvironHeaders(self.environ) @cached_property def path(self): """Requested path as unicode. This works a bit like the regular path info in the WSGI environment but will always include a leading slash, even if the URL root is accessed. """ raw_path = wsgi_decoding_dance( self.environ.get("PATH_INFO") or "", self.charset, self.encoding_errors ) return "/" + raw_path.lstrip("/") @cached_property def full_path(self): """Requested path as unicode, including the query string.""" return self.path + u"?" + to_unicode(self.query_string, self.url_charset) @cached_property def script_root(self): """The root path of the script without the trailing slash.""" raw_path = wsgi_decoding_dance( self.environ.get("SCRIPT_NAME") or "", self.charset, self.encoding_errors ) return raw_path.rstrip("/") @cached_property def url(self): """The reconstructed current URL as IRI. See also: :attr:`trusted_hosts`. """ return get_current_url(self.environ, trusted_hosts=self.trusted_hosts) @cached_property def base_url(self): """Like :attr:`url` but without the querystring See also: :attr:`trusted_hosts`. """ return get_current_url( self.environ, strip_querystring=True, trusted_hosts=self.trusted_hosts ) @cached_property def url_root(self): """The full URL root (with hostname), this is the application root as IRI. See also: :attr:`trusted_hosts`. 
""" return get_current_url(self.environ, True, trusted_hosts=self.trusted_hosts) @cached_property def host_url(self): """Just the host with scheme as IRI. See also: :attr:`trusted_hosts`. """ return get_current_url( self.environ, host_only=True, trusted_hosts=self.trusted_hosts ) @cached_property def host(self): """Just the host including the port if available. See also: :attr:`trusted_hosts`. """ return get_host(self.environ, trusted_hosts=self.trusted_hosts) query_string = environ_property( "QUERY_STRING", "", read_only=True, load_func=wsgi_get_bytes, doc="The URL parameters as raw bytestring.", ) method = environ_property( "REQUEST_METHOD", "GET", read_only=True, load_func=lambda x: x.upper(), doc="The request method. (For example ``'GET'`` or ``'POST'``).", ) @cached_property def access_route(self): """If a forwarded header exists this is a list of all ip addresses from the client ip to the last proxy server. """ if "HTTP_X_FORWARDED_FOR" in self.environ: return self.list_storage_class( parse_list_header(self.environ["HTTP_X_FORWARDED_FOR"]) ) elif "REMOTE_ADDR" in self.environ: return self.list_storage_class([self.environ["REMOTE_ADDR"]]) return self.list_storage_class() @property def remote_addr(self): """The remote address of the client.""" return self.environ.get("REMOTE_ADDR") remote_user = environ_property( "REMOTE_USER", doc="""If the server supports user authentication, and the script is protected, this attribute contains the username the user has authenticated as.""", ) scheme = environ_property( "wsgi.url_scheme", doc=""" URL scheme (http or https). .. 
versionadded:: 0.7""", ) is_secure = property( lambda self: self.environ["wsgi.url_scheme"] == "https", doc="`True` if the request is secure.", ) is_multithread = environ_property( "wsgi.multithread", doc="""boolean that is `True` if the application is served by a multithreaded WSGI server.""", ) is_multiprocess = environ_property( "wsgi.multiprocess", doc="""boolean that is `True` if the application is served by a WSGI server that spawns multiple processes.""", ) is_run_once = environ_property( "wsgi.run_once", doc="""boolean that is `True` if the application will be executed only once in a process lifetime. This is the case for CGI for example, but it's not guaranteed that the execution only happens one time.""", ) def _assert_not_shallow(request): if request.shallow: raise RuntimeError( "A shallow request tried to consume form data. If you really" " want to do that, set `shallow` to False." )
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/debug/console.py
# -*- coding: utf-8 -*- """ werkzeug.debug.console ~~~~~~~~~~~~~~~~~~~~~~ Interactive console support. :copyright: 2007 Pallets :license: BSD-3-Clause """ import code import sys from types import CodeType from ..local import Local from ..utils import escape from .repr import debug_repr from .repr import dump from .repr import helper _local = Local() class HTMLStringO(object): """A StringO version that HTML escapes on write.""" def __init__(self): self._buffer = [] def isatty(self): return False def close(self): pass def flush(self): pass def seek(self, n, mode=0): pass def readline(self): if len(self._buffer) == 0: return "" ret = self._buffer[0] del self._buffer[0] return ret def reset(self): val = "".join(self._buffer) del self._buffer[:] return val def _write(self, x): if isinstance(x, bytes): x = x.decode("utf-8", "replace") self._buffer.append(x) def write(self, x): self._write(escape(x)) def writelines(self, x): self._write(escape("".join(x))) class ThreadedStream(object): """Thread-local wrapper for sys.stdout for the interactive console.""" @staticmethod def push(): if not isinstance(sys.stdout, ThreadedStream): sys.stdout = ThreadedStream() _local.stream = HTMLStringO() @staticmethod def fetch(): try: stream = _local.stream except AttributeError: return "" return stream.reset() @staticmethod def displayhook(obj): try: stream = _local.stream except AttributeError: return _displayhook(obj) # stream._write bypasses escaping as debug_repr is # already generating HTML for us. 
if obj is not None: _local._current_ipy.locals["_"] = obj stream._write(debug_repr(obj)) def __setattr__(self, name, value): raise AttributeError("read only attribute %s" % name) def __dir__(self): return dir(sys.__stdout__) def __getattribute__(self, name): if name == "__members__": return dir(sys.__stdout__) try: stream = _local.stream except AttributeError: stream = sys.__stdout__ return getattr(stream, name) def __repr__(self): return repr(sys.__stdout__) # add the threaded stream as display hook _displayhook = sys.displayhook sys.displayhook = ThreadedStream.displayhook class _ConsoleLoader(object): def __init__(self): self._storage = {} def register(self, code, source): self._storage[id(code)] = source # register code objects of wrapped functions too. for var in code.co_consts: if isinstance(var, CodeType): self._storage[id(var)] = source def get_source_by_code(self, code): try: return self._storage[id(code)] except KeyError: pass def _wrap_compiler(console): compile = console.compile def func(source, filename, symbol): code = compile(source, filename, symbol) console.loader.register(code, source) return code console.compile = func class _InteractiveConsole(code.InteractiveInterpreter): def __init__(self, globals, locals): _locals = dict(globals) _locals.update(locals) locals = _locals locals["dump"] = dump locals["help"] = helper locals["__loader__"] = self.loader = _ConsoleLoader() code.InteractiveInterpreter.__init__(self, locals) self.more = False self.buffer = [] _wrap_compiler(self) def runsource(self, source): source = source.rstrip() + "\n" ThreadedStream.push() prompt = "... 
" if self.more else ">>> " try: source_to_eval = "".join(self.buffer + [source]) if code.InteractiveInterpreter.runsource( self, source_to_eval, "<debugger>", "single" ): self.more = True self.buffer.append(source) else: self.more = False del self.buffer[:] finally: output = ThreadedStream.fetch() return prompt + escape(source) + output def runcode(self, code): try: exec(code, self.locals) except Exception: self.showtraceback() def showtraceback(self): from .tbtools import get_current_traceback tb = get_current_traceback(skip=1) sys.stdout._write(tb.render_summary()) def showsyntaxerror(self, filename=None): from .tbtools import get_current_traceback tb = get_current_traceback(skip=4) sys.stdout._write(tb.render_summary()) def write(self, data): sys.stdout.write(data) class Console(object): """An interactive console.""" def __init__(self, globals=None, locals=None): if locals is None: locals = {} if globals is None: globals = {} self._ipy = _InteractiveConsole(globals, locals) def eval(self, code): _local._current_ipy = self._ipy old_sys_stdout = sys.stdout try: return self._ipy.runsource(code) finally: sys.stdout = old_sys_stdout
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/debug/tbtools.py
# -*- coding: utf-8 -*- """ werkzeug.debug.tbtools ~~~~~~~~~~~~~~~~~~~~~~ This module provides various traceback related utility functions. :copyright: 2007 Pallets :license: BSD-3-Clause """ import codecs import inspect import json import os import re import sys import sysconfig import traceback from tokenize import TokenError from .._compat import PY2 from .._compat import range_type from .._compat import reraise from .._compat import string_types from .._compat import text_type from .._compat import to_native from .._compat import to_unicode from ..filesystem import get_filesystem_encoding from ..utils import cached_property from ..utils import escape from .console import Console _coding_re = re.compile(br"coding[:=]\s*([-\w.]+)") _line_re = re.compile(br"^(.*?)$", re.MULTILINE) _funcdef_re = re.compile(r"^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)") UTF8_COOKIE = b"\xef\xbb\xbf" system_exceptions = (SystemExit, KeyboardInterrupt) try: system_exceptions += (GeneratorExit,) except NameError: pass HEADER = u"""\ <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <html> <head> <title>%(title)s // Werkzeug Debugger</title> <link rel="stylesheet" href="?__debugger__=yes&amp;cmd=resource&amp;f=style.css" type="text/css"> <!-- We need to make sure this has a favicon so that the debugger does not by accident trigger a request to /favicon.ico which might change the application state. 
--> <link rel="shortcut icon" href="?__debugger__=yes&amp;cmd=resource&amp;f=console.png"> <script src="?__debugger__=yes&amp;cmd=resource&amp;f=jquery.js"></script> <script src="?__debugger__=yes&amp;cmd=resource&amp;f=debugger.js"></script> <script type="text/javascript"> var TRACEBACK = %(traceback_id)d, CONSOLE_MODE = %(console)s, EVALEX = %(evalex)s, EVALEX_TRUSTED = %(evalex_trusted)s, SECRET = "%(secret)s"; </script> </head> <body style="background-color: #fff"> <div class="debugger"> """ FOOTER = u"""\ <div class="footer"> Brought to you by <strong class="arthur">DON'T PANIC</strong>, your friendly Werkzeug powered traceback interpreter. </div> </div> <div class="pin-prompt"> <div class="inner"> <h3>Console Locked</h3> <p> The console is locked and needs to be unlocked by entering the PIN. You can find the PIN printed out on the standard output of your shell that runs the server. <form> <p>PIN: <input type=text name=pin size=14> <input type=submit name=btn value="Confirm Pin"> </form> </div> </div> </body> </html> """ PAGE_HTML = ( HEADER + u"""\ <h1>%(exception_type)s</h1> <div class="detail"> <p class="errormsg">%(exception)s</p> </div> <h2 class="traceback">Traceback <em>(most recent call last)</em></h2> %(summary)s <div class="plain"> <form action="/?__debugger__=yes&amp;cmd=paste" method="post"> <p> <input type="hidden" name="language" value="pytb"> This is the Copy/Paste friendly version of the traceback. <span class="pastemessage">You can also paste this traceback into a <a href="https://gist.github.com/">gist</a>: <input type="submit" value="create paste"></span> </p> <textarea cols="50" rows="10" name="code" readonly>%(plaintext)s</textarea> </form> </div> <div class="explanation"> The debugger caught an exception in your WSGI application. You can now look at the traceback which led to the error. 
<span class="nojavascript"> If you enable JavaScript you can also use additional features such as code execution (if the evalex feature is enabled), automatic pasting of the exceptions and much more.</span> </div> """ + FOOTER + """ <!-- %(plaintext_cs)s --> """ ) CONSOLE_HTML = ( HEADER + u"""\ <h1>Interactive Console</h1> <div class="explanation"> In this console you can execute Python expressions in the context of the application. The initial namespace was created by the debugger automatically. </div> <div class="console"><div class="inner">The Console requires JavaScript.</div></div> """ + FOOTER ) SUMMARY_HTML = u"""\ <div class="%(classes)s"> %(title)s <ul>%(frames)s</ul> %(description)s </div> """ FRAME_HTML = u"""\ <div class="frame" id="frame-%(id)d"> <h4>File <cite class="filename">"%(filename)s"</cite>, line <em class="line">%(lineno)s</em>, in <code class="function">%(function_name)s</code></h4> <div class="source %(library)s">%(lines)s</div> </div> """ SOURCE_LINE_HTML = u"""\ <tr class="%(classes)s"> <td class=lineno>%(lineno)s</td> <td>%(code)s</td> </tr> """ def render_console_html(secret, evalex_trusted=True): return CONSOLE_HTML % { "evalex": "true", "evalex_trusted": "true" if evalex_trusted else "false", "console": "true", "title": "Console", "secret": secret, "traceback_id": -1, } def get_current_traceback( ignore_system_exceptions=False, show_hidden_frames=False, skip=0 ): """Get the current exception info as `Traceback` object. Per default calling this method will reraise system exceptions such as generator exit, system exit or others. This behavior can be disabled by passing `False` to the function as first parameter. 
""" exc_type, exc_value, tb = sys.exc_info() if ignore_system_exceptions and exc_type in system_exceptions: reraise(exc_type, exc_value, tb) for _ in range_type(skip): if tb.tb_next is None: break tb = tb.tb_next tb = Traceback(exc_type, exc_value, tb) if not show_hidden_frames: tb.filter_hidden_frames() return tb class Line(object): """Helper for the source renderer.""" __slots__ = ("lineno", "code", "in_frame", "current") def __init__(self, lineno, code): self.lineno = lineno self.code = code self.in_frame = False self.current = False @property def classes(self): rv = ["line"] if self.in_frame: rv.append("in-frame") if self.current: rv.append("current") return rv def render(self): return SOURCE_LINE_HTML % { "classes": u" ".join(self.classes), "lineno": self.lineno, "code": escape(self.code), } class Traceback(object): """Wraps a traceback.""" def __init__(self, exc_type, exc_value, tb): self.exc_type = exc_type self.exc_value = exc_value self.tb = tb exception_type = exc_type.__name__ if exc_type.__module__ not in {"builtins", "__builtin__", "exceptions"}: exception_type = exc_type.__module__ + "." 
+ exception_type self.exception_type = exception_type self.groups = [] memo = set() while True: self.groups.append(Group(exc_type, exc_value, tb)) memo.add(id(exc_value)) if PY2: break exc_value = exc_value.__cause__ or exc_value.__context__ if exc_value is None or id(exc_value) in memo: break exc_type = type(exc_value) tb = exc_value.__traceback__ self.groups.reverse() self.frames = [frame for group in self.groups for frame in group.frames] def filter_hidden_frames(self): """Remove the frames according to the paste spec.""" for group in self.groups: group.filter_hidden_frames() self.frames[:] = [frame for group in self.groups for frame in group.frames] @property def is_syntax_error(self): """Is it a syntax error?""" return isinstance(self.exc_value, SyntaxError) @property def exception(self): """String representation of the final exception.""" return self.groups[-1].exception def log(self, logfile=None): """Log the ASCII traceback into a file object.""" if logfile is None: logfile = sys.stderr tb = self.plaintext.rstrip() + u"\n" logfile.write(to_native(tb, "utf-8", "replace")) def paste(self): """Create a paste and return the paste id.""" data = json.dumps( { "description": "Werkzeug Internal Server Error", "public": False, "files": {"traceback.txt": {"content": self.plaintext}}, } ).encode("utf-8") try: from urllib2 import urlopen except ImportError: from urllib.request import urlopen rv = urlopen("https://api.github.com/gists", data=data) resp = json.loads(rv.read().decode("utf-8")) rv.close() return {"url": resp["html_url"], "id": resp["id"]} def render_summary(self, include_title=True): """Render the traceback for the interactive console.""" title = "" classes = ["traceback"] if not self.frames: classes.append("noframe-traceback") frames = [] else: library_frames = sum(frame.is_library for frame in self.frames) mark_lib = 0 < library_frames < len(self.frames) frames = [group.render(mark_lib=mark_lib) for group in self.groups] if include_title: if 
self.is_syntax_error: title = u"Syntax Error" else: title = u"Traceback <em>(most recent call last)</em>:" if self.is_syntax_error: description_wrapper = u"<pre class=syntaxerror>%s</pre>" else: description_wrapper = u"<blockquote>%s</blockquote>" return SUMMARY_HTML % { "classes": u" ".join(classes), "title": u"<h3>%s</h3>" % title if title else u"", "frames": u"\n".join(frames), "description": description_wrapper % escape(self.exception), } def render_full(self, evalex=False, secret=None, evalex_trusted=True): """Render the Full HTML page with the traceback info.""" exc = escape(self.exception) return PAGE_HTML % { "evalex": "true" if evalex else "false", "evalex_trusted": "true" if evalex_trusted else "false", "console": "false", "title": exc, "exception": exc, "exception_type": escape(self.exception_type), "summary": self.render_summary(include_title=False), "plaintext": escape(self.plaintext), "plaintext_cs": re.sub("-{2,}", "-", self.plaintext), "traceback_id": self.id, "secret": secret, } @cached_property def plaintext(self): return u"\n".join([group.render_text() for group in self.groups]) @property def id(self): return id(self) class Group(object): """A group of frames for an exception in a traceback. On Python 3, if the exception has a ``__cause__`` or ``__context__``, there are multiple exception groups. 
""" def __init__(self, exc_type, exc_value, tb): self.exc_type = exc_type self.exc_value = exc_value self.info = None if not PY2: if exc_value.__cause__ is not None: self.info = ( u"The above exception was the direct cause of the" u" following exception" ) elif exc_value.__context__ is not None: self.info = ( u"During handling of the above exception, another" u" exception occurred" ) self.frames = [] while tb is not None: self.frames.append(Frame(exc_type, exc_value, tb)) tb = tb.tb_next def filter_hidden_frames(self): new_frames = [] hidden = False for frame in self.frames: hide = frame.hide if hide in ("before", "before_and_this"): new_frames = [] hidden = False if hide == "before_and_this": continue elif hide in ("reset", "reset_and_this"): hidden = False if hide == "reset_and_this": continue elif hide in ("after", "after_and_this"): hidden = True if hide == "after_and_this": continue elif hide or hidden: continue new_frames.append(frame) # if we only have one frame and that frame is from the codeop # module, remove it. 
if len(new_frames) == 1 and self.frames[0].module == "codeop": del self.frames[:] # if the last frame is missing something went terrible wrong :( elif self.frames[-1] in new_frames: self.frames[:] = new_frames @property def exception(self): """String representation of the exception.""" buf = traceback.format_exception_only(self.exc_type, self.exc_value) rv = "".join(buf).strip() return to_unicode(rv, "utf-8", "replace") def render(self, mark_lib=True): out = [] if self.info is not None: out.append(u'<li><div class="exc-divider">%s:</div>' % self.info) for frame in self.frames: out.append( u"<li%s>%s" % ( u' title="%s"' % escape(frame.info) if frame.info else u"", frame.render(mark_lib=mark_lib), ) ) return u"\n".join(out) def render_text(self): out = [] if self.info is not None: out.append(u"\n%s:\n" % self.info) out.append(u"Traceback (most recent call last):") for frame in self.frames: out.append(frame.render_text()) out.append(self.exception) return u"\n".join(out) class Frame(object): """A single frame in a traceback.""" def __init__(self, exc_type, exc_value, tb): self.lineno = tb.tb_lineno self.function_name = tb.tb_frame.f_code.co_name self.locals = tb.tb_frame.f_locals self.globals = tb.tb_frame.f_globals fn = inspect.getsourcefile(tb) or inspect.getfile(tb) if fn[-4:] in (".pyo", ".pyc"): fn = fn[:-1] # if it's a file on the file system resolve the real filename. 
if os.path.isfile(fn): fn = os.path.realpath(fn) self.filename = to_unicode(fn, get_filesystem_encoding()) self.module = self.globals.get("__name__", self.locals.get("__name__")) self.loader = self.globals.get("__loader__", self.locals.get("__loader__")) self.code = tb.tb_frame.f_code # support for paste's traceback extensions self.hide = self.locals.get("__traceback_hide__", False) info = self.locals.get("__traceback_info__") if info is not None: info = to_unicode(info, "utf-8", "replace") self.info = info def render(self, mark_lib=True): """Render a single frame in a traceback.""" return FRAME_HTML % { "id": self.id, "filename": escape(self.filename), "lineno": self.lineno, "function_name": escape(self.function_name), "lines": self.render_line_context(), "library": "library" if mark_lib and self.is_library else "", } @cached_property def is_library(self): return any( self.filename.startswith(path) for path in sysconfig.get_paths().values() ) def render_text(self): return u' File "%s", line %s, in %s\n %s' % ( self.filename, self.lineno, self.function_name, self.current_line.strip(), ) def render_line_context(self): before, current, after = self.get_context_lines() rv = [] def render_line(line, cls): line = line.expandtabs().rstrip() stripped_line = line.strip() prefix = len(line) - len(stripped_line) rv.append( '<pre class="line %s"><span class="ws">%s</span>%s</pre>' % (cls, " " * prefix, escape(stripped_line) or " ") ) for line in before: render_line(line, "before") render_line(current, "current") for line in after: render_line(line, "after") return "\n".join(rv) def get_annotated_lines(self): """Helper function that returns lines with extra information.""" lines = [Line(idx + 1, x) for idx, x in enumerate(self.sourcelines)] # find function definition and mark lines if hasattr(self.code, "co_firstlineno"): lineno = self.code.co_firstlineno - 1 while lineno > 0: if _funcdef_re.match(lines[lineno].code): break lineno -= 1 try: offset = 
len(inspect.getblock([x.code + "\n" for x in lines[lineno:]])) except TokenError: offset = 0 for line in lines[lineno : lineno + offset]: line.in_frame = True # mark current line try: lines[self.lineno - 1].current = True except IndexError: pass return lines def eval(self, code, mode="single"): """Evaluate code in the context of the frame.""" if isinstance(code, string_types): if PY2 and isinstance(code, text_type): # noqa code = UTF8_COOKIE + code.encode("utf-8") code = compile(code, "<interactive>", mode) return eval(code, self.globals, self.locals) @cached_property def sourcelines(self): """The sourcecode of the file as list of unicode strings.""" # get sourcecode from loader or file source = None if self.loader is not None: try: if hasattr(self.loader, "get_source"): source = self.loader.get_source(self.module) elif hasattr(self.loader, "get_source_by_code"): source = self.loader.get_source_by_code(self.code) except Exception: # we munch the exception so that we don't cause troubles # if the loader is broken. pass if source is None: try: with open( to_native(self.filename, get_filesystem_encoding()), mode="rb" ) as f: source = f.read() except IOError: return [] # already unicode? return right away if isinstance(source, text_type): return source.splitlines() # yes. 
it should be ascii, but we don't want to reject too many # characters in the debugger if something breaks charset = "utf-8" if source.startswith(UTF8_COOKIE): source = source[3:] else: for idx, match in enumerate(_line_re.finditer(source)): match = _coding_re.search(match.group()) if match is not None: charset = match.group(1) break if idx > 1: break # on broken cookies we fall back to utf-8 too charset = to_native(charset) try: codecs.lookup(charset) except LookupError: charset = "utf-8" return source.decode(charset, "replace").splitlines() def get_context_lines(self, context=5): before = self.sourcelines[self.lineno - context - 1 : self.lineno - 1] past = self.sourcelines[self.lineno : self.lineno + context] return (before, self.current_line, past) @property def current_line(self): try: return self.sourcelines[self.lineno - 1] except IndexError: return u"" @cached_property def console(self): return Console(self.globals, self.locals) @property def id(self): return id(self)
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/debug/__init__.py
# -*- coding: utf-8 -*- """ werkzeug.debug ~~~~~~~~~~~~~~ WSGI application traceback debugger. :copyright: 2007 Pallets :license: BSD-3-Clause """ import getpass import hashlib import json import mimetypes import os import pkgutil import re import sys import time import uuid from itertools import chain from os.path import basename from os.path import join from .._compat import text_type from .._internal import _log from ..http import parse_cookie from ..security import gen_salt from ..wrappers import BaseRequest as Request from ..wrappers import BaseResponse as Response from .console import Console from .tbtools import get_current_traceback from .tbtools import render_console_html # A week PIN_TIME = 60 * 60 * 24 * 7 def hash_pin(pin): if isinstance(pin, text_type): pin = pin.encode("utf-8", "replace") return hashlib.md5(pin + b"shittysalt").hexdigest()[:12] _machine_id = None def get_machine_id(): global _machine_id if _machine_id is not None: return _machine_id def _generate(): linux = b"" # machine-id is stable across boots, boot_id is not. for filename in "/etc/machine-id", "/proc/sys/kernel/random/boot_id": try: with open(filename, "rb") as f: value = f.readline().strip() except IOError: continue if value: linux += value break # Containers share the same machine id, add some cgroup # information. This is used outside containers too but should be # relatively stable across boots. try: with open("/proc/self/cgroup", "rb") as f: linux += f.readline().strip().rpartition(b"/")[2] except IOError: pass if linux: return linux # On OS X, use ioreg to get the computer's serial number. try: # subprocess may not be available, e.g. 
Google App Engine # https://github.com/pallets/werkzeug/issues/925 from subprocess import Popen, PIPE dump = Popen( ["ioreg", "-c", "IOPlatformExpertDevice", "-d", "2"], stdout=PIPE ).communicate()[0] match = re.search(b'"serial-number" = <([^>]+)', dump) if match is not None: return match.group(1) except (OSError, ImportError): pass # On Windows, use winreg to get the machine guid. try: import winreg as wr except ImportError: try: import _winreg as wr except ImportError: wr = None if wr is not None: try: with wr.OpenKey( wr.HKEY_LOCAL_MACHINE, "SOFTWARE\\Microsoft\\Cryptography", 0, wr.KEY_READ | wr.KEY_WOW64_64KEY, ) as rk: guid, guid_type = wr.QueryValueEx(rk, "MachineGuid") if guid_type == wr.REG_SZ: return guid.encode("utf-8") return guid except WindowsError: pass _machine_id = _generate() return _machine_id class _ConsoleFrame(object): """Helper class so that we can reuse the frame console code for the standalone console. """ def __init__(self, namespace): self.console = Console(namespace) self.id = 0 def get_pin_and_cookie_name(app): """Given an application object this returns a semi-stable 9 digit pin code and a random key. The hope is that this is stable between restarts to not make debugging particularly frustrating. If the pin was forcefully disabled this returns `None`. Second item in the resulting tuple is the cookie name for remembering. """ pin = os.environ.get("WERKZEUG_DEBUG_PIN") rv = None num = None # Pin was explicitly disabled if pin == "off": return None, None # Pin was provided explicitly if pin is not None and pin.replace("-", "").isdigit(): # If there are separators in the pin, return it directly if "-" in pin: rv = pin else: num = pin modname = getattr(app, "__module__", app.__class__.__module__) try: # getuser imports the pwd module, which does not exist in Google # App Engine. It may also raise a KeyError if the UID does not # have a username, such as in Docker. 
username = getpass.getuser() except (ImportError, KeyError): username = None mod = sys.modules.get(modname) # This information only exists to make the cookie unique on the # computer, not as a security feature. probably_public_bits = [ username, modname, getattr(app, "__name__", app.__class__.__name__), getattr(mod, "__file__", None), ] # This information is here to make it harder for an attacker to # guess the cookie name. They are unlikely to be contained anywhere # within the unauthenticated debug page. private_bits = [str(uuid.getnode()), get_machine_id()] h = hashlib.md5() for bit in chain(probably_public_bits, private_bits): if not bit: continue if isinstance(bit, text_type): bit = bit.encode("utf-8") h.update(bit) h.update(b"cookiesalt") cookie_name = "__wzd" + h.hexdigest()[:20] # If we need to generate a pin we salt it a bit more so that we don't # end up with the same value and generate out 9 digits if num is None: h.update(b"pinsalt") num = ("%09d" % int(h.hexdigest(), 16))[:9] # Format the pincode in groups of digits for easier remembering if # we don't have a result yet. if rv is None: for group_size in 5, 4, 3: if len(num) % group_size == 0: rv = "-".join( num[x : x + group_size].rjust(group_size, "0") for x in range(0, len(num), group_size) ) break else: rv = num return rv, cookie_name class DebuggedApplication(object): """Enables debugging support for a given application:: from werkzeug.debug import DebuggedApplication from myapp import app app = DebuggedApplication(app, evalex=True) The `evalex` keyword argument allows evaluating expressions in a traceback's frame context. :param app: the WSGI application to run debugged. :param evalex: enable exception evaluation feature (interactive debugging). This requires a non-forking server. :param request_key: The key that points to the request object in ths environment. This parameter is ignored in current versions. :param console_path: the URL for a general purpose console. 
:param console_init_func: the function that is executed before starting the general purpose console. The return value is used as initial namespace. :param show_hidden_frames: by default hidden traceback frames are skipped. You can show them by setting this parameter to `True`. :param pin_security: can be used to disable the pin based security system. :param pin_logging: enables the logging of the pin system. """ def __init__( self, app, evalex=False, request_key="werkzeug.request", console_path="/console", console_init_func=None, show_hidden_frames=False, pin_security=True, pin_logging=True, ): if not console_init_func: console_init_func = None self.app = app self.evalex = evalex self.frames = {} self.tracebacks = {} self.request_key = request_key self.console_path = console_path self.console_init_func = console_init_func self.show_hidden_frames = show_hidden_frames self.secret = gen_salt(20) self._failed_pin_auth = 0 self.pin_logging = pin_logging if pin_security: # Print out the pin for the debugger on standard out. if os.environ.get("WERKZEUG_RUN_MAIN") == "true" and pin_logging: _log("warning", " * Debugger is active!") if self.pin is None: _log("warning", " * Debugger PIN disabled. 
DEBUGGER UNSECURED!") else: _log("info", " * Debugger PIN: %s" % self.pin) else: self.pin = None @property def pin(self): if not hasattr(self, "_pin"): self._pin, self._pin_cookie = get_pin_and_cookie_name(self.app) return self._pin @pin.setter def pin(self, value): self._pin = value @property def pin_cookie_name(self): """The name of the pin cookie.""" if not hasattr(self, "_pin_cookie"): self._pin, self._pin_cookie = get_pin_and_cookie_name(self.app) return self._pin_cookie def debug_application(self, environ, start_response): """Run the application and conserve the traceback frames.""" app_iter = None try: app_iter = self.app(environ, start_response) for item in app_iter: yield item if hasattr(app_iter, "close"): app_iter.close() except Exception: if hasattr(app_iter, "close"): app_iter.close() traceback = get_current_traceback( skip=1, show_hidden_frames=self.show_hidden_frames, ignore_system_exceptions=True, ) for frame in traceback.frames: self.frames[frame.id] = frame self.tracebacks[traceback.id] = traceback try: start_response( "500 INTERNAL SERVER ERROR", [ ("Content-Type", "text/html; charset=utf-8"), # Disable Chrome's XSS protection, the debug # output can cause false-positives. ("X-XSS-Protection", "0"), ], ) except Exception: # if we end up here there has been output but an error # occurred. in that situation we can do nothing fancy any # more, better log something into the error log and fall # back gracefully. 
environ["wsgi.errors"].write( "Debugging middleware caught exception in streamed " "response at a point where response headers were already " "sent.\n" ) else: is_trusted = bool(self.check_pin_trust(environ)) yield traceback.render_full( evalex=self.evalex, evalex_trusted=is_trusted, secret=self.secret ).encode("utf-8", "replace") traceback.log(environ["wsgi.errors"]) def execute_command(self, request, command, frame): """Execute a command in a console.""" return Response(frame.console.eval(command), mimetype="text/html") def display_console(self, request): """Display a standalone shell.""" if 0 not in self.frames: if self.console_init_func is None: ns = {} else: ns = dict(self.console_init_func()) ns.setdefault("app", self.app) self.frames[0] = _ConsoleFrame(ns) is_trusted = bool(self.check_pin_trust(request.environ)) return Response( render_console_html(secret=self.secret, evalex_trusted=is_trusted), mimetype="text/html", ) def paste_traceback(self, request, traceback): """Paste the traceback and return a JSON response.""" rv = traceback.paste() return Response(json.dumps(rv), mimetype="application/json") def get_resource(self, request, filename): """Return a static resource from the shared folder.""" filename = join("shared", basename(filename)) try: data = pkgutil.get_data(__package__, filename) except OSError: data = None if data is not None: mimetype = mimetypes.guess_type(filename)[0] or "application/octet-stream" return Response(data, mimetype=mimetype) return Response("Not Found", status=404) def check_pin_trust(self, environ): """Checks if the request passed the pin test. This returns `True` if the request is trusted on a pin/cookie basis and returns `False` if not. Additionally if the cookie's stored pin hash is wrong it will return `None` so that appropriate action can be taken. 
""" if self.pin is None: return True val = parse_cookie(environ).get(self.pin_cookie_name) if not val or "|" not in val: return False ts, pin_hash = val.split("|", 1) if not ts.isdigit(): return False if pin_hash != hash_pin(self.pin): return None return (time.time() - PIN_TIME) < int(ts) def _fail_pin_auth(self): time.sleep(5.0 if self._failed_pin_auth > 5 else 0.5) self._failed_pin_auth += 1 def pin_auth(self, request): """Authenticates with the pin.""" exhausted = False auth = False trust = self.check_pin_trust(request.environ) # If the trust return value is `None` it means that the cookie is # set but the stored pin hash value is bad. This means that the # pin was changed. In this case we count a bad auth and unset the # cookie. This way it becomes harder to guess the cookie name # instead of the pin as we still count up failures. bad_cookie = False if trust is None: self._fail_pin_auth() bad_cookie = True # If we're trusted, we're authenticated. elif trust: auth = True # If we failed too many times, then we're locked out. 
elif self._failed_pin_auth > 10: exhausted = True # Otherwise go through pin based authentication else: entered_pin = request.args.get("pin") if entered_pin.strip().replace("-", "") == self.pin.replace("-", ""): self._failed_pin_auth = 0 auth = True else: self._fail_pin_auth() rv = Response( json.dumps({"auth": auth, "exhausted": exhausted}), mimetype="application/json", ) if auth: rv.set_cookie( self.pin_cookie_name, "%s|%s" % (int(time.time()), hash_pin(self.pin)), httponly=True, ) elif bad_cookie: rv.delete_cookie(self.pin_cookie_name) return rv def log_pin_request(self): """Log the pin if needed.""" if self.pin_logging and self.pin is not None: _log( "info", " * To enable the debugger you need to enter the security pin:" ) _log("info", " * Debugger pin code: %s" % self.pin) return Response("") def __call__(self, environ, start_response): """Dispatch the requests.""" # important: don't ever access a function here that reads the incoming # form data! Otherwise the application won't have access to that data # any more! 
request = Request(environ) response = self.debug_application if request.args.get("__debugger__") == "yes": cmd = request.args.get("cmd") arg = request.args.get("f") secret = request.args.get("s") traceback = self.tracebacks.get(request.args.get("tb", type=int)) frame = self.frames.get(request.args.get("frm", type=int)) if cmd == "resource" and arg: response = self.get_resource(request, arg) elif cmd == "paste" and traceback is not None and secret == self.secret: response = self.paste_traceback(request, traceback) elif cmd == "pinauth" and secret == self.secret: response = self.pin_auth(request) elif cmd == "printpin" and secret == self.secret: response = self.log_pin_request() elif ( self.evalex and cmd is not None and frame is not None and self.secret == secret and self.check_pin_trust(environ) ): response = self.execute_command(request, cmd, frame) elif ( self.evalex and self.console_path is not None and request.path == self.console_path ): response = self.display_console(request) return response(environ, start_response)
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/debug/repr.py
# -*- coding: utf-8 -*- """ werkzeug.debug.repr ~~~~~~~~~~~~~~~~~~~ This module implements object representations for debugging purposes. Unlike the default repr these reprs expose a lot more information and produce HTML instead of ASCII. Together with the CSS and JavaScript files of the debugger this gives a colorful and more compact output. :copyright: 2007 Pallets :license: BSD-3-Clause """ import codecs import re import sys from collections import deque from traceback import format_exception_only from .._compat import integer_types from .._compat import iteritems from .._compat import PY2 from .._compat import string_types from .._compat import text_type from ..utils import escape missing = object() _paragraph_re = re.compile(r"(?:\r\n|\r|\n){2,}") RegexType = type(_paragraph_re) HELP_HTML = """\ <div class=box> <h3>%(title)s</h3> <pre class=help>%(text)s</pre> </div>\ """ OBJECT_DUMP_HTML = """\ <div class=box> <h3>%(title)s</h3> %(repr)s <table>%(items)s</table> </div>\ """ def debug_repr(obj): """Creates a debug repr of an object as HTML unicode string.""" return DebugReprGenerator().repr(obj) def dump(obj=missing): """Print the object details to stdout._write (for the interactive console of the web debugger. """ gen = DebugReprGenerator() if obj is missing: rv = gen.dump_locals(sys._getframe(1).f_locals) else: rv = gen.dump_object(obj) sys.stdout._write(rv) class _Helper(object): """Displays an HTML version of the normal help, for the interactive debugger only because it requires a patched sys.stdout. """ def __repr__(self): return "Type help(object) for help about object." 
def __call__(self, topic=None): if topic is None: sys.stdout._write("<span class=help>%s</span>" % repr(self)) return import pydoc pydoc.help(topic) rv = sys.stdout.reset() if isinstance(rv, bytes): rv = rv.decode("utf-8", "ignore") paragraphs = _paragraph_re.split(rv) if len(paragraphs) > 1: title = paragraphs[0] text = "\n\n".join(paragraphs[1:]) else: # pragma: no cover title = "Help" text = paragraphs[0] sys.stdout._write(HELP_HTML % {"title": title, "text": text}) helper = _Helper() def _add_subclass_info(inner, obj, base): if isinstance(base, tuple): for base in base: if type(obj) is base: return inner elif type(obj) is base: return inner module = "" if obj.__class__.__module__ not in ("__builtin__", "exceptions"): module = '<span class="module">%s.</span>' % obj.__class__.__module__ return "%s%s(%s)" % (module, obj.__class__.__name__, inner) class DebugReprGenerator(object): def __init__(self): self._stack = [] def _sequence_repr_maker(left, right, base=object(), limit=8): # noqa: B008, B902 def proxy(self, obj, recursive): if recursive: return _add_subclass_info(left + "..." + right, obj, base) buf = [left] have_extended_section = False for idx, item in enumerate(obj): if idx: buf.append(", ") if idx == limit: buf.append('<span class="extended">') have_extended_section = True buf.append(self.repr(item)) if have_extended_section: buf.append("</span>") buf.append(right) return _add_subclass_info(u"".join(buf), obj, base) return proxy list_repr = _sequence_repr_maker("[", "]", list) tuple_repr = _sequence_repr_maker("(", ")", tuple) set_repr = _sequence_repr_maker("set([", "])", set) frozenset_repr = _sequence_repr_maker("frozenset([", "])", frozenset) deque_repr = _sequence_repr_maker( '<span class="module">collections.' 
"</span>deque([", "])", deque ) del _sequence_repr_maker def regex_repr(self, obj): pattern = repr(obj.pattern) if PY2: pattern = pattern.decode("string-escape", "ignore") else: pattern = codecs.decode(pattern, "unicode-escape", "ignore") if pattern[:1] == "u": pattern = "ur" + pattern[1:] else: pattern = "r" + pattern return u're.compile(<span class="string regex">%s</span>)' % pattern def string_repr(self, obj, limit=70): buf = ['<span class="string">'] r = repr(obj) # shorten the repr when the hidden part would be at least 3 chars if len(r) - limit > 2: buf.extend( ( escape(r[:limit]), '<span class="extended">', escape(r[limit:]), "</span>", ) ) else: buf.append(escape(r)) buf.append("</span>") out = u"".join(buf) # if the repr looks like a standard string, add subclass info if needed if r[0] in "'\"" or (r[0] in "ub" and r[1] in "'\""): return _add_subclass_info(out, obj, (bytes, text_type)) # otherwise, assume the repr distinguishes the subclass already return out def dict_repr(self, d, recursive, limit=5): if recursive: return _add_subclass_info(u"{...}", d, dict) buf = ["{"] have_extended_section = False for idx, (key, value) in enumerate(iteritems(d)): if idx: buf.append(", ") if idx == limit - 1: buf.append('<span class="extended">') have_extended_section = True buf.append( '<span class="pair"><span class="key">%s</span>: ' '<span class="value">%s</span></span>' % (self.repr(key), self.repr(value)) ) if have_extended_section: buf.append("</span>") buf.append("}") return _add_subclass_info(u"".join(buf), d, dict) def object_repr(self, obj): r = repr(obj) if PY2: r = r.decode("utf-8", "replace") return u'<span class="object">%s</span>' % escape(r) def dispatch_repr(self, obj, recursive): if obj is helper: return u'<span class="help">%r</span>' % helper if isinstance(obj, (integer_types, float, complex)): return u'<span class="number">%r</span>' % obj if isinstance(obj, string_types) or isinstance(obj, bytes): return self.string_repr(obj) if isinstance(obj, 
RegexType): return self.regex_repr(obj) if isinstance(obj, list): return self.list_repr(obj, recursive) if isinstance(obj, tuple): return self.tuple_repr(obj, recursive) if isinstance(obj, set): return self.set_repr(obj, recursive) if isinstance(obj, frozenset): return self.frozenset_repr(obj, recursive) if isinstance(obj, dict): return self.dict_repr(obj, recursive) if deque is not None and isinstance(obj, deque): return self.deque_repr(obj, recursive) return self.object_repr(obj) def fallback_repr(self): try: info = "".join(format_exception_only(*sys.exc_info()[:2])) except Exception: # pragma: no cover info = "?" if PY2: info = info.decode("utf-8", "ignore") return u'<span class="brokenrepr">&lt;broken repr (%s)&gt;' u"</span>" % escape( info.strip() ) def repr(self, obj): recursive = False for item in self._stack: if item is obj: recursive = True break self._stack.append(obj) try: try: return self.dispatch_repr(obj, recursive) except Exception: return self.fallback_repr() finally: self._stack.pop() def dump_object(self, obj): repr = items = None if isinstance(obj, dict): title = "Contents of" items = [] for key, value in iteritems(obj): if not isinstance(key, string_types): items = None break items.append((key, self.repr(value))) if items is None: items = [] repr = self.repr(obj) for key in dir(obj): try: items.append((key, self.repr(getattr(obj, key)))) except Exception: pass title = "Details for" title += " " + object.__repr__(obj)[1:-1] return self.render_object_dump(items, title, repr) def dump_locals(self, d): items = [(key, self.repr(value)) for key, value in d.items()] return self.render_object_dump(items, "Local variables in frame") def render_object_dump(self, items, title, repr=None): html_items = [] for key, value in items: html_items.append( "<tr><th>%s<td><pre class=repr>%s</pre>" % (escape(key), value) ) if not html_items: html_items.append("<tr><td><em>Nothing</em>") return OBJECT_DUMP_HTML % { "title": escape(title), "repr": "<pre 
class=repr>%s</pre>" % repr if repr else "", "items": "\n".join(html_items), }
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/debug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/debug/shared/FONT_LICENSE
------------------------------- UBUNTU FONT LICENCE Version 1.0 ------------------------------- PREAMBLE This licence allows the licensed fonts to be used, studied, modified and redistributed freely. The fonts, including any derivative works, can be bundled, embedded, and redistributed provided the terms of this licence are met. The fonts and derivatives, however, cannot be released under any other licence. The requirement for fonts to remain under this licence does not require any document created using the fonts or their derivatives to be published under this licence, as long as the primary purpose of the document is not to be a vehicle for the distribution of the fonts. DEFINITIONS "Font Software" refers to the set of files released by the Copyright Holder(s) under this licence and clearly marked as such. This may include source files, build scripts and documentation. "Original Version" refers to the collection of Font Software components as received under this licence. "Modified Version" refers to any derivative made by adding to, deleting, or substituting -- in part or in whole -- any of the components of the Original Version, by changing formats or by porting the Font Software to a new environment. "Copyright Holder(s)" refers to all individuals and companies who have a copyright ownership of the Font Software. "Substantially Changed" refers to Modified Versions which can be easily identified as dissimilar to the Font Software by users of the Font Software comparing the Original Version with the Modified Version. To "Propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification and with or without charging a redistribution fee), making available to the public, and in some countries other activities as well. 
PERMISSION & CONDITIONS This licence does not grant any rights under trademark law and all such rights are reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of the Font Software, to propagate the Font Software, subject to the below conditions: 1) Each copy of the Font Software must contain the above copyright notice and this licence. These can be included either as stand-alone text files, human-readable headers or in the appropriate machine- readable metadata fields within text or binary files as long as those fields can be easily viewed by the user. 2) The font name complies with the following: (a) The Original Version must retain its name, unmodified. (b) Modified Versions which are Substantially Changed must be renamed to avoid use of the name of the Original Version or similar names entirely. (c) Modified Versions which are not Substantially Changed must be renamed to both (i) retain the name of the Original Version and (ii) add additional naming elements to distinguish the Modified Version from the Original Version. The name of such Modified Versions must be the name of the Original Version, with "derivative X" where X represents the name of the new work, appended to that name. 3) The name(s) of the Copyright Holder(s) and any contributor to the Font Software shall not be used to promote, endorse or advertise any Modified Version, except (i) as required by this licence, (ii) to acknowledge the contribution(s) of the Copyright Holder(s) or (iii) with their explicit written permission. 4) The Font Software, modified or unmodified, in part or in whole, must be distributed entirely under this licence, and must not be distributed under any other licence. The requirement for fonts to remain under this licence does not affect any document created using the Font Software, except any version of the Font Software extracted from a document created using the Font Software may only be distributed under this licence. 
TERMINATION This licence becomes null and void if any of the above conditions are not met. DISCLAIMER THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE.
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/debug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/debug/shared/style.css
@font-face { font-family: 'Ubuntu'; font-style: normal; font-weight: normal; src: local('Ubuntu'), local('Ubuntu-Regular'), url('?__debugger__=yes&cmd=resource&f=ubuntu.ttf') format('truetype'); } body, input { font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', 'Verdana', sans-serif; color: #000; text-align: center; margin: 1em; padding: 0; font-size: 15px; } h1, h2, h3 { font-family: 'Ubuntu', 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', 'Verdana', sans-serif; font-weight: normal; } input { background-color: #fff; margin: 0; text-align: left; outline: none !important; } input[type="submit"] { padding: 3px 6px; } a { color: #11557C; } a:hover { color: #177199; } pre, code, textarea { font-family: 'Consolas', 'Monaco', 'Bitstream Vera Sans Mono', monospace; font-size: 14px; } div.debugger { text-align: left; padding: 12px; margin: auto; background-color: white; } h1 { font-size: 36px; margin: 0 0 0.3em 0; } div.detail { cursor: pointer; } div.detail p { margin: 0 0 8px 13px; font-size: 14px; white-space: pre-wrap; font-family: monospace; } div.explanation { margin: 20px 13px; font-size: 15px; color: #555; } div.footer { font-size: 13px; text-align: right; margin: 30px 0; color: #86989B; } h2 { font-size: 16px; margin: 1.3em 0 0.0 0; padding: 9px; background-color: #11557C; color: white; } h2 em, h3 em { font-style: normal; color: #A5D6D9; font-weight: normal; } div.traceback, div.plain { border: 1px solid #ddd; margin: 0 0 1em 0; padding: 10px; } div.plain p { margin: 0; } div.plain textarea, div.plain pre { margin: 10px 0 0 0; padding: 4px; background-color: #E8EFF0; border: 1px solid #D3E7E9; } div.plain textarea { width: 99%; height: 300px; } div.traceback h3 { font-size: 1em; margin: 0 0 0.8em 0; } div.traceback ul { list-style: none; margin: 0; padding: 0 0 0 1em; } div.traceback h4 { font-size: 13px; font-weight: normal; margin: 0.7em 0 0.1em 0; } div.traceback pre { margin: 0; padding: 5px 0 3px 15px; background-color: #E8EFF0; border: 1px 
solid #D3E7E9; } div.traceback .library .current { background: white; color: #555; } div.traceback .expanded .current { background: #E8EFF0; color: black; } div.traceback pre:hover { background-color: #DDECEE; color: black; cursor: pointer; } div.traceback div.source.expanded pre + pre { border-top: none; } div.traceback span.ws { display: none; } div.traceback pre.before, div.traceback pre.after { display: none; background: white; } div.traceback div.source.expanded pre.before, div.traceback div.source.expanded pre.after { display: block; } div.traceback div.source.expanded span.ws { display: inline; } div.traceback blockquote { margin: 1em 0 0 0; padding: 0; white-space: pre-line; } div.traceback img { float: right; padding: 2px; margin: -3px 2px 0 0; display: none; } div.traceback img:hover { background-color: #ddd; cursor: pointer; border-color: #BFDDE0; } div.traceback pre:hover img { display: block; } div.traceback cite.filename { font-style: normal; color: #3B666B; } pre.console { border: 1px solid #ccc; background: white!important; color: black; padding: 5px!important; margin: 3px 0 0 0!important; cursor: default!important; max-height: 400px; overflow: auto; } pre.console form { color: #555; } pre.console input { background-color: transparent; color: #555; width: 90%; font-family: 'Consolas', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace; font-size: 14px; border: none!important; } span.string { color: #30799B; } span.number { color: #9C1A1C; } span.help { color: #3A7734; } span.object { color: #485F6E; } span.extended { opacity: 0.5; } span.extended:hover { opacity: 1; } a.toggle { text-decoration: none; background-repeat: no-repeat; background-position: center center; background-image: url(?__debugger__=yes&cmd=resource&f=more.png); } a.toggle:hover { background-color: #444; } a.open { background-image: url(?__debugger__=yes&cmd=resource&f=less.png); } pre.console div.traceback, pre.console div.box { margin: 5px 10px; white-space: normal; 
border: 1px solid #11557C; padding: 10px; font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', 'Verdana', sans-serif; } pre.console div.box h3, pre.console div.traceback h3 { margin: -10px -10px 10px -10px; padding: 5px; background: #11557C; color: white; } pre.console div.traceback pre:hover { cursor: default; background: #E8EFF0; } pre.console div.traceback pre.syntaxerror { background: inherit; border: none; margin: 20px -10px -10px -10px; padding: 10px; border-top: 1px solid #BFDDE0; background: #E8EFF0; } pre.console div.noframe-traceback pre.syntaxerror { margin-top: -10px; border: none; } pre.console div.box pre.repr { padding: 0; margin: 0; background-color: white; border: none; } pre.console div.box table { margin-top: 6px; } pre.console div.box pre { border: none; } pre.console div.box pre.help { background-color: white; } pre.console div.box pre.help:hover { cursor: default; } pre.console table tr { vertical-align: top; } div.console { border: 1px solid #ccc; padding: 4px; background-color: #fafafa; } div.traceback pre, div.console pre { white-space: pre-wrap; /* css-3 should we be so lucky... */ white-space: -moz-pre-wrap; /* Mozilla, since 1999 */ white-space: -pre-wrap; /* Opera 4-6 ?? */ white-space: -o-pre-wrap; /* Opera 7 ?? */ word-wrap: break-word; /* Internet Explorer 5.5+ */ _white-space: pre; /* IE only hack to re-specify in addition to word-wrap */ } div.pin-prompt { position: absolute; display: none; top: 0; bottom: 0; left: 0; right: 0; background: rgba(255, 255, 255, 0.8); } div.pin-prompt .inner { background: #eee; padding: 10px 50px; width: 350px; margin: 10% auto 0 auto; border: 1px solid #ccc; border-radius: 2px; } div.exc-divider { margin: 0.7em 0 0 -1em; padding: 0.5em; background: #11557C; color: #ddd; border: 1px solid #ddd; }
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/debug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/debug/shared/debugger.js
$(function() { if (!EVALEX_TRUSTED) { initPinBox(); } /** * if we are in console mode, show the console. */ if (CONSOLE_MODE && EVALEX) { openShell(null, $('div.console div.inner').empty(), 0); } $("div.detail").click(function() { $("div.traceback").get(0).scrollIntoView(false); }); $('div.traceback div.frame').each(function() { var target = $('pre', this), consoleNode = null, frameID = this.id.substring(6); target.click(function() { $(this).parent().toggleClass('expanded'); }); /** * Add an interactive console to the frames */ if (EVALEX && target.is('.current')) { $('<img src="?__debugger__=yes&cmd=resource&f=console.png">') .attr('title', 'Open an interactive python shell in this frame') .click(function() { consoleNode = openShell(consoleNode, target, frameID); return false; }) .prependTo(target); } }); /** * toggle traceback types on click. */ $('h2.traceback').click(function() { $(this).next().slideToggle('fast'); $('div.plain').slideToggle('fast'); }).css('cursor', 'pointer'); $('div.plain').hide(); /** * Add extra info (this is here so that only users with JavaScript * enabled see it.) */ $('span.nojavascript') .removeClass('nojavascript') .html('<p>To switch between the interactive traceback and the plaintext ' + 'one, you can click on the "Traceback" headline. From the text ' + 'traceback you can also create a paste of it. ' + (!EVALEX ? '' : 'For code execution mouse-over the frame you want to debug and ' + 'click on the console icon on the right side.' 
+ '<p>You can execute arbitrary Python code in the stack frames and ' + 'there are some extra helpers available for introspection:' + '<ul><li><code>dump()</code> shows all variables in the frame' + '<li><code>dump(obj)</code> dumps all that\'s known about the object</ul>')); /** * Add the pastebin feature */ $('div.plain form') .submit(function() { var label = $('input[type="submit"]', this); var old_val = label.val(); label.val('submitting...'); $.ajax({ dataType: 'json', url: document.location.pathname, data: {__debugger__: 'yes', tb: TRACEBACK, cmd: 'paste', s: SECRET}, success: function(data) { $('div.plain span.pastemessage') .removeClass('pastemessage') .text('Paste created: ') .append($('<a>#' + data.id + '</a>').attr('href', data.url)); }, error: function() { alert('Error: Could not submit paste. No network connection?'); label.val(old_val); } }); return false; }); // if we have javascript we submit by ajax anyways, so no need for the // not scaling textarea. var plainTraceback = $('div.plain textarea'); plainTraceback.replaceWith($('<pre>').text(plainTraceback.text())); }); function initPinBox() { $('.pin-prompt form').submit(function(evt) { evt.preventDefault(); var pin = this.pin.value; var btn = this.btn; btn.disabled = true; $.ajax({ dataType: 'json', url: document.location.pathname, data: {__debugger__: 'yes', cmd: 'pinauth', pin: pin, s: SECRET}, success: function(data) { btn.disabled = false; if (data.auth) { EVALEX_TRUSTED = true; $('.pin-prompt').fadeOut(); } else { if (data.exhausted) { alert('Error: too many attempts. Restart server to retry.'); } else { alert('Error: incorrect pin'); } } console.log(data); }, error: function() { btn.disabled = false; alert('Error: Could not verify PIN. 
Network error?'); } }); }); } function promptForPin() { if (!EVALEX_TRUSTED) { $.ajax({ url: document.location.pathname, data: {__debugger__: 'yes', cmd: 'printpin', s: SECRET} }); $('.pin-prompt').fadeIn(function() { $('.pin-prompt input[name="pin"]').focus(); }); } } /** * Helper function for shell initialization */ function openShell(consoleNode, target, frameID) { promptForPin(); if (consoleNode) return consoleNode.slideToggle('fast'); consoleNode = $('<pre class="console">') .appendTo(target.parent()) .hide() var historyPos = 0, history = ['']; var output = $('<div class="output">[console ready]</div>') .appendTo(consoleNode); var form = $('<form>&gt;&gt;&gt; </form>') .submit(function() { var cmd = command.val(); $.get('', { __debugger__: 'yes', cmd: cmd, frm: frameID, s: SECRET}, function(data) { var tmp = $('<div>').html(data); $('span.extended', tmp).each(function() { var hidden = $(this).wrap('<span>').hide(); hidden .parent() .append($('<a href="#" class="toggle">&nbsp;&nbsp;</a>') .click(function() { hidden.toggle(); $(this).toggleClass('open') return false; })); }); output.append(tmp); command.focus(); consoleNode.scrollTop(consoleNode.get(0).scrollHeight); var old = history.pop(); history.push(cmd); if (typeof old != 'undefined') history.push(old); historyPos = history.length - 1; }); command.val(''); return false; }). appendTo(consoleNode); var command = $('<input type="text" autocomplete="off" autocorrect="off" autocapitalize="off" spellcheck="false">') .appendTo(form) .keydown(function(e) { if (e.key == 'l' && e.ctrlKey) { output.text('--- screen cleared ---'); return false; } else if (e.charCode == 0 && (e.keyCode == 38 || e.keyCode == 40)) { // handle up arrow and down arrow if (e.keyCode == 38 && historyPos > 0) historyPos--; else if (e.keyCode == 40 && historyPos < history.length) historyPos++; command.val(history[historyPos]); return false; } }); return consoleNode.slideDown('fast', function() { command.focus(); }); }
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/debug
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/werkzeug/debug/shared/jquery.js
/*! jQuery v3.4.1 | (c) JS Foundation and other contributors | jquery.org/license */ !function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],E=C.document,r=Object.getPrototypeOf,s=t.slice,g=t.concat,u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l=a.call(Object),y={},m=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType},x=function(e){return null!=e&&e===e.window},c={type:!0,src:!0,nonce:!0,noModule:!0};function b(e,t,n){var r,i,o=(n=n||E).createElement("script");if(o.text=e,t)for(r in c)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function w(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[o.call(e)]||"object":typeof e}var f="3.4.1",k=function(e,t){return new k.fn.init(e,t)},p=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g;function d(e){var t=!!e&&"length"in e&&e.length,n=w(e);return!m(e)&&!x(e)&&("array"===n||0===t||"number"==typeof t&&0<t&&t-1 in e)}k.fn=k.prototype={jquery:f,constructor:k,length:0,toArray:function(){return s.call(this)},get:function(e){return null==e?s.call(this):e<0?this[e+this.length]:this[e]},pushStack:function(e){var t=k.merge(this.constructor(),e);return t.prevObject=this,t},each:function(e){return k.each(this,e)},map:function(n){return this.pushStack(k.map(this,function(e,t){return n.call(e,t,e)}))},slice:function(){return this.pushStack(s.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(e){var t=this.length,n=+e+(e<0?t:0);return this.pushStack(0<=n&&n<t?[this[n]]:[])},end:function(){return this.prevObject||this.constructor()},push:u,sort:t.sort,splice:t.splice},k.extend=k.fn.extend=function(){var 
e,t,n,r,i,o,a=arguments[0]||{},s=1,u=arguments.length,l=!1;for("boolean"==typeof a&&(l=a,a=arguments[s]||{},s++),"object"==typeof a||m(a)||(a={}),s===u&&(a=this,s--);s<u;s++)if(null!=(e=arguments[s]))for(t in e)r=e[t],"__proto__"!==t&&a!==r&&(l&&r&&(k.isPlainObject(r)||(i=Array.isArray(r)))?(n=a[t],o=i&&!Array.isArray(n)?[]:i||k.isPlainObject(n)?n:{},i=!1,a[t]=k.extend(l,o,r)):void 0!==r&&(a[t]=r));return a},k.extend({expando:"jQuery"+(f+Math.random()).replace(/\D/g,""),isReady:!0,error:function(e){throw new Error(e)},noop:function(){},isPlainObject:function(e){var t,n;return!(!e||"[object Object]"!==o.call(e))&&(!(t=r(e))||"function"==typeof(n=v.call(t,"constructor")&&t.constructor)&&a.call(n)===l)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},globalEval:function(e,t){b(e,{nonce:t&&t.nonce})},each:function(e,t){var n,r=0;if(d(e)){for(n=e.length;r<n;r++)if(!1===t.call(e[r],r,e[r]))break}else for(r in e)if(!1===t.call(e[r],r,e[r]))break;return e},trim:function(e){return null==e?"":(e+"").replace(p,"")},makeArray:function(e,t){var n=t||[];return null!=e&&(d(Object(e))?k.merge(n,"string"==typeof e?[e]:e):u.call(n,e)),n},inArray:function(e,t,n){return null==t?-1:i.call(t,e,n)},merge:function(e,t){for(var n=+t.length,r=0,i=e.length;r<n;r++)e[i++]=t[r];return e.length=i,e},grep:function(e,t,n){for(var r=[],i=0,o=e.length,a=!n;i<o;i++)!t(e[i],i)!==a&&r.push(e[i]);return r},map:function(e,t,n){var r,i,o=0,a=[];if(d(e))for(r=e.length;o<r;o++)null!=(i=t(e[o],o,n))&&a.push(i);else for(o in e)null!=(i=t(e[o],o,n))&&a.push(i);return g.apply([],a)},guid:1,support:y}),"function"==typeof Symbol&&(k.fn[Symbol.iterator]=t[Symbol.iterator]),k.each("Boolean Number String Function Array Date RegExp Object Error Symbol".split(" "),function(e,t){n["[object "+t+"]"]=t.toLowerCase()});var h=function(n){var e,d,b,o,i,h,f,g,w,u,l,T,C,a,E,v,s,c,y,k="sizzle"+1*new Date,m=n.document,S=0,r=0,p=ue(),x=ue(),N=ue(),A=ue(),D=function(e,t){return 
e===t&&(l=!0),0},j={}.hasOwnProperty,t=[],q=t.pop,L=t.push,H=t.push,O=t.slice,P=function(e,t){for(var n=0,r=e.length;n<r;n++)if(e[n]===t)return n;return-1},R="checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",M="[\\x20\\t\\r\\n\\f]",I="(?:\\\\.|[\\w-]|[^\0-\\xa0])+",W="\\["+M+"*("+I+")(?:"+M+"*([*^$|!~]?=)"+M+"*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|("+I+"))|)"+M+"*\\]",$=":("+I+")(?:\\((('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|((?:\\\\.|[^\\\\()[\\]]|"+W+")*)|.*)\\)|)",F=new RegExp(M+"+","g"),B=new RegExp("^"+M+"+|((?:^|[^\\\\])(?:\\\\.)*)"+M+"+$","g"),_=new RegExp("^"+M+"*,"+M+"*"),z=new RegExp("^"+M+"*([>+~]|"+M+")"+M+"*"),U=new RegExp(M+"|>"),X=new RegExp($),V=new RegExp("^"+I+"$"),G={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+W),PSEUDO:new RegExp("^"+$),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,J=/^h\d$/i,K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new RegExp("\\\\([\\da-f]{1,6}"+M+"?|("+M+")|.)","ig"),ne=function(e,t,n){var r="0x"+t-65536;return r!=r||n?t:r<0?String.fromCharCode(r+65536):String.fromCharCode(r>>10|55296,1023&r|56320)},re=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ie=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" 
":"\\"+e},oe=function(){T()},ae=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{H.apply(t=O.call(m.childNodes),m.childNodes),t[m.childNodes.length].nodeType}catch(e){H={apply:t.length?function(e,t){L.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function se(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&((e?e.ownerDocument||e:m)!==C&&T(e),e=e||C,E)){if(11!==p&&(u=Z.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return n.push(a),n}else if(f&&(a=f.getElementById(i))&&y(e,a)&&a.id===i)return n.push(a),n}else{if(u[2])return H.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&d.getElementsByClassName&&e.getElementsByClassName)return H.apply(n,e.getElementsByClassName(i)),n}if(d.qsa&&!A[t+" "]&&(!v||!v.test(t))&&(1!==p||"object"!==e.nodeName.toLowerCase())){if(c=t,f=e,1===p&&U.test(t)){(s=e.getAttribute("id"))?s=s.replace(re,ie):e.setAttribute("id",s=k),o=(l=h(t)).length;while(o--)l[o]="#"+s+" "+xe(l[o]);c=l.join(","),f=ee.test(t)&&ye(e.parentNode)||e}try{return H.apply(n,f.querySelectorAll(c)),n}catch(e){A(t,!0)}finally{s===k&&e.removeAttribute("id")}}}return g(t.replace(B,"$1"),e,n,r)}function ue(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function le(e){return e[k]=!0,e}function ce(e){var t=C.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function fe(e,t){var n=e.split("|"),r=n.length;while(r--)b.attrHandle[n[r]]=t}function pe(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function de(t){return function(e){return"input"===e.nodeName.toLowerCase()&&e.type===t}}function he(n){return function(e){var 
t=e.nodeName.toLowerCase();return("input"===t||"button"===t)&&e.type===n}}function ge(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&ae(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function ve(a){return le(function(o){return o=+o,le(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function ye(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}for(e in d=se.support={},i=se.isXML=function(e){var t=e.namespaceURI,n=(e.ownerDocument||e).documentElement;return!Y.test(t||n&&n.nodeName||"HTML")},T=se.setDocument=function(e){var t,n,r=e?e.ownerDocument||e:m;return r!==C&&9===r.nodeType&&r.documentElement&&(a=(C=r).documentElement,E=!i(C),m!==C&&(n=C.defaultView)&&n.top!==n&&(n.addEventListener?n.addEventListener("unload",oe,!1):n.attachEvent&&n.attachEvent("onunload",oe)),d.attributes=ce(function(e){return e.className="i",!e.getAttribute("className")}),d.getElementsByTagName=ce(function(e){return e.appendChild(C.createComment("")),!e.getElementsByTagName("*").length}),d.getElementsByClassName=K.test(C.getElementsByClassName),d.getById=ce(function(e){return a.appendChild(e).id=k,!C.getElementsByName||!C.getElementsByName(k).length}),d.getById?(b.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(te,ne);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var 
n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=d.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):d.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},b.find.CLASS=d.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&E)return t.getElementsByClassName(e)},s=[],v=[],(d.qsa=K.test(C.querySelectorAll))&&(ce(function(e){a.appendChild(e).innerHTML="<a id='"+k+"'></a><select id='"+k+"-\r\\' msallowcapture=''><option selected=''></option></select>",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+k+"-]").length||v.push("~="),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+k+"+*").length||v.push(".#.+[+~]")}),ce(function(e){e.innerHTML="<a href='' disabled='disabled'></a><select disabled='disabled'><option/></select>";var t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",$)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new 
RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},D=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)===(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e===C||e.ownerDocument===m&&y(m,e)?-1:t===C||t.ownerDocument===m&&y(m,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e===C?-1:t===C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]===m?-1:s[r]===m?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if((e.ownerDocument||e)!==C&&T(e),d.matchesSelector&&E&&!A[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){A(t,!0)}return 0<se(t,C,null,[e]).length},se.contains=function(e,t){return(e.ownerDocument||e)!==C&&T(e),y(e,t)},se.attr=function(e,t){(e.ownerDocument||e)!==C&&T(e);var n=b.attrHandle[t.toLowerCase()],r=n&&j.call(b.attrHandle,t.toLowerCase())?n(e,t,!E):void 0;return void 0!==r?r:d.attributes||!E?e.getAttribute(t):(r=e.getAttributeNode(t))&&r.specified?r.value:null},se.escape=function(e){return(e+"").replace(re,ie)},se.error=function(e){throw new Error("Syntax error, unrecognized expression: "+e)},se.uniqueSort=function(e){var t,n=[],r=0,i=0;if(l=!d.detectDuplicates,u=!d.sortStable&&e.slice(0),e.sort(D),l){while(t=e[i++])t===e[i]&&(r=n.push(i));while(r--)e.splice(n[r],1)}return 
u=null,e},o=se.getText=function(e){var t,n="",r=0,i=e.nodeType;if(i){if(1===i||9===i||11===i){if("string"==typeof e.textContent)return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=o(e)}else if(3===i||4===i)return e.nodeValue}else while(t=e[r++])n+=o(t);return n},(b=se.selectors={cacheLength:50,createPseudo:le,match:G,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=p[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&p(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=se.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1<t.indexOf(i):"$="===r?i&&t.slice(-i.length)===i:"~="===r?-1<(" "+t.replace(F," ")+" ").indexOf(i):"|="===r&&(t===i||t.slice(0,i.length+1)===i+"-"))}},CHILD:function(h,e,t,g,v){var y="nth"!==h.slice(0,3),m="last"!==h.slice(-4),x="of-type"===e;return 1===g&&0===v?function(e){return!!e.parentNode}:function(e,t,n){var 
r,i,o,a,s,u,l=y!==m?"nextSibling":"previousSibling",c=e.parentNode,f=x&&e.nodeName.toLowerCase(),p=!n&&!x,d=!1;if(c){if(y){while(l){a=e;while(a=a[l])if(x?a.nodeName.toLowerCase()===f:1===a.nodeType)return!1;u=l="only"===h&&!u&&"nextSibling"}return!0}if(u=[m?c.firstChild:c.lastChild],m&&p){d=(s=(r=(i=(o=(a=c)[k]||(a[k]={}))[a.uniqueID]||(o[a.uniqueID]={}))[h]||[])[0]===S&&r[1])&&r[2],a=s&&c.childNodes[s];while(a=++s&&a&&a[l]||(d=s=0)||u.pop())if(1===a.nodeType&&++d&&a===e){i[h]=[S,s,d];break}}else if(p&&(d=s=(r=(i=(o=(a=e)[k]||(a[k]={}))[a.uniqueID]||(o[a.uniqueID]={}))[h]||[])[0]===S&&r[1]),!1===d)while(a=++s&&a&&a[l]||(d=s=0)||u.pop())if((x?a.nodeName.toLowerCase()===f:1===a.nodeType)&&++d&&(p&&((i=(o=a[k]||(a[k]={}))[a.uniqueID]||(o[a.uniqueID]={}))[h]=[S,d]),a===e))break;return(d-=v)===g||d%g==0&&0<=d/g}}},PSEUDO:function(e,o){var t,a=b.pseudos[e]||b.setFilters[e.toLowerCase()]||se.error("unsupported pseudo: "+e);return a[k]?a(o):1<a.length?(t=[e,e,"",o],b.setFilters.hasOwnProperty(e.toLowerCase())?le(function(e,t){var n,r=a(e,o),i=r.length;while(i--)e[n=P(e,r[i])]=!(t[n]=r[i])}):function(e){return a(e,0,t)}):a}},pseudos:{not:le(function(e){var r=[],i=[],s=f(e.replace(B,"$1"));return s[k]?le(function(e,t,n,r){var i,o=s(e,null,r,[]),a=e.length;while(a--)(i=o[a])&&(e[a]=!(t[a]=i))}):function(e,t,n){return r[0]=e,s(r,null,n,i),r[0]=null,!i.pop()}}),has:le(function(t){return function(e){return 0<se(t,e).length}}),contains:le(function(t){return t=t.replace(te,ne),function(e){return-1<(e.textContent||o(e)).indexOf(t)}}),lang:le(function(n){return V.test(n||"")||se.error("unsupported lang: "+n),n=n.replace(te,ne).toLowerCase(),function(e){var t;do{if(t=E?e.lang:e.getAttribute("xml:lang")||e.getAttribute("lang"))return(t=t.toLowerCase())===n||0===t.indexOf(n+"-")}while((e=e.parentNode)&&1===e.nodeType);return!1}}),target:function(e){var t=n.location&&n.location.hash;return t&&t.slice(1)===e.id},root:function(e){return e===a},focus:function(e){return 
e===C.activeElement&&(!C.hasFocus||C.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},enabled:ge(!1),disabled:ge(!0),checked:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&!!e.checked||"option"===t&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,!0===e.selected},empty:function(e){for(e=e.firstChild;e;e=e.nextSibling)if(e.nodeType<6)return!1;return!0},parent:function(e){return!b.pseudos.empty(e)},header:function(e){return J.test(e.nodeName)},input:function(e){return Q.test(e.nodeName)},button:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&"button"===e.type||"button"===t},text:function(e){var t;return"input"===e.nodeName.toLowerCase()&&"text"===e.type&&(null==(t=e.getAttribute("type"))||"text"===t.toLowerCase())},first:ve(function(){return[0]}),last:ve(function(e,t){return[t-1]}),eq:ve(function(e,t,n){return[n<0?n+t:n]}),even:ve(function(e,t){for(var n=0;n<t;n+=2)e.push(n);return e}),odd:ve(function(e,t){for(var n=1;n<t;n+=2)e.push(n);return e}),lt:ve(function(e,t,n){for(var r=n<0?n+t:t<n?t:n;0<=--r;)e.push(r);return e}),gt:ve(function(e,t,n){for(var r=n<0?n+t:n;++r<t;)e.push(r);return e})}}).pseudos.nth=b.pseudos.eq,{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})b.pseudos[e]=de(e);for(e in{submit:!0,reset:!0})b.pseudos[e]=he(e);function me(){}function xe(e){for(var t=0,n=e.length,r="";t<n;t++)r+=e[t].value;return r}function be(s,e,t){var u=e.dir,l=e.next,c=l||u,f=t&&"parentNode"===c,p=r++;return e.first?function(e,t,n){while(e=e[u])if(1===e.nodeType||f)return s(e,t,n);return!1}:function(e,t,n){var r,i,o,a=[S,p];if(n){while(e=e[u])if((1===e.nodeType||f)&&s(e,t,n))return!0}else while(e=e[u])if(1===e.nodeType||f)if(i=(o=e[k]||(e[k]={}))[e.uniqueID]||(o[e.uniqueID]={}),l&&l===e.nodeName.toLowerCase())e=e[u]||e;else{if((r=i[c])&&r[0]===S&&r[1]===p)return a[2]=r[2];if((i[c]=a)[2]=s(e,t,n))return!0}return!1}}function we(i){return 1<i.length?function(e,t,n){var 
r=i.length;while(r--)if(!i[r](e,t,n))return!1;return!0}:i[0]}function Te(e,t,n,r,i){for(var o,a=[],s=0,u=e.length,l=null!=t;s<u;s++)(o=e[s])&&(n&&!n(o,r,i)||(a.push(o),l&&t.push(s)));return a}function Ce(d,h,g,v,y,e){return v&&!v[k]&&(v=Ce(v)),y&&!y[k]&&(y=Ce(y,e)),le(function(e,t,n,r){var i,o,a,s=[],u=[],l=t.length,c=e||function(e,t,n){for(var r=0,i=t.length;r<i;r++)se(e,t[r],n);return n}(h||"*",n.nodeType?[n]:n,[]),f=!d||!e&&h?c:Te(c,s,d,n,r),p=g?y||(e?d:l||v)?[]:t:f;if(g&&g(f,p,n,r),v){i=Te(p,u),v(i,[],n,r),o=i.length;while(o--)(a=i[o])&&(p[u[o]]=!(f[u[o]]=a))}if(e){if(y||d){if(y){i=[],o=p.length;while(o--)(a=p[o])&&i.push(f[o]=a);y(null,p=[],i,r)}o=p.length;while(o--)(a=p[o])&&-1<(i=y?P(e,a):s[o])&&(e[i]=!(t[i]=a))}}else p=Te(p===t?p.splice(l,p.length):p),y?y(null,t,p,r):H.apply(t,p)})}function Ee(e){for(var i,t,n,r=e.length,o=b.relative[e[0].type],a=o||b.relative[" "],s=o?1:0,u=be(function(e){return e===i},a,!0),l=be(function(e){return-1<P(i,e)},a,!0),c=[function(e,t,n){var r=!o&&(n||t!==w)||((i=t).nodeType?u(e,t,n):l(e,t,n));return i=null,r}];s<r;s++)if(t=b.relative[e[s].type])c=[be(we(c),t)];else{if((t=b.filter[e[s].type].apply(null,e[s].matches))[k]){for(n=++s;n<r;n++)if(b.relative[e[n].type])break;return Ce(1<s&&we(c),1<s&&xe(e.slice(0,s-1).concat({value:" "===e[s-2].type?"*":""})).replace(B,"$1"),t,s<n&&Ee(e.slice(s,n)),n<r&&Ee(e=e.slice(n)),n<r&&xe(e))}c.push(t)}return we(c)}return me.prototype=b.filters=b.pseudos,b.setFilters=new me,h=se.tokenize=function(e,t){var n,r,i,o,a,s,u,l=x[e+" "];if(l)return t?0:l.slice(0);a=e,s=[],u=b.preFilter;while(a){for(o in n&&!(r=_.exec(a))||(r&&(a=a.slice(r[0].length)||a),s.push(i=[])),n=!1,(r=z.exec(a))&&(n=r.shift(),i.push({value:n,type:r[0].replace(B," ")}),a=a.slice(n.length)),b.filter)!(r=G[o].exec(a))||u[o]&&!(r=u[o](r))||(n=r.shift(),i.push({value:n,type:o,matches:r}),a=a.slice(n.length));if(!n)break}return t?a.length:a?se.error(e):x(e,s).slice(0)},f=se.compile=function(e,t){var n,v,y,m,x,r,i=[],o=[],a=N[e+" 
"];if(!a){t||(t=h(e)),n=t.length;while(n--)(a=Ee(t[n]))[k]?i.push(a):o.push(a);(a=N(e,(v=o,m=0<(y=i).length,x=0<v.length,r=function(e,t,n,r,i){var o,a,s,u=0,l="0",c=e&&[],f=[],p=w,d=e||x&&b.find.TAG("*",i),h=S+=null==p?1:Math.random()||.1,g=d.length;for(i&&(w=t===C||t||i);l!==g&&null!=(o=d[l]);l++){if(x&&o){a=0,t||o.ownerDocument===C||(T(o),n=!E);while(s=v[a++])if(s(o,t||C,n)){r.push(o);break}i&&(S=h)}m&&((o=!s&&o)&&u--,e&&c.push(o))}if(u+=l,m&&l!==u){a=0;while(s=y[a++])s(c,f,t,n);if(e){if(0<u)while(l--)c[l]||f[l]||(f[l]=q.call(r));f=Te(f)}H.apply(r,f),i&&!e&&0<f.length&&1<u+y.length&&se.uniqueSort(r)}return i&&(S=h,w=p),c},m?le(r):r))).selector=e}return a},g=se.select=function(e,t,n,r){var i,o,a,s,u,l="function"==typeof e&&e,c=!r&&h(e=l.selector||e);if(n=n||[],1===c.length){if(2<(o=c[0]=c[0].slice(0)).length&&"ID"===(a=o[0]).type&&9===t.nodeType&&E&&b.relative[o[1].type]){if(!(t=(b.find.ID(a.matches[0].replace(te,ne),t)||[])[0]))return n;l&&(t=t.parentNode),e=e.slice(o.shift().value.length)}i=G.needsContext.test(e)?0:o.length;while(i--){if(a=o[i],b.relative[s=a.type])break;if((u=b.find[s])&&(r=u(a.matches[0].replace(te,ne),ee.test(o[0].type)&&ye(t.parentNode)||t))){if(o.splice(i,1),!(e=r.length&&xe(o)))return H.apply(n,r),n;break}}}return(l||f(e,c))(r,t,!E,n,!t||ee.test(e)&&ye(t.parentNode)||t),n},d.sortStable=k.split("").sort(D).join("")===k,d.detectDuplicates=!!l,T(),d.sortDetached=ce(function(e){return 1&e.compareDocumentPosition(C.createElement("fieldset"))}),ce(function(e){return e.innerHTML="<a href='#'></a>","#"===e.firstChild.getAttribute("href")})||fe("type|href|height|width",function(e,t,n){if(!n)return e.getAttribute(t,"type"===t.toLowerCase()?1:2)}),d.attributes&&ce(function(e){return e.innerHTML="<input/>",e.firstChild.setAttribute("value",""),""===e.firstChild.getAttribute("value")})||fe("value",function(e,t,n){if(!n&&"input"===e.nodeName.toLowerCase())return e.defaultValue}),ce(function(e){return 
null==e.getAttribute("disabled")})||fe(R,function(e,t,n){var r;if(!n)return!0===e[t]?t.toLowerCase():(r=e.getAttributeNode(t))&&r.specified?r.value:null}),se}(C);k.find=h,k.expr=h.selectors,k.expr[":"]=k.expr.pseudos,k.uniqueSort=k.unique=h.uniqueSort,k.text=h.getText,k.isXMLDoc=h.isXML,k.contains=h.contains,k.escapeSelector=h.escape;var T=function(e,t,n){var r=[],i=void 0!==n;while((e=e[t])&&9!==e.nodeType)if(1===e.nodeType){if(i&&k(e).is(n))break;r.push(e)}return r},S=function(e,t){for(var n=[];e;e=e.nextSibling)1===e.nodeType&&e!==t&&n.push(e);return n},N=k.expr.match.needsContext;function A(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()}var D=/^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,n,r){return m(n)?k.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?k.grep(e,function(e){return e===n!==r}):"string"!=typeof n?k.grep(e,function(e){return-1<i.call(n,e)!==r}):k.filter(n,e,r)}k.filter=function(e,t,n){var r=t[0];return n&&(e=":not("+e+")"),1===t.length&&1===r.nodeType?k.find.matchesSelector(r,e)?[r]:[]:k.find.matches(e,k.grep(t,function(e){return 1===e.nodeType}))},k.fn.extend({find:function(e){var t,n,r=this.length,i=this;if("string"!=typeof e)return this.pushStack(k(e).filter(function(){for(t=0;t<r;t++)if(k.contains(i[t],this))return!0}));for(n=this.pushStack([]),t=0;t<r;t++)k.find(e,i[t],n);return 1<r?k.uniqueSort(n):n},filter:function(e){return this.pushStack(j(this,e||[],!1))},not:function(e){return this.pushStack(j(this,e||[],!0))},is:function(e){return!!j(this,"string"==typeof e&&N.test(e)?k(e):e||[],!1).length}});var q,L=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/;(k.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||q,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:L.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof 
k?t[0]:t,k.merge(this,k.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:E,!0)),D.test(r[1])&&k.isPlainObject(t))for(r in t)m(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=E.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):m(e)?void 0!==n.ready?n.ready(e):e(k):k.makeArray(e,this)}).prototype=k.fn,q=k(E);var H=/^(?:parents|prev(?:Until|All))/,O={children:!0,contents:!0,next:!0,prev:!0};function P(e,t){while((e=e[t])&&1!==e.nodeType);return e}k.fn.extend({has:function(e){var t=k(e,this),n=t.length;return this.filter(function(){for(var e=0;e<n;e++)if(k.contains(this,t[e]))return!0})},closest:function(e,t){var n,r=0,i=this.length,o=[],a="string"!=typeof e&&k(e);if(!N.test(e))for(;r<i;r++)for(n=this[r];n&&n!==t;n=n.parentNode)if(n.nodeType<11&&(a?-1<a.index(n):1===n.nodeType&&k.find.matchesSelector(n,e))){o.push(n);break}return this.pushStack(1<o.length?k.uniqueSort(o):o)},index:function(e){return e?"string"==typeof e?i.call(k(e),this[0]):i.call(this,e.jquery?e[0]:e):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(e,t){return this.pushStack(k.uniqueSort(k.merge(this.get(),k(e,t))))},addBack:function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}}),k.each({parent:function(e){var t=e.parentNode;return t&&11!==t.nodeType?t:null},parents:function(e){return T(e,"parentNode")},parentsUntil:function(e,t,n){return T(e,"parentNode",n)},next:function(e){return P(e,"nextSibling")},prev:function(e){return P(e,"previousSibling")},nextAll:function(e){return T(e,"nextSibling")},prevAll:function(e){return T(e,"previousSibling")},nextUntil:function(e,t,n){return T(e,"nextSibling",n)},prevUntil:function(e,t,n){return T(e,"previousSibling",n)},siblings:function(e){return S((e.parentNode||{}).firstChild,e)},children:function(e){return S(e.firstChild)},contents:function(e){return"undefined"!=typeof 
e.contentDocument?e.contentDocument:(A(e,"template")&&(e=e.content||e),k.merge([],e.childNodes))}},function(r,i){k.fn[r]=function(e,t){var n=k.map(this,i,e);return"Until"!==r.slice(-5)&&(t=e),t&&"string"==typeof t&&(n=k.filter(t,n)),1<this.length&&(O[r]||k.uniqueSort(n),H.test(r)&&n.reverse()),this.pushStack(n)}});var R=/[^\x20\t\r\n\f]+/g;function M(e){return e}function I(e){throw e}function W(e,t,n,r){var i;try{e&&m(i=e.promise)?i.call(e).done(t).fail(n):e&&m(i=e.then)?i.call(e,t,n):t.apply(void 0,[e].slice(r))}catch(e){n.apply(void 0,[e])}}k.Callbacks=function(r){var e,n;r="string"==typeof r?(e=r,n={},k.each(e.match(R)||[],function(e,t){n[t]=!0}),n):k.extend({},r);var i,t,o,a,s=[],u=[],l=-1,c=function(){for(a=a||r.once,o=i=!0;u.length;l=-1){t=u.shift();while(++l<s.length)!1===s[l].apply(t[0],t[1])&&r.stopOnFalse&&(l=s.length,t=!1)}r.memory||(t=!1),i=!1,a&&(s=t?[]:"")},f={add:function(){return s&&(t&&!i&&(l=s.length-1,u.push(t)),function n(e){k.each(e,function(e,t){m(t)?r.unique&&f.has(t)||s.push(t):t&&t.length&&"string"!==w(t)&&n(t)})}(arguments),t&&!i&&c()),this},remove:function(){return k.each(arguments,function(e,t){var n;while(-1<(n=k.inArray(t,s,n)))s.splice(n,1),n<=l&&l--}),this},has:function(e){return e?-1<k.inArray(e,s):0<s.length},empty:function(){return s&&(s=[]),this},disable:function(){return a=u=[],s=t="",this},disabled:function(){return!s},lock:function(){return a=u=[],t||i||(s=t=""),this},locked:function(){return!!a},fireWith:function(e,t){return a||(t=[e,(t=t||[]).slice?t.slice():t],u.push(t),i||c()),this},fire:function(){return f.fireWith(this,arguments),this},fired:function(){return!!o}};return f},k.extend({Deferred:function(e){var o=[["notify","progress",k.Callbacks("memory"),k.Callbacks("memory"),2],["resolve","done",k.Callbacks("once memory"),k.Callbacks("once memory"),0,"resolved"],["reject","fail",k.Callbacks("once memory"),k.Callbacks("once memory"),1,"rejected"]],i="pending",a={state:function(){return i},always:function(){return 
s.done(arguments).fail(arguments),this},"catch":function(e){return a.then(null,e)},pipe:function(){var i=arguments;return k.Deferred(function(r){k.each(o,function(e,t){var n=m(i[t[4]])&&i[t[4]];s[t[1]](function(){var e=n&&n.apply(this,arguments);e&&m(e.promise)?e.promise().progress(r.notify).done(r.resolve).fail(r.reject):r[t[0]+"With"](this,n?[e]:arguments)})}),i=null}).promise()},then:function(t,n,r){var u=0;function l(i,o,a,s){return function(){var n=this,r=arguments,e=function(){var e,t;if(!(i<u)){if((e=a.apply(n,r))===o.promise())throw new TypeError("Thenable self-resolution");t=e&&("object"==typeof e||"function"==typeof e)&&e.then,m(t)?s?t.call(e,l(u,o,M,s),l(u,o,I,s)):(u++,t.call(e,l(u,o,M,s),l(u,o,I,s),l(u,o,M,o.notifyWith))):(a!==M&&(n=void 0,r=[e]),(s||o.resolveWith)(n,r))}},t=s?e:function(){try{e()}catch(e){k.Deferred.exceptionHook&&k.Deferred.exceptionHook(e,t.stackTrace),u<=i+1&&(a!==I&&(n=void 0,r=[e]),o.rejectWith(n,r))}};i?t():(k.Deferred.getStackHook&&(t.stackTrace=k.Deferred.getStackHook()),C.setTimeout(t))}}return k.Deferred(function(e){o[0][3].add(l(0,e,m(r)?r:M,e.notifyWith)),o[1][3].add(l(0,e,m(t)?t:M)),o[2][3].add(l(0,e,m(n)?n:I))}).promise()},promise:function(e){return null!=e?k.extend(e,a):a}},s={};return k.each(o,function(e,t){var n=t[2],r=t[5];a[t[1]]=n.add,r&&n.add(function(){i=r},o[3-e][2].disable,o[3-e][3].disable,o[0][2].lock,o[0][3].lock),n.add(t[3].fire),s[t[0]]=function(){return s[t[0]+"With"](this===s?void 0:this,arguments),this},s[t[0]+"With"]=n.fireWith}),a.promise(s),e&&e.call(s,s),s},when:function(e){var n=arguments.length,t=n,r=Array(t),i=s.call(arguments),o=k.Deferred(),a=function(t){return function(e){r[t]=this,i[t]=1<arguments.length?s.call(arguments):e,--n||o.resolveWith(r,i)}};if(n<=1&&(W(e,o.done(a(t)).resolve,o.reject,!n),"pending"===o.state()||m(i[t]&&i[t].then)))return o.then();while(t--)W(i[t],a(t),o.reject);return o.promise()}});var 
$=/^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/;k.Deferred.exceptionHook=function(e,t){C.console&&C.console.warn&&e&&$.test(e.name)&&C.console.warn("jQuery.Deferred exception: "+e.message,e.stack,t)},k.readyException=function(e){C.setTimeout(function(){throw e})};var F=k.Deferred();function B(){E.removeEventListener("DOMContentLoaded",B),C.removeEventListener("load",B),k.ready()}k.fn.ready=function(e){return F.then(e)["catch"](function(e){k.readyException(e)}),this},k.extend({isReady:!1,readyWait:1,ready:function(e){(!0===e?--k.readyWait:k.isReady)||(k.isReady=!0)!==e&&0<--k.readyWait||F.resolveWith(E,[k])}}),k.ready.then=F.then,"complete"===E.readyState||"loading"!==E.readyState&&!E.documentElement.doScroll?C.setTimeout(k.ready):(E.addEventListener("DOMContentLoaded",B),C.addEventListener("load",B));var _=function(e,t,n,r,i,o,a){var s=0,u=e.length,l=null==n;if("object"===w(n))for(s in i=!0,n)_(e,t,s,n[s],!0,o,a);else if(void 0!==r&&(i=!0,m(r)||(a=!0),l&&(a?(t.call(e,r),t=null):(l=t,t=function(e,t,n){return l.call(k(e),n)})),t))for(;s<u;s++)t(e[s],n,a?r:r.call(e[s],s,t(e[s],n)));return i?e:l?t.call(e):u?t(e[0],n):o},z=/^-ms-/,U=/-([a-z])/g;function X(e,t){return t.toUpperCase()}function V(e){return e.replace(z,"ms-").replace(U,X)}var G=function(e){return 1===e.nodeType||9===e.nodeType||!+e.nodeType};function Y(){this.expando=k.expando+Y.uid++}Y.uid=1,Y.prototype={cache:function(e){var t=e[this.expando];return t||(t={},G(e)&&(e.nodeType?e[this.expando]=t:Object.defineProperty(e,this.expando,{value:t,configurable:!0}))),t},set:function(e,t,n){var r,i=this.cache(e);if("string"==typeof t)i[V(t)]=n;else for(r in t)i[V(r)]=t[r];return i},get:function(e,t){return void 0===t?this.cache(e):e[this.expando]&&e[this.expando][V(t)]},access:function(e,t,n){return void 0===t||t&&"string"==typeof t&&void 0===n?this.get(e,t):(this.set(e,t,n),void 0!==n?n:t)},remove:function(e,t){var n,r=e[this.expando];if(void 0!==r){if(void 
0!==t){n=(t=Array.isArray(t)?t.map(V):(t=V(t))in r?[t]:t.match(R)||[]).length;while(n--)delete r[t[n]]}(void 0===t||k.isEmptyObject(r))&&(e.nodeType?e[this.expando]=void 0:delete e[this.expando])}},hasData:function(e){var t=e[this.expando];return void 0!==t&&!k.isEmptyObject(t)}};var Q=new Y,J=new Y,K=/^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,Z=/[A-Z]/g;function ee(e,t,n){var r,i;if(void 0===n&&1===e.nodeType)if(r="data-"+t.replace(Z,"-$&").toLowerCase(),"string"==typeof(n=e.getAttribute(r))){try{n="true"===(i=n)||"false"!==i&&("null"===i?null:i===+i+""?+i:K.test(i)?JSON.parse(i):i)}catch(e){}J.set(e,t,n)}else n=void 0;return n}k.extend({hasData:function(e){return J.hasData(e)||Q.hasData(e)},data:function(e,t,n){return J.access(e,t,n)},removeData:function(e,t){J.remove(e,t)},_data:function(e,t,n){return Q.access(e,t,n)},_removeData:function(e,t){Q.remove(e,t)}}),k.fn.extend({data:function(n,e){var t,r,i,o=this[0],a=o&&o.attributes;if(void 0===n){if(this.length&&(i=J.get(o),1===o.nodeType&&!Q.get(o,"hasDataAttrs"))){t=a.length;while(t--)a[t]&&0===(r=a[t].name).indexOf("data-")&&(r=V(r.slice(5)),ee(o,r,i[r]));Q.set(o,"hasDataAttrs",!0)}return i}return"object"==typeof n?this.each(function(){J.set(this,n)}):_(this,function(e){var t;if(o&&void 0===e)return void 0!==(t=J.get(o,n))?t:void 0!==(t=ee(o,n))?t:void 0;this.each(function(){J.set(this,n,e)})},null,e,1<arguments.length,null,!0)},removeData:function(e){return this.each(function(){J.remove(this,e)})}}),k.extend({queue:function(e,t,n){var r;if(e)return t=(t||"fx")+"queue",r=Q.get(e,t),n&&(!r||Array.isArray(n)?r=Q.access(e,t,k.makeArray(n)):r.push(n)),r||[]},dequeue:function(e,t){t=t||"fx";var n=k.queue(e,t),r=n.length,i=n.shift(),o=k._queueHooks(e,t);"inprogress"===i&&(i=n.shift(),r--),i&&("fx"===t&&n.unshift("inprogress"),delete o.stop,i.call(e,function(){k.dequeue(e,t)},o)),!r&&o&&o.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return Q.get(e,n)||Q.access(e,n,{empty:k.Callbacks("once 
memory").add(function(){Q.remove(e,[t+"queue",n])})})}}),k.fn.extend({queue:function(t,n){var e=2;return"string"!=typeof t&&(n=t,t="fx",e--),arguments.length<e?k.queue(this[0],t):void 0===n?this:this.each(function(){var e=k.queue(this,t,n);k._queueHooks(this,t),"fx"===t&&"inprogress"!==e[0]&&k.dequeue(this,t)})},dequeue:function(e){return this.each(function(){k.dequeue(this,e)})},clearQueue:function(e){return this.queue(e||"fx",[])},promise:function(e,t){var n,r=1,i=k.Deferred(),o=this,a=this.length,s=function(){--r||i.resolveWith(o,[o])};"string"!=typeof e&&(t=e,e=void 0),e=e||"fx";while(a--)(n=Q.get(o[a],e+"queueHooks"))&&n.empty&&(r++,n.empty.add(s));return s(),i.promise(t)}});var te=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,ne=new RegExp("^(?:([+-])=|)("+te+")([a-z%]*)$","i"),re=["Top","Right","Bottom","Left"],ie=E.documentElement,oe=function(e){return k.contains(e.ownerDocument,e)},ae={composed:!0};ie.getRootNode&&(oe=function(e){return k.contains(e.ownerDocument,e)||e.getRootNode(ae)===e.ownerDocument});var se=function(e,t){return"none"===(e=t||e).style.display||""===e.style.display&&oe(e)&&"none"===k.css(e,"display")},ue=function(e,t,n,r){var i,o,a={};for(o in t)a[o]=e.style[o],e.style[o]=t[o];for(o in i=n.apply(e,r||[]),t)e.style[o]=a[o];return i};function le(e,t,n,r){var i,o,a=20,s=r?function(){return r.cur()}:function(){return k.css(e,t,"")},u=s(),l=n&&n[3]||(k.cssNumber[t]?"":"px"),c=e.nodeType&&(k.cssNumber[t]||"px"!==l&&+u)&&ne.exec(k.css(e,t));if(c&&c[3]!==l){u/=2,l=l||c[3],c=+u||1;while(a--)k.style(e,t,c+l),(1-o)*(1-(o=s()/u||.5))<=0&&(a=0),c/=o;c*=2,k.style(e,t,c+l),n=n||[]}return n&&(c=+c||+u||0,i=n[1]?c+(n[1]+1)*n[2]:+n[2],r&&(r.unit=l,r.start=c,r.end=i)),i}var ce={};function fe(e,t){for(var n,r,i,o,a,s,u,l=[],c=0,f=e.length;c<f;c++)(r=e[c]).style&&(n=r.style.display,t?("none"===n&&(l[c]=Q.get(r,"display")||null,l[c]||(r.style.display="")),""===r.style.display&&se(r)&&(l[c]=(u=a=o=void 
0,a=(i=r).ownerDocument,s=i.nodeName,(u=ce[s])||(o=a.body.appendChild(a.createElement(s)),u=k.css(o,"display"),o.parentNode.removeChild(o),"none"===u&&(u="block"),ce[s]=u)))):"none"!==n&&(l[c]="none",Q.set(r,"display",n)));for(c=0;c<f;c++)null!=l[c]&&(e[c].style.display=l[c]);return e}k.fn.extend({show:function(){return fe(this,!0)},hide:function(){return fe(this)},toggle:function(e){return"boolean"==typeof e?e?this.show():this.hide():this.each(function(){se(this)?k(this).show():k(this).hide()})}});var pe=/^(?:checkbox|radio)$/i,de=/<([a-z][^\/\0>\x20\t\r\n\f]*)/i,he=/^$|^module$|\/(?:java|ecma)script/i,ge={option:[1,"<select multiple='multiple'>","</select>"],thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};function ve(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?k.merge([e],n):n}function ye(e,t){for(var n=0,r=e.length;n<r;n++)Q.set(e[n],"globalEval",!t||Q.get(t[n],"globalEval"))}ge.optgroup=ge.option,ge.tbody=ge.tfoot=ge.colgroup=ge.caption=ge.thead,ge.th=ge.td;var me,xe,be=/<|&#?\w+;/;function we(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d<h;d++)if((o=e[d])||0===o)if("object"===w(o))k.merge(p,o.nodeType?[o]:o);else if(be.test(o)){a=a||f.appendChild(t.createElement("div")),s=(de.exec(o)||["",""])[1].toLowerCase(),u=ge[s]||ge._default,a.innerHTML=u[1]+k.htmlPrefilter(o)+u[2],c=u[0];while(c--)a=a.lastChild;k.merge(p,a.childNodes),(a=f.firstChild).textContent=""}else p.push(t.createTextNode(o));f.textContent="",d=0;while(o=p[d++])if(r&&-1<k.inArray(o,r))i&&i.push(o);else if(l=oe(o),a=ve(f.appendChild(o),"script"),l&&ye(a),n){c=0;while(o=a[c++])he.test(o.type||"")&&n.push(o)}return 
f}me=E.createDocumentFragment().appendChild(E.createElement("div")),(xe=E.createElement("input")).setAttribute("type","radio"),xe.setAttribute("checked","checked"),xe.setAttribute("name","t"),me.appendChild(xe),y.checkClone=me.cloneNode(!0).cloneNode(!0).lastChild.checked,me.innerHTML="<textarea>x</textarea>",y.noCloneChecked=!!me.cloneNode(!0).lastChild.defaultValue;var Te=/^key/,Ce=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,Ee=/^([^.]*)(?:\.(.+)|)/;function ke(){return!0}function Se(){return!1}function Ne(e,t){return e===function(){try{return E.activeElement}catch(e){}}()==("focus"===t)}function Ae(e,t,n,r,i,o){var a,s;if("object"==typeof t){for(s in"string"!=typeof n&&(r=r||n,n=void 0),t)Ae(e,s,n,r,t[s],o);return e}if(null==r&&null==i?(i=n,r=n=void 0):null==i&&("string"==typeof n?(i=r,r=void 0):(i=r,r=n,n=void 0)),!1===i)i=Se;else if(!i)return e;return 1===o&&(a=i,(i=function(e){return k().off(e),a.apply(this,arguments)}).guid=a.guid||(a.guid=k.guid++)),e.each(function(){k.event.add(this,t,i,r,n)})}function De(e,i,o){o?(Q.set(e,i,!1),k.event.add(e,i,{namespace:!1,handler:function(e){var t,n,r=Q.get(this,i);if(1&e.isTrigger&&this[i]){if(r.length)(k.event.special[i]||{}).delegateType&&e.stopPropagation();else if(r=s.call(arguments),Q.set(this,i,r),t=o(this,i),this[i](),r!==(n=Q.get(this,i))||t?Q.set(this,i,!1):n={},r!==n)return e.stopImmediatePropagation(),e.preventDefault(),n.value}else r.length&&(Q.set(this,i,{value:k.event.trigger(k.extend(r[0],k.Event.prototype),r.slice(1),this)}),e.stopImmediatePropagation())}})):void 0===Q.get(e,i)&&k.event.add(e,i,ke)}k.event={global:{},add:function(t,e,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=Q.get(t);if(v){n.handler&&(n=(o=n).handler,i=o.selector),i&&k.find.matchesSelector(ie,i),n.guid||(n.guid=k.guid++),(u=v.events)||(u=v.events={}),(a=v.handle)||(a=v.handle=function(e){return"undefined"!=typeof k&&k.event.triggered!==e.type?k.event.dispatch.apply(t,arguments):void 
0}),l=(e=(e||"").match(R)||[""]).length;while(l--)d=g=(s=Ee.exec(e[l])||[])[1],h=(s[2]||"").split(".").sort(),d&&(f=k.event.special[d]||{},d=(i?f.delegateType:f.bindType)||d,f=k.event.special[d]||{},c=k.extend({type:d,origType:g,data:r,handler:n,guid:n.guid,selector:i,needsContext:i&&k.expr.match.needsContext.test(i),namespace:h.join(".")},o),(p=u[d])||((p=u[d]=[]).delegateCount=0,f.setup&&!1!==f.setup.call(t,r,h,a)||t.addEventListener&&t.addEventListener(d,a)),f.add&&(f.add.call(t,c),c.handler.guid||(c.handler.guid=n.guid)),i?p.splice(p.delegateCount++,0,c):p.push(c),k.event.global[d]=!0)}},remove:function(e,t,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,v=Q.hasData(e)&&Q.get(e);if(v&&(u=v.events)){l=(t=(t||"").match(R)||[""]).length;while(l--)if(d=g=(s=Ee.exec(t[l])||[])[1],h=(s[2]||"").split(".").sort(),d){f=k.event.special[d]||{},p=u[d=(r?f.delegateType:f.bindType)||d]||[],s=s[2]&&new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"),a=o=p.length;while(o--)c=p[o],!i&&g!==c.origType||n&&n.guid!==c.guid||s&&!s.test(c.namespace)||r&&r!==c.selector&&("**"!==r||!c.selector)||(p.splice(o,1),c.selector&&p.delegateCount--,f.remove&&f.remove.call(e,c));a&&!p.length&&(f.teardown&&!1!==f.teardown.call(e,h,v.handle)||k.removeEvent(e,d,v.handle),delete u[d])}else for(d in u)k.event.remove(e,d+t[l],n,r,!0);k.isEmptyObject(u)&&Q.remove(e,"handle events")}},dispatch:function(e){var t,n,r,i,o,a,s=k.event.fix(e),u=new Array(arguments.length),l=(Q.get(this,"events")||{})[s.type]||[],c=k.event.special[s.type]||{};for(u[0]=s,t=1;t<arguments.length;t++)u[t]=arguments[t];if(s.delegateTarget=this,!c.preDispatch||!1!==c.preDispatch.call(this,s)){a=k.event.handlers.call(this,s,l),t=0;while((i=a[t++])&&!s.isPropagationStopped()){s.currentTarget=i.elem,n=0;while((o=i.handlers[n++])&&!s.isImmediatePropagationStopped())s.rnamespace&&!1!==o.namespace&&!s.rnamespace.test(o.namespace)||(s.handleObj=o,s.data=o.data,void 
0!==(r=((k.event.special[o.origType]||{}).handle||o.handler).apply(i.elem,u))&&!1===(s.result=r)&&(s.preventDefault(),s.stopPropagation()))}return c.postDispatch&&c.postDispatch.call(this,s),s.result}},handlers:function(e,t){var n,r,i,o,a,s=[],u=t.delegateCount,l=e.target;if(u&&l.nodeType&&!("click"===e.type&&1<=e.button))for(;l!==this;l=l.parentNode||this)if(1===l.nodeType&&("click"!==e.type||!0!==l.disabled)){for(o=[],a={},n=0;n<u;n++)void 0===a[i=(r=t[n]).selector+" "]&&(a[i]=r.needsContext?-1<k(i,this).index(l):k.find(i,this,null,[l]).length),a[i]&&o.push(r);o.length&&s.push({elem:l,handlers:o})}return l=this,u<t.length&&s.push({elem:l,handlers:t.slice(u)}),s},addProp:function(t,e){Object.defineProperty(k.Event.prototype,t,{enumerable:!0,configurable:!0,get:m(e)?function(){if(this.originalEvent)return e(this.originalEvent)}:function(){if(this.originalEvent)return this.originalEvent[t]},set:function(e){Object.defineProperty(this,t,{enumerable:!0,configurable:!0,writable:!0,value:e})}})},fix:function(e){return e[k.expando]?e:new k.Event(e)},special:{load:{noBubble:!0},click:{setup:function(e){var t=this||e;return pe.test(t.type)&&t.click&&A(t,"input")&&De(t,"click",ke),!1},trigger:function(e){var t=this||e;return pe.test(t.type)&&t.click&&A(t,"input")&&De(t,"click"),!0},_default:function(e){var t=e.target;return pe.test(t.type)&&t.click&&A(t,"input")&&Q.get(t,"click")||A(t,"a")}},beforeunload:{postDispatch:function(e){void 0!==e.result&&e.originalEvent&&(e.originalEvent.returnValue=e.result)}}}},k.removeEvent=function(e,t,n){e.removeEventListener&&e.removeEventListener(t,n)},k.Event=function(e,t){if(!(this instanceof k.Event))return new k.Event(e,t);e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented||void 
0===e.defaultPrevented&&!1===e.returnValue?ke:Se,this.target=e.target&&3===e.target.nodeType?e.target.parentNode:e.target,this.currentTarget=e.currentTarget,this.relatedTarget=e.relatedTarget):this.type=e,t&&k.extend(this,t),this.timeStamp=e&&e.timeStamp||Date.now(),this[k.expando]=!0},k.Event.prototype={constructor:k.Event,isDefaultPrevented:Se,isPropagationStopped:Se,isImmediatePropagationStopped:Se,isSimulated:!1,preventDefault:function(){var e=this.originalEvent;this.isDefaultPrevented=ke,e&&!this.isSimulated&&e.preventDefault()},stopPropagation:function(){var e=this.originalEvent;this.isPropagationStopped=ke,e&&!this.isSimulated&&e.stopPropagation()},stopImmediatePropagation:function(){var e=this.originalEvent;this.isImmediatePropagationStopped=ke,e&&!this.isSimulated&&e.stopImmediatePropagation(),this.stopPropagation()}},k.each({altKey:!0,bubbles:!0,cancelable:!0,changedTouches:!0,ctrlKey:!0,detail:!0,eventPhase:!0,metaKey:!0,pageX:!0,pageY:!0,shiftKey:!0,view:!0,"char":!0,code:!0,charCode:!0,key:!0,keyCode:!0,button:!0,buttons:!0,clientX:!0,clientY:!0,offsetX:!0,offsetY:!0,pointerId:!0,pointerType:!0,screenX:!0,screenY:!0,targetTouches:!0,toElement:!0,touches:!0,which:function(e){var t=e.button;return null==e.which&&Te.test(e.type)?null!=e.charCode?e.charCode:e.keyCode:!e.which&&void 0!==t&&Ce.test(e.type)?1&t?1:2&t?3:4&t?2:0:e.which}},k.event.addProp),k.each({focus:"focusin",blur:"focusout"},function(e,t){k.event.special[e]={setup:function(){return De(this,e,Ne),!1},trigger:function(){return De(this,e),!0},delegateType:t}}),k.each({mouseenter:"mouseover",mouseleave:"mouseout",pointerenter:"pointerover",pointerleave:"pointerout"},function(e,i){k.event.special[e]={delegateType:i,bindType:i,handle:function(e){var t,n=e.relatedTarget,r=e.handleObj;return n&&(n===this||k.contains(this,n))||(e.type=r.origType,t=r.handler.apply(this,arguments),e.type=i),t}}}),k.fn.extend({on:function(e,t,n,r){return Ae(this,e,t,n,r)},one:function(e,t,n,r){return 
Ae(this,e,t,n,r,1)},off:function(e,t,n){var r,i;if(e&&e.preventDefault&&e.handleObj)return r=e.handleObj,k(e.delegateTarget).off(r.namespace?r.origType+"."+r.namespace:r.origType,r.selector,r.handler),this;if("object"==typeof e){for(i in e)this.off(i,t,e[i]);return this}return!1!==t&&"function"!=typeof t||(n=t,t=void 0),!1===n&&(n=Se),this.each(function(){k.event.remove(this,e,n,t)})}});var je=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([a-z][^\/\0>\x20\t\r\n\f]*)[^>]*)\/>/gi,qe=/<script|<style|<link/i,Le=/checked\s*(?:[^=]|=\s*.checked.)/i,He=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g;function Oe(e,t){return A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&k(e).children("tbody")[0]||e}function Pe(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function Re(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Me(e,t){var n,r,i,o,a,s,u,l;if(1===t.nodeType){if(Q.hasData(e)&&(o=Q.access(e),a=Q.set(t,o),l=o.events))for(i in delete a.handle,a.events={},l)for(n=0,r=l[i].length;n<r;n++)k.event.add(t,i,l[i][n]);J.hasData(e)&&(s=J.access(e),u=k.extend({},s),J.set(t,u))}}function Ie(n,r,i,o){r=g.apply([],r);var e,t,a,s,u,l,c=0,f=n.length,p=f-1,d=r[0],h=m(d);if(h||1<f&&"string"==typeof d&&!y.checkClone&&Le.test(d))return n.each(function(e){var t=n.eq(e);h&&(r[0]=d.call(this,e,t.html())),Ie(t,r,i,o)});if(f&&(t=(e=we(r,n[0].ownerDocument,!1,n,o)).firstChild,1===e.childNodes.length&&(e=t),t||o)){for(s=(a=k.map(ve(e,"script"),Pe)).length;c<f;c++)u=e,c!==p&&(u=k.clone(u,!0,!0),s&&k.merge(a,ve(u,"script"))),i.call(n[c],u,c);if(s)for(l=a[a.length-1].ownerDocument,k.map(a,Re),c=0;c<s;c++)u=a[c],he.test(u.type||"")&&!Q.access(u,"globalEval")&&k.contains(l,u)&&(u.src&&"module"!==(u.type||"").toLowerCase()?k._evalUrl&&!u.noModule&&k._evalUrl(u.src,{nonce:u.nonce||u.getAttribute("nonce")}):b(u.textContent.replace(He,""),u,l))}return n}function We(e,t,n){for(var 
r,i=t?k.filter(t,e):e,o=0;null!=(r=i[o]);o++)n||1!==r.nodeType||k.cleanData(ve(r)),r.parentNode&&(n&&oe(r)&&ye(ve(r,"script")),r.parentNode.removeChild(r));return e}k.extend({htmlPrefilter:function(e){return e.replace(je,"<$1></$2>")},clone:function(e,t,n){var r,i,o,a,s,u,l,c=e.cloneNode(!0),f=oe(e);if(!(y.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||k.isXMLDoc(e)))for(a=ve(c),r=0,i=(o=ve(e)).length;r<i;r++)s=o[r],u=a[r],void 0,"input"===(l=u.nodeName.toLowerCase())&&pe.test(s.type)?u.checked=s.checked:"input"!==l&&"textarea"!==l||(u.defaultValue=s.defaultValue);if(t)if(n)for(o=o||ve(e),a=a||ve(c),r=0,i=o.length;r<i;r++)Me(o[r],a[r]);else Me(e,c);return 0<(a=ve(c,"script")).length&&ye(a,!f&&ve(e,"script")),c},cleanData:function(e){for(var t,n,r,i=k.event.special,o=0;void 0!==(n=e[o]);o++)if(G(n)){if(t=n[Q.expando]){if(t.events)for(r in t.events)i[r]?k.event.remove(n,r):k.removeEvent(n,r,t.handle);n[Q.expando]=void 0}n[J.expando]&&(n[J.expando]=void 0)}}}),k.fn.extend({detach:function(e){return We(this,e,!0)},remove:function(e){return We(this,e)},text:function(e){return _(this,function(e){return void 0===e?k.text(this):this.empty().each(function(){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||(this.textContent=e)})},null,e,arguments.length)},append:function(){return Ie(this,arguments,function(e){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||Oe(this,e).appendChild(e)})},prepend:function(){return Ie(this,arguments,function(e){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var t=Oe(this,e);t.insertBefore(e,t.firstChild)}})},before:function(){return Ie(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this)})},after:function(){return Ie(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this.nextSibling)})},empty:function(){for(var e,t=0;null!=(e=this[t]);t++)1===e.nodeType&&(k.cleanData(ve(e,!1)),e.textContent="");return this},clone:function(e,t){return 
e=null!=e&&e,t=null==t?e:t,this.map(function(){return k.clone(this,e,t)})},html:function(e){return _(this,function(e){var t=this[0]||{},n=0,r=this.length;if(void 0===e&&1===t.nodeType)return t.innerHTML;if("string"==typeof e&&!qe.test(e)&&!ge[(de.exec(e)||["",""])[1].toLowerCase()]){e=k.htmlPrefilter(e);try{for(;n<r;n++)1===(t=this[n]||{}).nodeType&&(k.cleanData(ve(t,!1)),t.innerHTML=e);t=0}catch(e){}}t&&this.empty().append(e)},null,e,arguments.length)},replaceWith:function(){var n=[];return Ie(this,arguments,function(e){var t=this.parentNode;k.inArray(this,n)<0&&(k.cleanData(ve(this)),t&&t.replaceChild(e,this))},n)}}),k.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(e,a){k.fn[e]=function(e){for(var t,n=[],r=k(e),i=r.length-1,o=0;o<=i;o++)t=o===i?this:this.clone(!0),k(r[o])[a](t),u.apply(n,t.get());return this.pushStack(n)}});var $e=new RegExp("^("+te+")(?!px)[a-z%]+$","i"),Fe=function(e){var t=e.ownerDocument.defaultView;return t&&t.opener||(t=C),t.getComputedStyle(e)},Be=new RegExp(re.join("|"),"i");function _e(e,t,n){var r,i,o,a,s=e.style;return(n=n||Fe(e))&&(""!==(a=n.getPropertyValue(t)||n[t])||oe(e)||(a=k.style(e,t)),!y.pixelBoxStyles()&&$e.test(a)&&Be.test(t)&&(r=s.width,i=s.minWidth,o=s.maxWidth,s.minWidth=s.maxWidth=s.width=a,a=n.width,s.width=r,s.minWidth=i,s.maxWidth=o)),void 0!==a?a+"":a}function ze(e,t){return{get:function(){if(!e())return(this.get=t).apply(this,arguments);delete this.get}}}!function(){function e(){if(u){s.style.cssText="position:absolute;left:-11111px;width:60px;margin-top:1px;padding:0;border:0",u.style.cssText="position:relative;display:block;box-sizing:border-box;overflow:scroll;margin:auto;border:1px;padding:1px;width:60%;top:1%",ie.appendChild(s).appendChild(u);var 
e=C.getComputedStyle(u);n="1%"!==e.top,a=12===t(e.marginLeft),u.style.right="60%",o=36===t(e.right),r=36===t(e.width),u.style.position="absolute",i=12===t(u.offsetWidth/3),ie.removeChild(s),u=null}}function t(e){return Math.round(parseFloat(e))}var n,r,i,o,a,s=E.createElement("div"),u=E.createElement("div");u.style&&(u.style.backgroundClip="content-box",u.cloneNode(!0).style.backgroundClip="",y.clearCloneStyle="content-box"===u.style.backgroundClip,k.extend(y,{boxSizingReliable:function(){return e(),r},pixelBoxStyles:function(){return e(),o},pixelPosition:function(){return e(),n},reliableMarginLeft:function(){return e(),a},scrollboxSize:function(){return e(),i}}))}();var Ue=["Webkit","Moz","ms"],Xe=E.createElement("div").style,Ve={};function Ge(e){var t=k.cssProps[e]||Ve[e];return t||(e in Xe?e:Ve[e]=function(e){var t=e[0].toUpperCase()+e.slice(1),n=Ue.length;while(n--)if((e=Ue[n]+t)in Xe)return e}(e)||e)}var Ye=/^(none|table(?!-c[ea]).+)/,Qe=/^--/,Je={position:"absolute",visibility:"hidden",display:"block"},Ke={letterSpacing:"0",fontWeight:"400"};function Ze(e,t,n){var r=ne.exec(t);return r?Math.max(0,r[2]-(n||0))+(r[3]||"px"):t}function et(e,t,n,r,i,o){var a="width"===t?1:0,s=0,u=0;if(n===(r?"border":"content"))return 0;for(;a<4;a+=2)"margin"===n&&(u+=k.css(e,n+re[a],!0,i)),r?("content"===n&&(u-=k.css(e,"padding"+re[a],!0,i)),"margin"!==n&&(u-=k.css(e,"border"+re[a]+"Width",!0,i))):(u+=k.css(e,"padding"+re[a],!0,i),"padding"!==n?u+=k.css(e,"border"+re[a]+"Width",!0,i):s+=k.css(e,"border"+re[a]+"Width",!0,i));return!r&&0<=o&&(u+=Math.max(0,Math.ceil(e["offset"+t[0].toUpperCase()+t.slice(1)]-o-u-s-.5))||0),u}function tt(e,t,n){var r=Fe(e),i=(!y.boxSizingReliable()||n)&&"border-box"===k.css(e,"boxSizing",!1,r),o=i,a=_e(e,t,r),s="offset"+t[0].toUpperCase()+t.slice(1);if($e.test(a)){if(!n)return 
a;a="auto"}return(!y.boxSizingReliable()&&i||"auto"===a||!parseFloat(a)&&"inline"===k.css(e,"display",!1,r))&&e.getClientRects().length&&(i="border-box"===k.css(e,"boxSizing",!1,r),(o=s in e)&&(a=e[s])),(a=parseFloat(a)||0)+et(e,t,n||(i?"border":"content"),o,r,a)+"px"}function nt(e,t,n,r,i){return new nt.prototype.init(e,t,n,r,i)}k.extend({cssHooks:{opacity:{get:function(e,t){if(t){var n=_e(e,"opacity");return""===n?"1":n}}}},cssNumber:{animationIterationCount:!0,columnCount:!0,fillOpacity:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,gridArea:!0,gridColumn:!0,gridColumnEnd:!0,gridColumnStart:!0,gridRow:!0,gridRowEnd:!0,gridRowStart:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{},style:function(e,t,n,r){if(e&&3!==e.nodeType&&8!==e.nodeType&&e.style){var i,o,a,s=V(t),u=Qe.test(t),l=e.style;if(u||(t=Ge(s)),a=k.cssHooks[t]||k.cssHooks[s],void 0===n)return a&&"get"in a&&void 0!==(i=a.get(e,!1,r))?i:l[t];"string"===(o=typeof n)&&(i=ne.exec(n))&&i[1]&&(n=le(e,t,i),o="number"),null!=n&&n==n&&("number"!==o||u||(n+=i&&i[3]||(k.cssNumber[s]?"":"px")),y.clearCloneStyle||""!==n||0!==t.indexOf("background")||(l[t]="inherit"),a&&"set"in a&&void 0===(n=a.set(e,n,r))||(u?l.setProperty(t,n):l[t]=n))}},css:function(e,t,n,r){var i,o,a,s=V(t);return Qe.test(t)||(t=Ge(s)),(a=k.cssHooks[t]||k.cssHooks[s])&&"get"in a&&(i=a.get(e,!0,n)),void 0===i&&(i=_e(e,t,r)),"normal"===i&&t in Ke&&(i=Ke[t]),""===n||n?(o=parseFloat(i),!0===n||isFinite(o)?o||0:i):i}}),k.each(["height","width"],function(e,u){k.cssHooks[u]={get:function(e,t,n){if(t)return!Ye.test(k.css(e,"display"))||e.getClientRects().length&&e.getBoundingClientRect().width?tt(e,u,n):ue(e,Je,function(){return tt(e,u,n)})},set:function(e,t,n){var r,i=Fe(e),o=!y.scrollboxSize()&&"absolute"===i.position,a=(o||n)&&"border-box"===k.css(e,"boxSizing",!1,i),s=n?et(e,u,n,a,i):0;return 
a&&o&&(s-=Math.ceil(e["offset"+u[0].toUpperCase()+u.slice(1)]-parseFloat(i[u])-et(e,u,"border",!1,i)-.5)),s&&(r=ne.exec(t))&&"px"!==(r[3]||"px")&&(e.style[u]=t,t=k.css(e,u)),Ze(0,t,s)}}}),k.cssHooks.marginLeft=ze(y.reliableMarginLeft,function(e,t){if(t)return(parseFloat(_e(e,"marginLeft"))||e.getBoundingClientRect().left-ue(e,{marginLeft:0},function(){return e.getBoundingClientRect().left}))+"px"}),k.each({margin:"",padding:"",border:"Width"},function(i,o){k.cssHooks[i+o]={expand:function(e){for(var t=0,n={},r="string"==typeof e?e.split(" "):[e];t<4;t++)n[i+re[t]+o]=r[t]||r[t-2]||r[0];return n}},"margin"!==i&&(k.cssHooks[i+o].set=Ze)}),k.fn.extend({css:function(e,t){return _(this,function(e,t,n){var r,i,o={},a=0;if(Array.isArray(t)){for(r=Fe(e),i=t.length;a<i;a++)o[t[a]]=k.css(e,t[a],!1,r);return o}return void 0!==n?k.style(e,t,n):k.css(e,t)},e,t,1<arguments.length)}}),((k.Tween=nt).prototype={constructor:nt,init:function(e,t,n,r,i,o){this.elem=e,this.prop=n,this.easing=i||k.easing._default,this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=o||(k.cssNumber[n]?"":"px")},cur:function(){var e=nt.propHooks[this.prop];return e&&e.get?e.get(this):nt.propHooks._default.get(this)},run:function(e){var t,n=nt.propHooks[this.prop];return this.options.duration?this.pos=t=k.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):this.pos=t=e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):nt.propHooks._default.set(this),this}}).init.prototype=nt.prototype,(nt.propHooks={_default:{get:function(e){var t;return 
1!==e.elem.nodeType||null!=e.elem[e.prop]&&null==e.elem.style[e.prop]?e.elem[e.prop]:(t=k.css(e.elem,e.prop,""))&&"auto"!==t?t:0},set:function(e){k.fx.step[e.prop]?k.fx.step[e.prop](e):1!==e.elem.nodeType||!k.cssHooks[e.prop]&&null==e.elem.style[Ge(e.prop)]?e.elem[e.prop]=e.now:k.style(e.elem,e.prop,e.now+e.unit)}}}).scrollTop=nt.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},k.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2},_default:"swing"},k.fx=nt.prototype.init,k.fx.step={};var rt,it,ot,at,st=/^(?:toggle|show|hide)$/,ut=/queueHooks$/;function lt(){it&&(!1===E.hidden&&C.requestAnimationFrame?C.requestAnimationFrame(lt):C.setTimeout(lt,k.fx.interval),k.fx.tick())}function ct(){return C.setTimeout(function(){rt=void 0}),rt=Date.now()}function ft(e,t){var n,r=0,i={height:e};for(t=t?1:0;r<4;r+=2-t)i["margin"+(n=re[r])]=i["padding"+n]=e;return t&&(i.opacity=i.width=e),i}function pt(e,t,n){for(var r,i=(dt.tweeners[t]||[]).concat(dt.tweeners["*"]),o=0,a=i.length;o<a;o++)if(r=i[o].call(n,t,e))return r}function dt(o,e,t){var n,a,r=0,i=dt.prefilters.length,s=k.Deferred().always(function(){delete u.elem}),u=function(){if(a)return!1;for(var e=rt||ct(),t=Math.max(0,l.startTime+l.duration-e),n=1-(t/l.duration||0),r=0,i=l.tweens.length;r<i;r++)l.tweens[r].run(n);return s.notifyWith(o,[l,n,t]),n<1&&i?t:(i||s.notifyWith(o,[l,1,0]),s.resolveWith(o,[l]),!1)},l=s.promise({elem:o,props:k.extend({},e),opts:k.extend(!0,{specialEasing:{},easing:k.easing._default},t),originalProperties:e,originalOptions:t,startTime:rt||ct(),duration:t.duration,tweens:[],createTween:function(e,t){var n=k.Tween(o,l.opts,e,t,l.opts.specialEasing[e]||l.opts.easing);return l.tweens.push(n),n},stop:function(e){var t=0,n=e?l.tweens.length:0;if(a)return this;for(a=!0;t<n;t++)l.tweens[t].run(1);return e?(s.notifyWith(o,[l,1,0]),s.resolveWith(o,[l,e])):s.rejectWith(o,[l,e]),this}}),c=l.props;for(!function(e,t){var 
n,r,i,o,a;for(n in e)if(i=t[r=V(n)],o=e[n],Array.isArray(o)&&(i=o[1],o=e[n]=o[0]),n!==r&&(e[r]=o,delete e[n]),(a=k.cssHooks[r])&&"expand"in a)for(n in o=a.expand(o),delete e[r],o)n in e||(e[n]=o[n],t[n]=i);else t[r]=i}(c,l.opts.specialEasing);r<i;r++)if(n=dt.prefilters[r].call(l,o,c,l.opts))return m(n.stop)&&(k._queueHooks(l.elem,l.opts.queue).stop=n.stop.bind(n)),n;return k.map(c,pt,l),m(l.opts.start)&&l.opts.start.call(o,l),l.progress(l.opts.progress).done(l.opts.done,l.opts.complete).fail(l.opts.fail).always(l.opts.always),k.fx.timer(k.extend(u,{elem:o,anim:l,queue:l.opts.queue})),l}k.Animation=k.extend(dt,{tweeners:{"*":[function(e,t){var n=this.createTween(e,t);return le(n.elem,e,ne.exec(t),n),n}]},tweener:function(e,t){m(e)?(t=e,e=["*"]):e=e.match(R);for(var n,r=0,i=e.length;r<i;r++)n=e[r],dt.tweeners[n]=dt.tweeners[n]||[],dt.tweeners[n].unshift(t)},prefilters:[function(e,t,n){var r,i,o,a,s,u,l,c,f="width"in t||"height"in t,p=this,d={},h=e.style,g=e.nodeType&&se(e),v=Q.get(e,"fxshow");for(r in n.queue||(null==(a=k._queueHooks(e,"fx")).unqueued&&(a.unqueued=0,s=a.empty.fire,a.empty.fire=function(){a.unqueued||s()}),a.unqueued++,p.always(function(){p.always(function(){a.unqueued--,k.queue(e,"fx").length||a.empty.fire()})})),t)if(i=t[r],st.test(i)){if(delete t[r],o=o||"toggle"===i,i===(g?"hide":"show")){if("show"!==i||!v||void 0===v[r])continue;g=!0}d[r]=v&&v[r]||k.style(e,r)}if((u=!k.isEmptyObject(t))||!k.isEmptyObject(d))for(r in 
f&&1===e.nodeType&&(n.overflow=[h.overflow,h.overflowX,h.overflowY],null==(l=v&&v.display)&&(l=Q.get(e,"display")),"none"===(c=k.css(e,"display"))&&(l?c=l:(fe([e],!0),l=e.style.display||l,c=k.css(e,"display"),fe([e]))),("inline"===c||"inline-block"===c&&null!=l)&&"none"===k.css(e,"float")&&(u||(p.done(function(){h.display=l}),null==l&&(c=h.display,l="none"===c?"":c)),h.display="inline-block")),n.overflow&&(h.overflow="hidden",p.always(function(){h.overflow=n.overflow[0],h.overflowX=n.overflow[1],h.overflowY=n.overflow[2]})),u=!1,d)u||(v?"hidden"in v&&(g=v.hidden):v=Q.access(e,"fxshow",{display:l}),o&&(v.hidden=!g),g&&fe([e],!0),p.done(function(){for(r in g||fe([e]),Q.remove(e,"fxshow"),d)k.style(e,r,d[r])})),u=pt(g?v[r]:0,r,p),r in v||(v[r]=u.start,g&&(u.end=u.start,u.start=0))}],prefilter:function(e,t){t?dt.prefilters.unshift(e):dt.prefilters.push(e)}}),k.speed=function(e,t,n){var r=e&&"object"==typeof e?k.extend({},e):{complete:n||!n&&t||m(e)&&e,duration:e,easing:n&&t||t&&!m(t)&&t};return k.fx.off?r.duration=0:"number"!=typeof r.duration&&(r.duration in k.fx.speeds?r.duration=k.fx.speeds[r.duration]:r.duration=k.fx.speeds._default),null!=r.queue&&!0!==r.queue||(r.queue="fx"),r.old=r.complete,r.complete=function(){m(r.old)&&r.old.call(this),r.queue&&k.dequeue(this,r.queue)},r},k.fn.extend({fadeTo:function(e,t,n,r){return this.filter(se).css("opacity",0).show().end().animate({opacity:t},e,n,r)},animate:function(t,e,n,r){var i=k.isEmptyObject(t),o=k.speed(e,n,r),a=function(){var e=dt(this,k.extend({},t),o);(i||Q.get(this,"finish"))&&e.stop(!0)};return a.finish=a,i||!1===o.queue?this.each(a):this.queue(o.queue,a)},stop:function(i,e,o){var a=function(e){var t=e.stop;delete e.stop,t(o)};return"string"!=typeof i&&(o=e,e=i,i=void 0),e&&!1!==i&&this.queue(i||"fx",[]),this.each(function(){var e=!0,t=null!=i&&i+"queueHooks",n=k.timers,r=Q.get(this);if(t)r[t]&&r[t].stop&&a(r[t]);else for(t in 
r)r[t]&&r[t].stop&&ut.test(t)&&a(r[t]);for(t=n.length;t--;)n[t].elem!==this||null!=i&&n[t].queue!==i||(n[t].anim.stop(o),e=!1,n.splice(t,1));!e&&o||k.dequeue(this,i)})},finish:function(a){return!1!==a&&(a=a||"fx"),this.each(function(){var e,t=Q.get(this),n=t[a+"queue"],r=t[a+"queueHooks"],i=k.timers,o=n?n.length:0;for(t.finish=!0,k.queue(this,a,[]),r&&r.stop&&r.stop.call(this,!0),e=i.length;e--;)i[e].elem===this&&i[e].queue===a&&(i[e].anim.stop(!0),i.splice(e,1));for(e=0;e<o;e++)n[e]&&n[e].finish&&n[e].finish.call(this);delete t.finish})}}),k.each(["toggle","show","hide"],function(e,r){var i=k.fn[r];k.fn[r]=function(e,t,n){return null==e||"boolean"==typeof e?i.apply(this,arguments):this.animate(ft(r,!0),e,t,n)}}),k.each({slideDown:ft("show"),slideUp:ft("hide"),slideToggle:ft("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(e,r){k.fn[e]=function(e,t,n){return this.animate(r,e,t,n)}}),k.timers=[],k.fx.tick=function(){var e,t=0,n=k.timers;for(rt=Date.now();t<n.length;t++)(e=n[t])()||n[t]!==e||n.splice(t--,1);n.length||k.fx.stop(),rt=void 0},k.fx.timer=function(e){k.timers.push(e),k.fx.start()},k.fx.interval=13,k.fx.start=function(){it||(it=!0,lt())},k.fx.stop=function(){it=null},k.fx.speeds={slow:600,fast:200,_default:400},k.fn.delay=function(r,e){return r=k.fx&&k.fx.speeds[r]||r,e=e||"fx",this.queue(e,function(e,t){var n=C.setTimeout(e,r);t.stop=function(){C.clearTimeout(n)}})},ot=E.createElement("input"),at=E.createElement("select").appendChild(E.createElement("option")),ot.type="checkbox",y.checkOn=""!==ot.value,y.optSelected=at.selected,(ot=E.createElement("input")).value="t",ot.type="radio",y.radioValue="t"===ot.value;var ht,gt=k.expr.attrHandle;k.fn.extend({attr:function(e,t){return _(this,k.attr,e,t,1<arguments.length)},removeAttr:function(e){return this.each(function(){k.removeAttr(this,e)})}}),k.extend({attr:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return"undefined"==typeof 
e.getAttribute?k.prop(e,t,n):(1===o&&k.isXMLDoc(e)||(i=k.attrHooks[t.toLowerCase()]||(k.expr.match.bool.test(t)?ht:void 0)),void 0!==n?null===n?void k.removeAttr(e,t):i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:(e.setAttribute(t,n+""),n):i&&"get"in i&&null!==(r=i.get(e,t))?r:null==(r=k.find.attr(e,t))?void 0:r)},attrHooks:{type:{set:function(e,t){if(!y.radioValue&&"radio"===t&&A(e,"input")){var n=e.value;return e.setAttribute("type",t),n&&(e.value=n),t}}}},removeAttr:function(e,t){var n,r=0,i=t&&t.match(R);if(i&&1===e.nodeType)while(n=i[r++])e.removeAttribute(n)}}),ht={set:function(e,t,n){return!1===t?k.removeAttr(e,n):e.setAttribute(n,n),n}},k.each(k.expr.match.bool.source.match(/\w+/g),function(e,t){var a=gt[t]||k.find.attr;gt[t]=function(e,t,n){var r,i,o=t.toLowerCase();return n||(i=gt[o],gt[o]=r,r=null!=a(e,t,n)?o:null,gt[o]=i),r}});var vt=/^(?:input|select|textarea|button)$/i,yt=/^(?:a|area)$/i;function mt(e){return(e.match(R)||[]).join(" ")}function xt(e){return e.getAttribute&&e.getAttribute("class")||""}function bt(e){return Array.isArray(e)?e:"string"==typeof e&&e.match(R)||[]}k.fn.extend({prop:function(e,t){return _(this,k.prop,e,t,1<arguments.length)},removeProp:function(e){return this.each(function(){delete this[k.propFix[e]||e]})}}),k.extend({prop:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return 1===o&&k.isXMLDoc(e)||(t=k.propFix[t]||t,i=k.propHooks[t]),void 0!==n?i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:e[t]=n:i&&"get"in i&&null!==(r=i.get(e,t))?r:e[t]},propHooks:{tabIndex:{get:function(e){var t=k.find.attr(e,"tabindex");return t?parseInt(t,10):vt.test(e.nodeName)||yt.test(e.nodeName)&&e.href?0:-1}}},propFix:{"for":"htmlFor","class":"className"}}),y.optSelected||(k.propHooks.selected={get:function(e){var t=e.parentNode;return t&&t.parentNode&&t.parentNode.selectedIndex,null},set:function(e){var 
t=e.parentNode;t&&(t.selectedIndex,t.parentNode&&t.parentNode.selectedIndex)}}),k.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){k.propFix[this.toLowerCase()]=this}),k.fn.extend({addClass:function(t){var e,n,r,i,o,a,s,u=0;if(m(t))return this.each(function(e){k(this).addClass(t.call(this,e,xt(this)))});if((e=bt(t)).length)while(n=this[u++])if(i=xt(n),r=1===n.nodeType&&" "+mt(i)+" "){a=0;while(o=e[a++])r.indexOf(" "+o+" ")<0&&(r+=o+" ");i!==(s=mt(r))&&n.setAttribute("class",s)}return this},removeClass:function(t){var e,n,r,i,o,a,s,u=0;if(m(t))return this.each(function(e){k(this).removeClass(t.call(this,e,xt(this)))});if(!arguments.length)return this.attr("class","");if((e=bt(t)).length)while(n=this[u++])if(i=xt(n),r=1===n.nodeType&&" "+mt(i)+" "){a=0;while(o=e[a++])while(-1<r.indexOf(" "+o+" "))r=r.replace(" "+o+" "," ");i!==(s=mt(r))&&n.setAttribute("class",s)}return this},toggleClass:function(i,t){var o=typeof i,a="string"===o||Array.isArray(i);return"boolean"==typeof t&&a?t?this.addClass(i):this.removeClass(i):m(i)?this.each(function(e){k(this).toggleClass(i.call(this,e,xt(this),t),t)}):this.each(function(){var e,t,n,r;if(a){t=0,n=k(this),r=bt(i);while(e=r[t++])n.hasClass(e)?n.removeClass(e):n.addClass(e)}else void 0!==i&&"boolean"!==o||((e=xt(this))&&Q.set(this,"__className__",e),this.setAttribute&&this.setAttribute("class",e||!1===i?"":Q.get(this,"__className__")||""))})},hasClass:function(e){var t,n,r=0;t=" "+e+" ";while(n=this[r++])if(1===n.nodeType&&-1<(" "+mt(xt(n))+" ").indexOf(t))return!0;return!1}});var wt=/\r/g;k.fn.extend({val:function(n){var r,e,i,t=this[0];return arguments.length?(i=m(n),this.each(function(e){var t;1===this.nodeType&&(null==(t=i?n.call(this,e,k(this).val()):n)?t="":"number"==typeof t?t+="":Array.isArray(t)&&(t=k.map(t,function(e){return null==e?"":e+""})),(r=k.valHooks[this.type]||k.valHooks[this.nodeName.toLowerCase()])&&"set"in r&&void 
0!==r.set(this,t,"value")||(this.value=t))})):t?(r=k.valHooks[t.type]||k.valHooks[t.nodeName.toLowerCase()])&&"get"in r&&void 0!==(e=r.get(t,"value"))?e:"string"==typeof(e=t.value)?e.replace(wt,""):null==e?"":e:void 0}}),k.extend({valHooks:{option:{get:function(e){var t=k.find.attr(e,"value");return null!=t?t:mt(k.text(e))}},select:{get:function(e){var t,n,r,i=e.options,o=e.selectedIndex,a="select-one"===e.type,s=a?null:[],u=a?o+1:i.length;for(r=o<0?u:a?o:0;r<u;r++)if(((n=i[r]).selected||r===o)&&!n.disabled&&(!n.parentNode.disabled||!A(n.parentNode,"optgroup"))){if(t=k(n).val(),a)return t;s.push(t)}return s},set:function(e,t){var n,r,i=e.options,o=k.makeArray(t),a=i.length;while(a--)((r=i[a]).selected=-1<k.inArray(k.valHooks.option.get(r),o))&&(n=!0);return n||(e.selectedIndex=-1),o}}}}),k.each(["radio","checkbox"],function(){k.valHooks[this]={set:function(e,t){if(Array.isArray(t))return e.checked=-1<k.inArray(k(e).val(),t)}},y.checkOn||(k.valHooks[this].get=function(e){return null===e.getAttribute("value")?"on":e.value})}),y.focusin="onfocusin"in C;var Tt=/^(?:focusinfocus|focusoutblur)$/,Ct=function(e){e.stopPropagation()};k.extend(k.event,{trigger:function(e,t,n,r){var i,o,a,s,u,l,c,f,p=[n||E],d=v.call(e,"type")?e.type:e,h=v.call(e,"namespace")?e.namespace.split("."):[];if(o=f=a=n=n||E,3!==n.nodeType&&8!==n.nodeType&&!Tt.test(d+k.event.triggered)&&(-1<d.indexOf(".")&&(d=(h=d.split(".")).shift(),h.sort()),u=d.indexOf(":")<0&&"on"+d,(e=e[k.expando]?e:new k.Event(d,"object"==typeof e&&e)).isTrigger=r?2:3,e.namespace=h.join("."),e.rnamespace=e.namespace?new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,e.result=void 
0,e.target||(e.target=n),t=null==t?[e]:k.makeArray(t,[e]),c=k.event.special[d]||{},r||!c.trigger||!1!==c.trigger.apply(n,t))){if(!r&&!c.noBubble&&!x(n)){for(s=c.delegateType||d,Tt.test(s+d)||(o=o.parentNode);o;o=o.parentNode)p.push(o),a=o;a===(n.ownerDocument||E)&&p.push(a.defaultView||a.parentWindow||C)}i=0;while((o=p[i++])&&!e.isPropagationStopped())f=o,e.type=1<i?s:c.bindType||d,(l=(Q.get(o,"events")||{})[e.type]&&Q.get(o,"handle"))&&l.apply(o,t),(l=u&&o[u])&&l.apply&&G(o)&&(e.result=l.apply(o,t),!1===e.result&&e.preventDefault());return e.type=d,r||e.isDefaultPrevented()||c._default&&!1!==c._default.apply(p.pop(),t)||!G(n)||u&&m(n[d])&&!x(n)&&((a=n[u])&&(n[u]=null),k.event.triggered=d,e.isPropagationStopped()&&f.addEventListener(d,Ct),n[d](),e.isPropagationStopped()&&f.removeEventListener(d,Ct),k.event.triggered=void 0,a&&(n[u]=a)),e.result}},simulate:function(e,t,n){var r=k.extend(new k.Event,n,{type:e,isSimulated:!0});k.event.trigger(r,null,t)}}),k.fn.extend({trigger:function(e,t){return this.each(function(){k.event.trigger(e,t,this)})},triggerHandler:function(e,t){var n=this[0];if(n)return k.event.trigger(e,t,n,!0)}}),y.focusin||k.each({focus:"focusin",blur:"focusout"},function(n,r){var i=function(e){k.event.simulate(r,e.target,k.event.fix(e))};k.event.special[r]={setup:function(){var e=this.ownerDocument||this,t=Q.access(e,r);t||e.addEventListener(n,i,!0),Q.access(e,r,(t||0)+1)},teardown:function(){var e=this.ownerDocument||this,t=Q.access(e,r)-1;t?Q.access(e,r,t):(e.removeEventListener(n,i,!0),Q.remove(e,r))}}});var Et=C.location,kt=Date.now(),St=/\?/;k.parseXML=function(e){var t;if(!e||"string"!=typeof e)return null;try{t=(new C.DOMParser).parseFromString(e,"text/xml")}catch(e){t=void 0}return t&&!t.getElementsByTagName("parsererror").length||k.error("Invalid XML: "+e),t};var Nt=/\[\]$/,At=/\r?\n/g,Dt=/^(?:submit|button|image|reset|file)$/i,jt=/^(?:input|select|textarea|keygen)/i;function qt(n,e,r,i){var 
t;if(Array.isArray(e))k.each(e,function(e,t){r||Nt.test(n)?i(n,t):qt(n+"["+("object"==typeof t&&null!=t?e:"")+"]",t,r,i)});else if(r||"object"!==w(e))i(n,e);else for(t in e)qt(n+"["+t+"]",e[t],r,i)}k.param=function(e,t){var n,r=[],i=function(e,t){var n=m(t)?t():t;r[r.length]=encodeURIComponent(e)+"="+encodeURIComponent(null==n?"":n)};if(null==e)return"";if(Array.isArray(e)||e.jquery&&!k.isPlainObject(e))k.each(e,function(){i(this.name,this.value)});else for(n in e)qt(n,e[n],t,i);return r.join("&")},k.fn.extend({serialize:function(){return k.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var e=k.prop(this,"elements");return e?k.makeArray(e):this}).filter(function(){var e=this.type;return this.name&&!k(this).is(":disabled")&&jt.test(this.nodeName)&&!Dt.test(e)&&(this.checked||!pe.test(e))}).map(function(e,t){var n=k(this).val();return null==n?null:Array.isArray(n)?k.map(n,function(e){return{name:t.name,value:e.replace(At,"\r\n")}}):{name:t.name,value:n.replace(At,"\r\n")}}).get()}});var Lt=/%20/g,Ht=/#.*$/,Ot=/([?&])_=[^&]*/,Pt=/^(.*?):[ \t]*([^\r\n]*)$/gm,Rt=/^(?:GET|HEAD)$/,Mt=/^\/\//,It={},Wt={},$t="*/".concat("*"),Ft=E.createElement("a");function Bt(o){return function(e,t){"string"!=typeof e&&(t=e,e="*");var n,r=0,i=e.toLowerCase().match(R)||[];if(m(t))while(n=i[r++])"+"===n[0]?(n=n.slice(1)||"*",(o[n]=o[n]||[]).unshift(t)):(o[n]=o[n]||[]).push(t)}}function _t(t,i,o,a){var s={},u=t===Wt;function l(e){var r;return s[e]=!0,k.each(t[e]||[],function(e,t){var n=t(i,o,a);return"string"!=typeof n||u||s[n]?u?!(r=n):void 0:(i.dataTypes.unshift(n),l(n),!1)}),r}return l(i.dataTypes[0])||!s["*"]&&l("*")}function zt(e,t){var n,r,i=k.ajaxSettings.flatOptions||{};for(n in t)void 0!==t[n]&&((i[n]?e:r||(r={}))[n]=t[n]);return 
r&&k.extend(!0,e,r),e}Ft.href=Et.href,k.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:Et.href,type:"GET",isLocal:/^(?:about|app|app-storage|.+-extension|file|res|widget):$/.test(Et.protocol),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":$t,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/\bxml\b/,html:/\bhtml/,json:/\bjson\b/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":JSON.parse,"text xml":k.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(e,t){return t?zt(zt(e,k.ajaxSettings),t):zt(k.ajaxSettings,e)},ajaxPrefilter:Bt(It),ajaxTransport:Bt(Wt),ajax:function(e,t){"object"==typeof e&&(t=e,e=void 0),t=t||{};var c,f,p,n,d,r,h,g,i,o,v=k.ajaxSetup({},t),y=v.context||v,m=v.context&&(y.nodeType||y.jquery)?k(y):k.event,x=k.Deferred(),b=k.Callbacks("once memory"),w=v.statusCode||{},a={},s={},u="canceled",T={readyState:0,getResponseHeader:function(e){var t;if(h){if(!n){n={};while(t=Pt.exec(p))n[t[1].toLowerCase()+" "]=(n[t[1].toLowerCase()+" "]||[]).concat(t[2])}t=n[e.toLowerCase()+" "]}return null==t?null:t.join(", ")},getAllResponseHeaders:function(){return h?p:null},setRequestHeader:function(e,t){return null==h&&(e=s[e.toLowerCase()]=s[e.toLowerCase()]||e,a[e]=t),this},overrideMimeType:function(e){return null==h&&(v.mimeType=e),this},statusCode:function(e){var t;if(e)if(h)T.always(e[T.status]);else for(t in e)w[t]=[w[t],e[t]];return this},abort:function(e){var t=e||u;return 
c&&c.abort(t),l(0,t),this}};if(x.promise(T),v.url=((e||v.url||Et.href)+"").replace(Mt,Et.protocol+"//"),v.type=t.method||t.type||v.method||v.type,v.dataTypes=(v.dataType||"*").toLowerCase().match(R)||[""],null==v.crossDomain){r=E.createElement("a");try{r.href=v.url,r.href=r.href,v.crossDomain=Ft.protocol+"//"+Ft.host!=r.protocol+"//"+r.host}catch(e){v.crossDomain=!0}}if(v.data&&v.processData&&"string"!=typeof v.data&&(v.data=k.param(v.data,v.traditional)),_t(It,v,t,T),h)return T;for(i in(g=k.event&&v.global)&&0==k.active++&&k.event.trigger("ajaxStart"),v.type=v.type.toUpperCase(),v.hasContent=!Rt.test(v.type),f=v.url.replace(Ht,""),v.hasContent?v.data&&v.processData&&0===(v.contentType||"").indexOf("application/x-www-form-urlencoded")&&(v.data=v.data.replace(Lt,"+")):(o=v.url.slice(f.length),v.data&&(v.processData||"string"==typeof v.data)&&(f+=(St.test(f)?"&":"?")+v.data,delete v.data),!1===v.cache&&(f=f.replace(Ot,"$1"),o=(St.test(f)?"&":"?")+"_="+kt+++o),v.url=f+o),v.ifModified&&(k.lastModified[f]&&T.setRequestHeader("If-Modified-Since",k.lastModified[f]),k.etag[f]&&T.setRequestHeader("If-None-Match",k.etag[f])),(v.data&&v.hasContent&&!1!==v.contentType||t.contentType)&&T.setRequestHeader("Content-Type",v.contentType),T.setRequestHeader("Accept",v.dataTypes[0]&&v.accepts[v.dataTypes[0]]?v.accepts[v.dataTypes[0]]+("*"!==v.dataTypes[0]?", "+$t+"; q=0.01":""):v.accepts["*"]),v.headers)T.setRequestHeader(i,v.headers[i]);if(v.beforeSend&&(!1===v.beforeSend.call(y,T,v)||h))return T.abort();if(u="abort",b.add(v.complete),T.done(v.success),T.fail(v.error),c=_t(Wt,v,t,T)){if(T.readyState=1,g&&m.trigger("ajaxSend",[T,v]),h)return T;v.async&&0<v.timeout&&(d=C.setTimeout(function(){T.abort("timeout")},v.timeout));try{h=!1,c.send(a,l)}catch(e){if(h)throw e;l(-1,e)}}else l(-1,"No Transport");function l(e,t,n,r){var i,o,a,s,u,l=t;h||(h=!0,d&&C.clearTimeout(d),c=void 0,p=r||"",T.readyState=0<e?4:0,i=200<=e&&e<300||304===e,n&&(s=function(e,t,n){var 
r,i,o,a,s=e.contents,u=e.dataTypes;while("*"===u[0])u.shift(),void 0===r&&(r=e.mimeType||t.getResponseHeader("Content-Type"));if(r)for(i in s)if(s[i]&&s[i].test(r)){u.unshift(i);break}if(u[0]in n)o=u[0];else{for(i in n){if(!u[0]||e.converters[i+" "+u[0]]){o=i;break}a||(a=i)}o=o||a}if(o)return o!==u[0]&&u.unshift(o),n[o]}(v,T,n)),s=function(e,t,n,r){var i,o,a,s,u,l={},c=e.dataTypes.slice();if(c[1])for(a in e.converters)l[a.toLowerCase()]=e.converters[a];o=c.shift();while(o)if(e.responseFields[o]&&(n[e.responseFields[o]]=t),!u&&r&&e.dataFilter&&(t=e.dataFilter(t,e.dataType)),u=o,o=c.shift())if("*"===o)o=u;else if("*"!==u&&u!==o){if(!(a=l[u+" "+o]||l["* "+o]))for(i in l)if((s=i.split(" "))[1]===o&&(a=l[u+" "+s[0]]||l["* "+s[0]])){!0===a?a=l[i]:!0!==l[i]&&(o=s[0],c.unshift(s[1]));break}if(!0!==a)if(a&&e["throws"])t=a(t);else try{t=a(t)}catch(e){return{state:"parsererror",error:a?e:"No conversion from "+u+" to "+o}}}return{state:"success",data:t}}(v,s,T,i),i?(v.ifModified&&((u=T.getResponseHeader("Last-Modified"))&&(k.lastModified[f]=u),(u=T.getResponseHeader("etag"))&&(k.etag[f]=u)),204===e||"HEAD"===v.type?l="nocontent":304===e?l="notmodified":(l=s.state,o=s.data,i=!(a=s.error))):(a=l,!e&&l||(l="error",e<0&&(e=0))),T.status=e,T.statusText=(t||l)+"",i?x.resolveWith(y,[o,l,T]):x.rejectWith(y,[T,l,a]),T.statusCode(w),w=void 0,g&&m.trigger(i?"ajaxSuccess":"ajaxError",[T,v,i?o:a]),b.fireWith(y,[T,l]),g&&(m.trigger("ajaxComplete",[T,v]),--k.active||k.event.trigger("ajaxStop")))}return T},getJSON:function(e,t,n){return k.get(e,t,n,"json")},getScript:function(e,t){return k.get(e,void 0,t,"script")}}),k.each(["get","post"],function(e,i){k[i]=function(e,t,n,r){return m(t)&&(r=r||n,n=t,t=void 0),k.ajax(k.extend({url:e,type:i,dataType:r,data:t,success:n},k.isPlainObject(e)&&e))}}),k._evalUrl=function(e,t){return k.ajax({url:e,type:"GET",dataType:"script",cache:!0,async:!1,global:!1,converters:{"text 
script":function(){}},dataFilter:function(e){k.globalEval(e,t)}})},k.fn.extend({wrapAll:function(e){var t;return this[0]&&(m(e)&&(e=e.call(this[0])),t=k(e,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstElementChild)e=e.firstElementChild;return e}).append(this)),this},wrapInner:function(n){return m(n)?this.each(function(e){k(this).wrapInner(n.call(this,e))}):this.each(function(){var e=k(this),t=e.contents();t.length?t.wrapAll(n):e.append(n)})},wrap:function(t){var n=m(t);return this.each(function(e){k(this).wrapAll(n?t.call(this,e):t)})},unwrap:function(e){return this.parent(e).not("body").each(function(){k(this).replaceWith(this.childNodes)}),this}}),k.expr.pseudos.hidden=function(e){return!k.expr.pseudos.visible(e)},k.expr.pseudos.visible=function(e){return!!(e.offsetWidth||e.offsetHeight||e.getClientRects().length)},k.ajaxSettings.xhr=function(){try{return new C.XMLHttpRequest}catch(e){}};var Ut={0:200,1223:204},Xt=k.ajaxSettings.xhr();y.cors=!!Xt&&"withCredentials"in Xt,y.ajax=Xt=!!Xt,k.ajaxTransport(function(i){var o,a;if(y.cors||Xt&&!i.crossDomain)return{send:function(e,t){var n,r=i.xhr();if(r.open(i.type,i.url,i.async,i.username,i.password),i.xhrFields)for(n in i.xhrFields)r[n]=i.xhrFields[n];for(n in i.mimeType&&r.overrideMimeType&&r.overrideMimeType(i.mimeType),i.crossDomain||e["X-Requested-With"]||(e["X-Requested-With"]="XMLHttpRequest"),e)r.setRequestHeader(n,e[n]);o=function(e){return function(){o&&(o=a=r.onload=r.onerror=r.onabort=r.ontimeout=r.onreadystatechange=null,"abort"===e?r.abort():"error"===e?"number"!=typeof r.status?t(0,"error"):t(r.status,r.statusText):t(Ut[r.status]||r.status,r.statusText,"text"!==(r.responseType||"text")||"string"!=typeof r.responseText?{binary:r.response}:{text:r.responseText},r.getAllResponseHeaders()))}},r.onload=o(),a=r.onerror=r.ontimeout=o("error"),void 
0!==r.onabort?r.onabort=a:r.onreadystatechange=function(){4===r.readyState&&C.setTimeout(function(){o&&a()})},o=o("abort");try{r.send(i.hasContent&&i.data||null)}catch(e){if(o)throw e}},abort:function(){o&&o()}}}),k.ajaxPrefilter(function(e){e.crossDomain&&(e.contents.script=!1)}),k.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/\b(?:java|ecma)script\b/},converters:{"text script":function(e){return k.globalEval(e),e}}}),k.ajaxPrefilter("script",function(e){void 0===e.cache&&(e.cache=!1),e.crossDomain&&(e.type="GET")}),k.ajaxTransport("script",function(n){var r,i;if(n.crossDomain||n.scriptAttrs)return{send:function(e,t){r=k("<script>").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),E.head.appendChild(r[0])},abort:function(){i&&i()}}});var Vt,Gt=[],Yt=/(=)\?(?=&|$)|\?\?/;k.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Gt.pop()||k.expando+"_"+kt++;return this[e]=!0,e}}),k.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Yt.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Yt.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=m(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Yt,"$1"+r):!1!==e.jsonp&&(e.url+=(St.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||k.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=C[r],C[r]=function(){o=arguments},n.always(function(){void 0===i?k(C).removeProp(r):C[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,Gt.push(r)),o&&m(i)&&i(o[0]),o=i=void 0}),"script"}),y.createHTMLDocument=((Vt=E.implementation.createHTMLDocument("").body).innerHTML="<form></form><form></form>",2===Vt.childNodes.length),k.parseHTML=function(e,t,n){return"string"!=typeof 
e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(y.createHTMLDocument?((r=(t=E.implementation.createHTMLDocument("")).createElement("base")).href=E.location.href,t.head.appendChild(r)):t=E),o=!n&&[],(i=D.exec(e))?[t.createElement(i[1])]:(i=we([e],t,o),o&&o.length&&k(o).remove(),k.merge([],i.childNodes)));var r,i,o},k.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1<s&&(r=mt(e.slice(s)),e=e.slice(0,s)),m(t)?(n=t,t=void 0):t&&"object"==typeof t&&(i="POST"),0<a.length&&k.ajax({url:e,type:i||"GET",dataType:"html",data:t}).done(function(e){o=arguments,a.html(r?k("<div>").append(k.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},k.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){k.fn[t]=function(e){return this.on(t,e)}}),k.expr.pseudos.animated=function(t){return k.grep(k.timers,function(e){return t===e.elem}).length},k.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=k.css(e,"position"),c=k(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=k.css(e,"top"),u=k.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),m(t)&&(t=t.call(e,n,k.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},k.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){k.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var 
e,t,n,r=this[0],i={top:0,left:0};if("fixed"===k.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===k.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=k(e).offset()).top+=k.css(e,"borderTopWidth",!0),i.left+=k.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-k.css(r,"marginTop",!0),left:t.left-i.left-k.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===k.css(e,"position"))e=e.offsetParent;return e||ie})}}),k.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;k.fn[t]=function(e){return _(this,function(e,t,n){var r;if(x(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),k.each(["top","left"],function(e,n){k.cssHooks[n]=ze(y.pixelPosition,function(e,t){if(t)return t=_e(e,n),$e.test(t)?k(e).position()[n]+"px":t})}),k.each({Height:"height",Width:"width"},function(a,s){k.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){k.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return _(this,function(e,t,n){var r;return x(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?k.css(e,t,i):k.style(e,t,n,i)},s,n?e:void 0,n)}})}),k.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){k.fn[n]=function(e,t){return 0<arguments.length?this.on(n,null,e,t):this.trigger(n)}}),k.fn.extend({hover:function(e,t){return 
this.mouseenter(e).mouseleave(t||e)}}),k.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)}}),k.proxy=function(e,t){var n,r,i;if("string"==typeof t&&(n=e[t],t=e,e=n),m(e))return r=s.call(arguments,2),(i=function(){return e.apply(t||this,r.concat(s.call(arguments)))}).guid=e.guid=e.guid||k.guid++,i},k.holdReady=function(e){e?k.readyWait++:k.ready(!0)},k.isArray=Array.isArray,k.parseJSON=JSON.parse,k.nodeName=A,k.isFunction=m,k.isWindow=x,k.camelCase=V,k.type=w,k.now=Date.now,k.isNumeric=function(e){var t=k.type(e);return("number"===t||"string"===t)&&!isNaN(e-parseFloat(e))},"function"==typeof define&&define.amd&&define("jquery",[],function(){return k});var Qt=C.jQuery,Jt=C.$;return k.noConflict=function(e){return C.$===k&&(C.$=Jt),e&&C.jQuery===k&&(C.jQuery=Qt),k},e||(C.jQuery=C.$=k),k});
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/_winconsole.py
# -*- coding: utf-8 -*-
# This module is based on the excellent work by Adam Bartoš who
# provided a lot of what went into the implementation here in
# the discussion to issue1602 in the Python bug tracker.
#
# There are some general differences in regards to how this works
# compared to the original patches as we do not need to patch
# the entire interpreter but just work in our little world of
# echo and prompt.
"""Windows console support for click.

Wraps the raw Win32 console API (``ReadConsoleW``/``WriteConsoleW``) so
that unicode text survives the Windows console, which otherwise mangles
non-ASCII output.  The public entry point is
:func:`_get_windows_console_stream`.
"""
import ctypes
import io
import os
import sys
import time
import zlib
from ctypes import byref
from ctypes import c_char
from ctypes import c_char_p
from ctypes import c_int
from ctypes import c_ssize_t
from ctypes import c_ulong
from ctypes import c_void_p
from ctypes import POINTER
from ctypes import py_object
from ctypes import windll
from ctypes import WinError
from ctypes import WINFUNCTYPE
from ctypes.wintypes import DWORD
from ctypes.wintypes import HANDLE
from ctypes.wintypes import LPCWSTR
from ctypes.wintypes import LPWSTR

import msvcrt

from ._compat import _NonClosingTextIOWrapper
from ._compat import PY2
from ._compat import text_type

try:
    from ctypes import pythonapi

    PyObject_GetBuffer = pythonapi.PyObject_GetBuffer
    PyBuffer_Release = pythonapi.PyBuffer_Release
except ImportError:
    # No CPython C-API available (e.g. PyPy); buffer access below is
    # disabled via ``get_buffer = None``.
    pythonapi = None


c_ssize_p = POINTER(c_ssize_t)

kernel32 = windll.kernel32
GetStdHandle = kernel32.GetStdHandle
ReadConsoleW = kernel32.ReadConsoleW
WriteConsoleW = kernel32.WriteConsoleW
GetConsoleMode = kernel32.GetConsoleMode
GetLastError = kernel32.GetLastError
GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32))
CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))(
    ("CommandLineToArgvW", windll.shell32)
)
LocalFree = WINFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p)(
    ("LocalFree", windll.kernel32)
)

# Win32 standard device handles (STD_INPUT_HANDLE etc.).
STDIN_HANDLE = GetStdHandle(-10)
STDOUT_HANDLE = GetStdHandle(-11)
STDERR_HANDLE = GetStdHandle(-12)

# Flags for PyObject_GetBuffer.
PyBUF_SIMPLE = 0
PyBUF_WRITABLE = 1

# Win32 error codes checked below.
ERROR_SUCCESS = 0
ERROR_NOT_ENOUGH_MEMORY = 8
ERROR_OPERATION_ABORTED = 995

STDIN_FILENO = 0
STDOUT_FILENO = 1
STDERR_FILENO = 2

# Ctrl-Z — the console's end-of-input marker.
EOF = b"\x1a"
# Writes to a raw console stream are chunked to this size; larger single
# writes can fail on Windows.
MAX_BYTES_WRITTEN = 32767


class Py_buffer(ctypes.Structure):
    # ctypes mirror of CPython's Py_buffer struct, used with
    # PyObject_GetBuffer to obtain a raw pointer into a bytes-like object.
    _fields_ = [
        ("buf", c_void_p),
        ("obj", py_object),
        ("len", c_ssize_t),
        ("itemsize", c_ssize_t),
        ("readonly", c_int),
        ("ndim", c_int),
        ("format", c_char_p),
        ("shape", c_ssize_p),
        ("strides", c_ssize_p),
        ("suboffsets", c_ssize_p),
        ("internal", c_void_p),
    ]

    # Python 2's Py_buffer carries an extra inline "smalltable" member.
    if PY2:
        _fields_.insert(-1, ("smalltable", c_ssize_t * 2))


# On PyPy we cannot get buffers so our ability to operate here is
# severely limited.
if pythonapi is None:
    get_buffer = None
else:

    def get_buffer(obj, writable=False):
        """Return a ctypes ``c_char`` array aliasing *obj*'s buffer.

        The returned array shares memory with *obj* (no copy).  Pass
        ``writable=True`` to request a writable view.
        """
        buf = Py_buffer()
        flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE
        PyObject_GetBuffer(py_object(obj), byref(buf), flags)
        try:
            buffer_type = c_char * buf.len
            return buffer_type.from_address(buf.buf)
        finally:
            PyBuffer_Release(byref(buf))


class _WindowsConsoleRawIOBase(io.RawIOBase):
    """Shared base for the raw console reader/writer; stores the Win32 handle."""

    def __init__(self, handle):
        # Raw Win32 console handle (from GetStdHandle).
        self.handle = handle

    def isatty(self):
        # Delegate first so RawIOBase can raise if the stream is closed,
        # then report True: this class only ever wraps a real console.
        io.RawIOBase.isatty(self)
        return True


class _WindowsConsoleReader(_WindowsConsoleRawIOBase):
    """Raw stream reading UTF-16-LE bytes from the console via ReadConsoleW."""

    def readable(self):
        return True

    def readinto(self, b):
        """Fill the writable buffer *b* and return the number of bytes read.

        Returns 0 on empty buffer or console EOF (Ctrl-Z).  Raises
        ``ValueError`` for odd-sized buffers (UTF-16 code units are two
        bytes) and ``OSError`` if ReadConsoleW fails.
        """
        bytes_to_be_read = len(b)
        if not bytes_to_be_read:
            return 0
        elif bytes_to_be_read % 2:
            raise ValueError(
                "cannot read odd number of bytes from UTF-16-LE encoded console"
            )

        buffer = get_buffer(b, writable=True)
        code_units_to_be_read = bytes_to_be_read // 2
        code_units_read = c_ulong()

        rv = ReadConsoleW(
            HANDLE(self.handle),
            buffer,
            code_units_to_be_read,
            byref(code_units_read),
            None,
        )
        if GetLastError() == ERROR_OPERATION_ABORTED:
            # wait for KeyboardInterrupt
            time.sleep(0.1)
        if not rv:
            raise OSError("Windows error: {}".format(GetLastError()))

        # A leading Ctrl-Z means the user signalled end of input.
        if buffer[0] == EOF:
            return 0
        return 2 * code_units_read.value


class _WindowsConsoleWriter(_WindowsConsoleRawIOBase):
    """Raw stream writing UTF-16-LE bytes to the console via WriteConsoleW."""

    def writable(self):
        return True

    @staticmethod
    def _get_error_message(errno):
        """Map the Win32 error codes we expect to readable names."""
        if errno == ERROR_SUCCESS:
            return "ERROR_SUCCESS"
        elif errno == ERROR_NOT_ENOUGH_MEMORY:
            return "ERROR_NOT_ENOUGH_MEMORY"
        return "Windows error {}".format(errno)

    def write(self, b):
        """Write bytes from *b*; return how many bytes were consumed.

        At most MAX_BYTES_WRITTEN bytes are written per call (callers
        are expected to loop, as BufferedWriter does).  Raises ``OSError``
        if nothing could be written although data was pending.
        """
        bytes_to_be_written = len(b)
        buf = get_buffer(b)
        code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2
        code_units_written = c_ulong()

        WriteConsoleW(
            HANDLE(self.handle),
            buf,
            code_units_to_be_written,
            byref(code_units_written),
            None,
        )
        bytes_written = 2 * code_units_written.value

        if bytes_written == 0 and bytes_to_be_written > 0:
            raise OSError(self._get_error_message(GetLastError()))
        return bytes_written


class ConsoleStream(object):
    """Facade pairing a text stream with its underlying binary buffer.

    Text writes go to the text stream; bytes writes flush the text layer
    first (to preserve output ordering) and then hit the binary buffer.
    All other attribute access is proxied to the text stream.
    """

    def __init__(self, text_stream, byte_stream):
        self._text_stream = text_stream
        # Exposed like ``sys.stdout.buffer``.
        self.buffer = byte_stream

    @property
    def name(self):
        return self.buffer.name

    def write(self, x):
        if isinstance(x, text_type):
            return self._text_stream.write(x)
        try:
            self.flush()
        except Exception:
            pass
        return self.buffer.write(x)

    def writelines(self, lines):
        for line in lines:
            self.write(line)

    def __getattr__(self, name):
        # Fall back to the text stream for everything not defined here.
        return getattr(self._text_stream, name)

    def isatty(self):
        return self.buffer.isatty()

    def __repr__(self):
        return "<ConsoleStream name={!r} encoding={!r}>".format(
            self.name, self.encoding
        )


class WindowsChunkedWriter(object):
    """
    Wraps a stream (such as stdout), acting as a transparent proxy for all
    attribute access apart from method 'write()' which we wrap to write in
    limited chunks due to a Windows limitation on binary console streams.
    """

    def __init__(self, wrapped):
        # double-underscore everything to prevent clashes with names of
        # attributes on the wrapped stream object.
        self.__wrapped = wrapped

    def __getattr__(self, name):
        return getattr(self.__wrapped, name)

    def write(self, text):
        """Write *text* in chunks of at most MAX_BYTES_WRITTEN."""
        total_to_write = len(text)
        written = 0

        while written < total_to_write:
            to_write = min(total_to_write - written, MAX_BYTES_WRITTEN)
            self.__wrapped.write(text[written : written + to_write])
            written += to_write


# Names of sys streams already wrapped by _wrap_std_stream, to avoid
# double wrapping.
_wrapped_std_streams = set()


def _wrap_std_stream(name):
    """Wrap ``sys.<name>`` in a WindowsChunkedWriter where needed.

    Only applies on Python 2 combined with Windows 7 or older
    (version <= 6.1), and only once per stream name.
    """
    # Python 2 & Windows 7 and below
    if (
        PY2
        and sys.getwindowsversion()[:2] <= (6, 1)
        and name not in _wrapped_std_streams
    ):
        setattr(sys, name, WindowsChunkedWriter(getattr(sys, name)))
        _wrapped_std_streams.add(name)


def _get_text_stdin(buffer_stream):
    """Build a unicode-capable stdin ConsoleStream over the console handle."""
    text_stream = _NonClosingTextIOWrapper(
        io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)),
        "utf-16-le",
        "strict",
        line_buffering=True,
    )
    return ConsoleStream(text_stream, buffer_stream)


def _get_text_stdout(buffer_stream):
    """Build a unicode-capable stdout ConsoleStream over the console handle."""
    text_stream = _NonClosingTextIOWrapper(
        io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)),
        "utf-16-le",
        "strict",
        line_buffering=True,
    )
    return ConsoleStream(text_stream, buffer_stream)


def _get_text_stderr(buffer_stream):
    """Build a unicode-capable stderr ConsoleStream over the console handle."""
    text_stream = _NonClosingTextIOWrapper(
        io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)),
        "utf-16-le",
        "strict",
        line_buffering=True,
    )
    return ConsoleStream(text_stream, buffer_stream)


if PY2:

    def _hash_py_argv():
        # Cheap fingerprint of sys.argv so callers can detect later
        # argv mutation (see _initial_argv_hash).
        return zlib.crc32("\x00".join(sys.argv[1:]))

    _initial_argv_hash = _hash_py_argv()

    def _get_windows_argv():
        """Return the process argv decoded as unicode via the Win32 API.

        Uses CommandLineToArgvW and then strips interpreter-level
        arguments (script name, ``-c``/``-m`` style options) so the
        result matches ``sys.argv[1:]``.
        """
        argc = c_int(0)
        argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc))
        if not argv_unicode:
            raise WinError()
        try:
            argv = [argv_unicode[i] for i in range(0, argc.value)]
        finally:
            # CommandLineToArgvW allocates; we must free it ourselves.
            LocalFree(argv_unicode)
            del argv_unicode

        if not hasattr(sys, "frozen"):
            # Not a frozen executable: drop the interpreter path, then
            # skip interpreter options up to the script / -c / -m.
            argv = argv[1:]
            while len(argv) > 0:
                arg = argv[0]
                if not arg.startswith("-") or arg == "-":
                    break
                argv = argv[1:]
                if arg.startswith(("-c", "-m")):
                    break

        return argv[1:]


# Maps standard file descriptors to their console stream factories.
_stream_factories = {
    0: _get_text_stdin,
    1: _get_text_stdout,
    2: _get_text_stderr,
}


def _is_console(f):
    """Return True if file object *f* is attached to a real Windows console."""
    if not hasattr(f, "fileno"):
        return False

    try:
        fileno = f.fileno()
    except OSError:
        return False

    handle = msvcrt.get_osfhandle(fileno)
    # GetConsoleMode succeeds only for genuine console handles (not
    # pipes or redirected files).
    return bool(GetConsoleMode(handle, byref(DWORD())))


def _get_windows_console_stream(f, encoding, errors):
    """Return a unicode-capable replacement stream for *f*, or None.

    Only kicks in when buffer access is available, the requested
    encoding/errors are compatible with the console's UTF-16-LE, and
    *f* is one of the standard streams attached to a real console.
    """
    if (
        get_buffer is not None
        and encoding in ("utf-16-le", None)
        and errors in ("strict", None)
        and _is_console(f)
    ):
        func = _stream_factories.get(f.fileno())
        if func is not None:
            if not PY2:
                f = getattr(f, "buffer", None)
                if f is None:
                    return None
            else:
                # If we are on Python 2 we need to set the stream that we
                # deal with to binary mode as otherwise the exercise is a
                # bit moot.  The same problems apply as for
                # get_binary_stdin and friends from _compat.
                msvcrt.setmode(f.fileno(), os.O_BINARY)
            return func(f)
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/_unicodefun.py
"""Helpers for click's unicode handling.

Warns Python 2 users about ``unicode_literals`` and refuses to run on a
Python 3 interpreter that is misconfigured to use ASCII for the
environment encoding.
"""
import codecs
import os
import sys

from ._compat import PY2


def _find_unicode_literals_frame():
    """Walk the call stack looking for a caller compiled with unicode_literals.

    Skips frames that belong to click itself and returns the stack level
    (suitable for ``warnings.warn(..., stacklevel=...)``) of the first
    non-click frame compiled with the ``unicode_literals`` future import,
    or 0 if none is found or the interpreter lacks ``sys._getframe``.
    """
    import __future__

    if not hasattr(sys, "_getframe"):  # not all Python implementations have it
        return 0
    frm = sys._getframe(1)
    idx = 1
    while frm is not None:
        if frm.f_globals.get("__name__", "").startswith("click."):
            # Frame belongs to click itself — keep walking outwards.
            frm = frm.f_back
            idx += 1
        elif frm.f_code.co_flags & __future__.unicode_literals.compiler_flag:
            return idx
        else:
            break
    return 0


def _check_for_unicode_literals():
    """Emit a warning if the calling module uses ``unicode_literals`` (Py2 only).

    No-op under ``-O`` (``__debug__`` false), on Python 3, or when the
    warning has been disabled via
    ``click.disable_unicode_literals_warning``.
    """
    if not __debug__:
        return

    # Imported lazily to avoid a circular import with the package root.
    from . import disable_unicode_literals_warning

    if not PY2 or disable_unicode_literals_warning:
        return

    bad_frame = _find_unicode_literals_frame()

    if bad_frame <= 0:
        return

    from warnings import warn

    warn(
        Warning(
            "Click detected the use of the unicode_literals __future__"
            " import. This is heavily discouraged because it can"
            " introduce subtle bugs in your code. You should instead"
            ' use explicit u"" literals for your unicode strings. For'
            " more information see"
            " https://click.palletsprojects.com/python3/"
        ),
        stacklevel=bad_frame,
    )


def _verify_python3_env():
    """Ensures that the environment is good for unicode on Python 3.

    Raises ``RuntimeError`` when the preferred encoding is ASCII,
    including (on POSIX) hints about which UTF-8 locales the system
    offers, gathered by probing ``locale -a``.
    """
    if PY2:
        return
    try:
        import locale

        fs_enc = codecs.lookup(locale.getpreferredencoding()).name
    except Exception:
        # If the lookup itself fails, assume the worst case.
        fs_enc = "ascii"
    if fs_enc != "ascii":
        return

    extra = ""
    if os.name == "posix":
        import subprocess

        try:
            rv = subprocess.Popen(
                ["locale", "-a"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
            ).communicate()[0]
        except OSError:
            rv = b""
        good_locales = set()
        has_c_utf8 = False

        # Make sure we're operating on text here.
        if isinstance(rv, bytes):
            rv = rv.decode("ascii", "replace")

        for line in rv.splitlines():
            locale = line.strip()
            if locale.lower().endswith((".utf-8", ".utf8")):
                good_locales.add(locale)
                if locale.lower() in ("c.utf8", "c.utf-8"):
                    has_c_utf8 = True

        extra += "\n\n"
        if not good_locales:
            extra += (
                "Additional information: on this system no suitable"
                " UTF-8 locales were discovered. This most likely"
                " requires resolving by reconfiguring the locale"
                " system."
            )
        elif has_c_utf8:
            extra += (
                "This system supports the C.UTF-8 locale which is"
                " recommended. You might be able to resolve your issue"
                " by exporting the following environment variables:\n\n"
                "    export LC_ALL=C.UTF-8\n"
                "    export LANG=C.UTF-8"
            )
        else:
            extra += (
                "This system lists a couple of UTF-8 supporting locales"
                " that you can pick from. The following suitable"
                " locales were discovered: {}".format(", ".join(sorted(good_locales)))
            )

        # Detect an exported UTF-8 locale that the system does not
        # actually provide (LC_ALL takes precedence over LANG).
        bad_locale = None
        for locale in os.environ.get("LC_ALL"), os.environ.get("LANG"):
            if locale and locale.lower().endswith((".utf-8", ".utf8")):
                bad_locale = locale
            if locale is not None:
                break
        if bad_locale is not None:
            extra += (
                "\n\nClick discovered that you exported a UTF-8 locale"
                " but the locale system could not pick up from it"
                " because it does not exist. The exported locale is"
                " '{}' but it is not supported".format(bad_locale)
            )

    raise RuntimeError(
        "Click will abort further execution because Python 3 was"
        " configured to use ASCII as encoding for the environment."
        " Consult https://click.palletsprojects.com/python3/ for"
        " mitigation steps.{}".format(extra)
    )
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/_textwrap.py
import textwrap
from contextlib import contextmanager


class TextWrapper(textwrap.TextWrapper):
    """A :class:`textwrap.TextWrapper` with two extras used by click's help
    formatter: a context manager that temporarily deepens the indent, and
    a mode that indents lines without re-wrapping them.
    """

    def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
        """Place a chunk that is longer than the remaining line width.

        With ``break_long_words`` the chunk is split at the available
        width (at least one character); otherwise it is emitted whole,
        but only onto an empty line.
        """
        available = width - cur_len
        if available < 1:
            available = 1

        if self.break_long_words:
            head = reversed_chunks[-1]
            cur_line.append(head[:available])
            reversed_chunks[-1] = head[available:]
        elif not cur_line:
            cur_line.append(reversed_chunks.pop())

    @contextmanager
    def extra_indent(self, indent):
        """Temporarily append *indent* to both indent prefixes.

        The previous ``initial_indent``/``subsequent_indent`` values are
        restored on exit, even if the body raises.
        """
        saved = (self.initial_indent, self.subsequent_indent)
        self.initial_indent += indent
        self.subsequent_indent += indent
        try:
            yield
        finally:
            self.initial_indent, self.subsequent_indent = saved

    def indent_only(self, text):
        """Prefix each line of *text* with the configured indents.

        The first line gets ``initial_indent``, every following line
        ``subsequent_indent``; no wrapping is performed.
        """
        prefixed = [
            (self.subsequent_indent if position else self.initial_indent) + line
            for position, line in enumerate(text.splitlines())
        ]
        return "\n".join(prefixed)
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/globals.py
from threading import local

# Thread-local storage holding the stack of active click contexts for
# the current thread (created lazily by push_context).
_local = local()


def get_current_context(silent=False):
    """Returns the current click context.  This can be used as a way to
    access the current context object from anywhere.  This is a more implicit
    alternative to the :func:`pass_context` decorator.  This function is
    primarily useful for helpers such as :func:`echo` which might be
    interested in changing its behavior based on the current context.

    To push the current context, :meth:`Context.scope` can be used.

    .. versionadded:: 5.0

    :param silent: if set to `True` the return value is `None` if no context
                   is available.  The default behavior is to raise a
                   :exc:`RuntimeError`.
    """
    try:
        return _local.stack[-1]
    except (AttributeError, IndexError):
        # AttributeError: no stack has ever been pushed on this thread;
        # IndexError: the stack exists but is empty.
        if not silent:
            raise RuntimeError("There is no active click context.")


def push_context(ctx):
    """Pushes a new context to the current stack."""
    # setdefault creates the per-thread stack on first use.
    _local.__dict__.setdefault("stack", []).append(ctx)


def pop_context():
    """Removes the top level from the stack."""
    _local.stack.pop()


def resolve_color_default(color=None):
    """Internal helper to get the default value of the color flag.  If a
    value is passed it's returned unchanged, otherwise it's looked up from
    the current context.

    :param color: an explicit color setting, returned as-is when not `None`.
    :return: the resolved color flag, or `None` when neither an explicit
             value nor an active context provides one.
    """
    # Bug fix: the docstring above previously opened with four quote
    # characters (""""Internal...), leaving a stray '"' as the first
    # character of the rendered docstring.
    if color is not None:
        return color
    ctx = get_current_context(silent=True)
    if ctx is not None:
        return ctx.color
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/__init__.py
""" Click is a simple Python module inspired by the stdlib optparse to make writing command line scripts fun. Unlike other modules, it's based around a simple API that does not come with too much magic and is composable. """ from .core import Argument from .core import BaseCommand from .core import Command from .core import CommandCollection from .core import Context from .core import Group from .core import MultiCommand from .core import Option from .core import Parameter from .decorators import argument from .decorators import command from .decorators import confirmation_option from .decorators import group from .decorators import help_option from .decorators import make_pass_decorator from .decorators import option from .decorators import pass_context from .decorators import pass_obj from .decorators import password_option from .decorators import version_option from .exceptions import Abort from .exceptions import BadArgumentUsage from .exceptions import BadOptionUsage from .exceptions import BadParameter from .exceptions import ClickException from .exceptions import FileError from .exceptions import MissingParameter from .exceptions import NoSuchOption from .exceptions import UsageError from .formatting import HelpFormatter from .formatting import wrap_text from .globals import get_current_context from .parser import OptionParser from .termui import clear from .termui import confirm from .termui import echo_via_pager from .termui import edit from .termui import get_terminal_size from .termui import getchar from .termui import launch from .termui import pause from .termui import progressbar from .termui import prompt from .termui import secho from .termui import style from .termui import unstyle from .types import BOOL from .types import Choice from .types import DateTime from .types import File from .types import FLOAT from .types import FloatRange from .types import INT from .types import IntRange from .types import ParamType from .types import Path from 
.types import STRING from .types import Tuple from .types import UNPROCESSED from .types import UUID from .utils import echo from .utils import format_filename from .utils import get_app_dir from .utils import get_binary_stream from .utils import get_os_args from .utils import get_text_stream from .utils import open_file # Controls if click should emit the warning about the use of unicode # literals. disable_unicode_literals_warning = False __version__ = "7.1.2"
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/core.py
import errno import inspect import os import sys from contextlib import contextmanager from functools import update_wrapper from itertools import repeat from ._compat import isidentifier from ._compat import iteritems from ._compat import PY2 from ._compat import string_types from ._unicodefun import _check_for_unicode_literals from ._unicodefun import _verify_python3_env from .exceptions import Abort from .exceptions import BadParameter from .exceptions import ClickException from .exceptions import Exit from .exceptions import MissingParameter from .exceptions import UsageError from .formatting import HelpFormatter from .formatting import join_options from .globals import pop_context from .globals import push_context from .parser import OptionParser from .parser import split_opt from .termui import confirm from .termui import prompt from .termui import style from .types import BOOL from .types import convert_type from .types import IntRange from .utils import echo from .utils import get_os_args from .utils import make_default_short_help from .utils import make_str from .utils import PacifyFlushWrapper _missing = object() SUBCOMMAND_METAVAR = "COMMAND [ARGS]..." SUBCOMMANDS_METAVAR = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." DEPRECATED_HELP_NOTICE = " (DEPRECATED)" DEPRECATED_INVOKE_NOTICE = "DeprecationWarning: The command %(name)s is deprecated." def _maybe_show_deprecated_notice(cmd): if cmd.deprecated: echo(style(DEPRECATED_INVOKE_NOTICE % {"name": cmd.name}, fg="red"), err=True) def fast_exit(code): """Exit without garbage collection, this speeds up exit by about 10ms for things like bash completion. 
""" sys.stdout.flush() sys.stderr.flush() os._exit(code) def _bashcomplete(cmd, prog_name, complete_var=None): """Internal handler for the bash completion support.""" if complete_var is None: complete_var = "_{}_COMPLETE".format(prog_name.replace("-", "_").upper()) complete_instr = os.environ.get(complete_var) if not complete_instr: return from ._bashcomplete import bashcomplete if bashcomplete(cmd, prog_name, complete_var, complete_instr): fast_exit(1) def _check_multicommand(base_command, cmd_name, cmd, register=False): if not base_command.chain or not isinstance(cmd, MultiCommand): return if register: hint = ( "It is not possible to add multi commands as children to" " another multi command that is in chain mode." ) else: hint = ( "Found a multi command as subcommand to a multi command" " that is in chain mode. This is not supported." ) raise RuntimeError( "{}. Command '{}' is set to chain and '{}' was added as" " subcommand but it in itself is a multi command. ('{}' is a {}" " within a chained {} named '{}').".format( hint, base_command.name, cmd_name, cmd_name, cmd.__class__.__name__, base_command.__class__.__name__, base_command.name, ) ) def batch(iterable, batch_size): return list(zip(*repeat(iter(iterable), batch_size))) def invoke_param_callback(callback, ctx, param, value): code = getattr(callback, "__code__", None) args = getattr(code, "co_argcount", 3) if args < 3: from warnings import warn warn( "Parameter callbacks take 3 args, (ctx, param, value). 
The" " 2-arg style is deprecated and will be removed in 8.0.".format(callback), DeprecationWarning, stacklevel=3, ) return callback(ctx, value) return callback(ctx, param, value) @contextmanager def augment_usage_errors(ctx, param=None): """Context manager that attaches extra information to exceptions.""" try: yield except BadParameter as e: if e.ctx is None: e.ctx = ctx if param is not None and e.param is None: e.param = param raise except UsageError as e: if e.ctx is None: e.ctx = ctx raise def iter_params_for_processing(invocation_order, declaration_order): """Given a sequence of parameters in the order as should be considered for processing and an iterable of parameters that exist, this returns a list in the correct order as they should be processed. """ def sort_key(item): try: idx = invocation_order.index(item) except ValueError: idx = float("inf") return (not item.is_eager, idx) return sorted(declaration_order, key=sort_key) class Context(object): """The context is a special internal object that holds state relevant for the script execution at every single level. It's normally invisible to commands unless they opt-in to getting access to it. The context is useful as it can pass internal objects around and can control special execution features such as reading data from environment variables. A context can be used as context manager in which case it will call :meth:`close` on teardown. .. versionadded:: 2.0 Added the `resilient_parsing`, `help_option_names`, `token_normalize_func` parameters. .. versionadded:: 3.0 Added the `allow_extra_args` and `allow_interspersed_args` parameters. .. versionadded:: 4.0 Added the `color`, `ignore_unknown_options`, and `max_content_width` parameters. .. versionadded:: 7.1 Added the `show_default` parameter. :param command: the command class for this context. :param parent: the parent context. :param info_name: the info name for this invocation. Generally this is the most descriptive name for the script or command. 
For the toplevel script it is usually the name of the script, for commands below it it's the name of the script. :param obj: an arbitrary object of user data. :param auto_envvar_prefix: the prefix to use for automatic environment variables. If this is `None` then reading from environment variables is disabled. This does not affect manually set environment variables which are always read. :param default_map: a dictionary (like object) with default values for parameters. :param terminal_width: the width of the terminal. The default is inherit from parent context. If no context defines the terminal width then auto detection will be applied. :param max_content_width: the maximum width for content rendered by Click (this currently only affects help pages). This defaults to 80 characters if not overridden. In other words: even if the terminal is larger than that, Click will not format things wider than 80 characters by default. In addition to that, formatters might add some safety mapping on the right. :param resilient_parsing: if this flag is enabled then Click will parse without any interactivity or callback invocation. Default values will also be ignored. This is useful for implementing things such as completion support. :param allow_extra_args: if this is set to `True` then extra arguments at the end will not raise an error and will be kept on the context. The default is to inherit from the command. :param allow_interspersed_args: if this is set to `False` then options and arguments cannot be mixed. The default is to inherit from the command. :param ignore_unknown_options: instructs click to ignore options it does not know and keeps them for later processing. :param help_option_names: optionally a list of strings that define how the default help parameter is named. The default is ``['--help']``. :param token_normalize_func: an optional function that is used to normalize tokens (options, choices, etc.). 
This for instance can be used to implement case insensitive behavior. :param color: controls if the terminal supports ANSI colors or not. The default is autodetection. This is only needed if ANSI codes are used in texts that Click prints which is by default not the case. This for instance would affect help output. :param show_default: if True, shows defaults for all options. Even if an option is later created with show_default=False, this command-level setting overrides it. """ def __init__( self, command, parent=None, info_name=None, obj=None, auto_envvar_prefix=None, default_map=None, terminal_width=None, max_content_width=None, resilient_parsing=False, allow_extra_args=None, allow_interspersed_args=None, ignore_unknown_options=None, help_option_names=None, token_normalize_func=None, color=None, show_default=None, ): #: the parent context or `None` if none exists. self.parent = parent #: the :class:`Command` for this context. self.command = command #: the descriptive information name self.info_name = info_name #: the parsed parameters except if the value is hidden in which #: case it's not remembered. self.params = {} #: the leftover arguments. self.args = [] #: protected arguments. These are arguments that are prepended #: to `args` when certain parsing scenarios are encountered but #: must be never propagated to another arguments. This is used #: to implement nested parsing. self.protected_args = [] if obj is None and parent is not None: obj = parent.obj #: the user object stored. self.obj = obj self._meta = getattr(parent, "meta", {}) #: A dictionary (-like object) with defaults for parameters. if ( default_map is None and parent is not None and parent.default_map is not None ): default_map = parent.default_map.get(info_name) self.default_map = default_map #: This flag indicates if a subcommand is going to be executed. 
A #: group callback can use this information to figure out if it's #: being executed directly or because the execution flow passes #: onwards to a subcommand. By default it's None, but it can be #: the name of the subcommand to execute. #: #: If chaining is enabled this will be set to ``'*'`` in case #: any commands are executed. It is however not possible to #: figure out which ones. If you require this knowledge you #: should use a :func:`resultcallback`. self.invoked_subcommand = None if terminal_width is None and parent is not None: terminal_width = parent.terminal_width #: The width of the terminal (None is autodetection). self.terminal_width = terminal_width if max_content_width is None and parent is not None: max_content_width = parent.max_content_width #: The maximum width of formatted content (None implies a sensible #: default which is 80 for most things). self.max_content_width = max_content_width if allow_extra_args is None: allow_extra_args = command.allow_extra_args #: Indicates if the context allows extra args or if it should #: fail on parsing. #: #: .. versionadded:: 3.0 self.allow_extra_args = allow_extra_args if allow_interspersed_args is None: allow_interspersed_args = command.allow_interspersed_args #: Indicates if the context allows mixing of arguments and #: options or not. #: #: .. versionadded:: 3.0 self.allow_interspersed_args = allow_interspersed_args if ignore_unknown_options is None: ignore_unknown_options = command.ignore_unknown_options #: Instructs click to ignore options that a command does not #: understand and will store it on the context for later #: processing. This is primarily useful for situations where you #: want to call into external programs. Generally this pattern is #: strongly discouraged because it's not possibly to losslessly #: forward all arguments. #: #: .. 
        #: .. versionadded:: 4.0
        self.ignore_unknown_options = ignore_unknown_options

        if help_option_names is None:
            if parent is not None:
                help_option_names = parent.help_option_names
            else:
                help_option_names = ["--help"]

        #: The names for the help options.
        self.help_option_names = help_option_names

        if token_normalize_func is None and parent is not None:
            token_normalize_func = parent.token_normalize_func

        #: An optional normalization function for tokens.  This is
        #: options, choices, commands etc.
        self.token_normalize_func = token_normalize_func

        #: Indicates if resilient parsing is enabled.  In that case Click
        #: will do its best to not cause any failures and default values
        #: will be ignored.  Useful for completion.
        self.resilient_parsing = resilient_parsing

        # If there is no envvar prefix yet, but the parent has one and
        # the command on this level has a name, we can expand the envvar
        # prefix automatically.
        if auto_envvar_prefix is None:
            if (
                parent is not None
                and parent.auto_envvar_prefix is not None
                and self.info_name is not None
            ):
                auto_envvar_prefix = "{}_{}".format(
                    parent.auto_envvar_prefix, self.info_name.upper()
                )
        else:
            auto_envvar_prefix = auto_envvar_prefix.upper()
        if auto_envvar_prefix is not None:
            auto_envvar_prefix = auto_envvar_prefix.replace("-", "_")
        self.auto_envvar_prefix = auto_envvar_prefix

        if color is None and parent is not None:
            color = parent.color

        #: Controls if styling output is wanted or not.
        self.color = color

        self.show_default = show_default

        self._close_callbacks = []
        self._depth = 0

    def __enter__(self):
        self._depth += 1
        push_context(self)
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self._depth -= 1
        if self._depth == 0:
            self.close()
        pop_context()

    @contextmanager
    def scope(self, cleanup=True):
        """This helper method can be used with the context object to promote
        it to the current thread local (see :func:`get_current_context`).
        The default behavior of this is to invoke the cleanup functions which
        can be disabled by setting `cleanup` to `False`.  The cleanup
        functions are typically used for things such as closing file handles.

        If the cleanup is intended the context object can also be directly
        used as a context manager.

        Example usage::

            with ctx.scope():
                assert get_current_context() is ctx

        This is equivalent::

            with ctx:
                assert get_current_context() is ctx

        .. versionadded:: 5.0

        :param cleanup: controls if the cleanup functions should be run or
                        not.  The default is to run these functions.  In
                        some situations the context only wants to be
                        temporarily pushed in which case this can be
                        disabled.  Nested pushes automatically defer the
                        cleanup.
        """
        if not cleanup:
            self._depth += 1
        try:
            with self as rv:
                yield rv
        finally:
            if not cleanup:
                self._depth -= 1

    @property
    def meta(self):
        """This is a dictionary which is shared with all the contexts
        that are nested.  It exists so that click utilities can store some
        state here if they need to.  It is however the responsibility of
        that code to manage this dictionary well.

        The keys are supposed to be unique dotted strings.  For instance
        module paths are a good choice for it.  What is stored in there is
        irrelevant for the operation of click.  However what is important is
        that code that places data here adheres to the general semantics of
        the system.

        Example usage::

            LANG_KEY = f'{__name__}.lang'

            def set_language(value):
                ctx = get_current_context()
                ctx.meta[LANG_KEY] = value

            def get_language():
                return get_current_context().meta.get(LANG_KEY, 'en_US')

        .. versionadded:: 5.0
        """
        return self._meta

    def make_formatter(self):
        """Creates the formatter for the help and usage output."""
        return HelpFormatter(
            width=self.terminal_width, max_width=self.max_content_width
        )

    def call_on_close(self, f):
        """This decorator remembers a function as callback that should be
        executed when the context tears down.  This is most useful to bind
        resource handling to the script execution.  For instance, file
        objects opened by the :class:`File` type will register their close
        callbacks here.

        :param f: the function to execute on teardown.
        """
        self._close_callbacks.append(f)
        return f

    def close(self):
        """Invokes all close callbacks."""
        for cb in self._close_callbacks:
            cb()
        self._close_callbacks = []

    @property
    def command_path(self):
        """The computed command path.  This is used for the ``usage``
        information on the help page.  It's automatically created by
        combining the info names of the chain of contexts to the root.
        """
        rv = ""
        if self.info_name is not None:
            rv = self.info_name
        if self.parent is not None:
            rv = "{} {}".format(self.parent.command_path, rv)
        return rv.lstrip()

    def find_root(self):
        """Finds the outermost context."""
        node = self
        while node.parent is not None:
            node = node.parent
        return node

    def find_object(self, object_type):
        """Finds the closest object of a given type."""
        node = self
        while node is not None:
            if isinstance(node.obj, object_type):
                return node.obj
            node = node.parent

    def ensure_object(self, object_type):
        """Like :meth:`find_object` but sets the innermost object to a
        new instance of `object_type` if it does not exist.
        """
        rv = self.find_object(object_type)
        if rv is None:
            self.obj = rv = object_type()
        return rv

    def lookup_default(self, name):
        """Looks up the default for a parameter name.  This by default
        looks into the :attr:`default_map` if available.
        """
        if self.default_map is not None:
            rv = self.default_map.get(name)
            if callable(rv):
                rv = rv()
            return rv

    def fail(self, message):
        """Aborts the execution of the program with a specific error
        message.

        :param message: the error message to fail with.
        """
        raise UsageError(message, self)

    def abort(self):
        """Aborts the script."""
        raise Abort()

    def exit(self, code=0):
        """Exits the application with a given exit code."""
        raise Exit(code)

    def get_usage(self):
        """Helper method to get formatted usage string for the current
        context and command.
        """
        return self.command.get_usage(self)

    def get_help(self):
        """Helper method to get formatted help page for the current
        context and command.
        """
        return self.command.get_help(self)

    def invoke(*args, **kwargs):  # noqa: B902
        """Invokes a command callback in exactly the way it expects.  There
        are two ways to invoke this method:

        1.  the first argument can be a callback and all other arguments and
            keyword arguments are forwarded directly to the function.
        2.  the first argument is a click command object.  In that case all
            arguments are forwarded as well but proper click parameters
            (options and click arguments) must be keyword arguments and
            Click will fill in defaults.

        Note that before Click 3.2 keyword arguments were not properly filled
        in against the intention of this code and no context was created.
        For more information about this change and why it was done in a
        bugfix release see :ref:`upgrade-to-3.2`.
        """
        self, callback = args[:2]
        ctx = self

        # It's also possible to invoke another command which might or
        # might not have a callback.  In that case we also fill
        # in defaults and make a new context for this command.
        if isinstance(callback, Command):
            other_cmd = callback
            callback = other_cmd.callback
            ctx = Context(other_cmd, info_name=other_cmd.name, parent=self)
            if callback is None:
                raise TypeError(
                    "The given command does not have a callback that can be invoked."
                )

            for param in other_cmd.params:
                if param.name not in kwargs and param.expose_value:
                    kwargs[param.name] = param.get_default(ctx)

        args = args[2:]
        with augment_usage_errors(self):
            with ctx:
                return callback(*args, **kwargs)

    def forward(*args, **kwargs):  # noqa: B902
        """Similar to :meth:`invoke` but fills in default keyword
        arguments from the current context if the other command expects
        it.  This cannot invoke callbacks directly, only other commands.
        """
        self, cmd = args[:2]

        # It's also possible to invoke another command which might or
        # might not have a callback.
        if not isinstance(cmd, Command):
            raise TypeError("Callback is not a command.")

        for param in self.params:
            if param not in kwargs:
                kwargs[param] = self.params[param]

        return self.invoke(cmd, **kwargs)


class BaseCommand(object):
    """The base command implements the minimal API contract of commands.
    Most code will never use this as it does not implement a lot of useful
    functionality but it can act as the direct subclass of alternative
    parsing methods that do not depend on the Click parser.

    For instance, this can be used to bridge Click and other systems like
    argparse or docopt.

    Because base commands do not implement a lot of the API that other
    parts of Click take for granted, they are not supported for all
    operations.  For instance, they cannot be used with the decorators
    usually and they have no built-in callback system.

    .. versionchanged:: 2.0
       Added the `context_settings` parameter.

    :param name: the name of the command to use unless a group overrides it.
    :param context_settings: an optional dictionary with defaults that are
                             passed to the context object.
    """

    #: the default for the :attr:`Context.allow_extra_args` flag.
    allow_extra_args = False
    #: the default for the :attr:`Context.allow_interspersed_args` flag.
    allow_interspersed_args = True
    #: the default for the :attr:`Context.ignore_unknown_options` flag.
    ignore_unknown_options = False

    def __init__(self, name, context_settings=None):
        #: the name the command thinks it has.  Upon registering a command
        #: on a :class:`Group` the group will default the command name
        #: with this information.  You should instead use the
        #: :class:`Context`\'s :attr:`~Context.info_name` attribute.
        self.name = name
        if context_settings is None:
            context_settings = {}
        #: an optional dictionary with defaults passed to the context.
        self.context_settings = context_settings

    def __repr__(self):
        return "<{} {}>".format(self.__class__.__name__, self.name)

    def get_usage(self, ctx):
        raise NotImplementedError("Base commands cannot get usage")

    def get_help(self, ctx):
        raise NotImplementedError("Base commands cannot get help")

    def make_context(self, info_name, args, parent=None, **extra):
        """This function when given an info name and arguments will kick
        off the parsing and create a new :class:`Context`.  It does not
        invoke the actual command callback though.

        :param info_name: the info name for this invokation.  Generally this
                          is the most descriptive name for the script or
                          command.  For the toplevel script it's usually
                          the name of the script, for commands below it it's
                          the name of the script.
        :param args: the arguments to parse as list of strings.
        :param parent: the parent context if available.
        :param extra: extra keyword arguments forwarded to the context
                      constructor.
        """
        for key, value in iteritems(self.context_settings):
            if key not in extra:
                extra[key] = value
        ctx = Context(self, info_name=info_name, parent=parent, **extra)
        with ctx.scope(cleanup=False):
            self.parse_args(ctx, args)
        return ctx

    def parse_args(self, ctx, args):
        """Given a context and a list of arguments this creates the parser
        and parses the arguments, then modifies the context as necessary.
        This is automatically invoked by :meth:`make_context`.
        """
        raise NotImplementedError("Base commands do not know how to parse arguments.")

    def invoke(self, ctx):
        """Given a context, this invokes the command.  The default
        implementation is raising a not implemented error.
        """
        raise NotImplementedError("Base commands are not invokable by default")

    def main(
        self,
        args=None,
        prog_name=None,
        complete_var=None,
        standalone_mode=True,
        **extra
    ):
        """This is the way to invoke a script with all the bells and
        whistles as a command line application.  This will always terminate
        the application after a call.  If this is not wanted, ``SystemExit``
        needs to be caught.

        This method is also available by directly calling the instance of
        a :class:`Command`.

        .. versionadded:: 3.0
           Added the `standalone_mode` flag to control the standalone mode.

        :param args: the arguments that should be used for parsing.  If not
                     provided, ``sys.argv[1:]`` is used.
        :param prog_name: the program name that should be used.  By default
                          the program name is constructed by taking the file
                          name from ``sys.argv[0]``.
        :param complete_var: the environment variable that controls the
                             bash completion support.  The default is
                             ``"_<prog_name>_COMPLETE"`` with prog_name in
                             uppercase.
        :param standalone_mode: the default behavior is to invoke the script
                                in standalone mode.  Click will then
                                handle exceptions and convert them into
                                error messages and the function will never
                                return but shut down the interpreter.  If
                                this is set to `False` they will be
                                propagated to the caller and the return
                                value of this function is the return value
                                of :meth:`invoke`.
        :param extra: extra keyword arguments are forwarded to the context
                      constructor.  See :class:`Context` for more
                      information.
        """
        # If we are in Python 3, we will verify that the environment is
        # sane at this point or reject further execution to avoid a
        # broken script.
        if not PY2:
            _verify_python3_env()
        else:
            _check_for_unicode_literals()

        if args is None:
            args = get_os_args()
        else:
            args = list(args)

        if prog_name is None:
            prog_name = make_str(
                os.path.basename(sys.argv[0] if sys.argv else __file__)
            )

        # Hook for the Bash completion.  This only activates if the Bash
        # completion is actually enabled, otherwise this is quite a fast
        # noop.
        _bashcomplete(self, prog_name, complete_var)

        try:
            try:
                with self.make_context(prog_name, args, **extra) as ctx:
                    rv = self.invoke(ctx)
                    if not standalone_mode:
                        return rv
                    # it's not safe to `ctx.exit(rv)` here!
                    # note that `rv` may actually contain data like "1" which
                    # has obvious effects
                    # more subtle case: `rv=[None, None]` can come out of
                    # chained commands which all returned `None` -- so it's
                    # not even always obvious that `rv` indicates success/
                    # failure by its truthiness/falsiness
                    ctx.exit()
            except (EOFError, KeyboardInterrupt):
                echo(file=sys.stderr)
                raise Abort()
            except ClickException as e:
                if not standalone_mode:
                    raise
                e.show()
                sys.exit(e.exit_code)
            except IOError as e:
                if e.errno == errno.EPIPE:
                    sys.stdout = PacifyFlushWrapper(sys.stdout)
                    sys.stderr = PacifyFlushWrapper(sys.stderr)
                    sys.exit(1)
                else:
                    raise
        except Exit as e:
            if standalone_mode:
                sys.exit(e.exit_code)
            else:
                # in non-standalone mode, return the exit code
                # note that this is only reached if `self.invoke` above
                # raises an Exit explicitly -- thus bypassing the check there
                # which would return its result
                # the results of non-standalone execution may therefore be
                # somewhat ambiguous: if there are codepaths which lead to
                # `ctx.exit(1)` and to `return 1`, the caller won't be able
                # to tell the difference between the two
                return e.exit_code
        except Abort:
            if not standalone_mode:
                raise
            echo("Aborted!", file=sys.stderr)
            sys.exit(1)

    def __call__(self, *args, **kwargs):
        """Alias for :meth:`main`."""
        return self.main(*args, **kwargs)


class Command(BaseCommand):
    """Commands are the basic building block of command line interfaces in
    Click.  A basic command handles command line parsing and might dispatch
    more parsing to commands nested below it.

    .. versionchanged:: 2.0
       Added the `context_settings` parameter.
    .. versionchanged:: 7.1
       Added the `no_args_is_help` parameter.

    :param name: the name of the command to use unless a group overrides it.
    :param context_settings: an optional dictionary with defaults that are
                             passed to the context object.
    :param callback: the callback to invoke.  This is optional.
    :param params: the parameters to register with this command.  This can
                   be either :class:`Option` or :class:`Argument` objects.
    :param help: the help string to use for this command.
    :param epilog: like the help string but it's printed at the end of the
                   help page after everything else.
    :param short_help: the short help to use for this command.  This is
                       shown on the command listing of the parent command.
    :param add_help_option: by default each command registers a ``--help``
                            option.  This can be disabled by this parameter.
    :param no_args_is_help: this controls what happens if no arguments are
                            provided.  This option is disabled by default.
                            If enabled this will add ``--help`` as argument
                            if no arguments are passed
    :param hidden: hide this command from help outputs.

    :param deprecated: issues a message indicating that
                       the command is deprecated.
    """

    def __init__(
        self,
        name,
        context_settings=None,
        callback=None,
        params=None,
        help=None,
        epilog=None,
        short_help=None,
        options_metavar="[OPTIONS]",
        add_help_option=True,
        no_args_is_help=False,
        hidden=False,
        deprecated=False,
    ):
        BaseCommand.__init__(self, name, context_settings)
        #: the callback to execute when the command fires.  This might be
        #: `None` in which case nothing happens.
        self.callback = callback
        #: the list of parameters for this command in the order they
        #: should show up in the help page and execute.  Eager parameters
        #: will automatically be handled before non eager ones.
        self.params = params or []
        # if a form feed (page break) is found in the help text, truncate
        # help text to the content preceding the first form feed
        if help and "\f" in help:
            help = help.split("\f", 1)[0]
        self.help = help
        self.epilog = epilog
        self.options_metavar = options_metavar
        self.short_help = short_help
        self.add_help_option = add_help_option
        self.no_args_is_help = no_args_is_help
        self.hidden = hidden
        self.deprecated = deprecated

    def get_usage(self, ctx):
        """Formats the usage line into a string and returns it.

        Calls :meth:`format_usage` internally.
        """
        formatter = ctx.make_formatter()
        self.format_usage(ctx, formatter)
        return formatter.getvalue().rstrip("\n")

    def get_params(self, ctx):
        rv = self.params
        help_option = self.get_help_option(ctx)
        if help_option is not None:
            rv = rv + [help_option]
        return rv

    def format_usage(self, ctx, formatter):
        """Writes the usage line into the formatter.

        This is a low-level method called by :meth:`get_usage`.
        """
        pieces = self.collect_usage_pieces(ctx)
        formatter.write_usage(ctx.command_path, " ".join(pieces))

    def collect_usage_pieces(self, ctx):
        """Returns all the pieces that go into the usage line and returns
        it as a list of strings.
        """
        rv = [self.options_metavar]
        for param in self.get_params(ctx):
            rv.extend(param.get_usage_pieces(ctx))
        return rv

    def get_help_option_names(self, ctx):
        """Returns the names for the help option."""
        all_names = set(ctx.help_option_names)
        for param in self.params:
            all_names.difference_update(param.opts)
            all_names.difference_update(param.secondary_opts)
        return all_names

    def get_help_option(self, ctx):
        """Returns the help option object."""
        help_options = self.get_help_option_names(ctx)
        if not help_options or not self.add_help_option:
            return

        def show_help(ctx, param, value):
            if value and not ctx.resilient_parsing:
                echo(ctx.get_help(), color=ctx.color)
                ctx.exit()

        return Option(
            help_options,
            is_flag=True,
            is_eager=True,
            expose_value=False,
            callback=show_help,
            help="Show this message and exit.",
        )

    def make_parser(self, ctx):
        """Creates the underlying option parser for this command."""
        parser = OptionParser(ctx)
        for param in self.get_params(ctx):
            param.add_to_parser(parser, ctx)
        return parser

    def get_help(self, ctx):
        """Formats the help into a string and returns it.

        Calls :meth:`format_help` internally.
        """
        formatter = ctx.make_formatter()
        self.format_help(ctx, formatter)
        return formatter.getvalue().rstrip("\n")

    def get_short_help_str(self, limit=45):
        """Gets short help for the command or makes it by shortening the
        long help string.
        """
        return (
            self.short_help
            or self.help
            and make_default_short_help(self.help, limit)
            or ""
        )

    def format_help(self, ctx, formatter):
        """Writes the help into the formatter if it exists.

        This is a low-level method called by :meth:`get_help`.

        This calls the following methods:

        -   :meth:`format_usage`
        -   :meth:`format_help_text`
        -   :meth:`format_options`
        -   :meth:`format_epilog`
        """
        self.format_usage(ctx, formatter)
        self.format_help_text(ctx, formatter)
        self.format_options(ctx, formatter)
        self.format_epilog(ctx, formatter)

    def format_help_text(self, ctx, formatter):
        """Writes the help text to the formatter if it exists."""
        if self.help:
            formatter.write_paragraph()
            with formatter.indentation():
                help_text = self.help
                if self.deprecated:
                    help_text += DEPRECATED_HELP_NOTICE
                formatter.write_text(help_text)
        elif self.deprecated:
            formatter.write_paragraph()
            with formatter.indentation():
                formatter.write_text(DEPRECATED_HELP_NOTICE)

    def format_options(self, ctx, formatter):
        """Writes all the options into the formatter if they exist."""
        opts = []
        for param in self.get_params(ctx):
            rv = param.get_help_record(ctx)
            if rv is not None:
                opts.append(rv)

        if opts:
            with formatter.section("Options"):
                formatter.write_dl(opts)

    def format_epilog(self, ctx, formatter):
        """Writes the epilog into the formatter if it exists."""
        if self.epilog:
            formatter.write_paragraph()
            with formatter.indentation():
                formatter.write_text(self.epilog)

    def parse_args(self, ctx, args):
        if not args and self.no_args_is_help and not ctx.resilient_parsing:
            echo(ctx.get_help(), color=ctx.color)
            ctx.exit()

        parser = self.make_parser(ctx)
        opts, args, param_order = parser.parse_args(args=args)

        for param in iter_params_for_processing(param_order, self.get_params(ctx)):
            value, args = param.handle_parse_result(ctx, opts, args)

        if args and not ctx.allow_extra_args and not ctx.resilient_parsing:
            ctx.fail(
                "Got unexpected extra argument{} ({})".format(
                    "s" if len(args) != 1 else "", " ".join(map(make_str, args))
                )
            )

        ctx.args = args
        return args

    def invoke(self, ctx):
        """Given a context, this invokes the attached callback (if it exists)
        in the right way.
        """
        _maybe_show_deprecated_notice(self)
        if self.callback is not None:
            return ctx.invoke(self.callback, **ctx.params)


class MultiCommand(Command):
    """A multi command is the basic implementation of a command that
    dispatches to subcommands.  The most common version is the
    :class:`Group`.

    :param invoke_without_command: this controls how the multi command itself
                                   is invoked.  By default it's only invoked
                                   if a subcommand is provided.
    :param no_args_is_help: this controls what happens if no arguments are
                            provided.  This option is enabled by default if
                            `invoke_without_command` is disabled or disabled
                            if it's enabled.  If enabled this will add
                            ``--help`` as argument if no arguments are
                            passed.
    :param subcommand_metavar: the string that is used in the documentation
                               to indicate the subcommand place.
    :param chain: if this is set to `True` chaining of multiple subcommands
                  is enabled.  This restricts the form of commands in that
                  they cannot have optional arguments but it allows
                  multiple commands to be chained together.
    :param result_callback: the result callback to attach to this multi
                            command.
    """

    allow_extra_args = True
    allow_interspersed_args = False

    def __init__(
        self,
        name=None,
        invoke_without_command=False,
        no_args_is_help=None,
        subcommand_metavar=None,
        chain=False,
        result_callback=None,
        **attrs
    ):
        Command.__init__(self, name, **attrs)
        if no_args_is_help is None:
            no_args_is_help = not invoke_without_command
        self.no_args_is_help = no_args_is_help
        self.invoke_without_command = invoke_without_command
        if subcommand_metavar is None:
            if chain:
                subcommand_metavar = SUBCOMMANDS_METAVAR
            else:
                subcommand_metavar = SUBCOMMAND_METAVAR
        self.subcommand_metavar = subcommand_metavar
        self.chain = chain
        #: The result callback that is stored.  This can be set or
        #: overridden with the :func:`resultcallback` decorator.
        self.result_callback = result_callback

        if self.chain:
            for param in self.params:
                if isinstance(param, Argument) and not param.required:
                    raise RuntimeError(
                        "Multi commands in chain mode cannot have"
                        " optional arguments."
                    )

    def collect_usage_pieces(self, ctx):
        rv = Command.collect_usage_pieces(self, ctx)
        rv.append(self.subcommand_metavar)
        return rv

    def format_options(self, ctx, formatter):
        Command.format_options(self, ctx, formatter)
        self.format_commands(ctx, formatter)

    def resultcallback(self, replace=False):
        """Adds a result callback to the chain command.  By default if a
        result callback is already registered this will chain them but
        this can be disabled with the `replace` parameter.  The result
        callback is invoked with the return value of the subcommand
        (or the list of return values from all subcommands if chaining
        is enabled) as well as the parameters as they would be passed
        to the main callback.

        Example::

            @click.group()
            @click.option('-i', '--input', default=23)
            def cli(input):
                return 42

            @cli.resultcallback()
            def process_result(result, input):
                return result + input

        .. versionadded:: 3.0

        :param replace: if set to `True` an already existing result
                        callback will be removed.
        """

        def decorator(f):
            old_callback = self.result_callback
            if old_callback is None or replace:
                self.result_callback = f
                return f

            def function(__value, *args, **kwargs):
                return f(old_callback(__value, *args, **kwargs), *args, **kwargs)

            self.result_callback = rv = update_wrapper(function, f)
            return rv

        return decorator

    def format_commands(self, ctx, formatter):
        """Extra format methods for multi methods that adds all the commands
        after the options.
        """
        commands = []
        for subcommand in self.list_commands(ctx):
            cmd = self.get_command(ctx, subcommand)
            # What is this, the tool lied about a command.  Ignore it
            if cmd is None:
                continue
            if cmd.hidden:
                continue

            commands.append((subcommand, cmd))

        # allow for 3 times the default spacing
        if len(commands):
            limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands)

            rows = []
            for subcommand, cmd in commands:
                help = cmd.get_short_help_str(limit)
                rows.append((subcommand, help))

            if rows:
                with formatter.section("Commands"):
                    formatter.write_dl(rows)

    def parse_args(self, ctx, args):
        if not args and self.no_args_is_help and not ctx.resilient_parsing:
            echo(ctx.get_help(), color=ctx.color)
            ctx.exit()

        rest = Command.parse_args(self, ctx, args)
        if self.chain:
            ctx.protected_args = rest
            ctx.args = []
        elif rest:
            ctx.protected_args, ctx.args = rest[:1], rest[1:]

        return ctx.args

    def invoke(self, ctx):
        def _process_result(value):
            if self.result_callback is not None:
                value = ctx.invoke(self.result_callback, value, **ctx.params)
            return value

        if not ctx.protected_args:
            # If we are invoked without command the chain flag controls
            # how this happens.  If we are not in chain mode, the return
            # value here is the return value of the command.
            # If however we are in chain mode, the return value is the
            # return value of the result processor invoked with an empty
            # list (which means that no subcommand actually was executed).
            if self.invoke_without_command:
                if not self.chain:
                    return Command.invoke(self, ctx)
                with ctx:
                    Command.invoke(self, ctx)
                    return _process_result([])
            ctx.fail("Missing command.")

        # Fetch args back out
        args = ctx.protected_args + ctx.args
        ctx.args = []
        ctx.protected_args = []

        # If we're not in chain mode, we only allow the invocation of a
        # single command but we also inform the current context about the
        # name of the command to invoke.
        if not self.chain:
            # Make sure the context is entered so we do not clean up
            # resources until the result processor has worked.
            with ctx:
                cmd_name, cmd, args = self.resolve_command(ctx, args)
                ctx.invoked_subcommand = cmd_name
                Command.invoke(self, ctx)
                sub_ctx = cmd.make_context(cmd_name, args, parent=ctx)
                with sub_ctx:
                    return _process_result(sub_ctx.command.invoke(sub_ctx))

        # In chain mode we create the contexts step by step, but after the
        # base command has been invoked.  Because at that point we do not
        # know the subcommands yet, the invoked subcommand attribute is
        # set to ``*`` to inform the command that subcommands are executed
        # but nothing else.
        with ctx:
            ctx.invoked_subcommand = "*" if args else None
            Command.invoke(self, ctx)

            # Otherwise we make every single context and invoke them in a
            # chain.  In that case the return value to the result processor
            # is the list of all invoked subcommand's results.
            contexts = []
            while args:
                cmd_name, cmd, args = self.resolve_command(ctx, args)
                sub_ctx = cmd.make_context(
                    cmd_name,
                    args,
                    parent=ctx,
                    allow_extra_args=True,
                    allow_interspersed_args=False,
                )
                contexts.append(sub_ctx)
                args, sub_ctx.args = sub_ctx.args, []

            rv = []
            for sub_ctx in contexts:
                with sub_ctx:
                    rv.append(sub_ctx.command.invoke(sub_ctx))
            return _process_result(rv)

    def resolve_command(self, ctx, args):
        cmd_name = make_str(args[0])
        original_cmd_name = cmd_name

        # Get the command
        cmd = self.get_command(ctx, cmd_name)

        # If we can't find the command but there is a normalization
        # function available, we try with that one.
        if cmd is None and ctx.token_normalize_func is not None:
            cmd_name = ctx.token_normalize_func(cmd_name)
            cmd = self.get_command(ctx, cmd_name)

        # If we don't find the command we want to show an error message
        # to the user that it was not provided.  However, there is
        # something else we should do: if the first argument looks like
        # an option we want to kick off parsing again for arguments to
        # resolve things like --help which now should go to the main
        # place.
        if cmd is None and not ctx.resilient_parsing:
            if split_opt(cmd_name)[0]:
                self.parse_args(ctx, ctx.args)
            ctx.fail("No such command '{}'.".format(original_cmd_name))

        return cmd_name, cmd, args[1:]

    def get_command(self, ctx, cmd_name):
        """Given a context and a command name, this returns a
        :class:`Command` object if it exists or returns `None`.
        """
        raise NotImplementedError()

    def list_commands(self, ctx):
        """Returns a list of subcommand names in the order they should
        appear.
        """
        return []


class Group(MultiCommand):
    """A group allows a command to have subcommands attached.  This is the
    most common way to implement nesting in Click.

    :param commands: a dictionary of commands.
    """

    def __init__(self, name=None, commands=None, **attrs):
        MultiCommand.__init__(self, name, **attrs)
        #: the registered subcommands by their exported names.
        self.commands = commands or {}

    def add_command(self, cmd, name=None):
        """Registers another :class:`Command` with this group.  If the name
        is not provided, the name of the command is used.
        """
        name = name or cmd.name
        if name is None:
            raise TypeError("Command has no name.")
        _check_multicommand(self, name, cmd, register=True)
        self.commands[name] = cmd

    def command(self, *args, **kwargs):
        """A shortcut decorator for declaring and attaching a command to
        the group.  This takes the same arguments as :func:`command` but
        immediately registers the created command with this instance by
        calling into :meth:`add_command`.
        """
        from .decorators import command

        def decorator(f):
            cmd = command(*args, **kwargs)(f)
            self.add_command(cmd)
            return cmd

        return decorator

    def group(self, *args, **kwargs):
        """A shortcut decorator for declaring and attaching a group to
        the group.  This takes the same arguments as :func:`group` but
        immediately registers the created command with this instance by
        calling into :meth:`add_command`.
        """
        from .decorators import group

        def decorator(f):
            cmd = group(*args, **kwargs)(f)
            self.add_command(cmd)
            return cmd

        return decorator

    def get_command(self, ctx, cmd_name):
        return self.commands.get(cmd_name)

    def list_commands(self, ctx):
        return sorted(self.commands)


class CommandCollection(MultiCommand):
    """A command collection is a multi command that merges multiple multi
    commands together into one.  This is a straightforward implementation
    that accepts a list of different multi commands as sources and
    provides all the commands for each of them.
    """

    def __init__(self, name=None, sources=None, **attrs):
        MultiCommand.__init__(self, name, **attrs)
        #: The list of registered multi commands.
        self.sources = sources or []

    def add_source(self, multi_cmd):
        """Adds a new multi command to the chain dispatcher."""
        self.sources.append(multi_cmd)

    def get_command(self, ctx, cmd_name):
        for source in self.sources:
            rv = source.get_command(ctx, cmd_name)
            if rv is not None:
                if self.chain:
                    _check_multicommand(self, cmd_name, rv)
                return rv

    def list_commands(self, ctx):
        rv = set()
        for source in self.sources:
            rv.update(source.list_commands(ctx))
        return sorted(rv)


class Parameter(object):
    r"""A parameter to a command comes in two versions: they are either
    :class:`Option`\s or :class:`Argument`\s.  Other subclasses are currently
    not supported by design as some of the internals for parsing are
    intentionally not finalized.

    Some settings are supported by both options and arguments.

    :param param_decls: the parameter declarations for this option or
                        argument.  This is a list of flags or argument
                        names.
    :param type: the type that should be used.  Either a :class:`ParamType`
                 or a Python type.  The later is converted into the former
                 automatically if supported.
    :param required: controls if this is optional or not.
    :param default: the default value if omitted.  This can also be a
                    callable, in which case it's invoked when the default is
                    needed without any arguments.
    :param callback: a callback that should be executed after the parameter
                     was matched.  This is called as ``fn(ctx, param,
                     value)`` and needs to return the value.
    :param nargs: the number of arguments to match.  If not ``1`` the return
                  value is a tuple instead of single value.  The default for
                  nargs is ``1`` (except if the type is a tuple, then it's
                  the arity of the tuple).
    :param metavar: how the value is represented in the help page.
    :param expose_value: if this is `True` then the value is passed onwards
                         to the command callback and stored on the context,
                         otherwise it's skipped.
    :param is_eager: eager values are processed before non eager ones.  This
                     should not be set for arguments or it will inverse the
                     order of processing.
    :param envvar: a string or list of strings that are environment variables
                   that should be checked.

    .. versionchanged:: 7.1
        Empty environment variables are ignored rather than taking the
        empty string value. This makes it possible for scripts to clear
        variables if they can't unset them.

    .. versionchanged:: 2.0
        Changed signature for parameter callback to also be passed the
        parameter. The old callback format will still work, but it will
        raise a warning to give you a chance to migrate the code easier.
    """

    param_type_name = "parameter"

    def __init__(
        self,
        param_decls=None,
        type=None,
        required=False,
        default=None,
        callback=None,
        nargs=None,
        metavar=None,
        expose_value=True,
        is_eager=False,
        envvar=None,
        autocompletion=None,
    ):
        self.name, self.opts, self.secondary_opts = self._parse_decls(
            param_decls or (), expose_value
        )

        self.type = convert_type(type, default)

        # Default nargs to what the type tells us if we have that
        # information available.
        if nargs is None:
            if self.type.is_composite:
                nargs = self.type.arity
            else:
                nargs = 1

        self.required = required
        self.callback = callback
        self.nargs = nargs
        self.multiple = False
        self.expose_value = expose_value
        self.default = default
        self.is_eager = is_eager
        self.metavar = metavar
        self.envvar = envvar
        self.autocompletion = autocompletion

    def __repr__(self):
        return "<{} {}>".format(self.__class__.__name__, self.name)

    @property
    def human_readable_name(self):
        """Returns the human readable name of this parameter.  This is the
        same as the name for options, but the metavar for arguments.
        """
        return self.name

    def make_metavar(self):
        if self.metavar is not None:
            return self.metavar
        metavar = self.type.get_metavar(self)
        if metavar is None:
            metavar = self.type.name.upper()
        if self.nargs != 1:
            metavar += "..."
        return metavar

    def get_default(self, ctx):
        """Given a context variable this calculates the default value."""
        # Otherwise go with the regular default.
        if callable(self.default):
            rv = self.default()
        else:
            rv = self.default
        return self.type_cast_value(ctx, rv)

    def add_to_parser(self, parser, ctx):
        pass

    def consume_value(self, ctx, opts):
        value = opts.get(self.name)
        if value is None:
            value = self.value_from_envvar(ctx)
        if value is None:
            value = ctx.lookup_default(self.name)
        return value

    def type_cast_value(self, ctx, value):
        """Given a value this runs it properly through the type system.
        This automatically handles things like `nargs` and `multiple` as
        well as composite types.
        """
        if self.type.is_composite:
            if self.nargs <= 1:
                raise TypeError(
                    "Attempted to invoke composite type but nargs has"
                    " been set to {}. This is not supported; nargs"
                    " needs to be set to a fixed value > 1.".format(self.nargs)
                )
            if self.multiple:
                return tuple(self.type(x or (), self, ctx) for x in value or ())
            return self.type(value or (), self, ctx)

        def _convert(value, level):
            if level == 0:
                return self.type(value, self, ctx)
            return tuple(_convert(x, level - 1) for x in value or ())

        return _convert(value, (self.nargs != 1) + bool(self.multiple))

    def process_value(self, ctx, value):
        """Given a value and context this runs the logic to convert the
        value as necessary.
        """
        # If the value we were given is None we do nothing.  This way
        # code that calls this can easily figure out if something was
        # not provided.  Otherwise it would be converted into an empty
        # tuple for multiple invocations which is inconvenient.
        if value is not None:
            return self.type_cast_value(ctx, value)

    def value_is_missing(self, value):
        if value is None:
            return True
        if (self.nargs != 1 or self.multiple) and value == ():
            return True
        return False

    def full_process_value(self, ctx, value):
        value = self.process_value(ctx, value)

        if value is None and not ctx.resilient_parsing:
            value = self.get_default(ctx)

        if self.required and self.value_is_missing(value):
            raise MissingParameter(ctx=ctx, param=self)

        return value

    def resolve_envvar_value(self, ctx):
        if self.envvar is None:
            return
        if isinstance(self.envvar, (tuple, list)):
            for envvar in self.envvar:
                rv = os.environ.get(envvar)
                if rv is not None:
                    return rv
        else:
            rv = os.environ.get(self.envvar)

            if rv != "":
                return rv

    def value_from_envvar(self, ctx):
        rv = self.resolve_envvar_value(ctx)
        if rv is not None and self.nargs != 1:
            rv = self.type.split_envvar_value(rv)
        return rv

    def handle_parse_result(self, ctx, opts, args):
        with augment_usage_errors(ctx, param=self):
            value = self.consume_value(ctx, opts)
            try:
                value = self.full_process_value(ctx, value)
            except Exception:
                if not ctx.resilient_parsing:
                    raise
                value = None
            if self.callback is not None:
                try:
                    value = invoke_param_callback(self.callback, ctx, self, value)
                except Exception:
                    if not ctx.resilient_parsing:
                        raise

        if self.expose_value:
            ctx.params[self.name] = value
        return value, args

    def get_help_record(self, ctx):
        pass

    def get_usage_pieces(self, ctx):
        return []

    def get_error_hint(self, ctx):
        """Get a stringified version of the param for use in error messages to
        indicate which param caused the error.
        """
        hint_list = self.opts or [self.human_readable_name]
        return " / ".join(repr(x) for x in hint_list)


class Option(Parameter):
    """Options are usually optional values on the command line and
    have some extra features that arguments don't have.

    All other parameters are passed onwards to the parameter constructor.

    :param show_default: controls if the default value should be shown on the
                         help page. Normally, defaults are not shown. If this
                         value is a string, it shows the string instead of
                         the value. This is particularly useful for dynamic
                         options.
    :param show_envvar: controls if an environment variable should be shown
                        on the help page.  Normally, environment variables
                        are not shown.
    :param prompt: if set to `True` or a non empty string then the user will
                   be prompted for input.  If set to `True` the prompt will
                   be the option name capitalized.
    :param confirmation_prompt: if set then the value will need to be
                                confirmed if it was prompted for.
    :param hide_input: if this is `True` then the input on the prompt will be
                       hidden from the user.  This is useful for password
                       input.
    :param is_flag: forces this option to act as a flag.  The default is
                    auto detection.
    :param flag_value: which value should be used for this flag if it's
                       enabled.  This is set to a boolean automatically if
                       the option string contains a slash to mark two
                       options.
    :param multiple: if this is set to `True` then the argument is accepted
                     multiple times and recorded.  This is similar to
                     ``nargs`` in how it works but supports arbitrary number
                     of arguments.
    :param count: this flag makes an option increment an integer.
    :param allow_from_autoenv: if this is enabled then the value of this
                               parameter will be pulled from an environment
                               variable in case a prefix is defined on the
                               context.
    :param help: the help string.
    :param hidden: hide this option from help outputs.
    """

    param_type_name = "option"

    def __init__(
        self,
        param_decls=None,
        show_default=False,
        prompt=False,
        confirmation_prompt=False,
        hide_input=False,
        is_flag=None,
        flag_value=None,
        multiple=False,
        count=False,
        allow_from_autoenv=True,
        type=None,
        help=None,
        hidden=False,
        show_choices=True,
        show_envvar=False,
        **attrs
    ):
        default_is_missing = attrs.get("default", _missing) is _missing
        Parameter.__init__(self, param_decls, type=type, **attrs)

        if prompt is True:
            prompt_text = self.name.replace("_", " ").capitalize()
        elif prompt is False:
            prompt_text = None
        else:
            prompt_text = prompt
        self.prompt = prompt_text
        self.confirmation_prompt = confirmation_prompt
        self.hide_input = hide_input
        self.hidden = hidden

        # Flags
        if is_flag is None:
            if flag_value is not None:
                is_flag = True
            else:
                is_flag = bool(self.secondary_opts)
        if is_flag and default_is_missing:
            self.default = False
        if flag_value is None:
            flag_value = not self.default
        self.is_flag = is_flag
        self.flag_value = flag_value
        if self.is_flag and isinstance(self.flag_value, bool) and type in [None, bool]:
            self.type = BOOL
            self.is_bool_flag = True
        else:
            self.is_bool_flag = False

        # Counting
        self.count = count
        if count:
            if type is None:
                self.type = IntRange(min=0)
            if default_is_missing:
                self.default = 0

        self.multiple = multiple
        self.allow_from_autoenv = allow_from_autoenv
        self.help = help
        self.show_default = show_default
        self.show_choices = show_choices
        self.show_envvar = show_envvar

        # Sanity check for stuff we don't support
        if __debug__:
            if self.nargs < 0:
                raise TypeError("Options cannot have nargs < 0")
            if self.prompt and self.is_flag and not self.is_bool_flag:
                raise TypeError("Cannot prompt for flags that are not bools.")
            if not self.is_bool_flag and self.secondary_opts:
                raise
TypeError("Got secondary option for non boolean flag.") if self.is_bool_flag and self.hide_input and self.prompt is not None: raise TypeError("Hidden input does not work with boolean flag prompts.") if self.count: if self.multiple: raise TypeError( "Options cannot be multiple and count at the same time." ) elif self.is_flag: raise TypeError( "Options cannot be count and flags at the same time." ) def _parse_decls(self, decls, expose_value): opts = [] secondary_opts = [] name = None possible_names = [] for decl in decls: if isidentifier(decl): if name is not None: raise TypeError("Name defined twice") name = decl else: split_char = ";" if decl[:1] == "/" else "/" if split_char in decl: first, second = decl.split(split_char, 1) first = first.rstrip() if first: possible_names.append(split_opt(first)) opts.append(first) second = second.lstrip() if second: secondary_opts.append(second.lstrip()) else: possible_names.append(split_opt(decl)) opts.append(decl) if name is None and possible_names: possible_names.sort(key=lambda x: -len(x[0])) # group long options first name = possible_names[0][1].replace("-", "_").lower() if not isidentifier(name): name = None if name is None: if not expose_value: return None, opts, secondary_opts raise TypeError("Could not determine name for option") if not opts and not secondary_opts: raise TypeError( "No options defined but a name was passed ({}). 
Did you" " mean to declare an argument instead of an option?".format(name) ) return name, opts, secondary_opts def add_to_parser(self, parser, ctx): kwargs = { "dest": self.name, "nargs": self.nargs, "obj": self, } if self.multiple: action = "append" elif self.count: action = "count" else: action = "store" if self.is_flag: kwargs.pop("nargs", None) action_const = "{}_const".format(action) if self.is_bool_flag and self.secondary_opts: parser.add_option(self.opts, action=action_const, const=True, **kwargs) parser.add_option( self.secondary_opts, action=action_const, const=False, **kwargs ) else: parser.add_option( self.opts, action=action_const, const=self.flag_value, **kwargs ) else: kwargs["action"] = action parser.add_option(self.opts, **kwargs) def get_help_record(self, ctx): if self.hidden: return any_prefix_is_slash = [] def _write_opts(opts): rv, any_slashes = join_options(opts) if any_slashes: any_prefix_is_slash[:] = [True] if not self.is_flag and not self.count: rv += " {}".format(self.make_metavar()) return rv rv = [_write_opts(self.opts)] if self.secondary_opts: rv.append(_write_opts(self.secondary_opts)) help = self.help or "" extra = [] if self.show_envvar: envvar = self.envvar if envvar is None: if self.allow_from_autoenv and ctx.auto_envvar_prefix is not None: envvar = "{}_{}".format(ctx.auto_envvar_prefix, self.name.upper()) if envvar is not None: extra.append( "env var: {}".format( ", ".join(str(d) for d in envvar) if isinstance(envvar, (list, tuple)) else envvar ) ) if self.default is not None and (self.show_default or ctx.show_default): if isinstance(self.show_default, string_types): default_string = "({})".format(self.show_default) elif isinstance(self.default, (list, tuple)): default_string = ", ".join(str(d) for d in self.default) elif inspect.isfunction(self.default): default_string = "(dynamic)" else: default_string = self.default extra.append("default: {}".format(default_string)) if self.required: extra.append("required") if extra: help = 
"{}[{}]".format( "{} ".format(help) if help else "", "; ".join(extra) ) return ("; " if any_prefix_is_slash else " / ").join(rv), help def get_default(self, ctx): # If we're a non boolean flag our default is more complex because # we need to look at all flags in the same group to figure out # if we're the the default one in which case we return the flag # value as default. if self.is_flag and not self.is_bool_flag: for param in ctx.command.params: if param.name == self.name and param.default: return param.flag_value return None return Parameter.get_default(self, ctx) def prompt_for_value(self, ctx): """This is an alternative flow that can be activated in the full value processing if a value does not exist. It will prompt the user until a valid value exists and then returns the processed value as result. """ # Calculate the default before prompting anything to be stable. default = self.get_default(ctx) # If this is a prompt for a flag we need to handle this # differently. if self.is_bool_flag: return confirm(self.prompt, default) return prompt( self.prompt, default=default, type=self.type, hide_input=self.hide_input, show_choices=self.show_choices, confirmation_prompt=self.confirmation_prompt, value_proc=lambda x: self.process_value(ctx, x), ) def resolve_envvar_value(self, ctx): rv = Parameter.resolve_envvar_value(self, ctx) if rv is not None: return rv if self.allow_from_autoenv and ctx.auto_envvar_prefix is not None: envvar = "{}_{}".format(ctx.auto_envvar_prefix, self.name.upper()) return os.environ.get(envvar) def value_from_envvar(self, ctx): rv = self.resolve_envvar_value(ctx) if rv is None: return None value_depth = (self.nargs != 1) + bool(self.multiple) if value_depth > 0 and rv is not None: rv = self.type.split_envvar_value(rv) if self.multiple and self.nargs != 1: rv = batch(rv, self.nargs) return rv def full_process_value(self, ctx, value): if value is None and self.prompt is not None and not ctx.resilient_parsing: return self.prompt_for_value(ctx) 
return Parameter.full_process_value(self, ctx, value) class Argument(Parameter): """Arguments are positional parameters to a command. They generally provide fewer features than options but can have infinite ``nargs`` and are required by default. All parameters are passed onwards to the parameter constructor. """ param_type_name = "argument" def __init__(self, param_decls, required=None, **attrs): if required is None: if attrs.get("default") is not None: required = False else: required = attrs.get("nargs", 1) > 0 Parameter.__init__(self, param_decls, required=required, **attrs) if self.default is not None and self.nargs < 0: raise TypeError( "nargs=-1 in combination with a default value is not supported." ) @property def human_readable_name(self): if self.metavar is not None: return self.metavar return self.name.upper() def make_metavar(self): if self.metavar is not None: return self.metavar var = self.type.get_metavar(self) if not var: var = self.name.upper() if not self.required: var = "[{}]".format(var) if self.nargs != 1: var += "..." return var def _parse_decls(self, decls, expose_value): if not decls: if not expose_value: return None, [], [] raise TypeError("Could not determine name for argument") if len(decls) == 1: name = arg = decls[0] name = name.replace("-", "_").lower() else: raise TypeError( "Arguments take exactly one parameter declaration, got" " {}".format(len(decls)) ) return name, [arg], [] def get_usage_pieces(self, ctx): return [self.make_metavar()] def get_error_hint(self, ctx): return repr(self.make_metavar()) def add_to_parser(self, parser, ctx): parser.add_argument(dest=self.name, nargs=self.nargs, obj=self)
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/types.py
import os import stat from datetime import datetime from ._compat import _get_argv_encoding from ._compat import filename_to_ui from ._compat import get_filesystem_encoding from ._compat import get_streerror from ._compat import open_stream from ._compat import PY2 from ._compat import text_type from .exceptions import BadParameter from .utils import LazyFile from .utils import safecall class ParamType(object): """Helper for converting values through types. The following is necessary for a valid type: * it needs a name * it needs to pass through None unchanged * it needs to convert from a string * it needs to convert its result type through unchanged (eg: needs to be idempotent) * it needs to be able to deal with param and context being `None`. This can be the case when the object is used with prompt inputs. """ is_composite = False #: the descriptive name of this type name = None #: if a list of this type is expected and the value is pulled from a #: string environment variable, this is what splits it up. `None` #: means any whitespace. For all parameters the general rule is that #: whitespace splits them up. The exception are paths and files which #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on #: Windows). envvar_list_splitter = None def __call__(self, value, param=None, ctx=None): if value is not None: return self.convert(value, param, ctx) def get_metavar(self, param): """Returns the metavar default for this param if it provides one.""" def get_missing_message(self, param): """Optionally might return extra information about a missing parameter. .. versionadded:: 2.0 """ def convert(self, value, param, ctx): """Converts the value. This is not invoked for values that are `None` (the missing value). """ return value def split_envvar_value(self, rv): """Given a value from an environment variable this splits it up into small chunks depending on the defined envvar list splitter. 
If the splitter is set to `None`, which means that whitespace splits, then leading and trailing whitespace is ignored. Otherwise, leading and trailing splitters usually lead to empty items being included. """ return (rv or "").split(self.envvar_list_splitter) def fail(self, message, param=None, ctx=None): """Helper method to fail with an invalid value message.""" raise BadParameter(message, ctx=ctx, param=param) class CompositeParamType(ParamType): is_composite = True @property def arity(self): raise NotImplementedError() class FuncParamType(ParamType): def __init__(self, func): self.name = func.__name__ self.func = func def convert(self, value, param, ctx): try: return self.func(value) except ValueError: try: value = text_type(value) except UnicodeError: value = str(value).decode("utf-8", "replace") self.fail(value, param, ctx) class UnprocessedParamType(ParamType): name = "text" def convert(self, value, param, ctx): return value def __repr__(self): return "UNPROCESSED" class StringParamType(ParamType): name = "text" def convert(self, value, param, ctx): if isinstance(value, bytes): enc = _get_argv_encoding() try: value = value.decode(enc) except UnicodeError: fs_enc = get_filesystem_encoding() if fs_enc != enc: try: value = value.decode(fs_enc) except UnicodeError: value = value.decode("utf-8", "replace") else: value = value.decode("utf-8", "replace") return value return value def __repr__(self): return "STRING" class Choice(ParamType): """The choice type allows a value to be checked against a fixed set of supported values. All of these values have to be strings. You should only pass a list or tuple of choices. Other iterables (like generators) may lead to surprising results. The resulting value will always be one of the originally passed choices regardless of ``case_sensitive`` or any ``ctx.token_normalize_func`` being specified. See :ref:`choice-opts` for an example. :param case_sensitive: Set to false to make choices case insensitive. Defaults to true. 
""" name = "choice" def __init__(self, choices, case_sensitive=True): self.choices = choices self.case_sensitive = case_sensitive def get_metavar(self, param): return "[{}]".format("|".join(self.choices)) def get_missing_message(self, param): return "Choose from:\n\t{}.".format(",\n\t".join(self.choices)) def convert(self, value, param, ctx): # Match through normalization and case sensitivity # first do token_normalize_func, then lowercase # preserve original `value` to produce an accurate message in # `self.fail` normed_value = value normed_choices = {choice: choice for choice in self.choices} if ctx is not None and ctx.token_normalize_func is not None: normed_value = ctx.token_normalize_func(value) normed_choices = { ctx.token_normalize_func(normed_choice): original for normed_choice, original in normed_choices.items() } if not self.case_sensitive: if PY2: lower = str.lower else: lower = str.casefold normed_value = lower(normed_value) normed_choices = { lower(normed_choice): original for normed_choice, original in normed_choices.items() } if normed_value in normed_choices: return normed_choices[normed_value] self.fail( "invalid choice: {}. (choose from {})".format( value, ", ".join(self.choices) ), param, ctx, ) def __repr__(self): return "Choice('{}')".format(list(self.choices)) class DateTime(ParamType): """The DateTime type converts date strings into `datetime` objects. The format strings which are checked are configurable, but default to some common (non-timezone aware) ISO 8601 formats. When specifying *DateTime* formats, you should only pass a list or a tuple. Other iterables, like generators, may lead to surprising results. The format strings are processed using ``datetime.strptime``, and this consequently defines the format strings which are allowed. Parsing is tried using each format, in order, and the first format which parses successfully is used. :param formats: A list or tuple of date format strings, in the order in which they should be tried. 
Defaults to ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, ``'%Y-%m-%d %H:%M:%S'``. """ name = "datetime" def __init__(self, formats=None): self.formats = formats or ["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"] def get_metavar(self, param): return "[{}]".format("|".join(self.formats)) def _try_to_convert_date(self, value, format): try: return datetime.strptime(value, format) except ValueError: return None def convert(self, value, param, ctx): # Exact match for format in self.formats: dtime = self._try_to_convert_date(value, format) if dtime: return dtime self.fail( "invalid datetime format: {}. (choose from {})".format( value, ", ".join(self.formats) ) ) def __repr__(self): return "DateTime" class IntParamType(ParamType): name = "integer" def convert(self, value, param, ctx): try: return int(value) except ValueError: self.fail("{} is not a valid integer".format(value), param, ctx) def __repr__(self): return "INT" class IntRange(IntParamType): """A parameter that works similar to :data:`click.INT` but restricts the value to fit into a range. The default behavior is to fail if the value falls outside the range, but it can also be silently clamped between the two edges. See :ref:`ranges` for an example. 
""" name = "integer range" def __init__(self, min=None, max=None, clamp=False): self.min = min self.max = max self.clamp = clamp def convert(self, value, param, ctx): rv = IntParamType.convert(self, value, param, ctx) if self.clamp: if self.min is not None and rv < self.min: return self.min if self.max is not None and rv > self.max: return self.max if ( self.min is not None and rv < self.min or self.max is not None and rv > self.max ): if self.min is None: self.fail( "{} is bigger than the maximum valid value {}.".format( rv, self.max ), param, ctx, ) elif self.max is None: self.fail( "{} is smaller than the minimum valid value {}.".format( rv, self.min ), param, ctx, ) else: self.fail( "{} is not in the valid range of {} to {}.".format( rv, self.min, self.max ), param, ctx, ) return rv def __repr__(self): return "IntRange({}, {})".format(self.min, self.max) class FloatParamType(ParamType): name = "float" def convert(self, value, param, ctx): try: return float(value) except ValueError: self.fail( "{} is not a valid floating point value".format(value), param, ctx ) def __repr__(self): return "FLOAT" class FloatRange(FloatParamType): """A parameter that works similar to :data:`click.FLOAT` but restricts the value to fit into a range. The default behavior is to fail if the value falls outside the range, but it can also be silently clamped between the two edges. See :ref:`ranges` for an example. 
""" name = "float range" def __init__(self, min=None, max=None, clamp=False): self.min = min self.max = max self.clamp = clamp def convert(self, value, param, ctx): rv = FloatParamType.convert(self, value, param, ctx) if self.clamp: if self.min is not None and rv < self.min: return self.min if self.max is not None and rv > self.max: return self.max if ( self.min is not None and rv < self.min or self.max is not None and rv > self.max ): if self.min is None: self.fail( "{} is bigger than the maximum valid value {}.".format( rv, self.max ), param, ctx, ) elif self.max is None: self.fail( "{} is smaller than the minimum valid value {}.".format( rv, self.min ), param, ctx, ) else: self.fail( "{} is not in the valid range of {} to {}.".format( rv, self.min, self.max ), param, ctx, ) return rv def __repr__(self): return "FloatRange({}, {})".format(self.min, self.max) class BoolParamType(ParamType): name = "boolean" def convert(self, value, param, ctx): if isinstance(value, bool): return bool(value) value = value.lower() if value in ("true", "t", "1", "yes", "y"): return True elif value in ("false", "f", "0", "no", "n"): return False self.fail("{} is not a valid boolean".format(value), param, ctx) def __repr__(self): return "BOOL" class UUIDParameterType(ParamType): name = "uuid" def convert(self, value, param, ctx): import uuid try: if PY2 and isinstance(value, text_type): value = value.encode("ascii") return uuid.UUID(value) except ValueError: self.fail("{} is not a valid UUID value".format(value), param, ctx) def __repr__(self): return "UUID" class File(ParamType): """Declares a parameter to be a file for reading or writing. The file is automatically closed once the context tears down (after the command finished working). Files can be opened for reading or writing. The special value ``-`` indicates stdin or stdout depending on the mode. By default, the file is opened for reading text data, but it can also be opened in binary mode or for writing. 
The encoding parameter can be used to force a specific encoding. The `lazy` flag controls if the file should be opened immediately or upon first IO. The default is to be non-lazy for standard input and output streams as well as files opened for reading, `lazy` otherwise. When opening a file lazily for reading, it is still opened temporarily for validation, but will not be held open until first IO. lazy is mainly useful when opening for writing to avoid creating the file until it is needed. Starting with Click 2.0, files can also be opened atomically in which case all writes go into a separate file in the same folder and upon completion the file will be moved over to the original location. This is useful if a file regularly read by other users is modified. See :ref:`file-args` for more information. """ name = "filename" envvar_list_splitter = os.path.pathsep def __init__( self, mode="r", encoding=None, errors="strict", lazy=None, atomic=False ): self.mode = mode self.encoding = encoding self.errors = errors self.lazy = lazy self.atomic = atomic def resolve_lazy_flag(self, value): if self.lazy is not None: return self.lazy if value == "-": return False elif "w" in self.mode: return True return False def convert(self, value, param, ctx): try: if hasattr(value, "read") or hasattr(value, "write"): return value lazy = self.resolve_lazy_flag(value) if lazy: f = LazyFile( value, self.mode, self.encoding, self.errors, atomic=self.atomic ) if ctx is not None: ctx.call_on_close(f.close_intelligently) return f f, should_close = open_stream( value, self.mode, self.encoding, self.errors, atomic=self.atomic ) # If a context is provided, we automatically close the file # at the end of the context execution (or flush out). If a # context does not exist, it's the caller's responsibility to # properly close the file. This for instance happens when the # type is used with prompts. 
if ctx is not None: if should_close: ctx.call_on_close(safecall(f.close)) else: ctx.call_on_close(safecall(f.flush)) return f except (IOError, OSError) as e: # noqa: B014 self.fail( "Could not open file: {}: {}".format( filename_to_ui(value), get_streerror(e) ), param, ctx, ) class Path(ParamType): """The path type is similar to the :class:`File` type but it performs different checks. First of all, instead of returning an open file handle it returns just the filename. Secondly, it can perform various basic checks about what the file or directory should be. .. versionchanged:: 6.0 `allow_dash` was added. :param exists: if set to true, the file or directory needs to exist for this value to be valid. If this is not required and a file does indeed not exist, then all further checks are silently skipped. :param file_okay: controls if a file is a possible value. :param dir_okay: controls if a directory is a possible value. :param writable: if true, a writable check is performed. :param readable: if true, a readable check is performed. :param resolve_path: if this is true, then the path is fully resolved before the value is passed onwards. This means that it's absolute and symlinks are resolved. It will not expand a tilde-prefix, as this is supposed to be done by the shell only. :param allow_dash: If this is set to `True`, a single dash to indicate standard streams is permitted. :param path_type: optionally a string type that should be used to represent the path. The default is `None` which means the return value will be either bytes or unicode depending on what makes most sense given the input data Click deals with. 
""" envvar_list_splitter = os.path.pathsep def __init__( self, exists=False, file_okay=True, dir_okay=True, writable=False, readable=True, resolve_path=False, allow_dash=False, path_type=None, ): self.exists = exists self.file_okay = file_okay self.dir_okay = dir_okay self.writable = writable self.readable = readable self.resolve_path = resolve_path self.allow_dash = allow_dash self.type = path_type if self.file_okay and not self.dir_okay: self.name = "file" self.path_type = "File" elif self.dir_okay and not self.file_okay: self.name = "directory" self.path_type = "Directory" else: self.name = "path" self.path_type = "Path" def coerce_path_result(self, rv): if self.type is not None and not isinstance(rv, self.type): if self.type is text_type: rv = rv.decode(get_filesystem_encoding()) else: rv = rv.encode(get_filesystem_encoding()) return rv def convert(self, value, param, ctx): rv = value is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") if not is_dash: if self.resolve_path: rv = os.path.realpath(rv) try: st = os.stat(rv) except OSError: if not self.exists: return self.coerce_path_result(rv) self.fail( "{} '{}' does not exist.".format( self.path_type, filename_to_ui(value) ), param, ctx, ) if not self.file_okay and stat.S_ISREG(st.st_mode): self.fail( "{} '{}' is a file.".format(self.path_type, filename_to_ui(value)), param, ctx, ) if not self.dir_okay and stat.S_ISDIR(st.st_mode): self.fail( "{} '{}' is a directory.".format( self.path_type, filename_to_ui(value) ), param, ctx, ) if self.writable and not os.access(value, os.W_OK): self.fail( "{} '{}' is not writable.".format( self.path_type, filename_to_ui(value) ), param, ctx, ) if self.readable and not os.access(value, os.R_OK): self.fail( "{} '{}' is not readable.".format( self.path_type, filename_to_ui(value) ), param, ctx, ) return self.coerce_path_result(rv) class Tuple(CompositeParamType): """The default behavior of Click is to apply a type on a value directly. 
This works well in most cases, except for when `nargs` is set to a fixed count and different types should be used for different items. In this case the :class:`Tuple` type can be used. This type can only be used if `nargs` is set to a fixed number. For more information see :ref:`tuple-type`. This can be selected by using a Python tuple literal as a type. :param types: a list of types that should be used for the tuple items. """ def __init__(self, types): self.types = [convert_type(ty) for ty in types] @property def name(self): return "<{}>".format(" ".join(ty.name for ty in self.types)) @property def arity(self): return len(self.types) def convert(self, value, param, ctx): if len(value) != len(self.types): raise TypeError( "It would appear that nargs is set to conflict with the" " composite type arity." ) return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) def convert_type(ty, default=None): """Converts a callable or python type into the most appropriate param type. """ guessed_type = False if ty is None and default is not None: if isinstance(default, tuple): ty = tuple(map(type, default)) else: ty = type(default) guessed_type = True if isinstance(ty, tuple): return Tuple(ty) if isinstance(ty, ParamType): return ty if ty is text_type or ty is str or ty is None: return STRING if ty is int: return INT # Booleans are only okay if not guessed. This is done because for # flags the default value is actually a bit of a lie in that it # indicates which of the flags is the one we want. See get_default() # for more information. if ty is bool and not guessed_type: return BOOL if ty is float: return FLOAT if guessed_type: return STRING # Catch a common mistake if __debug__: try: if issubclass(ty, ParamType): raise AssertionError( "Attempted to use an uninstantiated parameter type ({}).".format(ty) ) except TypeError: pass return FuncParamType(ty) #: A dummy parameter type that just does nothing. 
From a user's #: perspective this appears to just be the same as `STRING` but internally #: no string conversion takes place. This is necessary to achieve the #: same bytes/unicode behavior on Python 2/3 in situations where you want #: to not convert argument types. This is usually useful when working #: with file paths as they can appear in bytes and unicode. #: #: For path related uses the :class:`Path` type is a better choice but #: there are situations where an unprocessed type is useful which is why #: it is is provided. #: #: .. versionadded:: 4.0 UNPROCESSED = UnprocessedParamType() #: A unicode string parameter type which is the implicit default. This #: can also be selected by using ``str`` as type. STRING = StringParamType() #: An integer parameter. This can also be selected by using ``int`` as #: type. INT = IntParamType() #: A floating point value parameter. This can also be selected by using #: ``float`` as type. FLOAT = FloatParamType() #: A boolean parameter. This is the default for boolean flags. This can #: also be selected by using ``bool`` as a type. BOOL = BoolParamType() #: A UUID parameter. UUID = UUIDParameterType()
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/formatting.py
from contextlib import contextmanager from ._compat import term_len from .parser import split_opt from .termui import get_terminal_size # Can force a width. This is used by the test system FORCED_WIDTH = None def measure_table(rows): widths = {} for row in rows: for idx, col in enumerate(row): widths[idx] = max(widths.get(idx, 0), term_len(col)) return tuple(y for x, y in sorted(widths.items())) def iter_rows(rows, col_count): for row in rows: row = tuple(row) yield row + ("",) * (col_count - len(row)) def wrap_text( text, width=78, initial_indent="", subsequent_indent="", preserve_paragraphs=False ): """A helper function that intelligently wraps text. By default, it assumes that it operates on a single paragraph of text but if the `preserve_paragraphs` parameter is provided it will intelligently handle paragraphs (defined by two empty lines). If paragraphs are handled, a paragraph can be prefixed with an empty line containing the ``\\b`` character (``\\x08``) to indicate that no rewrapping should happen in that block. :param text: the text that should be rewrapped. :param width: the maximum width for the text. :param initial_indent: the initial indent that should be placed on the first line as a string. :param subsequent_indent: the indent string that should be placed on each consecutive line. :param preserve_paragraphs: if this flag is set then the wrapping will intelligently handle paragraphs. 
""" from ._textwrap import TextWrapper text = text.expandtabs() wrapper = TextWrapper( width, initial_indent=initial_indent, subsequent_indent=subsequent_indent, replace_whitespace=False, ) if not preserve_paragraphs: return wrapper.fill(text) p = [] buf = [] indent = None def _flush_par(): if not buf: return if buf[0].strip() == "\b": p.append((indent or 0, True, "\n".join(buf[1:]))) else: p.append((indent or 0, False, " ".join(buf))) del buf[:] for line in text.splitlines(): if not line: _flush_par() indent = None else: if indent is None: orig_len = term_len(line) line = line.lstrip() indent = orig_len - term_len(line) buf.append(line) _flush_par() rv = [] for indent, raw, text in p: with wrapper.extra_indent(" " * indent): if raw: rv.append(wrapper.indent_only(text)) else: rv.append(wrapper.fill(text)) return "\n\n".join(rv) class HelpFormatter(object): """This class helps with formatting text-based help pages. It's usually just needed for very special internal cases, but it's also exposed so that developers can write their own fancy outputs. At present, it always writes into memory. :param indent_increment: the additional increment for each level. :param width: the width for the text. This defaults to the terminal width clamped to a maximum of 78. """ def __init__(self, indent_increment=2, width=None, max_width=None): self.indent_increment = indent_increment if max_width is None: max_width = 80 if width is None: width = FORCED_WIDTH if width is None: width = max(min(get_terminal_size()[0], max_width) - 2, 50) self.width = width self.current_indent = 0 self.buffer = [] def write(self, string): """Writes a unicode string into the internal buffer.""" self.buffer.append(string) def indent(self): """Increases the indentation.""" self.current_indent += self.indent_increment def dedent(self): """Decreases the indentation.""" self.current_indent -= self.indent_increment def write_usage(self, prog, args="", prefix="Usage: "): """Writes a usage line into the buffer. 
:param prog: the program name. :param args: whitespace separated list of arguments. :param prefix: the prefix for the first line. """ usage_prefix = "{:>{w}}{} ".format(prefix, prog, w=self.current_indent) text_width = self.width - self.current_indent if text_width >= (term_len(usage_prefix) + 20): # The arguments will fit to the right of the prefix. indent = " " * term_len(usage_prefix) self.write( wrap_text( args, text_width, initial_indent=usage_prefix, subsequent_indent=indent, ) ) else: # The prefix is too long, put the arguments on the next line. self.write(usage_prefix) self.write("\n") indent = " " * (max(self.current_indent, term_len(prefix)) + 4) self.write( wrap_text( args, text_width, initial_indent=indent, subsequent_indent=indent ) ) self.write("\n") def write_heading(self, heading): """Writes a heading into the buffer.""" self.write("{:>{w}}{}:\n".format("", heading, w=self.current_indent)) def write_paragraph(self): """Writes a paragraph into the buffer.""" if self.buffer: self.write("\n") def write_text(self, text): """Writes re-indented text into the buffer. This rewraps and preserves paragraphs. """ text_width = max(self.width - self.current_indent, 11) indent = " " * self.current_indent self.write( wrap_text( text, text_width, initial_indent=indent, subsequent_indent=indent, preserve_paragraphs=True, ) ) self.write("\n") def write_dl(self, rows, col_max=30, col_spacing=2): """Writes a definition list into the buffer. This is how options and commands are usually formatted. :param rows: a list of two item tuples for the terms and values. :param col_max: the maximum width of the first column. :param col_spacing: the number of spaces between the first and second column. 
""" rows = list(rows) widths = measure_table(rows) if len(widths) != 2: raise TypeError("Expected two columns for definition list") first_col = min(widths[0], col_max) + col_spacing for first, second in iter_rows(rows, len(widths)): self.write("{:>{w}}{}".format("", first, w=self.current_indent)) if not second: self.write("\n") continue if term_len(first) <= first_col - col_spacing: self.write(" " * (first_col - term_len(first))) else: self.write("\n") self.write(" " * (first_col + self.current_indent)) text_width = max(self.width - first_col - 2, 10) wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) lines = wrapped_text.splitlines() if lines: self.write("{}\n".format(lines[0])) for line in lines[1:]: self.write( "{:>{w}}{}\n".format( "", line, w=first_col + self.current_indent ) ) if len(lines) > 1: # separate long help from next option self.write("\n") else: self.write("\n") @contextmanager def section(self, name): """Helpful context manager that writes a paragraph, a heading, and the indents. :param name: the section name that is written as heading. """ self.write_paragraph() self.write_heading(name) self.indent() try: yield finally: self.dedent() @contextmanager def indentation(self): """A context manager that increases the indentation.""" self.indent() try: yield finally: self.dedent() def getvalue(self): """Returns the buffer contents.""" return "".join(self.buffer) def join_options(options): """Given a list of option strings this joins them in the most appropriate way and returns them in the form ``(formatted_string, any_prefix_is_slash)`` where the second item in the tuple is a flag that indicates if any of the option prefixes was a slash. """ rv = [] any_prefix_is_slash = False for opt in options: prefix = split_opt(opt)[0] if prefix == "/": any_prefix_is_slash = True rv.append((len(prefix), opt)) rv.sort(key=lambda x: x[0]) rv = ", ".join(x[1] for x in rv) return rv, any_prefix_is_slash
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/parser.py
# -*- coding: utf-8 -*- """ This module started out as largely a copy paste from the stdlib's optparse module with the features removed that we do not need from optparse because we implement them in Click on a higher level (for instance type handling, help formatting and a lot more). The plan is to remove more and more from here over time. The reason this is a different module and not optparse from the stdlib is that there are differences in 2.x and 3.x about the error messages generated and optparse in the stdlib uses gettext for no good reason and might cause us issues. Click uses parts of optparse written by Gregory P. Ward and maintained by the Python Software Foundation. This is limited to code in parser.py. Copyright 2001-2006 Gregory P. Ward. All rights reserved. Copyright 2002-2006 Python Software Foundation. All rights reserved. """ import re from collections import deque from .exceptions import BadArgumentUsage from .exceptions import BadOptionUsage from .exceptions import NoSuchOption from .exceptions import UsageError def _unpack_args(args, nargs_spec): """Given an iterable of arguments and an iterable of nargs specifications, it returns a tuple with all the unpacked arguments at the first index and all remaining arguments as the second. The nargs specification is the number of arguments that should be consumed or `-1` to indicate that this position should eat up all the remainders. Missing items are filled with `None`. """ args = deque(args) nargs_spec = deque(nargs_spec) rv = [] spos = None def _fetch(c): try: if spos is None: return c.popleft() else: return c.pop() except IndexError: return None while nargs_spec: nargs = _fetch(nargs_spec) if nargs == 1: rv.append(_fetch(args)) elif nargs > 1: x = [_fetch(args) for _ in range(nargs)] # If we're reversed, we're pulling in the arguments in reverse, # so we need to turn them around. 
if spos is not None: x.reverse() rv.append(tuple(x)) elif nargs < 0: if spos is not None: raise TypeError("Cannot have two nargs < 0") spos = len(rv) rv.append(None) # spos is the position of the wildcard (star). If it's not `None`, # we fill it with the remainder. if spos is not None: rv[spos] = tuple(args) args = [] rv[spos + 1 :] = reversed(rv[spos + 1 :]) return tuple(rv), list(args) def _error_opt_args(nargs, opt): if nargs == 1: raise BadOptionUsage(opt, "{} option requires an argument".format(opt)) raise BadOptionUsage(opt, "{} option requires {} arguments".format(opt, nargs)) def split_opt(opt): first = opt[:1] if first.isalnum(): return "", opt if opt[1:2] == first: return opt[:2], opt[2:] return first, opt[1:] def normalize_opt(opt, ctx): if ctx is None or ctx.token_normalize_func is None: return opt prefix, opt = split_opt(opt) return prefix + ctx.token_normalize_func(opt) def split_arg_string(string): """Given an argument string this attempts to split it into small parts.""" rv = [] for match in re.finditer( r"('([^'\\]*(?:\\.[^'\\]*)*)'|\"([^\"\\]*(?:\\.[^\"\\]*)*)\"|\S+)\s*", string, re.S, ): arg = match.group().strip() if arg[:1] == arg[-1:] and arg[:1] in "\"'": arg = arg[1:-1].encode("ascii", "backslashreplace").decode("unicode-escape") try: arg = type(string)(arg) except UnicodeError: pass rv.append(arg) return rv class Option(object): def __init__(self, opts, dest, action=None, nargs=1, const=None, obj=None): self._short_opts = [] self._long_opts = [] self.prefixes = set() for opt in opts: prefix, value = split_opt(opt) if not prefix: raise ValueError("Invalid start character for option ({})".format(opt)) self.prefixes.add(prefix[0]) if len(prefix) == 1 and len(value) == 1: self._short_opts.append(opt) else: self._long_opts.append(opt) self.prefixes.add(prefix) if action is None: action = "store" self.dest = dest self.action = action self.nargs = nargs self.const = const self.obj = obj @property def takes_value(self): return self.action in 
("store", "append") def process(self, value, state): if self.action == "store": state.opts[self.dest] = value elif self.action == "store_const": state.opts[self.dest] = self.const elif self.action == "append": state.opts.setdefault(self.dest, []).append(value) elif self.action == "append_const": state.opts.setdefault(self.dest, []).append(self.const) elif self.action == "count": state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 else: raise ValueError("unknown action '{}'".format(self.action)) state.order.append(self.obj) class Argument(object): def __init__(self, dest, nargs=1, obj=None): self.dest = dest self.nargs = nargs self.obj = obj def process(self, value, state): if self.nargs > 1: holes = sum(1 for x in value if x is None) if holes == len(value): value = None elif holes != 0: raise BadArgumentUsage( "argument {} takes {} values".format(self.dest, self.nargs) ) state.opts[self.dest] = value state.order.append(self.obj) class ParsingState(object): def __init__(self, rargs): self.opts = {} self.largs = [] self.rargs = rargs self.order = [] class OptionParser(object): """The option parser is an internal class that is ultimately used to parse options and arguments. It's modelled after optparse and brings a similar but vastly simplified API. It should generally not be used directly as the high level Click classes wrap it for you. It's not nearly as extensible as optparse or argparse as it does not implement features that are implemented on a higher level (such as types or defaults). :param ctx: optionally the :class:`~click.Context` where this parser should go with. """ def __init__(self, ctx=None): #: The :class:`~click.Context` for this parser. This might be #: `None` for some advanced use cases. self.ctx = ctx #: This controls how the parser deals with interspersed arguments. #: If this is set to `False`, the parser will stop on the first #: non-option. Click uses this to implement nested subcommands #: safely. 
self.allow_interspersed_args = True #: This tells the parser how to deal with unknown options. By #: default it will error out (which is sensible), but there is a #: second mode where it will ignore it and continue processing #: after shifting all the unknown options into the resulting args. self.ignore_unknown_options = False if ctx is not None: self.allow_interspersed_args = ctx.allow_interspersed_args self.ignore_unknown_options = ctx.ignore_unknown_options self._short_opt = {} self._long_opt = {} self._opt_prefixes = {"-", "--"} self._args = [] def add_option(self, opts, dest, action=None, nargs=1, const=None, obj=None): """Adds a new option named `dest` to the parser. The destination is not inferred (unlike with optparse) and needs to be explicitly provided. Action can be any of ``store``, ``store_const``, ``append``, ``appnd_const`` or ``count``. The `obj` can be used to identify the option in the order list that is returned from the parser. """ if obj is None: obj = dest opts = [normalize_opt(opt, self.ctx) for opt in opts] option = Option(opts, dest, action=action, nargs=nargs, const=const, obj=obj) self._opt_prefixes.update(option.prefixes) for opt in option._short_opts: self._short_opt[opt] = option for opt in option._long_opts: self._long_opt[opt] = option def add_argument(self, dest, nargs=1, obj=None): """Adds a positional argument named `dest` to the parser. The `obj` can be used to identify the option in the order list that is returned from the parser. """ if obj is None: obj = dest self._args.append(Argument(dest=dest, nargs=nargs, obj=obj)) def parse_args(self, args): """Parses positional arguments and returns ``(values, args, order)`` for the parsed options and arguments as well as the leftover arguments if there are any. The order is a list of objects as they appear on the command line. If arguments appear multiple times they will be memorized multiple times as well. 
""" state = ParsingState(args) try: self._process_args_for_options(state) self._process_args_for_args(state) except UsageError: if self.ctx is None or not self.ctx.resilient_parsing: raise return state.opts, state.largs, state.order def _process_args_for_args(self, state): pargs, args = _unpack_args( state.largs + state.rargs, [x.nargs for x in self._args] ) for idx, arg in enumerate(self._args): arg.process(pargs[idx], state) state.largs = args state.rargs = [] def _process_args_for_options(self, state): while state.rargs: arg = state.rargs.pop(0) arglen = len(arg) # Double dashes always handled explicitly regardless of what # prefixes are valid. if arg == "--": return elif arg[:1] in self._opt_prefixes and arglen > 1: self._process_opts(arg, state) elif self.allow_interspersed_args: state.largs.append(arg) else: state.rargs.insert(0, arg) return # Say this is the original argument list: # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] # ^ # (we are about to process arg(i)). # # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of # [arg0, ..., arg(i-1)] (any options and their arguments will have # been removed from largs). # # The while loop will usually consume 1 or more arguments per pass. # If it consumes 1 (eg. arg is an option that takes no arguments), # then after _process_arg() is done the situation is: # # largs = subset of [arg0, ..., arg(i)] # rargs = [arg(i+1), ..., arg(N-1)] # # If allow_interspersed_args is false, largs will always be # *empty* -- still a subset of [arg0, ..., arg(i-1)], but # not a very interesting subset! def _match_long_opt(self, opt, explicit_value, state): if opt not in self._long_opt: possibilities = [word for word in self._long_opt if word.startswith(opt)] raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) option = self._long_opt[opt] if option.takes_value: # At this point it's safe to modify rargs by injecting the # explicit value, because no exception is raised in this # branch. 
This means that the inserted value will be fully # consumed. if explicit_value is not None: state.rargs.insert(0, explicit_value) nargs = option.nargs if len(state.rargs) < nargs: _error_opt_args(nargs, opt) elif nargs == 1: value = state.rargs.pop(0) else: value = tuple(state.rargs[:nargs]) del state.rargs[:nargs] elif explicit_value is not None: raise BadOptionUsage(opt, "{} option does not take a value".format(opt)) else: value = None option.process(value, state) def _match_short_opt(self, arg, state): stop = False i = 1 prefix = arg[0] unknown_options = [] for ch in arg[1:]: opt = normalize_opt(prefix + ch, self.ctx) option = self._short_opt.get(opt) i += 1 if not option: if self.ignore_unknown_options: unknown_options.append(ch) continue raise NoSuchOption(opt, ctx=self.ctx) if option.takes_value: # Any characters left in arg? Pretend they're the # next arg, and stop consuming characters of arg. if i < len(arg): state.rargs.insert(0, arg[i:]) stop = True nargs = option.nargs if len(state.rargs) < nargs: _error_opt_args(nargs, opt) elif nargs == 1: value = state.rargs.pop(0) else: value = tuple(state.rargs[:nargs]) del state.rargs[:nargs] else: value = None option.process(value, state) if stop: break # If we got any unknown options we re-combinate the string of the # remaining options and re-attach the prefix, then report that # to the state as new larg. This way there is basic combinatorics # that can be achieved while still ignoring unknown arguments. if self.ignore_unknown_options and unknown_options: state.largs.append("{}{}".format(prefix, "".join(unknown_options))) def _process_opts(self, arg, state): explicit_value = None # Long option handling happens in two parts. The first part is # supporting explicitly attached values. In any case, we will try # to long match the option first. 
if "=" in arg: long_opt, explicit_value = arg.split("=", 1) else: long_opt = arg norm_long_opt = normalize_opt(long_opt, self.ctx) # At this point we will match the (assumed) long option through # the long option matching code. Note that this allows options # like "-foo" to be matched as long options. try: self._match_long_opt(norm_long_opt, explicit_value, state) except NoSuchOption: # At this point the long option matching failed, and we need # to try with short options. However there is a special rule # which says, that if we have a two character options prefix # (applies to "--foo" for instance), we do not dispatch to the # short option code and will instead raise the no option # error. if arg[:2] not in self._opt_prefixes: return self._match_short_opt(arg, state) if not self.ignore_unknown_options: raise state.largs.append(arg)
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/termui.py
import inspect import io import itertools import os import struct import sys from ._compat import DEFAULT_COLUMNS from ._compat import get_winterm_size from ._compat import isatty from ._compat import raw_input from ._compat import string_types from ._compat import strip_ansi from ._compat import text_type from ._compat import WIN from .exceptions import Abort from .exceptions import UsageError from .globals import resolve_color_default from .types import Choice from .types import convert_type from .types import Path from .utils import echo from .utils import LazyFile # The prompt functions to use. The doc tools currently override these # functions to customize how they work. visible_prompt_func = raw_input _ansi_colors = { "black": 30, "red": 31, "green": 32, "yellow": 33, "blue": 34, "magenta": 35, "cyan": 36, "white": 37, "reset": 39, "bright_black": 90, "bright_red": 91, "bright_green": 92, "bright_yellow": 93, "bright_blue": 94, "bright_magenta": 95, "bright_cyan": 96, "bright_white": 97, } _ansi_reset_all = "\033[0m" def hidden_prompt_func(prompt): import getpass return getpass.getpass(prompt) def _build_prompt( text, suffix, show_default=False, default=None, show_choices=True, type=None ): prompt = text if type is not None and show_choices and isinstance(type, Choice): prompt += " ({})".format(", ".join(map(str, type.choices))) if default is not None and show_default: prompt = "{} [{}]".format(prompt, _format_default(default)) return prompt + suffix def _format_default(default): if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): return default.name return default def prompt( text, default=None, hide_input=False, confirmation_prompt=False, type=None, value_proc=None, prompt_suffix=": ", show_default=True, err=False, show_choices=True, ): """Prompts a user for input. This is a convenience function that can be used to prompt a user for input later. 
If the user aborts the input by sending a interrupt signal, this function will catch it and raise a :exc:`Abort` exception. .. versionadded:: 7.0 Added the show_choices parameter. .. versionadded:: 6.0 Added unicode support for cmd.exe on Windows. .. versionadded:: 4.0 Added the `err` parameter. :param text: the text to show for the prompt. :param default: the default value to use if no input happens. If this is not given it will prompt until it's aborted. :param hide_input: if this is set to true then the input value will be hidden. :param confirmation_prompt: asks for confirmation for the value. :param type: the type to use to check the value against. :param value_proc: if this parameter is provided it's a function that is invoked instead of the type conversion to convert a value. :param prompt_suffix: a suffix that should be added to the prompt. :param show_default: shows or hides the default value in the prompt. :param err: if set to true the file defaults to ``stderr`` instead of ``stdout``, the same as with echo. :param show_choices: Show or hide choices if the passed type is a Choice. For example if type is a Choice of either day or week, show_choices is true and text is "Group by" then the prompt will be "Group by (day, week): ". """ result = None def prompt_func(text): f = hidden_prompt_func if hide_input else visible_prompt_func try: # Write the prompt separately so that we get nice # coloring through colorama on Windows echo(text, nl=False, err=err) return f("") except (KeyboardInterrupt, EOFError): # getpass doesn't print a newline if the user aborts input with ^C. # Allegedly this behavior is inherited from getpass(3). 
# A doc bug has been filed at https://bugs.python.org/issue24711 if hide_input: echo(None, err=err) raise Abort() if value_proc is None: value_proc = convert_type(type, default) prompt = _build_prompt( text, prompt_suffix, show_default, default, show_choices, type ) while 1: while 1: value = prompt_func(prompt) if value: break elif default is not None: if isinstance(value_proc, Path): # validate Path default value(exists, dir_okay etc.) value = default break return default try: result = value_proc(value) except UsageError as e: echo("Error: {}".format(e.message), err=err) # noqa: B306 continue if not confirmation_prompt: return result while 1: value2 = prompt_func("Repeat for confirmation: ") if value2: break if value == value2: return result echo("Error: the two entered values do not match", err=err) def confirm( text, default=False, abort=False, prompt_suffix=": ", show_default=True, err=False ): """Prompts for confirmation (yes/no question). If the user aborts the input by sending a interrupt signal this function will catch it and raise a :exc:`Abort` exception. .. versionadded:: 4.0 Added the `err` parameter. :param text: the question to ask. :param default: the default for the prompt. :param abort: if this is set to `True` a negative answer aborts the exception by raising :exc:`Abort`. :param prompt_suffix: a suffix that should be added to the prompt. :param show_default: shows or hides the default value in the prompt. :param err: if set to true the file defaults to ``stderr`` instead of ``stdout``, the same as with echo. 
""" prompt = _build_prompt( text, prompt_suffix, show_default, "Y/n" if default else "y/N" ) while 1: try: # Write the prompt separately so that we get nice # coloring through colorama on Windows echo(prompt, nl=False, err=err) value = visible_prompt_func("").lower().strip() except (KeyboardInterrupt, EOFError): raise Abort() if value in ("y", "yes"): rv = True elif value in ("n", "no"): rv = False elif value == "": rv = default else: echo("Error: invalid input", err=err) continue break if abort and not rv: raise Abort() return rv def get_terminal_size(): """Returns the current size of the terminal as tuple in the form ``(width, height)`` in columns and rows. """ # If shutil has get_terminal_size() (Python 3.3 and later) use that if sys.version_info >= (3, 3): import shutil shutil_get_terminal_size = getattr(shutil, "get_terminal_size", None) if shutil_get_terminal_size: sz = shutil_get_terminal_size() return sz.columns, sz.lines # We provide a sensible default for get_winterm_size() when being invoked # inside a subprocess. Without this, it would not provide a useful input. if get_winterm_size is not None: size = get_winterm_size() if size == (0, 0): return (79, 24) else: return size def ioctl_gwinsz(fd): try: import fcntl import termios cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234")) except Exception: return return cr cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2) if not cr: try: fd = os.open(os.ctermid(), os.O_RDONLY) try: cr = ioctl_gwinsz(fd) finally: os.close(fd) except Exception: pass if not cr or not cr[0] or not cr[1]: cr = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", DEFAULT_COLUMNS)) return int(cr[1]), int(cr[0]) def echo_via_pager(text_or_generator, color=None): """This function takes a text and shows it via an environment specific pager on stdout. .. versionchanged:: 3.0 Added the `color` flag. :param text_or_generator: the text to page, or alternatively, a generator emitting the text to page. 
:param color: controls if the pager supports ANSI colors or not. The default is autodetection. """ color = resolve_color_default(color) if inspect.isgeneratorfunction(text_or_generator): i = text_or_generator() elif isinstance(text_or_generator, string_types): i = [text_or_generator] else: i = iter(text_or_generator) # convert every element of i to a text type if necessary text_generator = (el if isinstance(el, string_types) else text_type(el) for el in i) from ._termui_impl import pager return pager(itertools.chain(text_generator, "\n"), color) def progressbar( iterable=None, length=None, label=None, show_eta=True, show_percent=None, show_pos=False, item_show_func=None, fill_char="#", empty_char="-", bar_template="%(label)s [%(bar)s] %(info)s", info_sep=" ", width=36, file=None, color=None, ): """This function creates an iterable context manager that can be used to iterate over something while showing a progress bar. It will either iterate over the `iterable` or `length` items (that are counted up). While iteration happens, this function will print a rendered progress bar to the given `file` (defaults to stdout) and will attempt to calculate remaining time and more. By default, this progress bar will not be rendered if the file is not a terminal. The context manager creates the progress bar. When the context manager is entered the progress bar is already created. With every iteration over the progress bar, the iterable passed to the bar is advanced and the bar is updated. When the context manager exits, a newline is printed and the progress bar is finalized on screen. Note: The progress bar is currently designed for use cases where the total progress can be expected to take at least several seconds. Because of this, the ProgressBar class object won't display progress that is considered too fast, and progress where the time between steps is less than a second. No printing must happen or the progress bar will be unintentionally destroyed. 
    Example usage::

        with progressbar(items) as bar:
            for item in bar:
                do_something_with(item)

    Alternatively, if no iterable is specified, one can manually
    update the progress bar through the `update()` method instead of
    directly iterating over the progress bar.  The update method accepts
    the number of steps to increment the bar with::

        with progressbar(length=chunks.total_bytes) as bar:
            for chunk in chunks:
                process_chunk(chunk)
                bar.update(chunks.bytes)

    .. versionadded:: 2.0

    .. versionadded:: 4.0
       Added the `color` parameter.  Added a `update` method to the
       progressbar object.

    :param iterable: an iterable to iterate over.  If not provided the length
                     is required.
    :param length: the number of items to iterate over.  By default the
                   progressbar will attempt to ask the iterator about its
                   length, which might or might not work.  If an iterable is
                   also provided this parameter can be used to override the
                   length.  If an iterable is not provided the progress bar
                   will iterate over a range of that length.
    :param label: the label to show next to the progress bar.
    :param show_eta: enables or disables the estimated time display.  This is
                     automatically disabled if the length cannot be
                     determined.
    :param show_percent: enables or disables the percentage display.  The
                         default is `True` if the iterable has a length or
                         `False` if not.
    :param show_pos: enables or disables the absolute position display.  The
                     default is `False`.
    :param item_show_func: a function called with the current item which
                           can return a string to show the current item
                           next to the progress bar.  Note that the current
                           item can be `None`!
    :param fill_char: the character to use to show the filled part of the
                      progress bar.
    :param empty_char: the character to use to show the non-filled part of
                       the progress bar.
    :param bar_template: the format string to use as template for the bar.
                         The parameters in it are ``label`` for the label,
                         ``bar`` for the progress bar and ``info`` for the
                         info section.
    :param info_sep: the separator between multiple info items (eta etc.)
    :param width: the width of the progress bar in characters, 0 means full
                  terminal width
    :param file: the file to write to.  If this is not a terminal then
                 only the label is printed.
    :param color: controls if the terminal supports ANSI colors or not.  The
                  default is autodetection.  This is only needed if ANSI
                  codes are included anywhere in the progress bar output
                  which is not the case by default.
    """
    # Imported lazily so that importing click does not pull in the
    # progress-bar implementation unless it is actually used.
    from ._termui_impl import ProgressBar

    color = resolve_color_default(color)
    return ProgressBar(
        iterable=iterable,
        length=length,
        show_eta=show_eta,
        show_percent=show_percent,
        show_pos=show_pos,
        item_show_func=item_show_func,
        fill_char=fill_char,
        empty_char=empty_char,
        bar_template=bar_template,
        info_sep=info_sep,
        file=file,
        label=label,
        width=width,
        color=color,
    )


def clear():
    """Clears the terminal screen.  This will have the effect of clearing
    the whole visible space of the terminal and moving the cursor to the
    top left.  This does not do anything if not connected to a terminal.

    .. versionadded:: 2.0
    """
    if not isatty(sys.stdout):
        return
    # If we're on Windows and we don't have colorama available, then we
    # clear the screen by shelling out.  Otherwise we can use an escape
    # sequence.
    if WIN:
        os.system("cls")
    else:
        sys.stdout.write("\033[2J\033[1;1H")


def style(
    text,
    fg=None,
    bg=None,
    bold=None,
    dim=None,
    underline=None,
    blink=None,
    reverse=None,
    reset=True,
):
    """Styles a text with ANSI styles and returns the new string.  By
    default the styling is self contained which means that at the end
    of the string a reset code is issued.  This can be prevented by
    passing ``reset=False``.

    Examples::

        click.echo(click.style('Hello World!', fg='green'))
        click.echo(click.style('ATTENTION!', blink=True))
        click.echo(click.style('Some things', reverse=True, fg='cyan'))

    Supported color names:

    * ``black`` (might be a gray)
    * ``red``
    * ``green``
    * ``yellow`` (might be an orange)
    * ``blue``
    * ``magenta``
    * ``cyan``
    * ``white`` (might be light gray)
    * ``bright_black``
    * ``bright_red``
    * ``bright_green``
    * ``bright_yellow``
    * ``bright_blue``
    * ``bright_magenta``
    * ``bright_cyan``
    * ``bright_white``
    * ``reset`` (reset the color code only)

    .. versionadded:: 2.0

    .. versionadded:: 7.0
       Added support for bright colors.

    :param text: the string to style with ansi codes.
    :param fg: if provided this will become the foreground color.
    :param bg: if provided this will become the background color.
    :param bold: if provided this will enable or disable bold mode.
    :param dim: if provided this will enable or disable dim mode.  This is
                badly supported.
    :param underline: if provided this will enable or disable underline.
    :param blink: if provided this will enable or disable blinking.
    :param reverse: if provided this will enable or disable inverse
                    rendering (foreground becomes background and the
                    other way round).
    :param reset: by default a reset-all code is added at the end of the
                  string which means that styles do not carry over.  This
                  can be disabled to compose styles.
    """
    # Collect the individual ANSI escape sequences, then the text, then
    # (optionally) the reset-all sequence, and join them at the end.
    bits = []
    if fg:
        try:
            bits.append("\033[{}m".format(_ansi_colors[fg]))
        except KeyError:
            raise TypeError("Unknown color '{}'".format(fg))
    if bg:
        try:
            # Background codes are the foreground codes offset by 10.
            bits.append("\033[{}m".format(_ansi_colors[bg] + 10))
        except KeyError:
            raise TypeError("Unknown color '{}'".format(bg))
    if bold is not None:
        bits.append("\033[{}m".format(1 if bold else 22))
    if dim is not None:
        bits.append("\033[{}m".format(2 if dim else 22))
    if underline is not None:
        bits.append("\033[{}m".format(4 if underline else 24))
    if blink is not None:
        bits.append("\033[{}m".format(5 if blink else 25))
    if reverse is not None:
        bits.append("\033[{}m".format(7 if reverse else 27))
    bits.append(text)
    if reset:
        bits.append(_ansi_reset_all)
    return "".join(bits)


def unstyle(text):
    """Removes ANSI styling information from a string.  Usually it's not
    necessary to use this function as Click's echo function will
    automatically remove styling if necessary.

    .. versionadded:: 2.0

    :param text: the text to remove style information from.
    """
    return strip_ansi(text)


def secho(message=None, file=None, nl=True, err=False, color=None, **styles):
    """This function combines :func:`echo` and :func:`style` into one
    call.  As such the following two calls are the same::

        click.secho('Hello World!', fg='green')
        click.echo(click.style('Hello World!', fg='green'))

    All keyword arguments are forwarded to the underlying functions
    depending on which one they go with.

    .. versionadded:: 2.0
    """
    if message is not None:
        message = style(message, **styles)
    return echo(message, file=file, nl=nl, err=err, color=color)


def edit(
    text=None, editor=None, env=None, require_save=True, extension=".txt", filename=None
):
    r"""Edits the given text in the defined editor.  If an editor is given
    (should be the full path to the executable but the regular operating
    system search path is used for finding the executable) it overrides
    the detected editor.  Optionally, some environment variables can be
    used.  If the editor is closed without changes, `None` is returned.  In
    case a file is edited directly the return value is always `None` and
    `require_save` and `extension` are ignored.

    If the editor cannot be opened a :exc:`UsageError` is raised.

    Note for Windows: to simplify cross-platform usage, the newlines are
    automatically converted from POSIX to Windows and vice versa.  As such,
    the message here will have ``\n`` as newline markers.

    :param text: the text to edit.
    :param editor: optionally the editor to use.  Defaults to automatic
                   detection.
    :param env: environment variables to forward to the editor.
    :param require_save: if this is true, then not saving in the editor
                         will make the return value become `None`.
    :param extension: the extension to tell the editor about.  This defaults
                      to `.txt` but changing this might change syntax
                      highlighting.
    :param filename: if provided it will edit this file instead of the
                     provided text contents.  It will not use a temporary
                     file as an indirection in that case.
    """
    from ._termui_impl import Editor

    editor = Editor(
        editor=editor, env=env, require_save=require_save, extension=extension
    )
    if filename is None:
        return editor.edit(text)
    editor.edit_file(filename)


def launch(url, wait=False, locate=False):
    """This function launches the given URL (or filename) in the default
    viewer application for this file type.  If this is an executable, it
    might launch the executable in a new session.  The return value is
    the exit code of the launched application.  Usually, ``0`` indicates
    success.

    Examples::

        click.launch('https://click.palletsprojects.com/')
        click.launch('/my/downloaded/file', locate=True)

    .. versionadded:: 2.0

    :param url: URL or filename of the thing to launch.
    :param wait: waits for the program to stop.
    :param locate: if this is set to `True` then instead of launching the
                   application associated with the URL it will attempt to
                   launch a file manager with the file located.  This
                   might have weird effects if the URL does not point to
                   the filesystem.
""" from ._termui_impl import open_url return open_url(url, wait=wait, locate=locate) # If this is provided, getchar() calls into this instead. This is used # for unittesting purposes. _getchar = None def getchar(echo=False): """Fetches a single character from the terminal and returns it. This will always return a unicode character and under certain rare circumstances this might return more than one character. The situations which more than one character is returned is when for whatever reason multiple characters end up in the terminal buffer or standard input was not actually a terminal. Note that this will always read from the terminal, even if something is piped into the standard input. Note for Windows: in rare cases when typing non-ASCII characters, this function might wait for a second character and then return both at once. This is because certain Unicode characters look like special-key markers. .. versionadded:: 2.0 :param echo: if set to `True`, the character read will also show up on the terminal. The default is to not show it. """ f = _getchar if f is None: from ._termui_impl import getchar as f return f(echo) def raw_terminal(): from ._termui_impl import raw_terminal as f return f() def pause(info="Press any key to continue ...", err=False): """This command stops execution and waits for the user to press any key to continue. This is similar to the Windows batch "pause" command. If the program is not run through a terminal, this command will instead do nothing. .. versionadded:: 2.0 .. versionadded:: 4.0 Added the `err` parameter. :param info: the info string to print before pausing. :param err: if set to message goes to ``stderr`` instead of ``stdout``, the same as with echo. """ if not isatty(sys.stdin) or not isatty(sys.stdout): return try: if info: echo(info, nl=False, err=err) try: getchar() except (KeyboardInterrupt, EOFError): pass finally: if info: echo(err=err)
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/utils.py
import os
import sys

from ._compat import _default_text_stderr
from ._compat import _default_text_stdout
from ._compat import auto_wrap_for_ansi
from ._compat import binary_streams
from ._compat import filename_to_ui
from ._compat import get_filesystem_encoding
from ._compat import get_streerror
from ._compat import is_bytes
from ._compat import open_stream
from ._compat import PY2
from ._compat import should_strip_ansi
from ._compat import string_types
from ._compat import strip_ansi
from ._compat import text_streams
from ._compat import text_type
from ._compat import WIN
from .globals import resolve_color_default

if not PY2:
    from ._compat import _find_binary_writer
elif WIN:
    from ._winconsole import _get_windows_argv
    from ._winconsole import _hash_py_argv
    from ._winconsole import _initial_argv_hash

# Message types that echo() passes through without coercing to text.
echo_native_types = string_types + (bytes, bytearray)


def _posixify(name):
    # "Foo Bar" -> "foo-bar"; used to build POSIX-style config dir names.
    return "-".join(name.split()).lower()


def safecall(func):
    """Wraps a function so that it swallows exceptions."""

    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:
            pass

    return wrapper


def make_str(value):
    """Converts a value into a valid string."""
    if isinstance(value, bytes):
        try:
            return value.decode(get_filesystem_encoding())
        except UnicodeError:
            # Fall back to UTF-8 with replacement so this never raises.
            return value.decode("utf-8", "replace")
    return text_type(value)


def make_default_short_help(help, max_length=45):
    """Return a condensed version of help string."""
    words = help.split()
    total_length = 0
    result = []
    done = False

    for word in words:
        # A word ending in "." closes the first sentence; stop after it.
        if word[-1:] == ".":
            done = True
        # Account for the joining space when this is not the first word.
        new_length = 1 + len(word) if result else len(word)
        if total_length + new_length > max_length:
            result.append("...")
            done = True
        else:
            if result:
                result.append(" ")
            result.append(word)
        if done:
            break
        total_length += new_length

    return "".join(result)


class LazyFile(object):
    """A lazy file works like a regular file but it does not fully open
    the file but it does perform some basic checks early to see if the
    filename parameter does make sense.  This is useful for safely opening
    files for writing.
    """

    def __init__(
        self, filename, mode="r", encoding=None, errors="strict", atomic=False
    ):
        self.name = filename
        self.mode = mode
        self.encoding = encoding
        self.errors = errors
        self.atomic = atomic

        if filename == "-":
            # "-" means stdin/stdout; those are opened eagerly.
            self._f, self.should_close = open_stream(filename, mode, encoding, errors)
        else:
            if "r" in mode:
                # Open and close the file in case we're opening it for
                # reading so that we can catch at least some errors in
                # some cases early.
                open(filename, mode).close()
            self._f = None
            self.should_close = True

    def __getattr__(self, name):
        # Any other attribute access forces the real file open.
        return getattr(self.open(), name)

    def __repr__(self):
        if self._f is not None:
            return repr(self._f)
        return "<unopened file '{}' {}>".format(self.name, self.mode)

    def open(self):
        """Opens the file if it's not yet open.  This call might fail with
        a :exc:`FileError`.  Not handling this error will produce an error
        that Click shows.
        """
        if self._f is not None:
            return self._f
        try:
            rv, self.should_close = open_stream(
                self.name, self.mode, self.encoding, self.errors, atomic=self.atomic
            )
        except (IOError, OSError) as e:  # noqa: E402
            from .exceptions import FileError

            raise FileError(self.name, hint=get_streerror(e))
        self._f = rv
        return rv

    def close(self):
        """Closes the underlying file, no matter what."""
        if self._f is not None:
            self._f.close()

    def close_intelligently(self):
        """This function only closes the file if it was opened by the lazy
        file wrapper.  For instance this will never close stdin.
        """
        if self.should_close:
            self.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.close_intelligently()

    def __iter__(self):
        self.open()
        return iter(self._f)


class KeepOpenFile(object):
    """Proxy around a file object that never closes it.  Used to hand out
    stdin/stdout as context managers without accidentally closing them.
    """

    def __init__(self, file):
        self._file = file

    def __getattr__(self, name):
        return getattr(self._file, name)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # Deliberately a no-op: the wrapped stream stays open.
        pass

    def __repr__(self):
        return repr(self._file)

    def __iter__(self):
        return iter(self._file)


def echo(message=None, file=None, nl=True, err=False, color=None):
    """Prints a message plus a newline to the given file or stdout.  On
    first sight, this looks like the print function, but it has improved
    support for handling Unicode and binary data that does not fail no
    matter how badly configured the system is.

    Primarily it means that you can print binary data as well as Unicode
    data on both 2.x and 3.x to the given file in the most appropriate way
    possible.  This is a very carefree function in that it will try its
    best to not fail.  As of Click 6.0 this includes support for unicode
    output on the Windows console.

    In addition to that, if `colorama`_ is installed, the echo function will
    also support clever handling of ANSI codes.  Essentially it will then
    do the following:

    -   add transparent handling of ANSI color codes on Windows.
    -   hide ANSI codes automatically if the destination file is not a
        terminal.

    .. _colorama: https://pypi.org/project/colorama/

    .. versionchanged:: 6.0
       As of Click 6.0 the echo function will properly support unicode
       output on the windows console.  Not that click does not modify
       the interpreter in any way which means that `sys.stdout` or the
       print statement or function will still not provide unicode support.

    .. versionchanged:: 2.0
       Starting with version 2.0 of Click, the echo function will work
       with colorama if it's installed.

    .. versionadded:: 3.0
       The `err` parameter was added.

    .. versionchanged:: 4.0
       Added the `color` flag.

    :param message: the message to print
    :param file: the file to write to (defaults to ``stdout``)
    :param err: if set to true the file defaults to ``stderr`` instead of
                ``stdout``.  This is faster and easier than calling
                :func:`get_text_stderr` yourself.
    :param nl: if set to `True` (the default) a newline is printed afterwards.
    :param color: controls if the terminal supports ANSI colors or not.  The
                  default is autodetection.
    """
    if file is None:
        if err:
            file = _default_text_stderr()
        else:
            file = _default_text_stdout()

    # Convert non bytes/text into the native string type.
    if message is not None and not isinstance(message, echo_native_types):
        message = text_type(message)

    if nl:
        message = message or u""
        if isinstance(message, text_type):
            message += u"\n"
        else:
            message += b"\n"

    # If there is a message, and we're in Python 3, and the value looks
    # like bytes, we manually need to find the binary stream and write the
    # message in there.  This is done separately so that most stream
    # types will work as you would expect.  Eg: you can write to StringIO
    # for other cases.
    if message and not PY2 and is_bytes(message):
        binary_file = _find_binary_writer(file)
        if binary_file is not None:
            file.flush()
            binary_file.write(message)
            binary_file.flush()
            return

    # ANSI-style support.  If there is no message or we are dealing with
    # bytes nothing is happening.  If we are connected to a file we want
    # to strip colors.  If we are on windows we either wrap the stream
    # to strip the color or we use the colorama support to translate the
    # ansi codes to API calls.
    if message and not is_bytes(message):
        color = resolve_color_default(color)
        if should_strip_ansi(file, color):
            message = strip_ansi(message)
        elif WIN:
            if auto_wrap_for_ansi is not None:
                file = auto_wrap_for_ansi(file)
            elif not color:
                message = strip_ansi(message)

    if message:
        file.write(message)
    file.flush()


def get_binary_stream(name):
    """Returns a system stream for byte processing.  This essentially
    returns the stream from the sys module with the given name but it
    solves some compatibility issues between different Python versions.
    Primarily this function is necessary for getting binary streams on
    Python 3.

    :param name: the name of the stream to open.  Valid names are
                 ``'stdin'``, ``'stdout'`` and ``'stderr'``
    """
    opener = binary_streams.get(name)
    if opener is None:
        raise TypeError("Unknown standard stream '{}'".format(name))
    return opener()


def get_text_stream(name, encoding=None, errors="strict"):
    """Returns a system stream for text processing.  This usually returns
    a wrapped stream around a binary stream returned from
    :func:`get_binary_stream` but it also can take shortcuts on Python 3
    for already correctly configured streams.

    :param name: the name of the stream to open.  Valid names are
                 ``'stdin'``, ``'stdout'`` and ``'stderr'``
    :param encoding: overrides the detected default encoding.
    :param errors: overrides the default error mode.
    """
    opener = text_streams.get(name)
    if opener is None:
        raise TypeError("Unknown standard stream '{}'".format(name))
    return opener(encoding, errors)


def open_file(
    filename, mode="r", encoding=None, errors="strict", lazy=False, atomic=False
):
    """This is similar to how the :class:`File` works but for manual
    usage.  Files are opened non lazy by default.  This can open regular
    files as well as stdin/stdout if ``'-'`` is passed.

    If stdin/stdout is returned the stream is wrapped so that the context
    manager will not close the stream accidentally.  This makes it possible
    to always use the function like this without having to worry to
    accidentally close a standard stream::

        with open_file(filename) as f:
            ...

    .. versionadded:: 3.0

    :param filename: the name of the file to open (or ``'-'`` for stdin/stdout).
    :param mode: the mode in which to open the file.
    :param encoding: the encoding to use.
    :param errors: the error handling for this file.
    :param lazy: can be flipped to true to open the file lazily.
    :param atomic: in atomic mode writes go into a temporary file and it's
                   moved on close.
    """
    if lazy:
        return LazyFile(filename, mode, encoding, errors, atomic=atomic)
    f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic)
    if not should_close:
        # stdin/stdout: wrap so a context manager cannot close it.
        f = KeepOpenFile(f)
    return f


def get_os_args():
    """This returns the argument part of sys.argv in the most appropriate
    form for processing.  What this means is that this return value is in
    a format that works for Click to process but does not necessarily
    correspond well to what's actually standard for the interpreter.

    On most environments the return value is ``sys.argv[1:]`` unchanged.
    However if you are on Windows and running Python 2 the return value
    will actually be a list of unicode strings instead because the
    default behavior on that platform otherwise will not be able to
    carry all possible values that sys.argv can have.

    .. versionadded:: 6.0
    """
    # We can only extract the unicode argv if sys.argv has not been
    # changed since the startup of the application.
    if PY2 and WIN and _initial_argv_hash == _hash_py_argv():
        return _get_windows_argv()
    return sys.argv[1:]


def format_filename(filename, shorten=False):
    """Formats a filename for user display.  The main purpose of this
    function is to ensure that the filename can be displayed at all.  This
    will decode the filename to unicode if necessary in a way that it will
    not fail.  Optionally, it can shorten the filename to not include the
    full path to the filename.

    :param filename: formats a filename for UI display.  This will also convert
                     the filename into unicode without failing.
    :param shorten: this optionally shortens the filename to strip of the
                    path that leads up to it.
    """
    if shorten:
        filename = os.path.basename(filename)
    return filename_to_ui(filename)


def get_app_dir(app_name, roaming=True, force_posix=False):
    r"""Returns the config folder for the application.  The default behavior
    is to return whatever is most appropriate for the operating system.

    To give you an idea, for an app called ``"Foo Bar"``, something like
    the following folders could be returned:

    Mac OS X:
      ``~/Library/Application Support/Foo Bar``
    Mac OS X (POSIX):
      ``~/.foo-bar``
    Unix:
      ``~/.config/foo-bar``
    Unix (POSIX):
      ``~/.foo-bar``
    Win XP (roaming):
      ``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo Bar``
    Win XP (not roaming):
      ``C:\Documents and Settings\<user>\Application Data\Foo Bar``
    Win 7 (roaming):
      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
    Win 7 (not roaming):
      ``C:\Users\<user>\AppData\Local\Foo Bar``

    .. versionadded:: 2.0

    :param app_name: the application name.  This should be properly capitalized
                     and can contain whitespace.
    :param roaming: controls if the folder should be roaming or not on Windows.
                    Has no affect otherwise.
    :param force_posix: if this is set to `True` then on any POSIX system the
                        folder will be stored in the home folder with a leading
                        dot instead of the XDG config home or darwin's
                        application support folder.
    """
    if WIN:
        key = "APPDATA" if roaming else "LOCALAPPDATA"
        folder = os.environ.get(key)
        if folder is None:
            # Fall back to the home directory if the env var is unset.
            folder = os.path.expanduser("~")
        return os.path.join(folder, app_name)
    if force_posix:
        return os.path.join(os.path.expanduser("~/.{}".format(_posixify(app_name))))
    if sys.platform == "darwin":
        return os.path.join(
            os.path.expanduser("~/Library/Application Support"), app_name
        )
    return os.path.join(
        os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
        _posixify(app_name),
    )


class PacifyFlushWrapper(object):
    """This wrapper is used to catch and suppress BrokenPipeErrors resulting
    from ``.flush()`` being called on broken pipe during the shutdown/final-GC
    of the Python interpreter.  Notably ``.flush()`` is always called on
    ``sys.stdout`` and ``sys.stderr``.  So as to have minimal impact on any
    other cleanup code, and the case where the underlying file is not a broken
    pipe, all calls and attributes are proxied.
    """

    def __init__(self, wrapped):
        self.wrapped = wrapped

    def flush(self):
        try:
            self.wrapped.flush()
        except IOError as e:
            import errno

            # Only EPIPE (broken pipe) is pacified; anything else re-raises.
            if e.errno != errno.EPIPE:
                raise

    def __getattr__(self, attr):
        return getattr(self.wrapped, attr)
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/_bashcomplete.py
import copy
import os
import re

from .core import Argument
from .core import MultiCommand
from .core import Option
from .parser import split_arg_string
from .types import Choice
from .utils import echo

try:
    from collections import abc
except ImportError:
    import collections as abc

# Bash splits words on "=", which shows up when completing "--opt=val".
WORDBREAK = "="

# Note, only BASH version 4.4 and later have the nosort option.
COMPLETION_SCRIPT_BASH = """
%(complete_func)s() {
    local IFS=$'\n'
    COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\
                   COMP_CWORD=$COMP_CWORD \\
                   %(autocomplete_var)s=complete $1 ) )
    return 0
}

%(complete_func)setup() {
    local COMPLETION_OPTIONS=""
    local BASH_VERSION_ARR=(${BASH_VERSION//./ })
    # Only BASH version 4.4 and later have the nosort option.
    if [ ${BASH_VERSION_ARR[0]} -gt 4 ] || ([ ${BASH_VERSION_ARR[0]} -eq 4 ] \
&& [ ${BASH_VERSION_ARR[1]} -ge 4 ]); then
        COMPLETION_OPTIONS="-o nosort"
    fi

    complete $COMPLETION_OPTIONS -F %(complete_func)s %(script_names)s
}

%(complete_func)setup
"""

COMPLETION_SCRIPT_ZSH = """
#compdef %(script_names)s

%(complete_func)s() {
    local -a completions
    local -a completions_with_descriptions
    local -a response
    (( ! $+commands[%(script_names)s] )) && return 1

    response=("${(@f)$( env COMP_WORDS=\"${words[*]}\" \\
                        COMP_CWORD=$((CURRENT-1)) \\
                        %(autocomplete_var)s=\"complete_zsh\" \\
                        %(script_names)s )}")

    for key descr in ${(kv)response}; do
      if [[ "$descr" == "_" ]]; then
          completions+=("$key")
      else
          completions_with_descriptions+=("$key":"$descr")
      fi
    done

    if [ -n "$completions_with_descriptions" ]; then
        _describe -V unsorted completions_with_descriptions -U
    fi

    if [ -n "$completions" ]; then
        compadd -U -V unsorted -a completions
    fi
    compstate[insert]="automenu"
}

compdef %(complete_func)s %(script_names)s
"""

COMPLETION_SCRIPT_FISH = (
    "complete --no-files --command %(script_names)s --arguments"
    ' "(env %(autocomplete_var)s=complete_fish'
    " COMP_WORDS=(commandline -cp) COMP_CWORD=(commandline -t)"
    ' %(script_names)s)"'
)

_completion_scripts = {
    "bash": COMPLETION_SCRIPT_BASH,
    "zsh": COMPLETION_SCRIPT_ZSH,
    "fish": COMPLETION_SCRIPT_FISH,
}

# Characters not valid in a shell function identifier.
_invalid_ident_char_re = re.compile(r"[^a-zA-Z0-9_]")


def get_completion_script(prog_name, complete_var, shell):
    """Render the completion script template for *shell* with the program
    name and the environment variable that triggers completion.
    """
    cf_name = _invalid_ident_char_re.sub("", prog_name.replace("-", "_"))
    script = _completion_scripts.get(shell, COMPLETION_SCRIPT_BASH)
    return (
        script
        % {
            "complete_func": "_{}_completion".format(cf_name),
            "script_names": prog_name,
            "autocomplete_var": complete_var,
        }
    ).strip() + ";"


def resolve_ctx(cli, prog_name, args):
    """Parse into a hierarchy of contexts. Contexts are connected
    through the parent variable.

    :param cli: command definition
    :param prog_name: the program that is running
    :param args: full list of args
    :return: the final context/command parsed
    """
    ctx = cli.make_context(prog_name, args, resilient_parsing=True)
    args = ctx.protected_args + ctx.args
    while args:
        if isinstance(ctx.command, MultiCommand):
            if not ctx.command.chain:
                cmd_name, cmd, args = ctx.command.resolve_command(ctx, args)
                if cmd is None:
                    return ctx
                ctx = cmd.make_context(
                    cmd_name, args, parent=ctx, resilient_parsing=True
                )
                args = ctx.protected_args + ctx.args
            else:
                # Walk chained subcommand contexts saving the last one.
                while args:
                    cmd_name, cmd, args = ctx.command.resolve_command(ctx, args)
                    if cmd is None:
                        return ctx
                    sub_ctx = cmd.make_context(
                        cmd_name,
                        args,
                        parent=ctx,
                        allow_extra_args=True,
                        allow_interspersed_args=False,
                        resilient_parsing=True,
                    )
                    args = sub_ctx.args
                ctx = sub_ctx
                args = sub_ctx.protected_args + sub_ctx.args
        else:
            break
    return ctx


def start_of_option(param_str):
    """
    :param param_str: param_str to check
    :return: whether or not this is the start of an option declaration
        (i.e. starts "-" or "--")
    """
    return param_str and param_str[:1] == "-"


def is_incomplete_option(all_args, cmd_param):
    """
    :param all_args: the full original list of args supplied
    :param cmd_param: the current command paramter
    :return: whether or not the last option declaration (i.e. starts
        "-" or "--") is incomplete and corresponds to this cmd_param. In
        other words whether this cmd_param option can still accept
        values
    """
    if not isinstance(cmd_param, Option):
        return False
    if cmd_param.is_flag:
        return False
    last_option = None
    # Scan the most recent nargs words (ignoring "=" word breaks) for an
    # option flag that would still be consuming values.
    for index, arg_str in enumerate(
        reversed([arg for arg in all_args if arg != WORDBREAK])
    ):
        if index + 1 > cmd_param.nargs:
            break
        if start_of_option(arg_str):
            last_option = arg_str

    return True if last_option and last_option in cmd_param.opts else False


def is_incomplete_argument(current_params, cmd_param):
    """
    :param current_params: the current params and values for this
        argument as already entered
    :param cmd_param: the current command parameter
    :return: whether or not the last argument is incomplete and
        corresponds to this cmd_param. In other words whether or not the
        this cmd_param argument can still accept values
    """
    if not isinstance(cmd_param, Argument):
        return False
    current_param_values = current_params[cmd_param.name]
    if current_param_values is None:
        return True
    if cmd_param.nargs == -1:
        # Variadic arguments accept values forever.
        return True
    if (
        isinstance(current_param_values, abc.Iterable)
        and cmd_param.nargs > 1
        and len(current_param_values) < cmd_param.nargs
    ):
        return True
    return False


def get_user_autocompletions(ctx, args, incomplete, cmd_param):
    """
    :param ctx: context associated with the parsed command
    :param args: full list of args
    :param incomplete: the incomplete text to autocomplete
    :param cmd_param: command definition
    :return: all the possible user-specified completions for the param
    """
    results = []
    if isinstance(cmd_param.type, Choice):
        # Choices don't support descriptions.
        results = [
            (c, None) for c in cmd_param.type.choices if str(c).startswith(incomplete)
        ]
    elif cmd_param.autocompletion is not None:
        dynamic_completions = cmd_param.autocompletion(
            ctx=ctx, args=args, incomplete=incomplete
        )
        # Normalize plain strings to (value, description) pairs.
        results = [
            c if isinstance(c, tuple) else (c, None) for c in dynamic_completions
        ]
    return results


def get_visible_commands_starting_with(ctx, starts_with):
    """
    :param ctx: context associated with the parsed command
    :starts_with: string that visible commands must start with.
    :return: all visible (not hidden) commands that start with starts_with.
    """
    for c in ctx.command.list_commands(ctx):
        if c.startswith(starts_with):
            command = ctx.command.get_command(ctx, c)
            if not command.hidden:
                yield command


def add_subcommand_completions(ctx, incomplete, completions_out):
    """Append subcommand name completions for *ctx* (and, for chained
    multi-commands, its ancestors) to *completions_out* in place.
    """
    # Add subcommand completions.
    if isinstance(ctx.command, MultiCommand):
        completions_out.extend(
            [
                (c.name, c.get_short_help_str())
                for c in get_visible_commands_starting_with(ctx, incomplete)
            ]
        )

    # Walk up the context list and add any other completion
    # possibilities from chained commands
    while ctx.parent is not None:
        ctx = ctx.parent
        if isinstance(ctx.command, MultiCommand) and ctx.command.chain:
            remaining_commands = [
                c
                for c in get_visible_commands_starting_with(ctx, incomplete)
                if c.name not in ctx.protected_args
            ]
            completions_out.extend(
                [(c.name, c.get_short_help_str()) for c in remaining_commands]
            )


def get_choices(cli, prog_name, args, incomplete):
    """
    :param cli: command definition
    :param prog_name: the program that is running
    :param args: full list of args
    :param incomplete: the incomplete text to autocomplete
    :return: all the possible completions for the incomplete
    """
    all_args = copy.deepcopy(args)

    ctx = resolve_ctx(cli, prog_name, args)
    if ctx is None:
        return []

    has_double_dash = "--" in all_args

    # In newer versions of bash long opts with '='s are partitioned, but
    # it's easier to parse without the '='
    if start_of_option(incomplete) and WORDBREAK in incomplete:
        partition_incomplete = incomplete.partition(WORDBREAK)
        all_args.append(partition_incomplete[0])
        incomplete = partition_incomplete[2]
    elif incomplete == WORDBREAK:
        incomplete = ""

    completions = []
    if not has_double_dash and start_of_option(incomplete):
        # completions for partial options
        for param in ctx.command.params:
            if isinstance(param, Option) and not param.hidden:
                param_opts = [
                    param_opt
                    for param_opt in param.opts + param.secondary_opts
                    if param_opt not in all_args or param.multiple
                ]
                completions.extend(
                    [(o, param.help) for o in param_opts if o.startswith(incomplete)]
                )
        return completions
    # completion for option values from user supplied values
    for param in ctx.command.params:
        if is_incomplete_option(all_args, param):
            return get_user_autocompletions(ctx, all_args, incomplete, param)
    # completion for argument values from user supplied values
    for param in ctx.command.params:
        if is_incomplete_argument(ctx.params, param):
            return get_user_autocompletions(ctx, all_args, incomplete, param)

    add_subcommand_completions(ctx, incomplete, completions)
    # Sort before returning so that proper ordering can be enforced in custom types.
    return sorted(completions)


def do_complete(cli, prog_name, include_descriptions):
    """Emit completions for bash/zsh based on COMP_WORDS/COMP_CWORD from
    the environment.  Returns `True` so the caller exits afterwards.
    """
    cwords = split_arg_string(os.environ["COMP_WORDS"])
    cword = int(os.environ["COMP_CWORD"])
    args = cwords[1:cword]
    try:
        incomplete = cwords[cword]
    except IndexError:
        incomplete = ""

    for item in get_choices(cli, prog_name, args, incomplete):
        echo(item[0])
        if include_descriptions:
            # ZSH has trouble dealing with empty array parameters when
            # returned from commands, use '_' to indicate no description
            # is present.
            echo(item[1] if item[1] else "_")

    return True


def do_complete_fish(cli, prog_name):
    """Emit completions for fish; the incomplete token arrives directly in
    COMP_CWORD rather than as an index.
    """
    cwords = split_arg_string(os.environ["COMP_WORDS"])
    incomplete = os.environ["COMP_CWORD"]
    args = cwords[1:]

    for item in get_choices(cli, prog_name, args, incomplete):
        if item[1]:
            echo("{arg}\t{desc}".format(arg=item[0], desc=item[1]))
        else:
            echo(item[0])

    return True


def bashcomplete(cli, prog_name, complete_var, complete_instr):
    """Dispatch a completion instruction ("source[_shell]" prints the
    activation script, "complete[_shell]" emits completions).  Returns
    `True` when the instruction was handled.
    """
    if "_" in complete_instr:
        command, shell = complete_instr.split("_", 1)
    else:
        command = complete_instr
        shell = "bash"

    if command == "source":
        echo(get_completion_script(prog_name, complete_var, shell))
        return True
    elif command == "complete":
        if shell == "fish":
            return do_complete_fish(cli, prog_name)
        elif shell in {"bash", "zsh"}:
            return do_complete(cli, prog_name, shell == "zsh")

    return False
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/exceptions.py
from ._compat import filename_to_ui from ._compat import get_text_stderr from ._compat import PY2 from .utils import echo def _join_param_hints(param_hint): if isinstance(param_hint, (tuple, list)): return " / ".join(repr(x) for x in param_hint) return param_hint class ClickException(Exception): """An exception that Click can handle and show to the user.""" #: The exit code for this exception exit_code = 1 def __init__(self, message): ctor_msg = message if PY2: if ctor_msg is not None: ctor_msg = ctor_msg.encode("utf-8") Exception.__init__(self, ctor_msg) self.message = message def format_message(self): return self.message def __str__(self): return self.message if PY2: __unicode__ = __str__ def __str__(self): return self.message.encode("utf-8") def show(self, file=None): if file is None: file = get_text_stderr() echo("Error: {}".format(self.format_message()), file=file) class UsageError(ClickException): """An internal exception that signals a usage error. This typically aborts any further handling. :param message: the error message to display. :param ctx: optionally the context that caused this error. Click will fill in the context automatically in some situations. """ exit_code = 2 def __init__(self, message, ctx=None): ClickException.__init__(self, message) self.ctx = ctx self.cmd = self.ctx.command if self.ctx else None def show(self, file=None): if file is None: file = get_text_stderr() color = None hint = "" if self.cmd is not None and self.cmd.get_help_option(self.ctx) is not None: hint = "Try '{} {}' for help.\n".format( self.ctx.command_path, self.ctx.help_option_names[0] ) if self.ctx is not None: color = self.ctx.color echo("{}\n{}".format(self.ctx.get_usage(), hint), file=file, color=color) echo("Error: {}".format(self.format_message()), file=file, color=color) class BadParameter(UsageError): """An exception that formats out a standardized error message for a bad parameter. 
This is useful when thrown from a callback or type as Click will attach contextual information to it (for instance, which parameter it is). .. versionadded:: 2.0 :param param: the parameter object that caused this error. This can be left out, and Click will attach this info itself if possible. :param param_hint: a string that shows up as parameter name. This can be used as alternative to `param` in cases where custom validation should happen. If it is a string it's used as such, if it's a list then each item is quoted and separated. """ def __init__(self, message, ctx=None, param=None, param_hint=None): UsageError.__init__(self, message, ctx) self.param = param self.param_hint = param_hint def format_message(self): if self.param_hint is not None: param_hint = self.param_hint elif self.param is not None: param_hint = self.param.get_error_hint(self.ctx) else: return "Invalid value: {}".format(self.message) param_hint = _join_param_hints(param_hint) return "Invalid value for {}: {}".format(param_hint, self.message) class MissingParameter(BadParameter): """Raised if click required an option or argument but it was not provided when invoking the script. .. versionadded:: 4.0 :param param_type: a string that indicates the type of the parameter. The default is to inherit the parameter type from the given `param`. Valid values are ``'parameter'``, ``'option'`` or ``'argument'``. 
""" def __init__( self, message=None, ctx=None, param=None, param_hint=None, param_type=None ): BadParameter.__init__(self, message, ctx, param, param_hint) self.param_type = param_type def format_message(self): if self.param_hint is not None: param_hint = self.param_hint elif self.param is not None: param_hint = self.param.get_error_hint(self.ctx) else: param_hint = None param_hint = _join_param_hints(param_hint) param_type = self.param_type if param_type is None and self.param is not None: param_type = self.param.param_type_name msg = self.message if self.param is not None: msg_extra = self.param.type.get_missing_message(self.param) if msg_extra: if msg: msg += ". {}".format(msg_extra) else: msg = msg_extra return "Missing {}{}{}{}".format( param_type, " {}".format(param_hint) if param_hint else "", ". " if msg else ".", msg or "", ) def __str__(self): if self.message is None: param_name = self.param.name if self.param else None return "missing parameter: {}".format(param_name) else: return self.message if PY2: __unicode__ = __str__ def __str__(self): return self.__unicode__().encode("utf-8") class NoSuchOption(UsageError): """Raised if click attempted to handle an option that does not exist. .. versionadded:: 4.0 """ def __init__(self, option_name, message=None, possibilities=None, ctx=None): if message is None: message = "no such option: {}".format(option_name) UsageError.__init__(self, message, ctx) self.option_name = option_name self.possibilities = possibilities def format_message(self): bits = [self.message] if self.possibilities: if len(self.possibilities) == 1: bits.append("Did you mean {}?".format(self.possibilities[0])) else: possibilities = sorted(self.possibilities) bits.append("(Possible options: {})".format(", ".join(possibilities))) return " ".join(bits) class BadOptionUsage(UsageError): """Raised if an option is generally supplied but the use of the option was incorrect. 
This is for instance raised if the number of arguments for an option is not correct. .. versionadded:: 4.0 :param option_name: the name of the option being used incorrectly. """ def __init__(self, option_name, message, ctx=None): UsageError.__init__(self, message, ctx) self.option_name = option_name class BadArgumentUsage(UsageError): """Raised if an argument is generally supplied but the use of the argument was incorrect. This is for instance raised if the number of values for an argument is not correct. .. versionadded:: 6.0 """ def __init__(self, message, ctx=None): UsageError.__init__(self, message, ctx) class FileError(ClickException): """Raised if a file cannot be opened.""" def __init__(self, filename, hint=None): ui_filename = filename_to_ui(filename) if hint is None: hint = "unknown error" ClickException.__init__(self, hint) self.ui_filename = ui_filename self.filename = filename def format_message(self): return "Could not open file {}: {}".format(self.ui_filename, self.message) class Abort(RuntimeError): """An internal signalling exception that signals Click to abort.""" class Exit(RuntimeError): """An exception that indicates that the application should exit with some status code. :param code: the status code to exit with. """ __slots__ = ("exit_code",) def __init__(self, code=0): self.exit_code = code
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/_compat.py
# flake8: noqa import codecs import io import os import re import sys from weakref import WeakKeyDictionary PY2 = sys.version_info[0] == 2 CYGWIN = sys.platform.startswith("cygwin") MSYS2 = sys.platform.startswith("win") and ("GCC" in sys.version) # Determine local App Engine environment, per Google's own suggestion APP_ENGINE = "APPENGINE_RUNTIME" in os.environ and "Development/" in os.environ.get( "SERVER_SOFTWARE", "" ) WIN = sys.platform.startswith("win") and not APP_ENGINE and not MSYS2 DEFAULT_COLUMNS = 80 _ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") def get_filesystem_encoding(): return sys.getfilesystemencoding() or sys.getdefaultencoding() def _make_text_stream( stream, encoding, errors, force_readable=False, force_writable=False ): if encoding is None: encoding = get_best_encoding(stream) if errors is None: errors = "replace" return _NonClosingTextIOWrapper( stream, encoding, errors, line_buffering=True, force_readable=force_readable, force_writable=force_writable, ) def is_ascii_encoding(encoding): """Checks if a given encoding is ascii.""" try: return codecs.lookup(encoding).name == "ascii" except LookupError: return False def get_best_encoding(stream): """Returns the default stream encoding if not found.""" rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() if is_ascii_encoding(rv): return "utf-8" return rv class _NonClosingTextIOWrapper(io.TextIOWrapper): def __init__( self, stream, encoding, errors, force_readable=False, force_writable=False, **extra ): self._stream = stream = _FixupStream(stream, force_readable, force_writable) io.TextIOWrapper.__init__(self, stream, encoding, errors, **extra) # The io module is a place where the Python 3 text behavior # was forced upon Python 2, so we need to unbreak # it to look like Python 2. 
if PY2: def write(self, x): if isinstance(x, str) or is_bytes(x): try: self.flush() except Exception: pass return self.buffer.write(str(x)) return io.TextIOWrapper.write(self, x) def writelines(self, lines): for line in lines: self.write(line) def __del__(self): try: self.detach() except Exception: pass def isatty(self): # https://bitbucket.org/pypy/pypy/issue/1803 return self._stream.isatty() class _FixupStream(object): """The new io interface needs more from streams than streams traditionally implement. As such, this fix-up code is necessary in some circumstances. The forcing of readable and writable flags are there because some tools put badly patched objects on sys (one such offender are certain version of jupyter notebook). """ def __init__(self, stream, force_readable=False, force_writable=False): self._stream = stream self._force_readable = force_readable self._force_writable = force_writable def __getattr__(self, name): return getattr(self._stream, name) def read1(self, size): f = getattr(self._stream, "read1", None) if f is not None: return f(size) # We only dispatch to readline instead of read in Python 2 as we # do not want cause problems with the different implementation # of line buffering. 
if PY2: return self._stream.readline(size) return self._stream.read(size) def readable(self): if self._force_readable: return True x = getattr(self._stream, "readable", None) if x is not None: return x() try: self._stream.read(0) except Exception: return False return True def writable(self): if self._force_writable: return True x = getattr(self._stream, "writable", None) if x is not None: return x() try: self._stream.write("") except Exception: try: self._stream.write(b"") except Exception: return False return True def seekable(self): x = getattr(self._stream, "seekable", None) if x is not None: return x() try: self._stream.seek(self._stream.tell()) except Exception: return False return True if PY2: text_type = unicode raw_input = raw_input string_types = (str, unicode) int_types = (int, long) iteritems = lambda x: x.iteritems() range_type = xrange def is_bytes(x): return isinstance(x, (buffer, bytearray)) _identifier_re = re.compile(r"^[a-zA-Z_][a-zA-Z0-9_]*$") # For Windows, we need to force stdout/stdin/stderr to binary if it's # fetched for that. This obviously is not the most correct way to do # it as it changes global state. Unfortunately, there does not seem to # be a clear better way to do it as just reopening the file in binary # mode does not change anything. # # An option would be to do what Python 3 does and to open the file as # binary only, patch it back to the system, and then use a wrapper # stream that converts newlines. It's not quite clear what's the # correct option here. # # This code also lives in _winconsole for the fallback to the console # emulation stream. # # There are also Windows environments where the `msvcrt` module is not # available (which is why we use try-catch instead of the WIN variable # here), such as the Google App Engine development server on Windows. In # those cases there is just nothing we can do. 
def set_binary_mode(f): return f try: import msvcrt except ImportError: pass else: def set_binary_mode(f): try: fileno = f.fileno() except Exception: pass else: msvcrt.setmode(fileno, os.O_BINARY) return f try: import fcntl except ImportError: pass else: def set_binary_mode(f): try: fileno = f.fileno() except Exception: pass else: flags = fcntl.fcntl(fileno, fcntl.F_GETFL) fcntl.fcntl(fileno, fcntl.F_SETFL, flags & ~os.O_NONBLOCK) return f def isidentifier(x): return _identifier_re.search(x) is not None def get_binary_stdin(): return set_binary_mode(sys.stdin) def get_binary_stdout(): _wrap_std_stream("stdout") return set_binary_mode(sys.stdout) def get_binary_stderr(): _wrap_std_stream("stderr") return set_binary_mode(sys.stderr) def get_text_stdin(encoding=None, errors=None): rv = _get_windows_console_stream(sys.stdin, encoding, errors) if rv is not None: return rv return _make_text_stream(sys.stdin, encoding, errors, force_readable=True) def get_text_stdout(encoding=None, errors=None): _wrap_std_stream("stdout") rv = _get_windows_console_stream(sys.stdout, encoding, errors) if rv is not None: return rv return _make_text_stream(sys.stdout, encoding, errors, force_writable=True) def get_text_stderr(encoding=None, errors=None): _wrap_std_stream("stderr") rv = _get_windows_console_stream(sys.stderr, encoding, errors) if rv is not None: return rv return _make_text_stream(sys.stderr, encoding, errors, force_writable=True) def filename_to_ui(value): if isinstance(value, bytes): value = value.decode(get_filesystem_encoding(), "replace") return value else: import io text_type = str raw_input = input string_types = (str,) int_types = (int,) range_type = range isidentifier = lambda x: x.isidentifier() iteritems = lambda x: iter(x.items()) def is_bytes(x): return isinstance(x, (bytes, memoryview, bytearray)) def _is_binary_reader(stream, default=False): try: return isinstance(stream.read(0), bytes) except Exception: return default # This happens in some cases where the 
stream was already # closed. In this case, we assume the default. def _is_binary_writer(stream, default=False): try: stream.write(b"") except Exception: try: stream.write("") return False except Exception: pass return default return True def _find_binary_reader(stream): # We need to figure out if the given stream is already binary. # This can happen because the official docs recommend detaching # the streams to get binary streams. Some code might do this, so # we need to deal with this case explicitly. if _is_binary_reader(stream, False): return stream buf = getattr(stream, "buffer", None) # Same situation here; this time we assume that the buffer is # actually binary in case it's closed. if buf is not None and _is_binary_reader(buf, True): return buf def _find_binary_writer(stream): # We need to figure out if the given stream is already binary. # This can happen because the official docs recommend detatching # the streams to get binary streams. Some code might do this, so # we need to deal with this case explicitly. if _is_binary_writer(stream, False): return stream buf = getattr(stream, "buffer", None) # Same situation here; this time we assume that the buffer is # actually binary in case it's closed. if buf is not None and _is_binary_writer(buf, True): return buf def _stream_is_misconfigured(stream): """A stream is misconfigured if its encoding is ASCII.""" # If the stream does not have an encoding set, we assume it's set # to ASCII. This appears to happen in certain unittest # environments. It's not quite clear what the correct behavior is # but this at least will force Click to recover somehow. return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") def _is_compat_stream_attr(stream, attr, value): """A stream attribute is compatible if it is equal to the desired value or the desired value is unset and the attribute has a value. 
""" stream_value = getattr(stream, attr, None) return stream_value == value or (value is None and stream_value is not None) def _is_compatible_text_stream(stream, encoding, errors): """Check if a stream's encoding and errors attributes are compatible with the desired values. """ return _is_compat_stream_attr( stream, "encoding", encoding ) and _is_compat_stream_attr(stream, "errors", errors) def _force_correct_text_stream( text_stream, encoding, errors, is_binary, find_binary, force_readable=False, force_writable=False, ): if is_binary(text_stream, False): binary_reader = text_stream else: # If the stream looks compatible, and won't default to a # misconfigured ascii encoding, return it as-is. if _is_compatible_text_stream(text_stream, encoding, errors) and not ( encoding is None and _stream_is_misconfigured(text_stream) ): return text_stream # Otherwise, get the underlying binary reader. binary_reader = find_binary(text_stream) # If that's not possible, silently use the original reader # and get mojibake instead of exceptions. if binary_reader is None: return text_stream # Default errors to replace instead of strict in order to get # something that works. if errors is None: errors = "replace" # Wrap the binary stream in a text stream with the correct # encoding parameters. 
return _make_text_stream( binary_reader, encoding, errors, force_readable=force_readable, force_writable=force_writable, ) def _force_correct_text_reader(text_reader, encoding, errors, force_readable=False): return _force_correct_text_stream( text_reader, encoding, errors, _is_binary_reader, _find_binary_reader, force_readable=force_readable, ) def _force_correct_text_writer(text_writer, encoding, errors, force_writable=False): return _force_correct_text_stream( text_writer, encoding, errors, _is_binary_writer, _find_binary_writer, force_writable=force_writable, ) def get_binary_stdin(): reader = _find_binary_reader(sys.stdin) if reader is None: raise RuntimeError("Was not able to determine binary stream for sys.stdin.") return reader def get_binary_stdout(): writer = _find_binary_writer(sys.stdout) if writer is None: raise RuntimeError( "Was not able to determine binary stream for sys.stdout." ) return writer def get_binary_stderr(): writer = _find_binary_writer(sys.stderr) if writer is None: raise RuntimeError( "Was not able to determine binary stream for sys.stderr." 
) return writer def get_text_stdin(encoding=None, errors=None): rv = _get_windows_console_stream(sys.stdin, encoding, errors) if rv is not None: return rv return _force_correct_text_reader( sys.stdin, encoding, errors, force_readable=True ) def get_text_stdout(encoding=None, errors=None): rv = _get_windows_console_stream(sys.stdout, encoding, errors) if rv is not None: return rv return _force_correct_text_writer( sys.stdout, encoding, errors, force_writable=True ) def get_text_stderr(encoding=None, errors=None): rv = _get_windows_console_stream(sys.stderr, encoding, errors) if rv is not None: return rv return _force_correct_text_writer( sys.stderr, encoding, errors, force_writable=True ) def filename_to_ui(value): if isinstance(value, bytes): value = value.decode(get_filesystem_encoding(), "replace") else: value = value.encode("utf-8", "surrogateescape").decode("utf-8", "replace") return value def get_streerror(e, default=None): if hasattr(e, "strerror"): msg = e.strerror else: if default is not None: msg = default else: msg = str(e) if isinstance(msg, bytes): msg = msg.decode("utf-8", "replace") return msg def _wrap_io_open(file, mode, encoding, errors): """On Python 2, :func:`io.open` returns a text file wrapper that requires passing ``unicode`` to ``write``. Need to open the file in binary mode then wrap it in a subclass that can write ``str`` and ``unicode``. Also handles not passing ``encoding`` and ``errors`` in binary mode. """ binary = "b" in mode if binary: kwargs = {} else: kwargs = {"encoding": encoding, "errors": errors} if not PY2 or binary: return io.open(file, mode, **kwargs) f = io.open(file, "{}b".format(mode.replace("t", ""))) return _make_text_stream(f, **kwargs) def open_stream(filename, mode="r", encoding=None, errors="strict", atomic=False): binary = "b" in mode # Standard streams first. These are simple because they don't need # special handling for the atomic flag. It's entirely ignored. 
if filename == "-": if any(m in mode for m in ["w", "a", "x"]): if binary: return get_binary_stdout(), False return get_text_stdout(encoding=encoding, errors=errors), False if binary: return get_binary_stdin(), False return get_text_stdin(encoding=encoding, errors=errors), False # Non-atomic writes directly go out through the regular open functions. if not atomic: return _wrap_io_open(filename, mode, encoding, errors), True # Some usability stuff for atomic writes if "a" in mode: raise ValueError( "Appending to an existing file is not supported, because that" " would involve an expensive `copy`-operation to a temporary" " file. Open the file in normal `w`-mode and copy explicitly" " if that's what you're after." ) if "x" in mode: raise ValueError("Use the `overwrite`-parameter instead.") if "w" not in mode: raise ValueError("Atomic writes only make sense with `w`-mode.") # Atomic writes are more complicated. They work by opening a file # as a proxy in the same folder and then using the fdopen # functionality to wrap it in a Python file. Then we wrap it in an # atomic file that moves the file over on close. import errno import random try: perm = os.stat(filename).st_mode except OSError: perm = None flags = os.O_RDWR | os.O_CREAT | os.O_EXCL if binary: flags |= getattr(os, "O_BINARY", 0) while True: tmp_filename = os.path.join( os.path.dirname(filename), ".__atomic-write{:08x}".format(random.randrange(1 << 32)), ) try: fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) break except OSError as e: if e.errno == errno.EEXIST or ( os.name == "nt" and e.errno == errno.EACCES and os.path.isdir(e.filename) and os.access(e.filename, os.W_OK) ): continue raise if perm is not None: os.chmod(tmp_filename, perm) # in case perm includes bits in umask f = _wrap_io_open(fd, mode, encoding, errors) return _AtomicFile(f, tmp_filename, os.path.realpath(filename)), True # Used in a destructor call, needs extra protection from interpreter cleanup. 
if hasattr(os, "replace"): _replace = os.replace _can_replace = True else: _replace = os.rename _can_replace = not WIN class _AtomicFile(object): def __init__(self, f, tmp_filename, real_filename): self._f = f self._tmp_filename = tmp_filename self._real_filename = real_filename self.closed = False @property def name(self): return self._real_filename def close(self, delete=False): if self.closed: return self._f.close() if not _can_replace: try: os.remove(self._real_filename) except OSError: pass _replace(self._tmp_filename, self._real_filename) self.closed = True def __getattr__(self, name): return getattr(self._f, name) def __enter__(self): return self def __exit__(self, exc_type, exc_value, tb): self.close(delete=exc_type is not None) def __repr__(self): return repr(self._f) auto_wrap_for_ansi = None colorama = None get_winterm_size = None def strip_ansi(value): return _ansi_re.sub("", value) def _is_jupyter_kernel_output(stream): if WIN: # TODO: Couldn't test on Windows, should't try to support until # someone tests the details wrt colorama. return while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): stream = stream._stream return stream.__class__.__module__.startswith("ipykernel.") def should_strip_ansi(stream=None, color=None): if color is None: if stream is None: stream = sys.stdin return not isatty(stream) and not _is_jupyter_kernel_output(stream) return not color # If we're on Windows, we provide transparent integration through # colorama. This will make ANSI colors through the echo function # work automatically. 
if WIN: # Windows has a smaller terminal DEFAULT_COLUMNS = 79 from ._winconsole import _get_windows_console_stream, _wrap_std_stream def _get_argv_encoding(): import locale return locale.getpreferredencoding() if PY2: def raw_input(prompt=""): sys.stderr.flush() if prompt: stdout = _default_text_stdout() stdout.write(prompt) stdin = _default_text_stdin() return stdin.readline().rstrip("\r\n") try: import colorama except ImportError: pass else: _ansi_stream_wrappers = WeakKeyDictionary() def auto_wrap_for_ansi(stream, color=None): """This function wraps a stream so that calls through colorama are issued to the win32 console API to recolor on demand. It also ensures to reset the colors if a write call is interrupted to not destroy the console afterwards. """ try: cached = _ansi_stream_wrappers.get(stream) except Exception: cached = None if cached is not None: return cached strip = should_strip_ansi(stream, color) ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) rv = ansi_wrapper.stream _write = rv.write def _safe_write(s): try: return _write(s) except: ansi_wrapper.reset_all() raise rv.write = _safe_write try: _ansi_stream_wrappers[stream] = rv except Exception: pass return rv def get_winterm_size(): win = colorama.win32.GetConsoleScreenBufferInfo( colorama.win32.STDOUT ).srWindow return win.Right - win.Left, win.Bottom - win.Top else: def _get_argv_encoding(): return getattr(sys.stdin, "encoding", None) or get_filesystem_encoding() _get_windows_console_stream = lambda *x: None _wrap_std_stream = lambda *x: None def term_len(x): return len(strip_ansi(x)) def isatty(stream): try: return stream.isatty() except Exception: return False def _make_cached_stream_func(src_func, wrapper_func): cache = WeakKeyDictionary() def func(): stream = src_func() try: rv = cache.get(stream) except Exception: rv = None if rv is not None: return rv rv = wrapper_func() try: stream = src_func() # In case wrapper_func() modified the stream cache[stream] = rv except Exception: pass 
return rv return func _default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) _default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) _default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) binary_streams = { "stdin": get_binary_stdin, "stdout": get_binary_stdout, "stderr": get_binary_stderr, } text_streams = { "stdin": get_text_stdin, "stdout": get_text_stdout, "stderr": get_text_stderr, }
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/_termui_impl.py
# -*- coding: utf-8 -*- """ This module contains implementations for the termui module. To keep the import time of Click down, some infrequently used functionality is placed in this module and only imported as needed. """ import contextlib import math import os import sys import time from ._compat import _default_text_stdout from ._compat import CYGWIN from ._compat import get_best_encoding from ._compat import int_types from ._compat import isatty from ._compat import open_stream from ._compat import range_type from ._compat import strip_ansi from ._compat import term_len from ._compat import WIN from .exceptions import ClickException from .utils import echo if os.name == "nt": BEFORE_BAR = "\r" AFTER_BAR = "\n" else: BEFORE_BAR = "\r\033[?25l" AFTER_BAR = "\033[?25h\n" def _length_hint(obj): """Returns the length hint of an object.""" try: return len(obj) except (AttributeError, TypeError): try: get_hint = type(obj).__length_hint__ except AttributeError: return None try: hint = get_hint(obj) except TypeError: return None if hint is NotImplemented or not isinstance(hint, int_types) or hint < 0: return None return hint class ProgressBar(object): def __init__( self, iterable, length=None, fill_char="#", empty_char=" ", bar_template="%(bar)s", info_sep=" ", show_eta=True, show_percent=None, show_pos=False, item_show_func=None, label=None, file=None, color=None, width=30, ): self.fill_char = fill_char self.empty_char = empty_char self.bar_template = bar_template self.info_sep = info_sep self.show_eta = show_eta self.show_percent = show_percent self.show_pos = show_pos self.item_show_func = item_show_func self.label = label or "" if file is None: file = _default_text_stdout() self.file = file self.color = color self.width = width self.autowidth = width == 0 if length is None: length = _length_hint(iterable) if iterable is None: if length is None: raise TypeError("iterable or length is required") iterable = range_type(length) self.iter = iter(iterable) self.length = 
length self.length_known = length is not None self.pos = 0 self.avg = [] self.start = self.last_eta = time.time() self.eta_known = False self.finished = False self.max_width = None self.entered = False self.current_item = None self.is_hidden = not isatty(self.file) self._last_line = None self.short_limit = 0.5 def __enter__(self): self.entered = True self.render_progress() return self def __exit__(self, exc_type, exc_value, tb): self.render_finish() def __iter__(self): if not self.entered: raise RuntimeError("You need to use progress bars in a with block.") self.render_progress() return self.generator() def __next__(self): # Iteration is defined in terms of a generator function, # returned by iter(self); use that to define next(). This works # because `self.iter` is an iterable consumed by that generator, # so it is re-entry safe. Calling `next(self.generator())` # twice works and does "what you want". return next(iter(self)) # Python 2 compat next = __next__ def is_fast(self): return time.time() - self.start <= self.short_limit def render_finish(self): if self.is_hidden or self.is_fast(): return self.file.write(AFTER_BAR) self.file.flush() @property def pct(self): if self.finished: return 1.0 return min(self.pos / (float(self.length) or 1), 1.0) @property def time_per_iteration(self): if not self.avg: return 0.0 return sum(self.avg) / float(len(self.avg)) @property def eta(self): if self.length_known and not self.finished: return self.time_per_iteration * (self.length - self.pos) return 0.0 def format_eta(self): if self.eta_known: t = int(self.eta) seconds = t % 60 t //= 60 minutes = t % 60 t //= 60 hours = t % 24 t //= 24 if t > 0: return "{}d {:02}:{:02}:{:02}".format(t, hours, minutes, seconds) else: return "{:02}:{:02}:{:02}".format(hours, minutes, seconds) return "" def format_pos(self): pos = str(self.pos) if self.length_known: pos += "/{}".format(self.length) return pos def format_pct(self): return "{: 4}%".format(int(self.pct * 100))[1:] def 
format_bar(self): if self.length_known: bar_length = int(self.pct * self.width) bar = self.fill_char * bar_length bar += self.empty_char * (self.width - bar_length) elif self.finished: bar = self.fill_char * self.width else: bar = list(self.empty_char * (self.width or 1)) if self.time_per_iteration != 0: bar[ int( (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) * self.width ) ] = self.fill_char bar = "".join(bar) return bar def format_progress_line(self): show_percent = self.show_percent info_bits = [] if self.length_known and show_percent is None: show_percent = not self.show_pos if self.show_pos: info_bits.append(self.format_pos()) if show_percent: info_bits.append(self.format_pct()) if self.show_eta and self.eta_known and not self.finished: info_bits.append(self.format_eta()) if self.item_show_func is not None: item_info = self.item_show_func(self.current_item) if item_info is not None: info_bits.append(item_info) return ( self.bar_template % { "label": self.label, "bar": self.format_bar(), "info": self.info_sep.join(info_bits), } ).rstrip() def render_progress(self): from .termui import get_terminal_size if self.is_hidden: return buf = [] # Update width in case the terminal has been resized if self.autowidth: old_width = self.width self.width = 0 clutter_length = term_len(self.format_progress_line()) new_width = max(0, get_terminal_size()[0] - clutter_length) if new_width < old_width: buf.append(BEFORE_BAR) buf.append(" " * self.max_width) self.max_width = new_width self.width = new_width clear_width = self.width if self.max_width is not None: clear_width = self.max_width buf.append(BEFORE_BAR) line = self.format_progress_line() line_len = term_len(line) if self.max_width is None or self.max_width < line_len: self.max_width = line_len buf.append(line) buf.append(" " * (clear_width - line_len)) line = "".join(buf) # Render the line only if it changed. 
if line != self._last_line and not self.is_fast(): self._last_line = line echo(line, file=self.file, color=self.color, nl=False) self.file.flush() def make_step(self, n_steps): self.pos += n_steps if self.length_known and self.pos >= self.length: self.finished = True if (time.time() - self.last_eta) < 1.0: return self.last_eta = time.time() # self.avg is a rolling list of length <= 7 of steps where steps are # defined as time elapsed divided by the total progress through # self.length. if self.pos: step = (time.time() - self.start) / self.pos else: step = time.time() - self.start self.avg = self.avg[-6:] + [step] self.eta_known = self.length_known def update(self, n_steps): self.make_step(n_steps) self.render_progress() def finish(self): self.eta_known = 0 self.current_item = None self.finished = True def generator(self): """Return a generator which yields the items added to the bar during construction, and updates the progress bar *after* the yielded block returns. """ # WARNING: the iterator interface for `ProgressBar` relies on # this and only works because this is a simple generator which # doesn't create or manage additional state. If this function # changes, the impact should be evaluated both against # `iter(bar)` and `next(bar)`. `next()` in particular may call # `self.generator()` repeatedly, and this must remain safe in # order for that interface to work. 
if not self.entered: raise RuntimeError("You need to use progress bars in a with block.") if self.is_hidden: for rv in self.iter: yield rv else: for rv in self.iter: self.current_item = rv yield rv self.update(1) self.finish() self.render_progress() def pager(generator, color=None): """Decide what method to use for paging through text.""" stdout = _default_text_stdout() if not isatty(sys.stdin) or not isatty(stdout): return _nullpager(stdout, generator, color) pager_cmd = (os.environ.get("PAGER", None) or "").strip() if pager_cmd: if WIN: return _tempfilepager(generator, pager_cmd, color) return _pipepager(generator, pager_cmd, color) if os.environ.get("TERM") in ("dumb", "emacs"): return _nullpager(stdout, generator, color) if WIN or sys.platform.startswith("os2"): return _tempfilepager(generator, "more <", color) if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0: return _pipepager(generator, "less", color) import tempfile fd, filename = tempfile.mkstemp() os.close(fd) try: if hasattr(os, "system") and os.system('more "{}"'.format(filename)) == 0: return _pipepager(generator, "more", color) return _nullpager(stdout, generator, color) finally: os.unlink(filename) def _pipepager(generator, cmd, color): """Page through text by feeding it to another program. Invoking a pager through this might support colors. 
""" import subprocess env = dict(os.environ) # If we're piping to less we might support colors under the # condition that cmd_detail = cmd.rsplit("/", 1)[-1].split() if color is None and cmd_detail[0] == "less": less_flags = "{}{}".format(os.environ.get("LESS", ""), " ".join(cmd_detail[1:])) if not less_flags: env["LESS"] = "-R" color = True elif "r" in less_flags or "R" in less_flags: color = True c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env) encoding = get_best_encoding(c.stdin) try: for text in generator: if not color: text = strip_ansi(text) c.stdin.write(text.encode(encoding, "replace")) except (IOError, KeyboardInterrupt): pass else: c.stdin.close() # Less doesn't respect ^C, but catches it for its own UI purposes (aborting # search or other commands inside less). # # That means when the user hits ^C, the parent process (click) terminates, # but less is still alive, paging the output and messing up the terminal. # # If the user wants to make the pager exit on ^C, they should set # `LESS='-K'`. It's not our decision to make. while True: try: c.wait() except KeyboardInterrupt: pass else: break def _tempfilepager(generator, cmd, color): """Page through text by invoking a program on a temporary file.""" import tempfile filename = tempfile.mktemp() # TODO: This never terminates if the passed generator never terminates. text = "".join(generator) if not color: text = strip_ansi(text) encoding = get_best_encoding(sys.stdout) with open_stream(filename, "wb")[0] as f: f.write(text.encode(encoding)) try: os.system('{} "{}"'.format(cmd, filename)) finally: os.unlink(filename) def _nullpager(stream, generator, color): """Simply print unformatted text. 
This is the ultimate fallback.""" for text in generator: if not color: text = strip_ansi(text) stream.write(text) class Editor(object): def __init__(self, editor=None, env=None, require_save=True, extension=".txt"): self.editor = editor self.env = env self.require_save = require_save self.extension = extension def get_editor(self): if self.editor is not None: return self.editor for key in "VISUAL", "EDITOR": rv = os.environ.get(key) if rv: return rv if WIN: return "notepad" for editor in "sensible-editor", "vim", "nano": if os.system("which {} >/dev/null 2>&1".format(editor)) == 0: return editor return "vi" def edit_file(self, filename): import subprocess editor = self.get_editor() if self.env: environ = os.environ.copy() environ.update(self.env) else: environ = None try: c = subprocess.Popen( '{} "{}"'.format(editor, filename), env=environ, shell=True, ) exit_code = c.wait() if exit_code != 0: raise ClickException("{}: Editing failed!".format(editor)) except OSError as e: raise ClickException("{}: Editing failed: {}".format(editor, e)) def edit(self, text): import tempfile text = text or "" if text and not text.endswith("\n"): text += "\n" fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) try: if WIN: encoding = "utf-8-sig" text = text.replace("\n", "\r\n") else: encoding = "utf-8" text = text.encode(encoding) f = os.fdopen(fd, "wb") f.write(text) f.close() timestamp = os.path.getmtime(name) self.edit_file(name) if self.require_save and os.path.getmtime(name) == timestamp: return None f = open(name, "rb") try: rv = f.read() finally: f.close() return rv.decode("utf-8-sig").replace("\r\n", "\n") finally: os.unlink(name) def open_url(url, wait=False, locate=False): import subprocess def _unquote_file(url): try: import urllib except ImportError: import urllib if url.startswith("file://"): url = urllib.unquote(url[7:]) return url if sys.platform == "darwin": args = ["open"] if wait: args.append("-W") if locate: args.append("-R") 
args.append(_unquote_file(url)) null = open("/dev/null", "w") try: return subprocess.Popen(args, stderr=null).wait() finally: null.close() elif WIN: if locate: url = _unquote_file(url) args = 'explorer /select,"{}"'.format(_unquote_file(url.replace('"', ""))) else: args = 'start {} "" "{}"'.format( "/WAIT" if wait else "", url.replace('"', "") ) return os.system(args) elif CYGWIN: if locate: url = _unquote_file(url) args = 'cygstart "{}"'.format(os.path.dirname(url).replace('"', "")) else: args = 'cygstart {} "{}"'.format("-w" if wait else "", url.replace('"', "")) return os.system(args) try: if locate: url = os.path.dirname(_unquote_file(url)) or "." else: url = _unquote_file(url) c = subprocess.Popen(["xdg-open", url]) if wait: return c.wait() return 0 except OSError: if url.startswith(("http://", "https://")) and not locate and not wait: import webbrowser webbrowser.open(url) return 0 return 1 def _translate_ch_to_exc(ch): if ch == u"\x03": raise KeyboardInterrupt() if ch == u"\x04" and not WIN: # Unix-like, Ctrl+D raise EOFError() if ch == u"\x1a" and WIN: # Windows, Ctrl+Z raise EOFError() if WIN: import msvcrt @contextlib.contextmanager def raw_terminal(): yield def getchar(echo): # The function `getch` will return a bytes object corresponding to # the pressed character. Since Windows 10 build 1803, it will also # return \x00 when called a second time after pressing a regular key. # # `getwch` does not share this probably-bugged behavior. Moreover, it # returns a Unicode object by default, which is what we want. # # Either of these functions will return \x00 or \xe0 to indicate # a special key, and you need to call the same function again to get # the "rest" of the code. The fun part is that \u00e0 is # "latin small letter a with grave", so if you type that on a French # keyboard, you _also_ get a \xe0. # E.g., consider the Up arrow. This returns \xe0 and then \x48. The # resulting Unicode string reads as "a with grave" + "capital H". 
# This is indistinguishable from when the user actually types # "a with grave" and then "capital H". # # When \xe0 is returned, we assume it's part of a special-key sequence # and call `getwch` again, but that means that when the user types # the \u00e0 character, `getchar` doesn't return until a second # character is typed. # The alternative is returning immediately, but that would mess up # cross-platform handling of arrow keys and others that start with # \xe0. Another option is using `getch`, but then we can't reliably # read non-ASCII characters, because return values of `getch` are # limited to the current 8-bit codepage. # # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` # is doing the right thing in more situations than with `getch`. if echo: func = msvcrt.getwche else: func = msvcrt.getwch rv = func() if rv in (u"\x00", u"\xe0"): # \x00 and \xe0 are control characters that indicate special key, # see above. rv += func() _translate_ch_to_exc(rv) return rv else: import tty import termios @contextlib.contextmanager def raw_terminal(): if not isatty(sys.stdin): f = open("/dev/tty") fd = f.fileno() else: fd = sys.stdin.fileno() f = None try: old_settings = termios.tcgetattr(fd) try: tty.setraw(fd) yield fd finally: termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) sys.stdout.flush() if f is not None: f.close() except termios.error: pass def getchar(echo): with raw_terminal() as fd: ch = os.read(fd, 32) ch = ch.decode(get_best_encoding(sys.stdin), "replace") if echo and isatty(sys.stdout): sys.stdout.write(ch) _translate_ch_to_exc(ch) return ch
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/testing.py
import contextlib import os import shlex import shutil import sys import tempfile from . import formatting from . import termui from . import utils from ._compat import iteritems from ._compat import PY2 from ._compat import string_types if PY2: from cStringIO import StringIO else: import io from ._compat import _find_binary_reader class EchoingStdin(object): def __init__(self, input, output): self._input = input self._output = output def __getattr__(self, x): return getattr(self._input, x) def _echo(self, rv): self._output.write(rv) return rv def read(self, n=-1): return self._echo(self._input.read(n)) def readline(self, n=-1): return self._echo(self._input.readline(n)) def readlines(self): return [self._echo(x) for x in self._input.readlines()] def __iter__(self): return iter(self._echo(x) for x in self._input) def __repr__(self): return repr(self._input) def make_input_stream(input, charset): # Is already an input stream. if hasattr(input, "read"): if PY2: return input rv = _find_binary_reader(input) if rv is not None: return rv raise TypeError("Could not find binary reader for input stream.") if input is None: input = b"" elif not isinstance(input, bytes): input = input.encode(charset) if PY2: return StringIO(input) return io.BytesIO(input) class Result(object): """Holds the captured result of an invoked CLI script.""" def __init__( self, runner, stdout_bytes, stderr_bytes, exit_code, exception, exc_info=None ): #: The runner that created the result self.runner = runner #: The standard output as bytes. self.stdout_bytes = stdout_bytes #: The standard error as bytes, or None if not available self.stderr_bytes = stderr_bytes #: The exit code as integer. self.exit_code = exit_code #: The exception that happened if one did. 
self.exception = exception #: The traceback self.exc_info = exc_info @property def output(self): """The (standard) output as unicode string.""" return self.stdout @property def stdout(self): """The standard output as unicode string.""" return self.stdout_bytes.decode(self.runner.charset, "replace").replace( "\r\n", "\n" ) @property def stderr(self): """The standard error as unicode string.""" if self.stderr_bytes is None: raise ValueError("stderr not separately captured") return self.stderr_bytes.decode(self.runner.charset, "replace").replace( "\r\n", "\n" ) def __repr__(self): return "<{} {}>".format( type(self).__name__, repr(self.exception) if self.exception else "okay" ) class CliRunner(object): """The CLI runner provides functionality to invoke a Click command line script for unittesting purposes in a isolated environment. This only works in single-threaded systems without any concurrency as it changes the global interpreter state. :param charset: the character set for the input and output data. This is UTF-8 by default and should not be changed currently as the reporting to Click only works in Python 2 properly. :param env: a dictionary with environment variables for overriding. :param echo_stdin: if this is set to `True`, then reading from stdin writes to stdout. This is useful for showing examples in some circumstances. Note that regular prompts will automatically echo the input. :param mix_stderr: if this is set to `False`, then stdout and stderr are preserved as independent streams. This is useful for Unix-philosophy apps that have predictable stdout and noisy stderr, such that each may be measured independently """ def __init__(self, charset=None, env=None, echo_stdin=False, mix_stderr=True): if charset is None: charset = "utf-8" self.charset = charset self.env = env or {} self.echo_stdin = echo_stdin self.mix_stderr = mix_stderr def get_default_prog_name(self, cli): """Given a command object it will return the default program name for it. 
The default is the `name` attribute or ``"root"`` if not set. """ return cli.name or "root" def make_env(self, overrides=None): """Returns the environment overrides for invoking a script.""" rv = dict(self.env) if overrides: rv.update(overrides) return rv @contextlib.contextmanager def isolation(self, input=None, env=None, color=False): """A context manager that sets up the isolation for invoking of a command line tool. This sets up stdin with the given input data and `os.environ` with the overrides from the given dictionary. This also rebinds some internals in Click to be mocked (like the prompt functionality). This is automatically done in the :meth:`invoke` method. .. versionadded:: 4.0 The ``color`` parameter was added. :param input: the input stream to put into sys.stdin. :param env: the environment overrides as dictionary. :param color: whether the output should contain color codes. The application can still override this explicitly. """ input = make_input_stream(input, self.charset) old_stdin = sys.stdin old_stdout = sys.stdout old_stderr = sys.stderr old_forced_width = formatting.FORCED_WIDTH formatting.FORCED_WIDTH = 80 env = self.make_env(env) if PY2: bytes_output = StringIO() if self.echo_stdin: input = EchoingStdin(input, bytes_output) sys.stdout = bytes_output if not self.mix_stderr: bytes_error = StringIO() sys.stderr = bytes_error else: bytes_output = io.BytesIO() if self.echo_stdin: input = EchoingStdin(input, bytes_output) input = io.TextIOWrapper(input, encoding=self.charset) sys.stdout = io.TextIOWrapper(bytes_output, encoding=self.charset) if not self.mix_stderr: bytes_error = io.BytesIO() sys.stderr = io.TextIOWrapper(bytes_error, encoding=self.charset) if self.mix_stderr: sys.stderr = sys.stdout sys.stdin = input def visible_input(prompt=None): sys.stdout.write(prompt or "") val = input.readline().rstrip("\r\n") sys.stdout.write("{}\n".format(val)) sys.stdout.flush() return val def hidden_input(prompt=None): 
sys.stdout.write("{}\n".format(prompt or "")) sys.stdout.flush() return input.readline().rstrip("\r\n") def _getchar(echo): char = sys.stdin.read(1) if echo: sys.stdout.write(char) sys.stdout.flush() return char default_color = color def should_strip_ansi(stream=None, color=None): if color is None: return not default_color return not color old_visible_prompt_func = termui.visible_prompt_func old_hidden_prompt_func = termui.hidden_prompt_func old__getchar_func = termui._getchar old_should_strip_ansi = utils.should_strip_ansi termui.visible_prompt_func = visible_input termui.hidden_prompt_func = hidden_input termui._getchar = _getchar utils.should_strip_ansi = should_strip_ansi old_env = {} try: for key, value in iteritems(env): old_env[key] = os.environ.get(key) if value is None: try: del os.environ[key] except Exception: pass else: os.environ[key] = value yield (bytes_output, not self.mix_stderr and bytes_error) finally: for key, value in iteritems(old_env): if value is None: try: del os.environ[key] except Exception: pass else: os.environ[key] = value sys.stdout = old_stdout sys.stderr = old_stderr sys.stdin = old_stdin termui.visible_prompt_func = old_visible_prompt_func termui.hidden_prompt_func = old_hidden_prompt_func termui._getchar = old__getchar_func utils.should_strip_ansi = old_should_strip_ansi formatting.FORCED_WIDTH = old_forced_width def invoke( self, cli, args=None, input=None, env=None, catch_exceptions=True, color=False, **extra ): """Invokes a command in an isolated environment. The arguments are forwarded directly to the command line script, the `extra` keyword arguments are passed to the :meth:`~clickpkg.Command.main` function of the command. This returns a :class:`Result` object. .. versionadded:: 3.0 The ``catch_exceptions`` parameter was added. .. versionchanged:: 3.0 The result object now has an `exc_info` attribute with the traceback if available. .. versionadded:: 4.0 The ``color`` parameter was added. 
:param cli: the command to invoke :param args: the arguments to invoke. It may be given as an iterable or a string. When given as string it will be interpreted as a Unix shell command. More details at :func:`shlex.split`. :param input: the input data for `sys.stdin`. :param env: the environment overrides. :param catch_exceptions: Whether to catch any other exceptions than ``SystemExit``. :param extra: the keyword arguments to pass to :meth:`main`. :param color: whether the output should contain color codes. The application can still override this explicitly. """ exc_info = None with self.isolation(input=input, env=env, color=color) as outstreams: exception = None exit_code = 0 if isinstance(args, string_types): args = shlex.split(args) try: prog_name = extra.pop("prog_name") except KeyError: prog_name = self.get_default_prog_name(cli) try: cli.main(args=args or (), prog_name=prog_name, **extra) except SystemExit as e: exc_info = sys.exc_info() exit_code = e.code if exit_code is None: exit_code = 0 if exit_code != 0: exception = e if not isinstance(exit_code, int): sys.stdout.write(str(exit_code)) sys.stdout.write("\n") exit_code = 1 except Exception as e: if not catch_exceptions: raise exception = e exit_code = 1 exc_info = sys.exc_info() finally: sys.stdout.flush() stdout = outstreams[0].getvalue() if self.mix_stderr: stderr = None else: stderr = outstreams[1].getvalue() return Result( runner=self, stdout_bytes=stdout, stderr_bytes=stderr, exit_code=exit_code, exception=exception, exc_info=exc_info, ) @contextlib.contextmanager def isolated_filesystem(self): """A context manager that creates a temporary folder and changes the current working directory to it for isolated filesystem tests. """ cwd = os.getcwd() t = tempfile.mkdtemp() os.chdir(t) try: yield t finally: os.chdir(cwd) try: shutil.rmtree(t) except (OSError, IOError): # noqa: B014 pass
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/click/decorators.py
import inspect import sys from functools import update_wrapper from ._compat import iteritems from ._unicodefun import _check_for_unicode_literals from .core import Argument from .core import Command from .core import Group from .core import Option from .globals import get_current_context from .utils import echo def pass_context(f): """Marks a callback as wanting to receive the current context object as first argument. """ def new_func(*args, **kwargs): return f(get_current_context(), *args, **kwargs) return update_wrapper(new_func, f) def pass_obj(f): """Similar to :func:`pass_context`, but only pass the object on the context onwards (:attr:`Context.obj`). This is useful if that object represents the state of a nested system. """ def new_func(*args, **kwargs): return f(get_current_context().obj, *args, **kwargs) return update_wrapper(new_func, f) def make_pass_decorator(object_type, ensure=False): """Given an object type this creates a decorator that will work similar to :func:`pass_obj` but instead of passing the object of the current context, it will find the innermost context of type :func:`object_type`. This generates a decorator that works roughly like this:: from functools import update_wrapper def decorator(f): @pass_context def new_func(ctx, *args, **kwargs): obj = ctx.find_object(object_type) return ctx.invoke(f, obj, *args, **kwargs) return update_wrapper(new_func, f) return decorator :param object_type: the type of the object to pass. :param ensure: if set to `True`, a new object will be created and remembered on the context if it's not there yet. 
""" def decorator(f): def new_func(*args, **kwargs): ctx = get_current_context() if ensure: obj = ctx.ensure_object(object_type) else: obj = ctx.find_object(object_type) if obj is None: raise RuntimeError( "Managed to invoke callback without a context" " object of type '{}' existing".format(object_type.__name__) ) return ctx.invoke(f, obj, *args, **kwargs) return update_wrapper(new_func, f) return decorator def _make_command(f, name, attrs, cls): if isinstance(f, Command): raise TypeError("Attempted to convert a callback into a command twice.") try: params = f.__click_params__ params.reverse() del f.__click_params__ except AttributeError: params = [] help = attrs.get("help") if help is None: help = inspect.getdoc(f) if isinstance(help, bytes): help = help.decode("utf-8") else: help = inspect.cleandoc(help) attrs["help"] = help _check_for_unicode_literals() return cls( name=name or f.__name__.lower().replace("_", "-"), callback=f, params=params, **attrs ) def command(name=None, cls=None, **attrs): r"""Creates a new :class:`Command` and uses the decorated function as callback. This will also automatically attach all decorated :func:`option`\s and :func:`argument`\s as parameters to the command. The name of the command defaults to the name of the function with underscores replaced by dashes. If you want to change that, you can pass the intended name as the first argument. All keyword arguments are forwarded to the underlying command class. Once decorated the function turns into a :class:`Command` instance that can be invoked as a command line utility or be attached to a command :class:`Group`. :param name: the name of the command. This defaults to the function name with underscores replaced by dashes. :param cls: the command class to instantiate. This defaults to :class:`Command`. 
""" if cls is None: cls = Command def decorator(f): cmd = _make_command(f, name, attrs, cls) cmd.__doc__ = f.__doc__ return cmd return decorator def group(name=None, **attrs): """Creates a new :class:`Group` with a function as callback. This works otherwise the same as :func:`command` just that the `cls` parameter is set to :class:`Group`. """ attrs.setdefault("cls", Group) return command(name, **attrs) def _param_memo(f, param): if isinstance(f, Command): f.params.append(param) else: if not hasattr(f, "__click_params__"): f.__click_params__ = [] f.__click_params__.append(param) def argument(*param_decls, **attrs): """Attaches an argument to the command. All positional arguments are passed as parameter declarations to :class:`Argument`; all keyword arguments are forwarded unchanged (except ``cls``). This is equivalent to creating an :class:`Argument` instance manually and attaching it to the :attr:`Command.params` list. :param cls: the argument class to instantiate. This defaults to :class:`Argument`. """ def decorator(f): ArgumentClass = attrs.pop("cls", Argument) _param_memo(f, ArgumentClass(param_decls, **attrs)) return f return decorator def option(*param_decls, **attrs): """Attaches an option to the command. All positional arguments are passed as parameter declarations to :class:`Option`; all keyword arguments are forwarded unchanged (except ``cls``). This is equivalent to creating an :class:`Option` instance manually and attaching it to the :attr:`Command.params` list. :param cls: the option class to instantiate. This defaults to :class:`Option`. 
""" def decorator(f): # Issue 926, copy attrs, so pre-defined options can re-use the same cls= option_attrs = attrs.copy() if "help" in option_attrs: option_attrs["help"] = inspect.cleandoc(option_attrs["help"]) OptionClass = option_attrs.pop("cls", Option) _param_memo(f, OptionClass(param_decls, **option_attrs)) return f return decorator def confirmation_option(*param_decls, **attrs): """Shortcut for confirmation prompts that can be ignored by passing ``--yes`` as parameter. This is equivalent to decorating a function with :func:`option` with the following parameters:: def callback(ctx, param, value): if not value: ctx.abort() @click.command() @click.option('--yes', is_flag=True, callback=callback, expose_value=False, prompt='Do you want to continue?') def dropdb(): pass """ def decorator(f): def callback(ctx, param, value): if not value: ctx.abort() attrs.setdefault("is_flag", True) attrs.setdefault("callback", callback) attrs.setdefault("expose_value", False) attrs.setdefault("prompt", "Do you want to continue?") attrs.setdefault("help", "Confirm the action without prompting.") return option(*(param_decls or ("--yes",)), **attrs)(f) return decorator def password_option(*param_decls, **attrs): """Shortcut for password prompts. This is equivalent to decorating a function with :func:`option` with the following parameters:: @click.command() @click.option('--password', prompt=True, confirmation_prompt=True, hide_input=True) def changeadmin(password): pass """ def decorator(f): attrs.setdefault("prompt", True) attrs.setdefault("confirmation_prompt", True) attrs.setdefault("hide_input", True) return option(*(param_decls or ("--password",)), **attrs)(f) return decorator def version_option(version=None, *param_decls, **attrs): """Adds a ``--version`` option which immediately ends the program printing out the version number. This is implemented as an eager option that prints the version and exits the program in the callback. :param version: the version number to show. 
If not provided Click attempts an auto discovery via setuptools. :param prog_name: the name of the program (defaults to autodetection) :param message: custom message to show instead of the default (``'%(prog)s, version %(version)s'``) :param others: everything else is forwarded to :func:`option`. """ if version is None: if hasattr(sys, "_getframe"): module = sys._getframe(1).f_globals.get("__name__") else: module = "" def decorator(f): prog_name = attrs.pop("prog_name", None) message = attrs.pop("message", "%(prog)s, version %(version)s") def callback(ctx, param, value): if not value or ctx.resilient_parsing: return prog = prog_name if prog is None: prog = ctx.find_root().info_name ver = version if ver is None: try: import pkg_resources except ImportError: pass else: for dist in pkg_resources.working_set: scripts = dist.get_entry_map().get("console_scripts") or {} for _, entry_point in iteritems(scripts): if entry_point.module_name == module: ver = dist.version break if ver is None: raise RuntimeError("Could not determine version") echo(message % {"prog": prog, "version": ver}, color=ctx.color) ctx.exit() attrs.setdefault("is_flag", True) attrs.setdefault("expose_value", False) attrs.setdefault("is_eager", True) attrs.setdefault("help", "Show the version and exit.") attrs["callback"] = callback return option(*(param_decls or ("--version",)), **attrs)(f) return decorator def help_option(*param_decls, **attrs): """Adds a ``--help`` option which immediately ends the program printing out the help page. This is usually unnecessary to add as this is added by default to all commands unless suppressed. Like :func:`version_option`, this is implemented as eager option that prints in the callback and exits. All arguments are forwarded to :func:`option`. 
""" def decorator(f): def callback(ctx, param, value): if value and not ctx.resilient_parsing: echo(ctx.get_help(), color=ctx.color) ctx.exit() attrs.setdefault("is_flag", True) attrs.setdefault("expose_value", False) attrs.setdefault("help", "Show this message and exit.") attrs.setdefault("is_eager", True) attrs["callback"] = callback return option(*(param_decls or ("--help",)), **attrs)(f) return decorator
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/SQLAlchemy-1.3.18-py3.9.egg-info/PKG-INFO
Metadata-Version: 2.1 Name: SQLAlchemy Version: 1.3.18 Summary: Database Abstraction Library Home-page: http://www.sqlalchemy.org Author: Mike Bayer Author-email: mike_mp@zzzcomputing.com License: MIT Project-URL: Documentation, https://docs.sqlalchemy.org Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/ Description: SQLAlchemy ========== The Python SQL Toolkit and Object Relational Mapper Introduction ------------- SQLAlchemy is the Python SQL toolkit and Object Relational Mapper that gives application developers the full power and flexibility of SQL. SQLAlchemy provides a full suite of well known enterprise-level persistence patterns, designed for efficient and high-performing database access, adapted into a simple and Pythonic domain language. Major SQLAlchemy features include: * An industrial strength ORM, built from the core on the identity map, unit of work, and data mapper patterns. These patterns allow transparent persistence of objects using a declarative configuration system. Domain models can be constructed and manipulated naturally, and changes are synchronized with the current transaction automatically. * A relationally-oriented query system, exposing the full range of SQL's capabilities explicitly, including joins, subqueries, correlation, and most everything else, in terms of the object model. Writing queries with the ORM uses the same techniques of relational composition you use when writing SQL. While you can drop into literal SQL at any time, it's virtually never needed. * A comprehensive and flexible system of eager loading for related collections and objects. Collections are cached within a session, and can be loaded on individual access, all at once using joins, or by query per collection across the full result set. * A Core SQL construction system and DBAPI interaction layer. 
The SQLAlchemy Core is separate from the ORM and is a full database abstraction layer in its own right, and includes an extensible Python-based SQL expression language, schema metadata, connection pooling, type coercion, and custom types. * All primary and foreign key constraints are assumed to be composite and natural. Surrogate integer primary keys are of course still the norm, but SQLAlchemy never assumes or hardcodes to this model. * Database introspection and generation. Database schemas can be "reflected" in one step into Python structures representing database metadata; those same structures can then generate CREATE statements right back out - all within the Core, independent of the ORM. SQLAlchemy's philosophy: * SQL databases behave less and less like object collections the more size and performance start to matter; object collections behave less and less like tables and rows the more abstraction starts to matter. SQLAlchemy aims to accommodate both of these principles. * An ORM doesn't need to hide the "R". A relational database provides rich, set-based functionality that should be fully exposed. SQLAlchemy's ORM provides an open-ended set of patterns that allow a developer to construct a custom mediation layer between a domain model and a relational schema, turning the so-called "object relational impedance" issue into a distant memory. * The developer, in all cases, makes all decisions regarding the design, structure, and naming conventions of both the object model as well as the relational schema. SQLAlchemy only provides the means to automate the execution of these decisions. * With SQLAlchemy, there's no such thing as "the ORM generated a bad query" - you retain full control over the structure of queries, including how joins are organized, how subqueries and correlation is used, what columns are requested. Everything SQLAlchemy does is ultimately the result of a developer- initiated decision. * Don't use an ORM if the problem doesn't need one. 
SQLAlchemy consists of a Core and separate ORM component. The Core offers a full SQL expression language that allows Pythonic construction of SQL constructs that render directly to SQL strings for a target database, returning result sets that are essentially enhanced DBAPI cursors. * Transactions should be the norm. With SQLAlchemy's ORM, nothing goes to permanent storage until commit() is called. SQLAlchemy encourages applications to create a consistent means of delineating the start and end of a series of operations. * Never render a literal value in a SQL statement. Bound parameters are used to the greatest degree possible, allowing query optimizers to cache query plans effectively and making SQL injection attacks a non-issue. Documentation ------------- Latest documentation is at: http://www.sqlalchemy.org/docs/ Installation / Requirements --------------------------- Full documentation for installation is at `Installation <http://www.sqlalchemy.org/docs/intro.html#installation>`_. Getting Help / Development / Bug reporting ------------------------------------------ Please refer to the `SQLAlchemy Community Guide <http://www.sqlalchemy.org/support.html>`_. Code of Conduct --------------- Above all, SQLAlchemy places great emphasis on polite, thoughtful, and constructive communication between users and developers. Please see our current Code of Conduct at `Code of Conduct <http://www.sqlalchemy.org/codeofconduct.html>`_. License ------- SQLAlchemy is distributed under the `MIT license <http://www.opensource.org/licenses/mit-license.php>`_. 
Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Topic :: Database :: Front-Ends Classifier: Operating System :: OS Independent Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* Provides-Extra: mysql Provides-Extra: pymysql Provides-Extra: postgresql Provides-Extra: postgresql_psycopg2binary Provides-Extra: postgresql_pg8000 Provides-Extra: postgresql_psycopg2cffi Provides-Extra: oracle Provides-Extra: mssql_pyodbc Provides-Extra: mssql_pymssql Provides-Extra: mssql
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/SQLAlchemy-1.3.18-py3.9.egg-info/installed-files.txt
..\sqlalchemy\__init__.py ..\sqlalchemy\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\__pycache__\events.cpython-39.pyc ..\sqlalchemy\__pycache__\exc.cpython-39.pyc ..\sqlalchemy\__pycache__\inspection.cpython-39.pyc ..\sqlalchemy\__pycache__\interfaces.cpython-39.pyc ..\sqlalchemy\__pycache__\log.cpython-39.pyc ..\sqlalchemy\__pycache__\processors.cpython-39.pyc ..\sqlalchemy\__pycache__\schema.cpython-39.pyc ..\sqlalchemy\__pycache__\types.cpython-39.pyc ..\sqlalchemy\connectors\__init__.py ..\sqlalchemy\connectors\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\connectors\__pycache__\mxodbc.cpython-39.pyc ..\sqlalchemy\connectors\__pycache__\pyodbc.cpython-39.pyc ..\sqlalchemy\connectors\__pycache__\zxJDBC.cpython-39.pyc ..\sqlalchemy\connectors\mxodbc.py ..\sqlalchemy\connectors\pyodbc.py ..\sqlalchemy\connectors\zxJDBC.py ..\sqlalchemy\databases\__init__.py ..\sqlalchemy\databases\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\dialects\__init__.py ..\sqlalchemy\dialects\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\dialects\firebird\__init__.py ..\sqlalchemy\dialects\firebird\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\dialects\firebird\__pycache__\base.cpython-39.pyc ..\sqlalchemy\dialects\firebird\__pycache__\fdb.cpython-39.pyc ..\sqlalchemy\dialects\firebird\__pycache__\kinterbasdb.cpython-39.pyc ..\sqlalchemy\dialects\firebird\base.py ..\sqlalchemy\dialects\firebird\fdb.py ..\sqlalchemy\dialects\firebird\kinterbasdb.py ..\sqlalchemy\dialects\mssql\__init__.py ..\sqlalchemy\dialects\mssql\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\dialects\mssql\__pycache__\adodbapi.cpython-39.pyc ..\sqlalchemy\dialects\mssql\__pycache__\base.cpython-39.pyc ..\sqlalchemy\dialects\mssql\__pycache__\information_schema.cpython-39.pyc ..\sqlalchemy\dialects\mssql\__pycache__\mxodbc.cpython-39.pyc ..\sqlalchemy\dialects\mssql\__pycache__\provision.cpython-39.pyc ..\sqlalchemy\dialects\mssql\__pycache__\pymssql.cpython-39.pyc 
..\sqlalchemy\dialects\mssql\__pycache__\pyodbc.cpython-39.pyc ..\sqlalchemy\dialects\mssql\__pycache__\zxjdbc.cpython-39.pyc ..\sqlalchemy\dialects\mssql\adodbapi.py ..\sqlalchemy\dialects\mssql\base.py ..\sqlalchemy\dialects\mssql\information_schema.py ..\sqlalchemy\dialects\mssql\mxodbc.py ..\sqlalchemy\dialects\mssql\provision.py ..\sqlalchemy\dialects\mssql\pymssql.py ..\sqlalchemy\dialects\mssql\pyodbc.py ..\sqlalchemy\dialects\mssql\zxjdbc.py ..\sqlalchemy\dialects\mysql\__init__.py ..\sqlalchemy\dialects\mysql\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\dialects\mysql\__pycache__\base.cpython-39.pyc ..\sqlalchemy\dialects\mysql\__pycache__\cymysql.cpython-39.pyc ..\sqlalchemy\dialects\mysql\__pycache__\dml.cpython-39.pyc ..\sqlalchemy\dialects\mysql\__pycache__\enumerated.cpython-39.pyc ..\sqlalchemy\dialects\mysql\__pycache__\gaerdbms.cpython-39.pyc ..\sqlalchemy\dialects\mysql\__pycache__\json.cpython-39.pyc ..\sqlalchemy\dialects\mysql\__pycache__\mysqlconnector.cpython-39.pyc ..\sqlalchemy\dialects\mysql\__pycache__\mysqldb.cpython-39.pyc ..\sqlalchemy\dialects\mysql\__pycache__\oursql.cpython-39.pyc ..\sqlalchemy\dialects\mysql\__pycache__\provision.cpython-39.pyc ..\sqlalchemy\dialects\mysql\__pycache__\pymysql.cpython-39.pyc ..\sqlalchemy\dialects\mysql\__pycache__\pyodbc.cpython-39.pyc ..\sqlalchemy\dialects\mysql\__pycache__\reflection.cpython-39.pyc ..\sqlalchemy\dialects\mysql\__pycache__\types.cpython-39.pyc ..\sqlalchemy\dialects\mysql\__pycache__\zxjdbc.cpython-39.pyc ..\sqlalchemy\dialects\mysql\base.py ..\sqlalchemy\dialects\mysql\cymysql.py ..\sqlalchemy\dialects\mysql\dml.py ..\sqlalchemy\dialects\mysql\enumerated.py ..\sqlalchemy\dialects\mysql\gaerdbms.py ..\sqlalchemy\dialects\mysql\json.py ..\sqlalchemy\dialects\mysql\mysqlconnector.py ..\sqlalchemy\dialects\mysql\mysqldb.py ..\sqlalchemy\dialects\mysql\oursql.py ..\sqlalchemy\dialects\mysql\provision.py ..\sqlalchemy\dialects\mysql\pymysql.py 
..\sqlalchemy\dialects\mysql\pyodbc.py ..\sqlalchemy\dialects\mysql\reflection.py ..\sqlalchemy\dialects\mysql\types.py ..\sqlalchemy\dialects\mysql\zxjdbc.py ..\sqlalchemy\dialects\oracle\__init__.py ..\sqlalchemy\dialects\oracle\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\dialects\oracle\__pycache__\base.cpython-39.pyc ..\sqlalchemy\dialects\oracle\__pycache__\cx_oracle.cpython-39.pyc ..\sqlalchemy\dialects\oracle\__pycache__\provision.cpython-39.pyc ..\sqlalchemy\dialects\oracle\__pycache__\zxjdbc.cpython-39.pyc ..\sqlalchemy\dialects\oracle\base.py ..\sqlalchemy\dialects\oracle\cx_oracle.py ..\sqlalchemy\dialects\oracle\provision.py ..\sqlalchemy\dialects\oracle\zxjdbc.py ..\sqlalchemy\dialects\postgresql\__init__.py ..\sqlalchemy\dialects\postgresql\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\dialects\postgresql\__pycache__\array.cpython-39.pyc ..\sqlalchemy\dialects\postgresql\__pycache__\base.cpython-39.pyc ..\sqlalchemy\dialects\postgresql\__pycache__\dml.cpython-39.pyc ..\sqlalchemy\dialects\postgresql\__pycache__\ext.cpython-39.pyc ..\sqlalchemy\dialects\postgresql\__pycache__\hstore.cpython-39.pyc ..\sqlalchemy\dialects\postgresql\__pycache__\json.cpython-39.pyc ..\sqlalchemy\dialects\postgresql\__pycache__\pg8000.cpython-39.pyc ..\sqlalchemy\dialects\postgresql\__pycache__\provision.cpython-39.pyc ..\sqlalchemy\dialects\postgresql\__pycache__\psycopg2.cpython-39.pyc ..\sqlalchemy\dialects\postgresql\__pycache__\psycopg2cffi.cpython-39.pyc ..\sqlalchemy\dialects\postgresql\__pycache__\pygresql.cpython-39.pyc ..\sqlalchemy\dialects\postgresql\__pycache__\pypostgresql.cpython-39.pyc ..\sqlalchemy\dialects\postgresql\__pycache__\ranges.cpython-39.pyc ..\sqlalchemy\dialects\postgresql\__pycache__\zxjdbc.cpython-39.pyc ..\sqlalchemy\dialects\postgresql\array.py ..\sqlalchemy\dialects\postgresql\base.py ..\sqlalchemy\dialects\postgresql\dml.py ..\sqlalchemy\dialects\postgresql\ext.py ..\sqlalchemy\dialects\postgresql\hstore.py 
..\sqlalchemy\dialects\postgresql\json.py ..\sqlalchemy\dialects\postgresql\pg8000.py ..\sqlalchemy\dialects\postgresql\provision.py ..\sqlalchemy\dialects\postgresql\psycopg2.py ..\sqlalchemy\dialects\postgresql\psycopg2cffi.py ..\sqlalchemy\dialects\postgresql\pygresql.py ..\sqlalchemy\dialects\postgresql\pypostgresql.py ..\sqlalchemy\dialects\postgresql\ranges.py ..\sqlalchemy\dialects\postgresql\zxjdbc.py ..\sqlalchemy\dialects\sqlite\__init__.py ..\sqlalchemy\dialects\sqlite\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\dialects\sqlite\__pycache__\base.cpython-39.pyc ..\sqlalchemy\dialects\sqlite\__pycache__\json.cpython-39.pyc ..\sqlalchemy\dialects\sqlite\__pycache__\provision.cpython-39.pyc ..\sqlalchemy\dialects\sqlite\__pycache__\pysqlcipher.cpython-39.pyc ..\sqlalchemy\dialects\sqlite\__pycache__\pysqlite.cpython-39.pyc ..\sqlalchemy\dialects\sqlite\base.py ..\sqlalchemy\dialects\sqlite\json.py ..\sqlalchemy\dialects\sqlite\provision.py ..\sqlalchemy\dialects\sqlite\pysqlcipher.py ..\sqlalchemy\dialects\sqlite\pysqlite.py ..\sqlalchemy\dialects\sybase\__init__.py ..\sqlalchemy\dialects\sybase\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\dialects\sybase\__pycache__\base.cpython-39.pyc ..\sqlalchemy\dialects\sybase\__pycache__\mxodbc.cpython-39.pyc ..\sqlalchemy\dialects\sybase\__pycache__\pyodbc.cpython-39.pyc ..\sqlalchemy\dialects\sybase\__pycache__\pysybase.cpython-39.pyc ..\sqlalchemy\dialects\sybase\base.py ..\sqlalchemy\dialects\sybase\mxodbc.py ..\sqlalchemy\dialects\sybase\pyodbc.py ..\sqlalchemy\dialects\sybase\pysybase.py ..\sqlalchemy\engine\__init__.py ..\sqlalchemy\engine\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\engine\__pycache__\base.cpython-39.pyc ..\sqlalchemy\engine\__pycache__\default.cpython-39.pyc ..\sqlalchemy\engine\__pycache__\interfaces.cpython-39.pyc ..\sqlalchemy\engine\__pycache__\reflection.cpython-39.pyc ..\sqlalchemy\engine\__pycache__\result.cpython-39.pyc 
..\sqlalchemy\engine\__pycache__\strategies.cpython-39.pyc ..\sqlalchemy\engine\__pycache__\threadlocal.cpython-39.pyc ..\sqlalchemy\engine\__pycache__\url.cpython-39.pyc ..\sqlalchemy\engine\__pycache__\util.cpython-39.pyc ..\sqlalchemy\engine\base.py ..\sqlalchemy\engine\default.py ..\sqlalchemy\engine\interfaces.py ..\sqlalchemy\engine\reflection.py ..\sqlalchemy\engine\result.py ..\sqlalchemy\engine\strategies.py ..\sqlalchemy\engine\threadlocal.py ..\sqlalchemy\engine\url.py ..\sqlalchemy\engine\util.py ..\sqlalchemy\event\__init__.py ..\sqlalchemy\event\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\event\__pycache__\api.cpython-39.pyc ..\sqlalchemy\event\__pycache__\attr.cpython-39.pyc ..\sqlalchemy\event\__pycache__\base.cpython-39.pyc ..\sqlalchemy\event\__pycache__\legacy.cpython-39.pyc ..\sqlalchemy\event\__pycache__\registry.cpython-39.pyc ..\sqlalchemy\event\api.py ..\sqlalchemy\event\attr.py ..\sqlalchemy\event\base.py ..\sqlalchemy\event\legacy.py ..\sqlalchemy\event\registry.py ..\sqlalchemy\events.py ..\sqlalchemy\exc.py ..\sqlalchemy\ext\__init__.py ..\sqlalchemy\ext\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\ext\__pycache__\associationproxy.cpython-39.pyc ..\sqlalchemy\ext\__pycache__\automap.cpython-39.pyc ..\sqlalchemy\ext\__pycache__\baked.cpython-39.pyc ..\sqlalchemy\ext\__pycache__\compiler.cpython-39.pyc ..\sqlalchemy\ext\__pycache__\horizontal_shard.cpython-39.pyc ..\sqlalchemy\ext\__pycache__\hybrid.cpython-39.pyc ..\sqlalchemy\ext\__pycache__\indexable.cpython-39.pyc ..\sqlalchemy\ext\__pycache__\instrumentation.cpython-39.pyc ..\sqlalchemy\ext\__pycache__\mutable.cpython-39.pyc ..\sqlalchemy\ext\__pycache__\orderinglist.cpython-39.pyc ..\sqlalchemy\ext\__pycache__\serializer.cpython-39.pyc ..\sqlalchemy\ext\associationproxy.py ..\sqlalchemy\ext\automap.py ..\sqlalchemy\ext\baked.py ..\sqlalchemy\ext\compiler.py ..\sqlalchemy\ext\declarative\__init__.py ..\sqlalchemy\ext\declarative\__pycache__\__init__.cpython-39.pyc 
..\sqlalchemy\ext\declarative\__pycache__\api.cpython-39.pyc ..\sqlalchemy\ext\declarative\__pycache__\base.cpython-39.pyc ..\sqlalchemy\ext\declarative\__pycache__\clsregistry.cpython-39.pyc ..\sqlalchemy\ext\declarative\api.py ..\sqlalchemy\ext\declarative\base.py ..\sqlalchemy\ext\declarative\clsregistry.py ..\sqlalchemy\ext\horizontal_shard.py ..\sqlalchemy\ext\hybrid.py ..\sqlalchemy\ext\indexable.py ..\sqlalchemy\ext\instrumentation.py ..\sqlalchemy\ext\mutable.py ..\sqlalchemy\ext\orderinglist.py ..\sqlalchemy\ext\serializer.py ..\sqlalchemy\inspection.py ..\sqlalchemy\interfaces.py ..\sqlalchemy\log.py ..\sqlalchemy\orm\__init__.py ..\sqlalchemy\orm\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\attributes.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\base.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\collections.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\dependency.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\deprecated_interfaces.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\descriptor_props.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\dynamic.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\evaluator.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\events.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\exc.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\identity.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\instrumentation.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\interfaces.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\loading.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\mapper.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\path_registry.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\persistence.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\properties.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\query.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\relationships.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\scoping.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\session.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\state.cpython-39.pyc 
..\sqlalchemy\orm\__pycache__\strategies.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\strategy_options.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\sync.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\unitofwork.cpython-39.pyc ..\sqlalchemy\orm\__pycache__\util.cpython-39.pyc ..\sqlalchemy\orm\attributes.py ..\sqlalchemy\orm\base.py ..\sqlalchemy\orm\collections.py ..\sqlalchemy\orm\dependency.py ..\sqlalchemy\orm\deprecated_interfaces.py ..\sqlalchemy\orm\descriptor_props.py ..\sqlalchemy\orm\dynamic.py ..\sqlalchemy\orm\evaluator.py ..\sqlalchemy\orm\events.py ..\sqlalchemy\orm\exc.py ..\sqlalchemy\orm\identity.py ..\sqlalchemy\orm\instrumentation.py ..\sqlalchemy\orm\interfaces.py ..\sqlalchemy\orm\loading.py ..\sqlalchemy\orm\mapper.py ..\sqlalchemy\orm\path_registry.py ..\sqlalchemy\orm\persistence.py ..\sqlalchemy\orm\properties.py ..\sqlalchemy\orm\query.py ..\sqlalchemy\orm\relationships.py ..\sqlalchemy\orm\scoping.py ..\sqlalchemy\orm\session.py ..\sqlalchemy\orm\state.py ..\sqlalchemy\orm\strategies.py ..\sqlalchemy\orm\strategy_options.py ..\sqlalchemy\orm\sync.py ..\sqlalchemy\orm\unitofwork.py ..\sqlalchemy\orm\util.py ..\sqlalchemy\pool\__init__.py ..\sqlalchemy\pool\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\pool\__pycache__\base.cpython-39.pyc ..\sqlalchemy\pool\__pycache__\dbapi_proxy.cpython-39.pyc ..\sqlalchemy\pool\__pycache__\impl.cpython-39.pyc ..\sqlalchemy\pool\base.py ..\sqlalchemy\pool\dbapi_proxy.py ..\sqlalchemy\pool\impl.py ..\sqlalchemy\processors.py ..\sqlalchemy\schema.py ..\sqlalchemy\sql\__init__.py ..\sqlalchemy\sql\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\annotation.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\base.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\compiler.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\crud.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\ddl.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\default_comparator.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\dml.cpython-39.pyc 
..\sqlalchemy\sql\__pycache__\elements.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\expression.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\functions.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\naming.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\operators.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\schema.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\selectable.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\sqltypes.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\type_api.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\util.cpython-39.pyc ..\sqlalchemy\sql\__pycache__\visitors.cpython-39.pyc ..\sqlalchemy\sql\annotation.py ..\sqlalchemy\sql\base.py ..\sqlalchemy\sql\compiler.py ..\sqlalchemy\sql\crud.py ..\sqlalchemy\sql\ddl.py ..\sqlalchemy\sql\default_comparator.py ..\sqlalchemy\sql\dml.py ..\sqlalchemy\sql\elements.py ..\sqlalchemy\sql\expression.py ..\sqlalchemy\sql\functions.py ..\sqlalchemy\sql\naming.py ..\sqlalchemy\sql\operators.py ..\sqlalchemy\sql\schema.py ..\sqlalchemy\sql\selectable.py ..\sqlalchemy\sql\sqltypes.py ..\sqlalchemy\sql\type_api.py ..\sqlalchemy\sql\util.py ..\sqlalchemy\sql\visitors.py ..\sqlalchemy\testing\__init__.py ..\sqlalchemy\testing\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\testing\__pycache__\assertions.cpython-39.pyc ..\sqlalchemy\testing\__pycache__\assertsql.cpython-39.pyc ..\sqlalchemy\testing\__pycache__\config.cpython-39.pyc ..\sqlalchemy\testing\__pycache__\engines.cpython-39.pyc ..\sqlalchemy\testing\__pycache__\entities.cpython-39.pyc ..\sqlalchemy\testing\__pycache__\exclusions.cpython-39.pyc ..\sqlalchemy\testing\__pycache__\fixtures.cpython-39.pyc ..\sqlalchemy\testing\__pycache__\mock.cpython-39.pyc ..\sqlalchemy\testing\__pycache__\pickleable.cpython-39.pyc ..\sqlalchemy\testing\__pycache__\profiling.cpython-39.pyc ..\sqlalchemy\testing\__pycache__\provision.cpython-39.pyc ..\sqlalchemy\testing\__pycache__\replay_fixture.cpython-39.pyc ..\sqlalchemy\testing\__pycache__\requirements.cpython-39.pyc 
..\sqlalchemy\testing\__pycache__\schema.cpython-39.pyc ..\sqlalchemy\testing\__pycache__\util.cpython-39.pyc ..\sqlalchemy\testing\__pycache__\warnings.cpython-39.pyc ..\sqlalchemy\testing\assertions.py ..\sqlalchemy\testing\assertsql.py ..\sqlalchemy\testing\config.py ..\sqlalchemy\testing\engines.py ..\sqlalchemy\testing\entities.py ..\sqlalchemy\testing\exclusions.py ..\sqlalchemy\testing\fixtures.py ..\sqlalchemy\testing\mock.py ..\sqlalchemy\testing\pickleable.py ..\sqlalchemy\testing\plugin\__init__.py ..\sqlalchemy\testing\plugin\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\testing\plugin\__pycache__\bootstrap.cpython-39.pyc ..\sqlalchemy\testing\plugin\__pycache__\plugin_base.cpython-39.pyc ..\sqlalchemy\testing\plugin\__pycache__\pytestplugin.cpython-39.pyc ..\sqlalchemy\testing\plugin\bootstrap.py ..\sqlalchemy\testing\plugin\plugin_base.py ..\sqlalchemy\testing\plugin\pytestplugin.py ..\sqlalchemy\testing\profiling.py ..\sqlalchemy\testing\provision.py ..\sqlalchemy\testing\replay_fixture.py ..\sqlalchemy\testing\requirements.py ..\sqlalchemy\testing\schema.py ..\sqlalchemy\testing\suite\__init__.py ..\sqlalchemy\testing\suite\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\testing\suite\__pycache__\test_cte.cpython-39.pyc ..\sqlalchemy\testing\suite\__pycache__\test_ddl.cpython-39.pyc ..\sqlalchemy\testing\suite\__pycache__\test_dialect.cpython-39.pyc ..\sqlalchemy\testing\suite\__pycache__\test_insert.cpython-39.pyc ..\sqlalchemy\testing\suite\__pycache__\test_reflection.cpython-39.pyc ..\sqlalchemy\testing\suite\__pycache__\test_results.cpython-39.pyc ..\sqlalchemy\testing\suite\__pycache__\test_select.cpython-39.pyc ..\sqlalchemy\testing\suite\__pycache__\test_sequence.cpython-39.pyc ..\sqlalchemy\testing\suite\__pycache__\test_types.cpython-39.pyc ..\sqlalchemy\testing\suite\__pycache__\test_update_delete.cpython-39.pyc ..\sqlalchemy\testing\suite\test_cte.py ..\sqlalchemy\testing\suite\test_ddl.py ..\sqlalchemy\testing\suite\test_dialect.py 
..\sqlalchemy\testing\suite\test_insert.py ..\sqlalchemy\testing\suite\test_reflection.py ..\sqlalchemy\testing\suite\test_results.py ..\sqlalchemy\testing\suite\test_select.py ..\sqlalchemy\testing\suite\test_sequence.py ..\sqlalchemy\testing\suite\test_types.py ..\sqlalchemy\testing\suite\test_update_delete.py ..\sqlalchemy\testing\util.py ..\sqlalchemy\testing\warnings.py ..\sqlalchemy\types.py ..\sqlalchemy\util\__init__.py ..\sqlalchemy\util\__pycache__\__init__.cpython-39.pyc ..\sqlalchemy\util\__pycache__\_collections.cpython-39.pyc ..\sqlalchemy\util\__pycache__\compat.cpython-39.pyc ..\sqlalchemy\util\__pycache__\deprecations.cpython-39.pyc ..\sqlalchemy\util\__pycache__\langhelpers.cpython-39.pyc ..\sqlalchemy\util\__pycache__\queue.cpython-39.pyc ..\sqlalchemy\util\__pycache__\topological.cpython-39.pyc ..\sqlalchemy\util\_collections.py ..\sqlalchemy\util\compat.py ..\sqlalchemy\util\deprecations.py ..\sqlalchemy\util\langhelpers.py ..\sqlalchemy\util\queue.py ..\sqlalchemy\util\topological.py PKG-INFO SOURCES.txt dependency_links.txt requires.txt top_level.txt
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/SQLAlchemy-1.3.18-py3.9.egg-info/SOURCES.txt
AUTHORS CHANGES LICENSE MANIFEST.in README.dialects.rst README.rst README.unittests.rst setup.cfg setup.py tox.ini doc/contents.html doc/copyright.html doc/errors.html doc/genindex.html doc/glossary.html doc/index.html doc/intro.html doc/notfound.html doc/search.html doc/searchindex.js doc/_images/sqla_arch_small.png doc/_images/sqla_engine_arch.png doc/_static/basic.css doc/_static/changelog.css doc/_static/detectmobile.js doc/_static/docs.css doc/_static/doctools.js doc/_static/documentation_options.js doc/_static/dragons.png doc/_static/file.png doc/_static/init.js doc/_static/jquery-3.5.1.js doc/_static/jquery.js doc/_static/language_data.js doc/_static/minus.png doc/_static/plus.png doc/_static/pygments.css doc/_static/searchtools.js doc/_static/sphinx_paramlinks.css doc/_static/underscore-1.3.1.js doc/_static/underscore.js doc/build/Makefile doc/build/conf.py doc/build/contents.rst doc/build/copyright.rst doc/build/errors.rst doc/build/glossary.rst doc/build/index.rst doc/build/intro.rst doc/build/requirements.txt doc/build/sqla_arch_small.png doc/build/changelog/README.txt doc/build/changelog/changelog_01.rst doc/build/changelog/changelog_02.rst doc/build/changelog/changelog_03.rst doc/build/changelog/changelog_04.rst doc/build/changelog/changelog_05.rst doc/build/changelog/changelog_06.rst doc/build/changelog/changelog_07.rst doc/build/changelog/changelog_08.rst doc/build/changelog/changelog_09.rst doc/build/changelog/changelog_10.rst doc/build/changelog/changelog_11.rst doc/build/changelog/changelog_12.rst doc/build/changelog/changelog_13.rst doc/build/changelog/index.rst doc/build/changelog/migration_04.rst doc/build/changelog/migration_05.rst doc/build/changelog/migration_06.rst doc/build/changelog/migration_07.rst doc/build/changelog/migration_08.rst doc/build/changelog/migration_09.rst doc/build/changelog/migration_10.rst doc/build/changelog/migration_11.rst doc/build/changelog/migration_12.rst doc/build/changelog/migration_13.rst 
doc/build/changelog/unreleased_11/README.txt doc/build/changelog/unreleased_12/README.txt doc/build/changelog/unreleased_13/README.txt doc/build/core/api_basics.rst doc/build/core/compiler.rst doc/build/core/connections.rst doc/build/core/constraints.rst doc/build/core/custom_types.rst doc/build/core/ddl.rst doc/build/core/defaults.rst doc/build/core/dml.rst doc/build/core/engines.rst doc/build/core/engines_connections.rst doc/build/core/event.rst doc/build/core/events.rst doc/build/core/exceptions.rst doc/build/core/expression_api.rst doc/build/core/functions.rst doc/build/core/index.rst doc/build/core/inspection.rst doc/build/core/interfaces.rst doc/build/core/internals.rst doc/build/core/metadata.rst doc/build/core/pooling.rst doc/build/core/reflection.rst doc/build/core/schema.rst doc/build/core/selectable.rst doc/build/core/serializer.rst doc/build/core/sqla_engine_arch.png doc/build/core/sqlelement.rst doc/build/core/tutorial.rst doc/build/core/type_api.rst doc/build/core/type_basics.rst doc/build/core/types.rst doc/build/core/visitors.rst doc/build/dialects/firebird.rst doc/build/dialects/index.rst doc/build/dialects/mssql.rst doc/build/dialects/mysql.rst doc/build/dialects/oracle.rst doc/build/dialects/postgresql.rst doc/build/dialects/sqlite.rst doc/build/dialects/sybase.rst doc/build/faq/connections.rst doc/build/faq/index.rst doc/build/faq/metadata_schema.rst doc/build/faq/ormconfiguration.rst doc/build/faq/performance.rst doc/build/faq/sessions.rst doc/build/faq/sqlexpressions.rst doc/build/orm/backref.rst doc/build/orm/basic_relationships.rst doc/build/orm/cascades.rst doc/build/orm/classical.rst doc/build/orm/collections.rst doc/build/orm/composites.rst doc/build/orm/constructors.rst doc/build/orm/contextual.rst doc/build/orm/deprecated.rst doc/build/orm/events.rst doc/build/orm/examples.rst doc/build/orm/exceptions.rst doc/build/orm/extending.rst doc/build/orm/index.rst doc/build/orm/inheritance.rst doc/build/orm/inheritance_loading.rst 
doc/build/orm/internals.rst doc/build/orm/join_conditions.rst doc/build/orm/loading.rst doc/build/orm/loading_columns.rst doc/build/orm/loading_objects.rst doc/build/orm/loading_relationships.rst doc/build/orm/mapped_attributes.rst doc/build/orm/mapped_sql_expr.rst doc/build/orm/mapper_config.rst doc/build/orm/mapping_api.rst doc/build/orm/mapping_columns.rst doc/build/orm/mapping_styles.rst doc/build/orm/nonstandard_mappings.rst doc/build/orm/persistence_techniques.rst doc/build/orm/query.rst doc/build/orm/relationship_api.rst doc/build/orm/relationship_persistence.rst doc/build/orm/relationships.rst doc/build/orm/scalar_mapping.rst doc/build/orm/self_referential.rst doc/build/orm/session.rst doc/build/orm/session_api.rst doc/build/orm/session_basics.rst doc/build/orm/session_events.rst doc/build/orm/session_state_management.rst doc/build/orm/session_transaction.rst doc/build/orm/tutorial.rst doc/build/orm/versioning.rst doc/build/orm/extensions/associationproxy.rst doc/build/orm/extensions/automap.rst doc/build/orm/extensions/baked.rst doc/build/orm/extensions/horizontal_shard.rst doc/build/orm/extensions/hybrid.rst doc/build/orm/extensions/index.rst doc/build/orm/extensions/indexable.rst doc/build/orm/extensions/instrumentation.rst doc/build/orm/extensions/mutable.rst doc/build/orm/extensions/orderinglist.rst doc/build/orm/extensions/declarative/api.rst doc/build/orm/extensions/declarative/basic_use.rst doc/build/orm/extensions/declarative/index.rst doc/build/orm/extensions/declarative/inheritance.rst doc/build/orm/extensions/declarative/mixins.rst doc/build/orm/extensions/declarative/relationships.rst doc/build/orm/extensions/declarative/table_config.rst doc/build/texinputs/Makefile doc/build/texinputs/sphinx.sty doc/changelog/changelog_01.html doc/changelog/changelog_02.html doc/changelog/changelog_03.html doc/changelog/changelog_04.html doc/changelog/changelog_05.html doc/changelog/changelog_06.html doc/changelog/changelog_07.html 
doc/changelog/changelog_08.html doc/changelog/changelog_09.html doc/changelog/changelog_10.html doc/changelog/changelog_11.html doc/changelog/changelog_12.html doc/changelog/changelog_13.html doc/changelog/index.html doc/changelog/migration_04.html doc/changelog/migration_05.html doc/changelog/migration_06.html doc/changelog/migration_07.html doc/changelog/migration_08.html doc/changelog/migration_09.html doc/changelog/migration_10.html doc/changelog/migration_11.html doc/changelog/migration_12.html doc/changelog/migration_13.html doc/core/api_basics.html doc/core/compiler.html doc/core/connections.html doc/core/constraints.html doc/core/custom_types.html doc/core/ddl.html doc/core/defaults.html doc/core/dml.html doc/core/engines.html doc/core/engines_connections.html doc/core/event.html doc/core/events.html doc/core/exceptions.html doc/core/expression_api.html doc/core/functions.html doc/core/index.html doc/core/inspection.html doc/core/interfaces.html doc/core/internals.html doc/core/metadata.html doc/core/pooling.html doc/core/reflection.html doc/core/schema.html doc/core/selectable.html doc/core/serializer.html doc/core/sqlelement.html doc/core/tutorial.html doc/core/type_api.html doc/core/type_basics.html doc/core/types.html doc/core/visitors.html doc/dialects/firebird.html doc/dialects/index.html doc/dialects/mssql.html doc/dialects/mysql.html doc/dialects/oracle.html doc/dialects/postgresql.html doc/dialects/sqlite.html doc/dialects/sybase.html doc/faq/connections.html doc/faq/index.html doc/faq/metadata_schema.html doc/faq/ormconfiguration.html doc/faq/performance.html doc/faq/sessions.html doc/faq/sqlexpressions.html doc/orm/backref.html doc/orm/basic_relationships.html doc/orm/cascades.html doc/orm/classical.html doc/orm/collections.html doc/orm/composites.html doc/orm/constructors.html doc/orm/contextual.html doc/orm/deprecated.html doc/orm/events.html doc/orm/examples.html doc/orm/exceptions.html doc/orm/extending.html doc/orm/index.html 
doc/orm/inheritance.html doc/orm/inheritance_loading.html doc/orm/internals.html doc/orm/join_conditions.html doc/orm/loading.html doc/orm/loading_columns.html doc/orm/loading_objects.html doc/orm/loading_relationships.html doc/orm/mapped_attributes.html doc/orm/mapped_sql_expr.html doc/orm/mapper_config.html doc/orm/mapping_api.html doc/orm/mapping_columns.html doc/orm/mapping_styles.html doc/orm/nonstandard_mappings.html doc/orm/persistence_techniques.html doc/orm/query.html doc/orm/relationship_api.html doc/orm/relationship_persistence.html doc/orm/relationships.html doc/orm/scalar_mapping.html doc/orm/self_referential.html doc/orm/session.html doc/orm/session_api.html doc/orm/session_basics.html doc/orm/session_events.html doc/orm/session_state_management.html doc/orm/session_transaction.html doc/orm/tutorial.html doc/orm/versioning.html doc/orm/extensions/associationproxy.html doc/orm/extensions/automap.html doc/orm/extensions/baked.html doc/orm/extensions/horizontal_shard.html doc/orm/extensions/hybrid.html doc/orm/extensions/index.html doc/orm/extensions/indexable.html doc/orm/extensions/instrumentation.html doc/orm/extensions/mutable.html doc/orm/extensions/orderinglist.html doc/orm/extensions/declarative/api.html doc/orm/extensions/declarative/basic_use.html doc/orm/extensions/declarative/index.html doc/orm/extensions/declarative/inheritance.html doc/orm/extensions/declarative/mixins.html doc/orm/extensions/declarative/relationships.html doc/orm/extensions/declarative/table_config.html examples/__init__.py examples/adjacency_list/__init__.py examples/adjacency_list/adjacency_list.py examples/association/__init__.py examples/association/basic_association.py examples/association/dict_of_sets_with_default.py examples/association/proxied_association.py examples/custom_attributes/__init__.py examples/custom_attributes/active_column_defaults.py examples/custom_attributes/custom_management.py examples/custom_attributes/listen_for_events.py 
examples/dogpile_caching/__init__.py examples/dogpile_caching/advanced.py examples/dogpile_caching/caching_query.py examples/dogpile_caching/environment.py examples/dogpile_caching/fixture_data.py examples/dogpile_caching/helloworld.py examples/dogpile_caching/local_session_caching.py examples/dogpile_caching/model.py examples/dogpile_caching/relationship_caching.py examples/dynamic_dict/__init__.py examples/dynamic_dict/dynamic_dict.py examples/elementtree/__init__.py examples/elementtree/adjacency_list.py examples/elementtree/optimized_al.py examples/elementtree/pickle_type.py examples/elementtree/test.xml examples/elementtree/test2.xml examples/elementtree/test3.xml examples/generic_associations/__init__.py examples/generic_associations/discriminator_on_association.py examples/generic_associations/generic_fk.py examples/generic_associations/table_per_association.py examples/generic_associations/table_per_related.py examples/graphs/__init__.py examples/graphs/directed_graph.py examples/inheritance/__init__.py examples/inheritance/concrete.py examples/inheritance/joined.py examples/inheritance/single.py examples/join_conditions/__init__.py examples/join_conditions/cast.py examples/join_conditions/threeway.py examples/large_collection/__init__.py examples/large_collection/large_collection.py examples/materialized_paths/__init__.py examples/materialized_paths/materialized_paths.py examples/nested_sets/__init__.py examples/nested_sets/nested_sets.py examples/performance/__init__.py examples/performance/__main__.py examples/performance/bulk_inserts.py examples/performance/bulk_updates.py examples/performance/large_resultsets.py examples/performance/short_selects.py examples/performance/single_inserts.py examples/postgis/__init__.py examples/postgis/postgis.py examples/sharding/__init__.py examples/sharding/attribute_shard.py examples/space_invaders/__init__.py examples/space_invaders/space_invaders.py examples/versioned_history/__init__.py 
examples/versioned_history/history_meta.py examples/versioned_history/test_versioning.py examples/versioned_rows/__init__.py examples/versioned_rows/versioned_map.py examples/versioned_rows/versioned_rows.py examples/versioned_rows/versioned_rows_w_versionid.py examples/versioned_rows/versioned_update_old_row.py examples/vertical/__init__.py examples/vertical/dictlike-polymorphic.py examples/vertical/dictlike.py lib/SQLAlchemy.egg-info/PKG-INFO lib/SQLAlchemy.egg-info/SOURCES.txt lib/SQLAlchemy.egg-info/dependency_links.txt lib/SQLAlchemy.egg-info/requires.txt lib/SQLAlchemy.egg-info/top_level.txt lib/sqlalchemy/__init__.py lib/sqlalchemy/events.py lib/sqlalchemy/exc.py lib/sqlalchemy/inspection.py lib/sqlalchemy/interfaces.py lib/sqlalchemy/log.py lib/sqlalchemy/processors.py lib/sqlalchemy/schema.py lib/sqlalchemy/types.py lib/sqlalchemy/cextension/processors.c lib/sqlalchemy/cextension/resultproxy.c lib/sqlalchemy/cextension/utils.c lib/sqlalchemy/connectors/__init__.py lib/sqlalchemy/connectors/mxodbc.py lib/sqlalchemy/connectors/pyodbc.py lib/sqlalchemy/connectors/zxJDBC.py lib/sqlalchemy/databases/__init__.py lib/sqlalchemy/dialects/__init__.py lib/sqlalchemy/dialects/type_migration_guidelines.txt lib/sqlalchemy/dialects/firebird/__init__.py lib/sqlalchemy/dialects/firebird/base.py lib/sqlalchemy/dialects/firebird/fdb.py lib/sqlalchemy/dialects/firebird/kinterbasdb.py lib/sqlalchemy/dialects/mssql/__init__.py lib/sqlalchemy/dialects/mssql/adodbapi.py lib/sqlalchemy/dialects/mssql/base.py lib/sqlalchemy/dialects/mssql/information_schema.py lib/sqlalchemy/dialects/mssql/mxodbc.py lib/sqlalchemy/dialects/mssql/provision.py lib/sqlalchemy/dialects/mssql/pymssql.py lib/sqlalchemy/dialects/mssql/pyodbc.py lib/sqlalchemy/dialects/mssql/zxjdbc.py lib/sqlalchemy/dialects/mysql/__init__.py lib/sqlalchemy/dialects/mysql/base.py lib/sqlalchemy/dialects/mysql/cymysql.py lib/sqlalchemy/dialects/mysql/dml.py lib/sqlalchemy/dialects/mysql/enumerated.py 
lib/sqlalchemy/dialects/mysql/gaerdbms.py lib/sqlalchemy/dialects/mysql/json.py lib/sqlalchemy/dialects/mysql/mysqlconnector.py lib/sqlalchemy/dialects/mysql/mysqldb.py lib/sqlalchemy/dialects/mysql/oursql.py lib/sqlalchemy/dialects/mysql/provision.py lib/sqlalchemy/dialects/mysql/pymysql.py lib/sqlalchemy/dialects/mysql/pyodbc.py lib/sqlalchemy/dialects/mysql/reflection.py lib/sqlalchemy/dialects/mysql/types.py lib/sqlalchemy/dialects/mysql/zxjdbc.py lib/sqlalchemy/dialects/oracle/__init__.py lib/sqlalchemy/dialects/oracle/base.py lib/sqlalchemy/dialects/oracle/cx_oracle.py lib/sqlalchemy/dialects/oracle/provision.py lib/sqlalchemy/dialects/oracle/zxjdbc.py lib/sqlalchemy/dialects/postgresql/__init__.py lib/sqlalchemy/dialects/postgresql/array.py lib/sqlalchemy/dialects/postgresql/base.py lib/sqlalchemy/dialects/postgresql/dml.py lib/sqlalchemy/dialects/postgresql/ext.py lib/sqlalchemy/dialects/postgresql/hstore.py lib/sqlalchemy/dialects/postgresql/json.py lib/sqlalchemy/dialects/postgresql/pg8000.py lib/sqlalchemy/dialects/postgresql/provision.py lib/sqlalchemy/dialects/postgresql/psycopg2.py lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py lib/sqlalchemy/dialects/postgresql/pygresql.py lib/sqlalchemy/dialects/postgresql/pypostgresql.py lib/sqlalchemy/dialects/postgresql/ranges.py lib/sqlalchemy/dialects/postgresql/zxjdbc.py lib/sqlalchemy/dialects/sqlite/__init__.py lib/sqlalchemy/dialects/sqlite/base.py lib/sqlalchemy/dialects/sqlite/json.py lib/sqlalchemy/dialects/sqlite/provision.py lib/sqlalchemy/dialects/sqlite/pysqlcipher.py lib/sqlalchemy/dialects/sqlite/pysqlite.py lib/sqlalchemy/dialects/sybase/__init__.py lib/sqlalchemy/dialects/sybase/base.py lib/sqlalchemy/dialects/sybase/mxodbc.py lib/sqlalchemy/dialects/sybase/pyodbc.py lib/sqlalchemy/dialects/sybase/pysybase.py lib/sqlalchemy/engine/__init__.py lib/sqlalchemy/engine/base.py lib/sqlalchemy/engine/default.py lib/sqlalchemy/engine/interfaces.py lib/sqlalchemy/engine/reflection.py 
lib/sqlalchemy/engine/result.py lib/sqlalchemy/engine/strategies.py lib/sqlalchemy/engine/threadlocal.py lib/sqlalchemy/engine/url.py lib/sqlalchemy/engine/util.py lib/sqlalchemy/event/__init__.py lib/sqlalchemy/event/api.py lib/sqlalchemy/event/attr.py lib/sqlalchemy/event/base.py lib/sqlalchemy/event/legacy.py lib/sqlalchemy/event/registry.py lib/sqlalchemy/ext/__init__.py lib/sqlalchemy/ext/associationproxy.py lib/sqlalchemy/ext/automap.py lib/sqlalchemy/ext/baked.py lib/sqlalchemy/ext/compiler.py lib/sqlalchemy/ext/horizontal_shard.py lib/sqlalchemy/ext/hybrid.py lib/sqlalchemy/ext/indexable.py lib/sqlalchemy/ext/instrumentation.py lib/sqlalchemy/ext/mutable.py lib/sqlalchemy/ext/orderinglist.py lib/sqlalchemy/ext/serializer.py lib/sqlalchemy/ext/declarative/__init__.py lib/sqlalchemy/ext/declarative/api.py lib/sqlalchemy/ext/declarative/base.py lib/sqlalchemy/ext/declarative/clsregistry.py lib/sqlalchemy/orm/__init__.py lib/sqlalchemy/orm/attributes.py lib/sqlalchemy/orm/base.py lib/sqlalchemy/orm/collections.py lib/sqlalchemy/orm/dependency.py lib/sqlalchemy/orm/deprecated_interfaces.py lib/sqlalchemy/orm/descriptor_props.py lib/sqlalchemy/orm/dynamic.py lib/sqlalchemy/orm/evaluator.py lib/sqlalchemy/orm/events.py lib/sqlalchemy/orm/exc.py lib/sqlalchemy/orm/identity.py lib/sqlalchemy/orm/instrumentation.py lib/sqlalchemy/orm/interfaces.py lib/sqlalchemy/orm/loading.py lib/sqlalchemy/orm/mapper.py lib/sqlalchemy/orm/path_registry.py lib/sqlalchemy/orm/persistence.py lib/sqlalchemy/orm/properties.py lib/sqlalchemy/orm/query.py lib/sqlalchemy/orm/relationships.py lib/sqlalchemy/orm/scoping.py lib/sqlalchemy/orm/session.py lib/sqlalchemy/orm/state.py lib/sqlalchemy/orm/strategies.py lib/sqlalchemy/orm/strategy_options.py lib/sqlalchemy/orm/sync.py lib/sqlalchemy/orm/unitofwork.py lib/sqlalchemy/orm/util.py lib/sqlalchemy/pool/__init__.py lib/sqlalchemy/pool/base.py lib/sqlalchemy/pool/dbapi_proxy.py lib/sqlalchemy/pool/impl.py lib/sqlalchemy/sql/__init__.py 
lib/sqlalchemy/sql/annotation.py lib/sqlalchemy/sql/base.py lib/sqlalchemy/sql/compiler.py lib/sqlalchemy/sql/crud.py lib/sqlalchemy/sql/ddl.py lib/sqlalchemy/sql/default_comparator.py lib/sqlalchemy/sql/dml.py lib/sqlalchemy/sql/elements.py lib/sqlalchemy/sql/expression.py lib/sqlalchemy/sql/functions.py lib/sqlalchemy/sql/naming.py lib/sqlalchemy/sql/operators.py lib/sqlalchemy/sql/schema.py lib/sqlalchemy/sql/selectable.py lib/sqlalchemy/sql/sqltypes.py lib/sqlalchemy/sql/type_api.py lib/sqlalchemy/sql/util.py lib/sqlalchemy/sql/visitors.py lib/sqlalchemy/testing/__init__.py lib/sqlalchemy/testing/assertions.py lib/sqlalchemy/testing/assertsql.py lib/sqlalchemy/testing/config.py lib/sqlalchemy/testing/engines.py lib/sqlalchemy/testing/entities.py lib/sqlalchemy/testing/exclusions.py lib/sqlalchemy/testing/fixtures.py lib/sqlalchemy/testing/mock.py lib/sqlalchemy/testing/pickleable.py lib/sqlalchemy/testing/profiling.py lib/sqlalchemy/testing/provision.py lib/sqlalchemy/testing/replay_fixture.py lib/sqlalchemy/testing/requirements.py lib/sqlalchemy/testing/schema.py lib/sqlalchemy/testing/util.py lib/sqlalchemy/testing/warnings.py lib/sqlalchemy/testing/plugin/__init__.py lib/sqlalchemy/testing/plugin/bootstrap.py lib/sqlalchemy/testing/plugin/plugin_base.py lib/sqlalchemy/testing/plugin/pytestplugin.py lib/sqlalchemy/testing/suite/__init__.py lib/sqlalchemy/testing/suite/test_cte.py lib/sqlalchemy/testing/suite/test_ddl.py lib/sqlalchemy/testing/suite/test_dialect.py lib/sqlalchemy/testing/suite/test_insert.py lib/sqlalchemy/testing/suite/test_reflection.py lib/sqlalchemy/testing/suite/test_results.py lib/sqlalchemy/testing/suite/test_select.py lib/sqlalchemy/testing/suite/test_sequence.py lib/sqlalchemy/testing/suite/test_types.py lib/sqlalchemy/testing/suite/test_update_delete.py lib/sqlalchemy/util/__init__.py lib/sqlalchemy/util/_collections.py lib/sqlalchemy/util/compat.py lib/sqlalchemy/util/deprecations.py lib/sqlalchemy/util/langhelpers.py 
lib/sqlalchemy/util/queue.py lib/sqlalchemy/util/topological.py test/__init__.py test/binary_data_one.dat test/binary_data_two.dat test/conftest.py test/requirements.py test/aaa_profiling/__init__.py test/aaa_profiling/test_compiler.py test/aaa_profiling/test_memusage.py test/aaa_profiling/test_misc.py test/aaa_profiling/test_orm.py test/aaa_profiling/test_pool.py test/aaa_profiling/test_resultset.py test/aaa_profiling/test_zoomark.py test/aaa_profiling/test_zoomark_orm.py test/base/__init__.py test/base/test_dependency.py test/base/test_events.py test/base/test_except.py test/base/test_inspect.py test/base/test_tutorials.py test/base/test_utils.py test/base/test_warnings.py test/dialect/__init__.py test/dialect/test_all.py test/dialect/test_firebird.py test/dialect/test_mxodbc.py test/dialect/test_pyodbc.py test/dialect/test_sqlite.py test/dialect/test_suite.py test/dialect/test_sybase.py test/dialect/mssql/__init__.py test/dialect/mssql/test_compiler.py test/dialect/mssql/test_engine.py test/dialect/mssql/test_query.py test/dialect/mssql/test_reflection.py test/dialect/mssql/test_types.py test/dialect/mysql/__init__.py test/dialect/mysql/test_compiler.py test/dialect/mysql/test_dialect.py test/dialect/mysql/test_for_update.py test/dialect/mysql/test_on_duplicate.py test/dialect/mysql/test_query.py test/dialect/mysql/test_reflection.py test/dialect/mysql/test_types.py test/dialect/oracle/__init__.py test/dialect/oracle/test_compiler.py test/dialect/oracle/test_dialect.py test/dialect/oracle/test_reflection.py test/dialect/oracle/test_types.py test/dialect/postgresql/__init__.py test/dialect/postgresql/test_compiler.py test/dialect/postgresql/test_dialect.py test/dialect/postgresql/test_on_conflict.py test/dialect/postgresql/test_query.py test/dialect/postgresql/test_reflection.py test/dialect/postgresql/test_types.py test/engine/__init__.py test/engine/test_bind.py test/engine/test_ddlevents.py test/engine/test_deprecations.py test/engine/test_execute.py 
test/engine/test_logging.py test/engine/test_parseconnect.py test/engine/test_pool.py test/engine/test_processors.py test/engine/test_reconnect.py test/engine/test_reflection.py test/engine/test_transaction.py test/ext/__init__.py test/ext/test_associationproxy.py test/ext/test_automap.py test/ext/test_baked.py test/ext/test_compiler.py test/ext/test_deprecations.py test/ext/test_extendedattr.py test/ext/test_horizontal_shard.py test/ext/test_hybrid.py test/ext/test_indexable.py test/ext/test_mutable.py test/ext/test_orderinglist.py test/ext/test_serializer.py test/ext/declarative/__init__.py test/ext/declarative/test_basic.py test/ext/declarative/test_clsregistry.py test/ext/declarative/test_concurrency.py test/ext/declarative/test_inheritance.py test/ext/declarative/test_mixin.py test/ext/declarative/test_reflection.py test/orm/__init__.py test/orm/_fixtures.py test/orm/test_ac_relationships.py test/orm/test_association.py test/orm/test_assorted_eager.py test/orm/test_attributes.py test/orm/test_backref_mutations.py test/orm/test_bind.py test/orm/test_bulk.py test/orm/test_bundle.py test/orm/test_cascade.py test/orm/test_collection.py test/orm/test_compile.py test/orm/test_composites.py test/orm/test_cycles.py test/orm/test_default_strategies.py test/orm/test_defaults.py test/orm/test_deferred.py test/orm/test_deprecations.py test/orm/test_descriptor.py test/orm/test_dynamic.py test/orm/test_eager_relations.py test/orm/test_evaluator.py test/orm/test_events.py test/orm/test_expire.py test/orm/test_froms.py test/orm/test_generative.py test/orm/test_hasparent.py test/orm/test_immediate_load.py test/orm/test_inspect.py test/orm/test_instrumentation.py test/orm/test_joins.py test/orm/test_lazy_relations.py test/orm/test_load_on_fks.py test/orm/test_loading.py test/orm/test_lockmode.py test/orm/test_manytomany.py test/orm/test_mapper.py test/orm/test_merge.py test/orm/test_naturalpks.py test/orm/test_of_type.py test/orm/test_onetoone.py test/orm/test_options.py 
test/orm/test_pickled.py test/orm/test_query.py test/orm/test_rel_fn.py test/orm/test_relationships.py test/orm/test_scoping.py test/orm/test_selectable.py test/orm/test_selectin_relations.py test/orm/test_session.py test/orm/test_subquery_relations.py test/orm/test_sync.py test/orm/test_transaction.py test/orm/test_unitofwork.py test/orm/test_unitofworkv2.py test/orm/test_update_delete.py test/orm/test_utils.py test/orm/test_validators.py test/orm/test_versioning.py test/orm/inheritance/__init__.py test/orm/inheritance/_poly_fixtures.py test/orm/inheritance/test_abc_inheritance.py test/orm/inheritance/test_abc_polymorphic.py test/orm/inheritance/test_assorted_poly.py test/orm/inheritance/test_basic.py test/orm/inheritance/test_concrete.py test/orm/inheritance/test_magazine.py test/orm/inheritance/test_manytomany.py test/orm/inheritance/test_poly_linked_list.py test/orm/inheritance/test_poly_loading.py test/orm/inheritance/test_poly_persistence.py test/orm/inheritance/test_polymorphic_rel.py test/orm/inheritance/test_productspec.py test/orm/inheritance/test_relationship.py test/orm/inheritance/test_selects.py test/orm/inheritance/test_single.py test/orm/inheritance/test_with_poly.py test/perf/invalidate_stresstest.py test/perf/orm2010.py test/sql/__init__.py test/sql/test_case_statement.py test/sql/test_compiler.py test/sql/test_computed.py test/sql/test_constraints.py test/sql/test_cte.py test/sql/test_ddlemit.py test/sql/test_defaults.py test/sql/test_delete.py test/sql/test_deprecations.py test/sql/test_functions.py test/sql/test_generative.py test/sql/test_insert.py test/sql/test_insert_exec.py test/sql/test_inspect.py test/sql/test_join_rewriting.py test/sql/test_labels.py test/sql/test_lateral.py test/sql/test_metadata.py test/sql/test_operators.py test/sql/test_query.py test/sql/test_quote.py test/sql/test_resultset.py test/sql/test_returning.py test/sql/test_rowcount.py test/sql/test_selectable.py test/sql/test_sequences.py test/sql/test_tablesample.py 
test/sql/test_text.py test/sql/test_type_expressions.py test/sql/test_types.py test/sql/test_unicode.py test/sql/test_update.py test/sql/test_utils.py
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/SQLAlchemy-1.3.18-py3.9.egg-info/requires.txt
[mssql] pyodbc [mssql_pymssql] pymssql [mssql_pyodbc] pyodbc [mysql] mysqlclient [oracle] cx_oracle [postgresql] psycopg2 [postgresql_pg8000] pg8000 [postgresql_psycopg2binary] psycopg2-binary [postgresql_psycopg2cffi] psycopg2cffi [pymysql] pymysql
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/SQLAlchemy-1.3.18-py3.9.egg-info/top_level.txt
sqlalchemy
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/SQLAlchemy-1.3.18-py3.9.egg-info/dependency_links.txt
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/Werkzeug-1.0.1.dist-info/RECORD
Werkzeug-1.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 Werkzeug-1.0.1.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 Werkzeug-1.0.1.dist-info/METADATA,sha256=d0zmVNa4UC2-nAo2A8_81oiy123D6JTGRSuY_Ymgyt4,4730 Werkzeug-1.0.1.dist-info/RECORD,, Werkzeug-1.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 Werkzeug-1.0.1.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 Werkzeug-1.0.1.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9 werkzeug/__init__.py,sha256=rb-yPiXOjTLbtDOl5fQp5hN7oBdaoXAoQ-slAAvfZAo,502 werkzeug/__pycache__/__init__.cpython-39.pyc,, werkzeug/__pycache__/_compat.cpython-39.pyc,, werkzeug/__pycache__/_internal.cpython-39.pyc,, werkzeug/__pycache__/_reloader.cpython-39.pyc,, werkzeug/__pycache__/datastructures.cpython-39.pyc,, werkzeug/__pycache__/exceptions.cpython-39.pyc,, werkzeug/__pycache__/filesystem.cpython-39.pyc,, werkzeug/__pycache__/formparser.cpython-39.pyc,, werkzeug/__pycache__/http.cpython-39.pyc,, werkzeug/__pycache__/local.cpython-39.pyc,, werkzeug/__pycache__/posixemulation.cpython-39.pyc,, werkzeug/__pycache__/routing.cpython-39.pyc,, werkzeug/__pycache__/security.cpython-39.pyc,, werkzeug/__pycache__/serving.cpython-39.pyc,, werkzeug/__pycache__/test.cpython-39.pyc,, werkzeug/__pycache__/testapp.cpython-39.pyc,, werkzeug/__pycache__/urls.cpython-39.pyc,, werkzeug/__pycache__/useragents.cpython-39.pyc,, werkzeug/__pycache__/utils.cpython-39.pyc,, werkzeug/__pycache__/wsgi.cpython-39.pyc,, werkzeug/_compat.py,sha256=zjufTNrhQ8BgYSGSh-sVu6iW3r3O9WzjE9j-qJobx-g,6671 werkzeug/_internal.py,sha256=d_4AqheyS6dHMViwdc0drFrjs67ZzT6Ej2gWf-Z-Iys,14351 werkzeug/_reloader.py,sha256=I3mg3oRQ0lLzl06oEoVopN3bN7CtINuuUQdqDcmTnEs,11531 werkzeug/datastructures.py,sha256=AonxOcwU0TPMEzfKF1368ySULxHgxE-JE-DEAGdo2ts,100480 werkzeug/debug/__init__.py,sha256=3RtUMc5Y9hYyK11ugHltgkQ9Dt-ViR945Vy_X5NV7zU,17289 
werkzeug/debug/__pycache__/__init__.cpython-39.pyc,, werkzeug/debug/__pycache__/console.cpython-39.pyc,, werkzeug/debug/__pycache__/repr.cpython-39.pyc,, werkzeug/debug/__pycache__/tbtools.cpython-39.pyc,, werkzeug/debug/console.py,sha256=OATaO7KHYMqpbzIFe1HeW9Mnl3wZgA3jMQoGDPn5URc,5488 werkzeug/debug/repr.py,sha256=lIwuhbyrMwVe3P_cFqNyqzHL7P93TLKod7lw9clydEw,9621 werkzeug/debug/shared/FONT_LICENSE,sha256=LwAVEI1oYnvXiNMT9SnCH_TaLCxCpeHziDrMg0gPkAI,4673 werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507 werkzeug/debug/shared/debugger.js,sha256=rOhqZMRfpZnnu6_XCGn6wMWPhtfwRAcyZKksdIxPJas,6400 werkzeug/debug/shared/jquery.js,sha256=CSXorXvZcTkaix6Yvo6HppcZGetbYMGWSFlBw8HfCJo,88145 werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191 werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200 werkzeug/debug/shared/source.png,sha256=RoGcBTE4CyCB85GBuDGTFlAnUqxwTBiIfDqW15EpnUQ,818 werkzeug/debug/shared/style.css,sha256=gZ9uhmb5zj3XLuT9RvnMp6jMINgQ-VVBCp-2AZbG3YQ,6604 werkzeug/debug/shared/ubuntu.ttf,sha256=1eaHFyepmy4FyDvjLVzpITrGEBu_CZYY94jE0nED1c0,70220 werkzeug/debug/tbtools.py,sha256=2iJ8RURUZUSbopOIehy53LnVJWx47lsHN2V2l6hc7Wc,20363 werkzeug/exceptions.py,sha256=UTYSDkmAsH-vt8VSidlEffwqBVNXuT7bRg-_NqgUe8A,25188 werkzeug/filesystem.py,sha256=HzKl-j0Hd8Jl66j778UbPTAYNnY6vUZgYLlBZ0e7uw0,2101 werkzeug/formparser.py,sha256=Sto0jZid9im9ZVIf56vilCdyX-arK33wSftkYsLCnzo,21788 werkzeug/http.py,sha256=KVRV3yFK14PJeI56qClEq4qxFdvKUQVy4C_dwuWz9_Q,43107 werkzeug/local.py,sha256=_Tk7gB238pPWUU7habxFkZF02fiCMRVW6d62YWL1Rh0,14371 werkzeug/middleware/__init__.py,sha256=f1SFZo67IlW4k1uqKzNHxYQlsakUS-D6KK_j0e3jjwQ,549 werkzeug/middleware/__pycache__/__init__.cpython-39.pyc,, werkzeug/middleware/__pycache__/dispatcher.cpython-39.pyc,, werkzeug/middleware/__pycache__/http_proxy.cpython-39.pyc,, werkzeug/middleware/__pycache__/lint.cpython-39.pyc,, 
werkzeug/middleware/__pycache__/profiler.cpython-39.pyc,, werkzeug/middleware/__pycache__/proxy_fix.cpython-39.pyc,, werkzeug/middleware/__pycache__/shared_data.cpython-39.pyc,, werkzeug/middleware/dispatcher.py,sha256=_-KoMzHtcISHS7ouWKAOraqlCLprdh83YOAn_8DjLp8,2240 werkzeug/middleware/http_proxy.py,sha256=lRjTdMmghHiZuZrS7_UJ3gZc-vlFizhBbFZ-XZPLwIA,7117 werkzeug/middleware/lint.py,sha256=ItTwuWJnflF8xMT1uqU_Ty1ryhux-CjeUfskqaUpxsw,12967 werkzeug/middleware/profiler.py,sha256=8B_s23d6BGrU_q54gJsm6kcCbOJbTSqrXCsioHON0Xs,4471 werkzeug/middleware/proxy_fix.py,sha256=K5oZ3DPXOzdZi0Xba5zW7ClPOxgUuqXHQHvY2-AWCGw,6431 werkzeug/middleware/shared_data.py,sha256=sPSRTKqtKSVBUyN8fr6jOJbdq9cdOLu6pg3gz4Y_1Xo,9599 werkzeug/posixemulation.py,sha256=gSSiv1SCmOyzOM_nq1ZaZCtxP__C5MeDJl_4yXJmi4Q,3541 werkzeug/routing.py,sha256=6-iZ7CKeUILYAehoKXLbmi5E6LgLbwuzUh8TNplnf5Q,79019 werkzeug/security.py,sha256=81149MplFq7-hD4RK4sKp9kzXXejjV9D4lWBzaRyeQ8,8106 werkzeug/serving.py,sha256=YvTqvurA-Mnj8mkqRe2kBdVr2ap4ibCq1ByQjOA6g1w,38694 werkzeug/test.py,sha256=GJ9kxTMSJ-nB7kfGtxuROr9JGmXxDRev-2U1SkeUJGE,39564 werkzeug/testapp.py,sha256=bHekqMsqRfVxwgFbvOMem-DYa_sdB7R47yUXpt1RUTo,9329 werkzeug/urls.py,sha256=T8-hV_1vwhu6xhX93FwsHteK-W-kIE2orj5WoMf-WFw,39322 werkzeug/useragents.py,sha256=TSoGv5IOvP375eK5gLLpsLQCeUgTR6sO1WftmAP_YvM,5563 werkzeug/utils.py,sha256=hrVK4u_wi8z9viBO9bgOLlm1aaIvCpn-p2d1FeZQDEo,25251 werkzeug/wrappers/__init__.py,sha256=S4VioKAmF_av9Ec9zQvG71X1EOkYfPx1TYck9jyDiyY,1384 werkzeug/wrappers/__pycache__/__init__.cpython-39.pyc,, werkzeug/wrappers/__pycache__/accept.cpython-39.pyc,, werkzeug/wrappers/__pycache__/auth.cpython-39.pyc,, werkzeug/wrappers/__pycache__/base_request.cpython-39.pyc,, werkzeug/wrappers/__pycache__/base_response.cpython-39.pyc,, werkzeug/wrappers/__pycache__/common_descriptors.cpython-39.pyc,, werkzeug/wrappers/__pycache__/cors.cpython-39.pyc,, werkzeug/wrappers/__pycache__/etag.cpython-39.pyc,, werkzeug/wrappers/__pycache__/json.cpython-39.pyc,, 
werkzeug/wrappers/__pycache__/request.cpython-39.pyc,, werkzeug/wrappers/__pycache__/response.cpython-39.pyc,, werkzeug/wrappers/__pycache__/user_agent.cpython-39.pyc,, werkzeug/wrappers/accept.py,sha256=TIvjUc0g73fhTWX54wg_D9NNzKvpnG1X8u1w26tK1o8,1760 werkzeug/wrappers/auth.py,sha256=Pmn6iaGHBrUyHbJpW0lZhO_q9RVoAa5QalaTqcavdAI,1158 werkzeug/wrappers/base_request.py,sha256=4TuGlKWeKQdlq4eU94hJYcXSfWo8Rk7CS1Ef5lJ3ZM0,26012 werkzeug/wrappers/base_response.py,sha256=JTxJZ8o-IBetpoWJqt2HFwPaNWNDAlM3_GXJe1Whw80,27784 werkzeug/wrappers/common_descriptors.py,sha256=X2Ktd5zUWsmcd4ciaF62Dd8Lru9pLGP_XDUNukc8cXs,12829 werkzeug/wrappers/cors.py,sha256=XMbaCol4dWTGvb-dCJBoN0p3JX91v93AIAHd7tnB3L4,3466 werkzeug/wrappers/etag.py,sha256=XMXtyfByBsOjxwaX8U7ZtUY7JXkbQLP45oXZ0qkyTNs,12217 werkzeug/wrappers/json.py,sha256=HvK_A4NpO0sLqgb10sTJcoZydYOwyNiPCJPV7SVgcgE,4343 werkzeug/wrappers/request.py,sha256=QbHGqDpGPN684pnOPEokwkPESfm-NnfYM7ydOMxW_NI,1514 werkzeug/wrappers/response.py,sha256=Oqv8TMG_dnOKTq_V30ddgkO5B7IJhkVPODvm7cbhZ3c,2524 werkzeug/wrappers/user_agent.py,sha256=YJb-vr12cujG7sQMG9V89VsJa-03SWSenhg1W4cT0EY,435 werkzeug/wsgi.py,sha256=ZGk85NzRyQTzkYis-xl8V9ydJgfClBdStvhzDzER2mw,34367
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/Werkzeug-1.0.1.dist-info/WHEEL
Wheel-Version: 1.0 Generator: bdist_wheel (0.34.2) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/Werkzeug-1.0.1.dist-info/LICENSE.rst
Copyright 2007 Pallets Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/Werkzeug-1.0.1.dist-info/top_level.txt
werkzeug
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/Werkzeug-1.0.1.dist-info/INSTALLER
pip
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/Werkzeug-1.0.1.dist-info/METADATA
Metadata-Version: 2.1 Name: Werkzeug Version: 1.0.1 Summary: The comprehensive WSGI web application library. Home-page: https://palletsprojects.com/p/werkzeug/ Author: Armin Ronacher Author-email: armin.ronacher@active-4.com Maintainer: Pallets Maintainer-email: contact@palletsprojects.com License: BSD-3-Clause Project-URL: Documentation, https://werkzeug.palletsprojects.com/ Project-URL: Code, https://github.com/pallets/werkzeug Project-URL: Issue tracker, https://github.com/pallets/werkzeug/issues Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Environment :: Web Environment Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: BSD License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content Classifier: Topic :: Internet :: WWW/HTTP :: WSGI Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware Classifier: Topic :: Software Development :: Libraries :: Application Frameworks Classifier: Topic :: Software Development :: Libraries :: Python Modules Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* Description-Content-Type: text/x-rst Provides-Extra: dev Requires-Dist: pytest ; extra == 'dev' Requires-Dist: pytest-timeout ; extra == 'dev' Requires-Dist: coverage ; extra == 'dev' Requires-Dist: tox ; extra == 'dev' Requires-Dist: sphinx ; 
extra == 'dev' Requires-Dist: pallets-sphinx-themes ; extra == 'dev' Requires-Dist: sphinx-issues ; extra == 'dev' Provides-Extra: watchdog Requires-Dist: watchdog ; extra == 'watchdog' Werkzeug ======== *werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff") Werkzeug is a comprehensive `WSGI`_ web application library. It began as a simple collection of various utilities for WSGI applications and has become one of the most advanced WSGI utility libraries. It includes: - An interactive debugger that allows inspecting stack traces and source code in the browser with an interactive interpreter for any frame in the stack. - A full-featured request object with objects to interact with headers, query args, form data, files, and cookies. - A response object that can wrap other WSGI applications and handle streaming data. - A routing system for matching URLs to endpoints and generating URLs for endpoints, with an extensible system for capturing variables from URLs. - HTTP utilities to handle entity tags, cache control, dates, user agents, cookies, files, and more. - A threaded WSGI server for use while developing applications locally. - A test client for simulating HTTP requests during testing without requiring running a server. Werkzeug is Unicode aware and doesn't enforce any dependencies. It is up to the developer to choose a template engine, database adapter, and even how to handle requests. It can be used to build all sorts of end user applications such as blogs, wikis, or bulletin boards. `Flask`_ wraps Werkzeug, using it to handle the details of WSGI while providing more structure and patterns for defining powerful applications. Installing ---------- Install and update using `pip`_: .. code-block:: text pip install -U Werkzeug A Simple Example ---------------- .. 
code-block:: python from werkzeug.wrappers import Request, Response @Request.application def application(request): return Response('Hello, World!') if __name__ == '__main__': from werkzeug.serving import run_simple run_simple('localhost', 4000, application) Links ----- - Website: https://palletsprojects.com/p/werkzeug/ - Documentation: https://werkzeug.palletsprojects.com/ - Releases: https://pypi.org/project/Werkzeug/ - Code: https://github.com/pallets/werkzeug - Issue tracker: https://github.com/pallets/werkzeug/issues - Test status: https://dev.azure.com/pallets/werkzeug/_build - Official chat: https://discord.gg/t6rrQZH .. _WSGI: https://wsgi.readthedocs.io/en/latest/ .. _Flask: https://www.palletsprojects.com/p/flask/ .. _pip: https://pip.pypa.io/en/stable/quickstart/
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/tzfile.py
#!/usr/bin/env python ''' $Id: tzfile.py,v 1.8 2004/06/03 00:15:24 zenzen Exp $ ''' from datetime import datetime from struct import unpack, calcsize from pytz.tzinfo import StaticTzInfo, DstTzInfo, memorized_ttinfo from pytz.tzinfo import memorized_datetime, memorized_timedelta def _byte_string(s): """Cast a string or byte string to an ASCII byte string.""" return s.encode('ASCII') _NULL = _byte_string('\0') def _std_string(s): """Cast a string or byte string to an ASCII string.""" return str(s.decode('ASCII')) def build_tzinfo(zone, fp): head_fmt = '>4s c 15x 6l' head_size = calcsize(head_fmt) (magic, format, ttisgmtcnt, ttisstdcnt, leapcnt, timecnt, typecnt, charcnt) = unpack(head_fmt, fp.read(head_size)) # Make sure it is a tzfile(5) file assert magic == _byte_string('TZif'), 'Got magic %s' % repr(magic) # Read out the transition times, localtime indices and ttinfo structures. data_fmt = '>%(timecnt)dl %(timecnt)dB %(ttinfo)s %(charcnt)ds' % dict( timecnt=timecnt, ttinfo='lBB' * typecnt, charcnt=charcnt) data_size = calcsize(data_fmt) data = unpack(data_fmt, fp.read(data_size)) # make sure we unpacked the right number of values assert len(data) == 2 * timecnt + 3 * typecnt + 1 transitions = [memorized_datetime(trans) for trans in data[:timecnt]] lindexes = list(data[timecnt:2 * timecnt]) ttinfo_raw = data[2 * timecnt:-1] tznames_raw = data[-1] del data # Process ttinfo into separate structs ttinfo = [] tznames = {} i = 0 while i < len(ttinfo_raw): # have we looked up this timezone name yet? 
tzname_offset = ttinfo_raw[i + 2] if tzname_offset not in tznames: nul = tznames_raw.find(_NULL, tzname_offset) if nul < 0: nul = len(tznames_raw) tznames[tzname_offset] = _std_string( tznames_raw[tzname_offset:nul]) ttinfo.append((ttinfo_raw[i], bool(ttinfo_raw[i + 1]), tznames[tzname_offset])) i += 3 # Now build the timezone object if len(ttinfo) == 1 or len(transitions) == 0: ttinfo[0][0], ttinfo[0][2] cls = type(zone, (StaticTzInfo,), dict( zone=zone, _utcoffset=memorized_timedelta(ttinfo[0][0]), _tzname=ttinfo[0][2])) else: # Early dates use the first standard time ttinfo i = 0 while ttinfo[i][1]: i += 1 if ttinfo[i] == ttinfo[lindexes[0]]: transitions[0] = datetime.min else: transitions.insert(0, datetime.min) lindexes.insert(0, i) # calculate transition info transition_info = [] for i in range(len(transitions)): inf = ttinfo[lindexes[i]] utcoffset = inf[0] if not inf[1]: dst = 0 else: for j in range(i - 1, -1, -1): prev_inf = ttinfo[lindexes[j]] if not prev_inf[1]: break dst = inf[0] - prev_inf[0] # dst offset # Bad dst? Look further. DST > 24 hours happens when # a timzone has moved across the international dateline. if dst <= 0 or dst > 3600 * 3: for j in range(i + 1, len(transitions)): stdinf = ttinfo[lindexes[j]] if not stdinf[1]: dst = inf[0] - stdinf[0] if dst > 0: break # Found a useful std time. tzname = inf[2] # Round utcoffset and dst to the nearest minute or the # datetime library will complain. Conversions to these timezones # might be up to plus or minus 30 seconds out, but it is # the best we can do. 
utcoffset = int((utcoffset + 30) // 60) * 60 dst = int((dst + 30) // 60) * 60 transition_info.append(memorized_ttinfo(utcoffset, dst, tzname)) cls = type(zone, (DstTzInfo,), dict( zone=zone, _utc_transition_times=transitions, _transition_info=transition_info)) return cls() if __name__ == '__main__': import os.path from pprint import pprint base = os.path.join(os.path.dirname(__file__), 'zoneinfo') tz = build_tzinfo('Australia/Melbourne', open(os.path.join(base, 'Australia', 'Melbourne'), 'rb')) tz = build_tzinfo('US/Eastern', open(os.path.join(base, 'US', 'Eastern'), 'rb')) pprint(tz._utc_transition_times)
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/tzinfo.py
'''Base classes and helpers for building zone specific tzinfo classes''' from datetime import datetime, timedelta, tzinfo from bisect import bisect_right try: set except NameError: from sets import Set as set import pytz from pytz.exceptions import AmbiguousTimeError, NonExistentTimeError __all__ = [] _timedelta_cache = {} def memorized_timedelta(seconds): '''Create only one instance of each distinct timedelta''' try: return _timedelta_cache[seconds] except KeyError: delta = timedelta(seconds=seconds) _timedelta_cache[seconds] = delta return delta _epoch = datetime.utcfromtimestamp(0) _datetime_cache = {0: _epoch} def memorized_datetime(seconds): '''Create only one instance of each distinct datetime''' try: return _datetime_cache[seconds] except KeyError: # NB. We can't just do datetime.utcfromtimestamp(seconds) as this # fails with negative values under Windows (Bug #90096) dt = _epoch + timedelta(seconds=seconds) _datetime_cache[seconds] = dt return dt _ttinfo_cache = {} def memorized_ttinfo(*args): '''Create only one instance of each distinct tuple''' try: return _ttinfo_cache[args] except KeyError: ttinfo = ( memorized_timedelta(args[0]), memorized_timedelta(args[1]), args[2] ) _ttinfo_cache[args] = ttinfo return ttinfo _notime = memorized_timedelta(0) def _to_seconds(td): '''Convert a timedelta to seconds''' return td.seconds + td.days * 24 * 60 * 60 class BaseTzInfo(tzinfo): # Overridden in subclass _utcoffset = None _tzname = None zone = None def __str__(self): return self.zone class StaticTzInfo(BaseTzInfo): '''A timezone that has a constant offset from UTC These timezones are rare, as most locations have changed their offset at some point in their history ''' def fromutc(self, dt): '''See datetime.tzinfo.fromutc''' if dt.tzinfo is not None and dt.tzinfo is not self: raise ValueError('fromutc: dt.tzinfo is not self') return (dt + self._utcoffset).replace(tzinfo=self) def utcoffset(self, dt, is_dst=None): '''See datetime.tzinfo.utcoffset is_dst is ignored 
for StaticTzInfo, and exists only to retain compatibility with DstTzInfo. ''' return self._utcoffset def dst(self, dt, is_dst=None): '''See datetime.tzinfo.dst is_dst is ignored for StaticTzInfo, and exists only to retain compatibility with DstTzInfo. ''' return _notime def tzname(self, dt, is_dst=None): '''See datetime.tzinfo.tzname is_dst is ignored for StaticTzInfo, and exists only to retain compatibility with DstTzInfo. ''' return self._tzname def localize(self, dt, is_dst=False): '''Convert naive time to local time''' if dt.tzinfo is not None: raise ValueError('Not naive datetime (tzinfo is already set)') return dt.replace(tzinfo=self) def normalize(self, dt, is_dst=False): '''Correct the timezone information on the given datetime. This is normally a no-op, as StaticTzInfo timezones never have ambiguous cases to correct: >>> from pytz import timezone >>> gmt = timezone('GMT') >>> isinstance(gmt, StaticTzInfo) True >>> dt = datetime(2011, 5, 8, 1, 2, 3, tzinfo=gmt) >>> gmt.normalize(dt) is dt True The supported method of converting between timezones is to use datetime.astimezone(). Currently normalize() also works: >>> la = timezone('America/Los_Angeles') >>> dt = la.localize(datetime(2011, 5, 7, 1, 2, 3)) >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' >>> gmt.normalize(dt).strftime(fmt) '2011-05-07 08:02:03 GMT (+0000)' ''' if dt.tzinfo is self: return dt if dt.tzinfo is None: raise ValueError('Naive time - no tzinfo set') return dt.astimezone(self) def __repr__(self): return '<StaticTzInfo %r>' % (self.zone,) def __reduce__(self): # Special pickle to zone remains a singleton and to cope with # database changes. return pytz._p, (self.zone,) class DstTzInfo(BaseTzInfo): '''A timezone that has a variable offset from UTC The offset might change if daylight saving time comes into effect, or at a point in history when the region decides to change their timezone definition. 
''' # Overridden in subclass # Sorted list of DST transition times, UTC _utc_transition_times = None # [(utcoffset, dstoffset, tzname)] corresponding to # _utc_transition_times entries _transition_info = None zone = None # Set in __init__ _tzinfos = None _dst = None # DST offset def __init__(self, _inf=None, _tzinfos=None): if _inf: self._tzinfos = _tzinfos self._utcoffset, self._dst, self._tzname = _inf else: _tzinfos = {} self._tzinfos = _tzinfos self._utcoffset, self._dst, self._tzname = ( self._transition_info[0]) _tzinfos[self._transition_info[0]] = self for inf in self._transition_info[1:]: if inf not in _tzinfos: _tzinfos[inf] = self.__class__(inf, _tzinfos) def fromutc(self, dt): '''See datetime.tzinfo.fromutc''' if (dt.tzinfo is not None and getattr(dt.tzinfo, '_tzinfos', None) is not self._tzinfos): raise ValueError('fromutc: dt.tzinfo is not self') dt = dt.replace(tzinfo=None) idx = max(0, bisect_right(self._utc_transition_times, dt) - 1) inf = self._transition_info[idx] return (dt + inf[0]).replace(tzinfo=self._tzinfos[inf]) def normalize(self, dt): '''Correct the timezone information on the given datetime If date arithmetic crosses DST boundaries, the tzinfo is not magically adjusted. This method normalizes the tzinfo to the correct one. To test, first we need to do some setup >>> from pytz import timezone >>> utc = timezone('UTC') >>> eastern = timezone('US/Eastern') >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' We next create a datetime right on an end-of-DST transition point, the instant when the wallclocks are wound back one hour. >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc) >>> loc_dt = utc_dt.astimezone(eastern) >>> loc_dt.strftime(fmt) '2002-10-27 01:00:00 EST (-0500)' Now, if we subtract a few minutes from it, note that the timezone information has not changed. 
>>> before = loc_dt - timedelta(minutes=10) >>> before.strftime(fmt) '2002-10-27 00:50:00 EST (-0500)' But we can fix that by calling the normalize method >>> before = eastern.normalize(before) >>> before.strftime(fmt) '2002-10-27 01:50:00 EDT (-0400)' The supported method of converting between timezones is to use datetime.astimezone(). Currently, normalize() also works: >>> th = timezone('Asia/Bangkok') >>> am = timezone('Europe/Amsterdam') >>> dt = th.localize(datetime(2011, 5, 7, 1, 2, 3)) >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' >>> am.normalize(dt).strftime(fmt) '2011-05-06 20:02:03 CEST (+0200)' ''' if dt.tzinfo is None: raise ValueError('Naive time - no tzinfo set') # Convert dt in localtime to UTC offset = dt.tzinfo._utcoffset dt = dt.replace(tzinfo=None) dt = dt - offset # convert it back, and return it return self.fromutc(dt) def localize(self, dt, is_dst=False): '''Convert naive time to local time. This method should be used to construct localtimes, rather than passing a tzinfo argument to a datetime constructor. is_dst is used to determine the correct timezone in the ambigous period at the end of daylight saving time. >>> from pytz import timezone >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' >>> amdam = timezone('Europe/Amsterdam') >>> dt = datetime(2004, 10, 31, 2, 0, 0) >>> loc_dt1 = amdam.localize(dt, is_dst=True) >>> loc_dt2 = amdam.localize(dt, is_dst=False) >>> loc_dt1.strftime(fmt) '2004-10-31 02:00:00 CEST (+0200)' >>> loc_dt2.strftime(fmt) '2004-10-31 02:00:00 CET (+0100)' >>> str(loc_dt2 - loc_dt1) '1:00:00' Use is_dst=None to raise an AmbiguousTimeError for ambiguous times at the end of daylight saving time >>> try: ... loc_dt1 = amdam.localize(dt, is_dst=None) ... except AmbiguousTimeError: ... print('Ambiguous') Ambiguous is_dst defaults to False >>> amdam.localize(dt) == amdam.localize(dt, False) True is_dst is also used to determine the correct timezone in the wallclock times jumped over at the start of daylight saving time. 
>>> pacific = timezone('US/Pacific') >>> dt = datetime(2008, 3, 9, 2, 0, 0) >>> ploc_dt1 = pacific.localize(dt, is_dst=True) >>> ploc_dt2 = pacific.localize(dt, is_dst=False) >>> ploc_dt1.strftime(fmt) '2008-03-09 02:00:00 PDT (-0700)' >>> ploc_dt2.strftime(fmt) '2008-03-09 02:00:00 PST (-0800)' >>> str(ploc_dt2 - ploc_dt1) '1:00:00' Use is_dst=None to raise a NonExistentTimeError for these skipped times. >>> try: ... loc_dt1 = pacific.localize(dt, is_dst=None) ... except NonExistentTimeError: ... print('Non-existent') Non-existent ''' if dt.tzinfo is not None: raise ValueError('Not naive datetime (tzinfo is already set)') # Find the two best possibilities. possible_loc_dt = set() for delta in [timedelta(days=-1), timedelta(days=1)]: loc_dt = dt + delta idx = max(0, bisect_right( self._utc_transition_times, loc_dt) - 1) inf = self._transition_info[idx] tzinfo = self._tzinfos[inf] loc_dt = tzinfo.normalize(dt.replace(tzinfo=tzinfo)) if loc_dt.replace(tzinfo=None) == dt: possible_loc_dt.add(loc_dt) if len(possible_loc_dt) == 1: return possible_loc_dt.pop() # If there are no possibly correct timezones, we are attempting # to convert a time that never happened - the time period jumped # during the start-of-DST transition period. if len(possible_loc_dt) == 0: # If we refuse to guess, raise an exception. if is_dst is None: raise NonExistentTimeError(dt) # If we are forcing the pre-DST side of the DST transition, we # obtain the correct timezone by winding the clock forward a few # hours. elif is_dst: return self.localize( dt + timedelta(hours=6), is_dst=True) - timedelta(hours=6) # If we are forcing the post-DST side of the DST transition, we # obtain the correct timezone by winding the clock back. else: return self.localize( dt - timedelta(hours=6), is_dst=False) + timedelta(hours=6) # If we get this far, we have multiple possible timezones - this # is an ambiguous case occuring during the end-of-DST transition. 
# If told to be strict, raise an exception since we have an # ambiguous case if is_dst is None: raise AmbiguousTimeError(dt) # Filter out the possiblilities that don't match the requested # is_dst filtered_possible_loc_dt = [ p for p in possible_loc_dt if bool(p.tzinfo._dst) == is_dst ] # Hopefully we only have one possibility left. Return it. if len(filtered_possible_loc_dt) == 1: return filtered_possible_loc_dt[0] if len(filtered_possible_loc_dt) == 0: filtered_possible_loc_dt = list(possible_loc_dt) # If we get this far, we have in a wierd timezone transition # where the clocks have been wound back but is_dst is the same # in both (eg. Europe/Warsaw 1915 when they switched to CET). # At this point, we just have to guess unless we allow more # hints to be passed in (such as the UTC offset or abbreviation), # but that is just getting silly. # # Choose the earliest (by UTC) applicable timezone if is_dst=True # Choose the latest (by UTC) applicable timezone if is_dst=False # i.e., behave like end-of-DST transition dates = {} # utc -> local for local_dt in filtered_possible_loc_dt: utc_time = ( local_dt.replace(tzinfo=None) - local_dt.tzinfo._utcoffset) assert utc_time not in dates dates[utc_time] = local_dt return dates[[min, max][not is_dst](dates)] def utcoffset(self, dt, is_dst=None): '''See datetime.tzinfo.utcoffset The is_dst parameter may be used to remove ambiguity during DST transitions. >>> from pytz import timezone >>> tz = timezone('America/St_Johns') >>> ambiguous = datetime(2009, 10, 31, 23, 30) >>> str(tz.utcoffset(ambiguous, is_dst=False)) '-1 day, 20:30:00' >>> str(tz.utcoffset(ambiguous, is_dst=True)) '-1 day, 21:30:00' >>> try: ... tz.utcoffset(ambiguous) ... except AmbiguousTimeError: ... 
print('Ambiguous') Ambiguous ''' if dt is None: return None elif dt.tzinfo is not self: dt = self.localize(dt, is_dst) return dt.tzinfo._utcoffset else: return self._utcoffset def dst(self, dt, is_dst=None): '''See datetime.tzinfo.dst The is_dst parameter may be used to remove ambiguity during DST transitions. >>> from pytz import timezone >>> tz = timezone('America/St_Johns') >>> normal = datetime(2009, 9, 1) >>> str(tz.dst(normal)) '1:00:00' >>> str(tz.dst(normal, is_dst=False)) '1:00:00' >>> str(tz.dst(normal, is_dst=True)) '1:00:00' >>> ambiguous = datetime(2009, 10, 31, 23, 30) >>> str(tz.dst(ambiguous, is_dst=False)) '0:00:00' >>> str(tz.dst(ambiguous, is_dst=True)) '1:00:00' >>> try: ... tz.dst(ambiguous) ... except AmbiguousTimeError: ... print('Ambiguous') Ambiguous ''' if dt is None: return None elif dt.tzinfo is not self: dt = self.localize(dt, is_dst) return dt.tzinfo._dst else: return self._dst def tzname(self, dt, is_dst=None): '''See datetime.tzinfo.tzname The is_dst parameter may be used to remove ambiguity during DST transitions. >>> from pytz import timezone >>> tz = timezone('America/St_Johns') >>> normal = datetime(2009, 9, 1) >>> tz.tzname(normal) 'NDT' >>> tz.tzname(normal, is_dst=False) 'NDT' >>> tz.tzname(normal, is_dst=True) 'NDT' >>> ambiguous = datetime(2009, 10, 31, 23, 30) >>> tz.tzname(ambiguous, is_dst=False) 'NST' >>> tz.tzname(ambiguous, is_dst=True) 'NDT' >>> try: ... tz.tzname(ambiguous) ... except AmbiguousTimeError: ... 
print('Ambiguous') Ambiguous ''' if dt is None: return self.zone elif dt.tzinfo is not self: dt = self.localize(dt, is_dst) return dt.tzinfo._tzname else: return self._tzname def __repr__(self): if self._dst: dst = 'DST' else: dst = 'STD' if self._utcoffset > _notime: return '<DstTzInfo %r %s+%s %s>' % ( self.zone, self._tzname, self._utcoffset, dst ) else: return '<DstTzInfo %r %s%s %s>' % ( self.zone, self._tzname, self._utcoffset, dst ) def __reduce__(self): # Special pickle to zone remains a singleton and to cope with # database changes. return pytz._p, ( self.zone, _to_seconds(self._utcoffset), _to_seconds(self._dst), self._tzname ) def unpickler(zone, utcoffset=None, dstoffset=None, tzname=None): """Factory function for unpickling pytz tzinfo instances. This is shared for both StaticTzInfo and DstTzInfo instances, because database changes could cause a zones implementation to switch between these two base classes and we can't break pickles on a pytz version upgrade. """ # Raises a KeyError if zone no longer exists, which should never happen # and would be a bug. tz = pytz.timezone(zone) # A StaticTzInfo - just return it if utcoffset is None: return tz # This pickle was created from a DstTzInfo. We need to # determine which of the list of tzinfo instances for this zone # to use in order to restore the state of any datetime instances using # it correctly. utcoffset = memorized_timedelta(utcoffset) dstoffset = memorized_timedelta(dstoffset) try: return tz._tzinfos[(utcoffset, dstoffset, tzname)] except KeyError: # The particular state requested in this timezone no longer exists. # This indicates a corrupt pickle, or the timezone database has been # corrected violently enough to make this particular # (utcoffset,dstoffset) no longer exist in the zone, or the # abbreviation has been changed. pass # See if we can find an entry differing only by tzname. 
Abbreviations # get changed from the initial guess by the database maintainers to # match reality when this information is discovered. for localized_tz in tz._tzinfos.values(): if (localized_tz._utcoffset == utcoffset and localized_tz._dst == dstoffset): return localized_tz # This (utcoffset, dstoffset) information has been removed from the # zone. Add it back. This might occur when the database maintainers have # corrected incorrect information. datetime instances using this # incorrect information will continue to do so, exactly as they were # before being pickled. This is purely an overly paranoid safety net - I # doubt this will ever been needed in real life. inf = (utcoffset, dstoffset, tzname) tz._tzinfos[inf] = tz.__class__(inf, tz._tzinfos) return tz._tzinfos[inf]
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/__init__.py
''' datetime.tzinfo timezone definitions generated from the Olson timezone database: ftp://elsie.nci.nih.gov/pub/tz*.tar.gz See the datetime section of the Python Library Reference for information on how to use these modules. ''' import sys import datetime import os.path from pytz.exceptions import AmbiguousTimeError from pytz.exceptions import InvalidTimeError from pytz.exceptions import NonExistentTimeError from pytz.exceptions import UnknownTimeZoneError from pytz.lazy import LazyDict, LazyList, LazySet # noqa from pytz.tzinfo import unpickler, BaseTzInfo from pytz.tzfile import build_tzinfo # The IANA (nee Olson) database is updated several times a year. OLSON_VERSION = '2020a' VERSION = '2020.1' # pip compatible version number. __version__ = VERSION OLSEN_VERSION = OLSON_VERSION # Old releases had this misspelling __all__ = [ 'timezone', 'utc', 'country_timezones', 'country_names', 'AmbiguousTimeError', 'InvalidTimeError', 'NonExistentTimeError', 'UnknownTimeZoneError', 'all_timezones', 'all_timezones_set', 'common_timezones', 'common_timezones_set', 'BaseTzInfo', 'FixedOffset', ] if sys.version_info[0] > 2: # Python 3.x # Python 3.x doesn't have unicode(), making writing code # for Python 2.3 and Python 3.x a pain. unicode = str def ascii(s): r""" >>> ascii('Hello') 'Hello' >>> ascii('\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ... UnicodeEncodeError: ... """ if type(s) == bytes: s = s.decode('ASCII') else: s.encode('ASCII') # Raise an exception if not ASCII return s # But the string - not a byte string. else: # Python 2.x def ascii(s): r""" >>> ascii('Hello') 'Hello' >>> ascii(u'Hello') 'Hello' >>> ascii(u'\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ... UnicodeEncodeError: ... """ return s.encode('ASCII') def open_resource(name): """Open a resource from the zoneinfo subdir for reading. 
Uses the pkg_resources module if available and no standard file found at the calculated location. It is possible to specify different location for zoneinfo subdir by using the PYTZ_TZDATADIR environment variable. """ name_parts = name.lstrip('/').split('/') for part in name_parts: if part == os.path.pardir or os.path.sep in part: raise ValueError('Bad path segment: %r' % part) zoneinfo_dir = os.environ.get('PYTZ_TZDATADIR', None) if zoneinfo_dir is not None: filename = os.path.join(zoneinfo_dir, *name_parts) else: filename = os.path.join(os.path.dirname(__file__), 'zoneinfo', *name_parts) if not os.path.exists(filename): # http://bugs.launchpad.net/bugs/383171 - we avoid using this # unless absolutely necessary to help when a broken version of # pkg_resources is installed. try: from pkg_resources import resource_stream except ImportError: resource_stream = None if resource_stream is not None: return resource_stream(__name__, 'zoneinfo/' + name) return open(filename, 'rb') def resource_exists(name): """Return true if the given resource exists""" try: open_resource(name).close() return True except IOError: return False _tzinfo_cache = {} def timezone(zone): r''' Return a datetime.tzinfo implementation for the given timezone >>> from datetime import datetime, timedelta >>> utc = timezone('UTC') >>> eastern = timezone('US/Eastern') >>> eastern.zone 'US/Eastern' >>> timezone(unicode('US/Eastern')) is eastern True >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc) >>> loc_dt = utc_dt.astimezone(eastern) >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' >>> loc_dt.strftime(fmt) '2002-10-27 01:00:00 EST (-0500)' >>> (loc_dt - timedelta(minutes=10)).strftime(fmt) '2002-10-27 00:50:00 EST (-0500)' >>> eastern.normalize(loc_dt - timedelta(minutes=10)).strftime(fmt) '2002-10-27 01:50:00 EDT (-0400)' >>> (loc_dt + timedelta(minutes=10)).strftime(fmt) '2002-10-27 01:10:00 EST (-0500)' Raises UnknownTimeZoneError if passed an unknown zone. >>> try: ... timezone('Asia/Shangri-La') ... 
except UnknownTimeZoneError: ... print('Unknown') Unknown >>> try: ... timezone(unicode('\N{TRADE MARK SIGN}')) ... except UnknownTimeZoneError: ... print('Unknown') Unknown ''' if zone is None: raise UnknownTimeZoneError(None) if zone.upper() == 'UTC': return utc try: zone = ascii(zone) except UnicodeEncodeError: # All valid timezones are ASCII raise UnknownTimeZoneError(zone) zone = _case_insensitive_zone_lookup(_unmunge_zone(zone)) if zone not in _tzinfo_cache: if zone in all_timezones_set: # noqa fp = open_resource(zone) try: _tzinfo_cache[zone] = build_tzinfo(zone, fp) finally: fp.close() else: raise UnknownTimeZoneError(zone) return _tzinfo_cache[zone] def _unmunge_zone(zone): """Undo the time zone name munging done by older versions of pytz.""" return zone.replace('_plus_', '+').replace('_minus_', '-') _all_timezones_lower_to_standard = None def _case_insensitive_zone_lookup(zone): """case-insensitively matching timezone, else return zone unchanged""" global _all_timezones_lower_to_standard if _all_timezones_lower_to_standard is None: _all_timezones_lower_to_standard = dict((tz.lower(), tz) for tz in all_timezones) # noqa return _all_timezones_lower_to_standard.get(zone.lower()) or zone # noqa ZERO = datetime.timedelta(0) HOUR = datetime.timedelta(hours=1) class UTC(BaseTzInfo): """UTC Optimized UTC implementation. It unpickles using the single module global instance defined beneath this class declaration. 
""" zone = "UTC" _utcoffset = ZERO _dst = ZERO _tzname = zone def fromutc(self, dt): if dt.tzinfo is None: return self.localize(dt) return super(utc.__class__, self).fromutc(dt) def utcoffset(self, dt): return ZERO def tzname(self, dt): return "UTC" def dst(self, dt): return ZERO def __reduce__(self): return _UTC, () def localize(self, dt, is_dst=False): '''Convert naive time to local time''' if dt.tzinfo is not None: raise ValueError('Not naive datetime (tzinfo is already set)') return dt.replace(tzinfo=self) def normalize(self, dt, is_dst=False): '''Correct the timezone information on the given datetime''' if dt.tzinfo is self: return dt if dt.tzinfo is None: raise ValueError('Naive time - no tzinfo set') return dt.astimezone(self) def __repr__(self): return "<UTC>" def __str__(self): return "UTC" UTC = utc = UTC() # UTC is a singleton def _UTC(): """Factory function for utc unpickling. Makes sure that unpickling a utc instance always returns the same module global. These examples belong in the UTC class above, but it is obscured; or in the README.rst, but we are not depending on Python 2.4 so integrating the README.rst examples with the unit tests is not trivial. >>> import datetime, pickle >>> dt = datetime.datetime(2005, 3, 1, 14, 13, 21, tzinfo=utc) >>> naive = dt.replace(tzinfo=None) >>> p = pickle.dumps(dt, 1) >>> naive_p = pickle.dumps(naive, 1) >>> len(p) - len(naive_p) 17 >>> new = pickle.loads(p) >>> new == dt True >>> new is dt False >>> new.tzinfo is dt.tzinfo True >>> utc is UTC is timezone('UTC') True >>> utc is timezone('GMT') False """ return utc _UTC.__safe_for_unpickling__ = True def _p(*args): """Factory function for unpickling pytz tzinfo instances. Just a wrapper around tzinfo.unpickler to save a few bytes in each pickle by shortening the path. """ return unpickler(*args) _p.__safe_for_unpickling__ = True class _CountryTimezoneDict(LazyDict): """Map ISO 3166 country code to a list of timezone names commonly used in that country. 
iso3166_code is the two letter code used to identify the country. >>> def print_list(list_of_strings): ... 'We use a helper so doctests work under Python 2.3 -> 3.x' ... for s in list_of_strings: ... print(s) >>> print_list(country_timezones['nz']) Pacific/Auckland Pacific/Chatham >>> print_list(country_timezones['ch']) Europe/Zurich >>> print_list(country_timezones['CH']) Europe/Zurich >>> print_list(country_timezones[unicode('ch')]) Europe/Zurich >>> print_list(country_timezones['XXX']) Traceback (most recent call last): ... KeyError: 'XXX' Previously, this information was exposed as a function rather than a dictionary. This is still supported:: >>> print_list(country_timezones('nz')) Pacific/Auckland Pacific/Chatham """ def __call__(self, iso3166_code): """Backwards compatibility.""" return self[iso3166_code] def _fill(self): data = {} zone_tab = open_resource('zone.tab') try: for line in zone_tab: line = line.decode('UTF-8') if line.startswith('#'): continue code, coordinates, zone = line.split(None, 4)[:3] if zone not in all_timezones_set: # noqa continue try: data[code].append(zone) except KeyError: data[code] = [zone] self.data = data finally: zone_tab.close() country_timezones = _CountryTimezoneDict() class _CountryNameDict(LazyDict): '''Dictionary proving ISO3166 code -> English name. 
>>> print(country_names['au']) Australia ''' def _fill(self): data = {} zone_tab = open_resource('iso3166.tab') try: for line in zone_tab.readlines(): line = line.decode('UTF-8') if line.startswith('#'): continue code, name = line.split(None, 1) data[code] = name.strip() self.data = data finally: zone_tab.close() country_names = _CountryNameDict() # Time-zone info based solely on fixed offsets class _FixedOffset(datetime.tzinfo): zone = None # to match the standard pytz API def __init__(self, minutes): if abs(minutes) >= 1440: raise ValueError("absolute offset is too large", minutes) self._minutes = minutes self._offset = datetime.timedelta(minutes=minutes) def utcoffset(self, dt): return self._offset def __reduce__(self): return FixedOffset, (self._minutes, ) def dst(self, dt): return ZERO def tzname(self, dt): return None def __repr__(self): return 'pytz.FixedOffset(%d)' % self._minutes def localize(self, dt, is_dst=False): '''Convert naive time to local time''' if dt.tzinfo is not None: raise ValueError('Not naive datetime (tzinfo is already set)') return dt.replace(tzinfo=self) def normalize(self, dt, is_dst=False): '''Correct the timezone information on the given datetime''' if dt.tzinfo is self: return dt if dt.tzinfo is None: raise ValueError('Naive time - no tzinfo set') return dt.astimezone(self) def FixedOffset(offset, _tzinfos={}): """return a fixed-offset timezone based off a number of minutes. >>> one = FixedOffset(-330) >>> one pytz.FixedOffset(-330) >>> str(one.utcoffset(datetime.datetime.now())) '-1 day, 18:30:00' >>> str(one.dst(datetime.datetime.now())) '0:00:00' >>> two = FixedOffset(1380) >>> two pytz.FixedOffset(1380) >>> str(two.utcoffset(datetime.datetime.now())) '23:00:00' >>> str(two.dst(datetime.datetime.now())) '0:00:00' The datetime.timedelta must be between the range of -1 and 1 day, non-inclusive. >>> FixedOffset(1440) Traceback (most recent call last): ... 
ValueError: ('absolute offset is too large', 1440) >>> FixedOffset(-1440) Traceback (most recent call last): ... ValueError: ('absolute offset is too large', -1440) An offset of 0 is special-cased to return UTC. >>> FixedOffset(0) is UTC True There should always be only one instance of a FixedOffset per timedelta. This should be true for multiple creation calls. >>> FixedOffset(-330) is one True >>> FixedOffset(1380) is two True It should also be true for pickling. >>> import pickle >>> pickle.loads(pickle.dumps(one)) is one True >>> pickle.loads(pickle.dumps(two)) is two True """ if offset == 0: return UTC info = _tzinfos.get(offset) if info is None: # We haven't seen this one before. we need to save it. # Use setdefault to avoid a race condition and make sure we have # only one info = _tzinfos.setdefault(offset, _FixedOffset(offset)) return info FixedOffset.__safe_for_unpickling__ = True def _test(): import doctest sys.path.insert(0, os.pardir) import pytz return doctest.testmod(pytz) if __name__ == '__main__': _test() all_timezones = \ ['Africa/Abidjan', 'Africa/Accra', 'Africa/Addis_Ababa', 'Africa/Algiers', 'Africa/Asmara', 'Africa/Asmera', 'Africa/Bamako', 'Africa/Bangui', 'Africa/Banjul', 'Africa/Bissau', 'Africa/Blantyre', 'Africa/Brazzaville', 'Africa/Bujumbura', 'Africa/Cairo', 'Africa/Casablanca', 'Africa/Ceuta', 'Africa/Conakry', 'Africa/Dakar', 'Africa/Dar_es_Salaam', 'Africa/Djibouti', 'Africa/Douala', 'Africa/El_Aaiun', 'Africa/Freetown', 'Africa/Gaborone', 'Africa/Harare', 'Africa/Johannesburg', 'Africa/Juba', 'Africa/Kampala', 'Africa/Khartoum', 'Africa/Kigali', 'Africa/Kinshasa', 'Africa/Lagos', 'Africa/Libreville', 'Africa/Lome', 'Africa/Luanda', 'Africa/Lubumbashi', 'Africa/Lusaka', 'Africa/Malabo', 'Africa/Maputo', 'Africa/Maseru', 'Africa/Mbabane', 'Africa/Mogadishu', 'Africa/Monrovia', 'Africa/Nairobi', 'Africa/Ndjamena', 'Africa/Niamey', 'Africa/Nouakchott', 'Africa/Ouagadougou', 'Africa/Porto-Novo', 'Africa/Sao_Tome', 'Africa/Timbuktu', 
'Africa/Tripoli', 'Africa/Tunis', 'Africa/Windhoek', 'America/Adak', 'America/Anchorage', 'America/Anguilla', 'America/Antigua', 'America/Araguaina', 'America/Argentina/Buenos_Aires', 'America/Argentina/Catamarca', 'America/Argentina/ComodRivadavia', 'America/Argentina/Cordoba', 'America/Argentina/Jujuy', 'America/Argentina/La_Rioja', 'America/Argentina/Mendoza', 'America/Argentina/Rio_Gallegos', 'America/Argentina/Salta', 'America/Argentina/San_Juan', 'America/Argentina/San_Luis', 'America/Argentina/Tucuman', 'America/Argentina/Ushuaia', 'America/Aruba', 'America/Asuncion', 'America/Atikokan', 'America/Atka', 'America/Bahia', 'America/Bahia_Banderas', 'America/Barbados', 'America/Belem', 'America/Belize', 'America/Blanc-Sablon', 'America/Boa_Vista', 'America/Bogota', 'America/Boise', 'America/Buenos_Aires', 'America/Cambridge_Bay', 'America/Campo_Grande', 'America/Cancun', 'America/Caracas', 'America/Catamarca', 'America/Cayenne', 'America/Cayman', 'America/Chicago', 'America/Chihuahua', 'America/Coral_Harbour', 'America/Cordoba', 'America/Costa_Rica', 'America/Creston', 'America/Cuiaba', 'America/Curacao', 'America/Danmarkshavn', 'America/Dawson', 'America/Dawson_Creek', 'America/Denver', 'America/Detroit', 'America/Dominica', 'America/Edmonton', 'America/Eirunepe', 'America/El_Salvador', 'America/Ensenada', 'America/Fort_Nelson', 'America/Fort_Wayne', 'America/Fortaleza', 'America/Glace_Bay', 'America/Godthab', 'America/Goose_Bay', 'America/Grand_Turk', 'America/Grenada', 'America/Guadeloupe', 'America/Guatemala', 'America/Guayaquil', 'America/Guyana', 'America/Halifax', 'America/Havana', 'America/Hermosillo', 'America/Indiana/Indianapolis', 'America/Indiana/Knox', 'America/Indiana/Marengo', 'America/Indiana/Petersburg', 'America/Indiana/Tell_City', 'America/Indiana/Vevay', 'America/Indiana/Vincennes', 'America/Indiana/Winamac', 'America/Indianapolis', 'America/Inuvik', 'America/Iqaluit', 'America/Jamaica', 'America/Jujuy', 'America/Juneau', 
'America/Kentucky/Louisville', 'America/Kentucky/Monticello', 'America/Knox_IN', 'America/Kralendijk', 'America/La_Paz', 'America/Lima', 'America/Los_Angeles', 'America/Louisville', 'America/Lower_Princes', 'America/Maceio', 'America/Managua', 'America/Manaus', 'America/Marigot', 'America/Martinique', 'America/Matamoros', 'America/Mazatlan', 'America/Mendoza', 'America/Menominee', 'America/Merida', 'America/Metlakatla', 'America/Mexico_City', 'America/Miquelon', 'America/Moncton', 'America/Monterrey', 'America/Montevideo', 'America/Montreal', 'America/Montserrat', 'America/Nassau', 'America/New_York', 'America/Nipigon', 'America/Nome', 'America/Noronha', 'America/North_Dakota/Beulah', 'America/North_Dakota/Center', 'America/North_Dakota/New_Salem', 'America/Nuuk', 'America/Ojinaga', 'America/Panama', 'America/Pangnirtung', 'America/Paramaribo', 'America/Phoenix', 'America/Port-au-Prince', 'America/Port_of_Spain', 'America/Porto_Acre', 'America/Porto_Velho', 'America/Puerto_Rico', 'America/Punta_Arenas', 'America/Rainy_River', 'America/Rankin_Inlet', 'America/Recife', 'America/Regina', 'America/Resolute', 'America/Rio_Branco', 'America/Rosario', 'America/Santa_Isabel', 'America/Santarem', 'America/Santiago', 'America/Santo_Domingo', 'America/Sao_Paulo', 'America/Scoresbysund', 'America/Shiprock', 'America/Sitka', 'America/St_Barthelemy', 'America/St_Johns', 'America/St_Kitts', 'America/St_Lucia', 'America/St_Thomas', 'America/St_Vincent', 'America/Swift_Current', 'America/Tegucigalpa', 'America/Thule', 'America/Thunder_Bay', 'America/Tijuana', 'America/Toronto', 'America/Tortola', 'America/Vancouver', 'America/Virgin', 'America/Whitehorse', 'America/Winnipeg', 'America/Yakutat', 'America/Yellowknife', 'Antarctica/Casey', 'Antarctica/Davis', 'Antarctica/DumontDUrville', 'Antarctica/Macquarie', 'Antarctica/Mawson', 'Antarctica/McMurdo', 'Antarctica/Palmer', 'Antarctica/Rothera', 'Antarctica/South_Pole', 'Antarctica/Syowa', 'Antarctica/Troll', 'Antarctica/Vostok', 
'Arctic/Longyearbyen', 'Asia/Aden', 'Asia/Almaty', 'Asia/Amman', 'Asia/Anadyr', 'Asia/Aqtau', 'Asia/Aqtobe', 'Asia/Ashgabat', 'Asia/Ashkhabad', 'Asia/Atyrau', 'Asia/Baghdad', 'Asia/Bahrain', 'Asia/Baku', 'Asia/Bangkok', 'Asia/Barnaul', 'Asia/Beirut', 'Asia/Bishkek', 'Asia/Brunei', 'Asia/Calcutta', 'Asia/Chita', 'Asia/Choibalsan', 'Asia/Chongqing', 'Asia/Chungking', 'Asia/Colombo', 'Asia/Dacca', 'Asia/Damascus', 'Asia/Dhaka', 'Asia/Dili', 'Asia/Dubai', 'Asia/Dushanbe', 'Asia/Famagusta', 'Asia/Gaza', 'Asia/Harbin', 'Asia/Hebron', 'Asia/Ho_Chi_Minh', 'Asia/Hong_Kong', 'Asia/Hovd', 'Asia/Irkutsk', 'Asia/Istanbul', 'Asia/Jakarta', 'Asia/Jayapura', 'Asia/Jerusalem', 'Asia/Kabul', 'Asia/Kamchatka', 'Asia/Karachi', 'Asia/Kashgar', 'Asia/Kathmandu', 'Asia/Katmandu', 'Asia/Khandyga', 'Asia/Kolkata', 'Asia/Krasnoyarsk', 'Asia/Kuala_Lumpur', 'Asia/Kuching', 'Asia/Kuwait', 'Asia/Macao', 'Asia/Macau', 'Asia/Magadan', 'Asia/Makassar', 'Asia/Manila', 'Asia/Muscat', 'Asia/Nicosia', 'Asia/Novokuznetsk', 'Asia/Novosibirsk', 'Asia/Omsk', 'Asia/Oral', 'Asia/Phnom_Penh', 'Asia/Pontianak', 'Asia/Pyongyang', 'Asia/Qatar', 'Asia/Qostanay', 'Asia/Qyzylorda', 'Asia/Rangoon', 'Asia/Riyadh', 'Asia/Saigon', 'Asia/Sakhalin', 'Asia/Samarkand', 'Asia/Seoul', 'Asia/Shanghai', 'Asia/Singapore', 'Asia/Srednekolymsk', 'Asia/Taipei', 'Asia/Tashkent', 'Asia/Tbilisi', 'Asia/Tehran', 'Asia/Tel_Aviv', 'Asia/Thimbu', 'Asia/Thimphu', 'Asia/Tokyo', 'Asia/Tomsk', 'Asia/Ujung_Pandang', 'Asia/Ulaanbaatar', 'Asia/Ulan_Bator', 'Asia/Urumqi', 'Asia/Ust-Nera', 'Asia/Vientiane', 'Asia/Vladivostok', 'Asia/Yakutsk', 'Asia/Yangon', 'Asia/Yekaterinburg', 'Asia/Yerevan', 'Atlantic/Azores', 'Atlantic/Bermuda', 'Atlantic/Canary', 'Atlantic/Cape_Verde', 'Atlantic/Faeroe', 'Atlantic/Faroe', 'Atlantic/Jan_Mayen', 'Atlantic/Madeira', 'Atlantic/Reykjavik', 'Atlantic/South_Georgia', 'Atlantic/St_Helena', 'Atlantic/Stanley', 'Australia/ACT', 'Australia/Adelaide', 'Australia/Brisbane', 'Australia/Broken_Hill', 'Australia/Canberra', 
'Australia/Currie', 'Australia/Darwin', 'Australia/Eucla', 'Australia/Hobart', 'Australia/LHI', 'Australia/Lindeman', 'Australia/Lord_Howe', 'Australia/Melbourne', 'Australia/NSW', 'Australia/North', 'Australia/Perth', 'Australia/Queensland', 'Australia/South', 'Australia/Sydney', 'Australia/Tasmania', 'Australia/Victoria', 'Australia/West', 'Australia/Yancowinna', 'Brazil/Acre', 'Brazil/DeNoronha', 'Brazil/East', 'Brazil/West', 'CET', 'CST6CDT', 'Canada/Atlantic', 'Canada/Central', 'Canada/Eastern', 'Canada/Mountain', 'Canada/Newfoundland', 'Canada/Pacific', 'Canada/Saskatchewan', 'Canada/Yukon', 'Chile/Continental', 'Chile/EasterIsland', 'Cuba', 'EET', 'EST', 'EST5EDT', 'Egypt', 'Eire', 'Etc/GMT', 'Etc/GMT+0', 'Etc/GMT+1', 'Etc/GMT+10', 'Etc/GMT+11', 'Etc/GMT+12', 'Etc/GMT+2', 'Etc/GMT+3', 'Etc/GMT+4', 'Etc/GMT+5', 'Etc/GMT+6', 'Etc/GMT+7', 'Etc/GMT+8', 'Etc/GMT+9', 'Etc/GMT-0', 'Etc/GMT-1', 'Etc/GMT-10', 'Etc/GMT-11', 'Etc/GMT-12', 'Etc/GMT-13', 'Etc/GMT-14', 'Etc/GMT-2', 'Etc/GMT-3', 'Etc/GMT-4', 'Etc/GMT-5', 'Etc/GMT-6', 'Etc/GMT-7', 'Etc/GMT-8', 'Etc/GMT-9', 'Etc/GMT0', 'Etc/Greenwich', 'Etc/UCT', 'Etc/UTC', 'Etc/Universal', 'Etc/Zulu', 'Europe/Amsterdam', 'Europe/Andorra', 'Europe/Astrakhan', 'Europe/Athens', 'Europe/Belfast', 'Europe/Belgrade', 'Europe/Berlin', 'Europe/Bratislava', 'Europe/Brussels', 'Europe/Bucharest', 'Europe/Budapest', 'Europe/Busingen', 'Europe/Chisinau', 'Europe/Copenhagen', 'Europe/Dublin', 'Europe/Gibraltar', 'Europe/Guernsey', 'Europe/Helsinki', 'Europe/Isle_of_Man', 'Europe/Istanbul', 'Europe/Jersey', 'Europe/Kaliningrad', 'Europe/Kiev', 'Europe/Kirov', 'Europe/Lisbon', 'Europe/Ljubljana', 'Europe/London', 'Europe/Luxembourg', 'Europe/Madrid', 'Europe/Malta', 'Europe/Mariehamn', 'Europe/Minsk', 'Europe/Monaco', 'Europe/Moscow', 'Europe/Nicosia', 'Europe/Oslo', 'Europe/Paris', 'Europe/Podgorica', 'Europe/Prague', 'Europe/Riga', 'Europe/Rome', 'Europe/Samara', 'Europe/San_Marino', 'Europe/Sarajevo', 'Europe/Saratov', 
'Europe/Simferopol', 'Europe/Skopje', 'Europe/Sofia', 'Europe/Stockholm', 'Europe/Tallinn', 'Europe/Tirane', 'Europe/Tiraspol', 'Europe/Ulyanovsk', 'Europe/Uzhgorod', 'Europe/Vaduz', 'Europe/Vatican', 'Europe/Vienna', 'Europe/Vilnius', 'Europe/Volgograd', 'Europe/Warsaw', 'Europe/Zagreb', 'Europe/Zaporozhye', 'Europe/Zurich', 'GB', 'GB-Eire', 'GMT', 'GMT+0', 'GMT-0', 'GMT0', 'Greenwich', 'HST', 'Hongkong', 'Iceland', 'Indian/Antananarivo', 'Indian/Chagos', 'Indian/Christmas', 'Indian/Cocos', 'Indian/Comoro', 'Indian/Kerguelen', 'Indian/Mahe', 'Indian/Maldives', 'Indian/Mauritius', 'Indian/Mayotte', 'Indian/Reunion', 'Iran', 'Israel', 'Jamaica', 'Japan', 'Kwajalein', 'Libya', 'MET', 'MST', 'MST7MDT', 'Mexico/BajaNorte', 'Mexico/BajaSur', 'Mexico/General', 'NZ', 'NZ-CHAT', 'Navajo', 'PRC', 'PST8PDT', 'Pacific/Apia', 'Pacific/Auckland', 'Pacific/Bougainville', 'Pacific/Chatham', 'Pacific/Chuuk', 'Pacific/Easter', 'Pacific/Efate', 'Pacific/Enderbury', 'Pacific/Fakaofo', 'Pacific/Fiji', 'Pacific/Funafuti', 'Pacific/Galapagos', 'Pacific/Gambier', 'Pacific/Guadalcanal', 'Pacific/Guam', 'Pacific/Honolulu', 'Pacific/Johnston', 'Pacific/Kiritimati', 'Pacific/Kosrae', 'Pacific/Kwajalein', 'Pacific/Majuro', 'Pacific/Marquesas', 'Pacific/Midway', 'Pacific/Nauru', 'Pacific/Niue', 'Pacific/Norfolk', 'Pacific/Noumea', 'Pacific/Pago_Pago', 'Pacific/Palau', 'Pacific/Pitcairn', 'Pacific/Pohnpei', 'Pacific/Ponape', 'Pacific/Port_Moresby', 'Pacific/Rarotonga', 'Pacific/Saipan', 'Pacific/Samoa', 'Pacific/Tahiti', 'Pacific/Tarawa', 'Pacific/Tongatapu', 'Pacific/Truk', 'Pacific/Wake', 'Pacific/Wallis', 'Pacific/Yap', 'Poland', 'Portugal', 'ROC', 'ROK', 'Singapore', 'Turkey', 'UCT', 'US/Alaska', 'US/Aleutian', 'US/Arizona', 'US/Central', 'US/East-Indiana', 'US/Eastern', 'US/Hawaii', 'US/Indiana-Starke', 'US/Michigan', 'US/Mountain', 'US/Pacific', 'US/Samoa', 'UTC', 'Universal', 'W-SU', 'WET', 'Zulu'] all_timezones = LazyList( tz for tz in all_timezones if resource_exists(tz)) 
# NOTE(review): generated timezone-name data from pytz/__init__.py --
# presumably produced from the IANA tz database release; do not edit the
# list contents by hand (confirm against pytz's gen_tzinfo tooling).
# `common_timezones` is the curated subset of `all_timezones`; both are
# wrapped in lazy containers so the membership filtering only runs when
# the collection is first used.
all_timezones_set = LazySet(all_timezones) common_timezones = \ ['Africa/Abidjan', 'Africa/Accra', 'Africa/Addis_Ababa', 'Africa/Algiers', 'Africa/Asmara', 'Africa/Bamako', 'Africa/Bangui', 'Africa/Banjul', 'Africa/Bissau', 'Africa/Blantyre', 'Africa/Brazzaville', 'Africa/Bujumbura', 'Africa/Cairo', 'Africa/Casablanca', 'Africa/Ceuta', 'Africa/Conakry', 'Africa/Dakar', 'Africa/Dar_es_Salaam', 'Africa/Djibouti', 'Africa/Douala', 'Africa/El_Aaiun', 'Africa/Freetown', 'Africa/Gaborone', 'Africa/Harare', 'Africa/Johannesburg', 'Africa/Juba', 'Africa/Kampala', 'Africa/Khartoum', 'Africa/Kigali', 'Africa/Kinshasa', 'Africa/Lagos', 'Africa/Libreville', 'Africa/Lome', 'Africa/Luanda', 'Africa/Lubumbashi', 'Africa/Lusaka', 'Africa/Malabo', 'Africa/Maputo', 'Africa/Maseru', 'Africa/Mbabane', 'Africa/Mogadishu', 'Africa/Monrovia', 'Africa/Nairobi', 'Africa/Ndjamena', 'Africa/Niamey', 'Africa/Nouakchott', 'Africa/Ouagadougou', 'Africa/Porto-Novo', 'Africa/Sao_Tome', 'Africa/Tripoli', 'Africa/Tunis', 'Africa/Windhoek', 'America/Adak', 'America/Anchorage', 'America/Anguilla', 'America/Antigua', 'America/Araguaina', 'America/Argentina/Buenos_Aires', 'America/Argentina/Catamarca', 'America/Argentina/Cordoba', 'America/Argentina/Jujuy', 'America/Argentina/La_Rioja', 'America/Argentina/Mendoza', 'America/Argentina/Rio_Gallegos', 'America/Argentina/Salta', 'America/Argentina/San_Juan', 'America/Argentina/San_Luis', 'America/Argentina/Tucuman', 'America/Argentina/Ushuaia', 'America/Aruba', 'America/Asuncion', 'America/Atikokan', 'America/Bahia', 'America/Bahia_Banderas', 'America/Barbados', 'America/Belem', 'America/Belize', 'America/Blanc-Sablon', 'America/Boa_Vista', 'America/Bogota', 'America/Boise', 'America/Cambridge_Bay', 'America/Campo_Grande', 'America/Cancun', 'America/Caracas', 'America/Cayenne', 'America/Cayman', 'America/Chicago', 'America/Chihuahua', 'America/Costa_Rica', 'America/Creston', 'America/Cuiaba', 'America/Curacao', 'America/Danmarkshavn', 'America/Dawson', 
'America/Dawson_Creek', 'America/Denver', 'America/Detroit', 'America/Dominica', 'America/Edmonton', 'America/Eirunepe', 'America/El_Salvador', 'America/Fort_Nelson', 'America/Fortaleza', 'America/Glace_Bay', 'America/Goose_Bay', 'America/Grand_Turk', 'America/Grenada', 'America/Guadeloupe', 'America/Guatemala', 'America/Guayaquil', 'America/Guyana', 'America/Halifax', 'America/Havana', 'America/Hermosillo', 'America/Indiana/Indianapolis', 'America/Indiana/Knox', 'America/Indiana/Marengo', 'America/Indiana/Petersburg', 'America/Indiana/Tell_City', 'America/Indiana/Vevay', 'America/Indiana/Vincennes', 'America/Indiana/Winamac', 'America/Inuvik', 'America/Iqaluit', 'America/Jamaica', 'America/Juneau', 'America/Kentucky/Louisville', 'America/Kentucky/Monticello', 'America/Kralendijk', 'America/La_Paz', 'America/Lima', 'America/Los_Angeles', 'America/Lower_Princes', 'America/Maceio', 'America/Managua', 'America/Manaus', 'America/Marigot', 'America/Martinique', 'America/Matamoros', 'America/Mazatlan', 'America/Menominee', 'America/Merida', 'America/Metlakatla', 'America/Mexico_City', 'America/Miquelon', 'America/Moncton', 'America/Monterrey', 'America/Montevideo', 'America/Montserrat', 'America/Nassau', 'America/New_York', 'America/Nipigon', 'America/Nome', 'America/Noronha', 'America/North_Dakota/Beulah', 'America/North_Dakota/Center', 'America/North_Dakota/New_Salem', 'America/Nuuk', 'America/Ojinaga', 'America/Panama', 'America/Pangnirtung', 'America/Paramaribo', 'America/Phoenix', 'America/Port-au-Prince', 'America/Port_of_Spain', 'America/Porto_Velho', 'America/Puerto_Rico', 'America/Punta_Arenas', 'America/Rainy_River', 'America/Rankin_Inlet', 'America/Recife', 'America/Regina', 'America/Resolute', 'America/Rio_Branco', 'America/Santarem', 'America/Santiago', 'America/Santo_Domingo', 'America/Sao_Paulo', 'America/Scoresbysund', 'America/Sitka', 'America/St_Barthelemy', 'America/St_Johns', 'America/St_Kitts', 'America/St_Lucia', 'America/St_Thomas', 
'America/St_Vincent', 'America/Swift_Current', 'America/Tegucigalpa', 'America/Thule', 'America/Thunder_Bay', 'America/Tijuana', 'America/Toronto', 'America/Tortola', 'America/Vancouver', 'America/Whitehorse', 'America/Winnipeg', 'America/Yakutat', 'America/Yellowknife', 'Antarctica/Casey', 'Antarctica/Davis', 'Antarctica/DumontDUrville', 'Antarctica/Macquarie', 'Antarctica/Mawson', 'Antarctica/McMurdo', 'Antarctica/Palmer', 'Antarctica/Rothera', 'Antarctica/Syowa', 'Antarctica/Troll', 'Antarctica/Vostok', 'Arctic/Longyearbyen', 'Asia/Aden', 'Asia/Almaty', 'Asia/Amman', 'Asia/Anadyr', 'Asia/Aqtau', 'Asia/Aqtobe', 'Asia/Ashgabat', 'Asia/Atyrau', 'Asia/Baghdad', 'Asia/Bahrain', 'Asia/Baku', 'Asia/Bangkok', 'Asia/Barnaul', 'Asia/Beirut', 'Asia/Bishkek', 'Asia/Brunei', 'Asia/Chita', 'Asia/Choibalsan', 'Asia/Colombo', 'Asia/Damascus', 'Asia/Dhaka', 'Asia/Dili', 'Asia/Dubai', 'Asia/Dushanbe', 'Asia/Famagusta', 'Asia/Gaza', 'Asia/Hebron', 'Asia/Ho_Chi_Minh', 'Asia/Hong_Kong', 'Asia/Hovd', 'Asia/Irkutsk', 'Asia/Jakarta', 'Asia/Jayapura', 'Asia/Jerusalem', 'Asia/Kabul', 'Asia/Kamchatka', 'Asia/Karachi', 'Asia/Kathmandu', 'Asia/Khandyga', 'Asia/Kolkata', 'Asia/Krasnoyarsk', 'Asia/Kuala_Lumpur', 'Asia/Kuching', 'Asia/Kuwait', 'Asia/Macau', 'Asia/Magadan', 'Asia/Makassar', 'Asia/Manila', 'Asia/Muscat', 'Asia/Nicosia', 'Asia/Novokuznetsk', 'Asia/Novosibirsk', 'Asia/Omsk', 'Asia/Oral', 'Asia/Phnom_Penh', 'Asia/Pontianak', 'Asia/Pyongyang', 'Asia/Qatar', 'Asia/Qostanay', 'Asia/Qyzylorda', 'Asia/Riyadh', 'Asia/Sakhalin', 'Asia/Samarkand', 'Asia/Seoul', 'Asia/Shanghai', 'Asia/Singapore', 'Asia/Srednekolymsk', 'Asia/Taipei', 'Asia/Tashkent', 'Asia/Tbilisi', 'Asia/Tehran', 'Asia/Thimphu', 'Asia/Tokyo', 'Asia/Tomsk', 'Asia/Ulaanbaatar', 'Asia/Urumqi', 'Asia/Ust-Nera', 'Asia/Vientiane', 'Asia/Vladivostok', 'Asia/Yakutsk', 'Asia/Yangon', 'Asia/Yekaterinburg', 'Asia/Yerevan', 'Atlantic/Azores', 'Atlantic/Bermuda', 'Atlantic/Canary', 'Atlantic/Cape_Verde', 'Atlantic/Faroe', 
'Atlantic/Madeira', 'Atlantic/Reykjavik', 'Atlantic/South_Georgia', 'Atlantic/St_Helena', 'Atlantic/Stanley', 'Australia/Adelaide', 'Australia/Brisbane', 'Australia/Broken_Hill', 'Australia/Currie', 'Australia/Darwin', 'Australia/Eucla', 'Australia/Hobart', 'Australia/Lindeman', 'Australia/Lord_Howe', 'Australia/Melbourne', 'Australia/Perth', 'Australia/Sydney', 'Canada/Atlantic', 'Canada/Central', 'Canada/Eastern', 'Canada/Mountain', 'Canada/Newfoundland', 'Canada/Pacific', 'Europe/Amsterdam', 'Europe/Andorra', 'Europe/Astrakhan', 'Europe/Athens', 'Europe/Belgrade', 'Europe/Berlin', 'Europe/Bratislava', 'Europe/Brussels', 'Europe/Bucharest', 'Europe/Budapest', 'Europe/Busingen', 'Europe/Chisinau', 'Europe/Copenhagen', 'Europe/Dublin', 'Europe/Gibraltar', 'Europe/Guernsey', 'Europe/Helsinki', 'Europe/Isle_of_Man', 'Europe/Istanbul', 'Europe/Jersey', 'Europe/Kaliningrad', 'Europe/Kiev', 'Europe/Kirov', 'Europe/Lisbon', 'Europe/Ljubljana', 'Europe/London', 'Europe/Luxembourg', 'Europe/Madrid', 'Europe/Malta', 'Europe/Mariehamn', 'Europe/Minsk', 'Europe/Monaco', 'Europe/Moscow', 'Europe/Oslo', 'Europe/Paris', 'Europe/Podgorica', 'Europe/Prague', 'Europe/Riga', 'Europe/Rome', 'Europe/Samara', 'Europe/San_Marino', 'Europe/Sarajevo', 'Europe/Saratov', 'Europe/Simferopol', 'Europe/Skopje', 'Europe/Sofia', 'Europe/Stockholm', 'Europe/Tallinn', 'Europe/Tirane', 'Europe/Ulyanovsk', 'Europe/Uzhgorod', 'Europe/Vaduz', 'Europe/Vatican', 'Europe/Vienna', 'Europe/Vilnius', 'Europe/Volgograd', 'Europe/Warsaw', 'Europe/Zagreb', 'Europe/Zaporozhye', 'Europe/Zurich', 'GMT', 'Indian/Antananarivo', 'Indian/Chagos', 'Indian/Christmas', 'Indian/Cocos', 'Indian/Comoro', 'Indian/Kerguelen', 'Indian/Mahe', 'Indian/Maldives', 'Indian/Mauritius', 'Indian/Mayotte', 'Indian/Reunion', 'Pacific/Apia', 'Pacific/Auckland', 'Pacific/Bougainville', 'Pacific/Chatham', 'Pacific/Chuuk', 'Pacific/Easter', 'Pacific/Efate', 'Pacific/Enderbury', 'Pacific/Fakaofo', 'Pacific/Fiji', 'Pacific/Funafuti', 
'Pacific/Galapagos', 'Pacific/Gambier', 'Pacific/Guadalcanal', 'Pacific/Guam', 'Pacific/Honolulu', 'Pacific/Kiritimati', 'Pacific/Kosrae', 'Pacific/Kwajalein', 'Pacific/Majuro', 'Pacific/Marquesas', 'Pacific/Midway', 'Pacific/Nauru', 'Pacific/Niue', 'Pacific/Norfolk', 'Pacific/Noumea', 'Pacific/Pago_Pago', 'Pacific/Palau', 'Pacific/Pitcairn', 'Pacific/Pohnpei', 'Pacific/Port_Moresby', 'Pacific/Rarotonga', 'Pacific/Saipan', 'Pacific/Tahiti', 'Pacific/Tarawa', 'Pacific/Tongatapu', 'Pacific/Wake', 'Pacific/Wallis', 'US/Alaska', 'US/Arizona', 'US/Central', 'US/Eastern', 'US/Hawaii', 'US/Mountain', 'US/Pacific', 'UTC'] common_timezones = LazyList( tz for tz in common_timezones if tz in all_timezones) common_timezones_set = LazySet(common_timezones)
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/reference.py
'''
Reference tzinfo implementations from the Python docs.
Used for testing against as they are only correct for the years
1987 to 2006. Do not use these for real code.
'''

from datetime import tzinfo, timedelta, datetime
from pytz import HOUR, ZERO, UTC

__all__ = [
    'FixedOffset', 'LocalTimezone', 'USTimeZone',
    'Eastern', 'Central', 'Mountain', 'Pacific', 'UTC'
]


class FixedOffset(tzinfo):
    """Fixed offset in minutes east from UTC.

    Note that ``FixedOffset(0, "UTC")`` is a different way to build a
    UTC tzinfo object.
    """

    def __init__(self, offset, name):
        # Normalise the minute count to a timedelta once, up front.
        self.__offset = timedelta(minutes=offset)
        self.__name = name

    def utcoffset(self, dt):
        return self.__offset

    def tzname(self, dt):
        return self.__name

    def dst(self, dt):
        # A fixed-offset zone never observes daylight saving time.
        return ZERO


import time as _time

# Platform-reported standard offset, DST offset, and their difference.
STDOFFSET = timedelta(seconds=-_time.timezone)
DSTOFFSET = timedelta(seconds=-_time.altzone) if _time.daylight else STDOFFSET
DSTDIFF = DSTOFFSET - STDOFFSET


class LocalTimezone(tzinfo):
    """A class capturing the platform's idea of local time."""

    def utcoffset(self, dt):
        return DSTOFFSET if self._isdst(dt) else STDOFFSET

    def dst(self, dt):
        return DSTDIFF if self._isdst(dt) else ZERO

    def tzname(self, dt):
        return _time.tzname[self._isdst(dt)]

    def _isdst(self, dt):
        # Round-trip the wall-clock time through mktime()/localtime() so
        # the platform decides whether DST is in effect at that moment.
        timetuple = (dt.year, dt.month, dt.day,
                     dt.hour, dt.minute, dt.second,
                     dt.weekday(), 0, -1)
        local = _time.localtime(_time.mktime(timetuple))
        return local.tm_isdst > 0


Local = LocalTimezone()


def first_sunday_on_or_after(dt):
    """Return the first Sunday falling on or after *dt*."""
    days_to_go = 6 - dt.weekday()
    return dt + timedelta(days_to_go) if days_to_go else dt


# In the US, DST starts at 2am (standard time) on the first Sunday in April.
DSTSTART = datetime(1, 4, 1, 2)
# and ends at 2am (DST time; 1am standard time) on the last Sunday of Oct.
# which is the first Sunday on or after Oct 25.
DSTEND = datetime(1, 10, 25, 1)

# A complete implementation of current DST rules for major US time zones.
class USTimeZone(tzinfo):
    """US timezone implementing the 1987-2006 DST rules.

    Offsets are derived from a fixed standard offset plus the result of
    :meth:`dst`, which applies the first-Sunday-in-April /
    last-Sunday-in-October rules via ``first_sunday_on_or_after``.
    """

    def __init__(self, hours, reprname, stdname, dstname):
        self.stdoffset = timedelta(hours=hours)
        self.reprname = reprname
        self.stdname = stdname
        self.dstname = dstname

    def __repr__(self):
        return self.reprname

    def tzname(self, dt):
        return self.dstname if self.dst(dt) else self.stdname

    def utcoffset(self, dt):
        return self.stdoffset + self.dst(dt)

    def dst(self, dt):
        if dt is None or dt.tzinfo is None:
            # An exception may be sensible here, in one or both cases.
            # It depends on how you want to treat them. The default
            # fromutc() implementation (called by the default astimezone()
            # implementation) passes a datetime with dt.tzinfo is self.
            return ZERO
        assert dt.tzinfo is self

        # Find first Sunday in April & the last in October.
        start = first_sunday_on_or_after(DSTSTART.replace(year=dt.year))
        end = first_sunday_on_or_after(DSTEND.replace(year=dt.year))

        # Can't compare naive to aware objects, so strip the timezone
        # from dt first.
        naive = dt.replace(tzinfo=None)
        return HOUR if start <= naive < end else ZERO


Eastern = USTimeZone(-5, "Eastern", "EST", "EDT")
Central = USTimeZone(-6, "Central", "CST", "CDT")
Mountain = USTimeZone(-7, "Mountain", "MST", "MDT")
Pacific = USTimeZone(-8, "Pacific", "PST", "PDT")
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/exceptions.py
'''
Custom exceptions raised by pytz.
'''

__all__ = [
    'UnknownTimeZoneError', 'InvalidTimeError', 'AmbiguousTimeError',
    'NonExistentTimeError',
]


class Error(Exception):
    '''Common base class for every exception the pytz library raises.'''


class UnknownTimeZoneError(KeyError, Error):
    '''Raised when pytz is passed a timezone it does not know about.

    >>> isinstance(UnknownTimeZoneError(), LookupError)
    True

    Deriving from KeyError keeps backwards compatibility with code that
    relied on the undocumented behavior of earlier pytz releases.

    >>> isinstance(UnknownTimeZoneError(), KeyError)
    True

    Like the other pytz exceptions, it is also a subclass of
    pytz.exceptions.Error.

    >>> isinstance(UnknownTimeZoneError(), Error)
    True
    '''
    pass


class InvalidTimeError(Error):
    '''Base class for the invalid wallclock-time exceptions below.'''


class AmbiguousTimeError(InvalidTimeError):
    '''Raised when creating a wallclock time that occurs twice.

    At the end of a DST transition the clocks are set back, so a given
    local time happens both before and after the change; without extra
    information either reading could be the intended one.

    See DstTzInfo.normalize() for more info.
    '''


class NonExistentTimeError(InvalidTimeError):
    '''Raised when creating a wallclock time that never occurs.

    At the start of a DST transition the clocks jump forward, so the
    instants that were skipped over do not exist as local times.
    '''
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/lazy.py
from threading import RLock
try:
    from collections.abc import Mapping as DictMixin
except ImportError:  # Python < 3.3
    try:
        from UserDict import DictMixin  # Python 2
    except ImportError:  # Python 3.0-3.3
        from collections import Mapping as DictMixin


# With lazy loading, we might end up with multiple threads triggering
# it at the same time. We need a lock.
_fill_lock = RLock()


class LazyDict(DictMixin):
    """Dictionary populated on first use.

    Subclasses supply a ``_fill()`` method that populates ``self.data``.
    Every read accessor triggers that fill at most once, using the
    module-wide ``_fill_lock`` with a double-check inside the lock so
    concurrent first reads stay safe.
    """
    # Sentinel: ``None`` means "not yet populated".
    data = None

    def __getitem__(self, key):
        if self.data is None:
            _fill_lock.acquire()
            try:
                if self.data is None:
                    self._fill()
            finally:
                _fill_lock.release()
        # NOTE(review): lookups normalise with ``key.upper()`` --
        # presumably the stored keys are upper-case ISO country codes.
        # The other accessors (``__contains__``, ``__iter__``) do NOT
        # normalise; confirm against the callers before relying on
        # case-insensitive membership tests.
        return self.data[key.upper()]

    def __contains__(self, key):
        if self.data is None:
            _fill_lock.acquire()
            try:
                if self.data is None:
                    self._fill()
            finally:
                _fill_lock.release()
        return key in self.data

    def __iter__(self):
        if self.data is None:
            _fill_lock.acquire()
            try:
                if self.data is None:
                    self._fill()
            finally:
                _fill_lock.release()
        return iter(self.data)

    def __len__(self):
        if self.data is None:
            _fill_lock.acquire()
            try:
                if self.data is None:
                    self._fill()
            finally:
                _fill_lock.release()
        return len(self.data)

    def keys(self):
        if self.data is None:
            _fill_lock.acquire()
            try:
                if self.data is None:
                    self._fill()
            finally:
                _fill_lock.release()
        return self.data.keys()


class LazyList(list):
    """List populated on first use."""

    # Every list operation that must trigger the lazy fill. Names not
    # supported by the running Python version are filtered out below.
    _props = [
        '__str__', '__repr__', '__unicode__',
        '__hash__', '__sizeof__', '__cmp__',
        '__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
        'append', 'count', 'index', 'extend', 'insert', 'pop', 'remove',
        'reverse', 'sort', '__add__', '__radd__', '__iadd__', '__mul__',
        '__rmul__', '__imul__', '__contains__', '__len__', '__nonzero__',
        '__getitem__', '__setitem__', '__delitem__', '__iter__',
        '__reversed__', '__getslice__', '__setslice__', '__delslice__']

    def __new__(cls, fill_iter=None):
        # With nothing to fill from there is nothing to defer: hand back
        # a plain empty list instead of a lazy wrapper.
        if fill_iter is None:
            return list()

        # We need a new class as we will be dynamically messing with its
        # methods.
        class LazyList(list):
            pass

        # Box the iterable in a one-element list so the closures below
        # can both test "not yet consumed" and pop it exactly once.
        fill_iter = [fill_iter]

        def lazy(name):
            # Build a proxy for list method *name* that performs the fill
            # on first use, then deletes every proxy from the per-instance
            # class so later calls fall through to the real ``list``
            # implementations.
            def _lazy(self, *args, **kw):
                _fill_lock.acquire()
                try:
                    if len(fill_iter) > 0:
                        list.extend(self, fill_iter.pop())
                        for method_name in cls._props:
                            delattr(LazyList, method_name)
                finally:
                    _fill_lock.release()
                return getattr(list, name)(self, *args, **kw)
            return _lazy

        for name in cls._props:
            setattr(LazyList, name, lazy(name))

        new_list = LazyList()
        return new_list

# Not all versions of Python declare the same magic methods.
# Filter out properties that don't exist in this version of Python
# from the list.
LazyList._props = [prop for prop in LazyList._props if hasattr(list, prop)]


class LazySet(set):
    """Set populated on first use."""

    # Every set operation that must trigger the lazy fill. Names not
    # supported by the running Python version are filtered out below.
    _props = (
        '__str__', '__repr__', '__unicode__',
        '__hash__', '__sizeof__', '__cmp__',
        '__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
        '__contains__', '__len__', '__nonzero__',
        '__getitem__', '__setitem__', '__delitem__', '__iter__',
        '__sub__', '__and__', '__xor__', '__or__',
        '__rsub__', '__rand__', '__rxor__', '__ror__',
        '__isub__', '__iand__', '__ixor__', '__ior__',
        'add', 'clear', 'copy', 'difference', 'difference_update',
        'discard', 'intersection', 'intersection_update', 'isdisjoint',
        'issubset', 'issuperset', 'pop', 'remove',
        'symmetric_difference', 'symmetric_difference_update',
        'union', 'update')

    def __new__(cls, fill_iter=None):
        # With nothing to fill from, return a plain empty set.
        if fill_iter is None:
            return set()

        # Fresh class per instance: its proxy methods are deleted once
        # the fill has happened (same scheme as LazyList above).
        class LazySet(set):
            pass

        fill_iter = [fill_iter]

        def lazy(name):
            # Proxy for set method *name*: fill once, strip the proxies,
            # then delegate to the real ``set`` implementation.
            def _lazy(self, *args, **kw):
                _fill_lock.acquire()
                try:
                    if len(fill_iter) > 0:
                        for i in fill_iter.pop():
                            set.add(self, i)
                        for method_name in cls._props:
                            delattr(LazySet, method_name)
                finally:
                    _fill_lock.release()
                return getattr(set, name)(self, *args, **kw)
            return _lazy

        for name in cls._props:
            setattr(LazySet, name, lazy(name))

        new_set = LazySet()
        return new_set

# Not all versions of Python declare the same magic methods.
# Filter out properties that don't exist in this version of Python
# from the list.
LazySet._props = [prop for prop in LazySet._props if hasattr(set, prop)]
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/leapseconds
# Allowance for leap seconds added to each time zone file. # This file is in the public domain. # This file is generated automatically from the data in the public-domain # NIST format leap-seconds.list file, which can be copied from # <ftp://ftp.nist.gov/pub/time/leap-seconds.list> # or <ftp://ftp.boulder.nist.gov/pub/time/leap-seconds.list>. # For more about leap-seconds.list, please see # The NTP Timescale and Leap Seconds # <https://www.eecis.udel.edu/~mills/leap.html>. # The rules for leap seconds are specified in Annex 1 (Time scales) of: # Standard-frequency and time-signal emissions. # International Telecommunication Union - Radiocommunication Sector # (ITU-R) Recommendation TF.460-6 (02/2002) # <https://www.itu.int/rec/R-REC-TF.460-6-200202-I/>. # The International Earth Rotation and Reference Systems Service (IERS) # periodically uses leap seconds to keep UTC to within 0.9 s of UT1 # (a proxy for Earth's angle in space as measured by astronomers) # and publishes leap second data in a copyrighted file # <https://hpiers.obspm.fr/iers/bul/bulc/Leap_Second.dat>. # See: Levine J. Coordinated Universal Time and the leap second. # URSI Radio Sci Bull. 2016;89(4):30-6. doi:10.23919/URSIRSB.2016.7909995 # <https://ieeexplore.ieee.org/document/7909995>. # There were no leap seconds before 1972, as no official mechanism # accounted for the discrepancy between atomic time (TAI) and the earth's # rotation. The first ("1 Jan 1972") data line in leap-seconds.list # does not denote a leap second; it denotes the start of the current definition # of UTC. # All leap-seconds are Stationary (S) at the given UTC time. 
# The correction (+ or -) is made at the given time, so in the unlikely # event of a negative leap second, a line would look like this: # Leap YEAR MON DAY 23:59:59 - S # Typical lines look like this: # Leap YEAR MON DAY 23:59:60 + S Leap 1972 Jun 30 23:59:60 + S Leap 1972 Dec 31 23:59:60 + S Leap 1973 Dec 31 23:59:60 + S Leap 1974 Dec 31 23:59:60 + S Leap 1975 Dec 31 23:59:60 + S Leap 1976 Dec 31 23:59:60 + S Leap 1977 Dec 31 23:59:60 + S Leap 1978 Dec 31 23:59:60 + S Leap 1979 Dec 31 23:59:60 + S Leap 1981 Jun 30 23:59:60 + S Leap 1982 Jun 30 23:59:60 + S Leap 1983 Jun 30 23:59:60 + S Leap 1985 Jun 30 23:59:60 + S Leap 1987 Dec 31 23:59:60 + S Leap 1989 Dec 31 23:59:60 + S Leap 1990 Dec 31 23:59:60 + S Leap 1992 Jun 30 23:59:60 + S Leap 1993 Jun 30 23:59:60 + S Leap 1994 Jun 30 23:59:60 + S Leap 1995 Dec 31 23:59:60 + S Leap 1997 Jun 30 23:59:60 + S Leap 1998 Dec 31 23:59:60 + S Leap 2005 Dec 31 23:59:60 + S Leap 2008 Dec 31 23:59:60 + S Leap 2012 Jun 30 23:59:60 + S Leap 2015 Jun 30 23:59:60 + S Leap 2016 Dec 31 23:59:60 + S # UTC timestamp when this leap second list expires. # Any additional leap seconds will come after this. # This Expires line is commented out for now, # so that pre-2020a zic implementations do not reject this file. #Expires 2020 Dec 28 00:00:00 # POSIX timestamps for the data in this file: #updated 1467936000 (2016-07-08 00:00:00 UTC) #expires 1609113600 (2020-12-28 00:00:00 UTC) # Updated through IERS Bulletin C59 # File expires on: 28 December 2020
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/zone1970.tab
# tzdb timezone descriptions # # This file is in the public domain. # # From Paul Eggert (2018-06-27): # This file contains a table where each row stands for a timezone where # civil timestamps have agreed since 1970. Columns are separated by # a single tab. Lines beginning with '#' are comments. All text uses # UTF-8 encoding. The columns of the table are as follows: # # 1. The countries that overlap the timezone, as a comma-separated list # of ISO 3166 2-character country codes. See the file 'iso3166.tab'. # 2. Latitude and longitude of the timezone's principal location # in ISO 6709 sign-degrees-minutes-seconds format, # either ±DDMM±DDDMM or ±DDMMSS±DDDMMSS, # first latitude (+ is north), then longitude (+ is east). # 3. Timezone name used in value of TZ environment variable. # Please see the theory.html file for how these names are chosen. # If multiple timezones overlap a country, each has a row in the # table, with each column 1 containing the country code. # 4. Comments; present if and only if a country has multiple timezones. # # If a timezone covers multiple countries, the most-populous city is used, # and that country is listed first in column 1; any other countries # are listed alphabetically by country code. The table is sorted # first by country code, then (if possible) by an order within the # country that (1) makes some geographical sense, and (2) puts the # most populous timezones first, where that does not contradict (1). # # This table is intended as an aid for users, to help them select timezones # appropriate for their practical needs. It is not intended to take or # endorse any position on legal or territorial claims. 
# #country- #codes coordinates TZ comments AD +4230+00131 Europe/Andorra AE,OM +2518+05518 Asia/Dubai AF +3431+06912 Asia/Kabul AL +4120+01950 Europe/Tirane AM +4011+04430 Asia/Yerevan AQ -6617+11031 Antarctica/Casey Casey AQ -6835+07758 Antarctica/Davis Davis AQ -6640+14001 Antarctica/DumontDUrville Dumont-d'Urville AQ -6736+06253 Antarctica/Mawson Mawson AQ -6448-06406 Antarctica/Palmer Palmer AQ -6734-06808 Antarctica/Rothera Rothera AQ -690022+0393524 Antarctica/Syowa Syowa AQ -720041+0023206 Antarctica/Troll Troll AQ -7824+10654 Antarctica/Vostok Vostok AR -3436-05827 America/Argentina/Buenos_Aires Buenos Aires (BA, CF) AR -3124-06411 America/Argentina/Cordoba Argentina (most areas: CB, CC, CN, ER, FM, MN, SE, SF) AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN) AR -2411-06518 America/Argentina/Jujuy Jujuy (JY) AR -2649-06513 America/Argentina/Tucuman Tucumán (TM) AR -2828-06547 America/Argentina/Catamarca Catamarca (CT); Chubut (CH) AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR) AR -3132-06831 America/Argentina/San_Juan San Juan (SJ) AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ) AR -3319-06621 America/Argentina/San_Luis San Luis (SL) AR -5138-06913 America/Argentina/Rio_Gallegos Santa Cruz (SC) AR -5448-06818 America/Argentina/Ushuaia Tierra del Fuego (TF) AS,UM -1416-17042 Pacific/Pago_Pago Samoa, Midway AT +4813+01620 Europe/Vienna AU -3133+15905 Australia/Lord_Howe Lord Howe Island AU -5430+15857 Antarctica/Macquarie Macquarie Island AU -4253+14719 Australia/Hobart Tasmania (most areas) AU -3956+14352 Australia/Currie Tasmania (King Island) AU -3749+14458 Australia/Melbourne Victoria AU -3352+15113 Australia/Sydney New South Wales (most areas) AU -3157+14127 Australia/Broken_Hill New South Wales (Yancowinna) AU -2728+15302 Australia/Brisbane Queensland (most areas) AU -2016+14900 Australia/Lindeman Queensland (Whitsunday Islands) AU -3455+13835 Australia/Adelaide South Australia AU -1228+13050 Australia/Darwin Northern 
Territory AU -3157+11551 Australia/Perth Western Australia (most areas) AU -3143+12852 Australia/Eucla Western Australia (Eucla) AZ +4023+04951 Asia/Baku BB +1306-05937 America/Barbados BD +2343+09025 Asia/Dhaka BE +5050+00420 Europe/Brussels BG +4241+02319 Europe/Sofia BM +3217-06446 Atlantic/Bermuda BN +0456+11455 Asia/Brunei BO -1630-06809 America/La_Paz BR -0351-03225 America/Noronha Atlantic islands BR -0127-04829 America/Belem Pará (east); Amapá BR -0343-03830 America/Fortaleza Brazil (northeast: MA, PI, CE, RN, PB) BR -0803-03454 America/Recife Pernambuco BR -0712-04812 America/Araguaina Tocantins BR -0940-03543 America/Maceio Alagoas, Sergipe BR -1259-03831 America/Bahia Bahia BR -2332-04637 America/Sao_Paulo Brazil (southeast: GO, DF, MG, ES, RJ, SP, PR, SC, RS) BR -2027-05437 America/Campo_Grande Mato Grosso do Sul BR -1535-05605 America/Cuiaba Mato Grosso BR -0226-05452 America/Santarem Pará (west) BR -0846-06354 America/Porto_Velho Rondônia BR +0249-06040 America/Boa_Vista Roraima BR -0308-06001 America/Manaus Amazonas (east) BR -0640-06952 America/Eirunepe Amazonas (west) BR -0958-06748 America/Rio_Branco Acre BS +2505-07721 America/Nassau BT +2728+08939 Asia/Thimphu BY +5354+02734 Europe/Minsk BZ +1730-08812 America/Belize CA +4734-05243 America/St_Johns Newfoundland; Labrador (southeast) CA +4439-06336 America/Halifax Atlantic - NS (most areas); PE CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton) CA +4606-06447 America/Moncton Atlantic - New Brunswick CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas) CA +5125-05707 America/Blanc-Sablon AST - QC (Lower North Shore) CA +4339-07923 America/Toronto Eastern - ON, QC (most areas) CA +4901-08816 America/Nipigon Eastern - ON, QC (no DST 1967-73) CA +4823-08915 America/Thunder_Bay Eastern - ON (Thunder Bay) CA +6344-06828 America/Iqaluit Eastern - NU (most east areas) CA +6608-06544 America/Pangnirtung Eastern - NU (Pangnirtung) CA +484531-0913718 America/Atikokan EST - ON 
(Atikokan); NU (Coral H) CA +4953-09709 America/Winnipeg Central - ON (west); Manitoba CA +4843-09434 America/Rainy_River Central - ON (Rainy R, Ft Frances) CA +744144-0944945 America/Resolute Central - NU (Resolute) CA +624900-0920459 America/Rankin_Inlet Central - NU (central) CA +5024-10439 America/Regina CST - SK (most areas) CA +5017-10750 America/Swift_Current CST - SK (midwest) CA +5333-11328 America/Edmonton Mountain - AB; BC (E); SK (W) CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west) CA +6227-11421 America/Yellowknife Mountain - NT (central) CA +682059-1334300 America/Inuvik Mountain - NT (west) CA +4906-11631 America/Creston MST - BC (Creston) CA +5946-12014 America/Dawson_Creek MST - BC (Dawson Cr, Ft St John) CA +5848-12242 America/Fort_Nelson MST - BC (Ft Nelson) CA +4916-12307 America/Vancouver Pacific - BC (most areas) CA +6043-13503 America/Whitehorse Pacific - Yukon (east) CA +6404-13925 America/Dawson Pacific - Yukon (west) CC -1210+09655 Indian/Cocos CH,DE,LI +4723+00832 Europe/Zurich Swiss time CI,BF,GM,GN,ML,MR,SH,SL,SN,TG +0519-00402 Africa/Abidjan CK -2114-15946 Pacific/Rarotonga CL -3327-07040 America/Santiago Chile (most areas) CL -5309-07055 America/Punta_Arenas Region of Magallanes CL -2709-10926 Pacific/Easter Easter Island CN +3114+12128 Asia/Shanghai Beijing Time CN +4348+08735 Asia/Urumqi Xinjiang Time CO +0436-07405 America/Bogota CR +0956-08405 America/Costa_Rica CU +2308-08222 America/Havana CV +1455-02331 Atlantic/Cape_Verde CW,AW,BQ,SX +1211-06900 America/Curacao CX -1025+10543 Indian/Christmas CY +3510+03322 Asia/Nicosia Cyprus (most areas) CY +3507+03357 Asia/Famagusta Northern Cyprus CZ,SK +5005+01426 Europe/Prague DE +5230+01322 Europe/Berlin Germany (most areas) DK +5540+01235 Europe/Copenhagen DO +1828-06954 America/Santo_Domingo DZ +3647+00303 Africa/Algiers EC -0210-07950 America/Guayaquil Ecuador (mainland) EC -0054-08936 Pacific/Galapagos Galápagos Islands EE +5925+02445 Europe/Tallinn EG +3003+03115 
Africa/Cairo EH +2709-01312 Africa/El_Aaiun ES +4024-00341 Europe/Madrid Spain (mainland) ES +3553-00519 Africa/Ceuta Ceuta, Melilla ES +2806-01524 Atlantic/Canary Canary Islands FI,AX +6010+02458 Europe/Helsinki FJ -1808+17825 Pacific/Fiji FK -5142-05751 Atlantic/Stanley FM +0725+15147 Pacific/Chuuk Chuuk/Truk, Yap FM +0658+15813 Pacific/Pohnpei Pohnpei/Ponape FM +0519+16259 Pacific/Kosrae Kosrae FO +6201-00646 Atlantic/Faroe FR +4852+00220 Europe/Paris GB,GG,IM,JE +513030-0000731 Europe/London GE +4143+04449 Asia/Tbilisi GF +0456-05220 America/Cayenne GH +0533-00013 Africa/Accra GI +3608-00521 Europe/Gibraltar GL +6411-05144 America/Nuuk Greenland (most areas) GL +7646-01840 America/Danmarkshavn National Park (east coast) GL +7029-02158 America/Scoresbysund Scoresbysund/Ittoqqortoormiit GL +7634-06847 America/Thule Thule/Pituffik GR +3758+02343 Europe/Athens GS -5416-03632 Atlantic/South_Georgia GT +1438-09031 America/Guatemala GU,MP +1328+14445 Pacific/Guam GW +1151-01535 Africa/Bissau GY +0648-05810 America/Guyana HK +2217+11409 Asia/Hong_Kong HN +1406-08713 America/Tegucigalpa HT +1832-07220 America/Port-au-Prince HU +4730+01905 Europe/Budapest ID -0610+10648 Asia/Jakarta Java, Sumatra ID -0002+10920 Asia/Pontianak Borneo (west, central) ID -0507+11924 Asia/Makassar Borneo (east, south); Sulawesi/Celebes, Bali, Nusa Tengarra; Timor (west) ID -0232+14042 Asia/Jayapura New Guinea (West Papua / Irian Jaya); Malukus/Moluccas IE +5320-00615 Europe/Dublin IL +314650+0351326 Asia/Jerusalem IN +2232+08822 Asia/Kolkata IO -0720+07225 Indian/Chagos IQ +3321+04425 Asia/Baghdad IR +3540+05126 Asia/Tehran IS +6409-02151 Atlantic/Reykjavik IT,SM,VA +4154+01229 Europe/Rome JM +175805-0764736 America/Jamaica JO +3157+03556 Asia/Amman JP +353916+1394441 Asia/Tokyo KE,DJ,ER,ET,KM,MG,SO,TZ,UG,YT -0117+03649 Africa/Nairobi KG +4254+07436 Asia/Bishkek KI +0125+17300 Pacific/Tarawa Gilbert Islands KI -0308-17105 Pacific/Enderbury Phoenix Islands KI +0152-15720 Pacific/Kiritimati 
Line Islands KP +3901+12545 Asia/Pyongyang KR +3733+12658 Asia/Seoul KZ +4315+07657 Asia/Almaty Kazakhstan (most areas) KZ +4448+06528 Asia/Qyzylorda Qyzylorda/Kyzylorda/Kzyl-Orda KZ +5312+06337 Asia/Qostanay Qostanay/Kostanay/Kustanay KZ +5017+05710 Asia/Aqtobe Aqtöbe/Aktobe KZ +4431+05016 Asia/Aqtau Mangghystaū/Mankistau KZ +4707+05156 Asia/Atyrau Atyraū/Atirau/Gur'yev KZ +5113+05121 Asia/Oral West Kazakhstan LB +3353+03530 Asia/Beirut LK +0656+07951 Asia/Colombo LR +0618-01047 Africa/Monrovia LT +5441+02519 Europe/Vilnius LU +4936+00609 Europe/Luxembourg LV +5657+02406 Europe/Riga LY +3254+01311 Africa/Tripoli MA +3339-00735 Africa/Casablanca MC +4342+00723 Europe/Monaco MD +4700+02850 Europe/Chisinau MH +0709+17112 Pacific/Majuro Marshall Islands (most areas) MH +0905+16720 Pacific/Kwajalein Kwajalein MM +1647+09610 Asia/Yangon MN +4755+10653 Asia/Ulaanbaatar Mongolia (most areas) MN +4801+09139 Asia/Hovd Bayan-Ölgii, Govi-Altai, Hovd, Uvs, Zavkhan MN +4804+11430 Asia/Choibalsan Dornod, Sükhbaatar MO +221150+1133230 Asia/Macau MQ +1436-06105 America/Martinique MT +3554+01431 Europe/Malta MU -2010+05730 Indian/Mauritius MV +0410+07330 Indian/Maldives MX +1924-09909 America/Mexico_City Central Time MX +2105-08646 America/Cancun Eastern Standard Time - Quintana Roo MX +2058-08937 America/Merida Central Time - Campeche, Yucatán MX +2540-10019 America/Monterrey Central Time - Durango; Coahuila, Nuevo León, Tamaulipas (most areas) MX +2550-09730 America/Matamoros Central Time US - Coahuila, Nuevo León, Tamaulipas (US border) MX +2313-10625 America/Mazatlan Mountain Time - Baja California Sur, Nayarit, Sinaloa MX +2838-10605 America/Chihuahua Mountain Time - Chihuahua (most areas) MX +2934-10425 America/Ojinaga Mountain Time US - Chihuahua (US border) MX +2904-11058 America/Hermosillo Mountain Standard Time - Sonora MX +3232-11701 America/Tijuana Pacific Time US - Baja California MX +2048-10515 America/Bahia_Banderas Central Time - Bahía de Banderas MY +0310+10142 
Asia/Kuala_Lumpur Malaysia (peninsula) MY +0133+11020 Asia/Kuching Sabah, Sarawak MZ,BI,BW,CD,MW,RW,ZM,ZW -2558+03235 Africa/Maputo Central Africa Time NA -2234+01706 Africa/Windhoek NC -2216+16627 Pacific/Noumea NF -2903+16758 Pacific/Norfolk NG,AO,BJ,CD,CF,CG,CM,GA,GQ,NE +0627+00324 Africa/Lagos West Africa Time NI +1209-08617 America/Managua NL +5222+00454 Europe/Amsterdam NO,SJ +5955+01045 Europe/Oslo NP +2743+08519 Asia/Kathmandu NR -0031+16655 Pacific/Nauru NU -1901-16955 Pacific/Niue NZ,AQ -3652+17446 Pacific/Auckland New Zealand time NZ -4357-17633 Pacific/Chatham Chatham Islands PA,KY +0858-07932 America/Panama PE -1203-07703 America/Lima PF -1732-14934 Pacific/Tahiti Society Islands PF -0900-13930 Pacific/Marquesas Marquesas Islands PF -2308-13457 Pacific/Gambier Gambier Islands PG -0930+14710 Pacific/Port_Moresby Papua New Guinea (most areas) PG -0613+15534 Pacific/Bougainville Bougainville PH +1435+12100 Asia/Manila PK +2452+06703 Asia/Karachi PL +5215+02100 Europe/Warsaw PM +4703-05620 America/Miquelon PN -2504-13005 Pacific/Pitcairn PR +182806-0660622 America/Puerto_Rico PS +3130+03428 Asia/Gaza Gaza Strip PS +313200+0350542 Asia/Hebron West Bank PT +3843-00908 Europe/Lisbon Portugal (mainland) PT +3238-01654 Atlantic/Madeira Madeira Islands PT +3744-02540 Atlantic/Azores Azores PW +0720+13429 Pacific/Palau PY -2516-05740 America/Asuncion QA,BH +2517+05132 Asia/Qatar RE,TF -2052+05528 Indian/Reunion Réunion, Crozet, Scattered Islands RO +4426+02606 Europe/Bucharest RS,BA,HR,ME,MK,SI +4450+02030 Europe/Belgrade RU +5443+02030 Europe/Kaliningrad MSK-01 - Kaliningrad RU +554521+0373704 Europe/Moscow MSK+00 - Moscow area # Mention RU and UA alphabetically. See "territorial claims" above. 
RU,UA +4457+03406 Europe/Simferopol Crimea RU +5836+04939 Europe/Kirov MSK+00 - Kirov RU +4621+04803 Europe/Astrakhan MSK+01 - Astrakhan RU +4844+04425 Europe/Volgograd MSK+01 - Volgograd RU +5134+04602 Europe/Saratov MSK+01 - Saratov RU +5420+04824 Europe/Ulyanovsk MSK+01 - Ulyanovsk RU +5312+05009 Europe/Samara MSK+01 - Samara, Udmurtia RU +5651+06036 Asia/Yekaterinburg MSK+02 - Urals RU +5500+07324 Asia/Omsk MSK+03 - Omsk RU +5502+08255 Asia/Novosibirsk MSK+04 - Novosibirsk RU +5322+08345 Asia/Barnaul MSK+04 - Altai RU +5630+08458 Asia/Tomsk MSK+04 - Tomsk RU +5345+08707 Asia/Novokuznetsk MSK+04 - Kemerovo RU +5601+09250 Asia/Krasnoyarsk MSK+04 - Krasnoyarsk area RU +5216+10420 Asia/Irkutsk MSK+05 - Irkutsk, Buryatia RU +5203+11328 Asia/Chita MSK+06 - Zabaykalsky RU +6200+12940 Asia/Yakutsk MSK+06 - Lena River RU +623923+1353314 Asia/Khandyga MSK+06 - Tomponsky, Ust-Maysky RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky RU +5934+15048 Asia/Magadan MSK+08 - Magadan RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); North Kuril Is RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea SA,KW,YE +2438+04643 Asia/Riyadh SB -0932+16012 Pacific/Guadalcanal SC -0440+05528 Indian/Mahe SD +1536+03232 Africa/Khartoum SE +5920+01803 Europe/Stockholm SG +0117+10351 Asia/Singapore SR +0550-05510 America/Paramaribo SS +0451+03137 Africa/Juba ST +0020+00644 Africa/Sao_Tome SV +1342-08912 America/El_Salvador SY +3330+03618 Asia/Damascus TC +2128-07108 America/Grand_Turk TD +1207+01503 Africa/Ndjamena TF -492110+0701303 Indian/Kerguelen Kerguelen, St Paul Island, Amsterdam Island TH,KH,LA,VN +1345+10031 Asia/Bangkok Indochina (most areas) TJ +3835+06848 Asia/Dushanbe TK -0922-17114 Pacific/Fakaofo TL -0833+12535 Asia/Dili TM +3757+05823 Asia/Ashgabat TN +3648+01011 Africa/Tunis TO -2110-17510 Pacific/Tongatapu TR +4101+02858 
Europe/Istanbul TT,AG,AI,BL,DM,GD,GP,KN,LC,MF,MS,VC,VG,VI +1039-06131 America/Port_of_Spain TV -0831+17913 Pacific/Funafuti TW +2503+12130 Asia/Taipei UA +5026+03031 Europe/Kiev Ukraine (most areas) UA +4837+02218 Europe/Uzhgorod Transcarpathia UA +4750+03510 Europe/Zaporozhye Zaporozhye and east Lugansk UM +1917+16637 Pacific/Wake Wake Island US +404251-0740023 America/New_York Eastern (most areas) US +421953-0830245 America/Detroit Eastern - MI (most areas) US +381515-0854534 America/Kentucky/Louisville Eastern - KY (Louisville area) US +364947-0845057 America/Kentucky/Monticello Eastern - KY (Wayne) US +394606-0860929 America/Indiana/Indianapolis Eastern - IN (most areas) US +384038-0873143 America/Indiana/Vincennes Eastern - IN (Da, Du, K, Mn) US +410305-0863611 America/Indiana/Winamac Eastern - IN (Pulaski) US +382232-0862041 America/Indiana/Marengo Eastern - IN (Crawford) US +382931-0871643 America/Indiana/Petersburg Eastern - IN (Pike) US +384452-0850402 America/Indiana/Vevay Eastern - IN (Switzerland) US +415100-0873900 America/Chicago Central (most areas) US +375711-0864541 America/Indiana/Tell_City Central - IN (Perry) US +411745-0863730 America/Indiana/Knox Central - IN (Starke) US +450628-0873651 America/Menominee Central - MI (Wisconsin border) US +470659-1011757 America/North_Dakota/Center Central - ND (Oliver) US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural) US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer) US +394421-1045903 America/Denver Mountain (most areas) US +433649-1161209 America/Boise Mountain - ID (south); OR (east) US +332654-1120424 America/Phoenix MST - Arizona (except Navajo) US +340308-1181434 America/Los_Angeles Pacific US +611305-1495401 America/Anchorage Alaska (most areas) US +581807-1342511 America/Juneau Alaska - Juneau area US +571035-1351807 America/Sitka Alaska - Sitka area US +550737-1313435 America/Metlakatla Alaska - Annette Island US +593249-1394338 America/Yakutat Alaska - 
Yakutat US +643004-1652423 America/Nome Alaska (west) US +515248-1763929 America/Adak Aleutian Islands US,UM +211825-1575130 Pacific/Honolulu Hawaii UY -345433-0561245 America/Montevideo UZ +3940+06648 Asia/Samarkand Uzbekistan (west) UZ +4120+06918 Asia/Tashkent Uzbekistan (east) VE +1030-06656 America/Caracas VN +1045+10640 Asia/Ho_Chi_Minh Vietnam (south) VU -1740+16825 Pacific/Efate WF -1318-17610 Pacific/Wallis WS -1350-17144 Pacific/Apia ZA,LS,SZ -2615+02800 Africa/Johannesburg
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Universal
TZif2UTCTZif2UTC UTC0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/iso3166.tab
# ISO 3166 alpha-2 country codes # # This file is in the public domain, so clarified as of # 2009-05-17 by Arthur David Olson. # # From Paul Eggert (2015-05-02): # This file contains a table of two-letter country codes. Columns are # separated by a single tab. Lines beginning with '#' are comments. # All text uses UTF-8 encoding. The columns of the table are as follows: # # 1. ISO 3166-1 alpha-2 country code, current as of # ISO 3166-1 N976 (2018-11-06). See: Updates on ISO 3166-1 # https://isotc.iso.org/livelink/livelink/Open/16944257 # 2. The usual English name for the coded region, # chosen so that alphabetic sorting of subsets produces helpful lists. # This is not the same as the English name in the ISO 3166 tables. # # The table is sorted by country code. # # This table is intended as an aid for users, to help them select time # zone data appropriate for their practical needs. It is not intended # to take or endorse any position on legal or territorial claims. # #country- #code name of country, territory, area, or subdivision AD Andorra AE United Arab Emirates AF Afghanistan AG Antigua & Barbuda AI Anguilla AL Albania AM Armenia AO Angola AQ Antarctica AR Argentina AS Samoa (American) AT Austria AU Australia AW Aruba AX Åland Islands AZ Azerbaijan BA Bosnia & Herzegovina BB Barbados BD Bangladesh BE Belgium BF Burkina Faso BG Bulgaria BH Bahrain BI Burundi BJ Benin BL St Barthelemy BM Bermuda BN Brunei BO Bolivia BQ Caribbean NL BR Brazil BS Bahamas BT Bhutan BV Bouvet Island BW Botswana BY Belarus BZ Belize CA Canada CC Cocos (Keeling) Islands CD Congo (Dem. Rep.) CF Central African Rep. CG Congo (Rep.) 
CH Switzerland CI Côte d'Ivoire CK Cook Islands CL Chile CM Cameroon CN China CO Colombia CR Costa Rica CU Cuba CV Cape Verde CW Curaçao CX Christmas Island CY Cyprus CZ Czech Republic DE Germany DJ Djibouti DK Denmark DM Dominica DO Dominican Republic DZ Algeria EC Ecuador EE Estonia EG Egypt EH Western Sahara ER Eritrea ES Spain ET Ethiopia FI Finland FJ Fiji FK Falkland Islands FM Micronesia FO Faroe Islands FR France GA Gabon GB Britain (UK) GD Grenada GE Georgia GF French Guiana GG Guernsey GH Ghana GI Gibraltar GL Greenland GM Gambia GN Guinea GP Guadeloupe GQ Equatorial Guinea GR Greece GS South Georgia & the South Sandwich Islands GT Guatemala GU Guam GW Guinea-Bissau GY Guyana HK Hong Kong HM Heard Island & McDonald Islands HN Honduras HR Croatia HT Haiti HU Hungary ID Indonesia IE Ireland IL Israel IM Isle of Man IN India IO British Indian Ocean Territory IQ Iraq IR Iran IS Iceland IT Italy JE Jersey JM Jamaica JO Jordan JP Japan KE Kenya KG Kyrgyzstan KH Cambodia KI Kiribati KM Comoros KN St Kitts & Nevis KP Korea (North) KR Korea (South) KW Kuwait KY Cayman Islands KZ Kazakhstan LA Laos LB Lebanon LC St Lucia LI Liechtenstein LK Sri Lanka LR Liberia LS Lesotho LT Lithuania LU Luxembourg LV Latvia LY Libya MA Morocco MC Monaco MD Moldova ME Montenegro MF St Martin (French) MG Madagascar MH Marshall Islands MK North Macedonia ML Mali MM Myanmar (Burma) MN Mongolia MO Macau MP Northern Mariana Islands MQ Martinique MR Mauritania MS Montserrat MT Malta MU Mauritius MV Maldives MW Malawi MX Mexico MY Malaysia MZ Mozambique NA Namibia NC New Caledonia NE Niger NF Norfolk Island NG Nigeria NI Nicaragua NL Netherlands NO Norway NP Nepal NR Nauru NU Niue NZ New Zealand OM Oman PA Panama PE Peru PF French Polynesia PG Papua New Guinea PH Philippines PK Pakistan PL Poland PM St Pierre & Miquelon PN Pitcairn PR Puerto Rico PS Palestine PT Portugal PW Palau PY Paraguay QA Qatar RE Réunion RO Romania RS Serbia RU Russia RW Rwanda SA Saudi Arabia SB Solomon Islands SC 
Seychelles SD Sudan SE Sweden SG Singapore SH St Helena SI Slovenia SJ Svalbard & Jan Mayen SK Slovakia SL Sierra Leone SM San Marino SN Senegal SO Somalia SR Suriname SS South Sudan ST Sao Tome & Principe SV El Salvador SX St Maarten (Dutch) SY Syria SZ Eswatini (Swaziland) TC Turks & Caicos Is TD Chad TF French Southern & Antarctic Lands TG Togo TH Thailand TJ Tajikistan TK Tokelau TL East Timor TM Turkmenistan TN Tunisia TO Tonga TR Turkey TT Trinidad & Tobago TV Tuvalu TW Taiwan TZ Tanzania UA Ukraine UG Uganda UM US minor outlying islands US United States UY Uruguay UZ Uzbekistan VA Vatican City VC St Vincent VE Venezuela VG Virgin Islands (UK) VI Virgin Islands (US) VN Vietnam VU Vanuatu WF Wallis & Futuna WS Samoa (western) YE Yemen YT Mayotte ZA South Africa ZM Zambia ZW Zimbabwe
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Greenwich
TZif2GMTTZif2GMT GMT0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/GMT-0
TZif2GMTTZif2GMT GMT0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/GMT
TZif2GMTTZif2GMT GMT0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Zulu
TZif2UTCTZif2UTC UTC0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/GMT0
TZif2GMTTZif2GMT GMT0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/zone.tab
# tzdb timezone descriptions (deprecated version) # # This file is in the public domain, so clarified as of # 2009-05-17 by Arthur David Olson. # # From Paul Eggert (2018-06-27): # This file is intended as a backward-compatibility aid for older programs. # New programs should use zone1970.tab. This file is like zone1970.tab (see # zone1970.tab's comments), but with the following additional restrictions: # # 1. This file contains only ASCII characters. # 2. The first data column contains exactly one country code. # # Because of (2), each row stands for an area that is the intersection # of a region identified by a country code and of a timezone where civil # clocks have agreed since 1970; this is a narrower definition than # that of zone1970.tab. # # This table is intended as an aid for users, to help them select timezones # appropriate for their practical needs. It is not intended to take or # endorse any position on legal or territorial claims. # #country- #code coordinates TZ comments AD +4230+00131 Europe/Andorra AE +2518+05518 Asia/Dubai AF +3431+06912 Asia/Kabul AG +1703-06148 America/Antigua AI +1812-06304 America/Anguilla AL +4120+01950 Europe/Tirane AM +4011+04430 Asia/Yerevan AO -0848+01314 Africa/Luanda AQ -7750+16636 Antarctica/McMurdo New Zealand time - McMurdo, South Pole AQ -6617+11031 Antarctica/Casey Casey AQ -6835+07758 Antarctica/Davis Davis AQ -6640+14001 Antarctica/DumontDUrville Dumont-d'Urville AQ -6736+06253 Antarctica/Mawson Mawson AQ -6448-06406 Antarctica/Palmer Palmer AQ -6734-06808 Antarctica/Rothera Rothera AQ -690022+0393524 Antarctica/Syowa Syowa AQ -720041+0023206 Antarctica/Troll Troll AQ -7824+10654 Antarctica/Vostok Vostok AR -3436-05827 America/Argentina/Buenos_Aires Buenos Aires (BA, CF) AR -3124-06411 America/Argentina/Cordoba Argentina (most areas: CB, CC, CN, ER, FM, MN, SE, SF) AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN) AR -2411-06518 America/Argentina/Jujuy Jujuy (JY) AR -2649-06513 
America/Argentina/Tucuman Tucuman (TM) AR -2828-06547 America/Argentina/Catamarca Catamarca (CT); Chubut (CH) AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR) AR -3132-06831 America/Argentina/San_Juan San Juan (SJ) AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ) AR -3319-06621 America/Argentina/San_Luis San Luis (SL) AR -5138-06913 America/Argentina/Rio_Gallegos Santa Cruz (SC) AR -5448-06818 America/Argentina/Ushuaia Tierra del Fuego (TF) AS -1416-17042 Pacific/Pago_Pago AT +4813+01620 Europe/Vienna AU -3133+15905 Australia/Lord_Howe Lord Howe Island AU -5430+15857 Antarctica/Macquarie Macquarie Island AU -4253+14719 Australia/Hobart Tasmania (most areas) AU -3956+14352 Australia/Currie Tasmania (King Island) AU -3749+14458 Australia/Melbourne Victoria AU -3352+15113 Australia/Sydney New South Wales (most areas) AU -3157+14127 Australia/Broken_Hill New South Wales (Yancowinna) AU -2728+15302 Australia/Brisbane Queensland (most areas) AU -2016+14900 Australia/Lindeman Queensland (Whitsunday Islands) AU -3455+13835 Australia/Adelaide South Australia AU -1228+13050 Australia/Darwin Northern Territory AU -3157+11551 Australia/Perth Western Australia (most areas) AU -3143+12852 Australia/Eucla Western Australia (Eucla) AW +1230-06958 America/Aruba AX +6006+01957 Europe/Mariehamn AZ +4023+04951 Asia/Baku BA +4352+01825 Europe/Sarajevo BB +1306-05937 America/Barbados BD +2343+09025 Asia/Dhaka BE +5050+00420 Europe/Brussels BF +1222-00131 Africa/Ouagadougou BG +4241+02319 Europe/Sofia BH +2623+05035 Asia/Bahrain BI -0323+02922 Africa/Bujumbura BJ +0629+00237 Africa/Porto-Novo BL +1753-06251 America/St_Barthelemy BM +3217-06446 Atlantic/Bermuda BN +0456+11455 Asia/Brunei BO -1630-06809 America/La_Paz BQ +120903-0681636 America/Kralendijk BR -0351-03225 America/Noronha Atlantic islands BR -0127-04829 America/Belem Para (east); Amapa BR -0343-03830 America/Fortaleza Brazil (northeast: MA, PI, CE, RN, PB) BR -0803-03454 America/Recife Pernambuco BR -0712-04812 
America/Araguaina Tocantins BR -0940-03543 America/Maceio Alagoas, Sergipe BR -1259-03831 America/Bahia Bahia BR -2332-04637 America/Sao_Paulo Brazil (southeast: GO, DF, MG, ES, RJ, SP, PR, SC, RS) BR -2027-05437 America/Campo_Grande Mato Grosso do Sul BR -1535-05605 America/Cuiaba Mato Grosso BR -0226-05452 America/Santarem Para (west) BR -0846-06354 America/Porto_Velho Rondonia BR +0249-06040 America/Boa_Vista Roraima BR -0308-06001 America/Manaus Amazonas (east) BR -0640-06952 America/Eirunepe Amazonas (west) BR -0958-06748 America/Rio_Branco Acre BS +2505-07721 America/Nassau BT +2728+08939 Asia/Thimphu BW -2439+02555 Africa/Gaborone BY +5354+02734 Europe/Minsk BZ +1730-08812 America/Belize CA +4734-05243 America/St_Johns Newfoundland; Labrador (southeast) CA +4439-06336 America/Halifax Atlantic - NS (most areas); PE CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton) CA +4606-06447 America/Moncton Atlantic - New Brunswick CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas) CA +5125-05707 America/Blanc-Sablon AST - QC (Lower North Shore) CA +4339-07923 America/Toronto Eastern - ON, QC (most areas) CA +4901-08816 America/Nipigon Eastern - ON, QC (no DST 1967-73) CA +4823-08915 America/Thunder_Bay Eastern - ON (Thunder Bay) CA +6344-06828 America/Iqaluit Eastern - NU (most east areas) CA +6608-06544 America/Pangnirtung Eastern - NU (Pangnirtung) CA +484531-0913718 America/Atikokan EST - ON (Atikokan); NU (Coral H) CA +4953-09709 America/Winnipeg Central - ON (west); Manitoba CA +4843-09434 America/Rainy_River Central - ON (Rainy R, Ft Frances) CA +744144-0944945 America/Resolute Central - NU (Resolute) CA +624900-0920459 America/Rankin_Inlet Central - NU (central) CA +5024-10439 America/Regina CST - SK (most areas) CA +5017-10750 America/Swift_Current CST - SK (midwest) CA +5333-11328 America/Edmonton Mountain - AB; BC (E); SK (W) CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west) CA +6227-11421 America/Yellowknife Mountain - NT 
(central) CA +682059-1334300 America/Inuvik Mountain - NT (west) CA +4906-11631 America/Creston MST - BC (Creston) CA +5946-12014 America/Dawson_Creek MST - BC (Dawson Cr, Ft St John) CA +5848-12242 America/Fort_Nelson MST - BC (Ft Nelson) CA +4916-12307 America/Vancouver Pacific - BC (most areas) CA +6043-13503 America/Whitehorse Pacific - Yukon (east) CA +6404-13925 America/Dawson Pacific - Yukon (west) CC -1210+09655 Indian/Cocos CD -0418+01518 Africa/Kinshasa Dem. Rep. of Congo (west) CD -1140+02728 Africa/Lubumbashi Dem. Rep. of Congo (east) CF +0422+01835 Africa/Bangui CG -0416+01517 Africa/Brazzaville CH +4723+00832 Europe/Zurich CI +0519-00402 Africa/Abidjan CK -2114-15946 Pacific/Rarotonga CL -3327-07040 America/Santiago Chile (most areas) CL -5309-07055 America/Punta_Arenas Region of Magallanes CL -2709-10926 Pacific/Easter Easter Island CM +0403+00942 Africa/Douala CN +3114+12128 Asia/Shanghai Beijing Time CN +4348+08735 Asia/Urumqi Xinjiang Time CO +0436-07405 America/Bogota CR +0956-08405 America/Costa_Rica CU +2308-08222 America/Havana CV +1455-02331 Atlantic/Cape_Verde CW +1211-06900 America/Curacao CX -1025+10543 Indian/Christmas CY +3510+03322 Asia/Nicosia Cyprus (most areas) CY +3507+03357 Asia/Famagusta Northern Cyprus CZ +5005+01426 Europe/Prague DE +5230+01322 Europe/Berlin Germany (most areas) DE +4742+00841 Europe/Busingen Busingen DJ +1136+04309 Africa/Djibouti DK +5540+01235 Europe/Copenhagen DM +1518-06124 America/Dominica DO +1828-06954 America/Santo_Domingo DZ +3647+00303 Africa/Algiers EC -0210-07950 America/Guayaquil Ecuador (mainland) EC -0054-08936 Pacific/Galapagos Galapagos Islands EE +5925+02445 Europe/Tallinn EG +3003+03115 Africa/Cairo EH +2709-01312 Africa/El_Aaiun ER +1520+03853 Africa/Asmara ES +4024-00341 Europe/Madrid Spain (mainland) ES +3553-00519 Africa/Ceuta Ceuta, Melilla ES +2806-01524 Atlantic/Canary Canary Islands ET +0902+03842 Africa/Addis_Ababa FI +6010+02458 Europe/Helsinki FJ -1808+17825 Pacific/Fiji FK 
-5142-05751 Atlantic/Stanley FM +0725+15147 Pacific/Chuuk Chuuk/Truk, Yap FM +0658+15813 Pacific/Pohnpei Pohnpei/Ponape FM +0519+16259 Pacific/Kosrae Kosrae FO +6201-00646 Atlantic/Faroe FR +4852+00220 Europe/Paris GA +0023+00927 Africa/Libreville GB +513030-0000731 Europe/London GD +1203-06145 America/Grenada GE +4143+04449 Asia/Tbilisi GF +0456-05220 America/Cayenne GG +492717-0023210 Europe/Guernsey GH +0533-00013 Africa/Accra GI +3608-00521 Europe/Gibraltar GL +6411-05144 America/Nuuk Greenland (most areas) GL +7646-01840 America/Danmarkshavn National Park (east coast) GL +7029-02158 America/Scoresbysund Scoresbysund/Ittoqqortoormiit GL +7634-06847 America/Thule Thule/Pituffik GM +1328-01639 Africa/Banjul GN +0931-01343 Africa/Conakry GP +1614-06132 America/Guadeloupe GQ +0345+00847 Africa/Malabo GR +3758+02343 Europe/Athens GS -5416-03632 Atlantic/South_Georgia GT +1438-09031 America/Guatemala GU +1328+14445 Pacific/Guam GW +1151-01535 Africa/Bissau GY +0648-05810 America/Guyana HK +2217+11409 Asia/Hong_Kong HN +1406-08713 America/Tegucigalpa HR +4548+01558 Europe/Zagreb HT +1832-07220 America/Port-au-Prince HU +4730+01905 Europe/Budapest ID -0610+10648 Asia/Jakarta Java, Sumatra ID -0002+10920 Asia/Pontianak Borneo (west, central) ID -0507+11924 Asia/Makassar Borneo (east, south); Sulawesi/Celebes, Bali, Nusa Tengarra; Timor (west) ID -0232+14042 Asia/Jayapura New Guinea (West Papua / Irian Jaya); Malukus/Moluccas IE +5320-00615 Europe/Dublin IL +314650+0351326 Asia/Jerusalem IM +5409-00428 Europe/Isle_of_Man IN +2232+08822 Asia/Kolkata IO -0720+07225 Indian/Chagos IQ +3321+04425 Asia/Baghdad IR +3540+05126 Asia/Tehran IS +6409-02151 Atlantic/Reykjavik IT +4154+01229 Europe/Rome JE +491101-0020624 Europe/Jersey JM +175805-0764736 America/Jamaica JO +3157+03556 Asia/Amman JP +353916+1394441 Asia/Tokyo KE -0117+03649 Africa/Nairobi KG +4254+07436 Asia/Bishkek KH +1133+10455 Asia/Phnom_Penh KI +0125+17300 Pacific/Tarawa Gilbert Islands KI -0308-17105 
Pacific/Enderbury Phoenix Islands KI +0152-15720 Pacific/Kiritimati Line Islands KM -1141+04316 Indian/Comoro KN +1718-06243 America/St_Kitts KP +3901+12545 Asia/Pyongyang KR +3733+12658 Asia/Seoul KW +2920+04759 Asia/Kuwait KY +1918-08123 America/Cayman KZ +4315+07657 Asia/Almaty Kazakhstan (most areas) KZ +4448+06528 Asia/Qyzylorda Qyzylorda/Kyzylorda/Kzyl-Orda KZ +5312+06337 Asia/Qostanay Qostanay/Kostanay/Kustanay KZ +5017+05710 Asia/Aqtobe Aqtobe/Aktobe KZ +4431+05016 Asia/Aqtau Mangghystau/Mankistau KZ +4707+05156 Asia/Atyrau Atyrau/Atirau/Gur'yev KZ +5113+05121 Asia/Oral West Kazakhstan LA +1758+10236 Asia/Vientiane LB +3353+03530 Asia/Beirut LC +1401-06100 America/St_Lucia LI +4709+00931 Europe/Vaduz LK +0656+07951 Asia/Colombo LR +0618-01047 Africa/Monrovia LS -2928+02730 Africa/Maseru LT +5441+02519 Europe/Vilnius LU +4936+00609 Europe/Luxembourg LV +5657+02406 Europe/Riga LY +3254+01311 Africa/Tripoli MA +3339-00735 Africa/Casablanca MC +4342+00723 Europe/Monaco MD +4700+02850 Europe/Chisinau ME +4226+01916 Europe/Podgorica MF +1804-06305 America/Marigot MG -1855+04731 Indian/Antananarivo MH +0709+17112 Pacific/Majuro Marshall Islands (most areas) MH +0905+16720 Pacific/Kwajalein Kwajalein MK +4159+02126 Europe/Skopje ML +1239-00800 Africa/Bamako MM +1647+09610 Asia/Yangon MN +4755+10653 Asia/Ulaanbaatar Mongolia (most areas) MN +4801+09139 Asia/Hovd Bayan-Olgiy, Govi-Altai, Hovd, Uvs, Zavkhan MN +4804+11430 Asia/Choibalsan Dornod, Sukhbaatar MO +221150+1133230 Asia/Macau MP +1512+14545 Pacific/Saipan MQ +1436-06105 America/Martinique MR +1806-01557 Africa/Nouakchott MS +1643-06213 America/Montserrat MT +3554+01431 Europe/Malta MU -2010+05730 Indian/Mauritius MV +0410+07330 Indian/Maldives MW -1547+03500 Africa/Blantyre MX +1924-09909 America/Mexico_City Central Time MX +2105-08646 America/Cancun Eastern Standard Time - Quintana Roo MX +2058-08937 America/Merida Central Time - Campeche, Yucatan MX +2540-10019 America/Monterrey Central Time - Durango; 
Coahuila, Nuevo Leon, Tamaulipas (most areas) MX +2550-09730 America/Matamoros Central Time US - Coahuila, Nuevo Leon, Tamaulipas (US border) MX +2313-10625 America/Mazatlan Mountain Time - Baja California Sur, Nayarit, Sinaloa MX +2838-10605 America/Chihuahua Mountain Time - Chihuahua (most areas) MX +2934-10425 America/Ojinaga Mountain Time US - Chihuahua (US border) MX +2904-11058 America/Hermosillo Mountain Standard Time - Sonora MX +3232-11701 America/Tijuana Pacific Time US - Baja California MX +2048-10515 America/Bahia_Banderas Central Time - Bahia de Banderas MY +0310+10142 Asia/Kuala_Lumpur Malaysia (peninsula) MY +0133+11020 Asia/Kuching Sabah, Sarawak MZ -2558+03235 Africa/Maputo NA -2234+01706 Africa/Windhoek NC -2216+16627 Pacific/Noumea NE +1331+00207 Africa/Niamey NF -2903+16758 Pacific/Norfolk NG +0627+00324 Africa/Lagos NI +1209-08617 America/Managua NL +5222+00454 Europe/Amsterdam NO +5955+01045 Europe/Oslo NP +2743+08519 Asia/Kathmandu NR -0031+16655 Pacific/Nauru NU -1901-16955 Pacific/Niue NZ -3652+17446 Pacific/Auckland New Zealand (most areas) NZ -4357-17633 Pacific/Chatham Chatham Islands OM +2336+05835 Asia/Muscat PA +0858-07932 America/Panama PE -1203-07703 America/Lima PF -1732-14934 Pacific/Tahiti Society Islands PF -0900-13930 Pacific/Marquesas Marquesas Islands PF -2308-13457 Pacific/Gambier Gambier Islands PG -0930+14710 Pacific/Port_Moresby Papua New Guinea (most areas) PG -0613+15534 Pacific/Bougainville Bougainville PH +1435+12100 Asia/Manila PK +2452+06703 Asia/Karachi PL +5215+02100 Europe/Warsaw PM +4703-05620 America/Miquelon PN -2504-13005 Pacific/Pitcairn PR +182806-0660622 America/Puerto_Rico PS +3130+03428 Asia/Gaza Gaza Strip PS +313200+0350542 Asia/Hebron West Bank PT +3843-00908 Europe/Lisbon Portugal (mainland) PT +3238-01654 Atlantic/Madeira Madeira Islands PT +3744-02540 Atlantic/Azores Azores PW +0720+13429 Pacific/Palau PY -2516-05740 America/Asuncion QA +2517+05132 Asia/Qatar RE -2052+05528 Indian/Reunion RO 
+4426+02606 Europe/Bucharest RS +4450+02030 Europe/Belgrade RU +5443+02030 Europe/Kaliningrad MSK-01 - Kaliningrad RU +554521+0373704 Europe/Moscow MSK+00 - Moscow area # The obsolescent zone.tab format cannot represent Europe/Simferopol well. # Put it in RU section and list as UA. See "territorial claims" above. # Programs should use zone1970.tab instead; see above. UA +4457+03406 Europe/Simferopol Crimea RU +5836+04939 Europe/Kirov MSK+00 - Kirov RU +4621+04803 Europe/Astrakhan MSK+01 - Astrakhan RU +4844+04425 Europe/Volgograd MSK+01 - Volgograd RU +5134+04602 Europe/Saratov MSK+01 - Saratov RU +5420+04824 Europe/Ulyanovsk MSK+01 - Ulyanovsk RU +5312+05009 Europe/Samara MSK+01 - Samara, Udmurtia RU +5651+06036 Asia/Yekaterinburg MSK+02 - Urals RU +5500+07324 Asia/Omsk MSK+03 - Omsk RU +5502+08255 Asia/Novosibirsk MSK+04 - Novosibirsk RU +5322+08345 Asia/Barnaul MSK+04 - Altai RU +5630+08458 Asia/Tomsk MSK+04 - Tomsk RU +5345+08707 Asia/Novokuznetsk MSK+04 - Kemerovo RU +5601+09250 Asia/Krasnoyarsk MSK+04 - Krasnoyarsk area RU +5216+10420 Asia/Irkutsk MSK+05 - Irkutsk, Buryatia RU +5203+11328 Asia/Chita MSK+06 - Zabaykalsky RU +6200+12940 Asia/Yakutsk MSK+06 - Lena River RU +623923+1353314 Asia/Khandyga MSK+06 - Tomponsky, Ust-Maysky RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky RU +5934+15048 Asia/Magadan MSK+08 - Magadan RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); North Kuril Is RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea RW -0157+03004 Africa/Kigali SA +2438+04643 Asia/Riyadh SB -0932+16012 Pacific/Guadalcanal SC -0440+05528 Indian/Mahe SD +1536+03232 Africa/Khartoum SE +5920+01803 Europe/Stockholm SG +0117+10351 Asia/Singapore SH -1555-00542 Atlantic/St_Helena SI +4603+01431 Europe/Ljubljana SJ +7800+01600 Arctic/Longyearbyen SK +4809+01707 Europe/Bratislava SL +0830-01315 
Africa/Freetown SM +4355+01228 Europe/San_Marino SN +1440-01726 Africa/Dakar SO +0204+04522 Africa/Mogadishu SR +0550-05510 America/Paramaribo SS +0451+03137 Africa/Juba ST +0020+00644 Africa/Sao_Tome SV +1342-08912 America/El_Salvador SX +180305-0630250 America/Lower_Princes SY +3330+03618 Asia/Damascus SZ -2618+03106 Africa/Mbabane TC +2128-07108 America/Grand_Turk TD +1207+01503 Africa/Ndjamena TF -492110+0701303 Indian/Kerguelen TG +0608+00113 Africa/Lome TH +1345+10031 Asia/Bangkok TJ +3835+06848 Asia/Dushanbe TK -0922-17114 Pacific/Fakaofo TL -0833+12535 Asia/Dili TM +3757+05823 Asia/Ashgabat TN +3648+01011 Africa/Tunis TO -2110-17510 Pacific/Tongatapu TR +4101+02858 Europe/Istanbul TT +1039-06131 America/Port_of_Spain TV -0831+17913 Pacific/Funafuti TW +2503+12130 Asia/Taipei TZ -0648+03917 Africa/Dar_es_Salaam UA +5026+03031 Europe/Kiev Ukraine (most areas) UA +4837+02218 Europe/Uzhgorod Transcarpathia UA +4750+03510 Europe/Zaporozhye Zaporozhye and east Lugansk UG +0019+03225 Africa/Kampala UM +2813-17722 Pacific/Midway Midway Islands UM +1917+16637 Pacific/Wake Wake Island US +404251-0740023 America/New_York Eastern (most areas) US +421953-0830245 America/Detroit Eastern - MI (most areas) US +381515-0854534 America/Kentucky/Louisville Eastern - KY (Louisville area) US +364947-0845057 America/Kentucky/Monticello Eastern - KY (Wayne) US +394606-0860929 America/Indiana/Indianapolis Eastern - IN (most areas) US +384038-0873143 America/Indiana/Vincennes Eastern - IN (Da, Du, K, Mn) US +410305-0863611 America/Indiana/Winamac Eastern - IN (Pulaski) US +382232-0862041 America/Indiana/Marengo Eastern - IN (Crawford) US +382931-0871643 America/Indiana/Petersburg Eastern - IN (Pike) US +384452-0850402 America/Indiana/Vevay Eastern - IN (Switzerland) US +415100-0873900 America/Chicago Central (most areas) US +375711-0864541 America/Indiana/Tell_City Central - IN (Perry) US +411745-0863730 America/Indiana/Knox Central - IN (Starke) US +450628-0873651 America/Menominee 
Central - MI (Wisconsin border) US +470659-1011757 America/North_Dakota/Center Central - ND (Oliver) US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural) US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer) US +394421-1045903 America/Denver Mountain (most areas) US +433649-1161209 America/Boise Mountain - ID (south); OR (east) US +332654-1120424 America/Phoenix MST - Arizona (except Navajo) US +340308-1181434 America/Los_Angeles Pacific US +611305-1495401 America/Anchorage Alaska (most areas) US +581807-1342511 America/Juneau Alaska - Juneau area US +571035-1351807 America/Sitka Alaska - Sitka area US +550737-1313435 America/Metlakatla Alaska - Annette Island US +593249-1394338 America/Yakutat Alaska - Yakutat US +643004-1652423 America/Nome Alaska (west) US +515248-1763929 America/Adak Aleutian Islands US +211825-1575130 Pacific/Honolulu Hawaii UY -345433-0561245 America/Montevideo UZ +3940+06648 Asia/Samarkand Uzbekistan (west) UZ +4120+06918 Asia/Tashkent Uzbekistan (east) VA +415408+0122711 Europe/Vatican VC +1309-06114 America/St_Vincent VE +1030-06656 America/Caracas VG +1827-06437 America/Tortola VI +1821-06456 America/St_Thomas VN +1045+10640 Asia/Ho_Chi_Minh VU -1740+16825 Pacific/Efate WF -1318-17610 Pacific/Wallis WS -1350-17144 Pacific/Apia YE +1245+04512 Asia/Aden YT -1247+04514 Indian/Mayotte ZA -2615+02800 Africa/Johannesburg ZM -1525+02817 Africa/Lusaka ZW -1750+03103 Africa/Harare
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/UCT
TZif2UTCTZif2UTC UTC0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/GMT+0
TZif2GMTTZif2GMT GMT0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/tzdata.zi
# version unknown # This zic input file is in the public domain. R d 1916 o - Jun 14 23s 1 S R d 1916 1919 - O Su>=1 23s 0 - R d 1917 o - Mar 24 23s 1 S R d 1918 o - Mar 9 23s 1 S R d 1919 o - Mar 1 23s 1 S R d 1920 o - F 14 23s 1 S R d 1920 o - O 23 23s 0 - R d 1921 o - Mar 14 23s 1 S R d 1921 o - Jun 21 23s 0 - R d 1939 o - S 11 23s 1 S R d 1939 o - N 19 1 0 - R d 1944 1945 - Ap M>=1 2 1 S R d 1944 o - O 8 2 0 - R d 1945 o - S 16 1 0 - R d 1971 o - Ap 25 23s 1 S R d 1971 o - S 26 23s 0 - R d 1977 o - May 6 0 1 S R d 1977 o - O 21 0 0 - R d 1978 o - Mar 24 1 1 S R d 1978 o - S 22 3 0 - R d 1980 o - Ap 25 0 1 S R d 1980 o - O 31 2 0 - Z Africa/Algiers 0:12:12 - LMT 1891 Mar 15 0:1 0:9:21 - PMT 1911 Mar 11 0 d WE%sT 1940 F 25 2 1 d CE%sT 1946 O 7 0 - WET 1956 Ja 29 1 - CET 1963 Ap 14 0 d WE%sT 1977 O 21 1 d CE%sT 1979 O 26 0 d WE%sT 1981 May 1 - CET Z Atlantic/Cape_Verde -1:34:4 - LMT 1912 Ja 1 2u -2 - -02 1942 S -2 1 -01 1945 O 15 -2 - -02 1975 N 25 2 -1 - -01 Z Africa/Ndjamena 1:0:12 - LMT 1912 1 - WAT 1979 O 14 1 1 WAST 1980 Mar 8 1 - WAT Z Africa/Abidjan -0:16:8 - LMT 1912 0 - GMT L Africa/Abidjan Africa/Bamako L Africa/Abidjan Africa/Banjul L Africa/Abidjan Africa/Conakry L Africa/Abidjan Africa/Dakar L Africa/Abidjan Africa/Freetown L Africa/Abidjan Africa/Lome L Africa/Abidjan Africa/Nouakchott L Africa/Abidjan Africa/Ouagadougou L Africa/Abidjan Atlantic/St_Helena R K 1940 o - Jul 15 0 1 S R K 1940 o - O 1 0 0 - R K 1941 o - Ap 15 0 1 S R K 1941 o - S 16 0 0 - R K 1942 1944 - Ap 1 0 1 S R K 1942 o - O 27 0 0 - R K 1943 1945 - N 1 0 0 - R K 1945 o - Ap 16 0 1 S R K 1957 o - May 10 0 1 S R K 1957 1958 - O 1 0 0 - R K 1958 o - May 1 0 1 S R K 1959 1981 - May 1 1 1 S R K 1959 1965 - S 30 3 0 - R K 1966 1994 - O 1 3 0 - R K 1982 o - Jul 25 1 1 S R K 1983 o - Jul 12 1 1 S R K 1984 1988 - May 1 1 1 S R K 1989 o - May 6 1 1 S R K 1990 1994 - May 1 1 1 S R K 1995 2010 - Ap lastF 0s 1 S R K 1995 2005 - S lastTh 24 0 - R K 2006 o - S 21 24 0 - R K 2007 o - S Th>=1 24 0 
- R K 2008 o - Au lastTh 24 0 - R K 2009 o - Au 20 24 0 - R K 2010 o - Au 10 24 0 - R K 2010 o - S 9 24 1 S R K 2010 o - S lastTh 24 0 - R K 2014 o - May 15 24 1 S R K 2014 o - Jun 26 24 0 - R K 2014 o - Jul 31 24 1 S R K 2014 o - S lastTh 24 0 - Z Africa/Cairo 2:5:9 - LMT 1900 O 2 K EE%sT R GH 1920 1942 - S 1 0 0:20 - R GH 1920 1942 - D 31 0 0 - Z Africa/Accra -0:0:52 - LMT 1918 0 GH GMT/+0020 Z Africa/Bissau -1:2:20 - LMT 1912 Ja 1 1u -1 - -01 1975 0 - GMT Z Africa/Nairobi 2:27:16 - LMT 1928 Jul 3 - EAT 1930 2:30 - +0230 1940 2:45 - +0245 1960 3 - EAT L Africa/Nairobi Africa/Addis_Ababa L Africa/Nairobi Africa/Asmara L Africa/Nairobi Africa/Dar_es_Salaam L Africa/Nairobi Africa/Djibouti L Africa/Nairobi Africa/Kampala L Africa/Nairobi Africa/Mogadishu L Africa/Nairobi Indian/Antananarivo L Africa/Nairobi Indian/Comoro L Africa/Nairobi Indian/Mayotte Z Africa/Monrovia -0:43:8 - LMT 1882 -0:43:8 - MMT 1919 Mar -0:44:30 - MMT 1972 Ja 7 0 - GMT R L 1951 o - O 14 2 1 S R L 1952 o - Ja 1 0 0 - R L 1953 o - O 9 2 1 S R L 1954 o - Ja 1 0 0 - R L 1955 o - S 30 0 1 S R L 1956 o - Ja 1 0 0 - R L 1982 1984 - Ap 1 0 1 S R L 1982 1985 - O 1 0 0 - R L 1985 o - Ap 6 0 1 S R L 1986 o - Ap 4 0 1 S R L 1986 o - O 3 0 0 - R L 1987 1989 - Ap 1 0 1 S R L 1987 1989 - O 1 0 0 - R L 1997 o - Ap 4 0 1 S R L 1997 o - O 4 0 0 - R L 2013 o - Mar lastF 1 1 S R L 2013 o - O lastF 2 0 - Z Africa/Tripoli 0:52:44 - LMT 1920 1 L CE%sT 1959 2 - EET 1982 1 L CE%sT 1990 May 4 2 - EET 1996 S 30 1 L CE%sT 1997 O 4 2 - EET 2012 N 10 2 1 L CE%sT 2013 O 25 2 2 - EET R MU 1982 o - O 10 0 1 - R MU 1983 o - Mar 21 0 0 - R MU 2008 o - O lastSu 2 1 - R MU 2009 o - Mar lastSu 2 0 - Z Indian/Mauritius 3:50 - LMT 1907 4 MU +04/+05 R M 1939 o - S 12 0 1 - R M 1939 o - N 19 0 0 - R M 1940 o - F 25 0 1 - R M 1945 o - N 18 0 0 - R M 1950 o - Jun 11 0 1 - R M 1950 o - O 29 0 0 - R M 1967 o - Jun 3 12 1 - R M 1967 o - O 1 0 0 - R M 1974 o - Jun 24 0 1 - R M 1974 o - S 1 0 0 - R M 1976 1977 - May 1 0 1 - R M 1976 o - Au 
1 0 0 - R M 1977 o - S 28 0 0 - R M 1978 o - Jun 1 0 1 - R M 1978 o - Au 4 0 0 - R M 2008 o - Jun 1 0 1 - R M 2008 o - S 1 0 0 - R M 2009 o - Jun 1 0 1 - R M 2009 o - Au 21 0 0 - R M 2010 o - May 2 0 1 - R M 2010 o - Au 8 0 0 - R M 2011 o - Ap 3 0 1 - R M 2011 o - Jul 31 0 0 - R M 2012 2013 - Ap lastSu 2 1 - R M 2012 o - Jul 20 3 0 - R M 2012 o - Au 20 2 1 - R M 2012 o - S 30 3 0 - R M 2013 o - Jul 7 3 0 - R M 2013 o - Au 10 2 1 - R M 2013 2018 - O lastSu 3 0 - R M 2014 2018 - Mar lastSu 2 1 - R M 2014 o - Jun 28 3 0 - R M 2014 o - Au 2 2 1 - R M 2015 o - Jun 14 3 0 - R M 2015 o - Jul 19 2 1 - R M 2016 o - Jun 5 3 0 - R M 2016 o - Jul 10 2 1 - R M 2017 o - May 21 3 0 - R M 2017 o - Jul 2 2 1 - R M 2018 o - May 13 3 0 - R M 2018 o - Jun 17 2 1 - R M 2019 o - May 5 3 -1 - R M 2019 o - Jun 9 2 0 - R M 2020 o - Ap 19 3 -1 - R M 2020 o - May 31 2 0 - R M 2021 o - Ap 11 3 -1 - R M 2021 o - May 16 2 0 - R M 2022 o - Mar 27 3 -1 - R M 2022 o - May 8 2 0 - R M 2023 o - Mar 19 3 -1 - R M 2023 o - Ap 23 2 0 - R M 2024 o - Mar 10 3 -1 - R M 2024 o - Ap 14 2 0 - R M 2025 o - F 23 3 -1 - R M 2025 o - Ap 6 2 0 - R M 2026 o - F 15 3 -1 - R M 2026 o - Mar 22 2 0 - R M 2027 o - F 7 3 -1 - R M 2027 o - Mar 14 2 0 - R M 2028 o - Ja 23 3 -1 - R M 2028 o - Mar 5 2 0 - R M 2029 o - Ja 14 3 -1 - R M 2029 o - F 18 2 0 - R M 2029 o - D 30 3 -1 - R M 2030 o - F 10 2 0 - R M 2030 o - D 22 3 -1 - R M 2031 o - Ja 26 2 0 - R M 2031 o - D 14 3 -1 - R M 2032 o - Ja 18 2 0 - R M 2032 o - N 28 3 -1 - R M 2033 o - Ja 9 2 0 - R M 2033 o - N 20 3 -1 - R M 2033 o - D 25 2 0 - R M 2034 o - N 5 3 -1 - R M 2034 o - D 17 2 0 - R M 2035 o - O 28 3 -1 - R M 2035 o - D 9 2 0 - R M 2036 o - O 19 3 -1 - R M 2036 o - N 23 2 0 - R M 2037 o - O 4 3 -1 - R M 2037 o - N 15 2 0 - R M 2038 o - S 26 3 -1 - R M 2038 o - O 31 2 0 - R M 2039 o - S 18 3 -1 - R M 2039 o - O 23 2 0 - R M 2040 o - S 2 3 -1 - R M 2040 o - O 14 2 0 - R M 2041 o - Au 25 3 -1 - R M 2041 o - S 29 2 0 - R M 2042 o - Au 10 3 -1 - R M 2042 o - S 21 2 
0 - R M 2043 o - Au 2 3 -1 - R M 2043 o - S 13 2 0 - R M 2044 o - Jul 24 3 -1 - R M 2044 o - Au 28 2 0 - R M 2045 o - Jul 9 3 -1 - R M 2045 o - Au 20 2 0 - R M 2046 o - Jul 1 3 -1 - R M 2046 o - Au 5 2 0 - R M 2047 o - Jun 23 3 -1 - R M 2047 o - Jul 28 2 0 - R M 2048 o - Jun 7 3 -1 - R M 2048 o - Jul 19 2 0 - R M 2049 o - May 30 3 -1 - R M 2049 o - Jul 4 2 0 - R M 2050 o - May 15 3 -1 - R M 2050 o - Jun 26 2 0 - R M 2051 o - May 7 3 -1 - R M 2051 o - Jun 18 2 0 - R M 2052 o - Ap 28 3 -1 - R M 2052 o - Jun 2 2 0 - R M 2053 o - Ap 13 3 -1 - R M 2053 o - May 25 2 0 - R M 2054 o - Ap 5 3 -1 - R M 2054 o - May 10 2 0 - R M 2055 o - Mar 28 3 -1 - R M 2055 o - May 2 2 0 - R M 2056 o - Mar 12 3 -1 - R M 2056 o - Ap 23 2 0 - R M 2057 o - Mar 4 3 -1 - R M 2057 o - Ap 8 2 0 - R M 2058 o - F 17 3 -1 - R M 2058 o - Mar 31 2 0 - R M 2059 o - F 9 3 -1 - R M 2059 o - Mar 23 2 0 - R M 2060 o - F 1 3 -1 - R M 2060 o - Mar 7 2 0 - R M 2061 o - Ja 16 3 -1 - R M 2061 o - F 27 2 0 - R M 2062 o - Ja 8 3 -1 - R M 2062 o - F 12 2 0 - R M 2062 o - D 31 3 -1 - R M 2063 o - F 4 2 0 - R M 2063 o - D 16 3 -1 - R M 2064 o - Ja 27 2 0 - R M 2064 o - D 7 3 -1 - R M 2065 o - Ja 11 2 0 - R M 2065 o - N 22 3 -1 - R M 2066 o - Ja 3 2 0 - R M 2066 o - N 14 3 -1 - R M 2066 o - D 26 2 0 - R M 2067 o - N 6 3 -1 - R M 2067 o - D 11 2 0 - R M 2068 o - O 21 3 -1 - R M 2068 o - D 2 2 0 - R M 2069 o - O 13 3 -1 - R M 2069 o - N 17 2 0 - R M 2070 o - O 5 3 -1 - R M 2070 o - N 9 2 0 - R M 2071 o - S 20 3 -1 - R M 2071 o - N 1 2 0 - R M 2072 o - S 11 3 -1 - R M 2072 o - O 16 2 0 - R M 2073 o - Au 27 3 -1 - R M 2073 o - O 8 2 0 - R M 2074 o - Au 19 3 -1 - R M 2074 o - S 30 2 0 - R M 2075 o - Au 11 3 -1 - R M 2075 o - S 15 2 0 - R M 2076 o - Jul 26 3 -1 - R M 2076 o - S 6 2 0 - R M 2077 o - Jul 18 3 -1 - R M 2077 o - Au 22 2 0 - R M 2078 o - Jul 10 3 -1 - R M 2078 o - Au 14 2 0 - R M 2079 o - Jun 25 3 -1 - R M 2079 o - Au 6 2 0 - R M 2080 o - Jun 16 3 -1 - R M 2080 o - Jul 21 2 0 - R M 2081 o - Jun 1 3 -1 - R M 
2081 o - Jul 13 2 0 - R M 2082 o - May 24 3 -1 - R M 2082 o - Jun 28 2 0 - R M 2083 o - May 16 3 -1 - R M 2083 o - Jun 20 2 0 - R M 2084 o - Ap 30 3 -1 - R M 2084 o - Jun 11 2 0 - R M 2085 o - Ap 22 3 -1 - R M 2085 o - May 27 2 0 - R M 2086 o - Ap 14 3 -1 - R M 2086 o - May 19 2 0 - R M 2087 o - Mar 30 3 -1 - R M 2087 o - May 11 2 0 - Z Africa/Casablanca -0:30:20 - LMT 1913 O 26 0 M +00/+01 1984 Mar 16 1 - +01 1986 0 M +00/+01 2018 O 28 3 1 M +01/+00 Z Africa/El_Aaiun -0:52:48 - LMT 1934 -1 - -01 1976 Ap 14 0 M +00/+01 2018 O 28 3 1 M +01/+00 Z Africa/Maputo 2:10:20 - LMT 1903 Mar 2 - CAT L Africa/Maputo Africa/Blantyre L Africa/Maputo Africa/Bujumbura L Africa/Maputo Africa/Gaborone L Africa/Maputo Africa/Harare L Africa/Maputo Africa/Kigali L Africa/Maputo Africa/Lubumbashi L Africa/Maputo Africa/Lusaka R NA 1994 o - Mar 21 0 -1 WAT R NA 1994 2017 - S Su>=1 2 0 CAT R NA 1995 2017 - Ap Su>=1 2 -1 WAT Z Africa/Windhoek 1:8:24 - LMT 1892 F 8 1:30 - +0130 1903 Mar 2 - SAST 1942 S 20 2 2 1 SAST 1943 Mar 21 2 2 - SAST 1990 Mar 21 2 NA %s Z Africa/Lagos 0:13:36 - LMT 1919 S 1 - WAT L Africa/Lagos Africa/Bangui L Africa/Lagos Africa/Brazzaville L Africa/Lagos Africa/Douala L Africa/Lagos Africa/Kinshasa L Africa/Lagos Africa/Libreville L Africa/Lagos Africa/Luanda L Africa/Lagos Africa/Malabo L Africa/Lagos Africa/Niamey L Africa/Lagos Africa/Porto-Novo Z Indian/Reunion 3:41:52 - LMT 1911 Jun 4 - +04 Z Africa/Sao_Tome 0:26:56 - LMT 1884 -0:36:45 - LMT 1912 Ja 1 0u 0 - GMT 2018 Ja 1 1 1 - WAT 2019 Ja 1 2 0 - GMT Z Indian/Mahe 3:41:48 - LMT 1906 Jun 4 - +04 R SA 1942 1943 - S Su>=15 2 1 - R SA 1943 1944 - Mar Su>=15 2 0 - Z Africa/Johannesburg 1:52 - LMT 1892 F 8 1:30 - SAST 1903 Mar 2 SA SAST L Africa/Johannesburg Africa/Maseru L Africa/Johannesburg Africa/Mbabane R SD 1970 o - May 1 0 1 S R SD 1970 1985 - O 15 0 0 - R SD 1971 o - Ap 30 0 1 S R SD 1972 1985 - Ap lastSu 0 1 S Z Africa/Khartoum 2:10:8 - LMT 1931 2 SD CA%sT 2000 Ja 15 12 3 - EAT 2017 N 2 - CAT Z Africa/Juba 
2:6:28 - LMT 1931 2 SD CA%sT 2000 Ja 15 12 3 - EAT R n 1939 o - Ap 15 23s 1 S R n 1939 o - N 18 23s 0 - R n 1940 o - F 25 23s 1 S R n 1941 o - O 6 0 0 - R n 1942 o - Mar 9 0 1 S R n 1942 o - N 2 3 0 - R n 1943 o - Mar 29 2 1 S R n 1943 o - Ap 17 2 0 - R n 1943 o - Ap 25 2 1 S R n 1943 o - O 4 2 0 - R n 1944 1945 - Ap M>=1 2 1 S R n 1944 o - O 8 0 0 - R n 1945 o - S 16 0 0 - R n 1977 o - Ap 30 0s 1 S R n 1977 o - S 24 0s 0 - R n 1978 o - May 1 0s 1 S R n 1978 o - O 1 0s 0 - R n 1988 o - Jun 1 0s 1 S R n 1988 1990 - S lastSu 0s 0 - R n 1989 o - Mar 26 0s 1 S R n 1990 o - May 1 0s 1 S R n 2005 o - May 1 0s 1 S R n 2005 o - S 30 1s 0 - R n 2006 2008 - Mar lastSu 2s 1 S R n 2006 2008 - O lastSu 2s 0 - Z Africa/Tunis 0:40:44 - LMT 1881 May 12 0:9:21 - PMT 1911 Mar 11 1 n CE%sT Z Antarctica/Casey 0 - -00 1969 8 - +08 2009 O 18 2 11 - +11 2010 Mar 5 2 8 - +08 2011 O 28 2 11 - +11 2012 F 21 17u 8 - +08 2016 O 22 11 - +11 2018 Mar 11 4 8 - +08 Z Antarctica/Davis 0 - -00 1957 Ja 13 7 - +07 1964 N 0 - -00 1969 F 7 - +07 2009 O 18 2 5 - +05 2010 Mar 10 20u 7 - +07 2011 O 28 2 5 - +05 2012 F 21 20u 7 - +07 Z Antarctica/Mawson 0 - -00 1954 F 13 6 - +06 2009 O 18 2 5 - +05 Z Indian/Kerguelen 0 - -00 1950 5 - +05 Z Antarctica/DumontDUrville 0 - -00 1947 10 - +10 1952 Ja 14 0 - -00 1956 N 10 - +10 Z Antarctica/Syowa 0 - -00 1957 Ja 29 3 - +03 R Tr 2005 ma - Mar lastSu 1u 2 +02 R Tr 2004 ma - O lastSu 1u 0 +00 Z Antarctica/Troll 0 - -00 2005 F 12 0 Tr %s Z Antarctica/Vostok 0 - -00 1957 D 16 6 - +06 Z Antarctica/Rothera 0 - -00 1976 D -3 - -03 Z Asia/Kabul 4:36:48 - LMT 1890 4 - +04 1945 4:30 - +0430 R AM 2011 o - Mar lastSu 2s 1 - R AM 2011 o - O lastSu 2s 0 - Z Asia/Yerevan 2:58 - LMT 1924 May 2 3 - +03 1957 Mar 4 R +04/+05 1991 Mar 31 2s 3 R +03/+04 1995 S 24 2s 4 - +04 1997 4 R +04/+05 2011 4 AM +04/+05 R AZ 1997 2015 - Mar lastSu 4 1 - R AZ 1997 2015 - O lastSu 5 0 - Z Asia/Baku 3:19:24 - LMT 1924 May 2 3 - +03 1957 Mar 4 R +04/+05 1991 Mar 31 2s 3 R +03/+04 1992 S lastSu 2s 4 - 
+04 1996 4 E +04/+05 1997 4 AZ +04/+05 R BD 2009 o - Jun 19 23 1 - R BD 2009 o - D 31 24 0 - Z Asia/Dhaka 6:1:40 - LMT 1890 5:53:20 - HMT 1941 O 6:30 - +0630 1942 May 15 5:30 - +0530 1942 S 6:30 - +0630 1951 S 30 6 - +06 2009 6 BD +06/+07 Z Asia/Thimphu 5:58:36 - LMT 1947 Au 15 5:30 - +0530 1987 O 6 - +06 Z Indian/Chagos 4:49:40 - LMT 1907 5 - +05 1996 6 - +06 Z Asia/Brunei 7:39:40 - LMT 1926 Mar 7:30 - +0730 1933 8 - +08 Z Asia/Yangon 6:24:47 - LMT 1880 6:24:47 - RMT 1920 6:30 - +0630 1942 May 9 - +09 1945 May 3 6:30 - +0630 R Sh 1919 o - Ap 12 24 1 D R Sh 1919 o - S 30 24 0 S R Sh 1940 o - Jun 1 0 1 D R Sh 1940 o - O 12 24 0 S R Sh 1941 o - Mar 15 0 1 D R Sh 1941 o - N 1 24 0 S R Sh 1942 o - Ja 31 0 1 D R Sh 1945 o - S 1 24 0 S R Sh 1946 o - May 15 0 1 D R Sh 1946 o - S 30 24 0 S R Sh 1947 o - Ap 15 0 1 D R Sh 1947 o - O 31 24 0 S R Sh 1948 1949 - May 1 0 1 D R Sh 1948 1949 - S 30 24 0 S R CN 1986 o - May 4 2 1 D R CN 1986 1991 - S Su>=11 2 0 S R CN 1987 1991 - Ap Su>=11 2 1 D Z Asia/Shanghai 8:5:43 - LMT 1901 8 Sh C%sT 1949 May 28 8 CN C%sT Z Asia/Urumqi 5:50:20 - LMT 1928 6 - +06 R HK 1946 o - Ap 21 0 1 S R HK 1946 o - D 1 3:30s 0 - R HK 1947 o - Ap 13 3:30s 1 S R HK 1947 o - N 30 3:30s 0 - R HK 1948 o - May 2 3:30s 1 S R HK 1948 1952 - O Su>=28 3:30s 0 - R HK 1949 1953 - Ap Su>=1 3:30 1 S R HK 1953 1964 - O Su>=31 3:30 0 - R HK 1954 1964 - Mar Su>=18 3:30 1 S R HK 1965 1976 - Ap Su>=16 3:30 1 S R HK 1965 1976 - O Su>=16 3:30 0 - R HK 1973 o - D 30 3:30 1 S R HK 1979 o - May 13 3:30 1 S R HK 1979 o - O 21 3:30 0 - Z Asia/Hong_Kong 7:36:42 - LMT 1904 O 30 0:36:42 8 - HKT 1941 Jun 15 3 8 1 HKST 1941 O 1 4 8 0:30 HKWT 1941 D 25 9 - JST 1945 N 18 2 8 HK HK%sT R f 1946 o - May 15 0 1 D R f 1946 o - O 1 0 0 S R f 1947 o - Ap 15 0 1 D R f 1947 o - N 1 0 0 S R f 1948 1951 - May 1 0 1 D R f 1948 1951 - O 1 0 0 S R f 1952 o - Mar 1 0 1 D R f 1952 1954 - N 1 0 0 S R f 1953 1959 - Ap 1 0 1 D R f 1955 1961 - O 1 0 0 S R f 1960 1961 - Jun 1 0 1 D R f 1974 1975 - Ap 1 0 1 D R 
f 1974 1975 - O 1 0 0 S R f 1979 o - Jul 1 0 1 D R f 1979 o - O 1 0 0 S Z Asia/Taipei 8:6 - LMT 1896 8 - CST 1937 O 9 - JST 1945 S 21 1 8 f C%sT R _ 1942 1943 - Ap 30 23 1 - R _ 1942 o - N 17 23 0 - R _ 1943 o - S 30 23 0 S R _ 1946 o - Ap 30 23s 1 D R _ 1946 o - S 30 23s 0 S R _ 1947 o - Ap 19 23s 1 D R _ 1947 o - N 30 23s 0 S R _ 1948 o - May 2 23s 1 D R _ 1948 o - O 31 23s 0 S R _ 1949 1950 - Ap Sa>=1 23s 1 D R _ 1949 1950 - O lastSa 23s 0 S R _ 1951 o - Mar 31 23s 1 D R _ 1951 o - O 28 23s 0 S R _ 1952 1953 - Ap Sa>=1 23s 1 D R _ 1952 o - N 1 23s 0 S R _ 1953 1954 - O lastSa 23s 0 S R _ 1954 1956 - Mar Sa>=17 23s 1 D R _ 1955 o - N 5 23s 0 S R _ 1956 1964 - N Su>=1 3:30 0 S R _ 1957 1964 - Mar Su>=18 3:30 1 D R _ 1965 1973 - Ap Su>=16 3:30 1 D R _ 1965 1966 - O Su>=16 2:30 0 S R _ 1967 1976 - O Su>=16 3:30 0 S R _ 1973 o - D 30 3:30 1 D R _ 1975 1976 - Ap Su>=16 3:30 1 D R _ 1979 o - May 13 3:30 1 D R _ 1979 o - O Su>=16 3:30 0 S Z Asia/Macau 7:34:10 - LMT 1904 O 30 8 - CST 1941 D 21 23 9 _ +09/+10 1945 S 30 24 8 _ C%sT R CY 1975 o - Ap 13 0 1 S R CY 1975 o - O 12 0 0 - R CY 1976 o - May 15 0 1 S R CY 1976 o - O 11 0 0 - R CY 1977 1980 - Ap Su>=1 0 1 S R CY 1977 o - S 25 0 0 - R CY 1978 o - O 2 0 0 - R CY 1979 1997 - S lastSu 0 0 - R CY 1981 1998 - Mar lastSu 0 1 S Z Asia/Nicosia 2:13:28 - LMT 1921 N 14 2 CY EE%sT 1998 S 2 E EE%sT Z Asia/Famagusta 2:15:48 - LMT 1921 N 14 2 CY EE%sT 1998 S 2 E EE%sT 2016 S 8 3 - +03 2017 O 29 1u 2 E EE%sT L Asia/Nicosia Europe/Nicosia Z Asia/Tbilisi 2:59:11 - LMT 1880 2:59:11 - TBMT 1924 May 2 3 - +03 1957 Mar 4 R +04/+05 1991 Mar 31 2s 3 R +03/+04 1992 3 e +03/+04 1994 S lastSu 4 e +04/+05 1996 O lastSu 4 1 +05 1997 Mar lastSu 4 e +04/+05 2004 Jun 27 3 R +03/+04 2005 Mar lastSu 2 4 - +04 Z Asia/Dili 8:22:20 - LMT 1912 8 - +08 1942 F 21 23 9 - +09 1976 May 3 8 - +08 2000 S 17 9 - +09 Z Asia/Kolkata 5:53:28 - LMT 1854 Jun 28 5:53:20 - HMT 1870 5:21:10 - MMT 1906 5:30 - IST 1941 O 5:30 1 +0630 1942 May 15 5:30 - IST 1942 S 5:30 1 
+0630 1945 O 15 5:30 - IST Z Asia/Jakarta 7:7:12 - LMT 1867 Au 10 7:7:12 - BMT 1923 D 31 23:47:12 7:20 - +0720 1932 N 7:30 - +0730 1942 Mar 23 9 - +09 1945 S 23 7:30 - +0730 1948 May 8 - +08 1950 May 7:30 - +0730 1964 7 - WIB Z Asia/Pontianak 7:17:20 - LMT 1908 May 7:17:20 - PMT 1932 N 7:30 - +0730 1942 Ja 29 9 - +09 1945 S 23 7:30 - +0730 1948 May 8 - +08 1950 May 7:30 - +0730 1964 8 - WITA 1988 7 - WIB Z Asia/Makassar 7:57:36 - LMT 1920 7:57:36 - MMT 1932 N 8 - +08 1942 F 9 9 - +09 1945 S 23 8 - WITA Z Asia/Jayapura 9:22:48 - LMT 1932 N 9 - +09 1944 S 9:30 - +0930 1964 9 - WIT R i 1978 1980 - Mar 20 24 1 - R i 1978 o - O 20 24 0 - R i 1979 o - S 18 24 0 - R i 1980 o - S 22 24 0 - R i 1991 o - May 2 24 1 - R i 1992 1995 - Mar 21 24 1 - R i 1991 1995 - S 21 24 0 - R i 1996 o - Mar 20 24 1 - R i 1996 o - S 20 24 0 - R i 1997 1999 - Mar 21 24 1 - R i 1997 1999 - S 21 24 0 - R i 2000 o - Mar 20 24 1 - R i 2000 o - S 20 24 0 - R i 2001 2003 - Mar 21 24 1 - R i 2001 2003 - S 21 24 0 - R i 2004 o - Mar 20 24 1 - R i 2004 o - S 20 24 0 - R i 2005 o - Mar 21 24 1 - R i 2005 o - S 21 24 0 - R i 2008 o - Mar 20 24 1 - R i 2008 o - S 20 24 0 - R i 2009 2011 - Mar 21 24 1 - R i 2009 2011 - S 21 24 0 - R i 2012 o - Mar 20 24 1 - R i 2012 o - S 20 24 0 - R i 2013 2015 - Mar 21 24 1 - R i 2013 2015 - S 21 24 0 - R i 2016 o - Mar 20 24 1 - R i 2016 o - S 20 24 0 - R i 2017 2019 - Mar 21 24 1 - R i 2017 2019 - S 21 24 0 - R i 2020 o - Mar 20 24 1 - R i 2020 o - S 20 24 0 - R i 2021 2023 - Mar 21 24 1 - R i 2021 2023 - S 21 24 0 - R i 2024 o - Mar 20 24 1 - R i 2024 o - S 20 24 0 - R i 2025 2027 - Mar 21 24 1 - R i 2025 2027 - S 21 24 0 - R i 2028 2029 - Mar 20 24 1 - R i 2028 2029 - S 20 24 0 - R i 2030 2031 - Mar 21 24 1 - R i 2030 2031 - S 21 24 0 - R i 2032 2033 - Mar 20 24 1 - R i 2032 2033 - S 20 24 0 - R i 2034 2035 - Mar 21 24 1 - R i 2034 2035 - S 21 24 0 - R i 2036 2037 - Mar 20 24 1 - R i 2036 2037 - S 20 24 0 - R i 2038 2039 - Mar 21 24 1 - R i 2038 2039 - S 21 24 0 - R 
i 2040 2041 - Mar 20 24 1 - R i 2040 2041 - S 20 24 0 - R i 2042 2043 - Mar 21 24 1 - R i 2042 2043 - S 21 24 0 - R i 2044 2045 - Mar 20 24 1 - R i 2044 2045 - S 20 24 0 - R i 2046 2047 - Mar 21 24 1 - R i 2046 2047 - S 21 24 0 - R i 2048 2049 - Mar 20 24 1 - R i 2048 2049 - S 20 24 0 - R i 2050 2051 - Mar 21 24 1 - R i 2050 2051 - S 21 24 0 - R i 2052 2053 - Mar 20 24 1 - R i 2052 2053 - S 20 24 0 - R i 2054 2055 - Mar 21 24 1 - R i 2054 2055 - S 21 24 0 - R i 2056 2057 - Mar 20 24 1 - R i 2056 2057 - S 20 24 0 - R i 2058 2059 - Mar 21 24 1 - R i 2058 2059 - S 21 24 0 - R i 2060 2062 - Mar 20 24 1 - R i 2060 2062 - S 20 24 0 - R i 2063 o - Mar 21 24 1 - R i 2063 o - S 21 24 0 - R i 2064 2066 - Mar 20 24 1 - R i 2064 2066 - S 20 24 0 - R i 2067 o - Mar 21 24 1 - R i 2067 o - S 21 24 0 - R i 2068 2070 - Mar 20 24 1 - R i 2068 2070 - S 20 24 0 - R i 2071 o - Mar 21 24 1 - R i 2071 o - S 21 24 0 - R i 2072 2074 - Mar 20 24 1 - R i 2072 2074 - S 20 24 0 - R i 2075 o - Mar 21 24 1 - R i 2075 o - S 21 24 0 - R i 2076 2078 - Mar 20 24 1 - R i 2076 2078 - S 20 24 0 - R i 2079 o - Mar 21 24 1 - R i 2079 o - S 21 24 0 - R i 2080 2082 - Mar 20 24 1 - R i 2080 2082 - S 20 24 0 - R i 2083 o - Mar 21 24 1 - R i 2083 o - S 21 24 0 - R i 2084 2086 - Mar 20 24 1 - R i 2084 2086 - S 20 24 0 - R i 2087 o - Mar 21 24 1 - R i 2087 o - S 21 24 0 - R i 2088 ma - Mar 20 24 1 - R i 2088 ma - S 20 24 0 - Z Asia/Tehran 3:25:44 - LMT 1916 3:25:44 - TMT 1946 3:30 - +0330 1977 N 4 i +04/+05 1979 3:30 i +0330/+0430 R IQ 1982 o - May 1 0 1 - R IQ 1982 1984 - O 1 0 0 - R IQ 1983 o - Mar 31 0 1 - R IQ 1984 1985 - Ap 1 0 1 - R IQ 1985 1990 - S lastSu 1s 0 - R IQ 1986 1990 - Mar lastSu 1s 1 - R IQ 1991 2007 - Ap 1 3s 1 - R IQ 1991 2007 - O 1 3s 0 - Z Asia/Baghdad 2:57:40 - LMT 1890 2:57:36 - BMT 1918 3 - +03 1982 May 3 IQ +03/+04 R Z 1940 o - Jun 1 0 1 D R Z 1942 1944 - N 1 0 0 S R Z 1943 o - Ap 1 2 1 D R Z 1944 o - Ap 1 0 1 D R Z 1945 o - Ap 16 0 1 D R Z 1945 o - N 1 2 0 S R Z 1946 o - Ap 16 2 1 D R 
Z 1946 o - N 1 0 0 S R Z 1948 o - May 23 0 2 DD R Z 1948 o - S 1 0 1 D R Z 1948 1949 - N 1 2 0 S R Z 1949 o - May 1 0 1 D R Z 1950 o - Ap 16 0 1 D R Z 1950 o - S 15 3 0 S R Z 1951 o - Ap 1 0 1 D R Z 1951 o - N 11 3 0 S R Z 1952 o - Ap 20 2 1 D R Z 1952 o - O 19 3 0 S R Z 1953 o - Ap 12 2 1 D R Z 1953 o - S 13 3 0 S R Z 1954 o - Jun 13 0 1 D R Z 1954 o - S 12 0 0 S R Z 1955 o - Jun 11 2 1 D R Z 1955 o - S 11 0 0 S R Z 1956 o - Jun 3 0 1 D R Z 1956 o - S 30 3 0 S R Z 1957 o - Ap 29 2 1 D R Z 1957 o - S 22 0 0 S R Z 1974 o - Jul 7 0 1 D R Z 1974 o - O 13 0 0 S R Z 1975 o - Ap 20 0 1 D R Z 1975 o - Au 31 0 0 S R Z 1980 o - Au 2 0 1 D R Z 1980 o - S 13 1 0 S R Z 1984 o - May 5 0 1 D R Z 1984 o - Au 25 1 0 S R Z 1985 o - Ap 14 0 1 D R Z 1985 o - S 15 0 0 S R Z 1986 o - May 18 0 1 D R Z 1986 o - S 7 0 0 S R Z 1987 o - Ap 15 0 1 D R Z 1987 o - S 13 0 0 S R Z 1988 o - Ap 10 0 1 D R Z 1988 o - S 4 0 0 S R Z 1989 o - Ap 30 0 1 D R Z 1989 o - S 3 0 0 S R Z 1990 o - Mar 25 0 1 D R Z 1990 o - Au 26 0 0 S R Z 1991 o - Mar 24 0 1 D R Z 1991 o - S 1 0 0 S R Z 1992 o - Mar 29 0 1 D R Z 1992 o - S 6 0 0 S R Z 1993 o - Ap 2 0 1 D R Z 1993 o - S 5 0 0 S R Z 1994 o - Ap 1 0 1 D R Z 1994 o - Au 28 0 0 S R Z 1995 o - Mar 31 0 1 D R Z 1995 o - S 3 0 0 S R Z 1996 o - Mar 15 0 1 D R Z 1996 o - S 16 0 0 S R Z 1997 o - Mar 21 0 1 D R Z 1997 o - S 14 0 0 S R Z 1998 o - Mar 20 0 1 D R Z 1998 o - S 6 0 0 S R Z 1999 o - Ap 2 2 1 D R Z 1999 o - S 3 2 0 S R Z 2000 o - Ap 14 2 1 D R Z 2000 o - O 6 1 0 S R Z 2001 o - Ap 9 1 1 D R Z 2001 o - S 24 1 0 S R Z 2002 o - Mar 29 1 1 D R Z 2002 o - O 7 1 0 S R Z 2003 o - Mar 28 1 1 D R Z 2003 o - O 3 1 0 S R Z 2004 o - Ap 7 1 1 D R Z 2004 o - S 22 1 0 S R Z 2005 2012 - Ap F<=1 2 1 D R Z 2005 o - O 9 2 0 S R Z 2006 o - O 1 2 0 S R Z 2007 o - S 16 2 0 S R Z 2008 o - O 5 2 0 S R Z 2009 o - S 27 2 0 S R Z 2010 o - S 12 2 0 S R Z 2011 o - O 2 2 0 S R Z 2012 o - S 23 2 0 S R Z 2013 ma - Mar F>=23 2 1 D R Z 2013 ma - O lastSu 2 0 S Z Asia/Jerusalem 2:20:54 - LMT 1880 
2:20:40 - JMT 1918 2 Z I%sT R JP 1948 o - May Sa>=1 24 1 D R JP 1948 1951 - S Sa>=8 25 0 S R JP 1949 o - Ap Sa>=1 24 1 D R JP 1950 1951 - May Sa>=1 24 1 D Z Asia/Tokyo 9:18:59 - LMT 1887 D 31 15u 9 JP J%sT R J 1973 o - Jun 6 0 1 S R J 1973 1975 - O 1 0 0 - R J 1974 1977 - May 1 0 1 S R J 1976 o - N 1 0 0 - R J 1977 o - O 1 0 0 - R J 1978 o - Ap 30 0 1 S R J 1978 o - S 30 0 0 - R J 1985 o - Ap 1 0 1 S R J 1985 o - O 1 0 0 - R J 1986 1988 - Ap F>=1 0 1 S R J 1986 1990 - O F>=1 0 0 - R J 1989 o - May 8 0 1 S R J 1990 o - Ap 27 0 1 S R J 1991 o - Ap 17 0 1 S R J 1991 o - S 27 0 0 - R J 1992 o - Ap 10 0 1 S R J 1992 1993 - O F>=1 0 0 - R J 1993 1998 - Ap F>=1 0 1 S R J 1994 o - S F>=15 0 0 - R J 1995 1998 - S F>=15 0s 0 - R J 1999 o - Jul 1 0s 1 S R J 1999 2002 - S lastF 0s 0 - R J 2000 2001 - Mar lastTh 0s 1 S R J 2002 2012 - Mar lastTh 24 1 S R J 2003 o - O 24 0s 0 - R J 2004 o - O 15 0s 0 - R J 2005 o - S lastF 0s 0 - R J 2006 2011 - O lastF 0s 0 - R J 2013 o - D 20 0 0 - R J 2014 ma - Mar lastTh 24 1 S R J 2014 ma - O lastF 0s 0 - Z Asia/Amman 2:23:44 - LMT 1931 2 J EE%sT Z Asia/Almaty 5:7:48 - LMT 1924 May 2 5 - +05 1930 Jun 21 6 R +06/+07 1991 Mar 31 2s 5 R +05/+06 1992 Ja 19 2s 6 R +06/+07 2004 O 31 2s 6 - +06 Z Asia/Qyzylorda 4:21:52 - LMT 1924 May 2 4 - +04 1930 Jun 21 5 - +05 1981 Ap 5 1 +06 1981 O 6 - +06 1982 Ap 5 R +05/+06 1991 Mar 31 2s 4 R +04/+05 1991 S 29 2s 5 R +05/+06 1992 Ja 19 2s 6 R +06/+07 1992 Mar 29 2s 5 R +05/+06 2004 O 31 2s 6 - +06 2018 D 21 5 - +05 Z Asia/Qostanay 4:14:28 - LMT 1924 May 2 4 - +04 1930 Jun 21 5 - +05 1981 Ap 5 1 +06 1981 O 6 - +06 1982 Ap 5 R +05/+06 1991 Mar 31 2s 4 R +04/+05 1992 Ja 19 2s 5 R +05/+06 2004 O 31 2s 6 - +06 Z Asia/Aqtobe 3:48:40 - LMT 1924 May 2 4 - +04 1930 Jun 21 5 - +05 1981 Ap 5 1 +06 1981 O 6 - +06 1982 Ap 5 R +05/+06 1991 Mar 31 2s 4 R +04/+05 1992 Ja 19 2s 5 R +05/+06 2004 O 31 2s 5 - +05 Z Asia/Aqtau 3:21:4 - LMT 1924 May 2 4 - +04 1930 Jun 21 5 - +05 1981 O 6 - +06 1982 Ap 5 R +05/+06 1991 Mar 31 2s 4 
R +04/+05 1992 Ja 19 2s 5 R +05/+06 1994 S 25 2s 4 R +04/+05 2004 O 31 2s 5 - +05 Z Asia/Atyrau 3:27:44 - LMT 1924 May 2 3 - +03 1930 Jun 21 5 - +05 1981 O 6 - +06 1982 Ap 5 R +05/+06 1991 Mar 31 2s 4 R +04/+05 1992 Ja 19 2s 5 R +05/+06 1999 Mar 28 2s 4 R +04/+05 2004 O 31 2s 5 - +05 Z Asia/Oral 3:25:24 - LMT 1924 May 2 3 - +03 1930 Jun 21 5 - +05 1981 Ap 5 1 +06 1981 O 6 - +06 1982 Ap 5 R +05/+06 1989 Mar 26 2s 4 R +04/+05 1992 Ja 19 2s 5 R +05/+06 1992 Mar 29 2s 4 R +04/+05 2004 O 31 2s 5 - +05 R KG 1992 1996 - Ap Su>=7 0s 1 - R KG 1992 1996 - S lastSu 0 0 - R KG 1997 2005 - Mar lastSu 2:30 1 - R KG 1997 2004 - O lastSu 2:30 0 - Z Asia/Bishkek 4:58:24 - LMT 1924 May 2 5 - +05 1930 Jun 21 6 R +06/+07 1991 Mar 31 2s 5 R +05/+06 1991 Au 31 2 5 KG +05/+06 2005 Au 12 6 - +06 R KR 1948 o - Jun 1 0 1 D R KR 1948 o - S 12 24 0 S R KR 1949 o - Ap 3 0 1 D R KR 1949 1951 - S Sa>=7 24 0 S R KR 1950 o - Ap 1 0 1 D R KR 1951 o - May 6 0 1 D R KR 1955 o - May 5 0 1 D R KR 1955 o - S 8 24 0 S R KR 1956 o - May 20 0 1 D R KR 1956 o - S 29 24 0 S R KR 1957 1960 - May Su>=1 0 1 D R KR 1957 1960 - S Sa>=17 24 0 S R KR 1987 1988 - May Su>=8 2 1 D R KR 1987 1988 - O Su>=8 3 0 S Z Asia/Seoul 8:27:52 - LMT 1908 Ap 8:30 - KST 1912 9 - JST 1945 S 8 9 KR K%sT 1954 Mar 21 8:30 KR K%sT 1961 Au 10 9 KR K%sT Z Asia/Pyongyang 8:23 - LMT 1908 Ap 8:30 - KST 1912 9 - JST 1945 Au 24 9 - KST 2015 Au 15 8:30 - KST 2018 May 4 23:30 9 - KST R l 1920 o - Mar 28 0 1 S R l 1920 o - O 25 0 0 - R l 1921 o - Ap 3 0 1 S R l 1921 o - O 3 0 0 - R l 1922 o - Mar 26 0 1 S R l 1922 o - O 8 0 0 - R l 1923 o - Ap 22 0 1 S R l 1923 o - S 16 0 0 - R l 1957 1961 - May 1 0 1 S R l 1957 1961 - O 1 0 0 - R l 1972 o - Jun 22 0 1 S R l 1972 1977 - O 1 0 0 - R l 1973 1977 - May 1 0 1 S R l 1978 o - Ap 30 0 1 S R l 1978 o - S 30 0 0 - R l 1984 1987 - May 1 0 1 S R l 1984 1991 - O 16 0 0 - R l 1988 o - Jun 1 0 1 S R l 1989 o - May 10 0 1 S R l 1990 1992 - May 1 0 1 S R l 1992 o - O 4 0 0 - R l 1993 ma - Mar lastSu 0 1 S R l 
1993 1998 - S lastSu 0 0 - R l 1999 ma - O lastSu 0 0 - Z Asia/Beirut 2:22 - LMT 1880 2 l EE%sT R NB 1935 1941 - S 14 0 0:20 - R NB 1935 1941 - D 14 0 0 - Z Asia/Kuala_Lumpur 6:46:46 - LMT 1901 6:55:25 - SMT 1905 Jun 7 - +07 1933 7 0:20 +0720 1936 7:20 - +0720 1941 S 7:30 - +0730 1942 F 16 9 - +09 1945 S 12 7:30 - +0730 1982 8 - +08 Z Asia/Kuching 7:21:20 - LMT 1926 Mar 7:30 - +0730 1933 8 NB +08/+0820 1942 F 16 9 - +09 1945 S 12 8 - +08 Z Indian/Maldives 4:54 - LMT 1880 4:54 - MMT 1960 5 - +05 R X 1983 1984 - Ap 1 0 1 - R X 1983 o - O 1 0 0 - R X 1985 1998 - Mar lastSu 0 1 - R X 1984 1998 - S lastSu 0 0 - R X 2001 o - Ap lastSa 2 1 - R X 2001 2006 - S lastSa 2 0 - R X 2002 2006 - Mar lastSa 2 1 - R X 2015 2016 - Mar lastSa 2 1 - R X 2015 2016 - S lastSa 0 0 - Z Asia/Hovd 6:6:36 - LMT 1905 Au 6 - +06 1978 7 X +07/+08 Z Asia/Ulaanbaatar 7:7:32 - LMT 1905 Au 7 - +07 1978 8 X +08/+09 Z Asia/Choibalsan 7:38 - LMT 1905 Au 7 - +07 1978 8 - +08 1983 Ap 9 X +09/+10 2008 Mar 31 8 X +08/+09 Z Asia/Kathmandu 5:41:16 - LMT 1920 5:30 - +0530 1986 5:45 - +0545 R PK 2002 o - Ap Su>=2 0 1 S R PK 2002 o - O Su>=2 0 0 - R PK 2008 o - Jun 1 0 1 S R PK 2008 2009 - N 1 0 0 - R PK 2009 o - Ap 15 0 1 S Z Asia/Karachi 4:28:12 - LMT 1907 5:30 - +0530 1942 S 5:30 1 +0630 1945 O 15 5:30 - +0530 1951 S 30 5 - +05 1971 Mar 26 5 PK PK%sT R P 1999 2005 - Ap F>=15 0 1 S R P 1999 2003 - O F>=15 0 0 - R P 2004 o - O 1 1 0 - R P 2005 o - O 4 2 0 - R P 2006 2007 - Ap 1 0 1 S R P 2006 o - S 22 0 0 - R P 2007 o - S Th>=8 2 0 - R P 2008 2009 - Mar lastF 0 1 S R P 2008 o - S 1 0 0 - R P 2009 o - S F>=1 1 0 - R P 2010 o - Mar 26 0 1 S R P 2010 o - Au 11 0 0 - R P 2011 o - Ap 1 0:1 1 S R P 2011 o - Au 1 0 0 - R P 2011 o - Au 30 0 1 S R P 2011 o - S 30 0 0 - R P 2012 2014 - Mar lastTh 24 1 S R P 2012 o - S 21 1 0 - R P 2013 o - S F>=21 0 0 - R P 2014 2015 - O F>=21 0 0 - R P 2015 o - Mar lastF 24 1 S R P 2016 2018 - Mar Sa>=24 1 1 S R P 2016 ma - O lastSa 1 0 - R P 2019 ma - Mar lastF 0 1 S Z Asia/Gaza 
2:17:52 - LMT 1900 O 2 Z EET/EEST 1948 May 15 2 K EE%sT 1967 Jun 5 2 Z I%sT 1996 2 J EE%sT 1999 2 P EE%sT 2008 Au 29 2 - EET 2008 S 2 P EE%sT 2010 2 - EET 2010 Mar 27 0:1 2 P EE%sT 2011 Au 2 - EET 2012 2 P EE%sT Z Asia/Hebron 2:20:23 - LMT 1900 O 2 Z EET/EEST 1948 May 15 2 K EE%sT 1967 Jun 5 2 Z I%sT 1996 2 J EE%sT 1999 2 P EE%sT R PH 1936 o - N 1 0 1 D R PH 1937 o - F 1 0 0 S R PH 1954 o - Ap 12 0 1 D R PH 1954 o - Jul 1 0 0 S R PH 1978 o - Mar 22 0 1 D R PH 1978 o - S 21 0 0 S Z Asia/Manila -15:56 - LMT 1844 D 31 8:4 - LMT 1899 May 11 8 PH P%sT 1942 May 9 - JST 1944 N 8 PH P%sT Z Asia/Qatar 3:26:8 - LMT 1920 4 - +04 1972 Jun 3 - +03 L Asia/Qatar Asia/Bahrain Z Asia/Riyadh 3:6:52 - LMT 1947 Mar 14 3 - +03 L Asia/Riyadh Asia/Aden L Asia/Riyadh Asia/Kuwait Z Asia/Singapore 6:55:25 - LMT 1901 6:55:25 - SMT 1905 Jun 7 - +07 1933 7 0:20 +0720 1936 7:20 - +0720 1941 S 7:30 - +0730 1942 F 16 9 - +09 1945 S 12 7:30 - +0730 1982 8 - +08 Z Asia/Colombo 5:19:24 - LMT 1880 5:19:32 - MMT 1906 5:30 - +0530 1942 Ja 5 5:30 0:30 +06 1942 S 5:30 1 +0630 1945 O 16 2 5:30 - +0530 1996 May 25 6:30 - +0630 1996 O 26 0:30 6 - +06 2006 Ap 15 0:30 5:30 - +0530 R S 1920 1923 - Ap Su>=15 2 1 S R S 1920 1923 - O Su>=1 2 0 - R S 1962 o - Ap 29 2 1 S R S 1962 o - O 1 2 0 - R S 1963 1965 - May 1 2 1 S R S 1963 o - S 30 2 0 - R S 1964 o - O 1 2 0 - R S 1965 o - S 30 2 0 - R S 1966 o - Ap 24 2 1 S R S 1966 1976 - O 1 2 0 - R S 1967 1978 - May 1 2 1 S R S 1977 1978 - S 1 2 0 - R S 1983 1984 - Ap 9 2 1 S R S 1983 1984 - O 1 2 0 - R S 1986 o - F 16 2 1 S R S 1986 o - O 9 2 0 - R S 1987 o - Mar 1 2 1 S R S 1987 1988 - O 31 2 0 - R S 1988 o - Mar 15 2 1 S R S 1989 o - Mar 31 2 1 S R S 1989 o - O 1 2 0 - R S 1990 o - Ap 1 2 1 S R S 1990 o - S 30 2 0 - R S 1991 o - Ap 1 0 1 S R S 1991 1992 - O 1 0 0 - R S 1992 o - Ap 8 0 1 S R S 1993 o - Mar 26 0 1 S R S 1993 o - S 25 0 0 - R S 1994 1996 - Ap 1 0 1 S R S 1994 2005 - O 1 0 0 - R S 1997 1998 - Mar lastM 0 1 S R S 1999 2006 - Ap 1 0 1 S R S 2006 o - S 22 0 
0 - R S 2007 o - Mar lastF 0 1 S R S 2007 o - N F>=1 0 0 - R S 2008 o - Ap F>=1 0 1 S R S 2008 o - N 1 0 0 - R S 2009 o - Mar lastF 0 1 S R S 2010 2011 - Ap F>=1 0 1 S R S 2012 ma - Mar lastF 0 1 S R S 2009 ma - O lastF 0 0 - Z Asia/Damascus 2:25:12 - LMT 1920 2 S EE%sT Z Asia/Dushanbe 4:35:12 - LMT 1924 May 2 5 - +05 1930 Jun 21 6 R +06/+07 1991 Mar 31 2s 5 1 +05/+06 1991 S 9 2s 5 - +05 Z Asia/Bangkok 6:42:4 - LMT 1880 6:42:4 - BMT 1920 Ap 7 - +07 L Asia/Bangkok Asia/Phnom_Penh L Asia/Bangkok Asia/Vientiane Z Asia/Ashgabat 3:53:32 - LMT 1924 May 2 4 - +04 1930 Jun 21 5 R +05/+06 1991 Mar 31 2 4 R +04/+05 1992 Ja 19 2 5 - +05 Z Asia/Dubai 3:41:12 - LMT 1920 4 - +04 L Asia/Dubai Asia/Muscat Z Asia/Samarkand 4:27:53 - LMT 1924 May 2 4 - +04 1930 Jun 21 5 - +05 1981 Ap 5 1 +06 1981 O 6 - +06 1982 Ap 5 R +05/+06 1992 5 - +05 Z Asia/Tashkent 4:37:11 - LMT 1924 May 2 5 - +05 1930 Jun 21 6 R +06/+07 1991 Mar 31 2 5 R +05/+06 1992 5 - +05 Z Asia/Ho_Chi_Minh 7:6:40 - LMT 1906 Jul 7:6:30 - PLMT 1911 May 7 - +07 1942 D 31 23 8 - +08 1945 Mar 14 23 9 - +09 1945 S 2 7 - +07 1947 Ap 8 - +08 1955 Jul 7 - +07 1959 D 31 23 8 - +08 1975 Jun 13 7 - +07 R AU 1917 o - Ja 1 0:1 1 D R AU 1917 o - Mar 25 2 0 S R AU 1942 o - Ja 1 2 1 D R AU 1942 o - Mar 29 2 0 S R AU 1942 o - S 27 2 1 D R AU 1943 1944 - Mar lastSu 2 0 S R AU 1943 o - O 3 2 1 D Z Australia/Darwin 8:43:20 - LMT 1895 F 9 - ACST 1899 May 9:30 AU AC%sT R AW 1974 o - O lastSu 2s 1 D R AW 1975 o - Mar Su>=1 2s 0 S R AW 1983 o - O lastSu 2s 1 D R AW 1984 o - Mar Su>=1 2s 0 S R AW 1991 o - N 17 2s 1 D R AW 1992 o - Mar Su>=1 2s 0 S R AW 2006 o - D 3 2s 1 D R AW 2007 2009 - Mar lastSu 2s 0 S R AW 2007 2008 - O lastSu 2s 1 D Z Australia/Perth 7:43:24 - LMT 1895 D 8 AU AW%sT 1943 Jul 8 AW AW%sT Z Australia/Eucla 8:35:28 - LMT 1895 D 8:45 AU +0845/+0945 1943 Jul 8:45 AW +0845/+0945 R AQ 1971 o - O lastSu 2s 1 D R AQ 1972 o - F lastSu 2s 0 S R AQ 1989 1991 - O lastSu 2s 1 D R AQ 1990 1992 - Mar Su>=1 2s 0 S R Ho 1992 1993 - O lastSu 2s 
1 D R Ho 1993 1994 - Mar Su>=1 2s 0 S Z Australia/Brisbane 10:12:8 - LMT 1895 10 AU AE%sT 1971 10 AQ AE%sT Z Australia/Lindeman 9:55:56 - LMT 1895 10 AU AE%sT 1971 10 AQ AE%sT 1992 Jul 10 Ho AE%sT R AS 1971 1985 - O lastSu 2s 1 D R AS 1986 o - O 19 2s 1 D R AS 1987 2007 - O lastSu 2s 1 D R AS 1972 o - F 27 2s 0 S R AS 1973 1985 - Mar Su>=1 2s 0 S R AS 1986 1990 - Mar Su>=15 2s 0 S R AS 1991 o - Mar 3 2s 0 S R AS 1992 o - Mar 22 2s 0 S R AS 1993 o - Mar 7 2s 0 S R AS 1994 o - Mar 20 2s 0 S R AS 1995 2005 - Mar lastSu 2s 0 S R AS 2006 o - Ap 2 2s 0 S R AS 2007 o - Mar lastSu 2s 0 S R AS 2008 ma - Ap Su>=1 2s 0 S R AS 2008 ma - O Su>=1 2s 1 D Z Australia/Adelaide 9:14:20 - LMT 1895 F 9 - ACST 1899 May 9:30 AU AC%sT 1971 9:30 AS AC%sT R AT 1967 o - O Su>=1 2s 1 D R AT 1968 o - Mar lastSu 2s 0 S R AT 1968 1985 - O lastSu 2s 1 D R AT 1969 1971 - Mar Su>=8 2s 0 S R AT 1972 o - F lastSu 2s 0 S R AT 1973 1981 - Mar Su>=1 2s 0 S R AT 1982 1983 - Mar lastSu 2s 0 S R AT 1984 1986 - Mar Su>=1 2s 0 S R AT 1986 o - O Su>=15 2s 1 D R AT 1987 1990 - Mar Su>=15 2s 0 S R AT 1987 o - O Su>=22 2s 1 D R AT 1988 1990 - O lastSu 2s 1 D R AT 1991 1999 - O Su>=1 2s 1 D R AT 1991 2005 - Mar lastSu 2s 0 S R AT 2000 o - Au lastSu 2s 1 D R AT 2001 ma - O Su>=1 2s 1 D R AT 2006 o - Ap Su>=1 2s 0 S R AT 2007 o - Mar lastSu 2s 0 S R AT 2008 ma - Ap Su>=1 2s 0 S Z Australia/Hobart 9:49:16 - LMT 1895 S 10 - AEST 1916 O 1 2 10 1 AEDT 1917 F 10 AU AE%sT 1967 10 AT AE%sT Z Australia/Currie 9:35:28 - LMT 1895 S 10 - AEST 1916 O 1 2 10 1 AEDT 1917 F 10 AU AE%sT 1971 Jul 10 AT AE%sT R AV 1971 1985 - O lastSu 2s 1 D R AV 1972 o - F lastSu 2s 0 S R AV 1973 1985 - Mar Su>=1 2s 0 S R AV 1986 1990 - Mar Su>=15 2s 0 S R AV 1986 1987 - O Su>=15 2s 1 D R AV 1988 1999 - O lastSu 2s 1 D R AV 1991 1994 - Mar Su>=1 2s 0 S R AV 1995 2005 - Mar lastSu 2s 0 S R AV 2000 o - Au lastSu 2s 1 D R AV 2001 2007 - O lastSu 2s 1 D R AV 2006 o - Ap Su>=1 2s 0 S R AV 2007 o - Mar lastSu 2s 0 S R AV 2008 ma - Ap Su>=1 2s 0 S R AV 
2008 ma - O Su>=1 2s 1 D Z Australia/Melbourne 9:39:52 - LMT 1895 F 10 AU AE%sT 1971 10 AV AE%sT R AN 1971 1985 - O lastSu 2s 1 D R AN 1972 o - F 27 2s 0 S R AN 1973 1981 - Mar Su>=1 2s 0 S R AN 1982 o - Ap Su>=1 2s 0 S R AN 1983 1985 - Mar Su>=1 2s 0 S R AN 1986 1989 - Mar Su>=15 2s 0 S R AN 1986 o - O 19 2s 1 D R AN 1987 1999 - O lastSu 2s 1 D R AN 1990 1995 - Mar Su>=1 2s 0 S R AN 1996 2005 - Mar lastSu 2s 0 S R AN 2000 o - Au lastSu 2s 1 D R AN 2001 2007 - O lastSu 2s 1 D R AN 2006 o - Ap Su>=1 2s 0 S R AN 2007 o - Mar lastSu 2s 0 S R AN 2008 ma - Ap Su>=1 2s 0 S R AN 2008 ma - O Su>=1 2s 1 D Z Australia/Sydney 10:4:52 - LMT 1895 F 10 AU AE%sT 1971 10 AN AE%sT Z Australia/Broken_Hill 9:25:48 - LMT 1895 F 10 - AEST 1896 Au 23 9 - ACST 1899 May 9:30 AU AC%sT 1971 9:30 AN AC%sT 2000 9:30 AS AC%sT R LH 1981 1984 - O lastSu 2 1 - R LH 1982 1985 - Mar Su>=1 2 0 - R LH 1985 o - O lastSu 2 0:30 - R LH 1986 1989 - Mar Su>=15 2 0 - R LH 1986 o - O 19 2 0:30 - R LH 1987 1999 - O lastSu 2 0:30 - R LH 1990 1995 - Mar Su>=1 2 0 - R LH 1996 2005 - Mar lastSu 2 0 - R LH 2000 o - Au lastSu 2 0:30 - R LH 2001 2007 - O lastSu 2 0:30 - R LH 2006 o - Ap Su>=1 2 0 - R LH 2007 o - Mar lastSu 2 0 - R LH 2008 ma - Ap Su>=1 2 0 - R LH 2008 ma - O Su>=1 2 0:30 - Z Australia/Lord_Howe 10:36:20 - LMT 1895 F 10 - AEST 1981 Mar 10:30 LH +1030/+1130 1985 Jul 10:30 LH +1030/+11 Z Antarctica/Macquarie 0 - -00 1899 N 10 - AEST 1916 O 1 2 10 1 AEDT 1917 F 10 AU AE%sT 1919 Ap 1 0s 0 - -00 1948 Mar 25 10 AU AE%sT 1967 10 AT AE%sT 2010 Ap 4 3 11 - +11 Z Indian/Christmas 7:2:52 - LMT 1895 F 7 - +07 Z Indian/Cocos 6:27:40 - LMT 1900 6:30 - +0630 R FJ 1998 1999 - N Su>=1 2 1 - R FJ 1999 2000 - F lastSu 3 0 - R FJ 2009 o - N 29 2 1 - R FJ 2010 o - Mar lastSu 3 0 - R FJ 2010 2013 - O Su>=21 2 1 - R FJ 2011 o - Mar Su>=1 3 0 - R FJ 2012 2013 - Ja Su>=18 3 0 - R FJ 2014 o - Ja Su>=18 2 0 - R FJ 2014 2018 - N Su>=1 2 1 - R FJ 2015 ma - Ja Su>=12 3 0 - R FJ 2019 ma - N Su>=8 2 1 - Z Pacific/Fiji 11:55:44 - 
LMT 1915 O 26 12 FJ +12/+13 Z Pacific/Gambier -8:59:48 - LMT 1912 O -9 - -09 Z Pacific/Marquesas -9:18 - LMT 1912 O -9:30 - -0930 Z Pacific/Tahiti -9:58:16 - LMT 1912 O -10 - -10 R Gu 1959 o - Jun 27 2 1 D R Gu 1961 o - Ja 29 2 0 S R Gu 1967 o - S 1 2 1 D R Gu 1969 o - Ja 26 0:1 0 S R Gu 1969 o - Jun 22 2 1 D R Gu 1969 o - Au 31 2 0 S R Gu 1970 1971 - Ap lastSu 2 1 D R Gu 1970 1971 - S Su>=1 2 0 S R Gu 1973 o - D 16 2 1 D R Gu 1974 o - F 24 2 0 S R Gu 1976 o - May 26 2 1 D R Gu 1976 o - Au 22 2:1 0 S R Gu 1977 o - Ap 24 2 1 D R Gu 1977 o - Au 28 2 0 S Z Pacific/Guam -14:21 - LMT 1844 D 31 9:39 - LMT 1901 10 - GST 1941 D 10 9 - +09 1944 Jul 31 10 Gu G%sT 2000 D 23 10 - ChST L Pacific/Guam Pacific/Saipan Z Pacific/Tarawa 11:32:4 - LMT 1901 12 - +12 Z Pacific/Enderbury -11:24:20 - LMT 1901 -12 - -12 1979 O -11 - -11 1994 D 31 13 - +13 Z Pacific/Kiritimati -10:29:20 - LMT 1901 -10:40 - -1040 1979 O -10 - -10 1994 D 31 14 - +14 Z Pacific/Majuro 11:24:48 - LMT 1901 11 - +11 1914 O 9 - +09 1919 F 11 - +11 1937 10 - +10 1941 Ap 9 - +09 1944 Ja 30 11 - +11 1969 O 12 - +12 Z Pacific/Kwajalein 11:9:20 - LMT 1901 11 - +11 1937 10 - +10 1941 Ap 9 - +09 1944 F 6 11 - +11 1969 O -12 - -12 1993 Au 20 24 12 - +12 Z Pacific/Chuuk -13:52:52 - LMT 1844 D 31 10:7:8 - LMT 1901 10 - +10 1914 O 9 - +09 1919 F 10 - +10 1941 Ap 9 - +09 1945 Au 10 - +10 Z Pacific/Pohnpei -13:27:8 - LMT 1844 D 31 10:32:52 - LMT 1901 11 - +11 1914 O 9 - +09 1919 F 11 - +11 1937 10 - +10 1941 Ap 9 - +09 1945 Au 11 - +11 Z Pacific/Kosrae -13:8:4 - LMT 1844 D 31 10:51:56 - LMT 1901 11 - +11 1914 O 9 - +09 1919 F 11 - +11 1937 10 - +10 1941 Ap 9 - +09 1945 Au 11 - +11 1969 O 12 - +12 1999 11 - +11 Z Pacific/Nauru 11:7:40 - LMT 1921 Ja 15 11:30 - +1130 1942 Au 29 9 - +09 1945 S 8 11:30 - +1130 1979 F 10 2 12 - +12 R NC 1977 1978 - D Su>=1 0 1 - R NC 1978 1979 - F 27 0 0 - R NC 1996 o - D 1 2s 1 - R NC 1997 o - Mar 2 2s 0 - Z Pacific/Noumea 11:5:48 - LMT 1912 Ja 13 11 NC +11/+12 R NZ 1927 o - N 6 2 1 S R NZ 1928 o - 
Mar 4 2 0 M R NZ 1928 1933 - O Su>=8 2 0:30 S R NZ 1929 1933 - Mar Su>=15 2 0 M R NZ 1934 1940 - Ap lastSu 2 0 M R NZ 1934 1940 - S lastSu 2 0:30 S R NZ 1946 o - Ja 1 0 0 S R NZ 1974 o - N Su>=1 2s 1 D R k 1974 o - N Su>=1 2:45s 1 - R NZ 1975 o - F lastSu 2s 0 S R k 1975 o - F lastSu 2:45s 0 - R NZ 1975 1988 - O lastSu 2s 1 D R k 1975 1988 - O lastSu 2:45s 1 - R NZ 1976 1989 - Mar Su>=1 2s 0 S R k 1976 1989 - Mar Su>=1 2:45s 0 - R NZ 1989 o - O Su>=8 2s 1 D R k 1989 o - O Su>=8 2:45s 1 - R NZ 1990 2006 - O Su>=1 2s 1 D R k 1990 2006 - O Su>=1 2:45s 1 - R NZ 1990 2007 - Mar Su>=15 2s 0 S R k 1990 2007 - Mar Su>=15 2:45s 0 - R NZ 2007 ma - S lastSu 2s 1 D R k 2007 ma - S lastSu 2:45s 1 - R NZ 2008 ma - Ap Su>=1 2s 0 S R k 2008 ma - Ap Su>=1 2:45s 0 - Z Pacific/Auckland 11:39:4 - LMT 1868 N 2 11:30 NZ NZ%sT 1946 12 NZ NZ%sT Z Pacific/Chatham 12:13:48 - LMT 1868 N 2 12:15 - +1215 1946 12:45 k +1245/+1345 L Pacific/Auckland Antarctica/McMurdo R CK 1978 o - N 12 0 0:30 - R CK 1979 1991 - Mar Su>=1 0 0 - R CK 1979 1990 - O lastSu 0 0:30 - Z Pacific/Rarotonga -10:39:4 - LMT 1901 -10:30 - -1030 1978 N 12 -10 CK -10/-0930 Z Pacific/Niue -11:19:40 - LMT 1901 -11:20 - -1120 1951 -11:30 - -1130 1978 O -11 - -11 Z Pacific/Norfolk 11:11:52 - LMT 1901 11:12 - +1112 1951 11:30 - +1130 1974 O 27 2s 11:30 1 +1230 1975 Mar 2 2s 11:30 - +1130 2015 O 4 2s 11 - +11 2019 Jul 11 AN +11/+12 Z Pacific/Palau -15:2:4 - LMT 1844 D 31 8:57:56 - LMT 1901 9 - +09 Z Pacific/Port_Moresby 9:48:40 - LMT 1880 9:48:32 - PMMT 1895 10 - +10 Z Pacific/Bougainville 10:22:16 - LMT 1880 9:48:32 - PMMT 1895 10 - +10 1942 Jul 9 - +09 1945 Au 21 10 - +10 2014 D 28 2 11 - +11 Z Pacific/Pitcairn -8:40:20 - LMT 1901 -8:30 - -0830 1998 Ap 27 -8 - -08 Z Pacific/Pago_Pago 12:37:12 - LMT 1892 Jul 5 -11:22:48 - LMT 1911 -11 - SST L Pacific/Pago_Pago Pacific/Midway R WS 2010 o - S lastSu 0 1 - R WS 2011 o - Ap Sa>=1 4 0 - R WS 2011 o - S lastSa 3 1 - R WS 2012 ma - Ap Su>=1 4 0 - R WS 2012 ma - S lastSu 3 1 - Z 
Pacific/Apia 12:33:4 - LMT 1892 Jul 5 -11:26:56 - LMT 1911 -11:30 - -1130 1950 -11 WS -11/-10 2011 D 29 24 13 WS +13/+14 Z Pacific/Guadalcanal 10:39:48 - LMT 1912 O 11 - +11 Z Pacific/Fakaofo -11:24:56 - LMT 1901 -11 - -11 2011 D 30 13 - +13 R TO 1999 o - O 7 2s 1 - R TO 2000 o - Mar 19 2s 0 - R TO 2000 2001 - N Su>=1 2 1 - R TO 2001 2002 - Ja lastSu 2 0 - R TO 2016 o - N Su>=1 2 1 - R TO 2017 o - Ja Su>=15 3 0 - Z Pacific/Tongatapu 12:19:20 - LMT 1901 12:20 - +1220 1941 13 - +13 1999 13 TO +13/+14 Z Pacific/Funafuti 11:56:52 - LMT 1901 12 - +12 Z Pacific/Wake 11:6:28 - LMT 1901 12 - +12 R VU 1983 o - S 25 0 1 - R VU 1984 1991 - Mar Su>=23 0 0 - R VU 1984 o - O 23 0 1 - R VU 1985 1991 - S Su>=23 0 1 - R VU 1992 1993 - Ja Su>=23 0 0 - R VU 1992 o - O Su>=23 0 1 - Z Pacific/Efate 11:13:16 - LMT 1912 Ja 13 11 VU +11/+12 Z Pacific/Wallis 12:15:20 - LMT 1901 12 - +12 R G 1916 o - May 21 2s 1 BST R G 1916 o - O 1 2s 0 GMT R G 1917 o - Ap 8 2s 1 BST R G 1917 o - S 17 2s 0 GMT R G 1918 o - Mar 24 2s 1 BST R G 1918 o - S 30 2s 0 GMT R G 1919 o - Mar 30 2s 1 BST R G 1919 o - S 29 2s 0 GMT R G 1920 o - Mar 28 2s 1 BST R G 1920 o - O 25 2s 0 GMT R G 1921 o - Ap 3 2s 1 BST R G 1921 o - O 3 2s 0 GMT R G 1922 o - Mar 26 2s 1 BST R G 1922 o - O 8 2s 0 GMT R G 1923 o - Ap Su>=16 2s 1 BST R G 1923 1924 - S Su>=16 2s 0 GMT R G 1924 o - Ap Su>=9 2s 1 BST R G 1925 1926 - Ap Su>=16 2s 1 BST R G 1925 1938 - O Su>=2 2s 0 GMT R G 1927 o - Ap Su>=9 2s 1 BST R G 1928 1929 - Ap Su>=16 2s 1 BST R G 1930 o - Ap Su>=9 2s 1 BST R G 1931 1932 - Ap Su>=16 2s 1 BST R G 1933 o - Ap Su>=9 2s 1 BST R G 1934 o - Ap Su>=16 2s 1 BST R G 1935 o - Ap Su>=9 2s 1 BST R G 1936 1937 - Ap Su>=16 2s 1 BST R G 1938 o - Ap Su>=9 2s 1 BST R G 1939 o - Ap Su>=16 2s 1 BST R G 1939 o - N Su>=16 2s 0 GMT R G 1940 o - F Su>=23 2s 1 BST R G 1941 o - May Su>=2 1s 2 BDST R G 1941 1943 - Au Su>=9 1s 1 BST R G 1942 1944 - Ap Su>=2 1s 2 BDST R G 1944 o - S Su>=16 1s 1 BST R G 1945 o - Ap M>=2 1s 2 BDST R G 1945 o - Jul Su>=9 
1s 1 BST R G 1945 1946 - O Su>=2 2s 0 GMT R G 1946 o - Ap Su>=9 2s 1 BST R G 1947 o - Mar 16 2s 1 BST R G 1947 o - Ap 13 1s 2 BDST R G 1947 o - Au 10 1s 1 BST R G 1947 o - N 2 2s 0 GMT R G 1948 o - Mar 14 2s 1 BST R G 1948 o - O 31 2s 0 GMT R G 1949 o - Ap 3 2s 1 BST R G 1949 o - O 30 2s 0 GMT R G 1950 1952 - Ap Su>=14 2s 1 BST R G 1950 1952 - O Su>=21 2s 0 GMT R G 1953 o - Ap Su>=16 2s 1 BST R G 1953 1960 - O Su>=2 2s 0 GMT R G 1954 o - Ap Su>=9 2s 1 BST R G 1955 1956 - Ap Su>=16 2s 1 BST R G 1957 o - Ap Su>=9 2s 1 BST R G 1958 1959 - Ap Su>=16 2s 1 BST R G 1960 o - Ap Su>=9 2s 1 BST R G 1961 1963 - Mar lastSu 2s 1 BST R G 1961 1968 - O Su>=23 2s 0 GMT R G 1964 1967 - Mar Su>=19 2s 1 BST R G 1968 o - F 18 2s 1 BST R G 1972 1980 - Mar Su>=16 2s 1 BST R G 1972 1980 - O Su>=23 2s 0 GMT R G 1981 1995 - Mar lastSu 1u 1 BST R G 1981 1989 - O Su>=23 1u 0 GMT R G 1990 1995 - O Su>=22 1u 0 GMT Z Europe/London -0:1:15 - LMT 1847 D 1 0s 0 G %s 1968 O 27 1 - BST 1971 O 31 2u 0 G %s 1996 0 E GMT/BST L Europe/London Europe/Jersey L Europe/London Europe/Guernsey L Europe/London Europe/Isle_of_Man R IE 1971 o - O 31 2u -1 - R IE 1972 1980 - Mar Su>=16 2u 0 - R IE 1972 1980 - O Su>=23 2u -1 - R IE 1981 ma - Mar lastSu 1u 0 - R IE 1981 1989 - O Su>=23 1u -1 - R IE 1990 1995 - O Su>=22 1u -1 - R IE 1996 ma - O lastSu 1u -1 - Z Europe/Dublin -0:25 - LMT 1880 Au 2 -0:25:21 - DMT 1916 May 21 2s -0:25:21 1 IST 1916 O 1 2s 0 G %s 1921 D 6 0 G GMT/IST 1940 F 25 2s 0 1 IST 1946 O 6 2s 0 - GMT 1947 Mar 16 2s 0 1 IST 1947 N 2 2s 0 - GMT 1948 Ap 18 2s 0 G GMT/IST 1968 O 27 1 IE IST/GMT R E 1977 1980 - Ap Su>=1 1u 1 S R E 1977 o - S lastSu 1u 0 - R E 1978 o - O 1 1u 0 - R E 1979 1995 - S lastSu 1u 0 - R E 1981 ma - Mar lastSu 1u 1 S R E 1996 ma - O lastSu 1u 0 - R W- 1977 1980 - Ap Su>=1 1s 1 S R W- 1977 o - S lastSu 1s 0 - R W- 1978 o - O 1 1s 0 - R W- 1979 1995 - S lastSu 1s 0 - R W- 1981 ma - Mar lastSu 1s 1 S R W- 1996 ma - O lastSu 1s 0 - R c 1916 o - Ap 30 23 1 S R c 1916 o - O 1 1 0 - R 
c 1917 1918 - Ap M>=15 2s 1 S R c 1917 1918 - S M>=15 2s 0 - R c 1940 o - Ap 1 2s 1 S R c 1942 o - N 2 2s 0 - R c 1943 o - Mar 29 2s 1 S R c 1943 o - O 4 2s 0 - R c 1944 1945 - Ap M>=1 2s 1 S R c 1944 o - O 2 2s 0 - R c 1945 o - S 16 2s 0 - R c 1977 1980 - Ap Su>=1 2s 1 S R c 1977 o - S lastSu 2s 0 - R c 1978 o - O 1 2s 0 - R c 1979 1995 - S lastSu 2s 0 - R c 1981 ma - Mar lastSu 2s 1 S R c 1996 ma - O lastSu 2s 0 - R e 1977 1980 - Ap Su>=1 0 1 S R e 1977 o - S lastSu 0 0 - R e 1978 o - O 1 0 0 - R e 1979 1995 - S lastSu 0 0 - R e 1981 ma - Mar lastSu 0 1 S R e 1996 ma - O lastSu 0 0 - R R 1917 o - Jul 1 23 1 MST R R 1917 o - D 28 0 0 MMT R R 1918 o - May 31 22 2 MDST R R 1918 o - S 16 1 1 MST R R 1919 o - May 31 23 2 MDST R R 1919 o - Jul 1 0u 1 MSD R R 1919 o - Au 16 0 0 MSK R R 1921 o - F 14 23 1 MSD R R 1921 o - Mar 20 23 2 +05 R R 1921 o - S 1 0 1 MSD R R 1921 o - O 1 0 0 - R R 1981 1984 - Ap 1 0 1 S R R 1981 1983 - O 1 0 0 - R R 1984 1995 - S lastSu 2s 0 - R R 1985 2010 - Mar lastSu 2s 1 S R R 1996 2010 - O lastSu 2s 0 - Z WET 0 E WE%sT Z CET 1 c CE%sT Z MET 1 c ME%sT Z EET 2 E EE%sT R q 1940 o - Jun 16 0 1 S R q 1942 o - N 2 3 0 - R q 1943 o - Mar 29 2 1 S R q 1943 o - Ap 10 3 0 - R q 1974 o - May 4 0 1 S R q 1974 o - O 2 0 0 - R q 1975 o - May 1 0 1 S R q 1975 o - O 2 0 0 - R q 1976 o - May 2 0 1 S R q 1976 o - O 3 0 0 - R q 1977 o - May 8 0 1 S R q 1977 o - O 2 0 0 - R q 1978 o - May 6 0 1 S R q 1978 o - O 1 0 0 - R q 1979 o - May 5 0 1 S R q 1979 o - S 30 0 0 - R q 1980 o - May 3 0 1 S R q 1980 o - O 4 0 0 - R q 1981 o - Ap 26 0 1 S R q 1981 o - S 27 0 0 - R q 1982 o - May 2 0 1 S R q 1982 o - O 3 0 0 - R q 1983 o - Ap 18 0 1 S R q 1983 o - O 1 0 0 - R q 1984 o - Ap 1 0 1 S Z Europe/Tirane 1:19:20 - LMT 1914 1 - CET 1940 Jun 16 1 q CE%sT 1984 Jul 1 E CE%sT Z Europe/Andorra 0:6:4 - LMT 1901 0 - WET 1946 S 30 1 - CET 1985 Mar 31 2 1 E CE%sT R a 1920 o - Ap 5 2s 1 S R a 1920 o - S 13 2s 0 - R a 1946 o - Ap 14 2s 1 S R a 1946 o - O 7 2s 0 - R a 1947 1948 - O 
Su>=1 2s 0 - R a 1947 o - Ap 6 2s 1 S R a 1948 o - Ap 18 2s 1 S R a 1980 o - Ap 6 0 1 S R a 1980 o - S 28 0 0 - Z Europe/Vienna 1:5:21 - LMT 1893 Ap 1 c CE%sT 1920 1 a CE%sT 1940 Ap 1 2s 1 c CE%sT 1945 Ap 2 2s 1 1 CEST 1945 Ap 12 2s 1 - CET 1946 1 a CE%sT 1981 1 E CE%sT Z Europe/Minsk 1:50:16 - LMT 1880 1:50 - MMT 1924 May 2 2 - EET 1930 Jun 21 3 - MSK 1941 Jun 28 1 c CE%sT 1944 Jul 3 3 R MSK/MSD 1990 3 - MSK 1991 Mar 31 2s 2 R EE%sT 2011 Mar 27 2s 3 - +03 R b 1918 o - Mar 9 0s 1 S R b 1918 1919 - O Sa>=1 23s 0 - R b 1919 o - Mar 1 23s 1 S R b 1920 o - F 14 23s 1 S R b 1920 o - O 23 23s 0 - R b 1921 o - Mar 14 23s 1 S R b 1921 o - O 25 23s 0 - R b 1922 o - Mar 25 23s 1 S R b 1922 1927 - O Sa>=1 23s 0 - R b 1923 o - Ap 21 23s 1 S R b 1924 o - Mar 29 23s 1 S R b 1925 o - Ap 4 23s 1 S R b 1926 o - Ap 17 23s 1 S R b 1927 o - Ap 9 23s 1 S R b 1928 o - Ap 14 23s 1 S R b 1928 1938 - O Su>=2 2s 0 - R b 1929 o - Ap 21 2s 1 S R b 1930 o - Ap 13 2s 1 S R b 1931 o - Ap 19 2s 1 S R b 1932 o - Ap 3 2s 1 S R b 1933 o - Mar 26 2s 1 S R b 1934 o - Ap 8 2s 1 S R b 1935 o - Mar 31 2s 1 S R b 1936 o - Ap 19 2s 1 S R b 1937 o - Ap 4 2s 1 S R b 1938 o - Mar 27 2s 1 S R b 1939 o - Ap 16 2s 1 S R b 1939 o - N 19 2s 0 - R b 1940 o - F 25 2s 1 S R b 1944 o - S 17 2s 0 - R b 1945 o - Ap 2 2s 1 S R b 1945 o - S 16 2s 0 - R b 1946 o - May 19 2s 1 S R b 1946 o - O 7 2s 0 - Z Europe/Brussels 0:17:30 - LMT 1880 0:17:30 - BMT 1892 May 1 0:17:30 0 - WET 1914 N 8 1 - CET 1916 May 1 c CE%sT 1918 N 11 11u 0 b WE%sT 1940 May 20 2s 1 c CE%sT 1944 S 3 1 b CE%sT 1977 1 E CE%sT R BG 1979 o - Mar 31 23 1 S R BG 1979 o - O 1 1 0 - R BG 1980 1982 - Ap Sa>=1 23 1 S R BG 1980 o - S 29 1 0 - R BG 1981 o - S 27 2 0 - Z Europe/Sofia 1:33:16 - LMT 1880 1:56:56 - IMT 1894 N 30 2 - EET 1942 N 2 3 1 c CE%sT 1945 1 - CET 1945 Ap 2 3 2 - EET 1979 Mar 31 23 2 BG EE%sT 1982 S 26 3 2 c EE%sT 1991 2 e EE%sT 1997 2 E EE%sT R CZ 1945 o - Ap M>=1 2s 1 S R CZ 1945 o - O 1 2s 0 - R CZ 1946 o - May 6 2s 1 S R CZ 1946 1949 - O 
Su>=1 2s 0 - R CZ 1947 1948 - Ap Su>=15 2s 1 S R CZ 1949 o - Ap 9 2s 1 S Z Europe/Prague 0:57:44 - LMT 1850 0:57:44 - PMT 1891 O 1 c CE%sT 1945 May 9 1 CZ CE%sT 1946 D 1 3 1 -1 GMT 1947 F 23 2 1 CZ CE%sT 1979 1 E CE%sT R D 1916 o - May 14 23 1 S R D 1916 o - S 30 23 0 - R D 1940 o - May 15 0 1 S R D 1945 o - Ap 2 2s 1 S R D 1945 o - Au 15 2s 0 - R D 1946 o - May 1 2s 1 S R D 1946 o - S 1 2s 0 - R D 1947 o - May 4 2s 1 S R D 1947 o - Au 10 2s 0 - R D 1948 o - May 9 2s 1 S R D 1948 o - Au 8 2s 0 - Z Europe/Copenhagen 0:50:20 - LMT 1890 0:50:20 - CMT 1894 1 D CE%sT 1942 N 2 2s 1 c CE%sT 1945 Ap 2 2 1 D CE%sT 1980 1 E CE%sT Z Atlantic/Faroe -0:27:4 - LMT 1908 Ja 11 0 - WET 1981 0 E WE%sT R Th 1991 1992 - Mar lastSu 2 1 D R Th 1991 1992 - S lastSu 2 0 S R Th 1993 2006 - Ap Su>=1 2 1 D R Th 1993 2006 - O lastSu 2 0 S R Th 2007 ma - Mar Su>=8 2 1 D R Th 2007 ma - N Su>=1 2 0 S Z America/Danmarkshavn -1:14:40 - LMT 1916 Jul 28 -3 - -03 1980 Ap 6 2 -3 E -03/-02 1996 0 - GMT Z America/Scoresbysund -1:27:52 - LMT 1916 Jul 28 -2 - -02 1980 Ap 6 2 -2 c -02/-01 1981 Mar 29 -1 E -01/+00 Z America/Nuuk -3:26:56 - LMT 1916 Jul 28 -3 - -03 1980 Ap 6 2 -3 E -03/-02 Z America/Thule -4:35:8 - LMT 1916 Jul 28 -4 Th A%sT Z Europe/Tallinn 1:39 - LMT 1880 1:39 - TMT 1918 F 1 c CE%sT 1919 Jul 1:39 - TMT 1921 May 2 - EET 1940 Au 6 3 - MSK 1941 S 15 1 c CE%sT 1944 S 22 3 R MSK/MSD 1989 Mar 26 2s 2 1 EEST 1989 S 24 2s 2 c EE%sT 1998 S 22 2 E EE%sT 1999 O 31 4 2 - EET 2002 F 21 2 E EE%sT R FI 1942 o - Ap 2 24 1 S R FI 1942 o - O 4 1 0 - R FI 1981 1982 - Mar lastSu 2 1 S R FI 1981 1982 - S lastSu 3 0 - Z Europe/Helsinki 1:39:49 - LMT 1878 May 31 1:39:49 - HMT 1921 May 2 FI EE%sT 1983 2 E EE%sT L Europe/Helsinki Europe/Mariehamn R F 1916 o - Jun 14 23s 1 S R F 1916 1919 - O Su>=1 23s 0 - R F 1917 o - Mar 24 23s 1 S R F 1918 o - Mar 9 23s 1 S R F 1919 o - Mar 1 23s 1 S R F 1920 o - F 14 23s 1 S R F 1920 o - O 23 23s 0 - R F 1921 o - Mar 14 23s 1 S R F 1921 o - O 25 23s 0 - R F 1922 o - Mar 25 23s 
1 S R F 1922 1938 - O Sa>=1 23s 0 - R F 1923 o - May 26 23s 1 S R F 1924 o - Mar 29 23s 1 S R F 1925 o - Ap 4 23s 1 S R F 1926 o - Ap 17 23s 1 S R F 1927 o - Ap 9 23s 1 S R F 1928 o - Ap 14 23s 1 S R F 1929 o - Ap 20 23s 1 S R F 1930 o - Ap 12 23s 1 S R F 1931 o - Ap 18 23s 1 S R F 1932 o - Ap 2 23s 1 S R F 1933 o - Mar 25 23s 1 S R F 1934 o - Ap 7 23s 1 S R F 1935 o - Mar 30 23s 1 S R F 1936 o - Ap 18 23s 1 S R F 1937 o - Ap 3 23s 1 S R F 1938 o - Mar 26 23s 1 S R F 1939 o - Ap 15 23s 1 S R F 1939 o - N 18 23s 0 - R F 1940 o - F 25 2 1 S R F 1941 o - May 5 0 2 M R F 1941 o - O 6 0 1 S R F 1942 o - Mar 9 0 2 M R F 1942 o - N 2 3 1 S R F 1943 o - Mar 29 2 2 M R F 1943 o - O 4 3 1 S R F 1944 o - Ap 3 2 2 M R F 1944 o - O 8 1 1 S R F 1945 o - Ap 2 2 2 M R F 1945 o - S 16 3 0 - R F 1976 o - Mar 28 1 1 S R F 1976 o - S 26 1 0 - Z Europe/Paris 0:9:21 - LMT 1891 Mar 15 0:1 0:9:21 - PMT 1911 Mar 11 0:1 0 F WE%sT 1940 Jun 14 23 1 c CE%sT 1944 Au 25 0 F WE%sT 1945 S 16 3 1 F CE%sT 1977 1 E CE%sT R DE 1946 o - Ap 14 2s 1 S R DE 1946 o - O 7 2s 0 - R DE 1947 1949 - O Su>=1 2s 0 - R DE 1947 o - Ap 6 3s 1 S R DE 1947 o - May 11 2s 2 M R DE 1947 o - Jun 29 3 1 S R DE 1948 o - Ap 18 2s 1 S R DE 1949 o - Ap 10 2s 1 S R So 1945 o - May 24 2 2 M R So 1945 o - S 24 3 1 S R So 1945 o - N 18 2s 0 - Z Europe/Berlin 0:53:28 - LMT 1893 Ap 1 c CE%sT 1945 May 24 2 1 So CE%sT 1946 1 DE CE%sT 1980 1 E CE%sT L Europe/Zurich Europe/Busingen Z Europe/Gibraltar -0:21:24 - LMT 1880 Au 2 0s 0 G %s 1957 Ap 14 2 1 - CET 1982 1 E CE%sT R g 1932 o - Jul 7 0 1 S R g 1932 o - S 1 0 0 - R g 1941 o - Ap 7 0 1 S R g 1942 o - N 2 3 0 - R g 1943 o - Mar 30 0 1 S R g 1943 o - O 4 0 0 - R g 1952 o - Jul 1 0 1 S R g 1952 o - N 2 0 0 - R g 1975 o - Ap 12 0s 1 S R g 1975 o - N 26 0s 0 - R g 1976 o - Ap 11 2s 1 S R g 1976 o - O 10 2s 0 - R g 1977 1978 - Ap Su>=1 2s 1 S R g 1977 o - S 26 2s 0 - R g 1978 o - S 24 4 0 - R g 1979 o - Ap 1 9 1 S R g 1979 o - S 29 2 0 - R g 1980 o - Ap 1 0 1 S R g 1980 o - S 28 0 0 - Z 
Europe/Athens 1:34:52 - LMT 1895 S 14 1:34:52 - AMT 1916 Jul 28 0:1 2 g EE%sT 1941 Ap 30 1 g CE%sT 1944 Ap 4 2 g EE%sT 1981 2 E EE%sT R h 1918 o - Ap 1 3 1 S R h 1918 o - S 16 3 0 - R h 1919 o - Ap 15 3 1 S R h 1919 o - N 24 3 0 - R h 1945 o - May 1 23 1 S R h 1945 o - N 1 0 0 - R h 1946 o - Mar 31 2s 1 S R h 1946 1949 - O Su>=1 2s 0 - R h 1947 1949 - Ap Su>=4 2s 1 S R h 1950 o - Ap 17 2s 1 S R h 1950 o - O 23 2s 0 - R h 1954 1955 - May 23 0 1 S R h 1954 1955 - O 3 0 0 - R h 1956 o - Jun Su>=1 0 1 S R h 1956 o - S lastSu 0 0 - R h 1957 o - Jun Su>=1 1 1 S R h 1957 o - S lastSu 3 0 - R h 1980 o - Ap 6 1 1 S Z Europe/Budapest 1:16:20 - LMT 1890 O 1 c CE%sT 1918 1 h CE%sT 1941 Ap 8 1 c CE%sT 1945 1 h CE%sT 1980 S 28 2s 1 E CE%sT R w 1917 1919 - F 19 23 1 - R w 1917 o - O 21 1 0 - R w 1918 1919 - N 16 1 0 - R w 1921 o - Mar 19 23 1 - R w 1921 o - Jun 23 1 0 - R w 1939 o - Ap 29 23 1 - R w 1939 o - O 29 2 0 - R w 1940 o - F 25 2 1 - R w 1940 1941 - N Su>=2 1s 0 - R w 1941 1942 - Mar Su>=2 1s 1 - R w 1943 1946 - Mar Su>=1 1s 1 - R w 1942 1948 - O Su>=22 1s 0 - R w 1947 1967 - Ap Su>=1 1s 1 - R w 1949 o - O 30 1s 0 - R w 1950 1966 - O Su>=22 1s 0 - R w 1967 o - O 29 1s 0 - Z Atlantic/Reykjavik -1:28 - LMT 1908 -1 w -01/+00 1968 Ap 7 1s 0 - GMT R I 1916 o - Jun 3 24 1 S R I 1916 1917 - S 30 24 0 - R I 1917 o - Mar 31 24 1 S R I 1918 o - Mar 9 24 1 S R I 1918 o - O 6 24 0 - R I 1919 o - Mar 1 24 1 S R I 1919 o - O 4 24 0 - R I 1920 o - Mar 20 24 1 S R I 1920 o - S 18 24 0 - R I 1940 o - Jun 14 24 1 S R I 1942 o - N 2 2s 0 - R I 1943 o - Mar 29 2s 1 S R I 1943 o - O 4 2s 0 - R I 1944 o - Ap 2 2s 1 S R I 1944 o - S 17 2s 0 - R I 1945 o - Ap 2 2 1 S R I 1945 o - S 15 1 0 - R I 1946 o - Mar 17 2s 1 S R I 1946 o - O 6 2s 0 - R I 1947 o - Mar 16 0s 1 S R I 1947 o - O 5 0s 0 - R I 1948 o - F 29 2s 1 S R I 1948 o - O 3 2s 0 - R I 1966 1968 - May Su>=22 0s 1 S R I 1966 o - S 24 24 0 - R I 1967 1969 - S Su>=22 0s 0 - R I 1969 o - Jun 1 0s 1 S R I 1970 o - May 31 0s 1 S R I 1970 o - S 
lastSu 0s 0 - R I 1971 1972 - May Su>=22 0s 1 S R I 1971 o - S lastSu 0s 0 - R I 1972 o - O 1 0s 0 - R I 1973 o - Jun 3 0s 1 S R I 1973 1974 - S lastSu 0s 0 - R I 1974 o - May 26 0s 1 S R I 1975 o - Jun 1 0s 1 S R I 1975 1977 - S lastSu 0s 0 - R I 1976 o - May 30 0s 1 S R I 1977 1979 - May Su>=22 0s 1 S R I 1978 o - O 1 0s 0 - R I 1979 o - S 30 0s 0 - Z Europe/Rome 0:49:56 - LMT 1866 D 12 0:49:56 - RMT 1893 O 31 23:49:56 1 I CE%sT 1943 S 10 1 c CE%sT 1944 Jun 4 1 I CE%sT 1980 1 E CE%sT L Europe/Rome Europe/Vatican L Europe/Rome Europe/San_Marino R LV 1989 1996 - Mar lastSu 2s 1 S R LV 1989 1996 - S lastSu 2s 0 - Z Europe/Riga 1:36:34 - LMT 1880 1:36:34 - RMT 1918 Ap 15 2 1:36:34 1 LST 1918 S 16 3 1:36:34 - RMT 1919 Ap 1 2 1:36:34 1 LST 1919 May 22 3 1:36:34 - RMT 1926 May 11 2 - EET 1940 Au 5 3 - MSK 1941 Jul 1 c CE%sT 1944 O 13 3 R MSK/MSD 1989 Mar lastSu 2s 2 1 EEST 1989 S lastSu 2s 2 LV EE%sT 1997 Ja 21 2 E EE%sT 2000 F 29 2 - EET 2001 Ja 2 2 E EE%sT L Europe/Zurich Europe/Vaduz Z Europe/Vilnius 1:41:16 - LMT 1880 1:24 - WMT 1917 1:35:36 - KMT 1919 O 10 1 - CET 1920 Jul 12 2 - EET 1920 O 9 1 - CET 1940 Au 3 3 - MSK 1941 Jun 24 1 c CE%sT 1944 Au 3 R MSK/MSD 1989 Mar 26 2s 2 R EE%sT 1991 S 29 2s 2 c EE%sT 1998 2 - EET 1998 Mar 29 1u 1 E CE%sT 1999 O 31 1u 2 - EET 2003 2 E EE%sT R LX 1916 o - May 14 23 1 S R LX 1916 o - O 1 1 0 - R LX 1917 o - Ap 28 23 1 S R LX 1917 o - S 17 1 0 - R LX 1918 o - Ap M>=15 2s 1 S R LX 1918 o - S M>=15 2s 0 - R LX 1919 o - Mar 1 23 1 S R LX 1919 o - O 5 3 0 - R LX 1920 o - F 14 23 1 S R LX 1920 o - O 24 2 0 - R LX 1921 o - Mar 14 23 1 S R LX 1921 o - O 26 2 0 - R LX 1922 o - Mar 25 23 1 S R LX 1922 o - O Su>=2 1 0 - R LX 1923 o - Ap 21 23 1 S R LX 1923 o - O Su>=2 2 0 - R LX 1924 o - Mar 29 23 1 S R LX 1924 1928 - O Su>=2 1 0 - R LX 1925 o - Ap 5 23 1 S R LX 1926 o - Ap 17 23 1 S R LX 1927 o - Ap 9 23 1 S R LX 1928 o - Ap 14 23 1 S R LX 1929 o - Ap 20 23 1 S Z Europe/Luxembourg 0:24:36 - LMT 1904 Jun 1 LX CE%sT 1918 N 25 0 LX WE%sT 
1929 O 6 2s 0 b WE%sT 1940 May 14 3 1 c WE%sT 1944 S 18 3 1 b CE%sT 1977 1 E CE%sT R MT 1973 o - Mar 31 0s 1 S R MT 1973 o - S 29 0s 0 - R MT 1974 o - Ap 21 0s 1 S R MT 1974 o - S 16 0s 0 - R MT 1975 1979 - Ap Su>=15 2 1 S R MT 1975 1980 - S Su>=15 2 0 - R MT 1980 o - Mar 31 2 1 S Z Europe/Malta 0:58:4 - LMT 1893 N 2 0s 1 I CE%sT 1973 Mar 31 1 MT CE%sT 1981 1 E CE%sT R MD 1997 ma - Mar lastSu 2 1 S R MD 1997 ma - O lastSu 3 0 - Z Europe/Chisinau 1:55:20 - LMT 1880 1:55 - CMT 1918 F 15 1:44:24 - BMT 1931 Jul 24 2 z EE%sT 1940 Au 15 2 1 EEST 1941 Jul 17 1 c CE%sT 1944 Au 24 3 R MSK/MSD 1990 May 6 2 2 R EE%sT 1992 2 e EE%sT 1997 2 MD EE%sT Z Europe/Monaco 0:29:32 - LMT 1891 Mar 15 0:9:21 - PMT 1911 Mar 11 0 F WE%sT 1945 S 16 3 1 F CE%sT 1977 1 E CE%sT R N 1916 o - May 1 0 1 NST R N 1916 o - O 1 0 0 AMT R N 1917 o - Ap 16 2s 1 NST R N 1917 o - S 17 2s 0 AMT R N 1918 1921 - Ap M>=1 2s 1 NST R N 1918 1921 - S lastM 2s 0 AMT R N 1922 o - Mar lastSu 2s 1 NST R N 1922 1936 - O Su>=2 2s 0 AMT R N 1923 o - Jun F>=1 2s 1 NST R N 1924 o - Mar lastSu 2s 1 NST R N 1925 o - Jun F>=1 2s 1 NST R N 1926 1931 - May 15 2s 1 NST R N 1932 o - May 22 2s 1 NST R N 1933 1936 - May 15 2s 1 NST R N 1937 o - May 22 2s 1 NST R N 1937 o - Jul 1 0 1 S R N 1937 1939 - O Su>=2 2s 0 - R N 1938 1939 - May 15 2s 1 S R N 1945 o - Ap 2 2s 1 S R N 1945 o - S 16 2s 0 - Z Europe/Amsterdam 0:19:32 - LMT 1835 0:19:32 N %s 1937 Jul 0:20 N +0020/+0120 1940 May 16 1 c CE%sT 1945 Ap 2 2 1 N CE%sT 1977 1 E CE%sT R NO 1916 o - May 22 1 1 S R NO 1916 o - S 30 0 0 - R NO 1945 o - Ap 2 2s 1 S R NO 1945 o - O 1 2s 0 - R NO 1959 1964 - Mar Su>=15 2s 1 S R NO 1959 1965 - S Su>=15 2s 0 - R NO 1965 o - Ap 25 2s 1 S Z Europe/Oslo 0:43 - LMT 1895 1 NO CE%sT 1940 Au 10 23 1 c CE%sT 1945 Ap 2 2 1 NO CE%sT 1980 1 E CE%sT L Europe/Oslo Arctic/Longyearbyen R O 1918 1919 - S 16 2s 0 - R O 1919 o - Ap 15 2s 1 S R O 1944 o - Ap 3 2s 1 S R O 1944 o - O 4 2 0 - R O 1945 o - Ap 29 0 1 S R O 1945 o - N 1 0 0 - R O 1946 o - Ap 14 0s 1 S 
R O 1946 o - O 7 2s 0 - R O 1947 o - May 4 2s 1 S R O 1947 1949 - O Su>=1 2s 0 - R O 1948 o - Ap 18 2s 1 S R O 1949 o - Ap 10 2s 1 S R O 1957 o - Jun 2 1s 1 S R O 1957 1958 - S lastSu 1s 0 - R O 1958 o - Mar 30 1s 1 S R O 1959 o - May 31 1s 1 S R O 1959 1961 - O Su>=1 1s 0 - R O 1960 o - Ap 3 1s 1 S R O 1961 1964 - May lastSu 1s 1 S R O 1962 1964 - S lastSu 1s 0 - Z Europe/Warsaw 1:24 - LMT 1880 1:24 - WMT 1915 Au 5 1 c CE%sT 1918 S 16 3 2 O EE%sT 1922 Jun 1 O CE%sT 1940 Jun 23 2 1 c CE%sT 1944 O 1 O CE%sT 1977 1 W- CE%sT 1988 1 E CE%sT R p 1916 o - Jun 17 23 1 S R p 1916 o - N 1 1 0 - R p 1917 o - F 28 23s 1 S R p 1917 1921 - O 14 23s 0 - R p 1918 o - Mar 1 23s 1 S R p 1919 o - F 28 23s 1 S R p 1920 o - F 29 23s 1 S R p 1921 o - F 28 23s 1 S R p 1924 o - Ap 16 23s 1 S R p 1924 o - O 14 23s 0 - R p 1926 o - Ap 17 23s 1 S R p 1926 1929 - O Sa>=1 23s 0 - R p 1927 o - Ap 9 23s 1 S R p 1928 o - Ap 14 23s 1 S R p 1929 o - Ap 20 23s 1 S R p 1931 o - Ap 18 23s 1 S R p 1931 1932 - O Sa>=1 23s 0 - R p 1932 o - Ap 2 23s 1 S R p 1934 o - Ap 7 23s 1 S R p 1934 1938 - O Sa>=1 23s 0 - R p 1935 o - Mar 30 23s 1 S R p 1936 o - Ap 18 23s 1 S R p 1937 o - Ap 3 23s 1 S R p 1938 o - Mar 26 23s 1 S R p 1939 o - Ap 15 23s 1 S R p 1939 o - N 18 23s 0 - R p 1940 o - F 24 23s 1 S R p 1940 1941 - O 5 23s 0 - R p 1941 o - Ap 5 23s 1 S R p 1942 1945 - Mar Sa>=8 23s 1 S R p 1942 o - Ap 25 22s 2 M R p 1942 o - Au 15 22s 1 S R p 1942 1945 - O Sa>=24 23s 0 - R p 1943 o - Ap 17 22s 2 M R p 1943 1945 - Au Sa>=25 22s 1 S R p 1944 1945 - Ap Sa>=21 22s 2 M R p 1946 o - Ap Sa>=1 23s 1 S R p 1946 o - O Sa>=1 23s 0 - R p 1947 1949 - Ap Su>=1 2s 1 S R p 1947 1949 - O Su>=1 2s 0 - R p 1951 1965 - Ap Su>=1 2s 1 S R p 1951 1965 - O Su>=1 2s 0 - R p 1977 o - Mar 27 0s 1 S R p 1977 o - S 25 0s 0 - R p 1978 1979 - Ap Su>=1 0s 1 S R p 1978 o - O 1 0s 0 - R p 1979 1982 - S lastSu 1s 0 - R p 1980 o - Mar lastSu 0s 1 S R p 1981 1982 - Mar lastSu 1s 1 S R p 1983 o - Mar lastSu 2s 1 S Z Europe/Lisbon -0:36:45 - LMT 
1884 -0:36:45 - LMT 1912 Ja 1 0u 0 p WE%sT 1966 Ap 3 2 1 - CET 1976 S 26 1 0 p WE%sT 1983 S 25 1s 0 W- WE%sT 1992 S 27 1s 1 E CE%sT 1996 Mar 31 1u 0 E WE%sT Z Atlantic/Azores -1:42:40 - LMT 1884 -1:54:32 - HMT 1912 Ja 1 2u -2 p -02/-01 1942 Ap 25 22s -2 p +00 1942 Au 15 22s -2 p -02/-01 1943 Ap 17 22s -2 p +00 1943 Au 28 22s -2 p -02/-01 1944 Ap 22 22s -2 p +00 1944 Au 26 22s -2 p -02/-01 1945 Ap 21 22s -2 p +00 1945 Au 25 22s -2 p -02/-01 1966 Ap 3 2 -1 p -01/+00 1983 S 25 1s -1 W- -01/+00 1992 S 27 1s 0 E WE%sT 1993 Mar 28 1u -1 E -01/+00 Z Atlantic/Madeira -1:7:36 - LMT 1884 -1:7:36 - FMT 1912 Ja 1 1u -1 p -01/+00 1942 Ap 25 22s -1 p +01 1942 Au 15 22s -1 p -01/+00 1943 Ap 17 22s -1 p +01 1943 Au 28 22s -1 p -01/+00 1944 Ap 22 22s -1 p +01 1944 Au 26 22s -1 p -01/+00 1945 Ap 21 22s -1 p +01 1945 Au 25 22s -1 p -01/+00 1966 Ap 3 2 0 p WE%sT 1983 S 25 1s 0 E WE%sT R z 1932 o - May 21 0s 1 S R z 1932 1939 - O Su>=1 0s 0 - R z 1933 1939 - Ap Su>=2 0s 1 S R z 1979 o - May 27 0 1 S R z 1979 o - S lastSu 0 0 - R z 1980 o - Ap 5 23 1 S R z 1980 o - S lastSu 1 0 - R z 1991 1993 - Mar lastSu 0s 1 S R z 1991 1993 - S lastSu 0s 0 - Z Europe/Bucharest 1:44:24 - LMT 1891 O 1:44:24 - BMT 1931 Jul 24 2 z EE%sT 1981 Mar 29 2s 2 c EE%sT 1991 2 z EE%sT 1994 2 e EE%sT 1997 2 E EE%sT Z Europe/Kaliningrad 1:22 - LMT 1893 Ap 1 c CE%sT 1945 Ap 10 2 O EE%sT 1946 Ap 7 3 R MSK/MSD 1989 Mar 26 2s 2 R EE%sT 2011 Mar 27 2s 3 - +03 2014 O 26 2s 2 - EET Z Europe/Moscow 2:30:17 - LMT 1880 2:30:17 - MMT 1916 Jul 3 2:31:19 R %s 1919 Jul 1 0u 3 R %s 1921 O 3 R MSK/MSD 1922 O 2 - EET 1930 Jun 21 3 R MSK/MSD 1991 Mar 31 2s 2 R EE%sT 1992 Ja 19 2s 3 R MSK/MSD 2011 Mar 27 2s 4 - MSK 2014 O 26 2s 3 - MSK Z Europe/Simferopol 2:16:24 - LMT 1880 2:16 - SMT 1924 May 2 2 - EET 1930 Jun 21 3 - MSK 1941 N 1 c CE%sT 1944 Ap 13 3 R MSK/MSD 1990 3 - MSK 1990 Jul 1 2 2 - EET 1992 2 e EE%sT 1994 May 3 e MSK/MSD 1996 Mar 31 0s 3 1 MSD 1996 O 27 3s 3 R MSK/MSD 1997 3 - MSK 1997 Mar lastSu 1u 2 E EE%sT 2014 Mar 30 2 
4 - MSK 2014 O 26 2s 3 - MSK Z Europe/Astrakhan 3:12:12 - LMT 1924 May 3 - +03 1930 Jun 21 4 R +04/+05 1989 Mar 26 2s 3 R +03/+04 1991 Mar 31 2s 4 - +04 1992 Mar 29 2s 3 R +03/+04 2011 Mar 27 2s 4 - +04 2014 O 26 2s 3 - +03 2016 Mar 27 2s 4 - +04 Z Europe/Volgograd 2:57:40 - LMT 1920 Ja 3 3 - +03 1930 Jun 21 4 - +04 1961 N 11 4 R +04/+05 1988 Mar 27 2s 3 R +03/+04 1991 Mar 31 2s 4 - +04 1992 Mar 29 2s 3 R +03/+04 2011 Mar 27 2s 4 - +04 2014 O 26 2s 3 - +03 2018 O 28 2s 4 - +04 Z Europe/Saratov 3:4:18 - LMT 1919 Jul 1 0u 3 - +03 1930 Jun 21 4 R +04/+05 1988 Mar 27 2s 3 R +03/+04 1991 Mar 31 2s 4 - +04 1992 Mar 29 2s 3 R +03/+04 2011 Mar 27 2s 4 - +04 2014 O 26 2s 3 - +03 2016 D 4 2s 4 - +04 Z Europe/Kirov 3:18:48 - LMT 1919 Jul 1 0u 3 - +03 1930 Jun 21 4 R +04/+05 1989 Mar 26 2s 3 R +03/+04 1991 Mar 31 2s 4 - +04 1992 Mar 29 2s 3 R +03/+04 2011 Mar 27 2s 4 - +04 2014 O 26 2s 3 - +03 Z Europe/Samara 3:20:20 - LMT 1919 Jul 1 0u 3 - +03 1930 Jun 21 4 - +04 1935 Ja 27 4 R +04/+05 1989 Mar 26 2s 3 R +03/+04 1991 Mar 31 2s 2 R +02/+03 1991 S 29 2s 3 - +03 1991 O 20 3 4 R +04/+05 2010 Mar 28 2s 3 R +03/+04 2011 Mar 27 2s 4 - +04 Z Europe/Ulyanovsk 3:13:36 - LMT 1919 Jul 1 0u 3 - +03 1930 Jun 21 4 R +04/+05 1989 Mar 26 2s 3 R +03/+04 1991 Mar 31 2s 2 R +02/+03 1992 Ja 19 2s 3 R +03/+04 2011 Mar 27 2s 4 - +04 2014 O 26 2s 3 - +03 2016 Mar 27 2s 4 - +04 Z Asia/Yekaterinburg 4:2:33 - LMT 1916 Jul 3 3:45:5 - PMT 1919 Jul 15 4 4 - +04 1930 Jun 21 5 R +05/+06 1991 Mar 31 2s 4 R +04/+05 1992 Ja 19 2s 5 R +05/+06 2011 Mar 27 2s 6 - +06 2014 O 26 2s 5 - +05 Z Asia/Omsk 4:53:30 - LMT 1919 N 14 5 - +05 1930 Jun 21 6 R +06/+07 1991 Mar 31 2s 5 R +05/+06 1992 Ja 19 2s 6 R +06/+07 2011 Mar 27 2s 7 - +07 2014 O 26 2s 6 - +06 Z Asia/Barnaul 5:35 - LMT 1919 D 10 6 - +06 1930 Jun 21 7 R +07/+08 1991 Mar 31 2s 6 R +06/+07 1992 Ja 19 2s 7 R +07/+08 1995 May 28 6 R +06/+07 2011 Mar 27 2s 7 - +07 2014 O 26 2s 6 - +06 2016 Mar 27 2s 7 - +07 Z Asia/Novosibirsk 5:31:40 - LMT 1919 D 14 6 6 - +06 
1930 Jun 21 7 R +07/+08 1991 Mar 31 2s 6 R +06/+07 1992 Ja 19 2s 7 R +07/+08 1993 May 23 6 R +06/+07 2011 Mar 27 2s 7 - +07 2014 O 26 2s 6 - +06 2016 Jul 24 2s 7 - +07 Z Asia/Tomsk 5:39:51 - LMT 1919 D 22 6 - +06 1930 Jun 21 7 R +07/+08 1991 Mar 31 2s 6 R +06/+07 1992 Ja 19 2s 7 R +07/+08 2002 May 1 3 6 R +06/+07 2011 Mar 27 2s 7 - +07 2014 O 26 2s 6 - +06 2016 May 29 2s 7 - +07 Z Asia/Novokuznetsk 5:48:48 - LMT 1924 May 6 - +06 1930 Jun 21 7 R +07/+08 1991 Mar 31 2s 6 R +06/+07 1992 Ja 19 2s 7 R +07/+08 2010 Mar 28 2s 6 R +06/+07 2011 Mar 27 2s 7 - +07 Z Asia/Krasnoyarsk 6:11:26 - LMT 1920 Ja 6 6 - +06 1930 Jun 21 7 R +07/+08 1991 Mar 31 2s 6 R +06/+07 1992 Ja 19 2s 7 R +07/+08 2011 Mar 27 2s 8 - +08 2014 O 26 2s 7 - +07 Z Asia/Irkutsk 6:57:5 - LMT 1880 6:57:5 - IMT 1920 Ja 25 7 - +07 1930 Jun 21 8 R +08/+09 1991 Mar 31 2s 7 R +07/+08 1992 Ja 19 2s 8 R +08/+09 2011 Mar 27 2s 9 - +09 2014 O 26 2s 8 - +08 Z Asia/Chita 7:33:52 - LMT 1919 D 15 8 - +08 1930 Jun 21 9 R +09/+10 1991 Mar 31 2s 8 R +08/+09 1992 Ja 19 2s 9 R +09/+10 2011 Mar 27 2s 10 - +10 2014 O 26 2s 8 - +08 2016 Mar 27 2 9 - +09 Z Asia/Yakutsk 8:38:58 - LMT 1919 D 15 8 - +08 1930 Jun 21 9 R +09/+10 1991 Mar 31 2s 8 R +08/+09 1992 Ja 19 2s 9 R +09/+10 2011 Mar 27 2s 10 - +10 2014 O 26 2s 9 - +09 Z Asia/Vladivostok 8:47:31 - LMT 1922 N 15 9 - +09 1930 Jun 21 10 R +10/+11 1991 Mar 31 2s 9 R +09/+10 1992 Ja 19 2s 10 R +10/+11 2011 Mar 27 2s 11 - +11 2014 O 26 2s 10 - +10 Z Asia/Khandyga 9:2:13 - LMT 1919 D 15 8 - +08 1930 Jun 21 9 R +09/+10 1991 Mar 31 2s 8 R +08/+09 1992 Ja 19 2s 9 R +09/+10 2004 10 R +10/+11 2011 Mar 27 2s 11 - +11 2011 S 13 0s 10 - +10 2014 O 26 2s 9 - +09 Z Asia/Sakhalin 9:30:48 - LMT 1905 Au 23 9 - +09 1945 Au 25 11 R +11/+12 1991 Mar 31 2s 10 R +10/+11 1992 Ja 19 2s 11 R +11/+12 1997 Mar lastSu 2s 10 R +10/+11 2011 Mar 27 2s 11 - +11 2014 O 26 2s 10 - +10 2016 Mar 27 2s 11 - +11 Z Asia/Magadan 10:3:12 - LMT 1924 May 2 10 - +10 1930 Jun 21 11 R +11/+12 1991 Mar 31 2s 10 R +10/+11 1992 
Ja 19 2s 11 R +11/+12 2011 Mar 27 2s 12 - +12 2014 O 26 2s 10 - +10 2016 Ap 24 2s 11 - +11 Z Asia/Srednekolymsk 10:14:52 - LMT 1924 May 2 10 - +10 1930 Jun 21 11 R +11/+12 1991 Mar 31 2s 10 R +10/+11 1992 Ja 19 2s 11 R +11/+12 2011 Mar 27 2s 12 - +12 2014 O 26 2s 11 - +11 Z Asia/Ust-Nera 9:32:54 - LMT 1919 D 15 8 - +08 1930 Jun 21 9 R +09/+10 1981 Ap 11 R +11/+12 1991 Mar 31 2s 10 R +10/+11 1992 Ja 19 2s 11 R +11/+12 2011 Mar 27 2s 12 - +12 2011 S 13 0s 11 - +11 2014 O 26 2s 10 - +10 Z Asia/Kamchatka 10:34:36 - LMT 1922 N 10 11 - +11 1930 Jun 21 12 R +12/+13 1991 Mar 31 2s 11 R +11/+12 1992 Ja 19 2s 12 R +12/+13 2010 Mar 28 2s 11 R +11/+12 2011 Mar 27 2s 12 - +12 Z Asia/Anadyr 11:49:56 - LMT 1924 May 2 12 - +12 1930 Jun 21 13 R +13/+14 1982 Ap 1 0s 12 R +12/+13 1991 Mar 31 2s 11 R +11/+12 1992 Ja 19 2s 12 R +12/+13 2010 Mar 28 2s 11 R +11/+12 2011 Mar 27 2s 12 - +12 Z Europe/Belgrade 1:22 - LMT 1884 1 - CET 1941 Ap 18 23 1 c CE%sT 1945 1 - CET 1945 May 8 2s 1 1 CEST 1945 S 16 2s 1 - CET 1982 N 27 1 E CE%sT L Europe/Belgrade Europe/Ljubljana L Europe/Belgrade Europe/Podgorica L Europe/Belgrade Europe/Sarajevo L Europe/Belgrade Europe/Skopje L Europe/Belgrade Europe/Zagreb L Europe/Prague Europe/Bratislava R s 1918 o - Ap 15 23 1 S R s 1918 1919 - O 6 24s 0 - R s 1919 o - Ap 6 23 1 S R s 1924 o - Ap 16 23 1 S R s 1924 o - O 4 24s 0 - R s 1926 o - Ap 17 23 1 S R s 1926 1929 - O Sa>=1 24s 0 - R s 1927 o - Ap 9 23 1 S R s 1928 o - Ap 15 0 1 S R s 1929 o - Ap 20 23 1 S R s 1937 o - Jun 16 23 1 S R s 1937 o - O 2 24s 0 - R s 1938 o - Ap 2 23 1 S R s 1938 o - Ap 30 23 2 M R s 1938 o - O 2 24 1 S R s 1939 o - O 7 24s 0 - R s 1942 o - May 2 23 1 S R s 1942 o - S 1 1 0 - R s 1943 1946 - Ap Sa>=13 23 1 S R s 1943 1944 - O Su>=1 1 0 - R s 1945 1946 - S lastSu 1 0 - R s 1949 o - Ap 30 23 1 S R s 1949 o - O 2 1 0 - R s 1974 1975 - Ap Sa>=12 23 1 S R s 1974 1975 - O Su>=1 1 0 - R s 1976 o - Mar 27 23 1 S R s 1976 1977 - S lastSu 1 0 - R s 1977 o - Ap 2 23 1 S R s 1978 o - Ap 2 2s 
1 S R s 1978 o - O 1 2s 0 - R Sp 1967 o - Jun 3 12 1 S R Sp 1967 o - O 1 0 0 - R Sp 1974 o - Jun 24 0 1 S R Sp 1974 o - S 1 0 0 - R Sp 1976 1977 - May 1 0 1 S R Sp 1976 o - Au 1 0 0 - R Sp 1977 o - S 28 0 0 - R Sp 1978 o - Jun 1 0 1 S R Sp 1978 o - Au 4 0 0 - Z Europe/Madrid -0:14:44 - LMT 1900 D 31 23:45:16 0 s WE%sT 1940 Mar 16 23 1 s CE%sT 1979 1 E CE%sT Z Africa/Ceuta -0:21:16 - LMT 1900 D 31 23:38:44 0 - WET 1918 May 6 23 0 1 WEST 1918 O 7 23 0 - WET 1924 0 s WE%sT 1929 0 - WET 1967 0 Sp WE%sT 1984 Mar 16 1 - CET 1986 1 E CE%sT Z Atlantic/Canary -1:1:36 - LMT 1922 Mar -1 - -01 1946 S 30 1 0 - WET 1980 Ap 6 0s 0 1 WEST 1980 S 28 1u 0 E WE%sT Z Europe/Stockholm 1:12:12 - LMT 1879 1:0:14 - SET 1900 1 - CET 1916 May 14 23 1 1 CEST 1916 O 1 1 1 - CET 1980 1 E CE%sT R CH 1941 1942 - May M>=1 1 1 S R CH 1941 1942 - O M>=1 2 0 - Z Europe/Zurich 0:34:8 - LMT 1853 Jul 16 0:29:46 - BMT 1894 Jun 1 CH CE%sT 1981 1 E CE%sT R T 1916 o - May 1 0 1 S R T 1916 o - O 1 0 0 - R T 1920 o - Mar 28 0 1 S R T 1920 o - O 25 0 0 - R T 1921 o - Ap 3 0 1 S R T 1921 o - O 3 0 0 - R T 1922 o - Mar 26 0 1 S R T 1922 o - O 8 0 0 - R T 1924 o - May 13 0 1 S R T 1924 1925 - O 1 0 0 - R T 1925 o - May 1 0 1 S R T 1940 o - Jul 1 0 1 S R T 1940 o - O 6 0 0 - R T 1940 o - D 1 0 1 S R T 1941 o - S 21 0 0 - R T 1942 o - Ap 1 0 1 S R T 1945 o - O 8 0 0 - R T 1946 o - Jun 1 0 1 S R T 1946 o - O 1 0 0 - R T 1947 1948 - Ap Su>=16 0 1 S R T 1947 1951 - O Su>=2 0 0 - R T 1949 o - Ap 10 0 1 S R T 1950 o - Ap 16 0 1 S R T 1951 o - Ap 22 0 1 S R T 1962 o - Jul 15 0 1 S R T 1963 o - O 30 0 0 - R T 1964 o - May 15 0 1 S R T 1964 o - O 1 0 0 - R T 1973 o - Jun 3 1 1 S R T 1973 1976 - O Su>=31 2 0 - R T 1974 o - Mar 31 2 1 S R T 1975 o - Mar 22 2 1 S R T 1976 o - Mar 21 2 1 S R T 1977 1978 - Ap Su>=1 2 1 S R T 1977 1978 - O Su>=15 2 0 - R T 1978 o - Jun 29 0 0 - R T 1983 o - Jul 31 2 1 S R T 1983 o - O 2 2 0 - R T 1985 o - Ap 20 1s 1 S R T 1985 o - S 28 1s 0 - R T 1986 1993 - Mar lastSu 1s 1 S R T 1986 1995 - S 
lastSu 1s 0 - R T 1994 o - Mar 20 1s 1 S R T 1995 2006 - Mar lastSu 1s 1 S R T 1996 2006 - O lastSu 1s 0 - Z Europe/Istanbul 1:55:52 - LMT 1880 1:56:56 - IMT 1910 O 2 T EE%sT 1978 Jun 29 3 T +03/+04 1984 N 1 2 2 T EE%sT 2007 2 E EE%sT 2011 Mar 27 1u 2 - EET 2011 Mar 28 1u 2 E EE%sT 2014 Mar 30 1u 2 - EET 2014 Mar 31 1u 2 E EE%sT 2015 O 25 1u 2 1 EEST 2015 N 8 1u 2 E EE%sT 2016 S 7 3 - +03 L Europe/Istanbul Asia/Istanbul Z Europe/Kiev 2:2:4 - LMT 1880 2:2:4 - KMT 1924 May 2 2 - EET 1930 Jun 21 3 - MSK 1941 S 20 1 c CE%sT 1943 N 6 3 R MSK/MSD 1990 Jul 1 2 2 1 EEST 1991 S 29 3 2 e EE%sT 1995 2 E EE%sT Z Europe/Uzhgorod 1:29:12 - LMT 1890 O 1 - CET 1940 1 c CE%sT 1944 O 1 1 CEST 1944 O 26 1 - CET 1945 Jun 29 3 R MSK/MSD 1990 3 - MSK 1990 Jul 1 2 1 - CET 1991 Mar 31 3 2 - EET 1992 2 e EE%sT 1995 2 E EE%sT Z Europe/Zaporozhye 2:20:40 - LMT 1880 2:20 - +0220 1924 May 2 2 - EET 1930 Jun 21 3 - MSK 1941 Au 25 1 c CE%sT 1943 O 25 3 R MSK/MSD 1991 Mar 31 2 2 e EE%sT 1995 2 E EE%sT R u 1918 1919 - Mar lastSu 2 1 D R u 1918 1919 - O lastSu 2 0 S R u 1942 o - F 9 2 1 W R u 1945 o - Au 14 23u 1 P R u 1945 o - S 30 2 0 S R u 1967 2006 - O lastSu 2 0 S R u 1967 1973 - Ap lastSu 2 1 D R u 1974 o - Ja 6 2 1 D R u 1975 o - F lastSu 2 1 D R u 1976 1986 - Ap lastSu 2 1 D R u 1987 2006 - Ap Su>=1 2 1 D R u 2007 ma - Mar Su>=8 2 1 D R u 2007 ma - N Su>=1 2 0 S Z EST -5 - EST Z MST -7 - MST Z HST -10 - HST Z EST5EDT -5 u E%sT Z CST6CDT -6 u C%sT Z MST7MDT -7 u M%sT Z PST8PDT -8 u P%sT R NY 1920 o - Mar lastSu 2 1 D R NY 1920 o - O lastSu 2 0 S R NY 1921 1966 - Ap lastSu 2 1 D R NY 1921 1954 - S lastSu 2 0 S R NY 1955 1966 - O lastSu 2 0 S Z America/New_York -4:56:2 - LMT 1883 N 18 12:3:58 -5 u E%sT 1920 -5 NY E%sT 1942 -5 u E%sT 1946 -5 NY E%sT 1967 -5 u E%sT R Ch 1920 o - Jun 13 2 1 D R Ch 1920 1921 - O lastSu 2 0 S R Ch 1921 o - Mar lastSu 2 1 D R Ch 1922 1966 - Ap lastSu 2 1 D R Ch 1922 1954 - S lastSu 2 0 S R Ch 1955 1966 - O lastSu 2 0 S Z America/Chicago -5:50:36 - LMT 1883 N 18 
12:9:24 -6 u C%sT 1920 -6 Ch C%sT 1936 Mar 1 2 -5 - EST 1936 N 15 2 -6 Ch C%sT 1942 -6 u C%sT 1946 -6 Ch C%sT 1967 -6 u C%sT Z America/North_Dakota/Center -6:45:12 - LMT 1883 N 18 12:14:48 -7 u M%sT 1992 O 25 2 -6 u C%sT Z America/North_Dakota/New_Salem -6:45:39 - LMT 1883 N 18 12:14:21 -7 u M%sT 2003 O 26 2 -6 u C%sT Z America/North_Dakota/Beulah -6:47:7 - LMT 1883 N 18 12:12:53 -7 u M%sT 2010 N 7 2 -6 u C%sT R De 1920 1921 - Mar lastSu 2 1 D R De 1920 o - O lastSu 2 0 S R De 1921 o - May 22 2 0 S R De 1965 1966 - Ap lastSu 2 1 D R De 1965 1966 - O lastSu 2 0 S Z America/Denver -6:59:56 - LMT 1883 N 18 12:0:4 -7 u M%sT 1920 -7 De M%sT 1942 -7 u M%sT 1946 -7 De M%sT 1967 -7 u M%sT R CA 1948 o - Mar 14 2:1 1 D R CA 1949 o - Ja 1 2 0 S R CA 1950 1966 - Ap lastSu 1 1 D R CA 1950 1961 - S lastSu 2 0 S R CA 1962 1966 - O lastSu 2 0 S Z America/Los_Angeles -7:52:58 - LMT 1883 N 18 12:7:2 -8 u P%sT 1946 -8 CA P%sT 1967 -8 u P%sT Z America/Juneau 15:2:19 - LMT 1867 O 19 15:33:32 -8:57:41 - LMT 1900 Au 20 12 -8 - PST 1942 -8 u P%sT 1946 -8 - PST 1969 -8 u P%sT 1980 Ap 27 2 -9 u Y%sT 1980 O 26 2 -8 u P%sT 1983 O 30 2 -9 u Y%sT 1983 N 30 -9 u AK%sT Z America/Sitka 14:58:47 - LMT 1867 O 19 15:30 -9:1:13 - LMT 1900 Au 20 12 -8 - PST 1942 -8 u P%sT 1946 -8 - PST 1969 -8 u P%sT 1983 O 30 2 -9 u Y%sT 1983 N 30 -9 u AK%sT Z America/Metlakatla 15:13:42 - LMT 1867 O 19 15:44:55 -8:46:18 - LMT 1900 Au 20 12 -8 - PST 1942 -8 u P%sT 1946 -8 - PST 1969 -8 u P%sT 1983 O 30 2 -8 - PST 2015 N 1 2 -9 u AK%sT 2018 N 4 2 -8 - PST 2019 Ja 20 2 -9 u AK%sT Z America/Yakutat 14:41:5 - LMT 1867 O 19 15:12:18 -9:18:55 - LMT 1900 Au 20 12 -9 - YST 1942 -9 u Y%sT 1946 -9 - YST 1969 -9 u Y%sT 1983 N 30 -9 u AK%sT Z America/Anchorage 14:0:24 - LMT 1867 O 19 14:31:37 -9:59:36 - LMT 1900 Au 20 12 -10 - AST 1942 -10 u A%sT 1967 Ap -10 - AHST 1969 -10 u AH%sT 1983 O 30 2 -9 u Y%sT 1983 N 30 -9 u AK%sT Z America/Nome 12:58:22 - LMT 1867 O 19 13:29:35 -11:1:38 - LMT 1900 Au 20 12 -11 - NST 1942 -11 u N%sT 
1946 -11 - NST 1967 Ap -11 - BST 1969 -11 u B%sT 1983 O 30 2 -9 u Y%sT 1983 N 30 -9 u AK%sT Z America/Adak 12:13:22 - LMT 1867 O 19 12:44:35 -11:46:38 - LMT 1900 Au 20 12 -11 - NST 1942 -11 u N%sT 1946 -11 - NST 1967 Ap -11 - BST 1969 -11 u B%sT 1983 O 30 2 -10 u AH%sT 1983 N 30 -10 u H%sT Z Pacific/Honolulu -10:31:26 - LMT 1896 Ja 13 12 -10:30 - HST 1933 Ap 30 2 -10:30 1 HDT 1933 May 21 12 -10:30 u H%sT 1947 Jun 8 2 -10 - HST Z America/Phoenix -7:28:18 - LMT 1883 N 18 11:31:42 -7 u M%sT 1944 Ja 1 0:1 -7 - MST 1944 Ap 1 0:1 -7 u M%sT 1944 O 1 0:1 -7 - MST 1967 -7 u M%sT 1968 Mar 21 -7 - MST Z America/Boise -7:44:49 - LMT 1883 N 18 12:15:11 -8 u P%sT 1923 May 13 2 -7 u M%sT 1974 -7 - MST 1974 F 3 2 -7 u M%sT R In 1941 o - Jun 22 2 1 D R In 1941 1954 - S lastSu 2 0 S R In 1946 1954 - Ap lastSu 2 1 D Z America/Indiana/Indianapolis -5:44:38 - LMT 1883 N 18 12:15:22 -6 u C%sT 1920 -6 In C%sT 1942 -6 u C%sT 1946 -6 In C%sT 1955 Ap 24 2 -5 - EST 1957 S 29 2 -6 - CST 1958 Ap 27 2 -5 - EST 1969 -5 u E%sT 1971 -5 - EST 2006 -5 u E%sT R Ma 1951 o - Ap lastSu 2 1 D R Ma 1951 o - S lastSu 2 0 S R Ma 1954 1960 - Ap lastSu 2 1 D R Ma 1954 1960 - S lastSu 2 0 S Z America/Indiana/Marengo -5:45:23 - LMT 1883 N 18 12:14:37 -6 u C%sT 1951 -6 Ma C%sT 1961 Ap 30 2 -5 - EST 1969 -5 u E%sT 1974 Ja 6 2 -6 1 CDT 1974 O 27 2 -5 u E%sT 1976 -5 - EST 2006 -5 u E%sT R V 1946 o - Ap lastSu 2 1 D R V 1946 o - S lastSu 2 0 S R V 1953 1954 - Ap lastSu 2 1 D R V 1953 1959 - S lastSu 2 0 S R V 1955 o - May 1 0 1 D R V 1956 1963 - Ap lastSu 2 1 D R V 1960 o - O lastSu 2 0 S R V 1961 o - S lastSu 2 0 S R V 1962 1963 - O lastSu 2 0 S Z America/Indiana/Vincennes -5:50:7 - LMT 1883 N 18 12:9:53 -6 u C%sT 1946 -6 V C%sT 1964 Ap 26 2 -5 - EST 1969 -5 u E%sT 1971 -5 - EST 2006 Ap 2 2 -6 u C%sT 2007 N 4 2 -5 u E%sT R Pe 1955 o - May 1 0 1 D R Pe 1955 1960 - S lastSu 2 0 S R Pe 1956 1963 - Ap lastSu 2 1 D R Pe 1961 1963 - O lastSu 2 0 S Z America/Indiana/Tell_City -5:47:3 - LMT 1883 N 18 12:12:57 -6 u C%sT 
1946 -6 Pe C%sT 1964 Ap 26 2 -5 - EST 1967 O 29 2 -6 u C%sT 1969 Ap 27 2 -5 u E%sT 1971 -5 - EST 2006 Ap 2 2 -6 u C%sT R Pi 1955 o - May 1 0 1 D R Pi 1955 1960 - S lastSu 2 0 S R Pi 1956 1964 - Ap lastSu 2 1 D R Pi 1961 1964 - O lastSu 2 0 S Z America/Indiana/Petersburg -5:49:7 - LMT 1883 N 18 12:10:53 -6 u C%sT 1955 -6 Pi C%sT 1965 Ap 25 2 -5 - EST 1966 O 30 2 -6 u C%sT 1977 O 30 2 -5 - EST 2006 Ap 2 2 -6 u C%sT 2007 N 4 2 -5 u E%sT R St 1947 1961 - Ap lastSu 2 1 D R St 1947 1954 - S lastSu 2 0 S R St 1955 1956 - O lastSu 2 0 S R St 1957 1958 - S lastSu 2 0 S R St 1959 1961 - O lastSu 2 0 S Z America/Indiana/Knox -5:46:30 - LMT 1883 N 18 12:13:30 -6 u C%sT 1947 -6 St C%sT 1962 Ap 29 2 -5 - EST 1963 O 27 2 -6 u C%sT 1991 O 27 2 -5 - EST 2006 Ap 2 2 -6 u C%sT R Pu 1946 1960 - Ap lastSu 2 1 D R Pu 1946 1954 - S lastSu 2 0 S R Pu 1955 1956 - O lastSu 2 0 S R Pu 1957 1960 - S lastSu 2 0 S Z America/Indiana/Winamac -5:46:25 - LMT 1883 N 18 12:13:35 -6 u C%sT 1946 -6 Pu C%sT 1961 Ap 30 2 -5 - EST 1969 -5 u E%sT 1971 -5 - EST 2006 Ap 2 2 -6 u C%sT 2007 Mar 11 2 -5 u E%sT Z America/Indiana/Vevay -5:40:16 - LMT 1883 N 18 12:19:44 -6 u C%sT 1954 Ap 25 2 -5 - EST 1969 -5 u E%sT 1973 -5 - EST 2006 -5 u E%sT R v 1921 o - May 1 2 1 D R v 1921 o - S 1 2 0 S R v 1941 o - Ap lastSu 2 1 D R v 1941 o - S lastSu 2 0 S R v 1946 o - Ap lastSu 0:1 1 D R v 1946 o - Jun 2 2 0 S R v 1950 1961 - Ap lastSu 2 1 D R v 1950 1955 - S lastSu 2 0 S R v 1956 1961 - O lastSu 2 0 S Z America/Kentucky/Louisville -5:43:2 - LMT 1883 N 18 12:16:58 -6 u C%sT 1921 -6 v C%sT 1942 -6 u C%sT 1946 -6 v C%sT 1961 Jul 23 2 -5 - EST 1968 -5 u E%sT 1974 Ja 6 2 -6 1 CDT 1974 O 27 2 -5 u E%sT Z America/Kentucky/Monticello -5:39:24 - LMT 1883 N 18 12:20:36 -6 u C%sT 1946 -6 - CST 1968 -6 u C%sT 2000 O 29 2 -5 u E%sT R Dt 1948 o - Ap lastSu 2 1 D R Dt 1948 o - S lastSu 2 0 S Z America/Detroit -5:32:11 - LMT 1905 -6 - CST 1915 May 15 2 -5 - EST 1942 -5 u E%sT 1946 -5 Dt E%sT 1967 Jun 14 0:1 -5 u E%sT 1969 -5 - EST 1973 
-5 u E%sT 1975 -5 - EST 1975 Ap 27 2 -5 u E%sT R Me 1946 o - Ap lastSu 2 1 D R Me 1946 o - S lastSu 2 0 S R Me 1966 o - Ap lastSu 2 1 D R Me 1966 o - O lastSu 2 0 S Z America/Menominee -5:50:27 - LMT 1885 S 18 12 -6 u C%sT 1946 -6 Me C%sT 1969 Ap 27 2 -5 - EST 1973 Ap 29 2 -6 u C%sT R C 1918 o - Ap 14 2 1 D R C 1918 o - O 27 2 0 S R C 1942 o - F 9 2 1 W R C 1945 o - Au 14 23u 1 P R C 1945 o - S 30 2 0 S R C 1974 1986 - Ap lastSu 2 1 D R C 1974 2006 - O lastSu 2 0 S R C 1987 2006 - Ap Su>=1 2 1 D R C 2007 ma - Mar Su>=8 2 1 D R C 2007 ma - N Su>=1 2 0 S R j 1917 o - Ap 8 2 1 D R j 1917 o - S 17 2 0 S R j 1919 o - May 5 23 1 D R j 1919 o - Au 12 23 0 S R j 1920 1935 - May Su>=1 23 1 D R j 1920 1935 - O lastSu 23 0 S R j 1936 1941 - May M>=9 0 1 D R j 1936 1941 - O M>=2 0 0 S R j 1946 1950 - May Su>=8 2 1 D R j 1946 1950 - O Su>=2 2 0 S R j 1951 1986 - Ap lastSu 2 1 D R j 1951 1959 - S lastSu 2 0 S R j 1960 1986 - O lastSu 2 0 S R j 1987 o - Ap Su>=1 0:1 1 D R j 1987 2006 - O lastSu 0:1 0 S R j 1988 o - Ap Su>=1 0:1 2 DD R j 1989 2006 - Ap Su>=1 0:1 1 D R j 2007 2011 - Mar Su>=8 0:1 1 D R j 2007 2010 - N Su>=1 0:1 0 S Z America/St_Johns -3:30:52 - LMT 1884 -3:30:52 j N%sT 1918 -3:30:52 C N%sT 1919 -3:30:52 j N%sT 1935 Mar 30 -3:30 j N%sT 1942 May 11 -3:30 C N%sT 1946 -3:30 j N%sT 2011 N -3:30 C N%sT Z America/Goose_Bay -4:1:40 - LMT 1884 -3:30:52 - NST 1918 -3:30:52 C N%sT 1919 -3:30:52 - NST 1935 Mar 30 -3:30 - NST 1936 -3:30 j N%sT 1942 May 11 -3:30 C N%sT 1946 -3:30 j N%sT 1966 Mar 15 2 -4 j A%sT 2011 N -4 C A%sT R H 1916 o - Ap 1 0 1 D R H 1916 o - O 1 0 0 S R H 1920 o - May 9 0 1 D R H 1920 o - Au 29 0 0 S R H 1921 o - May 6 0 1 D R H 1921 1922 - S 5 0 0 S R H 1922 o - Ap 30 0 1 D R H 1923 1925 - May Su>=1 0 1 D R H 1923 o - S 4 0 0 S R H 1924 o - S 15 0 0 S R H 1925 o - S 28 0 0 S R H 1926 o - May 16 0 1 D R H 1926 o - S 13 0 0 S R H 1927 o - May 1 0 1 D R H 1927 o - S 26 0 0 S R H 1928 1931 - May Su>=8 0 1 D R H 1928 o - S 9 0 0 S R H 1929 o - S 3 0 0 S R H 
1930 o - S 15 0 0 S R H 1931 1932 - S M>=24 0 0 S R H 1932 o - May 1 0 1 D R H 1933 o - Ap 30 0 1 D R H 1933 o - O 2 0 0 S R H 1934 o - May 20 0 1 D R H 1934 o - S 16 0 0 S R H 1935 o - Jun 2 0 1 D R H 1935 o - S 30 0 0 S R H 1936 o - Jun 1 0 1 D R H 1936 o - S 14 0 0 S R H 1937 1938 - May Su>=1 0 1 D R H 1937 1941 - S M>=24 0 0 S R H 1939 o - May 28 0 1 D R H 1940 1941 - May Su>=1 0 1 D R H 1946 1949 - Ap lastSu 2 1 D R H 1946 1949 - S lastSu 2 0 S R H 1951 1954 - Ap lastSu 2 1 D R H 1951 1954 - S lastSu 2 0 S R H 1956 1959 - Ap lastSu 2 1 D R H 1956 1959 - S lastSu 2 0 S R H 1962 1973 - Ap lastSu 2 1 D R H 1962 1973 - O lastSu 2 0 S Z America/Halifax -4:14:24 - LMT 1902 Jun 15 -4 H A%sT 1918 -4 C A%sT 1919 -4 H A%sT 1942 F 9 2s -4 C A%sT 1946 -4 H A%sT 1974 -4 C A%sT Z America/Glace_Bay -3:59:48 - LMT 1902 Jun 15 -4 C A%sT 1953 -4 H A%sT 1954 -4 - AST 1972 -4 H A%sT 1974 -4 C A%sT R o 1933 1935 - Jun Su>=8 1 1 D R o 1933 1935 - S Su>=8 1 0 S R o 1936 1938 - Jun Su>=1 1 1 D R o 1936 1938 - S Su>=1 1 0 S R o 1939 o - May 27 1 1 D R o 1939 1941 - S Sa>=21 1 0 S R o 1940 o - May 19 1 1 D R o 1941 o - May 4 1 1 D R o 1946 1972 - Ap lastSu 2 1 D R o 1946 1956 - S lastSu 2 0 S R o 1957 1972 - O lastSu 2 0 S R o 1993 2006 - Ap Su>=1 0:1 1 D R o 1993 2006 - O lastSu 0:1 0 S Z America/Moncton -4:19:8 - LMT 1883 D 9 -5 - EST 1902 Jun 15 -4 C A%sT 1933 -4 o A%sT 1942 -4 C A%sT 1946 -4 o A%sT 1973 -4 C A%sT 1993 -4 o A%sT 2007 -4 C A%sT Z America/Blanc-Sablon -3:48:28 - LMT 1884 -4 C A%sT 1970 -4 - AST R t 1919 o - Mar 30 23:30 1 D R t 1919 o - O 26 0 0 S R t 1920 o - May 2 2 1 D R t 1920 o - S 26 0 0 S R t 1921 o - May 15 2 1 D R t 1921 o - S 15 2 0 S R t 1922 1923 - May Su>=8 2 1 D R t 1922 1926 - S Su>=15 2 0 S R t 1924 1927 - May Su>=1 2 1 D R t 1927 1937 - S Su>=25 2 0 S R t 1928 1937 - Ap Su>=25 2 1 D R t 1938 1940 - Ap lastSu 2 1 D R t 1938 1939 - S lastSu 2 0 S R t 1945 1946 - S lastSu 2 0 S R t 1946 o - Ap lastSu 2 1 D R t 1947 1949 - Ap lastSu 0 1 D R t 1947 1948 - 
S lastSu 0 0 S R t 1949 o - N lastSu 0 0 S R t 1950 1973 - Ap lastSu 2 1 D R t 1950 o - N lastSu 2 0 S R t 1951 1956 - S lastSu 2 0 S R t 1957 1973 - O lastSu 2 0 S Z America/Toronto -5:17:32 - LMT 1895 -5 C E%sT 1919 -5 t E%sT 1942 F 9 2s -5 C E%sT 1946 -5 t E%sT 1974 -5 C E%sT Z America/Thunder_Bay -5:57 - LMT 1895 -6 - CST 1910 -5 - EST 1942 -5 C E%sT 1970 -5 t E%sT 1973 -5 - EST 1974 -5 C E%sT Z America/Nipigon -5:53:4 - LMT 1895 -5 C E%sT 1940 S 29 -5 1 EDT 1942 F 9 2s -5 C E%sT Z America/Rainy_River -6:18:16 - LMT 1895 -6 C C%sT 1940 S 29 -6 1 CDT 1942 F 9 2s -6 C C%sT Z America/Atikokan -6:6:28 - LMT 1895 -6 C C%sT 1940 S 29 -6 1 CDT 1942 F 9 2s -6 C C%sT 1945 S 30 2 -5 - EST R W 1916 o - Ap 23 0 1 D R W 1916 o - S 17 0 0 S R W 1918 o - Ap 14 2 1 D R W 1918 o - O 27 2 0 S R W 1937 o - May 16 2 1 D R W 1937 o - S 26 2 0 S R W 1942 o - F 9 2 1 W R W 1945 o - Au 14 23u 1 P R W 1945 o - S lastSu 2 0 S R W 1946 o - May 12 2 1 D R W 1946 o - O 13 2 0 S R W 1947 1949 - Ap lastSu 2 1 D R W 1947 1949 - S lastSu 2 0 S R W 1950 o - May 1 2 1 D R W 1950 o - S 30 2 0 S R W 1951 1960 - Ap lastSu 2 1 D R W 1951 1958 - S lastSu 2 0 S R W 1959 o - O lastSu 2 0 S R W 1960 o - S lastSu 2 0 S R W 1963 o - Ap lastSu 2 1 D R W 1963 o - S 22 2 0 S R W 1966 1986 - Ap lastSu 2s 1 D R W 1966 2005 - O lastSu 2s 0 S R W 1987 2005 - Ap Su>=1 2s 1 D Z America/Winnipeg -6:28:36 - LMT 1887 Jul 16 -6 W C%sT 2006 -6 C C%sT R r 1918 o - Ap 14 2 1 D R r 1918 o - O 27 2 0 S R r 1930 1934 - May Su>=1 0 1 D R r 1930 1934 - O Su>=1 0 0 S R r 1937 1941 - Ap Su>=8 0 1 D R r 1937 o - O Su>=8 0 0 S R r 1938 o - O Su>=1 0 0 S R r 1939 1941 - O Su>=8 0 0 S R r 1942 o - F 9 2 1 W R r 1945 o - Au 14 23u 1 P R r 1945 o - S lastSu 2 0 S R r 1946 o - Ap Su>=8 2 1 D R r 1946 o - O Su>=8 2 0 S R r 1947 1957 - Ap lastSu 2 1 D R r 1947 1957 - S lastSu 2 0 S R r 1959 o - Ap lastSu 2 1 D R r 1959 o - O lastSu 2 0 S R Sw 1957 o - Ap lastSu 2 1 D R Sw 1957 o - O lastSu 2 0 S R Sw 1959 1961 - Ap lastSu 2 1 D R Sw 
1959 o - O lastSu 2 0 S R Sw 1960 1961 - S lastSu 2 0 S Z America/Regina -6:58:36 - LMT 1905 S -7 r M%sT 1960 Ap lastSu 2 -6 - CST Z America/Swift_Current -7:11:20 - LMT 1905 S -7 C M%sT 1946 Ap lastSu 2 -7 r M%sT 1950 -7 Sw M%sT 1972 Ap lastSu 2 -6 - CST R Ed 1918 1919 - Ap Su>=8 2 1 D R Ed 1918 o - O 27 2 0 S R Ed 1919 o - May 27 2 0 S R Ed 1920 1923 - Ap lastSu 2 1 D R Ed 1920 o - O lastSu 2 0 S R Ed 1921 1923 - S lastSu 2 0 S R Ed 1942 o - F 9 2 1 W R Ed 1945 o - Au 14 23u 1 P R Ed 1945 o - S lastSu 2 0 S R Ed 1947 o - Ap lastSu 2 1 D R Ed 1947 o - S lastSu 2 0 S R Ed 1972 1986 - Ap lastSu 2 1 D R Ed 1972 2006 - O lastSu 2 0 S Z America/Edmonton -7:33:52 - LMT 1906 S -7 Ed M%sT 1987 -7 C M%sT R Va 1918 o - Ap 14 2 1 D R Va 1918 o - O 27 2 0 S R Va 1942 o - F 9 2 1 W R Va 1945 o - Au 14 23u 1 P R Va 1945 o - S 30 2 0 S R Va 1946 1986 - Ap lastSu 2 1 D R Va 1946 o - S 29 2 0 S R Va 1947 1961 - S lastSu 2 0 S R Va 1962 2006 - O lastSu 2 0 S Z America/Vancouver -8:12:28 - LMT 1884 -8 Va P%sT 1987 -8 C P%sT Z America/Dawson_Creek -8:0:56 - LMT 1884 -8 C P%sT 1947 -8 Va P%sT 1972 Au 30 2 -7 - MST Z America/Fort_Nelson -8:10:47 - LMT 1884 -8 Va P%sT 1946 -8 - PST 1947 -8 Va P%sT 1987 -8 C P%sT 2015 Mar 8 2 -7 - MST Z America/Creston -7:46:4 - LMT 1884 -7 - MST 1916 O -8 - PST 1918 Jun 2 -7 - MST R Y 1918 o - Ap 14 2 1 D R Y 1918 o - O 27 2 0 S R Y 1919 o - May 25 2 1 D R Y 1919 o - N 1 0 0 S R Y 1942 o - F 9 2 1 W R Y 1945 o - Au 14 23u 1 P R Y 1945 o - S 30 2 0 S R Y 1965 o - Ap lastSu 0 2 DD R Y 1965 o - O lastSu 2 0 S R Y 1980 1986 - Ap lastSu 2 1 D R Y 1980 2006 - O lastSu 2 0 S R Y 1987 2006 - Ap Su>=1 2 1 D Z America/Pangnirtung 0 - -00 1921 -4 Y A%sT 1995 Ap Su>=1 2 -5 C E%sT 1999 O 31 2 -6 C C%sT 2000 O 29 2 -5 C E%sT Z America/Iqaluit 0 - -00 1942 Au -5 Y E%sT 1999 O 31 2 -6 C C%sT 2000 O 29 2 -5 C E%sT Z America/Resolute 0 - -00 1947 Au 31 -6 Y C%sT 2000 O 29 2 -5 - EST 2001 Ap 1 3 -6 C C%sT 2006 O 29 2 -5 - EST 2007 Mar 11 3 -6 C C%sT Z America/Rankin_Inlet 
0 - -00 1957 -6 Y C%sT 2000 O 29 2 -5 - EST 2001 Ap 1 3 -6 C C%sT Z America/Cambridge_Bay 0 - -00 1920 -7 Y M%sT 1999 O 31 2 -6 C C%sT 2000 O 29 2 -5 - EST 2000 N 5 -6 - CST 2001 Ap 1 3 -7 C M%sT Z America/Yellowknife 0 - -00 1935 -7 Y M%sT 1980 -7 C M%sT Z America/Inuvik 0 - -00 1953 -8 Y P%sT 1979 Ap lastSu 2 -7 Y M%sT 1980 -7 C M%sT Z America/Whitehorse -9:0:12 - LMT 1900 Au 20 -9 Y Y%sT 1967 May 28 -8 Y P%sT 1980 -8 C P%sT 2020 Mar 8 2 -7 - MST Z America/Dawson -9:17:40 - LMT 1900 Au 20 -9 Y Y%sT 1973 O 28 -8 Y P%sT 1980 -8 C P%sT 2020 Mar 8 2 -7 - MST R m 1939 o - F 5 0 1 D R m 1939 o - Jun 25 0 0 S R m 1940 o - D 9 0 1 D R m 1941 o - Ap 1 0 0 S R m 1943 o - D 16 0 1 W R m 1944 o - May 1 0 0 S R m 1950 o - F 12 0 1 D R m 1950 o - Jul 30 0 0 S R m 1996 2000 - Ap Su>=1 2 1 D R m 1996 2000 - O lastSu 2 0 S R m 2001 o - May Su>=1 2 1 D R m 2001 o - S lastSu 2 0 S R m 2002 ma - Ap Su>=1 2 1 D R m 2002 ma - O lastSu 2 0 S Z America/Cancun -5:47:4 - LMT 1922 Ja 1 0:12:56 -6 - CST 1981 D 23 -5 m E%sT 1998 Au 2 2 -6 m C%sT 2015 F 1 2 -5 - EST Z America/Merida -5:58:28 - LMT 1922 Ja 1 0:1:32 -6 - CST 1981 D 23 -5 - EST 1982 D 2 -6 m C%sT Z America/Matamoros -6:40 - LMT 1921 D 31 23:20 -6 - CST 1988 -6 u C%sT 1989 -6 m C%sT 2010 -6 u C%sT Z America/Monterrey -6:41:16 - LMT 1921 D 31 23:18:44 -6 - CST 1988 -6 u C%sT 1989 -6 m C%sT Z America/Mexico_City -6:36:36 - LMT 1922 Ja 1 0:23:24 -7 - MST 1927 Jun 10 23 -6 - CST 1930 N 15 -7 - MST 1931 May 1 23 -6 - CST 1931 O -7 - MST 1932 Ap -6 m C%sT 2001 S 30 2 -6 - CST 2002 F 20 -6 m C%sT Z America/Ojinaga -6:57:40 - LMT 1922 Ja 1 0:2:20 -7 - MST 1927 Jun 10 23 -6 - CST 1930 N 15 -7 - MST 1931 May 1 23 -6 - CST 1931 O -7 - MST 1932 Ap -6 - CST 1996 -6 m C%sT 1998 -6 - CST 1998 Ap Su>=1 3 -7 m M%sT 2010 -7 u M%sT Z America/Chihuahua -7:4:20 - LMT 1921 D 31 23:55:40 -7 - MST 1927 Jun 10 23 -6 - CST 1930 N 15 -7 - MST 1931 May 1 23 -6 - CST 1931 O -7 - MST 1932 Ap -6 - CST 1996 -6 m C%sT 1998 -6 - CST 1998 Ap Su>=1 3 -7 m M%sT Z 
America/Hermosillo -7:23:52 - LMT 1921 D 31 23:36:8 -7 - MST 1927 Jun 10 23 -6 - CST 1930 N 15 -7 - MST 1931 May 1 23 -6 - CST 1931 O -7 - MST 1932 Ap -6 - CST 1942 Ap 24 -7 - MST 1949 Ja 14 -8 - PST 1970 -7 m M%sT 1999 -7 - MST Z America/Mazatlan -7:5:40 - LMT 1921 D 31 23:54:20 -7 - MST 1927 Jun 10 23 -6 - CST 1930 N 15 -7 - MST 1931 May 1 23 -6 - CST 1931 O -7 - MST 1932 Ap -6 - CST 1942 Ap 24 -7 - MST 1949 Ja 14 -8 - PST 1970 -7 m M%sT Z America/Bahia_Banderas -7:1 - LMT 1921 D 31 23:59 -7 - MST 1927 Jun 10 23 -6 - CST 1930 N 15 -7 - MST 1931 May 1 23 -6 - CST 1931 O -7 - MST 1932 Ap -6 - CST 1942 Ap 24 -7 - MST 1949 Ja 14 -8 - PST 1970 -7 m M%sT 2010 Ap 4 2 -6 m C%sT Z America/Tijuana -7:48:4 - LMT 1922 Ja 1 0:11:56 -7 - MST 1924 -8 - PST 1927 Jun 10 23 -7 - MST 1930 N 15 -8 - PST 1931 Ap -8 1 PDT 1931 S 30 -8 - PST 1942 Ap 24 -8 1 PWT 1945 Au 14 23u -8 1 PPT 1945 N 12 -8 - PST 1948 Ap 5 -8 1 PDT 1949 Ja 14 -8 - PST 1954 -8 CA P%sT 1961 -8 - PST 1976 -8 u P%sT 1996 -8 m P%sT 2001 -8 u P%sT 2002 F 20 -8 m P%sT 2010 -8 u P%sT R BS 1964 1975 - O lastSu 2 0 S R BS 1964 1975 - Ap lastSu 2 1 D Z America/Nassau -5:9:30 - LMT 1912 Mar 2 -5 BS E%sT 1976 -5 u E%sT R BB 1977 o - Jun 12 2 1 D R BB 1977 1978 - O Su>=1 2 0 S R BB 1978 1980 - Ap Su>=15 2 1 D R BB 1979 o - S 30 2 0 S R BB 1980 o - S 25 2 0 S Z America/Barbados -3:58:29 - LMT 1924 -3:58:29 - BMT 1932 -4 BB A%sT R BZ 1918 1942 - O Su>=2 0 0:30 -0530 R BZ 1919 1943 - F Su>=9 0 0 CST R BZ 1973 o - D 5 0 1 CDT R BZ 1974 o - F 9 0 0 CST R BZ 1982 o - D 18 0 1 CDT R BZ 1983 o - F 12 0 0 CST Z America/Belize -5:52:48 - LMT 1912 Ap -6 BZ %s Z Atlantic/Bermuda -4:19:18 - LMT 1930 Ja 1 2 -4 - AST 1974 Ap 28 2 -4 C A%sT 1976 -4 u A%sT R CR 1979 1980 - F lastSu 0 1 D R CR 1979 1980 - Jun Su>=1 0 0 S R CR 1991 1992 - Ja Sa>=15 0 1 D R CR 1991 o - Jul 1 0 0 S R CR 1992 o - Mar 15 0 0 S Z America/Costa_Rica -5:36:13 - LMT 1890 -5:36:13 - SJMT 1921 Ja 15 -6 CR C%sT R Q 1928 o - Jun 10 0 1 D R Q 1928 o - O 10 0 0 S R Q 1940 
1942 - Jun Su>=1 0 1 D R Q 1940 1942 - S Su>=1 0 0 S R Q 1945 1946 - Jun Su>=1 0 1 D R Q 1945 1946 - S Su>=1 0 0 S R Q 1965 o - Jun 1 0 1 D R Q 1965 o - S 30 0 0 S R Q 1966 o - May 29 0 1 D R Q 1966 o - O 2 0 0 S R Q 1967 o - Ap 8 0 1 D R Q 1967 1968 - S Su>=8 0 0 S R Q 1968 o - Ap 14 0 1 D R Q 1969 1977 - Ap lastSu 0 1 D R Q 1969 1971 - O lastSu 0 0 S R Q 1972 1974 - O 8 0 0 S R Q 1975 1977 - O lastSu 0 0 S R Q 1978 o - May 7 0 1 D R Q 1978 1990 - O Su>=8 0 0 S R Q 1979 1980 - Mar Su>=15 0 1 D R Q 1981 1985 - May Su>=5 0 1 D R Q 1986 1989 - Mar Su>=14 0 1 D R Q 1990 1997 - Ap Su>=1 0 1 D R Q 1991 1995 - O Su>=8 0s 0 S R Q 1996 o - O 6 0s 0 S R Q 1997 o - O 12 0s 0 S R Q 1998 1999 - Mar lastSu 0s 1 D R Q 1998 2003 - O lastSu 0s 0 S R Q 2000 2003 - Ap Su>=1 0s 1 D R Q 2004 o - Mar lastSu 0s 1 D R Q 2006 2010 - O lastSu 0s 0 S R Q 2007 o - Mar Su>=8 0s 1 D R Q 2008 o - Mar Su>=15 0s 1 D R Q 2009 2010 - Mar Su>=8 0s 1 D R Q 2011 o - Mar Su>=15 0s 1 D R Q 2011 o - N 13 0s 0 S R Q 2012 o - Ap 1 0s 1 D R Q 2012 ma - N Su>=1 0s 0 S R Q 2013 ma - Mar Su>=8 0s 1 D Z America/Havana -5:29:28 - LMT 1890 -5:29:36 - HMT 1925 Jul 19 12 -5 Q C%sT R DO 1966 o - O 30 0 1 EDT R DO 1967 o - F 28 0 0 EST R DO 1969 1973 - O lastSu 0 0:30 -0430 R DO 1970 o - F 21 0 0 EST R DO 1971 o - Ja 20 0 0 EST R DO 1972 1974 - Ja 21 0 0 EST Z America/Santo_Domingo -4:39:36 - LMT 1890 -4:40 - SDMT 1933 Ap 1 12 -5 DO %s 1974 O 27 -4 - AST 2000 O 29 2 -5 u E%sT 2000 D 3 1 -4 - AST R SV 1987 1988 - May Su>=1 0 1 D R SV 1987 1988 - S lastSu 0 0 S Z America/El_Salvador -5:56:48 - LMT 1921 -6 SV C%sT R GT 1973 o - N 25 0 1 D R GT 1974 o - F 24 0 0 S R GT 1983 o - May 21 0 1 D R GT 1983 o - S 22 0 0 S R GT 1991 o - Mar 23 0 1 D R GT 1991 o - S 7 0 0 S R GT 2006 o - Ap 30 0 1 D R GT 2006 o - O 1 0 0 S Z America/Guatemala -6:2:4 - LMT 1918 O 5 -6 GT C%sT R HT 1983 o - May 8 0 1 D R HT 1984 1987 - Ap lastSu 0 1 D R HT 1983 1987 - O lastSu 0 0 S R HT 1988 1997 - Ap Su>=1 1s 1 D R HT 1988 1997 - O lastSu 1s 0 S 
R HT 2005 2006 - Ap Su>=1 0 1 D R HT 2005 2006 - O lastSu 0 0 S R HT 2012 2015 - Mar Su>=8 2 1 D R HT 2012 2015 - N Su>=1 2 0 S R HT 2017 ma - Mar Su>=8 2 1 D R HT 2017 ma - N Su>=1 2 0 S Z America/Port-au-Prince -4:49:20 - LMT 1890 -4:49 - PPMT 1917 Ja 24 12 -5 HT E%sT R HN 1987 1988 - May Su>=1 0 1 D R HN 1987 1988 - S lastSu 0 0 S R HN 2006 o - May Su>=1 0 1 D R HN 2006 o - Au M>=1 0 0 S Z America/Tegucigalpa -5:48:52 - LMT 1921 Ap -6 HN C%sT Z America/Jamaica -5:7:10 - LMT 1890 -5:7:10 - KMT 1912 F -5 - EST 1974 -5 u E%sT 1984 -5 - EST Z America/Martinique -4:4:20 - LMT 1890 -4:4:20 - FFMT 1911 May -4 - AST 1980 Ap 6 -4 1 ADT 1980 S 28 -4 - AST R NI 1979 1980 - Mar Su>=16 0 1 D R NI 1979 1980 - Jun M>=23 0 0 S R NI 2005 o - Ap 10 0 1 D R NI 2005 o - O Su>=1 0 0 S R NI 2006 o - Ap 30 2 1 D R NI 2006 o - O Su>=1 1 0 S Z America/Managua -5:45:8 - LMT 1890 -5:45:12 - MMT 1934 Jun 23 -6 - CST 1973 May -5 - EST 1975 F 16 -6 NI C%sT 1992 Ja 1 4 -5 - EST 1992 S 24 -6 - CST 1993 -5 - EST 1997 -6 NI C%sT Z America/Panama -5:18:8 - LMT 1890 -5:19:36 - CMT 1908 Ap 22 -5 - EST L America/Panama America/Cayman Z America/Puerto_Rico -4:24:25 - LMT 1899 Mar 28 12 -4 - AST 1942 May 3 -4 u A%sT 1946 -4 - AST Z America/Miquelon -3:44:40 - LMT 1911 May 15 -4 - AST 1980 May -3 - -03 1987 -3 C -03/-02 Z America/Grand_Turk -4:44:32 - LMT 1890 -5:7:10 - KMT 1912 F -5 - EST 1979 -5 u E%sT 2015 N Su>=1 2 -4 - AST 2018 Mar 11 3 -5 u E%sT R A 1930 o - D 1 0 1 - R A 1931 o - Ap 1 0 0 - R A 1931 o - O 15 0 1 - R A 1932 1940 - Mar 1 0 0 - R A 1932 1939 - N 1 0 1 - R A 1940 o - Jul 1 0 1 - R A 1941 o - Jun 15 0 0 - R A 1941 o - O 15 0 1 - R A 1943 o - Au 1 0 0 - R A 1943 o - O 15 0 1 - R A 1946 o - Mar 1 0 0 - R A 1946 o - O 1 0 1 - R A 1963 o - O 1 0 0 - R A 1963 o - D 15 0 1 - R A 1964 1966 - Mar 1 0 0 - R A 1964 1966 - O 15 0 1 - R A 1967 o - Ap 2 0 0 - R A 1967 1968 - O Su>=1 0 1 - R A 1968 1969 - Ap Su>=1 0 0 - R A 1974 o - Ja 23 0 1 - R A 1974 o - May 1 0 0 - R A 1988 o - D 1 0 1 - R A 
1989 1993 - Mar Su>=1 0 0 - R A 1989 1992 - O Su>=15 0 1 - R A 1999 o - O Su>=1 0 1 - R A 2000 o - Mar 3 0 0 - R A 2007 o - D 30 0 1 - R A 2008 2009 - Mar Su>=15 0 0 - R A 2008 o - O Su>=15 0 1 - Z America/Argentina/Buenos_Aires -3:53:48 - LMT 1894 O 31 -4:16:48 - CMT 1920 May -4 - -04 1930 D -4 A -04/-03 1969 O 5 -3 A -03/-02 1999 O 3 -4 A -04/-03 2000 Mar 3 -3 A -03/-02 Z America/Argentina/Cordoba -4:16:48 - LMT 1894 O 31 -4:16:48 - CMT 1920 May -4 - -04 1930 D -4 A -04/-03 1969 O 5 -3 A -03/-02 1991 Mar 3 -4 - -04 1991 O 20 -3 A -03/-02 1999 O 3 -4 A -04/-03 2000 Mar 3 -3 A -03/-02 Z America/Argentina/Salta -4:21:40 - LMT 1894 O 31 -4:16:48 - CMT 1920 May -4 - -04 1930 D -4 A -04/-03 1969 O 5 -3 A -03/-02 1991 Mar 3 -4 - -04 1991 O 20 -3 A -03/-02 1999 O 3 -4 A -04/-03 2000 Mar 3 -3 A -03/-02 2008 O 18 -3 - -03 Z America/Argentina/Tucuman -4:20:52 - LMT 1894 O 31 -4:16:48 - CMT 1920 May -4 - -04 1930 D -4 A -04/-03 1969 O 5 -3 A -03/-02 1991 Mar 3 -4 - -04 1991 O 20 -3 A -03/-02 1999 O 3 -4 A -04/-03 2000 Mar 3 -3 - -03 2004 Jun -4 - -04 2004 Jun 13 -3 A -03/-02 Z America/Argentina/La_Rioja -4:27:24 - LMT 1894 O 31 -4:16:48 - CMT 1920 May -4 - -04 1930 D -4 A -04/-03 1969 O 5 -3 A -03/-02 1991 Mar -4 - -04 1991 May 7 -3 A -03/-02 1999 O 3 -4 A -04/-03 2000 Mar 3 -3 - -03 2004 Jun -4 - -04 2004 Jun 20 -3 A -03/-02 2008 O 18 -3 - -03 Z America/Argentina/San_Juan -4:34:4 - LMT 1894 O 31 -4:16:48 - CMT 1920 May -4 - -04 1930 D -4 A -04/-03 1969 O 5 -3 A -03/-02 1991 Mar -4 - -04 1991 May 7 -3 A -03/-02 1999 O 3 -4 A -04/-03 2000 Mar 3 -3 - -03 2004 May 31 -4 - -04 2004 Jul 25 -3 A -03/-02 2008 O 18 -3 - -03 Z America/Argentina/Jujuy -4:21:12 - LMT 1894 O 31 -4:16:48 - CMT 1920 May -4 - -04 1930 D -4 A -04/-03 1969 O 5 -3 A -03/-02 1990 Mar 4 -4 - -04 1990 O 28 -4 1 -03 1991 Mar 17 -4 - -04 1991 O 6 -3 1 -02 1992 -3 A -03/-02 1999 O 3 -4 A -04/-03 2000 Mar 3 -3 A -03/-02 2008 O 18 -3 - -03 Z America/Argentina/Catamarca -4:23:8 - LMT 1894 O 31 -4:16:48 - CMT 1920 May 
-4 - -04 1930 D -4 A -04/-03 1969 O 5 -3 A -03/-02 1991 Mar 3 -4 - -04 1991 O 20 -3 A -03/-02 1999 O 3 -4 A -04/-03 2000 Mar 3 -3 - -03 2004 Jun -4 - -04 2004 Jun 20 -3 A -03/-02 2008 O 18 -3 - -03 Z America/Argentina/Mendoza -4:35:16 - LMT 1894 O 31 -4:16:48 - CMT 1920 May -4 - -04 1930 D -4 A -04/-03 1969 O 5 -3 A -03/-02 1990 Mar 4 -4 - -04 1990 O 15 -4 1 -03 1991 Mar -4 - -04 1991 O 15 -4 1 -03 1992 Mar -4 - -04 1992 O 18 -3 A -03/-02 1999 O 3 -4 A -04/-03 2000 Mar 3 -3 - -03 2004 May 23 -4 - -04 2004 S 26 -3 A -03/-02 2008 O 18 -3 - -03 R Sa 2008 2009 - Mar Su>=8 0 0 - R Sa 2007 2008 - O Su>=8 0 1 - Z America/Argentina/San_Luis -4:25:24 - LMT 1894 O 31 -4:16:48 - CMT 1920 May -4 - -04 1930 D -4 A -04/-03 1969 O 5 -3 A -03/-02 1990 -3 1 -02 1990 Mar 14 -4 - -04 1990 O 15 -4 1 -03 1991 Mar -4 - -04 1991 Jun -3 - -03 1999 O 3 -4 1 -03 2000 Mar 3 -3 - -03 2004 May 31 -4 - -04 2004 Jul 25 -3 A -03/-02 2008 Ja 21 -4 Sa -04/-03 2009 O 11 -3 - -03 Z America/Argentina/Rio_Gallegos -4:36:52 - LMT 1894 O 31 -4:16:48 - CMT 1920 May -4 - -04 1930 D -4 A -04/-03 1969 O 5 -3 A -03/-02 1999 O 3 -4 A -04/-03 2000 Mar 3 -3 - -03 2004 Jun -4 - -04 2004 Jun 20 -3 A -03/-02 2008 O 18 -3 - -03 Z America/Argentina/Ushuaia -4:33:12 - LMT 1894 O 31 -4:16:48 - CMT 1920 May -4 - -04 1930 D -4 A -04/-03 1969 O 5 -3 A -03/-02 1999 O 3 -4 A -04/-03 2000 Mar 3 -3 - -03 2004 May 30 -4 - -04 2004 Jun 20 -3 A -03/-02 2008 O 18 -3 - -03 L America/Curacao America/Aruba Z America/La_Paz -4:32:36 - LMT 1890 -4:32:36 - CMT 1931 O 15 -4:32:36 1 BST 1932 Mar 21 -4 - -04 R B 1931 o - O 3 11 1 - R B 1932 1933 - Ap 1 0 0 - R B 1932 o - O 3 0 1 - R B 1949 1952 - D 1 0 1 - R B 1950 o - Ap 16 1 0 - R B 1951 1952 - Ap 1 0 0 - R B 1953 o - Mar 1 0 0 - R B 1963 o - D 9 0 1 - R B 1964 o - Mar 1 0 0 - R B 1965 o - Ja 31 0 1 - R B 1965 o - Mar 31 0 0 - R B 1965 o - D 1 0 1 - R B 1966 1968 - Mar 1 0 0 - R B 1966 1967 - N 1 0 1 - R B 1985 o - N 2 0 1 - R B 1986 o - Mar 15 0 0 - R B 1986 o - O 25 0 1 - R B 1987 o - 
F 14 0 0 - R B 1987 o - O 25 0 1 - R B 1988 o - F 7 0 0 - R B 1988 o - O 16 0 1 - R B 1989 o - Ja 29 0 0 - R B 1989 o - O 15 0 1 - R B 1990 o - F 11 0 0 - R B 1990 o - O 21 0 1 - R B 1991 o - F 17 0 0 - R B 1991 o - O 20 0 1 - R B 1992 o - F 9 0 0 - R B 1992 o - O 25 0 1 - R B 1993 o - Ja 31 0 0 - R B 1993 1995 - O Su>=11 0 1 - R B 1994 1995 - F Su>=15 0 0 - R B 1996 o - F 11 0 0 - R B 1996 o - O 6 0 1 - R B 1997 o - F 16 0 0 - R B 1997 o - O 6 0 1 - R B 1998 o - Mar 1 0 0 - R B 1998 o - O 11 0 1 - R B 1999 o - F 21 0 0 - R B 1999 o - O 3 0 1 - R B 2000 o - F 27 0 0 - R B 2000 2001 - O Su>=8 0 1 - R B 2001 2006 - F Su>=15 0 0 - R B 2002 o - N 3 0 1 - R B 2003 o - O 19 0 1 - R B 2004 o - N 2 0 1 - R B 2005 o - O 16 0 1 - R B 2006 o - N 5 0 1 - R B 2007 o - F 25 0 0 - R B 2007 o - O Su>=8 0 1 - R B 2008 2017 - O Su>=15 0 1 - R B 2008 2011 - F Su>=15 0 0 - R B 2012 o - F Su>=22 0 0 - R B 2013 2014 - F Su>=15 0 0 - R B 2015 o - F Su>=22 0 0 - R B 2016 2019 - F Su>=15 0 0 - R B 2018 o - N Su>=1 0 1 - Z America/Noronha -2:9:40 - LMT 1914 -2 B -02/-01 1990 S 17 -2 - -02 1999 S 30 -2 B -02/-01 2000 O 15 -2 - -02 2001 S 13 -2 B -02/-01 2002 O -2 - -02 Z America/Belem -3:13:56 - LMT 1914 -3 B -03/-02 1988 S 12 -3 - -03 Z America/Santarem -3:38:48 - LMT 1914 -4 B -04/-03 1988 S 12 -4 - -04 2008 Jun 24 -3 - -03 Z America/Fortaleza -2:34 - LMT 1914 -3 B -03/-02 1990 S 17 -3 - -03 1999 S 30 -3 B -03/-02 2000 O 22 -3 - -03 2001 S 13 -3 B -03/-02 2002 O -3 - -03 Z America/Recife -2:19:36 - LMT 1914 -3 B -03/-02 1990 S 17 -3 - -03 1999 S 30 -3 B -03/-02 2000 O 15 -3 - -03 2001 S 13 -3 B -03/-02 2002 O -3 - -03 Z America/Araguaina -3:12:48 - LMT 1914 -3 B -03/-02 1990 S 17 -3 - -03 1995 S 14 -3 B -03/-02 2003 S 24 -3 - -03 2012 O 21 -3 B -03/-02 2013 S -3 - -03 Z America/Maceio -2:22:52 - LMT 1914 -3 B -03/-02 1990 S 17 -3 - -03 1995 O 13 -3 B -03/-02 1996 S 4 -3 - -03 1999 S 30 -3 B -03/-02 2000 O 22 -3 - -03 2001 S 13 -3 B -03/-02 2002 O -3 - -03 Z America/Bahia -2:34:4 - LMT 1914 
-3 B -03/-02 2003 S 24 -3 - -03 2011 O 16 -3 B -03/-02 2012 O 21 -3 - -03 Z America/Sao_Paulo -3:6:28 - LMT 1914 -3 B -03/-02 1963 O 23 -3 1 -02 1964 -3 B -03/-02 Z America/Campo_Grande -3:38:28 - LMT 1914 -4 B -04/-03 Z America/Cuiaba -3:44:20 - LMT 1914 -4 B -04/-03 2003 S 24 -4 - -04 2004 O -4 B -04/-03 Z America/Porto_Velho -4:15:36 - LMT 1914 -4 B -04/-03 1988 S 12 -4 - -04 Z America/Boa_Vista -4:2:40 - LMT 1914 -4 B -04/-03 1988 S 12 -4 - -04 1999 S 30 -4 B -04/-03 2000 O 15 -4 - -04 Z America/Manaus -4:0:4 - LMT 1914 -4 B -04/-03 1988 S 12 -4 - -04 1993 S 28 -4 B -04/-03 1994 S 22 -4 - -04 Z America/Eirunepe -4:39:28 - LMT 1914 -5 B -05/-04 1988 S 12 -5 - -05 1993 S 28 -5 B -05/-04 1994 S 22 -5 - -05 2008 Jun 24 -4 - -04 2013 N 10 -5 - -05 Z America/Rio_Branco -4:31:12 - LMT 1914 -5 B -05/-04 1988 S 12 -5 - -05 2008 Jun 24 -4 - -04 2013 N 10 -5 - -05 R x 1927 1931 - S 1 0 1 - R x 1928 1932 - Ap 1 0 0 - R x 1968 o - N 3 4u 1 - R x 1969 o - Mar 30 3u 0 - R x 1969 o - N 23 4u 1 - R x 1970 o - Mar 29 3u 0 - R x 1971 o - Mar 14 3u 0 - R x 1970 1972 - O Su>=9 4u 1 - R x 1972 1986 - Mar Su>=9 3u 0 - R x 1973 o - S 30 4u 1 - R x 1974 1987 - O Su>=9 4u 1 - R x 1987 o - Ap 12 3u 0 - R x 1988 1990 - Mar Su>=9 3u 0 - R x 1988 1989 - O Su>=9 4u 1 - R x 1990 o - S 16 4u 1 - R x 1991 1996 - Mar Su>=9 3u 0 - R x 1991 1997 - O Su>=9 4u 1 - R x 1997 o - Mar 30 3u 0 - R x 1998 o - Mar Su>=9 3u 0 - R x 1998 o - S 27 4u 1 - R x 1999 o - Ap 4 3u 0 - R x 1999 2010 - O Su>=9 4u 1 - R x 2000 2007 - Mar Su>=9 3u 0 - R x 2008 o - Mar 30 3u 0 - R x 2009 o - Mar Su>=9 3u 0 - R x 2010 o - Ap Su>=1 3u 0 - R x 2011 o - May Su>=2 3u 0 - R x 2011 o - Au Su>=16 4u 1 - R x 2012 2014 - Ap Su>=23 3u 0 - R x 2012 2014 - S Su>=2 4u 1 - R x 2016 2018 - May Su>=9 3u 0 - R x 2016 2018 - Au Su>=9 4u 1 - R x 2019 ma - Ap Su>=2 3u 0 - R x 2019 ma - S Su>=2 4u 1 - Z America/Santiago -4:42:46 - LMT 1890 -4:42:46 - SMT 1910 Ja 10 -5 - -05 1916 Jul -4:42:46 - SMT 1918 S 10 -4 - -04 1919 Jul -4:42:46 - SMT 
1927 S -5 x -05/-04 1932 S -4 - -04 1942 Jun -5 - -05 1942 Au -4 - -04 1946 Jul 15 -4 1 -03 1946 S -4 - -04 1947 Ap -5 - -05 1947 May 21 23 -4 x -04/-03 Z America/Punta_Arenas -4:43:40 - LMT 1890 -4:42:46 - SMT 1910 Ja 10 -5 - -05 1916 Jul -4:42:46 - SMT 1918 S 10 -4 - -04 1919 Jul -4:42:46 - SMT 1927 S -5 x -05/-04 1932 S -4 - -04 1942 Jun -5 - -05 1942 Au -4 - -04 1947 Ap -5 - -05 1947 May 21 23 -4 x -04/-03 2016 D 4 -3 - -03 Z Pacific/Easter -7:17:28 - LMT 1890 -7:17:28 - EMT 1932 S -7 x -07/-06 1982 Mar 14 3u -6 x -06/-05 Z Antarctica/Palmer 0 - -00 1965 -4 A -04/-03 1969 O 5 -3 A -03/-02 1982 May -4 x -04/-03 2016 D 4 -3 - -03 R CO 1992 o - May 3 0 1 - R CO 1993 o - Ap 4 0 0 - Z America/Bogota -4:56:16 - LMT 1884 Mar 13 -4:56:16 - BMT 1914 N 23 -5 CO -05/-04 Z America/Curacao -4:35:47 - LMT 1912 F 12 -4:30 - -0430 1965 -4 - AST L America/Curacao America/Lower_Princes L America/Curacao America/Kralendijk R EC 1992 o - N 28 0 1 - R EC 1993 o - F 5 0 0 - Z America/Guayaquil -5:19:20 - LMT 1890 -5:14 - QMT 1931 -5 EC -05/-04 Z Pacific/Galapagos -5:58:24 - LMT 1931 -5 - -05 1986 -6 EC -06/-05 R FK 1937 1938 - S lastSu 0 1 - R FK 1938 1942 - Mar Su>=19 0 0 - R FK 1939 o - O 1 0 1 - R FK 1940 1942 - S lastSu 0 1 - R FK 1943 o - Ja 1 0 0 - R FK 1983 o - S lastSu 0 1 - R FK 1984 1985 - Ap lastSu 0 0 - R FK 1984 o - S 16 0 1 - R FK 1985 2000 - S Su>=9 0 1 - R FK 1986 2000 - Ap Su>=16 0 0 - R FK 2001 2010 - Ap Su>=15 2 0 - R FK 2001 2010 - S Su>=1 2 1 - Z Atlantic/Stanley -3:51:24 - LMT 1890 -3:51:24 - SMT 1912 Mar 12 -4 FK -04/-03 1983 May -3 FK -03/-02 1985 S 15 -4 FK -04/-03 2010 S 5 2 -3 - -03 Z America/Cayenne -3:29:20 - LMT 1911 Jul -4 - -04 1967 O -3 - -03 Z America/Guyana -3:52:40 - LMT 1915 Mar -3:45 - -0345 1975 Jul 31 -3 - -03 1991 -4 - -04 R y 1975 1988 - O 1 0 1 - R y 1975 1978 - Mar 1 0 0 - R y 1979 1991 - Ap 1 0 0 - R y 1989 o - O 22 0 1 - R y 1990 o - O 1 0 1 - R y 1991 o - O 6 0 1 - R y 1992 o - Mar 1 0 0 - R y 1992 o - O 5 0 1 - R y 1993 o - Mar 31 0 0 
- R y 1993 1995 - O 1 0 1 - R y 1994 1995 - F lastSu 0 0 - R y 1996 o - Mar 1 0 0 - R y 1996 2001 - O Su>=1 0 1 - R y 1997 o - F lastSu 0 0 - R y 1998 2001 - Mar Su>=1 0 0 - R y 2002 2004 - Ap Su>=1 0 0 - R y 2002 2003 - S Su>=1 0 1 - R y 2004 2009 - O Su>=15 0 1 - R y 2005 2009 - Mar Su>=8 0 0 - R y 2010 ma - O Su>=1 0 1 - R y 2010 2012 - Ap Su>=8 0 0 - R y 2013 ma - Mar Su>=22 0 0 - Z America/Asuncion -3:50:40 - LMT 1890 -3:50:40 - AMT 1931 O 10 -4 - -04 1972 O -3 - -03 1974 Ap -4 y -04/-03 R PE 1938 o - Ja 1 0 1 - R PE 1938 o - Ap 1 0 0 - R PE 1938 1939 - S lastSu 0 1 - R PE 1939 1940 - Mar Su>=24 0 0 - R PE 1986 1987 - Ja 1 0 1 - R PE 1986 1987 - Ap 1 0 0 - R PE 1990 o - Ja 1 0 1 - R PE 1990 o - Ap 1 0 0 - R PE 1994 o - Ja 1 0 1 - R PE 1994 o - Ap 1 0 0 - Z America/Lima -5:8:12 - LMT 1890 -5:8:36 - LMT 1908 Jul 28 -5 PE -05/-04 Z Atlantic/South_Georgia -2:26:8 - LMT 1890 -2 - -02 Z America/Paramaribo -3:40:40 - LMT 1911 -3:40:52 - PMT 1935 -3:40:36 - PMT 1945 O -3:30 - -0330 1984 O -3 - -03 Z America/Port_of_Spain -4:6:4 - LMT 1912 Mar 2 -4 - AST L America/Port_of_Spain America/Anguilla L America/Port_of_Spain America/Antigua L America/Port_of_Spain America/Dominica L America/Port_of_Spain America/Grenada L America/Port_of_Spain America/Guadeloupe L America/Port_of_Spain America/Marigot L America/Port_of_Spain America/Montserrat L America/Port_of_Spain America/St_Barthelemy L America/Port_of_Spain America/St_Kitts L America/Port_of_Spain America/St_Lucia L America/Port_of_Spain America/St_Thomas L America/Port_of_Spain America/St_Vincent L America/Port_of_Spain America/Tortola R U 1923 1925 - O 1 0 0:30 - R U 1924 1926 - Ap 1 0 0 - R U 1933 1938 - O lastSu 0 0:30 - R U 1934 1941 - Mar lastSa 24 0 - R U 1939 o - O 1 0 0:30 - R U 1940 o - O 27 0 0:30 - R U 1941 o - Au 1 0 0:30 - R U 1942 o - D 14 0 0:30 - R U 1943 o - Mar 14 0 0 - R U 1959 o - May 24 0 0:30 - R U 1959 o - N 15 0 0 - R U 1960 o - Ja 17 0 1 - R U 1960 o - Mar 6 0 0 - R U 1965 o - Ap 4 0 1 - R U 
1965 o - S 26 0 0 - R U 1968 o - May 27 0 0:30 - R U 1968 o - D 1 0 0 - R U 1970 o - Ap 25 0 1 - R U 1970 o - Jun 14 0 0 - R U 1972 o - Ap 23 0 1 - R U 1972 o - Jul 16 0 0 - R U 1974 o - Ja 13 0 1:30 - R U 1974 o - Mar 10 0 0:30 - R U 1974 o - S 1 0 0 - R U 1974 o - D 22 0 1 - R U 1975 o - Mar 30 0 0 - R U 1976 o - D 19 0 1 - R U 1977 o - Mar 6 0 0 - R U 1977 o - D 4 0 1 - R U 1978 1979 - Mar Su>=1 0 0 - R U 1978 o - D 17 0 1 - R U 1979 o - Ap 29 0 1 - R U 1980 o - Mar 16 0 0 - R U 1987 o - D 14 0 1 - R U 1988 o - F 28 0 0 - R U 1988 o - D 11 0 1 - R U 1989 o - Mar 5 0 0 - R U 1989 o - O 29 0 1 - R U 1990 o - F 25 0 0 - R U 1990 1991 - O Su>=21 0 1 - R U 1991 1992 - Mar Su>=1 0 0 - R U 1992 o - O 18 0 1 - R U 1993 o - F 28 0 0 - R U 2004 o - S 19 0 1 - R U 2005 o - Mar 27 2 0 - R U 2005 o - O 9 2 1 - R U 2006 2015 - Mar Su>=8 2 0 - R U 2006 2014 - O Su>=1 2 1 - Z America/Montevideo -3:44:51 - LMT 1908 Jun 10 -3:44:51 - MMT 1920 May -4 - -04 1923 O -3:30 U -0330/-03 1942 D 14 -3 U -03/-0230 1960 -3 U -03/-02 1968 -3 U -03/-0230 1970 -3 U -03/-02 1974 -3 U -03/-0130 1974 Mar 10 -3 U -03/-0230 1974 D 22 -3 U -03/-02 Z America/Caracas -4:27:44 - LMT 1890 -4:27:40 - CMT 1912 F 12 -4:30 - -0430 1965 -4 - -04 2007 D 9 3 -4:30 - -0430 2016 May 1 2:30 -4 - -04 Z Etc/GMT 0 - GMT Z Etc/UTC 0 - UTC L Etc/GMT GMT L Etc/UTC Etc/Universal L Etc/UTC Etc/Zulu L Etc/GMT Etc/Greenwich L Etc/GMT Etc/GMT-0 L Etc/GMT Etc/GMT+0 L Etc/GMT Etc/GMT0 Z Etc/GMT-14 14 - +14 Z Etc/GMT-13 13 - +13 Z Etc/GMT-12 12 - +12 Z Etc/GMT-11 11 - +11 Z Etc/GMT-10 10 - +10 Z Etc/GMT-9 9 - +09 Z Etc/GMT-8 8 - +08 Z Etc/GMT-7 7 - +07 Z Etc/GMT-6 6 - +06 Z Etc/GMT-5 5 - +05 Z Etc/GMT-4 4 - +04 Z Etc/GMT-3 3 - +03 Z Etc/GMT-2 2 - +02 Z Etc/GMT-1 1 - +01 Z Etc/GMT+1 -1 - -01 Z Etc/GMT+2 -2 - -02 Z Etc/GMT+3 -3 - -03 Z Etc/GMT+4 -4 - -04 Z Etc/GMT+5 -5 - -05 Z Etc/GMT+6 -6 - -06 Z Etc/GMT+7 -7 - -07 Z Etc/GMT+8 -8 - -08 Z Etc/GMT+9 -9 - -09 Z Etc/GMT+10 -10 - -10 Z Etc/GMT+11 -11 - -11 Z Etc/GMT+12 -12 - -12 Z 
Factory 0 - -00 L Africa/Nairobi Africa/Asmera L Africa/Abidjan Africa/Timbuktu L America/Argentina/Catamarca America/Argentina/ComodRivadavia L America/Adak America/Atka L America/Argentina/Buenos_Aires America/Buenos_Aires L America/Argentina/Catamarca America/Catamarca L America/Atikokan America/Coral_Harbour L America/Argentina/Cordoba America/Cordoba L America/Tijuana America/Ensenada L America/Indiana/Indianapolis America/Fort_Wayne L America/Nuuk America/Godthab L America/Indiana/Indianapolis America/Indianapolis L America/Argentina/Jujuy America/Jujuy L America/Indiana/Knox America/Knox_IN L America/Kentucky/Louisville America/Louisville L America/Argentina/Mendoza America/Mendoza L America/Toronto America/Montreal L America/Rio_Branco America/Porto_Acre L America/Argentina/Cordoba America/Rosario L America/Tijuana America/Santa_Isabel L America/Denver America/Shiprock L America/Port_of_Spain America/Virgin L Pacific/Auckland Antarctica/South_Pole L Asia/Ashgabat Asia/Ashkhabad L Asia/Kolkata Asia/Calcutta L Asia/Shanghai Asia/Chongqing L Asia/Shanghai Asia/Chungking L Asia/Dhaka Asia/Dacca L Asia/Shanghai Asia/Harbin L Asia/Urumqi Asia/Kashgar L Asia/Kathmandu Asia/Katmandu L Asia/Macau Asia/Macao L Asia/Yangon Asia/Rangoon L Asia/Ho_Chi_Minh Asia/Saigon L Asia/Jerusalem Asia/Tel_Aviv L Asia/Thimphu Asia/Thimbu L Asia/Makassar Asia/Ujung_Pandang L Asia/Ulaanbaatar Asia/Ulan_Bator L Atlantic/Faroe Atlantic/Faeroe L Europe/Oslo Atlantic/Jan_Mayen L Australia/Sydney Australia/ACT L Australia/Sydney Australia/Canberra L Australia/Lord_Howe Australia/LHI L Australia/Sydney Australia/NSW L Australia/Darwin Australia/North L Australia/Brisbane Australia/Queensland L Australia/Adelaide Australia/South L Australia/Hobart Australia/Tasmania L Australia/Melbourne Australia/Victoria L Australia/Perth Australia/West L Australia/Broken_Hill Australia/Yancowinna L America/Rio_Branco Brazil/Acre L America/Noronha Brazil/DeNoronha L America/Sao_Paulo Brazil/East L 
America/Manaus Brazil/West L America/Halifax Canada/Atlantic L America/Winnipeg Canada/Central L America/Toronto Canada/Eastern L America/Edmonton Canada/Mountain L America/St_Johns Canada/Newfoundland L America/Vancouver Canada/Pacific L America/Regina Canada/Saskatchewan L America/Whitehorse Canada/Yukon L America/Santiago Chile/Continental L Pacific/Easter Chile/EasterIsland L America/Havana Cuba L Africa/Cairo Egypt L Europe/Dublin Eire L Etc/UTC Etc/UCT L Europe/London Europe/Belfast L Europe/Chisinau Europe/Tiraspol L Europe/London GB L Europe/London GB-Eire L Etc/GMT GMT+0 L Etc/GMT GMT-0 L Etc/GMT GMT0 L Etc/GMT Greenwich L Asia/Hong_Kong Hongkong L Atlantic/Reykjavik Iceland L Asia/Tehran Iran L Asia/Jerusalem Israel L America/Jamaica Jamaica L Asia/Tokyo Japan L Pacific/Kwajalein Kwajalein L Africa/Tripoli Libya L America/Tijuana Mexico/BajaNorte L America/Mazatlan Mexico/BajaSur L America/Mexico_City Mexico/General L Pacific/Auckland NZ L Pacific/Chatham NZ-CHAT L America/Denver Navajo L Asia/Shanghai PRC L Pacific/Honolulu Pacific/Johnston L Pacific/Pohnpei Pacific/Ponape L Pacific/Pago_Pago Pacific/Samoa L Pacific/Chuuk Pacific/Truk L Pacific/Chuuk Pacific/Yap L Europe/Warsaw Poland L Europe/Lisbon Portugal L Asia/Taipei ROC L Asia/Seoul ROK L Asia/Singapore Singapore L Europe/Istanbul Turkey L Etc/UTC UCT L America/Anchorage US/Alaska L America/Adak US/Aleutian L America/Phoenix US/Arizona L America/Chicago US/Central L America/Indiana/Indianapolis US/East-Indiana L America/New_York US/Eastern L Pacific/Honolulu US/Hawaii L America/Indiana/Knox US/Indiana-Starke L America/Detroit US/Michigan L America/Denver US/Mountain L America/Los_Angeles US/Pacific L Pacific/Pago_Pago US/Samoa L Etc/UTC UTC L Etc/UTC Universal L Europe/Moscow W-SU L Etc/UTC Zulu
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Factory
TZif2-00TZif2-00 <-00>0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/UTC
TZif2UTCTZif2UTC UTC0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Etc/Universal
TZif2UTCTZif2UTC UTC0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Etc/Greenwich
TZif2GMTTZif2GMT GMT0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Etc/GMT-6
TZif2T`+06TZif2T`+06 <+06>-6
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Etc/GMT-1
TZif2+01TZif2+01 <+01>-1
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Etc/GMT-0
TZif2GMTTZif2GMT GMT0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Etc/GMT-7
TZif2bp+07TZif2bp+07 <+07>-7
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Etc/GMT
TZif2GMTTZif2GMT GMT0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Etc/Zulu
TZif2UTCTZif2UTC UTC0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Etc/GMT0
TZif2GMTTZif2GMT GMT0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Etc/UCT
TZif2UTCTZif2UTC UTC0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Etc/GMT+0
TZif2GMTTZif2GMT GMT0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Etc/GMT-2
TZif2 +02TZif2 +02 <+02>-2
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Etc/GMT-5
TZif2FP+05TZif2FP+05 <+05>-5
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Etc/UTC
TZif2UTCTZif2UTC UTC0
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Etc/GMT-4
TZif28@+04TZif28@+04 <+04>-4
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/pytz/zoneinfo/Etc/GMT-3
TZif2*0+03TZif2*0+03 <+03>-3
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/Flask_Cors-3.0.10.dist-info/RECORD
Flask_Cors-3.0.10.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 Flask_Cors-3.0.10.dist-info/LICENSE,sha256=bhob3FSDTB4HQMvOXV9vLK4chG_Sp_SCsRZJWU-vvV0,1069 Flask_Cors-3.0.10.dist-info/METADATA,sha256=GGjB2MELGVMzpRA98u66-Y4kjXwJvRuEzuv9JuQaBpc,5382 Flask_Cors-3.0.10.dist-info/RECORD,, Flask_Cors-3.0.10.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 Flask_Cors-3.0.10.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110 Flask_Cors-3.0.10.dist-info/top_level.txt,sha256=aWye_0QNZPp_QtPF4ZluLHqnyVLT9CPJsfiGhwqkWuo,11 flask_cors/__init__.py,sha256=oJExwfR7yU3HAsmQ_EfL6KoLK3zq3J9HsET9r-56sfM,791 flask_cors/__pycache__/__init__.cpython-39.pyc,, flask_cors/__pycache__/core.cpython-39.pyc,, flask_cors/__pycache__/decorator.cpython-39.pyc,, flask_cors/__pycache__/extension.cpython-39.pyc,, flask_cors/__pycache__/version.cpython-39.pyc,, flask_cors/core.py,sha256=N6dEVe5dffaQTw79Mc8IvEeTzvY_YsKCiOZ1lJ_PyNk,13894 flask_cors/decorator.py,sha256=iiwjUi0lVeCm4OJJHY5Cvuzj2nENbUns3Iq6zqKXuss,4937 flask_cors/extension.py,sha256=HTSAUEDH8mvTLLMVrcpfbtrdh5OXK72VUPk_FAoQhpU,7586 flask_cors/version.py,sha256=8OdYCyhDLC6EsmyL3_m3G4XCKOJMI20UlrLKmiyEoCE,23
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/Flask_Cors-3.0.10.dist-info/LICENSE
Copyright (C) 2016 Cory Dolphin, Olin College Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/Flask_Cors-3.0.10.dist-info/WHEEL
Wheel-Version: 1.0 Generator: bdist_wheel (0.36.2) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/Flask_Cors-3.0.10.dist-info/top_level.txt
flask_cors
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/Flask_Cors-3.0.10.dist-info/INSTALLER
pip
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/Flask_Cors-3.0.10.dist-info/METADATA
Metadata-Version: 2.1 Name: Flask-Cors Version: 3.0.10 Summary: A Flask extension adding a decorator for CORS support Home-page: https://github.com/corydolphin/flask-cors Author: Cory Dolphin Author-email: corydolphin@gmail.com License: MIT Platform: any Classifier: Environment :: Web Environment Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content Classifier: Topic :: Software Development :: Libraries :: Python Modules Requires-Dist: Flask (>=0.9) Requires-Dist: Six Flask-CORS ========== |Build Status| |Latest Version| |Supported Python versions| |License| A Flask extension for handling Cross Origin Resource Sharing (CORS), making cross-origin AJAX possible. This package has a simple philosophy: when you want to enable CORS, you wish to enable it for all use cases on a domain. This means no mucking around with different allowed headers, methods, etc. By default, submission of cookies across domains is disabled due to the security implications. Please see the documentation for how to enable credential'ed requests, and please make sure you add some sort of `CSRF <http://en.wikipedia.org/wiki/Cross-site_request_forgery>`__ protection before doing so! Installation ------------ Install the extension with using pip, or easy\_install. .. 
code:: bash $ pip install -U flask-cors Usage ----- This package exposes a Flask extension which by default enables CORS support on all routes, for all origins and methods. It allows parameterization of all CORS headers on a per-resource level. The package also contains a decorator, for those who prefer this approach. Simple Usage ~~~~~~~~~~~~ In the simplest case, initialize the Flask-Cors extension with default arguments in order to allow CORS for all domains on all routes. See the full list of options in the `documentation <https://flask-cors.corydolphin.com/en/latest/api.html#extension>`__. .. code:: python from flask import Flask from flask_cors import CORS app = Flask(__name__) CORS(app) @app.route("/") def helloWorld(): return "Hello, cross-origin-world!" Resource specific CORS ^^^^^^^^^^^^^^^^^^^^^^ Alternatively, you can specify CORS options on a resource and origin level of granularity by passing a dictionary as the `resources` option, mapping paths to a set of options. See the full list of options in the `documentation <https://flask-cors.corydolphin.com/en/latest/api.html#extension>`__. .. code:: python app = Flask(__name__) cors = CORS(app, resources={r"/api/*": {"origins": "*"}}) @app.route("/api/v1/users") def list_users(): return "user example" Route specific CORS via decorator ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ This extension also exposes a simple decorator to decorate flask routes with. Simply add ``@cross_origin()`` below a call to Flask's ``@app.route(..)`` to allow CORS on a given route. See the full list of options in the `decorator documentation <https://flask-cors.corydolphin.com/en/latest/api.html#decorator>`__. .. code:: python @app.route("/") @cross_origin() def helloWorld(): return "Hello, cross-origin-world!" 
Documentation ------------- For a full list of options, please see the full `documentation <https://flask-cors.corydolphin.com/en/latest/api.html>`__ Troubleshooting --------------- If things aren't working as you expect, enable logging to help understand what is going on under the hood, and why. .. code:: python logging.getLogger('flask_cors').level = logging.DEBUG Tests ----- A simple set of tests is included in ``test/``. To run, install nose, and simply invoke ``nosetests`` or ``python setup.py test`` to exercise the tests. Contributing ------------ Questions, comments or improvements? Please create an issue on `Github <https://github.com/corydolphin/flask-cors>`__, tweet at `@corydolphin <https://twitter.com/corydolphin>`__ or send me an email. I do my best to include every contribution proposed in any way that I can. Credits ------- This Flask extension is based upon the `Decorator for the HTTP Access Control <http://flask.pocoo.org/snippets/56/>`__ written by Armin Ronacher. .. |Build Status| image:: https://api.travis-ci.org/corydolphin/flask-cors.svg?branch=master :target: https://travis-ci.org/corydolphin/flask-cors .. |Latest Version| image:: https://img.shields.io/pypi/v/Flask-Cors.svg :target: https://pypi.python.org/pypi/Flask-Cors/ .. |Supported Python versions| image:: https://img.shields.io/pypi/pyversions/Flask-Cors.svg :target: https://img.shields.io/pypi/pyversions/Flask-Cors.svg .. |License| image:: http://img.shields.io/:license-mit-blue.svg :target: https://pypi.python.org/pypi/Flask-Cors/
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/sqlalchemy/interfaces.py
# sqlalchemy/interfaces.py # Copyright (C) 2007-2020 the SQLAlchemy authors and contributors # <see AUTHORS file> # Copyright (C) 2007 Jason Kirtland jek@discorporate.us # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Deprecated core event interfaces. .. deprecated:: 0.7 As of SQLAlchemy 0.7, the new event system described in :ref:`event_toplevel` replaces the extension/proxy/listener system, providing a consistent interface to all events without the need for subclassing. """ from . import event from . import util class PoolListener(object): """Hooks into the lifecycle of connections in a :class:`_pool.Pool`. .. deprecated:: 0.7 :class:`.PoolListener` is deprecated and will be removed in a future release. Please refer to :func:`.event.listen` in conjunction with the :class:`_events.PoolEvents` listener interface. Usage:: class MyListener(PoolListener): def connect(self, dbapi_con, con_record): '''perform connect operations''' # etc. # create a new pool with a listener p = QueuePool(..., listeners=[MyListener()]) # add a listener after the fact p.add_listener(MyListener()) # usage with create_engine() e = create_engine("url://", listeners=[MyListener()]) All of the standard connection :class:`~sqlalchemy.pool.Pool` types can accept event listeners for key connection lifecycle events: creation, pool check-out and check-in. There are no events fired when a connection closes. For any given DB-API connection, there will be one ``connect`` event, `n` number of ``checkout`` events, and either `n` or `n - 1` ``checkin`` events. (If a ``Connection`` is detached from its pool via the ``detach()`` method, it won't be checked back in.) These are low-level events for low-level objects: raw Python DB-API connections, without the conveniences of the SQLAlchemy ``Connection`` wrapper, ``Dialect`` services or ``ClauseElement`` execution. 
If you execute SQL through the connection, explicitly closing all cursors and other resources is recommended. Events also receive a ``_ConnectionRecord``, a long-lived internal ``Pool`` object that basically represents a "slot" in the connection pool. ``_ConnectionRecord`` objects have one public attribute of note: ``info``, a dictionary whose contents are scoped to the lifetime of the DB-API connection managed by the record. You can use this shared storage area however you like. There is no need to subclass ``PoolListener`` to handle events. Any class that implements one or more of these methods can be used as a pool listener. The ``Pool`` will inspect the methods provided by a listener object and add the listener to one or more internal event queues based on its capabilities. In terms of efficiency and function call overhead, you're much better off only providing implementations for the hooks you'll be using. """ @classmethod def _adapt_listener(cls, self, listener): """Adapt a :class:`.PoolListener` to individual :class:`event.Dispatch` events. """ methods = ["connect", "first_connect", "checkout", "checkin"] listener = util.as_interface(listener, methods=methods) for meth in methods: me_meth = getattr(PoolListener, meth) ls_meth = getattr(listener, meth, None) if ls_meth is not None and not util.methods_equivalent( me_meth, ls_meth ): util.warn_deprecated( "PoolListener.%s is deprecated. The " "PoolListener class will be removed in a future " "release. Please transition to the @event interface, " "using @event.listens_for(Engine, '%s')." 
% (meth, meth) ) if hasattr(listener, "connect"): event.listen(self, "connect", listener.connect) if hasattr(listener, "first_connect"): event.listen(self, "first_connect", listener.first_connect) if hasattr(listener, "checkout"): event.listen(self, "checkout", listener.checkout) if hasattr(listener, "checkin"): event.listen(self, "checkin", listener.checkin) def connect(self, dbapi_con, con_record): """Called once for each new DB-API connection or Pool's ``creator()``. dbapi_con A newly connected raw DB-API connection (not a SQLAlchemy ``Connection`` wrapper). con_record The ``_ConnectionRecord`` that persistently manages the connection """ def first_connect(self, dbapi_con, con_record): """Called exactly once for the first DB-API connection. dbapi_con A newly connected raw DB-API connection (not a SQLAlchemy ``Connection`` wrapper). con_record The ``_ConnectionRecord`` that persistently manages the connection """ def checkout(self, dbapi_con, con_record, con_proxy): """Called when a connection is retrieved from the Pool. dbapi_con A raw DB-API connection con_record The ``_ConnectionRecord`` that persistently manages the connection con_proxy The ``_ConnectionFairy`` which manages the connection for the span of the current checkout. If you raise an ``exc.DisconnectionError``, the current connection will be disposed and a fresh connection retrieved. Processing of all checkout listeners will abort and restart using the new connection. """ def checkin(self, dbapi_con, con_record): """Called when a connection returns to the pool. Note that the connection may be closed, and may be None if the connection has been invalidated. ``checkin`` will not be called for detached connections. (They do not return to the pool.) dbapi_con A raw DB-API connection con_record The ``_ConnectionRecord`` that persistently manages the connection """ class ConnectionProxy(object): """Allows interception of statement execution by Connections. .. 
deprecated:: 0.7 :class:`.ConnectionProxy` is deprecated and will be removed in a future release. Please refer to :func:`.event.listen` in conjunction with the :class:`_events.ConnectionEvents` listener interface. Either or both of the ``execute()`` and ``cursor_execute()`` may be implemented to intercept compiled statement and cursor level executions, e.g.:: class MyProxy(ConnectionProxy): def execute(self, conn, execute, clauseelement, *multiparams, **params): print "compiled statement:", clauseelement return execute(clauseelement, *multiparams, **params) def cursor_execute(self, execute, cursor, statement, parameters, context, executemany): print "raw statement:", statement return execute(cursor, statement, parameters, context) The ``execute`` argument is a function that will fulfill the default execution behavior for the operation. The signature illustrated in the example should be used. The proxy is installed into an :class:`~sqlalchemy.engine.Engine` via the ``proxy`` argument:: e = create_engine('someurl://', proxy=MyProxy()) """ @classmethod def _adapt_listener(cls, self, listener): methods = [ "execute", "cursor_execute", "begin", "rollback", "commit", "savepoint", "rollback_savepoint", "release_savepoint", "begin_twophase", "prepare_twophase", "rollback_twophase", "commit_twophase", ] for meth in methods: me_meth = getattr(ConnectionProxy, meth) ls_meth = getattr(listener, meth) if not util.methods_equivalent(me_meth, ls_meth): util.warn_deprecated( "ConnectionProxy.%s is deprecated. The " "ConnectionProxy class will be removed in a future " "release. Please transition to the @event interface, " "using @event.listens_for(Engine, '%s')." 
% (meth, meth) ) def adapt_execute(conn, clauseelement, multiparams, params): def execute_wrapper(clauseelement, *multiparams, **params): return clauseelement, multiparams, params return listener.execute( conn, execute_wrapper, clauseelement, *multiparams, **params ) event.listen(self, "before_execute", adapt_execute) def adapt_cursor_execute( conn, cursor, statement, parameters, context, executemany ): def execute_wrapper(cursor, statement, parameters, context): return statement, parameters return listener.cursor_execute( execute_wrapper, cursor, statement, parameters, context, executemany, ) event.listen(self, "before_cursor_execute", adapt_cursor_execute) def do_nothing_callback(*arg, **kw): pass def adapt_listener(fn): def go(conn, *arg, **kw): fn(conn, do_nothing_callback, *arg, **kw) return util.update_wrapper(go, fn) event.listen(self, "begin", adapt_listener(listener.begin)) event.listen(self, "rollback", adapt_listener(listener.rollback)) event.listen(self, "commit", adapt_listener(listener.commit)) event.listen(self, "savepoint", adapt_listener(listener.savepoint)) event.listen( self, "rollback_savepoint", adapt_listener(listener.rollback_savepoint), ) event.listen( self, "release_savepoint", adapt_listener(listener.release_savepoint), ) event.listen( self, "begin_twophase", adapt_listener(listener.begin_twophase) ) event.listen( self, "prepare_twophase", adapt_listener(listener.prepare_twophase) ) event.listen( self, "rollback_twophase", adapt_listener(listener.rollback_twophase), ) event.listen( self, "commit_twophase", adapt_listener(listener.commit_twophase) ) def execute(self, conn, execute, clauseelement, *multiparams, **params): """Intercept high level execute() events.""" return execute(clauseelement, *multiparams, **params) def cursor_execute( self, execute, cursor, statement, parameters, context, executemany ): """Intercept low-level cursor execute() events.""" return execute(cursor, statement, parameters, context) def begin(self, conn, begin): 
"""Intercept begin() events.""" return begin() def rollback(self, conn, rollback): """Intercept rollback() events.""" return rollback() def commit(self, conn, commit): """Intercept commit() events.""" return commit() def savepoint(self, conn, savepoint, name=None): """Intercept savepoint() events.""" return savepoint(name=name) def rollback_savepoint(self, conn, rollback_savepoint, name, context): """Intercept rollback_savepoint() events.""" return rollback_savepoint(name, context) def release_savepoint(self, conn, release_savepoint, name, context): """Intercept release_savepoint() events.""" return release_savepoint(name, context) def begin_twophase(self, conn, begin_twophase, xid): """Intercept begin_twophase() events.""" return begin_twophase(xid) def prepare_twophase(self, conn, prepare_twophase, xid): """Intercept prepare_twophase() events.""" return prepare_twophase(xid) def rollback_twophase(self, conn, rollback_twophase, xid, is_prepared): """Intercept rollback_twophase() events.""" return rollback_twophase(xid, is_prepared) def commit_twophase(self, conn, commit_twophase, xid, is_prepared): """Intercept commit_twophase() events.""" return commit_twophase(xid, is_prepared)
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/sqlalchemy/log.py
# sqlalchemy/log.py # Copyright (C) 2006-2020 the SQLAlchemy authors and contributors # <see AUTHORS file> # Includes alterations by Vinay Sajip vinay_sajip@yahoo.co.uk # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Logging control and utilities. Control of logging for SA can be performed from the regular python logging module. The regular dotted module namespace is used, starting at 'sqlalchemy'. For class-level logging, the class name is appended. The "echo" keyword parameter, available on SQLA :class:`_engine.Engine` and :class:`_pool.Pool` objects, corresponds to a logger specific to that instance only. """ import logging import sys # set initial level to WARN. This so that # log statements don't occur in the absence of explicit # logging being enabled for 'sqlalchemy'. rootlogger = logging.getLogger("sqlalchemy") if rootlogger.level == logging.NOTSET: rootlogger.setLevel(logging.WARN) def _add_default_handler(logger): handler = logging.StreamHandler(sys.stdout) handler.setFormatter( logging.Formatter("%(asctime)s %(levelname)s %(name)s %(message)s") ) logger.addHandler(handler) _logged_classes = set() def class_logger(cls): logger = logging.getLogger(cls.__module__ + "." + cls.__name__) cls._should_log_debug = lambda self: logger.isEnabledFor(logging.DEBUG) cls._should_log_info = lambda self: logger.isEnabledFor(logging.INFO) cls.logger = logger _logged_classes.add(cls) return cls class Identified(object): logging_name = None def _should_log_debug(self): return self.logger.isEnabledFor(logging.DEBUG) def _should_log_info(self): return self.logger.isEnabledFor(logging.INFO) class InstanceLogger(object): """A logger adapter (wrapper) for :class:`.Identified` subclasses. This allows multiple instances (e.g. Engine or Pool instances) to share a logger, but have its verbosity controlled on a per-instance basis. 
The basic functionality is to return a logging level which is based on an instance's echo setting. Default implementation is: 'debug' -> logging.DEBUG True -> logging.INFO False -> Effective level of underlying logger ( logging.WARNING by default) None -> same as False """ # Map echo settings to logger levels _echo_map = { None: logging.NOTSET, False: logging.NOTSET, True: logging.INFO, "debug": logging.DEBUG, } def __init__(self, echo, name): self.echo = echo self.logger = logging.getLogger(name) # if echo flag is enabled and no handlers, # add a handler to the list if self._echo_map[echo] <= logging.INFO and not self.logger.handlers: _add_default_handler(self.logger) # # Boilerplate convenience methods # def debug(self, msg, *args, **kwargs): """Delegate a debug call to the underlying logger.""" self.log(logging.DEBUG, msg, *args, **kwargs) def info(self, msg, *args, **kwargs): """Delegate an info call to the underlying logger.""" self.log(logging.INFO, msg, *args, **kwargs) def warning(self, msg, *args, **kwargs): """Delegate a warning call to the underlying logger.""" self.log(logging.WARNING, msg, *args, **kwargs) warn = warning def error(self, msg, *args, **kwargs): """ Delegate an error call to the underlying logger. """ self.log(logging.ERROR, msg, *args, **kwargs) def exception(self, msg, *args, **kwargs): """Delegate an exception call to the underlying logger.""" kwargs["exc_info"] = 1 self.log(logging.ERROR, msg, *args, **kwargs) def critical(self, msg, *args, **kwargs): """Delegate a critical call to the underlying logger.""" self.log(logging.CRITICAL, msg, *args, **kwargs) def log(self, level, msg, *args, **kwargs): """Delegate a log call to the underlying logger. The level here is determined by the echo flag as well as that of the underlying logger, and logger._log() is called directly. """ # inline the logic from isEnabledFor(), # getEffectiveLevel(), to avoid overhead. 
if self.logger.manager.disable >= level: return selected_level = self._echo_map[self.echo] if selected_level == logging.NOTSET: selected_level = self.logger.getEffectiveLevel() if level >= selected_level: self.logger._log(level, msg, args, **kwargs) def isEnabledFor(self, level): """Is this logger enabled for level 'level'?""" if self.logger.manager.disable >= level: return False return level >= self.getEffectiveLevel() def getEffectiveLevel(self): """What's the effective level for this logger?""" level = self._echo_map[self.echo] if level == logging.NOTSET: level = self.logger.getEffectiveLevel() return level def instance_logger(instance, echoflag=None): """create a logger for an instance that implements :class:`.Identified`.""" if instance.logging_name: name = "%s.%s.%s" % ( instance.__class__.__module__, instance.__class__.__name__, instance.logging_name, ) else: name = "%s.%s" % ( instance.__class__.__module__, instance.__class__.__name__, ) instance._echo = echoflag if echoflag in (False, None): # if no echo setting or False, return a Logger directly, # avoiding overhead of filtering logger = logging.getLogger(name) else: # if a specified echo flag, return an EchoLogger, # which checks the flag, overrides normal log # levels by calling logger._log() logger = InstanceLogger(echoflag, name) instance.logger = logger class echo_property(object): __doc__ = """\ When ``True``, enable log output for this element. This has the effect of setting the Python logging level for the namespace of this element's class and object reference. A value of boolean ``True`` indicates that the loglevel ``logging.INFO`` will be set for the logger, whereas the string value ``debug`` will set the loglevel to ``logging.DEBUG``. """ def __get__(self, instance, owner): if instance is None: return self else: return instance._echo def __set__(self, instance, value): instance_logger(instance, echoflag=value)
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/sqlalchemy/events.py
# sqlalchemy/events.py # Copyright (C) 2005-2020 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Core event interfaces.""" from . import event from . import exc from . import util from .engine import Connectable from .engine import Dialect from .engine import Engine from .pool import Pool from .sql.base import SchemaEventTarget class DDLEvents(event.Events): """ Define event listeners for schema objects, that is, :class:`.SchemaItem` and other :class:`.SchemaEventTarget` subclasses, including :class:`_schema.MetaData`, :class:`_schema.Table`, :class:`_schema.Column`. :class:`_schema.MetaData` and :class:`_schema.Table` support events specifically regarding when CREATE and DROP DDL is emitted to the database. Attachment events are also provided to customize behavior whenever a child schema element is associated with a parent, such as, when a :class:`_schema.Column` is associated with its :class:`_schema.Table`, when a :class:`_schema.ForeignKeyConstraint` is associated with a :class:`_schema.Table`, etc. Example using the ``after_create`` event:: from sqlalchemy import event from sqlalchemy import Table, Column, Metadata, Integer m = MetaData() some_table = Table('some_table', m, Column('data', Integer)) def after_create(target, connection, **kw): connection.execute("ALTER TABLE %s SET name=foo_%s" % (target.name, target.name)) event.listen(some_table, "after_create", after_create) DDL events integrate closely with the :class:`.DDL` class and the :class:`.DDLElement` hierarchy of DDL clause constructs, which are themselves appropriate as listener callables:: from sqlalchemy import DDL event.listen( some_table, "after_create", DDL("ALTER TABLE %(table)s SET name=foo_%(table)s") ) The methods here define the name of an event as well as the names of members that are passed to listener functions. 
For all :class:`.DDLEvent` events, the ``propagate=True`` keyword argument will ensure that a given event handler is propagated to copies of the object, which are made when using the :meth:`_schema.Table.tometadata` method:: from sqlalchemy import DDL event.listen( some_table, "after_create", DDL("ALTER TABLE %(table)s SET name=foo_%(table)s"), propagate=True ) new_table = some_table.tometadata(new_metadata) The above :class:`.DDL` object will also be associated with the :class:`_schema.Table` object represented by ``new_table``. .. seealso:: :ref:`event_toplevel` :class:`.DDLElement` :class:`.DDL` :ref:`schema_ddl_sequences` """ _target_class_doc = "SomeSchemaClassOrObject" _dispatch_target = SchemaEventTarget def before_create(self, target, connection, **kw): r"""Called before CREATE statements are emitted. :param target: the :class:`_schema.MetaData` or :class:`_schema.Table` object which is the target of the event. :param connection: the :class:`_engine.Connection` where the CREATE statement or statements will be emitted. :param \**kw: additional keyword arguments relevant to the event. The contents of this dictionary may vary across releases, and include the list of tables being generated for a metadata-level event, the checkfirst flag, and other elements used by internal events. :func:`.event.listen` also accepts the ``propagate=True`` modifier for this event; when True, the listener function will be established for any copies made of the target object, i.e. those copies that are generated when :meth:`_schema.Table.tometadata` is used. """ def after_create(self, target, connection, **kw): r"""Called after CREATE statements are emitted. :param target: the :class:`_schema.MetaData` or :class:`_schema.Table` object which is the target of the event. :param connection: the :class:`_engine.Connection` where the CREATE statement or statements have been emitted. :param \**kw: additional keyword arguments relevant to the event. 
The contents of this dictionary may vary across releases, and include the list of tables being generated for a metadata-level event, the checkfirst flag, and other elements used by internal events. :func:`.event.listen` also accepts the ``propagate=True`` modifier for this event; when True, the listener function will be established for any copies made of the target object, i.e. those copies that are generated when :meth:`_schema.Table.tometadata` is used. """ def before_drop(self, target, connection, **kw): r"""Called before DROP statements are emitted. :param target: the :class:`_schema.MetaData` or :class:`_schema.Table` object which is the target of the event. :param connection: the :class:`_engine.Connection` where the DROP statement or statements will be emitted. :param \**kw: additional keyword arguments relevant to the event. The contents of this dictionary may vary across releases, and include the list of tables being generated for a metadata-level event, the checkfirst flag, and other elements used by internal events. :func:`.event.listen` also accepts the ``propagate=True`` modifier for this event; when True, the listener function will be established for any copies made of the target object, i.e. those copies that are generated when :meth:`_schema.Table.tometadata` is used. """ def after_drop(self, target, connection, **kw): r"""Called after DROP statements are emitted. :param target: the :class:`_schema.MetaData` or :class:`_schema.Table` object which is the target of the event. :param connection: the :class:`_engine.Connection` where the DROP statement or statements have been emitted. :param \**kw: additional keyword arguments relevant to the event. The contents of this dictionary may vary across releases, and include the list of tables being generated for a metadata-level event, the checkfirst flag, and other elements used by internal events. 
:func:`.event.listen` also accepts the ``propagate=True`` modifier for this event; when True, the listener function will be established for any copies made of the target object, i.e. those copies that are generated when :meth:`_schema.Table.tometadata` is used. """ def before_parent_attach(self, target, parent): """Called before a :class:`.SchemaItem` is associated with a parent :class:`.SchemaItem`. :param target: the target object :param parent: the parent to which the target is being attached. :func:`.event.listen` also accepts the ``propagate=True`` modifier for this event; when True, the listener function will be established for any copies made of the target object, i.e. those copies that are generated when :meth:`_schema.Table.tometadata` is used. """ def after_parent_attach(self, target, parent): """Called after a :class:`.SchemaItem` is associated with a parent :class:`.SchemaItem`. :param target: the target object :param parent: the parent to which the target is being attached. :func:`.event.listen` also accepts the ``propagate=True`` modifier for this event; when True, the listener function will be established for any copies made of the target object, i.e. those copies that are generated when :meth:`_schema.Table.tometadata` is used. """ def column_reflect(self, inspector, table, column_info): """Called for each unit of 'column info' retrieved when a :class:`_schema.Table` is being reflected. The dictionary of column information as returned by the dialect is passed, and can be modified. The dictionary is that returned in each element of the list returned by :meth:`.reflection.Inspector.get_columns`: * ``name`` - the column's name * ``type`` - the type of this column, which should be an instance of :class:`~sqlalchemy.types.TypeEngine` * ``nullable`` - boolean flag if the column is NULL or NOT NULL * ``default`` - the column's server default value. 
This is normally specified as a plain string SQL expression, however the event can pass a :class:`.FetchedValue`, :class:`.DefaultClause`, or :func:`_expression.text` object as well. .. versionchanged:: 1.1.6 The :meth:`.DDLEvents.column_reflect` event allows a non string :class:`.FetchedValue`, :func:`_expression.text`, or derived object to be specified as the value of ``default`` in the column dictionary. * ``attrs`` - dict containing optional column attributes The event is called before any action is taken against this dictionary, and the contents can be modified. The :class:`_schema.Column` specific arguments ``info``, ``key``, and ``quote`` can also be added to the dictionary and will be passed to the constructor of :class:`_schema.Column`. Note that this event is only meaningful if either associated with the :class:`_schema.Table` class across the board, e.g.:: from sqlalchemy.schema import Table from sqlalchemy import event def listen_for_reflect(inspector, table, column_info): "receive a column_reflect event" # ... event.listen( Table, 'column_reflect', listen_for_reflect) ...or with a specific :class:`_schema.Table` instance using the ``listeners`` argument:: def listen_for_reflect(inspector, table, column_info): "receive a column_reflect event" # ... t = Table( 'sometable', autoload=True, listeners=[ ('column_reflect', listen_for_reflect) ]) This because the reflection process initiated by ``autoload=True`` completes within the scope of the constructor for :class:`_schema.Table`. :func:`.event.listen` also accepts the ``propagate=True`` modifier for this event; when True, the listener function will be established for any copies made of the target object, i.e. those copies that are generated when :meth:`_schema.Table.tometadata` is used. """ class PoolEvents(event.Events): """Available events for :class:`_pool.Pool`. The methods here define the name of an event as well as the names of members that are passed to listener functions. 
e.g.:: from sqlalchemy import event def my_on_checkout(dbapi_conn, connection_rec, connection_proxy): "handle an on checkout event" event.listen(Pool, 'checkout', my_on_checkout) In addition to accepting the :class:`_pool.Pool` class and :class:`_pool.Pool` instances, :class:`_events.PoolEvents` also accepts :class:`_engine.Engine` objects and the :class:`_engine.Engine` class as targets, which will be resolved to the ``.pool`` attribute of the given engine or the :class:`_pool.Pool` class:: engine = create_engine("postgresql://scott:tiger@localhost/test") # will associate with engine.pool event.listen(engine, 'checkout', my_on_checkout) """ _target_class_doc = "SomeEngineOrPool" _dispatch_target = Pool @classmethod def _accept_with(cls, target): if isinstance(target, type): if issubclass(target, Engine): return Pool elif issubclass(target, Pool): return target elif isinstance(target, Engine): return target.pool else: return target def connect(self, dbapi_connection, connection_record): """Called at the moment a particular DBAPI connection is first created for a given :class:`_pool.Pool`. This event allows one to capture the point directly after which the DBAPI module-level ``.connect()`` method has been used in order to produce a new DBAPI connection. :param dbapi_connection: a DBAPI connection. :param connection_record: the :class:`._ConnectionRecord` managing the DBAPI connection. """ def first_connect(self, dbapi_connection, connection_record): """Called exactly once for the first time a DBAPI connection is checked out from a particular :class:`_pool.Pool`. The rationale for :meth:`_events.PoolEvents.first_connect` is to determine information about a particular series of database connections based on the settings used for all connections. 
Since a particular :class:`_pool.Pool` refers to a single "creator" function (which in terms of a :class:`_engine.Engine` refers to the URL and connection options used), it is typically valid to make observations about a single connection that can be safely assumed to be valid about all subsequent connections, such as the database version, the server and client encoding settings, collation settings, and many others. :param dbapi_connection: a DBAPI connection. :param connection_record: the :class:`._ConnectionRecord` managing the DBAPI connection. """ def checkout(self, dbapi_connection, connection_record, connection_proxy): """Called when a connection is retrieved from the Pool. :param dbapi_connection: a DBAPI connection. :param connection_record: the :class:`._ConnectionRecord` managing the DBAPI connection. :param connection_proxy: the :class:`._ConnectionFairy` object which will proxy the public interface of the DBAPI connection for the lifespan of the checkout. If you raise a :class:`~sqlalchemy.exc.DisconnectionError`, the current connection will be disposed and a fresh connection retrieved. Processing of all checkout listeners will abort and restart using the new connection. .. seealso:: :meth:`_events.ConnectionEvents.engine_connect` - a similar event which occurs upon creation of a new :class:`_engine.Connection`. """ def checkin(self, dbapi_connection, connection_record): """Called when a connection returns to the pool. Note that the connection may be closed, and may be None if the connection has been invalidated. ``checkin`` will not be called for detached connections. (They do not return to the pool.) :param dbapi_connection: a DBAPI connection. :param connection_record: the :class:`._ConnectionRecord` managing the DBAPI connection. """ def reset(self, dbapi_connection, connection_record): """Called before the "reset" action occurs for a pooled connection. 
This event represents when the ``rollback()`` method is called on the DBAPI connection before it is returned to the pool. The behavior of "reset" can be controlled, including disabled, using the ``reset_on_return`` pool argument. The :meth:`_events.PoolEvents.reset` event is usually followed by the :meth:`_events.PoolEvents.checkin` event is called, except in those cases where the connection is discarded immediately after reset. :param dbapi_connection: a DBAPI connection. :param connection_record: the :class:`._ConnectionRecord` managing the DBAPI connection. .. seealso:: :meth:`_events.ConnectionEvents.rollback` :meth:`_events.ConnectionEvents.commit` """ def invalidate(self, dbapi_connection, connection_record, exception): """Called when a DBAPI connection is to be "invalidated". This event is called any time the :meth:`._ConnectionRecord.invalidate` method is invoked, either from API usage or via "auto-invalidation", without the ``soft`` flag. The event occurs before a final attempt to call ``.close()`` on the connection occurs. :param dbapi_connection: a DBAPI connection. :param connection_record: the :class:`._ConnectionRecord` managing the DBAPI connection. :param exception: the exception object corresponding to the reason for this invalidation, if any. May be ``None``. .. versionadded:: 0.9.2 Added support for connection invalidation listening. .. seealso:: :ref:`pool_connection_invalidation` """ def soft_invalidate(self, dbapi_connection, connection_record, exception): """Called when a DBAPI connection is to be "soft invalidated". This event is called any time the :meth:`._ConnectionRecord.invalidate` method is invoked with the ``soft`` flag. Soft invalidation refers to when the connection record that tracks this connection will force a reconnect after the current connection is checked in. It does not actively close the dbapi_connection at the point at which it is called. .. 
versionadded:: 1.0.3 """ def close(self, dbapi_connection, connection_record): """Called when a DBAPI connection is closed. The event is emitted before the close occurs. The close of a connection can fail; typically this is because the connection is already closed. If the close operation fails, the connection is discarded. The :meth:`.close` event corresponds to a connection that's still associated with the pool. To intercept close events for detached connections use :meth:`.close_detached`. .. versionadded:: 1.1 """ def detach(self, dbapi_connection, connection_record): """Called when a DBAPI connection is "detached" from a pool. This event is emitted after the detach occurs. The connection is no longer associated with the given connection record. .. versionadded:: 1.1 """ def close_detached(self, dbapi_connection): """Called when a detached DBAPI connection is closed. The event is emitted before the close occurs. The close of a connection can fail; typically this is because the connection is already closed. If the close operation fails, the connection is discarded. .. versionadded:: 1.1 """ class ConnectionEvents(event.Events): """Available events for :class:`.Connectable`, which includes :class:`_engine.Connection` and :class:`_engine.Engine`. The methods here define the name of an event as well as the names of members that are passed to listener functions. 
An event listener can be associated with any :class:`.Connectable` class or instance, such as an :class:`_engine.Engine`, e.g.:: from sqlalchemy import event, create_engine def before_cursor_execute(conn, cursor, statement, parameters, context, executemany): log.info("Received statement: %s", statement) engine = create_engine('postgresql://scott:tiger@localhost/test') event.listen(engine, "before_cursor_execute", before_cursor_execute) or with a specific :class:`_engine.Connection`:: with engine.begin() as conn: @event.listens_for(conn, 'before_cursor_execute') def before_cursor_execute(conn, cursor, statement, parameters, context, executemany): log.info("Received statement: %s", statement) When the methods are called with a `statement` parameter, such as in :meth:`.after_cursor_execute`, :meth:`.before_cursor_execute` and :meth:`.dbapi_error`, the statement is the exact SQL string that was prepared for transmission to the DBAPI ``cursor`` in the connection's :class:`.Dialect`. The :meth:`.before_execute` and :meth:`.before_cursor_execute` events can also be established with the ``retval=True`` flag, which allows modification of the statement and parameters to be sent to the database. The :meth:`.before_cursor_execute` event is particularly useful here to add ad-hoc string transformations, such as comments, to all executions:: from sqlalchemy.engine import Engine from sqlalchemy import event @event.listens_for(Engine, "before_cursor_execute", retval=True) def comment_sql_calls(conn, cursor, statement, parameters, context, executemany): statement = statement + " -- some comment" return statement, parameters .. note:: :class:`_events.ConnectionEvents` can be established on any combination of :class:`_engine.Engine`, :class:`_engine.Connection`, as well as instances of each of those classes. Events across all four scopes will fire off for a given instance of :class:`_engine.Connection`. 
However, for performance reasons, the :class:`_engine.Connection` object determines at instantiation time whether or not its parent :class:`_engine.Engine` has event listeners established. Event listeners added to the :class:`_engine.Engine` class or to an instance of :class:`_engine.Engine` *after* the instantiation of a dependent :class:`_engine.Connection` instance will usually *not* be available on that :class:`_engine.Connection` instance. The newly added listeners will instead take effect for :class:`_engine.Connection` instances created subsequent to those event listeners being established on the parent :class:`_engine.Engine` class or instance. :param retval=False: Applies to the :meth:`.before_execute` and :meth:`.before_cursor_execute` events only. When True, the user-defined event function must have a return value, which is a tuple of parameters that replace the given statement and parameters. See those methods for a description of specific return arguments. """ _target_class_doc = "SomeEngine" _dispatch_target = Connectable @classmethod def _listen(cls, event_key, retval=False): target, identifier, fn = ( event_key.dispatch_target, event_key.identifier, event_key._listen_fn, ) target._has_events = True if not retval: if identifier == "before_execute": orig_fn = fn def wrap_before_execute( conn, clauseelement, multiparams, params ): orig_fn(conn, clauseelement, multiparams, params) return clauseelement, multiparams, params fn = wrap_before_execute elif identifier == "before_cursor_execute": orig_fn = fn def wrap_before_cursor_execute( conn, cursor, statement, parameters, context, executemany ): orig_fn( conn, cursor, statement, parameters, context, executemany, ) return statement, parameters fn = wrap_before_cursor_execute elif retval and identifier not in ( "before_execute", "before_cursor_execute", "handle_error", ): raise exc.ArgumentError( "Only the 'before_execute', " "'before_cursor_execute' and 'handle_error' engine " "event listeners accept the 
'retval=True' " "argument." ) event_key.with_wrapper(fn).base_listen() def before_execute(self, conn, clauseelement, multiparams, params): """Intercept high level execute() events, receiving uncompiled SQL constructs and other objects prior to rendering into SQL. This event is good for debugging SQL compilation issues as well as early manipulation of the parameters being sent to the database, as the parameter lists will be in a consistent format here. This event can be optionally established with the ``retval=True`` flag. The ``clauseelement``, ``multiparams``, and ``params`` arguments should be returned as a three-tuple in this case:: @event.listens_for(Engine, "before_execute", retval=True) def before_execute(conn, clauseelement, multiparams, params): # do something with clauseelement, multiparams, params return clauseelement, multiparams, params :param conn: :class:`_engine.Connection` object :param clauseelement: SQL expression construct, :class:`.Compiled` instance, or string statement passed to :meth:`_engine.Connection.execute`. :param multiparams: Multiple parameter sets, a list of dictionaries. :param params: Single parameter set, a single dictionary. .. seealso:: :meth:`.before_cursor_execute` """ def after_execute(self, conn, clauseelement, multiparams, params, result): """Intercept high level execute() events after execute. :param conn: :class:`_engine.Connection` object :param clauseelement: SQL expression construct, :class:`.Compiled` instance, or string statement passed to :meth:`_engine.Connection.execute`. :param multiparams: Multiple parameter sets, a list of dictionaries. :param params: Single parameter set, a single dictionary. :param result: :class:`_engine.ResultProxy` generated by the execution . 
""" def before_cursor_execute( self, conn, cursor, statement, parameters, context, executemany ): """Intercept low-level cursor execute() events before execution, receiving the string SQL statement and DBAPI-specific parameter list to be invoked against a cursor. This event is a good choice for logging as well as late modifications to the SQL string. It's less ideal for parameter modifications except for those which are specific to a target backend. This event can be optionally established with the ``retval=True`` flag. The ``statement`` and ``parameters`` arguments should be returned as a two-tuple in this case:: @event.listens_for(Engine, "before_cursor_execute", retval=True) def before_cursor_execute(conn, cursor, statement, parameters, context, executemany): # do something with statement, parameters return statement, parameters See the example at :class:`_events.ConnectionEvents`. :param conn: :class:`_engine.Connection` object :param cursor: DBAPI cursor object :param statement: string SQL statement, as to be passed to the DBAPI :param parameters: Dictionary, tuple, or list of parameters being passed to the ``execute()`` or ``executemany()`` method of the DBAPI ``cursor``. In some cases may be ``None``. :param context: :class:`.ExecutionContext` object in use. May be ``None``. :param executemany: boolean, if ``True``, this is an ``executemany()`` call, if ``False``, this is an ``execute()`` call. .. seealso:: :meth:`.before_execute` :meth:`.after_cursor_execute` """ def after_cursor_execute( self, conn, cursor, statement, parameters, context, executemany ): """Intercept low-level cursor execute() events after execution. :param conn: :class:`_engine.Connection` object :param cursor: DBAPI cursor object. Will have results pending if the statement was a SELECT, but these should not be consumed as they will be needed by the :class:`_engine.ResultProxy`. 
:param statement: string SQL statement, as passed to the DBAPI :param parameters: Dictionary, tuple, or list of parameters being passed to the ``execute()`` or ``executemany()`` method of the DBAPI ``cursor``. In some cases may be ``None``. :param context: :class:`.ExecutionContext` object in use. May be ``None``. :param executemany: boolean, if ``True``, this is an ``executemany()`` call, if ``False``, this is an ``execute()`` call. """ @util.deprecated( "0.9", "The :meth:`_events.ConnectionEvents.dbapi_error` " "event is deprecated and will be removed in a future release. " "Please refer to the :meth:`_events.ConnectionEvents.handle_error` " "event.", ) def dbapi_error( self, conn, cursor, statement, parameters, context, exception ): """Intercept a raw DBAPI error. This event is called with the DBAPI exception instance received from the DBAPI itself, *before* SQLAlchemy wraps the exception with it's own exception wrappers, and before any other operations are performed on the DBAPI cursor; the existing transaction remains in effect as well as any state on the cursor. The use case here is to inject low-level exception handling into an :class:`_engine.Engine`, typically for logging and debugging purposes. .. warning:: Code should **not** modify any state or throw any exceptions here as this will interfere with SQLAlchemy's cleanup and error handling routines. For exception modification, please refer to the new :meth:`_events.ConnectionEvents.handle_error` event. Subsequent to this hook, SQLAlchemy may attempt any number of operations on the connection/cursor, including closing the cursor, rolling back of the transaction in the case of connectionless execution, and disposing of the entire connection pool if a "disconnect" was detected. The exception is then wrapped in a SQLAlchemy DBAPI exception wrapper and re-thrown. 
:param conn: :class:`_engine.Connection` object :param cursor: DBAPI cursor object :param statement: string SQL statement, as passed to the DBAPI :param parameters: Dictionary, tuple, or list of parameters being passed to the ``execute()`` or ``executemany()`` method of the DBAPI ``cursor``. In some cases may be ``None``. :param context: :class:`.ExecutionContext` object in use. May be ``None``. :param exception: The **unwrapped** exception emitted directly from the DBAPI. The class here is specific to the DBAPI module in use. """ def handle_error(self, exception_context): r"""Intercept all exceptions processed by the :class:`_engine.Connection`. This includes all exceptions emitted by the DBAPI as well as within SQLAlchemy's statement invocation process, including encoding errors and other statement validation errors. Other areas in which the event is invoked include transaction begin and end, result row fetching, cursor creation. Note that :meth:`.handle_error` may support new kinds of exceptions and new calling scenarios at *any time*. Code which uses this event must expect new calling patterns to be present in minor releases. To support the wide variety of members that correspond to an exception, as well as to allow extensibility of the event without backwards incompatibility, the sole argument received is an instance of :class:`.ExceptionContext`. This object contains data members representing detail about the exception. Use cases supported by this hook include: * read-only, low-level exception handling for logging and debugging purposes * exception re-writing * Establishing or disabling whether a connection or the owning connection pool is invalidated or expired in response to a specific exception. The hook is called while the cursor from the failed operation (if any) is still open and accessible. Special cleanup operations can be called on this cursor; SQLAlchemy will attempt to close this cursor subsequent to this hook being invoked. 
If the connection is in "autocommit" mode, the transaction also remains open within the scope of this hook; the rollback of the per-statement transaction also occurs after the hook is called. For the common case of detecting a "disconnect" situation which is not currently handled by the SQLAlchemy dialect, the :attr:`.ExceptionContext.is_disconnect` flag can be set to True which will cause the exception to be considered as a disconnect situation, which typically results in the connection pool being invalidated:: @event.listens_for(Engine, "handle_error") def handle_exception(context): if isinstance(context.original_exception, pyodbc.Error): for code in ( '08S01', '01002', '08003', '08007', '08S02', '08001', 'HYT00', 'HY010'): if code in str(context.original_exception): context.is_disconnect = True A handler function has two options for replacing the SQLAlchemy-constructed exception into one that is user defined. It can either raise this new exception directly, in which case all further event listeners are bypassed and the exception will be raised, after appropriate cleanup as taken place:: @event.listens_for(Engine, "handle_error") def handle_exception(context): if isinstance(context.original_exception, psycopg2.OperationalError) and \ "failed" in str(context.original_exception): raise MySpecialException("failed operation") .. warning:: Because the :meth:`_events.ConnectionEvents.handle_error` event specifically provides for exceptions to be re-thrown as the ultimate exception raised by the failed statement, **stack traces will be misleading** if the user-defined event handler itself fails and throws an unexpected exception; the stack trace may not illustrate the actual code line that failed! It is advised to code carefully here and use logging and/or inline debugging if unexpected exceptions are occurring. 
Alternatively, a "chained" style of event handling can be used, by configuring the handler with the ``retval=True`` modifier and returning the new exception instance from the function. In this case, event handling will continue onto the next handler. The "chained" exception is available using :attr:`.ExceptionContext.chained_exception`:: @event.listens_for(Engine, "handle_error", retval=True) def handle_exception(context): if context.chained_exception is not None and \ "special" in context.chained_exception.message: return MySpecialException("failed", cause=context.chained_exception) Handlers that return ``None`` may be used within the chain; when a handler returns ``None``, the previous exception instance, if any, is maintained as the current exception that is passed onto the next handler. When a custom exception is raised or returned, SQLAlchemy raises this new exception as-is, it is not wrapped by any SQLAlchemy object. If the exception is not a subclass of :class:`sqlalchemy.exc.StatementError`, certain features may not be available; currently this includes the ORM's feature of adding a detail hint about "autoflush" to exceptions raised within the autoflush process. :param context: an :class:`.ExceptionContext` object. See this class for details on all available members. .. versionadded:: 0.9.7 Added the :meth:`_events.ConnectionEvents.handle_error` hook. .. versionchanged:: 1.1 The :meth:`.handle_error` event will now receive all exceptions that inherit from ``BaseException``, including ``SystemExit`` and ``KeyboardInterrupt``. The setting for :attr:`.ExceptionContext.is_disconnect` is ``True`` in this case and the default for :attr:`.ExceptionContext.invalidate_pool_on_disconnect` is ``False``. .. 
versionchanged:: 1.0.0 The :meth:`.handle_error` event is now invoked when an :class:`_engine.Engine` fails during the initial call to :meth:`_engine.Engine.connect`, as well as when a :class:`_engine.Connection` object encounters an error during a reconnect operation. .. versionchanged:: 1.0.0 The :meth:`.handle_error` event is not fired off when a dialect makes use of the ``skip_user_error_events`` execution option. This is used by dialects which intend to catch SQLAlchemy-specific exceptions within specific operations, such as when the MySQL dialect detects a table not present within the ``has_table()`` dialect method. Prior to 1.0.0, code which implements :meth:`.handle_error` needs to ensure that exceptions thrown in these scenarios are re-raised without modification. """ def engine_connect(self, conn, branch): """Intercept the creation of a new :class:`_engine.Connection`. This event is called typically as the direct result of calling the :meth:`_engine.Engine.connect` method. It differs from the :meth:`_events.PoolEvents.connect` method, which refers to the actual connection to a database at the DBAPI level; a DBAPI connection may be pooled and reused for many operations. In contrast, this event refers only to the production of a higher level :class:`_engine.Connection` wrapper around such a DBAPI connection. It also differs from the :meth:`_events.PoolEvents.checkout` event in that it is specific to the :class:`_engine.Connection` object, not the DBAPI connection that :meth:`_events.PoolEvents.checkout` deals with, although this DBAPI connection is available here via the :attr:`_engine.Connection.connection` attribute. But note there can in fact be multiple :meth:`_events.PoolEvents.checkout` events within the lifespan of a single :class:`_engine.Connection` object, if that :class:`_engine.Connection` is invalidated and re-established. 
There can also be multiple :class:`_engine.Connection` objects generated for the same already-checked-out DBAPI connection, in the case that a "branch" of a :class:`_engine.Connection` is produced. :param conn: :class:`_engine.Connection` object. :param branch: if True, this is a "branch" of an existing :class:`_engine.Connection`. A branch is generated within the course of a statement execution to invoke supplemental statements, most typically to pre-execute a SELECT of a default value for the purposes of an INSERT statement. .. versionadded:: 0.9.0 .. seealso:: :ref:`pool_disconnects_pessimistic` - illustrates how to use :meth:`_events.ConnectionEvents.engine_connect` to transparently ensure pooled connections are connected to the database. :meth:`_events.PoolEvents.checkout` the lower-level pool checkout event for an individual DBAPI connection :meth:`_events.ConnectionEvents.set_connection_execution_options` - a copy of a :class:`_engine.Connection` is also made when the :meth:`_engine.Connection.execution_options` method is called. """ def set_connection_execution_options(self, conn, opts): """Intercept when the :meth:`_engine.Connection.execution_options` method is called. This method is called after the new :class:`_engine.Connection` has been produced, with the newly updated execution options collection, but before the :class:`.Dialect` has acted upon any of those new options. Note that this method is not called when a new :class:`_engine.Connection` is produced which is inheriting execution options from its parent :class:`_engine.Engine`; to intercept this condition, use the :meth:`_events.ConnectionEvents.engine_connect` event. :param conn: The newly copied :class:`_engine.Connection` object :param opts: dictionary of options that were passed to the :meth:`_engine.Connection.execution_options` method. .. versionadded:: 0.9.0 .. 
seealso:: :meth:`_events.ConnectionEvents.set_engine_execution_options` - event which is called when :meth:`_engine.Engine.execution_options` is called. """ def set_engine_execution_options(self, engine, opts): """Intercept when the :meth:`_engine.Engine.execution_options` method is called. The :meth:`_engine.Engine.execution_options` method produces a shallow copy of the :class:`_engine.Engine` which stores the new options. That new :class:`_engine.Engine` is passed here. A particular application of this method is to add a :meth:`_events.ConnectionEvents.engine_connect` event handler to the given :class:`_engine.Engine` which will perform some per- :class:`_engine.Connection` task specific to these execution options. :param conn: The newly copied :class:`_engine.Engine` object :param opts: dictionary of options that were passed to the :meth:`_engine.Connection.execution_options` method. .. versionadded:: 0.9.0 .. seealso:: :meth:`_events.ConnectionEvents.set_connection_execution_options` - event which is called when :meth:`_engine.Connection.execution_options` is called. """ def engine_disposed(self, engine): """Intercept when the :meth:`_engine.Engine.dispose` method is called. The :meth:`_engine.Engine.dispose` method instructs the engine to "dispose" of it's connection pool (e.g. :class:`_pool.Pool`), and replaces it with a new one. Disposing of the old pool has the effect that existing checked-in connections are closed. The new pool does not establish any new connections until it is first used. This event can be used to indicate that resources related to the :class:`_engine.Engine` should also be cleaned up, keeping in mind that the :class:`_engine.Engine` can still be used for new requests in which case it re-acquires connection resources. .. versionadded:: 1.0.5 """ def begin(self, conn): """Intercept begin() events. 
:param conn: :class:`_engine.Connection` object """ def rollback(self, conn): """Intercept rollback() events, as initiated by a :class:`.Transaction`. Note that the :class:`_pool.Pool` also "auto-rolls back" a DBAPI connection upon checkin, if the ``reset_on_return`` flag is set to its default value of ``'rollback'``. To intercept this rollback, use the :meth:`_events.PoolEvents.reset` hook. :param conn: :class:`_engine.Connection` object .. seealso:: :meth:`_events.PoolEvents.reset` """ def commit(self, conn): """Intercept commit() events, as initiated by a :class:`.Transaction`. Note that the :class:`_pool.Pool` may also "auto-commit" a DBAPI connection upon checkin, if the ``reset_on_return`` flag is set to the value ``'commit'``. To intercept this commit, use the :meth:`_events.PoolEvents.reset` hook. :param conn: :class:`_engine.Connection` object """ def savepoint(self, conn, name): """Intercept savepoint() events. :param conn: :class:`_engine.Connection` object :param name: specified name used for the savepoint. """ def rollback_savepoint(self, conn, name, context): """Intercept rollback_savepoint() events. :param conn: :class:`_engine.Connection` object :param name: specified name used for the savepoint. :param context: :class:`.ExecutionContext` in use. May be ``None``. """ def release_savepoint(self, conn, name, context): """Intercept release_savepoint() events. :param conn: :class:`_engine.Connection` object :param name: specified name used for the savepoint. :param context: :class:`.ExecutionContext` in use. May be ``None``. """ def begin_twophase(self, conn, xid): """Intercept begin_twophase() events. :param conn: :class:`_engine.Connection` object :param xid: two-phase XID identifier """ def prepare_twophase(self, conn, xid): """Intercept prepare_twophase() events. :param conn: :class:`_engine.Connection` object :param xid: two-phase XID identifier """ def rollback_twophase(self, conn, xid, is_prepared): """Intercept rollback_twophase() events. 
:param conn: :class:`_engine.Connection` object :param xid: two-phase XID identifier :param is_prepared: boolean, indicates if :meth:`.TwoPhaseTransaction.prepare` was called. """ def commit_twophase(self, conn, xid, is_prepared): """Intercept commit_twophase() events. :param conn: :class:`_engine.Connection` object :param xid: two-phase XID identifier :param is_prepared: boolean, indicates if :meth:`.TwoPhaseTransaction.prepare` was called. """ class DialectEvents(event.Events): """event interface for execution-replacement functions. These events allow direct instrumentation and replacement of key dialect functions which interact with the DBAPI. .. note:: :class:`.DialectEvents` hooks should be considered **semi-public** and experimental. These hooks are not for general use and are only for those situations where intricate re-statement of DBAPI mechanics must be injected onto an existing dialect. For general-use statement-interception events, please use the :class:`_events.ConnectionEvents` interface. .. seealso:: :meth:`_events.ConnectionEvents.before_cursor_execute` :meth:`_events.ConnectionEvents.before_execute` :meth:`_events.ConnectionEvents.after_cursor_execute` :meth:`_events.ConnectionEvents.after_execute` .. versionadded:: 0.9.4 """ _target_class_doc = "SomeEngine" _dispatch_target = Dialect @classmethod def _listen(cls, event_key, retval=False): target = event_key.dispatch_target target._has_events = True event_key.base_listen() @classmethod def _accept_with(cls, target): if isinstance(target, type): if issubclass(target, Engine): return Dialect elif issubclass(target, Dialect): return target elif isinstance(target, Engine): return target.dialect else: return target def do_connect(self, dialect, conn_rec, cargs, cparams): """Receive connection arguments before a connection is made. Return a DBAPI connection to halt further events from invoking; the returned connection will be used. 
Alternatively, the event can manipulate the cargs and/or cparams collections; cargs will always be a Python list that can be mutated in-place and cparams a Python dictionary. Return None to allow control to pass to the next event handler and ultimately to allow the dialect to connect normally, given the updated arguments. .. versionadded:: 1.0.3 .. seealso:: :ref:`custom_dbapi_args` """ def do_executemany(self, cursor, statement, parameters, context): """Receive a cursor to have executemany() called. Return the value True to halt further events from invoking, and to indicate that the cursor execution has already taken place within the event handler. """ def do_execute_no_params(self, cursor, statement, context): """Receive a cursor to have execute() with no parameters called. Return the value True to halt further events from invoking, and to indicate that the cursor execution has already taken place within the event handler. """ def do_execute(self, cursor, statement, parameters, context): """Receive a cursor to have execute() called. Return the value True to halt further events from invoking, and to indicate that the cursor execution has already taken place within the event handler. """ def do_setinputsizes( self, inputsizes, cursor, statement, parameters, context ): """Receive the setinputsizes dictionary for possible modification. This event is emitted in the case where the dialect makes use of the DBAPI ``cursor.setinputsizes()`` method which passes information about parameter binding for a particular statement. The given ``inputsizes`` dictionary will contain :class:`.BindParameter` objects as keys, linked to DBAPI-specific type objects as values; for parameters that are not bound, they are added to the dictionary with ``None`` as the value, which means the parameter will not be included in the ultimate setinputsizes call. The event may be used to inspect and/or log the datatypes that are being bound, as well as to modify the dictionary in place. 
Parameters can be added, modified, or removed from this dictionary. Callers will typically want to inspect the :attr:`.BindParameter.type` attribute of the given bind objects in order to make decisions about the DBAPI object. After the event, the ``inputsizes`` dictionary is converted into an appropriate datastructure to be passed to ``cursor.setinputsizes``; either a list for a positional bound parameter execution style, or a dictionary of string parameter keys to DBAPI type objects for a named bound parameter execution style. Most dialects **do not use** this method at all; the only built-in dialect which uses this hook is the cx_Oracle dialect. The hook here is made available so as to allow customization of how datatypes are set up with the cx_Oracle DBAPI. .. versionadded:: 1.2.9 .. seealso:: :ref:`cx_oracle_setinputsizes` """ pass
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/sqlalchemy/__init__.py
# sqlalchemy/__init__.py # Copyright (C) 2005-2020 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from . import util as _util # noqa from .inspection import inspect # noqa from .schema import BLANK_SCHEMA # noqa from .schema import CheckConstraint # noqa from .schema import Column # noqa from .schema import ColumnDefault # noqa from .schema import Computed # noqa from .schema import Constraint # noqa from .schema import DDL # noqa from .schema import DefaultClause # noqa from .schema import FetchedValue # noqa from .schema import ForeignKey # noqa from .schema import ForeignKeyConstraint # noqa from .schema import IdentityOptions # noqa from .schema import Index # noqa from .schema import MetaData # noqa from .schema import PassiveDefault # noqa from .schema import PrimaryKeyConstraint # noqa from .schema import Sequence # noqa from .schema import Table # noqa from .schema import ThreadLocalMetaData # noqa from .schema import UniqueConstraint # noqa from .sql import alias # noqa from .sql import all_ # noqa from .sql import and_ # noqa from .sql import any_ # noqa from .sql import asc # noqa from .sql import between # noqa from .sql import bindparam # noqa from .sql import case # noqa from .sql import cast # noqa from .sql import collate # noqa from .sql import column # noqa from .sql import delete # noqa from .sql import desc # noqa from .sql import distinct # noqa from .sql import except_ # noqa from .sql import except_all # noqa from .sql import exists # noqa from .sql import extract # noqa from .sql import false # noqa from .sql import func # noqa from .sql import funcfilter # noqa from .sql import insert # noqa from .sql import intersect # noqa from .sql import intersect_all # noqa from .sql import join # noqa from .sql import lateral # noqa from .sql import literal # noqa from .sql import literal_column # noqa from 
.sql import modifier # noqa from .sql import not_ # noqa from .sql import null # noqa from .sql import nullsfirst # noqa from .sql import nullslast # noqa from .sql import or_ # noqa from .sql import outerjoin # noqa from .sql import outparam # noqa from .sql import over # noqa from .sql import select # noqa from .sql import subquery # noqa from .sql import table # noqa from .sql import tablesample # noqa from .sql import text # noqa from .sql import true # noqa from .sql import tuple_ # noqa from .sql import type_coerce # noqa from .sql import union # noqa from .sql import union_all # noqa from .sql import update # noqa from .sql import within_group # noqa from .types import ARRAY # noqa from .types import BIGINT # noqa from .types import BigInteger # noqa from .types import BINARY # noqa from .types import Binary # noqa from .types import BLOB # noqa from .types import BOOLEAN # noqa from .types import Boolean # noqa from .types import CHAR # noqa from .types import CLOB # noqa from .types import DATE # noqa from .types import Date # noqa from .types import DATETIME # noqa from .types import DateTime # noqa from .types import DECIMAL # noqa from .types import Enum # noqa from .types import FLOAT # noqa from .types import Float # noqa from .types import INT # noqa from .types import INTEGER # noqa from .types import Integer # noqa from .types import Interval # noqa from .types import JSON # noqa from .types import LargeBinary # noqa from .types import NCHAR # noqa from .types import NUMERIC # noqa from .types import Numeric # noqa from .types import NVARCHAR # noqa from .types import PickleType # noqa from .types import REAL # noqa from .types import SMALLINT # noqa from .types import SmallInteger # noqa from .types import String # noqa from .types import TEXT # noqa from .types import Text # noqa from .types import TIME # noqa from .types import Time # noqa from .types import TIMESTAMP # noqa from .types import TypeDecorator # noqa from .types import Unicode # 
noqa from .types import UnicodeText # noqa from .types import VARBINARY # noqa from .types import VARCHAR # noqa from .engine import create_engine # noqa nosort from .engine import engine_from_config # noqa nosort __version__ = '1.3.18' def __go(lcls): global __all__ from . import events # noqa from . import util as _sa_util import inspect as _inspect __all__ = sorted( name for name, obj in lcls.items() if not (name.startswith("_") or _inspect.ismodule(obj)) ) _sa_util.dependencies.resolve_all("sqlalchemy") from . import exc exc._version_token = "".join(__version__.split(".")[0:2]) __go(locals())
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/sqlalchemy/types.py
# types.py # Copyright (C) 2005-2020 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Compatibility namespace for sqlalchemy.sql.types. """ __all__ = [ "TypeEngine", "TypeDecorator", "UserDefinedType", "INT", "CHAR", "VARCHAR", "NCHAR", "NVARCHAR", "TEXT", "Text", "FLOAT", "NUMERIC", "REAL", "DECIMAL", "TIMESTAMP", "DATETIME", "CLOB", "BLOB", "BINARY", "VARBINARY", "BOOLEAN", "BIGINT", "SMALLINT", "INTEGER", "DATE", "TIME", "String", "Integer", "SmallInteger", "BigInteger", "Numeric", "Float", "DateTime", "Date", "Time", "LargeBinary", "Binary", "Boolean", "Unicode", "Concatenable", "UnicodeText", "PickleType", "Interval", "Enum", "Indexable", "ARRAY", "JSON", ] from .sql.sqltypes import _Binary # noqa from .sql.sqltypes import ARRAY # noqa from .sql.sqltypes import BIGINT # noqa from .sql.sqltypes import BigInteger # noqa from .sql.sqltypes import BINARY # noqa from .sql.sqltypes import Binary # noqa from .sql.sqltypes import BLOB # noqa from .sql.sqltypes import BOOLEAN # noqa from .sql.sqltypes import Boolean # noqa from .sql.sqltypes import CHAR # noqa from .sql.sqltypes import CLOB # noqa from .sql.sqltypes import Concatenable # noqa from .sql.sqltypes import DATE # noqa from .sql.sqltypes import Date # noqa from .sql.sqltypes import DATETIME # noqa from .sql.sqltypes import DateTime # noqa from .sql.sqltypes import DECIMAL # noqa from .sql.sqltypes import Enum # noqa from .sql.sqltypes import FLOAT # noqa from .sql.sqltypes import Float # noqa from .sql.sqltypes import Indexable # noqa from .sql.sqltypes import INT # noqa from .sql.sqltypes import INTEGER # noqa from .sql.sqltypes import Integer # noqa from .sql.sqltypes import Interval # noqa from .sql.sqltypes import JSON # noqa from .sql.sqltypes import LargeBinary # noqa from .sql.sqltypes import MatchType # noqa from .sql.sqltypes import NCHAR # noqa from 
.sql.sqltypes import NULLTYPE # noqa from .sql.sqltypes import NullType # noqa from .sql.sqltypes import NUMERIC # noqa from .sql.sqltypes import Numeric # noqa from .sql.sqltypes import NVARCHAR # noqa from .sql.sqltypes import PickleType # noqa from .sql.sqltypes import REAL # noqa from .sql.sqltypes import SchemaType # noqa from .sql.sqltypes import SMALLINT # noqa from .sql.sqltypes import SmallInteger # noqa from .sql.sqltypes import String # noqa from .sql.sqltypes import STRINGTYPE # noqa from .sql.sqltypes import TEXT # noqa from .sql.sqltypes import Text # noqa from .sql.sqltypes import TIME # noqa from .sql.sqltypes import Time # noqa from .sql.sqltypes import TIMESTAMP # noqa from .sql.sqltypes import Unicode # noqa from .sql.sqltypes import UnicodeText # noqa from .sql.sqltypes import VARBINARY # noqa from .sql.sqltypes import VARCHAR # noqa from .sql.type_api import adapt_type # noqa from .sql.type_api import to_instance # noqa from .sql.type_api import TypeDecorator # noqa from .sql.type_api import TypeEngine # noqa from .sql.type_api import UserDefinedType # noqa from .sql.type_api import Variant # noqa
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/sqlalchemy/inspection.py
# sqlalchemy/inspect.py # Copyright (C) 2005-2020 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """The inspection module provides the :func:`_sa.inspect` function, which delivers runtime information about a wide variety of SQLAlchemy objects, both within the Core as well as the ORM. The :func:`_sa.inspect` function is the entry point to SQLAlchemy's public API for viewing the configuration and construction of in-memory objects. Depending on the type of object passed to :func:`_sa.inspect`, the return value will either be a related object which provides a known interface, or in many cases it will return the object itself. The rationale for :func:`_sa.inspect` is twofold. One is that it replaces the need to be aware of a large variety of "information getting" functions in SQLAlchemy, such as :meth:`_reflection.Inspector.from_engine`, :func:`.orm.attributes.instance_state`, :func:`_orm.class_mapper`, and others. The other is that the return value of :func:`_sa.inspect` is guaranteed to obey a documented API, thus allowing third party tools which build on top of SQLAlchemy configurations to be constructed in a forwards-compatible way. """ from . import exc from . import util _registrars = util.defaultdict(list) def inspect(subject, raiseerr=True): """Produce an inspection object for the given target. The returned value in some cases may be the same object as the one given, such as if a :class:`_orm.Mapper` object is passed. In other cases, it will be an instance of the registered inspection type for the given object, such as if an :class:`_engine.Engine` is passed, an :class:`_reflection.Inspector` object is returned. :param subject: the subject to be inspected. :param raiseerr: When ``True``, if the given subject does not correspond to a known SQLAlchemy inspected type, :class:`sqlalchemy.exc.NoInspectionAvailable` is raised. 
If ``False``, ``None`` is returned. """ type_ = type(subject) for cls in type_.__mro__: if cls in _registrars: reg = _registrars[cls] if reg is True: return subject ret = reg(subject) if ret is not None: break else: reg = ret = None if raiseerr and (reg is None or ret is None): raise exc.NoInspectionAvailable( "No inspection system is " "available for object of type %s" % type_ ) return ret def _inspects(*types): def decorate(fn_or_cls): for type_ in types: if type_ in _registrars: raise AssertionError( "Type %s is already " "registered" % type_ ) _registrars[type_] = fn_or_cls return fn_or_cls return decorate def _self_inspects(cls): _inspects(cls)(True) return cls
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/sqlalchemy/exc.py
# sqlalchemy/exc.py # Copyright (C) 2005-2020 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Exceptions used with SQLAlchemy. The base exception class is :exc:`.SQLAlchemyError`. Exceptions which are raised as a result of DBAPI exceptions are all subclasses of :exc:`.DBAPIError`. """ from .util import compat _version_token = None class SQLAlchemyError(Exception): """Generic error class.""" code = None def __init__(self, *arg, **kw): code = kw.pop("code", None) if code is not None: self.code = code super(SQLAlchemyError, self).__init__(*arg, **kw) def _code_str(self): if not self.code: return "" else: return ( "(Background on this error at: " "http://sqlalche.me/e/%s/%s)" % (_version_token, self.code,) ) def _message(self, as_unicode=compat.py3k): # rules: # # 1. under py2k, for __str__ return single string arg as it was # given without converting to unicode. for __unicode__ # do a conversion but check that it's not unicode already just in # case # # 2. under py3k, single arg string will usually be a unicode # object, but since __str__() must return unicode, check for # bytestring just in case # # 3. for multiple self.args, this is not a case in current # SQLAlchemy though this is happening in at least one known external # library, call str() which does a repr(). 
# if len(self.args) == 1: text = self.args[0] if as_unicode and isinstance(text, compat.binary_types): return compat.decode_backslashreplace(text, "utf-8") else: return self.args[0] else: # this is not a normal case within SQLAlchemy but is here for # compatibility with Exception.args - the str() comes out as # a repr() of the tuple return str(self.args) def _sql_message(self, as_unicode): message = self._message(as_unicode) if self.code: message = "%s %s" % (message, self._code_str()) return message def __str__(self): return self._sql_message(compat.py3k) def __unicode__(self): return self._sql_message(as_unicode=True) class ArgumentError(SQLAlchemyError): """Raised when an invalid or conflicting function argument is supplied. This error generally corresponds to construction time state errors. """ class ObjectNotExecutableError(ArgumentError): """Raised when an object is passed to .execute() that can't be executed as SQL. .. versionadded:: 1.1 """ def __init__(self, target): super(ObjectNotExecutableError, self).__init__( "Not an executable object: %r" % target ) class NoSuchModuleError(ArgumentError): """Raised when a dynamically-loaded module (usually a database dialect) of a particular name cannot be located.""" class NoForeignKeysError(ArgumentError): """Raised when no foreign keys can be located between two selectables during a join.""" class AmbiguousForeignKeysError(ArgumentError): """Raised when more than one foreign key matching can be located between two selectables during a join.""" class CircularDependencyError(SQLAlchemyError): """Raised by topological sorts when a circular dependency is detected. There are two scenarios where this error occurs: * In a Session flush operation, if two objects are mutually dependent on each other, they can not be inserted or deleted via INSERT or DELETE statements alone; an UPDATE will be needed to post-associate or pre-deassociate one of the foreign key constrained values. 
The ``post_update`` flag described at :ref:`post_update` can resolve this cycle. * In a :attr:`_schema.MetaData.sorted_tables` operation, two :class:`_schema.ForeignKey` or :class:`_schema.ForeignKeyConstraint` objects mutually refer to each other. Apply the ``use_alter=True`` flag to one or both, see :ref:`use_alter`. """ def __init__(self, message, cycles, edges, msg=None, code=None): if msg is None: message += " (%s)" % ", ".join(repr(s) for s in cycles) else: message = msg SQLAlchemyError.__init__(self, message, code=code) self.cycles = cycles self.edges = edges def __reduce__(self): return self.__class__, (None, self.cycles, self.edges, self.args[0]) class CompileError(SQLAlchemyError): """Raised when an error occurs during SQL compilation""" class UnsupportedCompilationError(CompileError): """Raised when an operation is not supported by the given compiler. .. seealso:: :ref:`faq_sql_expression_string` :ref:`error_l7de` """ code = "l7de" def __init__(self, compiler, element_type): super(UnsupportedCompilationError, self).__init__( "Compiler %r can't render element of type %s" % (compiler, element_type) ) class IdentifierError(SQLAlchemyError): """Raised when a schema name is beyond the max character limit""" class DisconnectionError(SQLAlchemyError): """A disconnect is detected on a raw DB-API connection. This error is raised and consumed internally by a connection pool. It can be raised by the :meth:`_events.PoolEvents.checkout` event so that the host pool forces a retry; the exception will be caught three times in a row before the pool gives up and raises :class:`~sqlalchemy.exc.InvalidRequestError` regarding the connection attempt. """ invalidate_pool = False class InvalidatePoolError(DisconnectionError): """Raised when the connection pool should invalidate all stale connections. 
A subclass of :class:`_exc.DisconnectionError` that indicates that the disconnect situation encountered on the connection probably means the entire pool should be invalidated, as the database has been restarted. This exception will be handled otherwise the same way as :class:`_exc.DisconnectionError`, allowing three attempts to reconnect before giving up. .. versionadded:: 1.2 """ invalidate_pool = True class TimeoutError(SQLAlchemyError): # noqa """Raised when a connection pool times out on getting a connection.""" class InvalidRequestError(SQLAlchemyError): """SQLAlchemy was asked to do something it can't do. This error generally corresponds to runtime state errors. """ class NoInspectionAvailable(InvalidRequestError): """A subject passed to :func:`sqlalchemy.inspection.inspect` produced no context for inspection.""" class ResourceClosedError(InvalidRequestError): """An operation was requested from a connection, cursor, or other object that's in a closed state.""" class NoSuchColumnError(KeyError, InvalidRequestError): """A nonexistent column is requested from a ``RowProxy``.""" class NoReferenceError(InvalidRequestError): """Raised by ``ForeignKey`` to indicate a reference cannot be resolved.""" class NoReferencedTableError(NoReferenceError): """Raised by ``ForeignKey`` when the referred ``Table`` cannot be located. """ def __init__(self, message, tname): NoReferenceError.__init__(self, message) self.table_name = tname def __reduce__(self): return self.__class__, (self.args[0], self.table_name) class NoReferencedColumnError(NoReferenceError): """Raised by ``ForeignKey`` when the referred ``Column`` cannot be located. 
""" def __init__(self, message, tname, cname): NoReferenceError.__init__(self, message) self.table_name = tname self.column_name = cname def __reduce__(self): return ( self.__class__, (self.args[0], self.table_name, self.column_name), ) class NoSuchTableError(InvalidRequestError): """Table does not exist or is not visible to a connection.""" class UnreflectableTableError(InvalidRequestError): """Table exists but can't be reflected for some reason. .. versionadded:: 1.2 """ class UnboundExecutionError(InvalidRequestError): """SQL was attempted without a database connection to execute it on.""" class DontWrapMixin(object): """A mixin class which, when applied to a user-defined Exception class, will not be wrapped inside of :exc:`.StatementError` if the error is emitted within the process of executing a statement. E.g.:: from sqlalchemy.exc import DontWrapMixin class MyCustomException(Exception, DontWrapMixin): pass class MySpecialType(TypeDecorator): impl = String def process_bind_param(self, value, dialect): if value == 'invalid': raise MyCustomException("invalid!") """ # Moved to orm.exc; compatibility definition installed by orm import until 0.6 UnmappedColumnError = None class StatementError(SQLAlchemyError): """An error occurred during execution of a SQL statement. :class:`StatementError` wraps the exception raised during execution, and features :attr:`.statement` and :attr:`.params` attributes which supply context regarding the specifics of the statement which had an issue. The wrapped exception object is available in the :attr:`.orig` attribute. 
""" statement = None """The string SQL statement being invoked when this exception occurred.""" params = None """The parameter list being used when this exception occurred.""" orig = None """The DBAPI exception object.""" ismulti = None def __init__( self, message, statement, params, orig, hide_parameters=False, code=None, ismulti=None, ): SQLAlchemyError.__init__(self, message, code=code) self.statement = statement self.params = params self.orig = orig self.ismulti = ismulti self.hide_parameters = hide_parameters self.detail = [] def add_detail(self, msg): self.detail.append(msg) def __reduce__(self): return ( self.__class__, ( self.args[0], self.statement, self.params, self.orig, self.hide_parameters, self.ismulti, ), ) def _sql_message(self, as_unicode): from sqlalchemy.sql import util details = [self._message(as_unicode=as_unicode)] if self.statement: if not as_unicode and not compat.py3k: stmt_detail = "[SQL: %s]" % compat.safe_bytestring( self.statement ) else: stmt_detail = "[SQL: %s]" % self.statement details.append(stmt_detail) if self.params: if self.hide_parameters: details.append( "[SQL parameters hidden due to hide_parameters=True]" ) else: params_repr = util._repr_params( self.params, 10, ismulti=self.ismulti ) details.append("[parameters: %r]" % params_repr) code_str = self._code_str() if code_str: details.append(code_str) return "\n".join(["(%s)" % det for det in self.detail] + details) class DBAPIError(StatementError): """Raised when the execution of a database operation fails. Wraps exceptions raised by the DB-API underlying the database operation. Driver-specific implementations of the standard DB-API exception types are wrapped by matching sub-types of SQLAlchemy's :class:`DBAPIError` when possible. DB-API's ``Error`` type maps to :class:`DBAPIError` in SQLAlchemy, otherwise the names are identical. Note that there is no guarantee that different DB-API implementations will raise the same exception type for any given error condition. 
:class:`DBAPIError` features :attr:`~.StatementError.statement` and :attr:`~.StatementError.params` attributes which supply context regarding the specifics of the statement which had an issue, for the typical case when the error was raised within the context of emitting a SQL statement. The wrapped exception object is available in the :attr:`~.StatementError.orig` attribute. Its type and properties are DB-API implementation specific. """ code = "dbapi" @classmethod def instance( cls, statement, params, orig, dbapi_base_err, hide_parameters=False, connection_invalidated=False, dialect=None, ismulti=None, ): # Don't ever wrap these, just return them directly as if # DBAPIError didn't exist. if ( isinstance(orig, BaseException) and not isinstance(orig, Exception) ) or isinstance(orig, DontWrapMixin): return orig if orig is not None: # not a DBAPI error, statement is present. # raise a StatementError if isinstance(orig, SQLAlchemyError) and statement: return StatementError( "(%s.%s) %s" % ( orig.__class__.__module__, orig.__class__.__name__, orig.args[0], ), statement, params, orig, hide_parameters=hide_parameters, code=orig.code, ismulti=ismulti, ) elif not isinstance(orig, dbapi_base_err) and statement: return StatementError( "(%s.%s) %s" % ( orig.__class__.__module__, orig.__class__.__name__, orig, ), statement, params, orig, hide_parameters=hide_parameters, ismulti=ismulti, ) glob = globals() for super_ in orig.__class__.__mro__: name = super_.__name__ if dialect: name = dialect.dbapi_exception_translation_map.get( name, name ) if name in glob and issubclass(glob[name], DBAPIError): cls = glob[name] break return cls( statement, params, orig, connection_invalidated=connection_invalidated, hide_parameters=hide_parameters, code=cls.code, ismulti=ismulti, ) def __reduce__(self): return ( self.__class__, ( self.statement, self.params, self.orig, self.hide_parameters, self.connection_invalidated, self.ismulti, ), ) def __init__( self, statement, params, orig, 
hide_parameters=False, connection_invalidated=False, code=None, ismulti=None, ): try: text = str(orig) except Exception as e: text = "Error in str() of DB-API-generated exception: " + str(e) StatementError.__init__( self, "(%s.%s) %s" % (orig.__class__.__module__, orig.__class__.__name__, text), statement, params, orig, hide_parameters, code=code, ismulti=ismulti, ) self.connection_invalidated = connection_invalidated class InterfaceError(DBAPIError): """Wraps a DB-API InterfaceError.""" code = "rvf5" class DatabaseError(DBAPIError): """Wraps a DB-API DatabaseError.""" code = "4xp6" class DataError(DatabaseError): """Wraps a DB-API DataError.""" code = "9h9h" class OperationalError(DatabaseError): """Wraps a DB-API OperationalError.""" code = "e3q8" class IntegrityError(DatabaseError): """Wraps a DB-API IntegrityError.""" code = "gkpj" class InternalError(DatabaseError): """Wraps a DB-API InternalError.""" code = "2j85" class ProgrammingError(DatabaseError): """Wraps a DB-API ProgrammingError.""" code = "f405" class NotSupportedError(DatabaseError): """Wraps a DB-API NotSupportedError.""" code = "tw8g" # Warnings class SADeprecationWarning(DeprecationWarning): """Issued once per usage of a deprecated API.""" class SAPendingDeprecationWarning(PendingDeprecationWarning): """Issued once per usage of a deprecated API.""" class SAWarning(RuntimeWarning): """Issued at runtime."""
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/sqlalchemy/processors.py
# sqlalchemy/processors.py # Copyright (C) 2010-2020 the SQLAlchemy authors and contributors # <see AUTHORS file> # Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """defines generic type conversion functions, as used in bind and result processors. They all share one common characteristic: None is passed through unchanged. """ import codecs import datetime import re from . import util def str_to_datetime_processor_factory(regexp, type_): rmatch = regexp.match # Even on python2.6 datetime.strptime is both slower than this code # and it does not support microseconds. has_named_groups = bool(regexp.groupindex) def process(value): if value is None: return None else: try: m = rmatch(value) except TypeError as err: util.raise_( ValueError( "Couldn't parse %s string '%r' " "- value is not a string." % (type_.__name__, value) ), from_=err, ) if m is None: raise ValueError( "Couldn't parse %s string: " "'%s'" % (type_.__name__, value) ) if has_named_groups: groups = m.groupdict(0) return type_( **dict( list( zip( iter(groups.keys()), list(map(int, iter(groups.values()))), ) ) ) ) else: return type_(*list(map(int, m.groups(0)))) return process def py_fallback(): def to_unicode_processor_factory(encoding, errors=None): decoder = codecs.getdecoder(encoding) def process(value): if value is None: return None else: # decoder returns a tuple: (value, len). Simply dropping the # len part is safe: it is done that way in the normal # 'xx'.decode(encoding) code path. return decoder(value, errors)[0] return process def to_conditional_unicode_processor_factory(encoding, errors=None): decoder = codecs.getdecoder(encoding) def process(value): if value is None: return None elif isinstance(value, util.text_type): return value else: # decoder returns a tuple: (value, len). 
Simply dropping the # len part is safe: it is done that way in the normal # 'xx'.decode(encoding) code path. return decoder(value, errors)[0] return process def to_decimal_processor_factory(target_class, scale): fstring = "%%.%df" % scale def process(value): if value is None: return None else: return target_class(fstring % value) return process def to_float(value): # noqa if value is None: return None else: return float(value) def to_str(value): # noqa if value is None: return None else: return str(value) def int_to_boolean(value): # noqa if value is None: return None else: return bool(value) DATETIME_RE = re.compile( r"(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)(?:\.(\d+))?" ) TIME_RE = re.compile(r"(\d+):(\d+):(\d+)(?:\.(\d+))?") DATE_RE = re.compile(r"(\d+)-(\d+)-(\d+)") str_to_datetime = str_to_datetime_processor_factory( # noqa DATETIME_RE, datetime.datetime ) str_to_time = str_to_datetime_processor_factory( # noqa TIME_RE, datetime.time ) # noqa str_to_date = str_to_datetime_processor_factory( # noqa DATE_RE, datetime.date ) # noqa return locals() try: from sqlalchemy.cprocessors import DecimalResultProcessor # noqa from sqlalchemy.cprocessors import int_to_boolean # noqa from sqlalchemy.cprocessors import str_to_date # noqa from sqlalchemy.cprocessors import str_to_datetime # noqa from sqlalchemy.cprocessors import str_to_time # noqa from sqlalchemy.cprocessors import to_float # noqa from sqlalchemy.cprocessors import to_str # noqa from sqlalchemy.cprocessors import UnicodeResultProcessor # noqa def to_unicode_processor_factory(encoding, errors=None): if errors is not None: return UnicodeResultProcessor(encoding, errors).process else: return UnicodeResultProcessor(encoding).process def to_conditional_unicode_processor_factory(encoding, errors=None): if errors is not None: return UnicodeResultProcessor(encoding, errors).conditional_process else: return UnicodeResultProcessor(encoding).conditional_process def to_decimal_processor_factory(target_class, scale): # Note 
that the scale argument is not taken into account for integer # values in the C implementation while it is in the Python one. # For example, the Python implementation might return # Decimal('5.00000') whereas the C implementation will # return Decimal('5'). These are equivalent of course. return DecimalResultProcessor(target_class, "%%.%df" % scale).process except ImportError: globals().update(py_fallback())
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/sqlalchemy/schema.py
# schema.py # Copyright (C) 2005-2020 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Compatibility namespace for sqlalchemy.sql.schema and related. """ from .sql.base import SchemaVisitor # noqa from .sql.ddl import _CreateDropBase # noqa from .sql.ddl import _DDLCompiles # noqa from .sql.ddl import _DropView # noqa from .sql.ddl import AddConstraint # noqa from .sql.ddl import CreateColumn # noqa from .sql.ddl import CreateIndex # noqa from .sql.ddl import CreateSchema # noqa from .sql.ddl import CreateSequence # noqa from .sql.ddl import CreateTable # noqa from .sql.ddl import DDL # noqa from .sql.ddl import DDLBase # noqa from .sql.ddl import DDLElement # noqa from .sql.ddl import DropColumnComment # noqa from .sql.ddl import DropConstraint # noqa from .sql.ddl import DropIndex # noqa from .sql.ddl import DropSchema # noqa from .sql.ddl import DropSequence # noqa from .sql.ddl import DropTable # noqa from .sql.ddl import DropTableComment # noqa from .sql.ddl import SetColumnComment # noqa from .sql.ddl import SetTableComment # noqa from .sql.ddl import sort_tables # noqa from .sql.ddl import sort_tables_and_constraints # noqa from .sql.naming import conv # noqa from .sql.schema import _get_table_key # noqa from .sql.schema import BLANK_SCHEMA # noqa from .sql.schema import CheckConstraint # noqa from .sql.schema import Column # noqa from .sql.schema import ColumnCollectionConstraint # noqa from .sql.schema import ColumnCollectionMixin # noqa from .sql.schema import ColumnDefault # noqa from .sql.schema import Computed # noqa from .sql.schema import Constraint # noqa from .sql.schema import DefaultClause # noqa from .sql.schema import DefaultGenerator # noqa from .sql.schema import FetchedValue # noqa from .sql.schema import ForeignKey # noqa from .sql.schema import ForeignKeyConstraint # noqa from .sql.schema import 
Index # noqa from .sql.schema import IdentityOptions # noqa from .sql.schema import MetaData # noqa from .sql.schema import PassiveDefault # noqa from .sql.schema import PrimaryKeyConstraint # noqa from .sql.schema import SchemaItem # noqa from .sql.schema import Sequence # noqa from .sql.schema import Table # noqa from .sql.schema import ThreadLocalMetaData # noqa from .sql.schema import UniqueConstraint # noqa
0
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/sqlalchemy
qxf2_public_repos/what-is-confusing-backend/venv/Lib/site-packages/sqlalchemy/connectors/mxodbc.py
# connectors/mxodbc.py # Copyright (C) 2005-2020 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """ Provide a SQLALchemy connector for the eGenix mxODBC commercial Python adapter for ODBC. This is not a free product, but eGenix provides SQLAlchemy with a license for use in continuous integration testing. This has been tested for use with mxODBC 3.1.2 on SQL Server 2005 and 2008, using the SQL Server Native driver. However, it is possible for this to be used on other database platforms. For more info on mxODBC, see http://www.egenix.com/ """ import re import sys import warnings from . import Connector class MxODBCConnector(Connector): driver = "mxodbc" supports_sane_multi_rowcount = False supports_unicode_statements = True supports_unicode_binds = True supports_native_decimal = True @classmethod def dbapi(cls): # this classmethod will normally be replaced by an instance # attribute of the same name, so this is normally only called once. cls._load_mx_exceptions() platform = sys.platform if platform == "win32": from mx.ODBC import Windows as Module # this can be the string "linux2", and possibly others elif "linux" in platform: from mx.ODBC import unixODBC as Module elif platform == "darwin": from mx.ODBC import iODBC as Module else: raise ImportError("Unrecognized platform for mxODBC import") return Module @classmethod def _load_mx_exceptions(cls): """ Import mxODBC exception classes into the module namespace, as if they had been imported normally. This is done here to avoid requiring all SQLAlchemy users to install mxODBC. 
""" global InterfaceError, ProgrammingError from mx.ODBC import InterfaceError from mx.ODBC import ProgrammingError def on_connect(self): def connect(conn): conn.stringformat = self.dbapi.MIXED_STRINGFORMAT conn.datetimeformat = self.dbapi.PYDATETIME_DATETIMEFORMAT conn.decimalformat = self.dbapi.DECIMAL_DECIMALFORMAT conn.errorhandler = self._error_handler() return connect def _error_handler(self): """ Return a handler that adjusts mxODBC's raised Warnings to emit Python standard warnings. """ from mx.ODBC.Error import Warning as MxOdbcWarning def error_handler(connection, cursor, errorclass, errorvalue): if issubclass(errorclass, MxOdbcWarning): errorclass.__bases__ = (Warning,) warnings.warn( message=str(errorvalue), category=errorclass, stacklevel=2 ) else: raise errorclass(errorvalue) return error_handler def create_connect_args(self, url): r"""Return a tuple of \*args, \**kwargs for creating a connection. The mxODBC 3.x connection constructor looks like this: connect(dsn, user='', password='', clear_auto_commit=1, errorhandler=None) This method translates the values in the provided uri into args and kwargs needed to instantiate an mxODBC Connection. The arg 'errorhandler' is not used by SQLAlchemy and will not be populated. """ opts = url.translate_connect_args(username="user") opts.update(url.query) args = opts.pop("host") opts.pop("port", None) opts.pop("database", None) return (args,), opts def is_disconnect(self, e, connection, cursor): # TODO: eGenix recommends checking connection.closed here # Does that detect dropped connections ? 
if isinstance(e, self.dbapi.ProgrammingError): return "connection already closed" in str(e) elif isinstance(e, self.dbapi.Error): return "[08S01]" in str(e) else: return False def _get_server_version_info(self, connection): # eGenix suggests using conn.dbms_version instead # of what we're doing here dbapi_con = connection.connection version = [] r = re.compile(r"[.\-]") # 18 == pyodbc.SQL_DBMS_VER for n in r.split(dbapi_con.getinfo(18)[1]): try: version.append(int(n)) except ValueError: version.append(n) return tuple(version) def _get_direct(self, context): if context: native_odbc_execute = context.execution_options.get( "native_odbc_execute", "auto" ) # default to direct=True in all cases, is more generally # compatible especially with SQL Server return False if native_odbc_execute is True else True else: return True def do_executemany(self, cursor, statement, parameters, context=None): cursor.executemany( statement, parameters, direct=self._get_direct(context) ) def do_execute(self, cursor, statement, parameters, context=None): cursor.execute(statement, parameters, direct=self._get_direct(context))
0