id
int64 0
843k
| repository_name
stringlengths 7
55
| file_path
stringlengths 9
332
| class_name
stringlengths 3
290
| human_written_code
stringlengths 12
4.36M
| class_skeleton
stringlengths 19
2.2M
| total_program_units
int64 1
9.57k
| total_doc_str
int64 0
4.2k
| AvgCountLine
float64 0
7.89k
| AvgCountLineBlank
float64 0
300
| AvgCountLineCode
float64 0
7.89k
| AvgCountLineComment
float64 0
7.89k
| AvgCyclomatic
float64 0
130
| CommentToCodeRatio
float64 0
176
| CountClassBase
float64 0
48
| CountClassCoupled
float64 0
589
| CountClassCoupledModified
float64 0
581
| CountClassDerived
float64 0
5.37k
| CountDeclInstanceMethod
float64 0
4.2k
| CountDeclInstanceVariable
float64 0
299
| CountDeclMethod
float64 0
4.2k
| CountDeclMethodAll
float64 0
4.2k
| CountLine
float64 1
115k
| CountLineBlank
float64 0
9.01k
| CountLineCode
float64 0
94.4k
| CountLineCodeDecl
float64 0
46.1k
| CountLineCodeExe
float64 0
91.3k
| CountLineComment
float64 0
27k
| CountStmt
float64 1
93.2k
| CountStmtDecl
float64 0
46.1k
| CountStmtExe
float64 0
90.2k
| MaxCyclomatic
float64 0
759
| MaxInheritanceTree
float64 0
16
| MaxNesting
float64 0
34
| SumCyclomatic
float64 0
6k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
143,348 |
KnuVerse/knuverse-sdk-python
|
KnuVerse_knuverse-sdk-python/knuverse/exceptions.py
|
knuverse.exceptions.UnexpectedResponseCodeException
|
class UnexpectedResponseCodeException(Exception):
    """Raised when the server responds with a status code the caller did not expect."""
|
class UnexpectedResponseCodeException(Exception):
    """Raised when the server returns an unexpected response code."""
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 4 | 0 | 1 | 1 | 0 | 3 | 1 | 1 | 0 | 0 | 3 | 0 | 0 |
143,349 |
KnuVerse/knuverse-sdk-python
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/KnuVerse_knuverse-sdk-python/knuverse/knufactor.py
|
knuverse.knufactor.Knufactor
|
class Knufactor:
def __init__(self,
apikey=None,
secret=None,
email=None,
password=None,
server="https://cloud.knuverse.com",
base_uri="/api/v1/"):
if not server.startswith("http://") and not server.startswith("https://"):
# Allow not specifying the HTTP protocol to use. Default to https
server = "https://" + server
self._server = server + base_uri
self._apikey = apikey
self._secret = secret
self._email = email
self._password = password
self._last_auth = None
self._auth_token = None
self._headers = {
"Accept": "application/json",
}
self.version = "1.0.10"
# Private Methods
# ###############
    def _auth(f):
        """
        Makes sure the request has a valid authorization jwt before calling the wrapped function.
        It does this by checking the timestamp of the last jwt and if > 10 minutes have elapsed,
        it refreshes it's existing jwt from the server.
        Args:
            f: Function to wrap
        Returns:
            Function, f
        """
        # NOTE: defined inside the class body and applied below as @_auth;
        # at that point `f` is the undecorated method object.
        @wraps(f)
        def method(self, *args, **kwargs):
            # Refresh when no token has been fetched yet, or when the last
            # one was issued 10 or more minutes ago.
            if not self._auth_token or datetime.utcnow() >= self._last_auth + timedelta(minutes=10):
                # Need to get new jwt
                self.auth_refresh()
            return f(self, *args, **kwargs)
        return method
def _get(self, uri, params=None, headers=None):
if not headers:
headers = {}
headers.update(self._headers)
r = requests.get(self._server + uri, params=params, headers=headers)
return r
def _post(self, uri, body=None, headers=None):
if not headers:
headers = {}
headers.update(self._headers)
headers.update({
"Content-type": "application/json"
})
r = requests.post(self._server + uri, json=body, headers=headers)
return r
def _put(self, uri, body=None, files=None, headers=None):
if not headers:
headers = {}
headers.update(self._headers)
r = requests.put(self._server + uri, json=body,
files=files, headers=headers)
return r
def _delete(self, uri, body=None, headers=None):
if not headers:
headers = {}
headers.update(self._headers)
r = requests.delete(self._server + uri, json=body, headers=headers)
return r
def _head(self, uri, headers=None):
if not headers:
headers = {}
headers.update(self._headers)
r = requests.head(self._server + uri, headers=headers)
return r
@staticmethod
def _create_response(response):
"""
Attempts to decode JSON response.
If encoding fails(due to empty response: 204 No Content, etc), None
Args:
response: Requests response object
Returns: JSON body or None
"""
try:
r = response.json()
except ValueError:
r = None
return r
    @staticmethod
    def _check_response(response, expected):
        """
        Checks if the expected response code matches the actual response code.
        If they're not equal, raises the appropriate exception.
        Args:
            response: Requests response object (not the bare status integer)
            expected: (int) Expected status code
        Raises:
            ex.UnexpectedResponseCodeException: mismatched code below 400
            ex.UnauthorizedException, ex.BadRequestException,
            ex.ForbiddenException, ex.NotFoundException,
            ex.RateLimitedException: for the matching 4xx codes
            ex.InternalServerErrorException: any other code
            (5xx and 4xx codes not mapped above)
        """
        response_code = response.status_code
        if expected == response_code:
            return
        # Mismatched 1xx/2xx/3xx codes are merely "unexpected", not errors.
        if response_code < 400:
            raise ex.UnexpectedResponseCodeException(response.text)
        elif response_code == 401:
            raise ex.UnauthorizedException(response.text)
        elif response_code == 400:
            raise ex.BadRequestException(response.text)
        elif response_code == 403:
            raise ex.ForbiddenException(response.text)
        elif response_code == 404:
            raise ex.NotFoundException(response.text)
        elif response_code == 429:
            raise ex.RateLimitedException(response.text)
        else:
            # 5xx, and any 4xx without a dedicated exception above.
            raise ex.InternalServerErrorException(response.text)
def _client_id(self, client):
# If not formatted like a client ID, assume it's a client name and get the ID.
if not re.match(r"[a-f,0-9]{32}", client):
client = self.client_id(client)
if not client:
raise ex.NotFoundException("%s not found." % client)
return client
# Authentication interfaces
# =========================
    def auth_refresh(self, apikey=None, secret=None, email=None, password=None):
        """
        Renew authentication token manually. Uses POST to /auth interface
        :param apikey: Unique identifier for authorized use of the API
        :type apikey: str or None
        :param secret: The secret password corresponding to the API key.
        :type secret: str or None
        :param email: Email to use for authentication
        :type email: str or None
        :param password: Password corresponding to email
        :type password: str or None
        :Returns: None
        """
        jwt = self.auth_token(apikey=apikey, secret=secret,
                              email=email, password=password)
        # Cache the bearer token and its issue time; methods wrapped with
        # @_auth refresh it once it is 10 minutes old.
        self._headers["Authorization"] = "Bearer %s" % jwt
        self._auth_token = jwt
        self._last_auth = datetime.utcnow()
def auth_token(self, apikey=None, secret=None, email=None, password=None):
"""
Get authentication token. Uses POST to /auth interface.
:Returns: (str) Authentication JWT
"""
if (apikey and secret) or (self._apikey and self._secret):
body = {
"key_id": apikey or self._apikey,
"secret": secret or self._secret
}
elif (email and password) or (self._email and self._password):
body = {
"user": email or self._email,
"password": password or self._password
}
else:
raise Value("No authentication provided.")
response = self._post(url.auth, body=body)
self._check_response(response, 200)
return self._create_response(response).get("jwt")
@_auth
def auth_grant(self, client, role=None, mode=None):
"""
Used to get a grant token. Grant tokens expire after 5 minutes for role "grant_verify" and 10 minutes for the
"grant_enroll" and "grant_enroll_verify" roles. Grant tokens can be used to start enrollments and verifications.
Uses POST to /auth/grant interface
:Args:
* *client*: (str) Client name
:Kwargs:
* *role*: (str or None) The grant token role. Can be "grant_verify", "grant_enroll", or "grant_enroll_verify". If role is not sent in, the role defaults to "grant_verify".
* *mode*: (str or None) The mode to perform actions with. Can be "audiopass" or "audiopin". It defaults to the module setting's "mode_default" if None is passed in.
:Returns: (dictionary) Specified below
:Return Dictionary:
* *jwt* - (str) Grant token that can be used to do verifications
* *mode* - (str) Default enrollment and verification mode for the server. Either "audiopin" or "audiopass"
"""
body = {
"name": client
}
if role:
body["role"] = role
if mode:
body["mode"] = mode
response = self._post(url.auth_grant, body=body)
self._check_response(response, 200)
return self._create_response(response)
# Client interfaces
###################
@_auth
def client_create(self, name, password):
"""
Create a new client. Uses the POST to /clients interface.
:Args:
* *name*: (str) Name of client
* *password*: (str) Password of client
:Returns: (str) ID of the newly created client.
"""
body = {
"name": name,
"password": password
}
response = self._post(url.clients, body=body)
self._check_response(response, 201)
return self._create_response(response).get("client_id")
@_auth
def client_count(self):
"""
Get number of clients. Uses HEAD to /clients interface.
:Returns: (int) Number of clients
"""
response = self._head(url.clients)
self._check_response(response, 200)
return int(response.headers.get("x-client-count", -1))
    @_auth
    def client_list(self, name=None, name_only=None, all_enrolled=None):
        """
        Get list of clients. Uses GET to /clients interface.
        :Kwargs:
            * *name*: (str) If specified, returns the client information for this client only.
            * *name_only*: (bool) If true, returns only the names of the clients requested
            * *all_enrolled*: (bool) If true, will return all enrolled clients
        :Returns: (list) List of dictionaries with the client information as requested.
        """
        params = {}
        if name:  # When specific name value is provided
            params["name"] = name
        # (Boolean) "True": only keyword "name" is provided
        # NOTE(review): this overwrites any `name` set above with an empty
        # string -- presumably an empty "name" param asks the server for
        # names only, making `name` and `name_only` mutually exclusive.
        # Confirm against the API documentation.
        if name_only:
            params["name"] = ""
        # (Boolean) "True": returns all enrolled clients
        if all_enrolled:
            params["all_enrolled"] = all_enrolled
        response = self._get(url.clients, params=params)
        self._check_response(response, 200)
        # A single-client query returns the raw object; otherwise the list
        # is wrapped under the "clients" key.
        if name:
            return response.json()
        return self._create_response(response).get("clients")
@_auth
def client_id(self, client):
"""
Get a client's ID. Uses GET to /clients?name=<client> interface.
:Args:
* *client*: (str) Client's name
:Returns: (str) Client id
"""
params = {
"name": client
}
response = self._get(url.clients, params=params)
self._check_response(response, 200)
return self._create_response(response).get("client_id")
@_auth
def client_info(self, client):
"""
Get client info. Uses GET to /clients/<client> interface.
:Args:
* *client*: (str) Client's ID
:Returns: (dict) Client dictionary
"""
client = self._client_id(client)
response = self._get(url.clients_id.format(id=client))
self._check_response(response, 200)
return self._create_response(response)
@_auth
def client_validate_password(self, client, password):
"""
Validate client's password. Uses PUT to /clients/<client> interface.
:Args:
* *client*: (str) Client's ID
* *password*: (str) Client's Password
"""
client = self._client_id(client)
body = {
"action": "validate_password",
"auth_password": password
}
response = self._put(url.clients_id.format(id=client), body=body)
self._check_response(response, 200)
@_auth
def client_validate_pin(self, client, pin):
"""
Validate client's PIN. Uses PUT to /clients/<client> interface.
:Args:
* *client*: (str) Client's ID
* *pin*: (str) Client's PIN
"""
client = self._client_id(client)
body = {
"action": "validate_pin",
"current_pin": pin
}
response = self._put(url.clients_id.format(id=client), body=body)
self._check_response(response, 200)
@_auth
def client_update(self,
client,
reason=None,
pin=None,
current_pin=None,
verification_speed=None,
row_doubling=None,
password=None,
bypass_expiration=None,
bypass_limit=None,
bypass_spacing_minutes=None,
bypass_code=None,
is_disabled=None,
verification_lock=None,
password_lock=None,
enroll_deadline_extension_minutes=None,
enroll_deadline_enable=None,
windows_profile=None,
role_rationale=None,
role=None,
):
"""
Update client info
Uses PUT to /clients/<client> interface
:Args:
* *client*: (str) Client's ID
:Kwargs:
* *reason*: (str) The reason for changing the client's settings
* *pin*: (str) The new PIN to set
* *current_pin*: (str) The current PIN of the user. Only required if role is not admin and the Account Reset Mode (System Configuration) requires PIN.
* *verification_speed*: (int) The speed at which the verification should appear for the client. Allowed values: 0, 25, 50, 75, 100.
* *row_doubling*: (str) Row doubling is an AudioPIN only option that puts two rows of words in each pinpad digit. Allowed values: "OFF", "TRAIN", "ON"
* *password*: (str) New client password
* *bypass_expiration*: (int) Used to enable/disable a client's bypass. The time, in minutes, from when the request was received until the bypass expires. 0 removes the bypass, while -1 sets a bypass that doesn't expire.
* *bypass_limit*: (int) The number of times a user may bypass. Set to 0 for no limit. If set without either an existing valid bypass_expiration, or providing one in the request, the client's bypass_expiration will be set to 10 mins. Default value: 0. Size range: >=0
* *bypass_spacing_minutes*: (int) Specifies the time, in minutes, the user must wait between using each bypass. Set to 0 for no bypass rate limiting. If set without either an existing valid bypass_expiration, or providing one in the request, the client's bypass_expiration will be set to 10 mins.
* *bypass_code*: (str) The code that the client must enter to bypass.
* *is_disabled*: (bool) If true, the client cannot do verifications (will automatically bypass).
* *verification_lock*: (bool) Unlocks the given client if the client verified incorrectly too many times.
* *password_lock*: (bool) Set to false to unlock a client who enter thier password incorrectly too many times.
* *enroll_deadline_extension_minutes*: (int) Amount of time, in minutes, to extend an enrollment deadline by.
* *enroll_deadline_enable*: (bool) When true, enables the enrollment deadline for a certain client, when false disables an enrollment deadline.
* *windows_profile*: (str) Assigns a Windows Profile to the user using the Windows Profile ID. To remove a profile, send null.
* *role_rationale*: (str) Update the client rationale for a role
* *role*: (str) Update the client role. Note: Google users cannot have their role updated. Allowed values: "admin", "manager", "support", "user".
:More information: Can be found `here <https://cloud.knuverse.com/docs/api/#api-Clients-Update_client_information>`_.
"""
client = self._client_id(client)
body = {}
if reason is not None:
body["reason"] = reason
if pin is not None:
body["pin"] = pin
if current_pin is not None:
body["current_pin"] = current_pin
if verification_speed is not None:
body["verification_speed"] = verification_speed
if row_doubling is not None:
body["row_doubling"] = row_doubling
if password is not None:
body["auth_password"] = self._password
body["password"] = password
if bypass_expiration is not None:
body["bypass_expiration"] = bypass_expiration
if bypass_limit is not None:
body["bypass_limit"] = bypass_limit
if bypass_spacing_minutes is not None:
body["bypass_spacing_minutes"] = bypass_spacing_minutes
if bypass_code is not None:
body["bypass_code"] = bypass_code
if is_disabled is not None:
body["is_disabled"] = is_disabled
if verification_lock is not None:
body["verification_lock"] = verification_lock
if password_lock is not None:
body["password_lock"] = password_lock
if enroll_deadline_extension_minutes is not None:
body["enroll_deadline_extension_minutes"] = enroll_deadline_extension_minutes
if enroll_deadline_enable is not None:
body["enroll_deadline_enable"] = enroll_deadline_enable
if windows_profile is not None:
body["windows_profile"] = windows_profile
if role is not None:
body["auth_password"] = self._password
body["role"] = role
if role_rationale is not None:
body["role_rationale"] = role_rationale
response = self._put(url.clients_id.format(id=client), body=body)
self._check_response(response, 200)
@_auth
def client_unenroll(self, client):
"""
Unenroll a client. Uses DELETE to /clients/<client> interface.
:Args:
* *client*: (str) Client's ID
"""
client = self._client_id(client)
response = self._delete(url.clients_id.format(id=client))
self._check_response(response, 204)
# Enrollment interfaces
#######################
@_auth
def enrollment_resource(self, client, audio=False):
"""
Get Client Enrollment Data. Uses GET to /enrollments/<client> interface.
:Args:
* *client*: (str) Client's ID
* *audio*: (boolean) If True then the enrollment audio is returned.
:Returns: (dictionary) Look `here <https://cloud.knuverse.com/docs/api/#api-Enrollments-Get_enrollment_info>`_ for information on keys and values.
"""
client = self._client_id(client)
params = {}
if audio:
params["audio"] = True
response = self._get(
url.enrollments_id.format(id=client), params=params)
self._check_response(response, 200)
return self._create_response(response)
@_auth
def enrollment_start(
self,
name,
mode=None,
pin=None,
phone_number=None
):
"""
Start Client Enrollment. Uses the POST to /enrollments interface.
:Args:
* *client*: (str) Client's Name
* *mode*: (str) DEPRECATED. Presence of PIN is used to determine mode (AudioPass vs AudioPIN)
* *pin*: (str) Client's PIN. 4 digit string
* *phone_number*: (str) Phone number to call.
:Returns: (dict) Enrollment record with prompts as described `here <https://cloud.knuverse.com/docs/api/#api-Enrollments-Start_enrollment>`_.
"""
data = {
"name": name,
}
if mode:
warning_msg = 'WARNING: The "mode" parameter for enrollment_start is DEPRECATED and will be ignored. ' \
'To avoid incompatibility with a future release please stop providing it.'
print(warning_msg, file=sys.stderr)
if pin:
data["pin"] = pin
if phone_number:
data["phone_number"] = phone_number
response = self._post(url.enrollments, body=data)
self._check_response(response, 201)
return self._create_response(response)
@_auth
def enrollment_upload(
self,
enrollment_id,
audio_file,
):
"""
Upload Enrollment Data. Uses PUT to /enrollments/<enrollment_id> interface.
:Args:
* *enrollment_id*: (str) Enrollment's ID
* *audio_file*: (str) Path to the audio file of the recorded words. Not required for phone enrollments.
"""
files = {
"file": os.path.basename(audio_file),
os.path.basename(audio_file): open(audio_file, 'rb')
}
response = self._put(url.enrollments_id.format(
id=enrollment_id), files=files)
self._check_response(response, 202)
# Event interfaces
# ================
@_auth
def events_client(self, client):
"""
Get a client's events. Uses GET to /events/clients/<client> interface.
:Args:
* *client*: (str) Client's ID
:Returns: (list) Events
"""
# TODO Add paging to this
client = self._client_id(client)
response = self._get(url.events_clients_id.format(id=client))
self._check_response(response, 200)
return self._create_response(response).get("events")
@_auth
def events_clients(self):
"""
Get all client events. Uses GET to /events/clients interface.
:Returns: (list) Events
"""
# TODO Add paging to this
response = self._get(url.events_clients)
self._check_response(response, 200)
return self._create_response(response).get("events")
@_auth
def events_login(self):
"""
Get all login events. Uses GET to /events/login interface.
:Returns: (list) Events
"""
# TODO Add paging to this
response = self._get(url.events_logins)
return self._create_response(response).get("events")
@_auth
def events_system(self):
"""
Get all system events. Uses GET to /events/system interface.
:Returns: (list) Events
"""
# TODO Add paging to this
response = self._get(url.events_system)
self._check_response(response, 200)
return self._create_response(response).get("events")
# General interfaces
# ==================
def about(self):
"""
Get server info. Uses GET to /about interface
:returns: dict - Server information
"""
response = self._get(url.about)
self._check_response(response, 200)
return self._create_response(response)
@_auth
def status(self):
"""
Get server status. Uses GET to /status interface.
:Returns: (dict) Server status as described `here <https://cloud.knuverse.com/docs/api/#api-General-Status>`_.
"""
response = self._get(url.status)
self._check_response(response, 200)
return self._create_response(response)
@_auth
def warnings(self):
"""
Get server system warnings. Uses GET to /status/warnings.
:returns: (dict) Server messages and warnings as described `here <https://cloud.knuverse.com/docs/api/#api-General-Warnings>`_.
"""
response = self._get(url.status_warnings)
self._check_response(response, 200)
return self._create_response(response)
# System Modules interfaces
###########################
@_auth
def module_settings(self):
"""
Get Module settings. Uses GET to /settings/modules interface.
:Returns: (dict) Module settings as shown `here <https://cloud.knuverse.com/docs/api/#api-Module_Settings-Get_the_module_settings>`_.
"""
response = self._get(url.settings_modules)
self._check_response(response, 200)
return self._create_response(response)
@_auth
def settings_module_update(self,
mode_audiopin_enable=None,
mode_audiopass_enable=None,
mode_default=None):
"""
Set Module settings. Uses PUT to /settings/modules interface.
:Args:
* *mode_audiopin_enable*: (bool) Turn on and off the AudioPIN feature
* *mode_audiopass_enable*: (bool) Turn on and off the AudioPass feature
* *mode_default*: (str) Set the default verification mode. Either 'audiopin' or 'audiopass'.
:Returns: None
"""
body = {
"auth_password": self._password
}
if mode_audiopin_enable:
body["mode_audiopin_enable"] = mode_audiopin_enable
if mode_audiopass_enable:
body["mode_audiopass_enable"] = mode_audiopass_enable
if mode_default:
body["mode_default"] = mode_default
response = self._put(url.settings_modules, body=body)
self._check_response(response, 200)
@_auth
def settings_module_reset(self):
"""
Resets the module settings back to default. Uses DELETE to /settings/modules interface.
"""
data = {
"auth_password": self._password
}
response = self._delete(url.settings_modules, body=data)
self._check_response(response, 204)
# Report generation interfaces
##############################
@staticmethod
def _format_input_dates(start_date, end_date):
if not isinstance(start_date, datetime) or not isinstance(end_date, datetime):
raise TypeError("Start date and end date must be datetime objects")
start_str = start_date.strftime("%Y-%m-%d %H:%M:%S")
end_str = end_date.strftime("%Y-%m-%d %H:%M:%S")
return start_str, end_str
@_auth
def report_events(self, start_date, end_date, type="system"):
"""
Create a report for all client events or all system events.
Uses GET to /reports/events/{clients,system} interface
:Args:
* *start_date*: (datetime) Start time for report generation
* *end_date*: (datetime) End time for report generation
:Kwargs:
* *type*: (str) Type of event report to create. "system" or "clients"
:Returns: (list) List of events in the input range
"""
start_str, end_str = self._format_input_dates(start_date, end_date)
params = {
"start_date": start_str,
"end_date": end_str
}
endpoint = url.reports_events_clients if type == "clients" else url.reports_events_system
response = self._get(endpoint, params=params)
self._check_response(response, 200)
return self._create_response(response).get("events")
@_auth
def report_verifications(self, start_date, end_date):
"""
Create a report for all verifications. Uses GET to /reports/verifications interface
:Args:
* *start_date*: (datetime) Start time for report generation
* *end_date*: (datetime) End time for report generation
:Returns: (str) CSV formatted report string
"""
start_str, end_str = self._format_input_dates(start_date, end_date)
params = {
"start_date": start_str,
"end_date": end_str
}
response = self._get(url.reports_verifications, params=params)
self._check_response(response, 200)
return self._create_response(response)
# System Settings interfaces
############################
@_auth
def settings_system(self):
"""
Get system settings. Uses GET to /settings/system interface.
:Returns: (dict) System settings as shown `here <https://cloud.knuverse.com/docs/api/#api-System_Settings-Get_System_Settings>`_.
"""
response = self._get(url.settings_system)
self._check_response(response, 200)
return self._create_response(response)
@_auth
def settings_system_update(self, data):
"""
Set system settings. Uses PUT to /settings/system interface
:Args:
* *data*: (dict) Settings dictionary as specified `here <https://cloud.knuverse.com/docs/api/#api-System_Settings-Set_System_Settings>`_.
:Returns: None
"""
data["auth_password"] = self._password
response = self._put(url.settings_system, body=data)
self._check_response(response, 200)
@_auth
def settings_system_reset(self):
"""
Resets the system settings back to default. Uses DELETE to /settings/system interface.
"""
data = {
"auth_password": self._password
}
response = self._delete(url.settings_system, body=data)
self._check_response(response, 204)
# Verification interfaces
#########################
@_auth
def verification_start(
self,
client,
mode=None,
verification_speed=None,
row_doubling="off",
phone_number=None,
):
"""
Start a verification. Uses POST to /verifications interface.
:Args:
* *client*: (str) Client's Name
* *mode*: (str) Verification Mode. Allowed values: "audiopin", "audiopass"
* *verification_speed*: (int) Allowed values: 0, 25, 50, 75, 100
* *row_doubling*: (str) Allowed values: "off", "train", "on"
* *phone_number*: (str) Phone number to call.
:Returns: (dict) Verification record with animation as discussed `here <https://cloud.knuverse.com/docs/api/#api-Verifications-Start_verification>`_.
"""
data = {
"name": client,
"user_agent": "knuverse-sdk-python-v%s" % self.version
}
if mode:
data["mode"] = mode
if phone_number:
data["phone_number"] = phone_number
if verification_speed:
data["verification_speed"] = verification_speed
if row_doubling:
data["row_doubling"] = row_doubling
response = self._post(url.verifications, body=data)
self._check_response(response, 201)
return self._create_response(response)
@_auth
def verification_upload(
self,
verification_id,
audio_file=None,
bypass=False,
bypass_pin=None,
bypass_code=None,
):
"""
Upload verification data. Uses PUT to /verfications/<verification_id> interface
:Args:
* *verification_id*: (str) Verification ID
* *audio_file*: (str) Path to the audio file of the recorded words. Not required for phone verifications.
* *bypass*: (boolean) True if using a bypass code or pin to verify
* *bypass_pin*: (str) Client's PIN if this is a bypass
* *bypass_code*: (str) Client's bypass code if this is a bypass
"""
files = {}
if audio_file:
files[os.path.basename(audio_file)] = open(audio_file, 'rb')
files["file"] = os.path.basename(audio_file)
elif bypass:
files["bypass"] = True
files["bypass_code"] = bypass_code
files["pin"] = bypass_pin
response = self._put(url.verifications_id.format(
id=verification_id), files=files)
self._check_response(response, 202)
return self._create_response(response)
@_auth
def verification_cancel(self, verification_id, reason=None):
"""
Cancels a started verification. Uses PUT to /verifications/<verification_id> interface
:Args:
* *verification_id*: (str) Verification ID
:Kwargs:
* *reason*: (str) Reason for cancelling the verification
:Returns: None
"""
data = {
"cancel": True,
"cancel_reason": reason
}
response = self._put(url.verifications_id.format(
id=verification_id), body=data)
self._check_response(response, 202)
@_auth
def verification_delete(self, verification_id):
"""
Remove verification. Uses DELETE to /verifications/<verification_id> interface.
:Args:
* *verification_id*: (str) Verification ID
"""
response = self._delete(
url.verifications_id.format(id=verification_id))
self._check_response(response, 204)
@_auth
def verification_count(self):
"""
Get Verification Count. Uses HEAD to /verifications interface.
:Returns: (int) Number of verifications
"""
response = self._head(url.verifications)
self._check_response(response, 200)
return int(response.headers.get('x-verification-count', -1))
@_auth
def verification_list(self, limit=10):
"""
Get list of verifications. Uses GET to /verifications interface.
:Returns: (list) Verification list as specified `here <https://cloud.knuverse.com/docs/api/#api-Verifications-Get_verification_list>`_.
"""
# TODO add arguments for paging and stuff
params = {}
params["limit"] = limit
response = self._get(url.verifications, params=params)
self._check_response(response, 200)
return self._create_response(response).get("verifications")
@_auth
def verification_resource(self, verification_id, audio=False):
"""
Get Verification Resource. Uses GET to /verifications/<verification_id> interface.
:Args:
* *verification_id*: (str) Verification ID
* *audio*: (boolean) If True, audio data associated with verification will be returned.
:Returns: (dict) Verification data as shown `here <https://cloud.knuverse.com/docs/api/#api-Verifications-Get_verification_info>`_.
"""
params = {}
if audio:
params["audio"] = True
response = self._get(url.verifications_id.format(
id=verification_id), params=params)
self._check_response(response, 200)
return self._create_response(response)
@_auth
def verification_resource_secure(self, verification_id, jwt, name):
"""
Get Verification Resource.
Uses GET to /verifications/<verification_id> interface
Use this method rather than verification_resource when adding a second factor to your application.
See `this <https://cloud.knuverse.com/docs/integration/>`_ for more information.
:Args:
* *verification_id*: (str) Verification ID
* *jwt*: (str) Completion token received from application
* *name*: (str) Client name associated with the jwt. Received from application.
:Returns: (dict) Verification data as shown `here <https://cloud.knuverse.com/docs/api/#api-Verifications-Get_verification_info>`_.
"""
params = {
"jwt": jwt,
"name": name
}
response = self._get(url.verifications_id.format(
id=verification_id), params=params)
self._check_response(response, 200)
return self._create_response(response)
|
class Knufactor:
def __init__(self,
apikey=None,
secret=None,
email=None,
password=None,
server="https://cloud.knuverse.com",
base_uri="/api/v1/"):
pass
def _auth(f):
'''
Makes sure the request has a valid authorization jwt before calling the wrapped function.
It does this by checking the timestamp of the last jwt and if > 10 minutes have elapsed,
it refreshes it's existing jwt from the server.
Args:
f: Function to wrap
Returns:
Function, f
'''
pass
@wraps(f)
def method(self, *args, **kwargs):
pass
def _get(self, uri, params=None, headers=None):
pass
def _post(self, uri, body=None, headers=None):
pass
def _put(self, uri, body=None, files=None, headers=None):
pass
def _delete(self, uri, body=None, headers=None):
pass
def _head(self, uri, headers=None):
pass
@staticmethod
def _create_response(response):
'''
Attempts to decode JSON response.
If encoding fails(due to empty response: 204 No Content, etc), None
Args:
response: Requests response object
Returns: JSON body or None
'''
pass
@staticmethod
def _check_response(response, expected):
'''
Checks if the expected response code matches the actual response code.
If they're not equal, raises the appropriate exception
Args:
response: (int) Actual status code
expected: (int) Expected status code
'''
pass
def _client_id(self, client):
pass
def auth_refresh(self, apikey=None, secret=None, email=None, password=None):
'''
Renew authentication token manually. Uses POST to /auth interface
:param apikey: Unique identifier for authorized use of the API
:type apikey: str or None
:param secret: The secret password corresponding to the API key.
:type secret: str or None
:param email: Email to use for authentication
:type apikey: str or None
:param apikey: Password corresponding to email
:type apikey: str or None
:Returns: None
'''
pass
def auth_token(self, apikey=None, secret=None, email=None, password=None):
'''
Get authentication token. Uses POST to /auth interface.
:Returns: (str) Authentication JWT
'''
pass
@_auth
def auth_grant(self, client, role=None, mode=None):
'''
Used to get a grant token. Grant tokens expire after 5 minutes for role "grant_verify" and 10 minutes for the
"grant_enroll" and "grant_enroll_verify" roles. Grant tokens can be used to start enrollments and verifications.
Uses POST to /auth/grant interface
:Args:
* *client*: (str) Client name
:Kwargs:
* *role*: (str or None) The grant token role. Can be "grant_verify", "grant_enroll", or "grant_enroll_verify". If role is not sent in, the role defaults to "grant_verify".
* *mode*: (str or None) The mode to perform actions with. Can be "audiopass" or "audiopin". It defaults to the module setting's "mode_default" if None is passed in.
:Returns: (dictionary) Specified below
:Return Dictionary:
* *jwt* - (str) Grant token that can be used to do verifications
* *mode* - (str) Default enrollment and verification mode for the server. Either "audiopin" or "audiopass"
'''
pass
@_auth
def client_create(self, name, password):
'''
Create a new client. Uses the POST to /clients interface.
:Args:
* *name*: (str) Name of client
* *password*: (str) Password of client
:Returns: (str) ID of the newly created client.
'''
pass
@_auth
def client_count(self):
'''
Get number of clients. Uses HEAD to /clients interface.
:Returns: (int) Number of clients
'''
pass
@_auth
def client_list(self, name=None, name_only=None, all_enrolled=None):
'''
Get list of clients. Uses GET to /clients interface.
:Kwargs:
* *name*: (str) If specified, returns the client information for this client only.
* *name_only*: (bool) If true, returns only the names of the clients requested
* *all_enrolled*: (bool) If true, will return all enrolled clients
:Returns: (list) List of dictionaries with the client information as requested.
'''
pass
@_auth
def client_id(self, client):
'''
Get a client's ID. Uses GET to /clients?name=<client> interface.
:Args:
* *client*: (str) Client's name
:Returns: (str) Client id
'''
pass
@_auth
def client_info(self, client):
'''
Get client info. Uses GET to /clients/<client> interface.
:Args:
* *client*: (str) Client's ID
:Returns: (dict) Client dictionary
'''
pass
@_auth
def client_validate_password(self, client, password):
'''
Validate client's password. Uses PUT to /clients/<client> interface.
:Args:
* *client*: (str) Client's ID
* *password*: (str) Client's Password
'''
pass
@_auth
def client_validate_pin(self, client, pin):
'''
Validate client's PIN. Uses PUT to /clients/<client> interface.
:Args:
* *client*: (str) Client's ID
* *pin*: (str) Client's PIN
'''
pass
@_auth
def client_update(self,
client,
reason=None,
pin=None,
current_pin=None,
verification_speed=None,
row_doubling=None,
password=None,
bypass_expiration=None,
bypass_limit=None,
bypass_spacing_minutes=None,
bypass_code=None,
is_disabled=None,
verification_lock=None,
password_lock=None,
enroll_deadline_extension_minutes=None,
enroll_deadline_enable=None,
windows_profile=None,
role_rationale=None,
role=None,
):
'''
Update client info
Uses PUT to /clients/<client> interface
:Args:
* *client*: (str) Client's ID
:Kwargs:
* *reason*: (str) The reason for changing the client's settings
* *pin*: (str) The new PIN to set
* *current_pin*: (str) The current PIN of the user. Only required if role is not admin and the Account Reset Mode (System Configuration) requires PIN.
* *verification_speed*: (int) The speed at which the verification should appear for the client. Allowed values: 0, 25, 50, 75, 100.
* *row_doubling*: (str) Row doubling is an AudioPIN only option that puts two rows of words in each pinpad digit. Allowed values: "OFF", "TRAIN", "ON"
* *password*: (str) New client password
* *bypass_expiration*: (int) Used to enable/disable a client's bypass. The time, in minutes, from when the request was received until the bypass expires. 0 removes the bypass, while -1 sets a bypass that doesn't expire.
* *bypass_limit*: (int) The number of times a user may bypass. Set to 0 for no limit. If set without either an existing valid bypass_expiration, or providing one in the request, the client's bypass_expiration will be set to 10 mins. Default value: 0. Size range: >=0
* *bypass_spacing_minutes*: (int) Specifies the time, in minutes, the user must wait between using each bypass. Set to 0 for no bypass rate limiting. If set without either an existing valid bypass_expiration, or providing one in the request, the client's bypass_expiration will be set to 10 mins.
* *bypass_code*: (str) The code that the client must enter to bypass.
* *is_disabled*: (bool) If true, the client cannot do verifications (will automatically bypass).
* *verification_lock*: (bool) Unlocks the given client if the client verified incorrectly too many times.
* *password_lock*: (bool) Set to false to unlock a client who enter thier password incorrectly too many times.
* *enroll_deadline_extension_minutes*: (int) Amount of time, in minutes, to extend an enrollment deadline by.
* *enroll_deadline_enable*: (bool) When true, enables the enrollment deadline for a certain client, when false disables an enrollment deadline.
* *windows_profile*: (str) Assigns a Windows Profile to the user using the Windows Profile ID. To remove a profile, send null.
* *role_rationale*: (str) Update the client rationale for a role
* *role*: (str) Update the client role. Note: Google users cannot have their role updated. Allowed values: "admin", "manager", "support", "user".
:More information: Can be found `here <https://cloud.knuverse.com/docs/api/#api-Clients-Update_client_information>`_.
'''
pass
@_auth
def client_unenroll(self, client):
'''
Unenroll a client. Uses DELETE to /clients/<client> interface.
:Args:
* *client*: (str) Client's ID
'''
pass
@_auth
def enrollment_resource(self, client, audio=False):
'''
Get Client Enrollment Data. Uses GET to /enrollments/<client> interface.
:Args:
* *client*: (str) Client's ID
* *audio*: (boolean) If True then the enrollment audio is returned.
:Returns: (dictionary) Look `here <https://cloud.knuverse.com/docs/api/#api-Enrollments-Get_enrollment_info>`_ for information on keys and values.
'''
pass
@_auth
def enrollment_start(
self,
name,
mode=None,
pin=None,
phone_number=None
):
'''
Start Client Enrollment. Uses the POST to /enrollments interface.
:Args:
* *client*: (str) Client's Name
* *mode*: (str) DEPRECATED. Presence of PIN is used to determine mode (AudioPass vs AudioPIN)
* *pin*: (str) Client's PIN. 4 digit string
* *phone_number*: (str) Phone number to call.
:Returns: (dict) Enrollment record with prompts as described `here <https://cloud.knuverse.com/docs/api/#api-Enrollments-Start_enrollment>`_.
'''
pass
@_auth
def enrollment_upload(
self,
enrollment_id,
audio_file,
):
'''
Upload Enrollment Data. Uses PUT to /enrollments/<enrollment_id> interface.
:Args:
* *enrollment_id*: (str) Enrollment's ID
* *audio_file*: (str) Path to the audio file of the recorded words. Not required for phone enrollments.
'''
pass
@_auth
def events_client(self, client):
'''
Get a client's events. Uses GET to /events/clients/<client> interface.
:Args:
* *client*: (str) Client's ID
:Returns: (list) Events
'''
pass
@_auth
def events_clients(self):
'''
Get all client events. Uses GET to /events/clients interface.
:Returns: (list) Events
'''
pass
@_auth
def events_login(self):
'''
Get all login events. Uses GET to /events/login interface.
:Returns: (list) Events
'''
pass
@_auth
def events_system(self):
'''
Get all system events. Uses GET to /events/system interface.
:Returns: (list) Events
'''
pass
def about(self):
'''
Get server info. Uses GET to /about interface
:returns: dict - Server information
'''
pass
@_auth
def status(self):
'''
Get server status. Uses GET to /status interface.
:Returns: (dict) Server status as described `here <https://cloud.knuverse.com/docs/api/#api-General-Status>`_.
'''
pass
@_auth
def warnings(self):
'''
Get server system warnings. Uses GET to /status/warnings.
:returns: (dict) Server messages and warnings as described `here <https://cloud.knuverse.com/docs/api/#api-General-Warnings>`_.
'''
pass
@_auth
def module_settings(self):
'''
Get Module settings. Uses GET to /settings/modules interface.
:Returns: (dict) Module settings as shown `here <https://cloud.knuverse.com/docs/api/#api-Module_Settings-Get_the_module_settings>`_.
'''
pass
@_auth
def settings_module_update(self,
mode_audiopin_enable=None,
mode_audiopass_enable=None,
mode_default=None):
'''
Set Module settings. Uses PUT to /settings/modules interface.
:Args:
* *mode_audiopin_enable*: (bool) Turn on and off the AudioPIN feature
* *mode_audiopass_enable*: (bool) Turn on and off the AudioPass feature
* *mode_default*: (str) Set the default verification mode. Either 'audiopin' or 'audiopass'.
:Returns: None
'''
pass
@_auth
def settings_module_reset(self):
'''
Resets the module settings back to default. Uses DELETE to /settings/modules interface.
'''
pass
@staticmethod
def _format_input_dates(start_date, end_date):
pass
@_auth
def report_events(self, start_date, end_date, type="system"):
'''
Create a report for all client events or all system events.
Uses GET to /reports/events/{clients,system} interface
:Args:
* *start_date*: (datetime) Start time for report generation
* *end_date*: (datetime) End time for report generation
:Kwargs:
* *type*: (str) Type of event report to create. "system" or "clients"
:Returns: (list) List of events in the input range
'''
pass
@_auth
def report_verifications(self, start_date, end_date):
'''
Create a report for all verifications. Uses GET to /reports/verifications interface
:Args:
* *start_date*: (datetime) Start time for report generation
* *end_date*: (datetime) End time for report generation
:Returns: (str) CSV formatted report string
'''
pass
@_auth
def settings_system(self):
'''
Get system settings. Uses GET to /settings/system interface.
:Returns: (dict) System settings as shown `here <https://cloud.knuverse.com/docs/api/#api-System_Settings-Get_System_Settings>`_.
'''
pass
@_auth
def settings_system_update(self, data):
'''
Set system settings. Uses PUT to /settings/system interface
:Args:
* *data*: (dict) Settings dictionary as specified `here <https://cloud.knuverse.com/docs/api/#api-System_Settings-Set_System_Settings>`_.
:Returns: None
'''
pass
@_auth
def settings_system_reset(self):
'''
Resets the system settings back to default. Uses DELETE to /settings/system interface.
'''
pass
@_auth
def verification_start(
self,
client,
mode=None,
verification_speed=None,
row_doubling="off",
phone_number=None,
):
'''
Start a verification. Uses POST to /verifications interface.
:Args:
* *client*: (str) Client's Name
* *mode*: (str) Verification Mode. Allowed values: "audiopin", "audiopass"
* *verification_speed*: (int) Allowed values: 0, 25, 50, 75, 100
* *row_doubling*: (str) Allowed values: "off", "train", "on"
* *phone_number*: (str) Phone number to call.
:Returns: (dict) Verification record with animation as discussed `here <https://cloud.knuverse.com/docs/api/#api-Verifications-Start_verification>`_.
'''
pass
@_auth
def verification_upload(
self,
verification_id,
audio_file=None,
bypass=False,
bypass_pin=None,
bypass_code=None,
):
'''
Upload verification data. Uses PUT to /verfications/<verification_id> interface
:Args:
* *verification_id*: (str) Verification ID
* *audio_file*: (str) Path to the audio file of the recorded words. Not required for phone verifications.
* *bypass*: (boolean) True if using a bypass code or pin to verify
* *bypass_pin*: (str) Client's PIN if this is a bypass
* *bypass_code*: (str) Client's bypass code if this is a bypass
'''
pass
@_auth
def verification_cancel(self, verification_id, reason=None):
'''
Cancels a started verification. Uses PUT to /verifications/<verification_id> interface
:Args:
* *verification_id*: (str) Verification ID
:Kwargs:
* *reason*: (str) Reason for cancelling the verification
:Returns: None
'''
pass
@_auth
def verification_delete(self, verification_id):
'''
Remove verification. Uses DELETE to /verifications/<verification_id> interface.
:Args:
* *verification_id*: (str) Verification ID
'''
pass
@_auth
def verification_count(self):
'''
Get Verification Count. Uses HEAD to /verifications interface.
:Returns: (int) Number of verifications
'''
pass
@_auth
def verification_list(self, limit=10):
'''
Get list of verifications. Uses GET to /verifications interface.
:Returns: (list) Verification list as specified `here <https://cloud.knuverse.com/docs/api/#api-Verifications-Get_verification_list>`_.
'''
pass
@_auth
def verification_resource(self, verification_id, audio=False):
'''
Get Verification Resource. Uses GET to /verifications/<verification_id> interface.
:Args:
* *verification_id*: (str) Verification ID
* *audio*: (boolean) If True, audio data associated with verification will be returned.
:Returns: (dict) Verification data as shown `here <https://cloud.knuverse.com/docs/api/#api-Verifications-Get_verification_info>`_.
'''
pass
@_auth
def verification_resource_secure(self, verification_id, jwt, name):
'''
Get Verification Resource.
Uses GET to /verifications/<verification_id> interface
Use this method rather than verification_resource when adding a second factor to your application.
See `this <https://cloud.knuverse.com/docs/integration/>`_ for more information.
:Args:
* *verification_id*: (str) Verification ID
* *jwt*: (str) Completion token received from application
* *name*: (str) Client name associated with the jwt. Received from application.
:Returns: (dict) Verification data as shown `here <https://cloud.knuverse.com/docs/api/#api-Verifications-Get_verification_info>`_.
'''
pass
| 90 | 41 | 17 | 2 | 9 | 6 | 2 | 0.64 | 0 | 12 | 7 | 0 | 46 | 9 | 49 | 49 | 976 | 176 | 490 | 225 | 347 | 313 | 339 | 133 | 288 | 19 | 0 | 1 | 109 |
143,350 |
KnuVerse/knuverse-sdk-python
|
KnuVerse_knuverse-sdk-python/knuverse/exceptions.py
|
knuverse.exceptions.BadRequestException
|
class BadRequestException(HttpErrorException):
"""
Used for HTTP Bad Request(400) Errors
"""
|
class BadRequestException(HttpErrorException):
'''
Used for HTTP Bad Request(400) Errors
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 4 | 0 | 1 | 1 | 0 | 3 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
143,351 |
KnuVerse/knuverse-sdk-python
|
KnuVerse_knuverse-sdk-python/knuverse/exceptions.py
|
knuverse.exceptions.ForbiddenException
|
class ForbiddenException(HttpErrorException):
"""
Used for HTTP Forbidden(403) Errors
"""
|
class ForbiddenException(HttpErrorException):
'''
Used for HTTP Forbidden(403) Errors
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 4 | 0 | 1 | 1 | 0 | 3 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
143,352 |
KnuVerse/knuverse-sdk-python
|
KnuVerse_knuverse-sdk-python/knuverse/exceptions.py
|
knuverse.exceptions.HttpErrorException
|
class HttpErrorException(Exception):
"""
Used for HTTP errors. Status codes >= 400
"""
|
class HttpErrorException(Exception):
'''
Used for HTTP errors. Status codes >= 400
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | 0 | 0 | 6 | 0 | 0 | 0 | 10 | 4 | 0 | 1 | 1 | 0 | 3 | 1 | 1 | 0 | 0 | 3 | 0 | 0 |
143,353 |
KnuVerse/knuverse-sdk-python
|
KnuVerse_knuverse-sdk-python/knuverse/exceptions.py
|
knuverse.exceptions.InternalServerErrorException
|
class InternalServerErrorException(HttpErrorException):
"""
Used for HTTP Internal Server Error(500) Errors
"""
|
class InternalServerErrorException(HttpErrorException):
'''
Used for HTTP Internal Server Error(500) Errors
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 4 | 0 | 1 | 1 | 0 | 3 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
143,354 |
KnuVerse/knuverse-sdk-python
|
KnuVerse_knuverse-sdk-python/knuverse/exceptions.py
|
knuverse.exceptions.NotFoundException
|
class NotFoundException(HttpErrorException):
"""
Used for HTTP Not Found(404) Errors
"""
|
class NotFoundException(HttpErrorException):
'''
Used for HTTP Not Found(404) Errors
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 4 | 0 | 1 | 1 | 0 | 3 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
143,355 |
KnuVerse/knuverse-sdk-python
|
KnuVerse_knuverse-sdk-python/knuverse/exceptions.py
|
knuverse.exceptions.RateLimitedException
|
class RateLimitedException(HttpErrorException):
"""
Used for HTTP Rate Limited(429) Errors
"""
|
class RateLimitedException(HttpErrorException):
'''
Used for HTTP Rate Limited(429) Errors
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 4 | 0 | 1 | 1 | 0 | 3 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
143,356 |
KnuVerse/knuverse-sdk-python
|
KnuVerse_knuverse-sdk-python/knuverse/exceptions.py
|
knuverse.exceptions.RequestException
|
class RequestException(Exception):
"""
Used for invalid requests.
"""
|
class RequestException(Exception):
'''
Used for invalid requests.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 4 | 0 | 1 | 1 | 0 | 3 | 1 | 1 | 0 | 0 | 3 | 0 | 0 |
143,357 |
KnuVerse/knuverse-sdk-python
|
KnuVerse_knuverse-sdk-python/knuverse/exceptions.py
|
knuverse.exceptions.UnauthorizedException
|
class UnauthorizedException(HttpErrorException):
"""
Used for HTTP Unauthorized(401) Errors
"""
|
class UnauthorizedException(HttpErrorException):
'''
Used for HTTP Unauthorized(401) Errors
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 4 | 0 | 1 | 1 | 0 | 3 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
143,358 |
Koed00/django-q
|
django_q/management/commands/qcluster.py
|
django_q.management.commands.qcluster.Command
|
class Command(BaseCommand):
# Translators: help text for qcluster management command
help = _("Starts a Django Q Cluster.")
def add_arguments(self, parser):
parser.add_argument(
"--run-once",
action="store_true",
dest="run_once",
default=False,
help="Run once and then stop.",
)
def handle(self, *args, **options):
q = Cluster()
q.start()
if options.get("run_once", False):
q.stop()
|
class Command(BaseCommand):
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
pass
| 3 | 0 | 7 | 0 | 7 | 0 | 2 | 0.07 | 1 | 1 | 1 | 0 | 2 | 0 | 2 | 2 | 18 | 2 | 15 | 5 | 12 | 1 | 9 | 5 | 6 | 2 | 1 | 1 | 3 |
143,359 |
Koed00/django-q
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Koed00_django-q/django_q/models.py
|
django_q.models.Failure.Meta
|
class Meta:
app_label = "django_q"
verbose_name = _("Failed task")
verbose_name_plural = _("Failed tasks")
ordering = ["-stopped"]
proxy = True
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 | 0 | 6 | 6 | 5 | 0 | 6 | 6 | 5 | 0 | 0 | 0 | 0 |
143,360 |
Koed00/django-q
|
django_q/models.py
|
django_q.models.Schedule
|
class Schedule(models.Model):
name = models.CharField(max_length=100, null=True, blank=True)
func = models.CharField(max_length=256, help_text="e.g. module.tasks.function")
hook = models.CharField(
max_length=256,
null=True,
blank=True,
help_text="e.g. module.tasks.result_function",
)
args = models.TextField(null=True, blank=True, help_text=_("e.g. 1, 2, 'John'"))
kwargs = models.TextField(
null=True, blank=True, help_text=_("e.g. x=1, y=2, name='John'")
)
ONCE = "O"
MINUTES = "I"
HOURLY = "H"
DAILY = "D"
WEEKLY = "W"
MONTHLY = "M"
QUARTERLY = "Q"
YEARLY = "Y"
CRON = "C"
TYPE = (
(ONCE, _("Once")),
(MINUTES, _("Minutes")),
(HOURLY, _("Hourly")),
(DAILY, _("Daily")),
(WEEKLY, _("Weekly")),
(MONTHLY, _("Monthly")),
(QUARTERLY, _("Quarterly")),
(YEARLY, _("Yearly")),
(CRON, _("Cron")),
)
schedule_type = models.CharField(
max_length=1, choices=TYPE, default=TYPE[0][0], verbose_name=_("Schedule Type")
)
minutes = models.PositiveSmallIntegerField(
null=True, blank=True, help_text=_("Number of minutes for the Minutes type")
)
repeats = models.IntegerField(
default=-1, verbose_name=_("Repeats"), help_text=_("n = n times, -1 = forever")
)
next_run = models.DateTimeField(
verbose_name=_("Next Run"), default=timezone.now, null=True
)
cron = models.CharField(
max_length=100,
null=True,
blank=True,
validators=[validate_cron],
help_text=_("Cron expression"),
)
task = models.CharField(max_length=100, null=True, editable=False)
cluster = models.CharField(max_length=100, default=None, null=True, blank=True)
def success(self):
if self.task and Task.objects.filter(id=self.task):
return Task.objects.get(id=self.task).success
def last_run(self):
if self.task and Task.objects.filter(id=self.task):
task = Task.objects.get(id=self.task)
if task.success:
url = reverse("admin:django_q_success_change", args=(task.id,))
else:
url = reverse("admin:django_q_failure_change", args=(task.id,))
return format_html(f'<a href="{url}">[{task.name}]</a>')
return None
def __str__(self):
return self.func
success.boolean = True
last_run.allow_tags = True
class Meta:
app_label = "django_q"
verbose_name = _("Scheduled task")
verbose_name_plural = _("Scheduled tasks")
ordering = ["next_run"]
|
class Schedule(models.Model):
def success(self):
pass
def last_run(self):
pass
def __str__(self):
pass
class Meta:
| 5 | 0 | 5 | 0 | 5 | 0 | 2 | 0 | 1 | 1 | 1 | 0 | 3 | 0 | 3 | 3 | 80 | 5 | 75 | 33 | 70 | 0 | 43 | 33 | 38 | 3 | 1 | 2 | 6 |
143,361 |
Koed00/django-q
|
django_q/models.py
|
django_q.models.SuccessManager
|
class SuccessManager(models.Manager):
def get_queryset(self):
return super(SuccessManager, self).get_queryset().filter(success=True)
|
class SuccessManager(models.Manager):
def get_queryset(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 1 | 0 | 1 |
143,362 |
Koed00/django-q
|
django_q/models.py
|
django_q.models.Task
|
class Task(models.Model):
id = models.CharField(max_length=32, primary_key=True, editable=False)
name = models.CharField(max_length=100, editable=False)
func = models.CharField(max_length=256)
hook = models.CharField(max_length=256, null=True)
args = PickledObjectField(null=True, protocol=-1)
kwargs = PickledObjectField(null=True, protocol=-1)
result = PickledObjectField(null=True, protocol=-1)
group = models.CharField(max_length=100, editable=False, null=True)
started = models.DateTimeField(editable=False)
stopped = models.DateTimeField(editable=False)
success = models.BooleanField(default=True, editable=False)
attempt_count = models.IntegerField(default=0)
@staticmethod
def get_result(task_id):
if len(task_id) == 32 and Task.objects.filter(id=task_id).exists():
return Task.objects.get(id=task_id).result
elif Task.objects.filter(name=task_id).exists():
return Task.objects.get(name=task_id).result
@staticmethod
def get_result_group(group_id, failures=False):
if failures:
values = Task.objects.filter(group=group_id).values_list(
"result", flat=True
)
else:
values = (
Task.objects.filter(group=group_id)
.exclude(success=False)
.values_list("result", flat=True)
)
return decode_results(values)
def group_result(self, failures=False):
if self.group:
return self.get_result_group(self.group, failures)
@staticmethod
def get_group_count(group_id, failures=False):
if failures:
return Failure.objects.filter(group=group_id).count()
return Task.objects.filter(group=group_id).count()
def group_count(self, failures=False):
if self.group:
return self.get_group_count(self.group, failures)
@staticmethod
def delete_group(group_id, objects=False):
group = Task.objects.filter(group=group_id)
if objects:
return group.delete()
return group.update(group=None)
def group_delete(self, tasks=False):
if self.group:
return self.delete_group(self.group, tasks)
@staticmethod
def get_task(task_id):
if len(task_id) == 32 and Task.objects.filter(id=task_id).exists():
return Task.objects.get(id=task_id)
elif Task.objects.filter(name=task_id).exists():
return Task.objects.get(name=task_id)
@staticmethod
def get_task_group(group_id, failures=True):
if failures:
return Task.objects.filter(group=group_id)
return Task.objects.filter(group=group_id).exclude(success=False)
def time_taken(self):
return (self.stopped - self.started).total_seconds()
@property
def short_result(self):
return truncatechars(self.result, 100)
def __str__(self):
return f"{self.name or self.id}"
class Meta:
app_label = "django_q"
ordering = ["-stopped"]
|
class Task(models.Model):
@staticmethod
def get_result(task_id):
pass
@staticmethod
def get_result_group(group_id, failures=False):
pass
def group_result(self, failures=False):
pass
@staticmethod
def get_group_count(group_id, failures=False):
pass
def group_count(self, failures=False):
pass
@staticmethod
def delete_group(group_id, objects=False):
pass
def group_delete(self, tasks=False):
pass
@staticmethod
def get_task(task_id):
pass
@staticmethod
def get_task_group(group_id, failures=True):
pass
def time_taken(self):
pass
@property
def short_result(self):
pass
def __str__(self):
pass
class Meta:
| 21 | 0 | 4 | 0 | 4 | 0 | 2 | 0 | 1 | 1 | 1 | 2 | 6 | 0 | 12 | 12 | 86 | 13 | 73 | 36 | 52 | 0 | 57 | 29 | 43 | 3 | 1 | 1 | 23 |
143,363 |
Koed00/django-q
|
django_q/queues.py
|
django_q.queues.Queue
|
class Queue(multiprocessing.queues.Queue):
"""A portable implementation of multiprocessing.Queue.
Because of multithreading / multiprocessing semantics, Queue.qsize() may
raise the NotImplementedError exception on Unix platforms like Mac OS X
where sem_getvalue() is not implemented. This subclass addresses this
problem by using a synchronized shared counter (initialized to zero) and
increasing / decreasing its value every time the put() and get() methods
are called, respectively. This not only prevents NotImplementedError from
being raised, but also allows us to implement a reliable version of both
qsize() and empty().
"""
def __init__(self, *args, **kwargs):
if sys.version_info < (3, 0):
super(Queue, self).__init__(*args, **kwargs)
else:
super(Queue, self).__init__(
*args, ctx=multiprocessing.get_context(), **kwargs
)
self.size = SharedCounter(0)
def __getstate__(self):
return super(Queue, self).__getstate__() + (self.size,)
def __setstate__(self, state):
super(Queue, self).__setstate__(state[:-1])
self.size = state[-1]
def put(self, *args, **kwargs):
super(Queue, self).put(*args, **kwargs)
self.size.increment(1)
def get(self, *args, **kwargs):
x = super(Queue, self).get(*args, **kwargs)
self.size.increment(-1)
return x
def qsize(self) -> int:
"""Reliable implementation of multiprocessing.Queue.qsize()"""
return self.size.value
def empty(self) -> bool:
"""Reliable implementation of multiprocessing.Queue.empty()"""
return not self.qsize() > 0
|
class Queue(multiprocessing.queues.Queue):
'''A portable implementation of multiprocessing.Queue.
Because of multithreading / multiprocessing semantics, Queue.qsize() may
raise the NotImplementedError exception on Unix platforms like Mac OS X
where sem_getvalue() is not implemented. This subclass addresses this
problem by using a synchronized shared counter (initialized to zero) and
increasing / decreasing its value every time the put() and get() methods
are called, respectively. This not only prevents NotImplementedError from
being raised, but also allows us to implement a reliable version of both
qsize() and empty().
'''
def __init__(self, *args, **kwargs):
pass
def __getstate__(self):
pass
def __setstate__(self, state):
pass
def put(self, *args, **kwargs):
pass
def get(self, *args, **kwargs):
pass
def qsize(self) -> int:
'''Reliable implementation of multiprocessing.Queue.qsize()'''
pass
def empty(self) -> bool:
'''Reliable implementation of multiprocessing.Queue.empty()'''
pass
| 8 | 3 | 4 | 0 | 3 | 0 | 1 | 0.48 | 1 | 4 | 1 | 0 | 7 | 1 | 7 | 27 | 45 | 8 | 25 | 10 | 17 | 12 | 22 | 10 | 14 | 2 | 2 | 1 | 8 |
143,364 |
Koed00/django-q
|
django_q/queues.py
|
django_q.queues.SharedCounter
|
class SharedCounter:
"""A synchronized shared counter.
The locking done by multiprocessing.Value ensures that only a single
process or thread may read or write the in-memory ctypes object. However,
in order to do n += 1, Python performs a read followed by a write, so a
second process may read the old value before the new one is written by
the first process. The solution is to use a multiprocessing.Lock to
guarantee the atomicity of the modifications to Value.
This class comes almost entirely from Eli Bendersky's blog:
http://eli.thegreenplace.net/2012/01/04/shared-counter-with-pythons-multiprocessing/
"""
def __init__(self, n=0):
self.count = multiprocessing.Value("i", n)
def increment(self, n=1):
"""Increment the counter by n (default = 1)"""
with self.count.get_lock():
self.count.value += n
@property
def value(self):
"""Return the value of the counter"""
return self.count.value
|
class SharedCounter:
'''A synchronized shared counter.
The locking done by multiprocessing.Value ensures that only a single
process or thread may read or write the in-memory ctypes object. However,
in order to do n += 1, Python performs a read followed by a write, so a
second process may read the old value before the new one is written by
the first process. The solution is to use a multiprocessing.Lock to
guarantee the atomicity of the modifications to Value.
This class comes almost entirely from Eli Bendersky's blog:
http://eli.thegreenplace.net/2012/01/04/shared-counter-with-pythons-multiprocessing/
'''
def __init__(self, n=0):
pass
def increment(self, n=1):
'''Increment the counter by n (default = 1)'''
pass
@property
def value(self):
'''Return the value of the counter'''
pass
| 5 | 3 | 3 | 0 | 2 | 1 | 1 | 1.33 | 0 | 0 | 0 | 0 | 3 | 1 | 3 | 3 | 26 | 5 | 9 | 6 | 4 | 12 | 8 | 5 | 4 | 1 | 0 | 1 | 3 |
143,365 |
Koed00/django-q
|
django_q/signing.py
|
django_q.signing.SignedPackage
|
class SignedPackage:
"""Wraps Django's signing module with custom Pickle serializer."""
@staticmethod
def dumps(obj, compressed: bool = Conf.COMPRESSED) -> str:
return signing.dumps(
obj,
key=Conf.SECRET_KEY,
salt=Conf.PREFIX,
compress=compressed,
serializer=PickleSerializer,
)
@staticmethod
def loads(obj) -> any:
return signing.loads(
obj, key=Conf.SECRET_KEY, salt=Conf.PREFIX, serializer=PickleSerializer
)
|
class SignedPackage:
'''Wraps Django's signing module with custom Pickle serializer.'''
@staticmethod
def dumps(obj, compressed: bool = Conf.COMPRESSED) -> str:
pass
@staticmethod
def loads(obj) -> any:
pass
| 5 | 1 | 6 | 0 | 6 | 0 | 1 | 0.07 | 0 | 4 | 2 | 0 | 0 | 0 | 2 | 2 | 18 | 2 | 15 | 5 | 10 | 1 | 5 | 3 | 2 | 1 | 0 | 0 | 2 |
143,366 |
Koed00/django-q
|
django_q/models.py
|
django_q.models.OrmQ
|
class OrmQ(models.Model):
key = models.CharField(max_length=100)
payload = models.TextField()
lock = models.DateTimeField(null=True)
def task(self):
return SignedPackage.loads(self.payload)
def func(self):
return self.task()["func"]
def task_id(self):
return self.task()["id"]
def name(self):
return self.task()["name"]
class Meta:
app_label = "django_q"
verbose_name = _("Queued task")
verbose_name_plural = _("Queued tasks")
|
class OrmQ(models.Model):
def task(self):
pass
def func(self):
pass
def task_id(self):
pass
def name(self):
pass
class Meta:
| 6 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 4 | 0 | 4 | 4 | 21 | 5 | 16 | 12 | 10 | 0 | 16 | 12 | 10 | 1 | 1 | 0 | 4 |
143,367 |
Koed00/django-q
|
django_q/status.py
|
django_q.status.Stat
|
class Stat(Status):
"""Status object for Cluster monitoring."""
def __init__(self, sentinel):
super(Stat, self).__init__(
sentinel.parent_pid or sentinel.pid, cluster_id=sentinel.cluster_id
)
self.broker = sentinel.broker or get_broker()
self.tob = sentinel.tob
self.reincarnations = sentinel.reincarnations
self.sentinel = sentinel.pid
self.status = sentinel.status()
self.done_q_size = 0
self.task_q_size = 0
if Conf.QSIZE:
self.done_q_size = sentinel.result_queue.qsize()
self.task_q_size = sentinel.task_queue.qsize()
if sentinel.monitor:
self.monitor = sentinel.monitor.pid
if sentinel.pusher:
self.pusher = sentinel.pusher.pid
self.workers = [w.pid for w in sentinel.pool]
def uptime(self) -> float:
return (timezone.now() - self.tob).total_seconds()
@property
def key(self) -> str:
"""
:return: redis key for this cluster statistic
"""
return self.get_key(self.cluster_id)
@staticmethod
def get_key(cluster_id) -> str:
"""
:param cluster_id: cluster ID
:return: redis key for the cluster statistic
"""
return f"{Conf.Q_STAT}:{cluster_id}"
def save(self):
try:
self.broker.set_stat(self.key, SignedPackage.dumps(self, True), 3)
except Exception as e:
logger.error(e)
def empty_queues(self) -> bool:
return self.done_q_size + self.task_q_size == 0
@staticmethod
def get(pid: int, cluster_id: str, broker: Broker = None) -> Union[Status, None]:
"""
gets the current status for the cluster
:param pid:
:param broker: an optional broker instance
:param cluster_id: id of the cluster
:return: Stat or Status
"""
if not broker:
broker = get_broker()
pack = broker.get_stat(Stat.get_key(cluster_id))
if pack:
try:
return SignedPackage.loads(pack)
except BadSignature:
return None
return Status(pid=pid, cluster_id=cluster_id)
@staticmethod
def get_all(broker: Broker = None) -> list:
"""
Get the status for all currently running clusters with the same prefix
and secret key.
:return: list of type Stat
"""
if not broker:
broker = get_broker()
stats = []
packs = broker.get_stats(f"{Conf.Q_STAT}:*") or []
for pack in packs:
try:
stats.append(SignedPackage.loads(pack))
except BadSignature:
continue
return stats
def __getstate__(self):
# Don't pickle the redis connection
state = dict(self.__dict__)
del state["broker"]
return state
|
class Stat(Status):
'''Status object for Cluster monitoring.'''
def __init__(self, sentinel):
pass
def uptime(self) -> float:
pass
@property
def key(self) -> str:
'''
:return: redis key for this cluster statistic
'''
pass
@staticmethod
def get_key(cluster_id) -> str:
'''
:param cluster_id: cluster ID
:return: redis key for the cluster statistic
'''
pass
def save(self):
pass
def empty_queues(self) -> bool:
pass
@staticmethod
def get_key(cluster_id) -> str:
'''
gets the current status for the cluster
:param pid:
:param broker: an optional broker instance
:param cluster_id: id of the cluster
:return: Stat or Status
'''
pass
@staticmethod
def get_all(broker: Broker = None) -> list:
'''
Get the status for all currently running clusters with the same prefix
and secret key.
:return: list of type Stat
'''
pass
def __getstate__(self):
pass
| 14 | 5 | 9 | 0 | 6 | 2 | 2 | 0.34 | 1 | 11 | 3 | 0 | 6 | 10 | 9 | 10 | 92 | 9 | 62 | 30 | 48 | 21 | 56 | 25 | 46 | 4 | 1 | 2 | 19 |
143,368 |
Koed00/django-q
|
django_q/tasks.py
|
django_q.tasks.AsyncTask
|
class AsyncTask:
    """
    An async task whose options can be adjusted before it is queued.
    """

    def __init__(self, func, *args, **kwargs):
        self.func = func
        self.args = args
        self.kwargs = kwargs
        self.id = ""
        self.started = False

    @property
    def broker(self):
        """Broker override for this task, if any."""
        return self._get_option("broker")

    @broker.setter
    def broker(self, broker):
        self._set_option("broker", broker)

    @property
    def sync(self):
        """Synchronous-execution flag."""
        return self._get_option("sync")

    @sync.setter
    def sync(self, sync):
        self._set_option("sync", sync)

    @property
    def save(self):
        """Result-saving override for this task."""
        return self._get_option("save")

    @save.setter
    def save(self, save):
        self._set_option("save", save)

    @property
    def hook(self):
        """Post-execution hook for this task."""
        return self._get_option("hook")

    @hook.setter
    def hook(self, hook):
        self._set_option("hook", hook)

    @property
    def group(self):
        """Group label for this task."""
        return self._get_option("group")

    @group.setter
    def group(self, group):
        self._set_option("group", group)

    @property
    def cached(self):
        """Result caching flag; falls back to the global Conf.CACHED."""
        return self._get_option("cached", Conf.CACHED)

    @cached.setter
    def cached(self, cached):
        self._set_option("cached", cached)

    def _set_option(self, key, value):
        # Options live either in an explicit ``q_options`` dict or directly
        # in the keyword arguments; write to whichever is in use.
        target = self.kwargs["q_options"] if "q_options" in self.kwargs else self.kwargs
        target[key] = value
        # Any option change invalidates a previous run.
        self.started = False

    def _get_option(self, key, default=None):
        source = self.kwargs["q_options"] if "q_options" in self.kwargs else self.kwargs
        return source.get(key, default)

    def run(self):
        """Queue the task and return its id."""
        task_id = async_task(self.func, *self.args, **self.kwargs)
        self.id = task_id
        self.started = True
        return task_id

    def result(self, wait=0):
        """Return the task result once started; blocks up to ``wait`` ms."""
        if not self.started:
            return None
        return result(self.id, wait=wait, cached=self.cached)

    def fetch(self, wait=0):
        """Return the task object once started; blocks up to ``wait`` ms."""
        if not self.started:
            return None
        return fetch(self.id, wait=wait, cached=self.cached)

    def result_group(self, failures=False, wait=0, count=None):
        """Return the group's results; requires a started, grouped task."""
        if not (self.started and self.group):
            return None
        return result_group(
            self.group,
            failures=failures,
            wait=wait,
            count=count,
            cached=self.cached,
        )

    def fetch_group(self, failures=True, wait=0, count=None):
        """Return the group's task objects; requires a started, grouped task."""
        if not (self.started and self.group):
            return None
        return fetch_group(
            self.group,
            failures=failures,
            wait=wait,
            count=count,
            cached=self.cached,
        )
|
class AsyncTask:
'''
an async task
'''
def __init__(self, func, *args, **kwargs):
pass
@property
def broker(self):
pass
@broker.setter
def broker(self):
pass
@property
def sync(self):
pass
@sync.setter
def sync(self):
pass
@property
def save(self):
pass
@save.setter
def save(self):
pass
@property
def hook(self):
pass
@hook.setter
def hook(self):
pass
@property
def group(self):
pass
@group.setter
def group(self):
pass
@property
def cached(self):
pass
@cached.setter
def cached(self):
pass
def _set_option(self, key, value):
pass
def _get_option(self, key, default=None):
pass
def run(self):
pass
def result(self, wait=0):
pass
def fetch(self, wait=0):
pass
def result_group(self, failures=False, wait=0, count=None):
pass
def fetch_group(self, failures=True, wait=0, count=None):
pass
| 33 | 1 | 4 | 0 | 3 | 0 | 1 | 0.04 | 0 | 1 | 1 | 0 | 20 | 5 | 20 | 20 | 109 | 24 | 82 | 38 | 49 | 3 | 56 | 26 | 35 | 2 | 0 | 1 | 26 |
143,369 |
Koed00/django-q
|
django_q/tasks.py
|
django_q.tasks.Chain
|
class Chain:
    """
    A sequential chain of tasks
    """

    def __init__(self, chain=None, group=None, cached=Conf.CACHED, sync=Conf.SYNC):
        self.chain = chain if chain else []
        self.group = group if group else ""
        self.broker = get_broker()
        self.cached = cached
        self.sync = sync
        self.started = False

    def append(self, func, *args, **kwargs):
        """
        add a task to the chain
        takes the same parameters as async_task()
        """
        self.chain.append((func, args, kwargs))
        if self.started:
            # drop results left over from a previous run of this chain
            delete_group(self.group)
            self.started = False
        return self.length()

    def run(self):
        """
        Start queueing the chain to the worker cluster
        :return: the chain's group id
        """
        group_id = async_chain(
            chain=list(self.chain),
            group=self.group,
            cached=self.cached,
            sync=self.sync,
            broker=self.broker,
        )
        self.group = group_id
        self.started = True
        return group_id

    def result(self, wait=0):
        """
        return the full list of results from the chain when it finishes. blocks until timeout.
        :param int wait: how many milliseconds to wait for a result
        :return: an unsorted list of results
        """
        if not self.started:
            return None
        return result_group(
            self.group, wait=wait, count=self.length(), cached=self.cached
        )

    def fetch(self, failures=True, wait=0):
        """
        get the task result objects from the chain when it finishes. blocks until timeout.
        :param failures: include failed tasks
        :param int wait: how many milliseconds to wait for a result
        :return: an unsorted list of task objects
        """
        if not self.started:
            return None
        return fetch_group(
            self.group,
            failures=failures,
            wait=wait,
            count=self.length(),
            cached=self.cached,
        )

    def current(self):
        """
        get the index of the currently executing chain element
        :return int: current chain index
        """
        return count_group(self.group, cached=self.cached) if self.started else None

    def length(self):
        """
        get the length of the chain
        :return int: length of the chain
        """
        return len(self.chain)
|
class Chain:
'''
A sequential chain of tasks
'''
def __init__(self, chain=None, group=None, cached=Conf.CACHED, sync=Conf.SYNC):
pass
def append(self, func, *args, **kwargs):
'''
add a task to the chain
takes the same parameters as async_task()
'''
pass
def run(self):
'''
Start queueing the chain to the worker cluster
:return: the chain's group id
'''
pass
def result(self, wait=0):
'''
return the full list of results from the chain when it finishes. blocks until timeout.
:param int wait: how many milliseconds to wait for a result
:return: an unsorted list of results
'''
pass
def fetch(self, failures=True, wait=0):
'''
get the task result objects from the chain when it finishes. blocks until timeout.
:param failures: include failed tasks
:param int wait: how many milliseconds to wait for a result
:return: an unsorted list of task objects
'''
pass
def current(self):
'''
get the index of the currently executing chain element
:return int: current chain index
'''
pass
def length(self):
'''
get the length of the chain
:return int: length of the chain
'''
pass
| 8 | 7 | 10 | 0 | 6 | 4 | 2 | 0.7 | 0 | 1 | 1 | 0 | 7 | 6 | 7 | 7 | 82 | 7 | 44 | 14 | 36 | 31 | 30 | 14 | 22 | 2 | 0 | 1 | 11 |
143,370 |
Koed00/django-q
|
django_q/tasks.py
|
django_q.tasks.Iter
|
class Iter:
    """
    An async task with iterable arguments
    """

    def __init__(
        self,
        func=None,
        args=None,
        kwargs=None,
        cached=Conf.CACHED,
        sync=Conf.SYNC,
        broker=None,
    ):
        self.func = func
        self.args = args if args else []
        self.kwargs = kwargs if kwargs else {}
        self.id = ""
        self.broker = broker if broker else get_broker()
        self.cached = cached
        self.sync = sync
        self.started = False

    def append(self, *args):
        """
        add arguments to the set
        """
        self.args.append(args)
        if self.started:
            # new arguments invalidate a previous run
            self.started = False
        return self.length()

    def run(self):
        """
        Start queueing the tasks to the worker cluster
        :return: the task id
        """
        self.kwargs.update(cached=self.cached, sync=self.sync, broker=self.broker)
        self.id = async_iter(self.func, self.args, **self.kwargs)
        self.started = True
        return self.id

    def result(self, wait=0):
        """
        return the full list of results.
        :param int wait: how many milliseconds to wait for a result
        :return: an unsorted list of results
        """
        if not self.started:
            return None
        return result(self.id, wait=wait, cached=self.cached)

    def fetch(self, wait=0):
        """
        get the task result objects.
        :param int wait: how many milliseconds to wait for a result
        :return: an unsorted list of task objects
        """
        if not self.started:
            return None
        return fetch(self.id, wait=wait, cached=self.cached)

    def length(self):
        """
        get the length of the arguments list
        :return int: length of the argument list
        """
        return len(self.args)
|
class Iter:
'''
An async task with iterable arguments
'''
def __init__(
self,
func=None,
args=None,
kwargs=None,
cached=Conf.CACHED,
sync=Conf.SYNC,
broker=None,
):
pass
def append(self, *args):
'''
add arguments to the set
'''
pass
def run(self):
'''
Start queueing the tasks to the worker cluster
:return: the task id
'''
pass
def result(self, wait=0):
'''
return the full list of results.
:param int wait: how many milliseconds to wait for a result
:return: an unsorted list of results
'''
pass
def fetch(self, wait=0):
'''
get the task result objects.
:param int wait: how many milliseconds to wait for a result
:return: an unsorted list of task objects
'''
pass
def length(self):
'''
get the length of the arguments list
:return int: length of the argument list
'''
pass
| 7 | 6 | 10 | 0 | 6 | 4 | 2 | 0.63 | 0 | 1 | 1 | 0 | 6 | 8 | 6 | 6 | 68 | 6 | 38 | 23 | 23 | 24 | 30 | 15 | 23 | 2 | 0 | 1 | 9 |
143,371 |
Koed00/django-q
|
django_q/tests/tasks.py
|
django_q.tests.tasks.TaskError
|
class TaskError(Exception):
    """Custom exception type used by the test task suite."""

    pass
|
class TaskError(Exception):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
143,372 |
Koed00/django-q
|
django_q/tests/test_cluster.py
|
django_q.tests.test_cluster.WordClass
|
class WordClass:
    """Tiny helper that exposes the default word list through a getter."""

    def __init__(self):
        # Keep a reference to the module-level default word list.
        self.word_list = DEFAULT_WORDLIST

    def get_words(self):
        """Return the stored word list."""
        return self.word_list
|
class WordClass:
def __init__(self):
pass
def get_words(self):
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 2 | 2 | 6 | 1 | 5 | 4 | 2 | 0 | 5 | 4 | 2 | 1 | 0 | 0 | 2 |
143,373 |
Koed00/django-q
|
django_q/tests/testing_utilities/multiple_database_routers.py
|
django_q.tests.testing_utilities.multiple_database_routers.TestingMultipleAppsDatabaseRouter
|
class TestingMultipleAppsDatabaseRouter:
    """
    A router to control all database operations on models in the
    auth application.
    """

    @staticmethod
    def is_admin(model):
        # Only models from the "admin" app belong to the admin database.
        return model._meta.app_label == "admin"

    def db_for_read(self, model, **hints):
        """Route reads of admin models to "admin", everything else to "default"."""
        return "admin" if self.is_admin(model) else "default"

    def db_for_write(self, model, **hints):
        """Route writes of admin models to "admin", everything else to "default"."""
        return "admin" if self.is_admin(model) else "default"
|
class TestingMultipleAppsDatabaseRouter:
'''
A router to control all database operations on models in the
auth application.
'''
@staticmethod
def is_admin(model):
pass
def db_for_read(self, model, **hints):
pass
def db_for_write(self, model, **hints):
pass
| 5 | 1 | 3 | 0 | 3 | 0 | 2 | 0.33 | 0 | 0 | 0 | 0 | 2 | 0 | 3 | 3 | 19 | 3 | 12 | 5 | 7 | 4 | 11 | 4 | 7 | 2 | 0 | 1 | 5 |
143,374 |
Koed00/django-q
|
django_q/status.py
|
django_q.status.Status
|
class Status:
    """Cluster status base class."""

    def __init__(self, pid, cluster_id):
        # PIDs of the pool's worker processes (populated by subclasses).
        self.workers = []
        # Time of birth — NOTE(review): None here; presumably set by the
        # Stat subclass from the sentinel — confirm against Stat.__init__.
        self.tob = None
        # Number of worker reincarnations recorded for this cluster.
        self.reincarnations = 0
        self.pid = pid
        self.cluster_id = cluster_id
        # Helper-process PIDs; 0 means "not running".
        self.sentinel = 0
        # Lifecycle state; starts out stopped.
        self.status = Conf.STOPPED
        # Queue sizes (done/task); only refreshed when Conf.QSIZE is enabled.
        self.done_q_size = 0
        self.host = socket.gethostname()
        self.monitor = 0
        self.task_q_size = 0
        self.pusher = 0
        # Moment this status snapshot was created.
        self.timestamp = timezone.now()
|
class Status:
'''Cluster status base class.'''
def __init__(self, pid, cluster_id):
pass
| 2 | 1 | 14 | 0 | 14 | 0 | 1 | 0.07 | 0 | 1 | 1 | 1 | 1 | 13 | 1 | 1 | 17 | 1 | 15 | 15 | 13 | 1 | 15 | 15 | 13 | 1 | 0 | 0 | 1 |
143,375 |
Koed00/django-q
|
django_q/models.py
|
django_q.models.FailureManager
|
class FailureManager(models.Manager):
    """Manager that limits the queryset to failed tasks (success=False)."""

    def get_queryset(self):
        # Narrow the default queryset down to unsuccessful tasks only.
        return super(FailureManager, self).get_queryset().filter(success=False)
|
class FailureManager(models.Manager):
def get_queryset(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 1 | 0 | 1 |
143,376 |
Koed00/django-q
|
django_q/migrations/0014_schedule_cluster.py
|
django_q.migrations.0014_schedule_cluster.Migration
|
class Migration(migrations.Migration):
dependencies = [
('django_q', '0013_task_attempt_count'),
]
operations = [
migrations.AddField(
model_name='schedule',
name='cluster',
field=models.CharField(blank=True, default=None, max_length=100, null=True),
),
]
|
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 13 | 2 | 11 | 3 | 10 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,377 |
Koed00/django-q
|
django_q/migrations/0013_task_attempt_count.py
|
django_q.migrations.0013_task_attempt_count.Migration
|
class Migration(migrations.Migration):
dependencies = [
('django_q', '0012_auto_20200702_1608'),
]
operations = [
migrations.AddField(
model_name='task',
name='attempt_count',
field=models.IntegerField(default=0),
),
]
|
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 13 | 2 | 11 | 3 | 10 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,378 |
Koed00/django-q
|
django_q/management/commands/qinfo.py
|
django_q.management.commands.qinfo.Command
|
class Command(BaseCommand):
# Translators: help text for qinfo management command
help = _("General information over all clusters.")
def add_arguments(self, parser):
parser.add_argument(
"--config",
action="store_true",
dest="config",
default=False,
help="Print current configuration.",
)
parser.add_argument(
"--ids",
action="store_true",
dest="ids",
default=False,
help="Print cluster task ID(s) (PIDs).",
)
def handle(self, *args, **options):
if options.get("ids", True):
get_ids()
elif options.get("config", False):
hide = [
"conf",
"IDLE",
"STOPPING",
"STARTING",
"WORKING",
"SIGNAL_NAMES",
"STOPPED",
]
settings = [
a for a in dir(Conf) if not a.startswith("__") and a not in hide
]
self.stdout.write(f"VERSION: {'.'.join(str(v) for v in VERSION)}")
for setting in settings:
value = getattr(Conf, setting)
if value is not None:
self.stdout.write(f"{setting}: {value}")
else:
info()
|
class Command(BaseCommand):
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
pass
| 3 | 0 | 19 | 0 | 19 | 0 | 3 | 0.03 | 1 | 2 | 1 | 0 | 2 | 0 | 2 | 2 | 43 | 2 | 40 | 8 | 37 | 1 | 16 | 8 | 13 | 5 | 1 | 3 | 6 |
143,379 |
Koed00/django-q
|
django_q/management/commands/qmemory.py
|
django_q.management.commands.qmemory.Command
|
class Command(BaseCommand):
# Translators: help text for qmemory management command
help = _("Monitors Q Cluster memory usage")
def add_arguments(self, parser):
parser.add_argument(
"--run-once",
action="store_true",
dest="run_once",
default=False,
help="Run once and then stop.",
)
parser.add_argument(
"--workers",
action="store_true",
dest="workers",
default=False,
help="Show each worker's memory usage.",
)
def handle(self, *args, **options):
memory(
run_once=options.get("run_once", False),
workers=options.get("workers", False),
)
|
class Command(BaseCommand):
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
pass
| 3 | 0 | 10 | 0 | 10 | 0 | 1 | 0.05 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 25 | 2 | 22 | 4 | 19 | 1 | 7 | 4 | 4 | 1 | 1 | 0 | 2 |
143,380 |
Koed00/django-q
|
django_q/management/commands/qmonitor.py
|
django_q.management.commands.qmonitor.Command
|
class Command(BaseCommand):
# Translators: help text for qmonitor management command
help = _("Monitors Q Cluster activity")
def add_arguments(self, parser):
parser.add_argument(
"--run-once",
action="store_true",
dest="run_once",
default=False,
help="Run once and then stop.",
)
def handle(self, *args, **options):
monitor(run_once=options.get("run_once", False))
|
class Command(BaseCommand):
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
pass
| 3 | 0 | 5 | 0 | 5 | 0 | 1 | 0.08 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 15 | 2 | 12 | 4 | 9 | 1 | 6 | 4 | 3 | 1 | 1 | 0 | 2 |
143,381 |
Koed00/django-q
|
django_q/migrations/0001_initial.py
|
django_q.migrations.0001_initial.Migration
|
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Schedule',
fields=[
('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
('func', models.CharField(max_length=256, help_text='e.g. module.tasks.function')),
('hook', models.CharField(null=True, blank=True, max_length=256, help_text='e.g. module.tasks.result_function')),
('args', models.CharField(null=True, blank=True, max_length=256, help_text="e.g. 1, 2, 'John'")),
('kwargs', models.CharField(null=True, blank=True, max_length=256, help_text="e.g. x=1, y=2, name='John'")),
('schedule_type', models.CharField(verbose_name='Schedule Type', choices=[('O', 'Once'), ('H', 'Hourly'), ('D', 'Daily'), ('W', 'Weekly'), ('M', 'Monthly'), ('Q', 'Quarterly'), ('Y', 'Yearly')], default='O', max_length=1)),
('repeats', models.SmallIntegerField(verbose_name='Repeats', default=-1, help_text='n = n times, -1 = forever')),
('next_run', models.DateTimeField(verbose_name='Next Run', default=django.utils.timezone.now, null=True)),
('task', models.CharField(editable=False, null=True, max_length=100)),
],
options={
'verbose_name': 'Scheduled task',
'ordering': ['next_run'],
},
),
migrations.CreateModel(
name='Task',
fields=[
('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
('name', models.CharField(editable=False, max_length=100)),
('func', models.CharField(max_length=256)),
('hook', models.CharField(null=True, max_length=256)),
('args', picklefield.fields.PickledObjectField(editable=False, null=True)),
('kwargs', picklefield.fields.PickledObjectField(editable=False, null=True)),
('result', picklefield.fields.PickledObjectField(editable=False, null=True)),
('started', models.DateTimeField(editable=False)),
('stopped', models.DateTimeField(editable=False)),
('success', models.BooleanField(editable=False, default=True)),
],
),
migrations.CreateModel(
name='Failure',
fields=[
],
options={
'verbose_name': 'Failed task',
'proxy': True,
},
bases=('django_q.task',),
),
migrations.CreateModel(
name='Success',
fields=[
],
options={
'verbose_name': 'Successful task',
'proxy': True,
},
bases=('django_q.task',),
),
]
|
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 60 | 2 | 58 | 3 | 57 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,382 |
Koed00/django-q
|
django_q/migrations/0002_auto_20150630_1624.py
|
django_q.migrations.0002_auto_20150630_1624.Migration
|
class Migration(migrations.Migration):
dependencies = [
('django_q', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='schedule',
name='args',
field=models.TextField(help_text="e.g. 1, 2, 'John'", blank=True, null=True),
),
migrations.AlterField(
model_name='schedule',
name='kwargs',
field=models.TextField(help_text="e.g. x=1, y=2, name='John'", blank=True, null=True),
),
]
|
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 18 | 2 | 16 | 3 | 15 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,383 |
Koed00/django-q
|
django_q/migrations/0003_auto_20150708_1326.py
|
django_q.migrations.0003_auto_20150708_1326.Migration
|
class Migration(migrations.Migration):
dependencies = [
('django_q', '0002_auto_20150630_1624'),
]
operations = [
migrations.AlterModelOptions(
name='failure',
options={'verbose_name_plural': 'Failed tasks', 'verbose_name': 'Failed task'},
),
migrations.AlterModelOptions(
name='schedule',
options={'verbose_name_plural': 'Scheduled tasks', 'ordering': ['next_run'], 'verbose_name': 'Scheduled task'},
),
migrations.AlterModelOptions(
name='success',
options={'verbose_name_plural': 'Successful tasks', 'verbose_name': 'Successful task'},
),
migrations.RemoveField(
model_name='task',
name='id',
),
migrations.AddField(
model_name='task',
name='id',
field=models.CharField(max_length=32, primary_key=True, editable=False, serialize=False),
),
]
|
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 29 | 2 | 27 | 3 | 26 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,384 |
Koed00/django-q
|
django_q/migrations/0004_auto_20150710_1043.py
|
django_q.migrations.0004_auto_20150710_1043.Migration
|
class Migration(migrations.Migration):
dependencies = [
('django_q', '0003_auto_20150708_1326'),
]
operations = [
migrations.AlterModelOptions(
name='failure',
options={'verbose_name_plural': 'Failed tasks', 'verbose_name': 'Failed task', 'ordering': ['-stopped']},
),
migrations.AlterModelOptions(
name='success',
options={'verbose_name_plural': 'Successful tasks', 'verbose_name': 'Successful task', 'ordering': ['-stopped']},
),
migrations.AlterModelOptions(
name='task',
options={'ordering': ['-stopped']},
),
]
|
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 20 | 2 | 18 | 3 | 17 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,385 |
Koed00/django-q
|
django_q/migrations/0005_auto_20150718_1506.py
|
django_q.migrations.0005_auto_20150718_1506.Migration
|
class Migration(migrations.Migration):
dependencies = [
('django_q', '0004_auto_20150710_1043'),
]
operations = [
migrations.AddField(
model_name='schedule',
name='name',
field=models.CharField(max_length=100, null=True),
),
migrations.AddField(
model_name='task',
name='group',
field=models.CharField(max_length=100, null=True, editable=False),
),
]
|
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 18 | 2 | 16 | 3 | 15 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,386 |
Koed00/django-q
|
django_q/migrations/0006_auto_20150805_1817.py
|
django_q.migrations.0006_auto_20150805_1817.Migration
|
class Migration(migrations.Migration):
dependencies = [
('django_q', '0005_auto_20150718_1506'),
]
operations = [
migrations.AddField(
model_name='schedule',
name='minutes',
field=models.PositiveSmallIntegerField(help_text='Number of minutes for the Minutes type', blank=True, null=True),
),
migrations.AlterField(
model_name='schedule',
name='schedule_type',
field=models.CharField(max_length=1, choices=[('O', 'Once'), ('I', 'Minutes'), ('H', 'Hourly'), ('D', 'Daily'), ('W', 'Weekly'), ('M', 'Monthly'), ('Q', 'Quarterly'), ('Y', 'Yearly')], default='O', verbose_name='Schedule Type'),
),
]
|
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 18 | 2 | 16 | 3 | 15 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,387 |
Koed00/django-q
|
django_q/migrations/0007_ormq.py
|
django_q.migrations.0007_ormq.Migration
|
class Migration(migrations.Migration):
dependencies = [
('django_q', '0006_auto_20150805_1817'),
]
operations = [
migrations.CreateModel(
name='OrmQ',
fields=[
('id', models.AutoField(primary_key=True, auto_created=True, verbose_name='ID', serialize=False)),
('key', models.CharField(max_length=100)),
('payload', models.TextField()),
('lock', models.DateTimeField(null=True)),
],
options={
'verbose_name_plural': 'Queued tasks',
'verbose_name': 'Queued task',
},
),
]
|
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 21 | 2 | 19 | 3 | 18 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,388 |
Koed00/django-q
|
django_q/migrations/0008_auto_20160224_1026.py
|
django_q.migrations.0008_auto_20160224_1026.Migration
|
class Migration(migrations.Migration):
dependencies = [
('django_q', '0007_ormq'),
]
operations = [
migrations.AlterField(
model_name='schedule',
name='name',
field=models.CharField(blank=True, max_length=100, null=True),
),
]
|
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 13 | 2 | 11 | 3 | 10 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,389 |
Koed00/django-q
|
django_q/migrations/0009_auto_20171009_0915.py
|
django_q.migrations.0009_auto_20171009_0915.Migration
|
class Migration(migrations.Migration):
dependencies = [
('django_q', '0008_auto_20160224_1026'),
]
operations = [
migrations.AlterField(
model_name='schedule',
name='repeats',
field=models.IntegerField(default=-1, help_text='n = n times, -1 = forever', verbose_name='Repeats'),
),
]
|
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 13 | 2 | 11 | 3 | 10 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,390 |
Koed00/django-q
|
django_q/migrations/0010_auto_20200610_0856.py
|
django_q.migrations.0010_auto_20200610_0856.Migration
|
class Migration(migrations.Migration):
dependencies = [
('django_q', '0009_auto_20171009_0915'),
]
operations = [
migrations.AlterField(
model_name='task',
name='args',
field=picklefield.fields.PickledObjectField(editable=False, null=True, protocol=-1),
),
migrations.AlterField(
model_name='task',
name='kwargs',
field=picklefield.fields.PickledObjectField(editable=False, null=True, protocol=-1),
),
migrations.AlterField(
model_name='task',
name='result',
field=picklefield.fields.PickledObjectField(editable=False, null=True, protocol=-1),
),
]
|
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 23 | 2 | 21 | 3 | 20 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,391 |
Koed00/django-q
|
django_q/migrations/0011_auto_20200628_1055.py
|
django_q.migrations.0011_auto_20200628_1055.Migration
|
class Migration(migrations.Migration):
dependencies = [
('django_q', '0010_auto_20200610_0856'),
]
operations = [
migrations.AddField(
model_name='schedule',
name='cron',
field=models.CharField(blank=True, help_text='Cron expression', max_length=100, null=True),
),
migrations.AlterField(
model_name='schedule',
name='schedule_type',
field=models.CharField(choices=[('O', 'Once'), ('I', 'Minutes'), ('H', 'Hourly'), ('D', 'Daily'), ('W', 'Weekly'), ('M', 'Monthly'), ('Q', 'Quarterly'), ('Y', 'Yearly'), ('C', 'Cron')], default='O', max_length=1, verbose_name='Schedule Type'),
),
]
|
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 18 | 2 | 16 | 3 | 15 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,392 |
Koed00/django-q
|
django_q/migrations/0012_auto_20200702_1608.py
|
django_q.migrations.0012_auto_20200702_1608.Migration
|
class Migration(migrations.Migration):
dependencies = [
('django_q', '0011_auto_20200628_1055'),
]
operations = [
migrations.AlterField(
model_name='schedule',
name='cron',
field=models.CharField(blank=True, help_text='Cron expression', max_length=100, null=True, validators=[django_q.models.validate_cron]),
),
]
|
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 13 | 2 | 11 | 3 | 10 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,393 |
Koed00/django-q
|
django_q/tests/testing_utilities/multiple_database_routers.py
|
django_q.tests.testing_utilities.multiple_database_routers.TestingReplicaDatabaseRouter
|
class TestingReplicaDatabaseRouter:
    """
    A router to control all database operations on models in the
    auth application.
    """

    def db_for_read(self, model, **hints):
        """All reads are served from the REPLICA database."""
        read_db = "replica"
        return read_db

    def db_for_write(self, model, **hints):
        """All writes go to the DEFAULT database."""
        write_db = "default"
        return write_db
|
class TestingReplicaDatabaseRouter:
'''
A router to control all database operations on models in the
auth application.
'''
def db_for_read(self, model, **hints):
'''
Allows read access from REPLICA database.
'''
pass
def db_for_write(self, model, **hints):
'''
Always write to DEFAULT database
'''
pass
| 3 | 3 | 5 | 0 | 2 | 3 | 1 | 2 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 17 | 2 | 5 | 3 | 2 | 10 | 5 | 3 | 2 | 1 | 0 | 0 | 2 |
143,394 |
Koed00/django-q
|
django_q/humanhash.py
|
django_q.humanhash.HumanHasher
|
class HumanHasher:
    """
    Transforms hex digests to human-readable strings.
    The format of these strings will look something like:
    `victor-bacon-zulu-lima`. The output is obtained by compressing the input
    digest to a fixed number of bytes, then mapping those bytes to one of 256
    words. A default wordlist is provided, but you can override this if you
    prefer.
    As long as you use the same wordlist, the output will be consistent (i.e.
    the same digest will always render the same representation).
    """

    def __init__(self, wordlist=DEFAULT_WORDLIST):
        # One word per possible byte value — the list must cover 0-255.
        if len(wordlist) != 256:
            raise ArgumentError("Wordlist must have exactly 256 items")
        self.wordlist = wordlist

    def humanize(self, hexdigest, words=4, separator="-"):
        """
        Humanize a given hexadecimal digest.
        Change the number of words output by specifying `words`. Change the
        word separator with `separator`.
        >>> digest = '60ad8d0d871b6095808297'
        >>> HumanHasher().humanize(digest)
        'sodium-magnesium-nineteen-hydrogen'
        """
        # Parse consecutive hex-character pairs into byte values (0-255).
        # A trailing odd character is ignored, matching zip()'s behavior.
        # (Renamed from `bytes`, which shadowed the builtin.)
        byte_values = [
            int(hi + lo, 16) for hi, lo in zip(hexdigest[::2], hexdigest[1::2])
        ]
        # Compress an arbitrary number of bytes to `words`.
        compressed = self.compress(byte_values, words)
        # Map the compressed byte values through the word list.
        return separator.join(self.wordlist[byte] for byte in compressed)

    @staticmethod
    def compress(bytes, target):
        """
        Compress a list of byte values to a fixed target length.
        >>> bytes = [96, 173, 141, 13, 135, 27, 96, 149, 128, 130, 151]
        >>> HumanHasher.compress(bytes, 4)
        [205, 128, 156, 96]
        Attempting to compress a smaller number of bytes to a larger number is
        an error:
        >>> HumanHasher.compress(bytes, 15) # doctest: +ELLIPSIS
        Traceback (most recent call last):
        ...
        ValueError: Fewer input bytes than requested output
        """
        # NOTE: the parameter name `bytes` shadows the builtin but is kept
        # for backward compatibility with keyword callers.
        length = len(bytes)
        if target > length:
            raise ValueError("Fewer input bytes than requested output")
        # Split `bytes` into `target` segments.
        seg_size = length // target
        segments = [bytes[i * seg_size : (i + 1) * seg_size] for i in range(target)]
        # Catch any left-over bytes in the last segment.
        segments[-1].extend(bytes[target * seg_size :])
        # XOR-fold each segment down to a single checksum byte.
        # (Replaces a PEP 8 E731 lambda assignment with a direct comprehension.)
        return [reduce(operator.xor, segment, 0) for segment in segments]

    def uuid(self, **params):
        """
        Generate a UUID with a human-readable representation.
        Returns `(human_repr, full_digest)`. Accepts the same keyword arguments
        as :meth:`humanize` (they'll be passed straight through).
        """
        digest = str(uuidlib.uuid4()).replace("-", "")
        return self.humanize(digest, **params), digest
|
class HumanHasher:
'''
Transforms hex digests to human-readable strings.
The format of these strings will look something like:
`victor-bacon-zulu-lima`. The output is obtained by compressing the input
digest to a fixed number of bytes, then mapping those bytes to one of 256
words. A default wordlist is provided, but you can override this if you
prefer.
As long as you use the same wordlist, the output will be consistent (i.e.
the same digest will always render the same representation).
'''
def __init__(self, wordlist=DEFAULT_WORDLIST):
pass
def humanize(self, hexdigest, words=4, separator="-"):
'''
Humanize a given hexadecimal digest.
Change the number of words output by specifying `words`. Change the
word separator with `separator`.
>>> digest = '60ad8d0d871b6095808297'
>>> HumanHasher().humanize(digest)
'sodium-magnesium-nineteen-hydrogen'
'''
pass
@staticmethod
def compress(bytes, target):
'''
Compress a list of byte values to a fixed target length.
>>> bytes = [96, 173, 141, 13, 135, 27, 96, 149, 128, 130, 151]
>>> HumanHasher.compress(bytes, 4)
[205, 128, 156, 96]
Attempting to compress a smaller number of bytes to a larger number is
an error:
>>> HumanHasher.compress(bytes, 15) # doctest: +ELLIPSIS
Traceback (most recent call last):
...
ValueError: Fewer input bytes than requested output
'''
pass
def uuid(self, **params):
'''
Generate a UUID with a human-readable representation.
Returns `(human_repr, full_digest)`. Accepts the same keyword arguments
as :meth:`humanize` (they'll be passed straight through).
'''
pass
| 6 | 4 | 17 | 4 | 6 | 8 | 2 | 1.58 | 0 | 8 | 0 | 0 | 3 | 1 | 4 | 4 | 88 | 21 | 26 | 15 | 20 | 41 | 22 | 14 | 17 | 2 | 0 | 1 | 6 |
143,395 |
Koed00/django-q
|
django_q/core_signing.py
|
django_q.core_signing.TimestampSigner
|
class TimestampSigner(Signer, TsS):
def unsign(self, value, max_age=None):
"""
Retrieve original value and check it wasn't signed more
than max_age seconds ago.
"""
result = super(TimestampSigner, self).unsign(value)
value, timestamp = result.rsplit(self.sep, 1)
timestamp = baseconv.base62.decode(timestamp)
if max_age is not None:
if isinstance(max_age, datetime.timedelta):
max_age = max_age.total_seconds()
# Check timestamp is not older than max_age
age = time.time() - timestamp
if age > max_age:
raise SignatureExpired("Signature age %s > %s seconds" % (age, max_age))
return value
|
class TimestampSigner(Signer, TsS):
def unsign(self, value, max_age=None):
'''
Retrieve original value and check it wasn't signed more
than max_age seconds ago.
'''
pass
| 2 | 1 | 16 | 0 | 11 | 5 | 4 | 0.42 | 2 | 2 | 0 | 0 | 1 | 0 | 1 | 2 | 17 | 0 | 12 | 5 | 10 | 5 | 12 | 5 | 10 | 4 | 2 | 2 | 4 |
143,396 |
Koed00/django-q
|
django_q/conf.py
|
django_q.conf.ErrorReporter
|
class ErrorReporter:
# initialize with iterator of reporters (better name, targets?)
def __init__(self, reporters):
self.targets = [target for target in reporters]
# report error to all configured targets
def report(self):
for t in self.targets:
t.report()
|
class ErrorReporter:
def __init__(self, reporters):
pass
def report(self):
pass
| 3 | 0 | 3 | 0 | 3 | 0 | 2 | 0.33 | 0 | 0 | 0 | 0 | 2 | 1 | 2 | 2 | 10 | 2 | 6 | 5 | 3 | 2 | 6 | 5 | 3 | 2 | 0 | 1 | 3 |
143,397 |
Koed00/django-q
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Koed00_django-q/django_q/models.py
|
django_q.models.OrmQ.Meta
|
class Meta:
app_label = "django_q"
verbose_name = _("Queued task")
verbose_name_plural = _("Queued tasks")
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
143,398 |
Koed00/django-q
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Koed00_django-q/django_q/models.py
|
django_q.models.Schedule.Meta
|
class Meta:
app_label = "django_q"
verbose_name = _("Scheduled task")
verbose_name_plural = _("Scheduled tasks")
ordering = ["next_run"]
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 | 0 | 5 | 5 | 4 | 0 | 5 | 5 | 4 | 0 | 0 | 0 | 0 |
143,399 |
Koed00/django-q
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Koed00_django-q/django_q/models.py
|
django_q.models.Success.Meta
|
class Meta:
app_label = "django_q"
verbose_name = _("Successful task")
verbose_name_plural = _("Successful tasks")
ordering = ["-stopped"]
proxy = True
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 | 0 | 6 | 6 | 5 | 0 | 6 | 6 | 5 | 0 | 0 | 0 | 0 |
143,400 |
Koed00/django-q
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Koed00_django-q/django_q/models.py
|
django_q.models.Task.Meta
|
class Meta:
app_label = "django_q"
ordering = ["-stopped"]
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 0 | 0 | 0 |
143,401 |
Koed00/django-q
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Koed00_django-q/django_q/tests/test_cluster.py
|
django_q.tests.test_cluster.TestSignals
|
class TestSignals:
@pytest.mark.django_db
def test_pre_enqueue_signal(self, broker):
broker.list_key = "pre_enqueue_test:q"
broker.delete_queue()
self.signal_was_called: bool = False
self.task: Optional[dict] = None
def handler(sender, task, **kwargs):
self.signal_was_called = True
self.task = task
pre_enqueue.connect(handler)
task_id = async_task("math.copysign", 1, -1, broker=broker)
assert self.signal_was_called is True
assert self.task.get("id") == task_id
pre_enqueue.disconnect(handler)
broker.delete_queue()
@pytest.mark.django_db
def test_pre_execute_signal(self, broker):
broker.list_key = "pre_execute_test:q"
broker.delete_queue()
self.signal_was_called: bool = False
self.task: Optional[dict] = None
self.func = None
def handler(sender, task, func, **kwargs):
self.signal_was_called = True
self.task = task
self.func = func
pre_execute.connect(handler)
task_id = async_task("math.copysign", 1, -1, broker=broker)
task_queue = Queue()
result_queue = Queue()
event = Event()
event.set()
pusher(task_queue, event, broker=broker)
task_queue.put("STOP")
worker(task_queue, result_queue, Value("f", -1))
result_queue.put("STOP")
monitor(result_queue, broker)
broker.delete_queue()
assert self.signal_was_called is True
assert self.task.get("id") == task_id
assert self.func == copysign
pre_execute.disconnect(handler)
@pytest.mark.django_db
def test_post_execute_signal(self, broker):
broker.list_key = "post_execute_test:q"
broker.delete_queue()
self.signal_was_called: bool = False
self.task: Optional[dict] = None
self.func = None
def handler(sender, task, **kwargs):
self.signal_was_called = True
self.task = task
post_execute.connect(handler)
task_id = async_task("math.copysign", 1, -1, broker=broker)
task_queue = Queue()
result_queue = Queue()
event = Event()
event.set()
pusher(task_queue, event, broker=broker)
task_queue.put("STOP")
worker(task_queue, result_queue, Value("f", -1))
result_queue.put("STOP")
monitor(result_queue, broker)
broker.delete_queue()
assert self.signal_was_called is True
assert self.task.get("id") == task_id
assert self.task.get("result") == -1
post_execute.disconnect(handler)
|
class TestSignals:
@pytest.mark.django_db
def test_pre_enqueue_signal(self, broker):
pass
def handler(sender, task, **kwargs):
pass
@pytest.mark.django_db
def test_pre_execute_signal(self, broker):
pass
def handler(sender, task, **kwargs):
pass
@pytest.mark.django_db
def test_post_execute_signal(self, broker):
pass
def handler(sender, task, **kwargs):
pass
| 10 | 0 | 14 | 1 | 13 | 0 | 1 | 0 | 0 | 3 | 1 | 0 | 3 | 3 | 3 | 3 | 77 | 8 | 69 | 22 | 59 | 0 | 66 | 19 | 59 | 1 | 0 | 0 | 6 |
143,402 |
Koed00/django-q
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Koed00_django-q/django_q/tests/test_cluster.py
|
django_q.tests.test_cluster.test_acknowledge_failure_override.VerifyAckMockBroker
|
class VerifyAckMockBroker(Broker):
def __init__(self, *args, **kwargs):
super(VerifyAckMockBroker, self).__init__(*args, **kwargs)
self.acknowledgements = {}
def acknowledge(self, task_id):
count = self.acknowledgements.get(task_id, 0)
self.acknowledgements[task_id] = count + 1
|
class VerifyAckMockBroker(Broker):
def __init__(self, *args, **kwargs):
pass
def acknowledge(self, task_id):
pass
| 3 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 2 | 1 | 2 | 21 | 8 | 1 | 7 | 5 | 4 | 0 | 7 | 5 | 4 | 1 | 1 | 0 | 2 |
143,403 |
Koed00/django-q
|
django_q/core_signing.py
|
django_q.core_signing.Signer
|
class Signer(Sgnr):
def unsign(self, signed_value):
signed_value = force_str(signed_value)
if self.sep not in signed_value:
raise BadSignature('No "%s" found in value' % self.sep)
value, sig = signed_value.rsplit(self.sep, 1)
if constant_time_compare(sig, self.signature(value)):
return force_str(value)
raise BadSignature('Signature "%s" does not match' % sig)
|
class Signer(Sgnr):
def unsign(self, signed_value):
pass
| 2 | 0 | 8 | 0 | 8 | 0 | 3 | 0 | 1 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 9 | 0 | 9 | 3 | 7 | 0 | 9 | 3 | 7 | 3 | 1 | 1 | 3 |
143,404 |
Koed00/django-q
|
django_q/admin.py
|
django_q.admin.FailAdmin
|
class FailAdmin(admin.ModelAdmin):
"""model admin for failed tasks."""
list_display = ("name", "func", "started", "stopped", "short_result")
def has_add_permission(self, request):
"""Don't allow adds."""
return False
actions = [retry_failed]
search_fields = ("name", "func")
list_filter = ("group",)
readonly_fields = []
def get_readonly_fields(self, request, obj=None):
"""Set all fields readonly."""
return list(self.readonly_fields) + [field.name for field in obj._meta.fields]
|
class FailAdmin(admin.ModelAdmin):
'''model admin for failed tasks.'''
def has_add_permission(self, request):
'''Don't allow adds.'''
pass
def get_readonly_fields(self, request, obj=None):
'''Set all fields readonly.'''
pass
| 3 | 3 | 3 | 0 | 2 | 1 | 1 | 0.3 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 2 | 17 | 4 | 10 | 8 | 7 | 3 | 10 | 8 | 7 | 1 | 1 | 0 | 2 |
143,405 |
Koed00/django-q
|
django_q/admin.py
|
django_q.admin.QueueAdmin
|
class QueueAdmin(admin.ModelAdmin):
"""queue admin for ORM broker"""
list_display = ("id", "key", "task_id", "name", "func", "lock")
def save_model(self, request, obj, form, change):
obj.save(using=Conf.ORM)
def delete_model(self, request, obj):
obj.delete(using=Conf.ORM)
def get_queryset(self, request):
return super(QueueAdmin, self).get_queryset(request).using(Conf.ORM)
def has_add_permission(self, request):
"""Don't allow adds."""
return False
list_filter = ("key",)
|
class QueueAdmin(admin.ModelAdmin):
'''queue admin for ORM broker'''
def save_model(self, request, obj, form, change):
pass
def delete_model(self, request, obj):
pass
def get_queryset(self, request):
pass
def has_add_permission(self, request):
'''Don't allow adds.'''
pass
| 5 | 2 | 2 | 0 | 2 | 0 | 1 | 0.18 | 1 | 2 | 1 | 0 | 4 | 0 | 4 | 4 | 19 | 6 | 11 | 7 | 6 | 2 | 11 | 7 | 6 | 1 | 1 | 0 | 4 |
143,406 |
Koed00/django-q
|
django_q/admin.py
|
django_q.admin.ScheduleAdmin
|
class ScheduleAdmin(admin.ModelAdmin):
"""model admin for schedules"""
list_display = (
"id",
"name",
"func",
"schedule_type",
"repeats",
"cluster",
"next_run",
"last_run",
"success",
)
# optional cron strings
if not croniter:
readonly_fields = ("cron",)
list_filter = ("next_run", "schedule_type", "cluster")
search_fields = ("func",)
list_display_links = ("id", "name")
|
class ScheduleAdmin(admin.ModelAdmin):
'''model admin for schedules'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 22 | 3 | 17 | 6 | 16 | 2 | 7 | 6 | 6 | 0 | 1 | 1 | 0 |
143,407 |
Koed00/django-q
|
django_q/signing.py
|
django_q.signing.PickleSerializer
|
class PickleSerializer:
"""Simple wrapper around Pickle for signing.dumps and signing.loads."""
@staticmethod
def dumps(obj) -> bytes:
return pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL)
@staticmethod
def loads(data) -> any:
return pickle.loads(data)
|
class PickleSerializer:
'''Simple wrapper around Pickle for signing.dumps and signing.loads.'''
@staticmethod
def dumps(obj) -> bytes:
pass
@staticmethod
def loads(data) -> any:
pass
| 5 | 1 | 2 | 0 | 2 | 0 | 1 | 0.14 | 0 | 1 | 0 | 0 | 0 | 0 | 2 | 2 | 10 | 2 | 7 | 5 | 2 | 1 | 5 | 3 | 2 | 1 | 0 | 0 | 2 |
143,408 |
Koed00/django-q
|
django_q/apps.py
|
django_q.apps.DjangoQConfig
|
class DjangoQConfig(AppConfig):
name = "django_q"
verbose_name = Conf.LABEL
default_auto_field = "django.db.models.AutoField"
def ready(self):
from django_q.signals import call_hook
|
class DjangoQConfig(AppConfig):
def ready(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 7 | 1 | 6 | 6 | 3 | 0 | 6 | 6 | 3 | 1 | 1 | 0 | 1 |
143,409 |
Koed00/django-q
|
django_q/brokers/__init__.py
|
django_q.brokers.Broker
|
class Broker:
def __init__(self, list_key: str = Conf.PREFIX):
self.connection = self.get_connection(list_key)
self.list_key = list_key
self.cache = self.get_cache()
self._info = None
def __getstate__(self):
return self.list_key, self._info
def __setstate__(self, state):
self.list_key, self._info = state
self.connection = self.get_connection(self.list_key)
self.cache = self.get_cache()
def enqueue(self, task):
"""
Puts a task onto the queue
:type task: str
:return: task id
"""
pass
def dequeue(self):
"""
Gets a task from the queue
:return: tuple with task id and task message
"""
pass
def queue_size(self):
"""
:return: the amount of tasks in the queue
"""
pass
def lock_size(self):
"""
:return: the number of tasks currently awaiting acknowledgement
"""
def delete_queue(self):
"""
Deletes the queue from the broker
"""
pass
def purge_queue(self):
"""
Purges the queue of any tasks
"""
pass
def delete(self, task_id):
"""
Deletes a task from the queue
:param task_id: the id of the task
"""
pass
def acknowledge(self, task_id):
"""
Acknowledges completion of the task and removes it from the queue.
:param task_id: the id of the task
"""
pass
def fail(self, task_id):
"""
Fails a task message
:param task_id:
:return:
"""
def ping(self) -> bool:
"""
Checks whether the broker connection is available
:rtype: bool
"""
pass
def info(self):
"""
Shows the broker type
"""
return self._info
def set_stat(self, key: str, value: str, timeout: int):
"""
Saves a cluster statistic to the cache provider
:type key: str
:type value: str
:type timeout: int
"""
if not self.cache:
return
key_list = self.cache.get(Conf.Q_STAT, [])
if key not in key_list:
key_list.append(key)
self.cache.set(Conf.Q_STAT, key_list)
return self.cache.set(key, value, timeout)
def get_stat(self, key: str):
"""
Gets a cluster statistic from the cache provider
:type key: str
:return: a cluster Stat
"""
if not self.cache:
return
return self.cache.get(key)
def get_stats(self, pattern: str) -> Optional[list]:
"""
Returns a list of all cluster stats from the cache provider
:type pattern: str
:return: a list of Stats
"""
if not self.cache:
return
key_list = self.cache.get(Conf.Q_STAT)
if not key_list or len(key_list) == 0:
return []
stats = []
for key in key_list:
stat = self.cache.get(key)
if stat:
stats.append(stat)
else:
key_list.remove(key)
self.cache.set(Conf.Q_STAT, key_list)
return stats
@staticmethod
def get_cache():
"""
Gets the current cache provider
:return: a cache provider
"""
try:
return caches[Conf.CACHE]
except InvalidCacheBackendError:
return None
@staticmethod
def get_connection(list_key: str = Conf.PREFIX):
"""
Gets a connection to the broker
:param list_key: Optional queue name
:return: a broker connection
"""
return 0
|
class Broker:
def __init__(self, list_key: str = Conf.PREFIX):
pass
def __getstate__(self):
pass
def __setstate__(self, state):
pass
def enqueue(self, task):
'''
Puts a task onto the queue
:type task: str
:return: task id
'''
pass
def dequeue(self):
'''
Gets a task from the queue
:return: tuple with task id and task message
'''
pass
def queue_size(self):
'''
:return: the amount of tasks in the queue
'''
pass
def lock_size(self):
'''
:return: the number of tasks currently awaiting acknowledgement
'''
pass
def delete_queue(self):
'''
Deletes the queue from the broker
'''
pass
def purge_queue(self):
'''
Purges the queue of any tasks
'''
pass
def delete_queue(self):
'''
Deletes a task from the queue
:param task_id: the id of the task
'''
pass
def acknowledge(self, task_id):
'''
Acknowledges completion of the task and removes it from the queue.
:param task_id: the id of the task
'''
pass
def fail(self, task_id):
'''
Fails a task message
:param task_id:
:return:
'''
pass
def ping(self) -> bool:
'''
Checks whether the broker connection is available
:rtype: bool
'''
pass
def info(self):
'''
Shows the broker type
'''
pass
def set_stat(self, key: str, value: str, timeout: int):
'''
Saves a cluster statistic to the cache provider
:type key: str
:type value: str
:type timeout: int
'''
pass
def get_stat(self, key: str):
'''
Gets a cluster statistic from the cache provider
:type key: str
:return: a cluster Stat
'''
pass
def get_stats(self, pattern: str) -> Optional[list]:
'''
Returns a list of all cluster stats from the cache provider
:type pattern: str
:return: a list of Stats
'''
pass
@staticmethod
def get_cache():
'''
Gets the current cache provider
:return: a cache provider
'''
pass
@staticmethod
def get_connection(list_key: str = Conf.PREFIX):
'''
Gets a connection to the broker
:param list_key: Optional queue name
:return: a broker connection
'''
pass
| 22 | 16 | 7 | 0 | 3 | 3 | 1 | 0.97 | 0 | 5 | 1 | 7 | 17 | 4 | 19 | 19 | 152 | 18 | 68 | 31 | 46 | 66 | 65 | 29 | 45 | 5 | 0 | 2 | 27 |
143,410 |
Koed00/django-q
|
django_q/cluster.py
|
django_q.cluster.Sentinel
|
class Sentinel:
def __init__(
self,
stop_event,
start_event,
cluster_id,
broker=None,
timeout=Conf.TIMEOUT,
start=True,
):
# Make sure we catch signals for the pool
signal.signal(signal.SIGINT, signal.SIG_IGN)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
self.pid = current_process().pid
self.cluster_id = cluster_id
self.parent_pid = get_ppid()
self.name = current_process().name
self.broker = broker or get_broker()
self.reincarnations = 0
self.tob = timezone.now()
self.stop_event = stop_event
self.start_event = start_event
self.pool_size = Conf.WORKERS
self.pool = []
self.timeout = timeout
self.task_queue = (
Queue(maxsize=Conf.QUEUE_LIMIT) if Conf.QUEUE_LIMIT else Queue()
)
self.result_queue = Queue()
self.event_out = Event()
self.monitor = None
self.pusher = None
if start:
self.start()
def start(self):
self.broker.ping()
self.spawn_cluster()
self.guard()
def status(self) -> str:
if not self.start_event.is_set() and not self.stop_event.is_set():
return Conf.STARTING
elif self.start_event.is_set() and not self.stop_event.is_set():
if self.result_queue.empty() and self.task_queue.empty():
return Conf.IDLE
return Conf.WORKING
elif self.stop_event.is_set() and self.start_event.is_set():
if self.monitor.is_alive() or self.pusher.is_alive() or len(self.pool) > 0:
return Conf.STOPPING
return Conf.STOPPED
def spawn_process(self, target, *args) -> Process:
"""
:type target: function or class
"""
p = Process(target=target, args=args)
p.daemon = True
if target == worker:
p.daemon = Conf.DAEMONIZE_WORKERS
p.timer = args[2]
self.pool.append(p)
p.start()
return p
def spawn_pusher(self) -> Process:
return self.spawn_process(pusher, self.task_queue, self.event_out, self.broker)
def spawn_worker(self):
self.spawn_process(
worker, self.task_queue, self.result_queue, Value("f", -1), self.timeout
)
def spawn_monitor(self) -> Process:
return self.spawn_process(monitor, self.result_queue, self.broker)
def reincarnate(self, process):
"""
:param process: the process to reincarnate
:type process: Process or None
"""
if not Conf.SYNC:
db.connections.close_all() # Close any old connections
if process == self.monitor:
self.monitor = self.spawn_monitor()
logger.error(_(f"reincarnated monitor {process.name} after sudden death"))
elif process == self.pusher:
self.pusher = self.spawn_pusher()
logger.error(_(f"reincarnated pusher {process.name} after sudden death"))
else:
self.pool.remove(process)
self.spawn_worker()
if process.timer.value == 0:
# only need to terminate on timeout, otherwise we risk destabilizing the queues
process.terminate()
logger.warning(_(f"reincarnated worker {process.name} after timeout"))
elif int(process.timer.value) == -2:
logger.info(_(f"recycled worker {process.name}"))
else:
logger.error(_(f"reincarnated worker {process.name} after death"))
self.reincarnations += 1
def spawn_cluster(self):
self.pool = []
Stat(self).save()
if not Conf.SYNC:
db.connection.close()
# spawn worker pool
for __ in range(self.pool_size):
self.spawn_worker()
# spawn auxiliary
self.monitor = self.spawn_monitor()
self.pusher = self.spawn_pusher()
# set worker cpu affinity if needed
if psutil and Conf.CPU_AFFINITY:
set_cpu_affinity(Conf.CPU_AFFINITY, [w.pid for w in self.pool])
def guard(self):
logger.info(
_(
f"{current_process().name} guarding cluster {humanize(self.cluster_id.hex)}"
)
)
self.start_event.set()
Stat(self).save()
logger.info(_(f"Q Cluster {humanize(self.cluster_id.hex)} running."))
counter = 0
cycle = Conf.GUARD_CYCLE # guard loop sleep in seconds
# Guard loop. Runs at least once
while not self.stop_event.is_set() or not counter:
# Check Workers
for p in self.pool:
with p.timer.get_lock():
# Are you alive?
if not p.is_alive() or p.timer.value == 0:
self.reincarnate(p)
continue
# Decrement timer if work is being done
if p.timer.value > 0:
p.timer.value -= cycle
# Check Monitor
if not self.monitor.is_alive():
self.reincarnate(self.monitor)
# Check Pusher
if not self.pusher.is_alive():
self.reincarnate(self.pusher)
# Call scheduler once a minute (or so)
counter += cycle
if counter >= 30 and Conf.SCHEDULER:
counter = 0
scheduler(broker=self.broker)
# Save current status
Stat(self).save()
sleep(cycle)
self.stop()
def stop(self):
Stat(self).save()
name = current_process().name
logger.info(_(f"{name} stopping cluster processes"))
# Stopping pusher
self.event_out.set()
# Wait for it to stop
while self.pusher.is_alive():
sleep(0.1)
Stat(self).save()
# Put poison pills in the queue
for __ in range(len(self.pool)):
self.task_queue.put("STOP")
self.task_queue.close()
# wait for the task queue to empty
self.task_queue.join_thread()
# Wait for all the workers to exit
while len(self.pool):
for p in self.pool:
if not p.is_alive():
self.pool.remove(p)
sleep(0.1)
Stat(self).save()
# Finally stop the monitor
self.result_queue.put("STOP")
self.result_queue.close()
# Wait for the result queue to empty
self.result_queue.join_thread()
logger.info(_(f"{name} waiting for the monitor."))
# Wait for everything to close or time out
count = 0
if not self.timeout:
self.timeout = 30
while self.status() == Conf.STOPPING and count < self.timeout * 10:
sleep(0.1)
Stat(self).save()
count += 1
# Final status
Stat(self).save()
|
class Sentinel:
def __init__(
self,
stop_event,
start_event,
cluster_id,
broker=None,
timeout=Conf.TIMEOUT,
start=True,
):
pass
def start(self):
pass
def status(self) -> str:
pass
def spawn_process(self, target, *args) -> Process:
'''
:type target: function or class
'''
pass
def spawn_pusher(self) -> Process:
pass
def spawn_worker(self):
pass
def spawn_monitor(self) -> Process:
pass
def reincarnate(self, process):
'''
:param process: the process to reincarnate
:type process: Process or None
'''
pass
def spawn_cluster(self):
pass
def guard(self):
pass
def stop(self):
pass
| 12 | 2 | 17 | 0 | 14 | 3 | 4 | 0.2 | 0 | 6 | 3 | 0 | 11 | 17 | 11 | 11 | 196 | 11 | 156 | 46 | 136 | 31 | 134 | 38 | 122 | 8 | 0 | 4 | 41 |
143,411 |
Koed00/django-q
|
django_q/brokers/aws_sqs.py
|
django_q.brokers.aws_sqs.Sqs
|
class Sqs(Broker):
def __init__(self, list_key: str = Conf.PREFIX):
self.sqs = None
super(Sqs, self).__init__(list_key)
self.queue = self.get_queue()
def __setstate__(self, state):
super(Sqs, self).__setstate__(state)
self.sqs = None
self.queue = self.get_queue()
def enqueue(self, task):
response = self.queue.send_message(MessageBody=task)
return response.get("MessageId")
def dequeue(self):
# sqs supports max 10 messages in bulk
if Conf.BULK > 10:
Conf.BULK = 10
params = {"MaxNumberOfMessages": Conf.BULK, "VisibilityTimeout": Conf.RETRY}
# sqs long polling
sqs_config = Conf.SQS
if "receive_message_wait_time_seconds" in sqs_config:
wait_time_second = sqs_config.get("receive_message_wait_time_seconds", 20)
# validation of parameter
if not isinstance(wait_time_second, int):
raise ValueError("receive_message_wait_time_seconds should be int")
if wait_time_second > 20:
raise ValueError(
"receive_message_wait_time_seconds is invalid. Reason: Must be >= 0 and <= 20"
)
params.update({"WaitTimeSeconds": wait_time_second})
tasks = self.queue.receive_messages(**params)
if tasks:
return [(t.receipt_handle, t.body) for t in tasks]
def acknowledge(self, task_id):
return self.delete(task_id)
def queue_size(self) -> int:
return int(self.queue.attributes["ApproximateNumberOfMessages"])
def lock_size(self) -> int:
return int(self.queue.attributes["ApproximateNumberOfMessagesNotVisible"])
def delete(self, task_id):
message = self.sqs.Message(self.queue.url, task_id)
message.delete()
def fail(self, task_id):
self.delete(task_id)
def delete_queue(self):
self.queue.delete()
def purge_queue(self):
self.queue.purge()
def ping(self) -> bool:
return "sqs" in self.connection.get_available_resources()
def info(self) -> str:
return "AWS SQS"
@staticmethod
def get_connection(list_key: str = Conf.PREFIX) -> Session:
config = Conf.SQS
if "aws_region" in config:
config["region_name"] = config["aws_region"]
del config["aws_region"]
if "receive_message_wait_time_seconds" in config:
del config["receive_message_wait_time_seconds"]
return Session(**config)
def get_queue(self):
self.sqs = self.connection.resource("sqs")
try:
# try to return an existing queue by name. If the queue does not
# exist try to create it.
return self.sqs.get_queue_by_name(QueueName=self.list_key)
except ClientError as exp:
if exp.response["Error"]["Code"] != QUEUE_DOES_NOT_EXIST:
raise exp
return self.sqs.create_queue(QueueName=self.list_key)
|
class Sqs(Broker):
def __init__(self, list_key: str = Conf.PREFIX):
pass
def __setstate__(self, state):
pass
def enqueue(self, task):
pass
def dequeue(self):
pass
def acknowledge(self, task_id):
pass
def queue_size(self) -> int:
pass
def lock_size(self) -> int:
pass
def delete(self, task_id):
pass
def fail(self, task_id):
pass
def delete_queue(self):
pass
def purge_queue(self):
pass
def ping(self) -> bool:
pass
def info(self) -> str:
pass
@staticmethod
def get_connection(list_key: str = Conf.PREFIX) -> Session:
pass
def get_queue(self):
pass
| 17 | 0 | 5 | 1 | 4 | 0 | 2 | 0.08 | 1 | 6 | 1 | 0 | 14 | 2 | 15 | 34 | 92 | 22 | 65 | 27 | 48 | 5 | 62 | 25 | 46 | 6 | 1 | 2 | 24 |
143,412 |
Koed00/django-q
|
django_q/cluster.py
|
django_q.cluster.Cluster
|
class Cluster:
def __init__(self, broker: Broker = None):
self.broker = broker or get_broker()
self.sentinel = None
self.stop_event = None
self.start_event = None
self.pid = current_process().pid
self.cluster_id = uuid.uuid4()
self.host = socket.gethostname()
self.timeout = Conf.TIMEOUT
signal.signal(signal.SIGTERM, self.sig_handler)
signal.signal(signal.SIGINT, self.sig_handler)
def start(self) -> int:
# Start Sentinel
self.stop_event = Event()
self.start_event = Event()
self.sentinel = Process(
target=Sentinel,
args=(
self.stop_event,
self.start_event,
self.cluster_id,
self.broker,
self.timeout,
),
)
self.sentinel.start()
logger.info(_(f"Q Cluster {self.name} starting."))
while not self.start_event.is_set():
sleep(0.1)
return self.pid
def stop(self) -> bool:
if not self.sentinel.is_alive():
return False
logger.info(_(f"Q Cluster {self.name} stopping."))
self.stop_event.set()
self.sentinel.join()
logger.info(_(f"Q Cluster {self.name} has stopped."))
self.start_event = None
self.stop_event = None
return True
def sig_handler(self, signum, frame):
logger.debug(
_(
f'{current_process().name} got signal {Conf.SIGNAL_NAMES.get(signum, "UNKNOWN")}'
)
)
self.stop()
@property
def stat(self) -> Status:
if self.sentinel:
return Stat.get(pid=self.pid, cluster_id=self.cluster_id)
return Status(pid=self.pid, cluster_id=self.cluster_id)
@property
def name(self) -> str:
return humanize(self.cluster_id.hex)
@property
def is_starting(self) -> bool:
return self.stop_event and self.start_event and not self.start_event.is_set()
@property
def is_running(self) -> bool:
return self.stop_event and self.start_event and self.start_event.is_set()
@property
def is_stopping(self) -> bool:
return (
self.stop_event
and self.start_event
and self.start_event.is_set()
and self.stop_event.is_set()
)
@property
def has_stopped(self) -> bool:
return self.start_event is None and self.stop_event is None and self.sentinel
|
class Cluster:
def __init__(self, broker: Broker = None):
pass
def start(self) -> int:
pass
def stop(self) -> bool:
pass
def sig_handler(self, signum, frame):
pass
@property
def stat(self) -> Status:
pass
@property
def name(self) -> str:
pass
@property
def is_starting(self) -> bool:
pass
@property
def is_running(self) -> bool:
pass
@property
def is_stopping(self) -> bool:
pass
@property
def has_stopped(self) -> bool:
pass
| 17 | 0 | 7 | 0 | 7 | 0 | 1 | 0.01 | 0 | 8 | 5 | 0 | 10 | 8 | 10 | 10 | 82 | 9 | 72 | 25 | 55 | 1 | 48 | 19 | 37 | 2 | 0 | 1 | 13 |
143,413 |
Koed00/django-q
|
django_q/brokers/redis_broker.py
|
django_q.brokers.redis_broker.Redis
|
class Redis(Broker):
def __init__(self, list_key: str = Conf.PREFIX):
super(Redis, self).__init__(list_key=f"django_q:{list_key}:q")
def enqueue(self, task):
return self.connection.rpush(self.list_key, task)
def dequeue(self):
task = self.connection.blpop(self.list_key, 1)
if task:
return [(None, task[1])]
def queue_size(self):
return self.connection.llen(self.list_key)
def delete_queue(self):
return self.connection.delete(self.list_key)
def purge_queue(self):
return self.connection.ltrim(self.list_key, 1, 0)
def ping(self) -> bool:
try:
return self.connection.ping()
except redis.ConnectionError as e:
logger.error("Can not connect to Redis server.")
raise e
def info(self) -> str:
if not self._info:
info = self.connection.info("server")
self._info = f"Redis {info['redis_version']}"
return self._info
def set_stat(self, key: str, value: str, timeout: int):
self.connection.set(key, value, timeout)
def get_stat(self, key: str):
if self.connection.exists(key):
return self.connection.get(key)
def get_stats(self, pattern: str):
keys = self.connection.keys(pattern=pattern)
if keys:
return self.connection.mget(keys)
@staticmethod
def get_connection(list_key: str = Conf.PREFIX) -> Redis:
if django_redis and Conf.DJANGO_REDIS:
return django_redis.get_redis_connection(Conf.DJANGO_REDIS)
if isinstance(Conf.REDIS, str):
return redis.from_url(Conf.REDIS)
return redis.StrictRedis(**Conf.REDIS)
|
class Redis(Broker):
def __init__(self, list_key: str = Conf.PREFIX):
pass
def enqueue(self, task):
pass
def dequeue(self):
pass
def queue_size(self):
pass
def delete_queue(self):
pass
def purge_queue(self):
pass
def ping(self) -> bool:
pass
def info(self) -> str:
pass
def set_stat(self, key: str, value: str, timeout: int):
pass
def get_stat(self, key: str):
pass
def get_stats(self, pattern: str):
pass
@staticmethod
def get_connection(list_key: str = Conf.PREFIX) -> Redis:
pass
| 14 | 0 | 3 | 0 | 3 | 0 | 2 | 0 | 1 | 6 | 1 | 0 | 11 | 1 | 12 | 31 | 53 | 11 | 42 | 19 | 28 | 0 | 41 | 17 | 28 | 3 | 1 | 1 | 19 |
143,414 |
Koed00/django-q
|
django_q/brokers/disque.py
|
django_q.brokers.disque.Disque
|
class Disque(Broker):
def enqueue(self, task):
retry = Conf.RETRY if Conf.RETRY > 0 else f"{Conf.RETRY} REPLICATE 1"
return self.connection.execute_command(
f"ADDJOB {self.list_key} {task} 500 RETRY {retry}"
).decode()
def dequeue(self):
tasks = self.connection.execute_command(
f"GETJOB COUNT {Conf.BULK} TIMEOUT 1000 FROM {self.list_key}"
)
if tasks:
return [(t[1].decode(), t[2].decode()) for t in tasks]
def queue_size(self):
return self.connection.execute_command(f"QLEN {self.list_key}")
def acknowledge(self, task_id):
command = "FASTACK" if Conf.DISQUE_FASTACK else "ACKJOB"
return self.connection.execute_command(f"{command} {task_id}")
def ping(self) -> bool:
return self.connection.execute_command("HELLO")[0] > 0
def delete(self, task_id):
return self.connection.execute_command(f"DELJOB {task_id}")
def fail(self, task_id):
return self.delete(task_id)
def delete_queue(self) -> int:
jobs = self.connection.execute_command(f"JSCAN QUEUE {self.list_key}")[1]
if jobs:
job_ids = " ".join(jid.decode() for jid in jobs)
self.connection.execute_command(f"DELJOB {job_ids}")
return len(jobs)
def info(self) -> str:
if not self._info:
info = self.connection.info("server")
self._info = f'Disque {info["disque_version"]}'
return self._info
@staticmethod
def get_connection(list_key: str = Conf.PREFIX) -> Redis:
if not Conf.DISQUE_NODES:
raise redis.exceptions.ConnectionError(_("No Disque nodes configured"))
# randomize nodes
random.shuffle(Conf.DISQUE_NODES)
# find one that works
for node in Conf.DISQUE_NODES:
host, port = node.split(":")
kwargs = {"host": host, "port": port}
if Conf.DISQUE_AUTH:
kwargs["password"] = Conf.DISQUE_AUTH
redis_client = redis.Redis(**kwargs)
redis_client.decode_responses = True
try:
redis_client.execute_command("HELLO")
return redis_client
except redis.exceptions.ConnectionError:
continue
raise redis.exceptions.ConnectionError(
_("Could not connect to any Disque nodes")
)
|
class Disque(Broker):
def enqueue(self, task):
pass
def dequeue(self):
pass
def queue_size(self):
pass
def acknowledge(self, task_id):
pass
def ping(self) -> bool:
pass
def delete(self, task_id):
pass
def fail(self, task_id):
pass
def delete_queue(self) -> int:
pass
def info(self) -> str:
pass
@staticmethod
def get_connection(list_key: str = Conf.PREFIX) -> Redis:
pass
| 12 | 0 | 5 | 0 | 5 | 0 | 2 | 0.04 | 1 | 4 | 1 | 0 | 9 | 1 | 10 | 29 | 65 | 9 | 54 | 23 | 42 | 2 | 47 | 22 | 36 | 5 | 1 | 2 | 19 |
143,415 |
Koed00/django-q
|
django_q/brokers/orm.py
|
django_q.brokers.orm.ORM
|
class ORM(Broker):
@staticmethod
def get_connection(list_key: str = Conf.PREFIX):
if transaction.get_autocommit(
using=Conf.ORM
): # Only True when not in an atomic block
# Make sure stale connections in the broker thread are explicitly
# closed before attempting DB access.
# logger.debug("Broker thread calling close_old_connections")
db.close_old_connections()
else:
logger.debug("Broker in an atomic transaction")
return OrmQ.objects.using(Conf.ORM)
def queue_size(self) -> int:
return (
self.get_connection()
.filter(key=self.list_key, lock__lte=_timeout())
.count()
)
def lock_size(self) -> int:
return (
self.get_connection().filter(key=self.list_key, lock__gt=_timeout()).count()
)
def purge_queue(self):
return self.get_connection().filter(key=self.list_key).delete()
def ping(self) -> bool:
return True
def info(self) -> str:
if not self._info:
self._info = f"ORM {Conf.ORM}"
return self._info
def fail(self, task_id):
self.delete(task_id)
def enqueue(self, task):
package = self.get_connection().create(
key=self.list_key, payload=task, lock=_timeout()
)
return package.pk
def dequeue(self):
tasks = self.get_connection().filter(key=self.list_key, lock__lt=_timeout())[
0 : Conf.BULK
]
if tasks:
task_list = []
for task in tasks:
if (
self.get_connection()
.filter(id=task.id, lock=task.lock)
.update(lock=timezone.now())
):
task_list.append((task.pk, task.payload))
# else don't process, as another cluster has been faster than us on that task
return task_list
# empty queue, spare the cpu
sleep(Conf.POLL)
def delete_queue(self):
return self.purge_queue()
def delete(self, task_id):
self.get_connection().filter(pk=task_id).delete()
def acknowledge(self, task_id):
return self.delete(task_id)
|
class ORM(Broker):
@staticmethod
def get_connection(list_key: str = Conf.PREFIX):
pass
def queue_size(self) -> int:
pass
def lock_size(self) -> int:
pass
def purge_queue(self):
pass
def ping(self) -> bool:
pass
def info(self) -> str:
pass
def fail(self, task_id):
pass
def enqueue(self, task):
pass
def dequeue(self):
pass
def delete_queue(self):
pass
def delete_queue(self):
pass
def acknowledge(self, task_id):
pass
| 14 | 0 | 5 | 0 | 5 | 1 | 1 | 0.11 | 1 | 5 | 2 | 0 | 11 | 2 | 12 | 31 | 72 | 11 | 56 | 20 | 42 | 6 | 38 | 18 | 25 | 4 | 1 | 3 | 17 |
143,416 |
Koed00/django-q
|
django_q/brokers/mongo.py
|
django_q.brokers.mongo.Mongo
|
class Mongo(Broker):
def __init__(self, list_key=Conf.PREFIX):
super(Mongo, self).__init__(list_key)
self.collection = self.get_collection()
def __setstate__(self, state):
super(Mongo, self).__setstate__(state)
self.collection = self.get_collection()
@staticmethod
def get_connection(list_key: str = Conf.PREFIX) -> MongoClient:
return MongoClient(**Conf.MONGO)
def get_collection(self):
if not Conf.MONGO_DB:
try:
Conf.MONGO_DB = self.connection.get_default_database().name
except ConfigurationError:
Conf.MONGO_DB = "django-q"
return self.connection[Conf.MONGO_DB][self.list_key]
def queue_size(self):
return self.collection.count_documents({"lock": {"$lte": _timeout()}})
def lock_size(self):
return self.collection.count_documents({"lock": {"$gt": _timeout()}})
def purge_queue(self):
return self.delete_queue()
def ping(self) -> bool:
return self.info is not None
def info(self) -> str:
if not self._info:
self._info = f"MongoDB {self.connection.server_info()['version']}"
return self._info
def fail(self, task_id):
self.delete(task_id)
def enqueue(self, task):
inserted_id = self.collection.insert_one(
{"payload": task, "lock": _timeout()}
).inserted_id
return str(inserted_id)
def dequeue(self):
task = self.collection.find_one_and_update(
{"lock": {"$lte": _timeout()}}, {"$set": {"lock": timezone.now()}}
)
if task:
return [(str(task["_id"]), task["payload"])]
# empty queue, spare the cpu
sleep(Conf.POLL)
def delete_queue(self):
return self.collection.drop()
def delete(self, task_id):
self.collection.delete_one({"_id": ObjectId(task_id)})
def acknowledge(self, task_id):
return self.delete(task_id)
|
class Mongo(Broker):
def __init__(self, list_key=Conf.PREFIX):
pass
def __setstate__(self, state):
pass
@staticmethod
def get_connection(list_key: str = Conf.PREFIX) -> MongoClient:
pass
def get_collection(self):
pass
def queue_size(self):
pass
def lock_size(self):
pass
def purge_queue(self):
pass
def ping(self) -> bool:
pass
def info(self) -> str:
pass
def fail(self, task_id):
pass
def enqueue(self, task):
pass
def dequeue(self):
pass
def delete_queue(self):
pass
def delete_queue(self):
pass
def acknowledge(self, task_id):
pass
| 17 | 0 | 3 | 0 | 3 | 0 | 1 | 0.02 | 1 | 4 | 1 | 0 | 14 | 2 | 15 | 34 | 64 | 14 | 49 | 21 | 32 | 1 | 44 | 20 | 28 | 3 | 1 | 2 | 19 |
143,417 |
Koed00/django-q
|
django_q/brokers/ironmq.py
|
django_q.brokers.ironmq.IronMQBroker
|
class IronMQBroker(Broker):
def enqueue(self, task):
return self.connection.post(task)["ids"][0]
def dequeue(self):
timeout = Conf.RETRY or None
tasks = self.connection.get(timeout=timeout, wait=1, max=Conf.BULK)["messages"]
if tasks:
return [(t["id"], t["body"]) for t in tasks]
def ping(self) -> bool:
return self.connection.name == self.list_key
def info(self) -> str:
return "IronMQ"
def queue_size(self):
return self.connection.size()
def delete_queue(self):
try:
return self.connection.delete_queue()["msg"]
except HTTPError:
return False
def purge_queue(self):
return self.connection.clear()
def delete(self, task_id):
try:
return self.connection.delete(task_id)["msg"]
except HTTPError:
return False
def fail(self, task_id):
self.delete(task_id)
def acknowledge(self, task_id):
return self.delete(task_id)
@staticmethod
def get_connection(list_key: str = Conf.PREFIX) -> Queue:
ironmq = IronMQ(name=None, **Conf.IRON_MQ)
return ironmq.queue(queue_name=list_key)
|
class IronMQBroker(Broker):
def enqueue(self, task):
pass
def dequeue(self):
pass
def ping(self) -> bool:
pass
def info(self) -> str:
pass
def queue_size(self):
pass
def delete_queue(self):
pass
def purge_queue(self):
pass
def delete_queue(self):
pass
def fail(self, task_id):
pass
def acknowledge(self, task_id):
pass
@staticmethod
def get_connection(list_key: str = Conf.PREFIX) -> Queue:
pass
| 13 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 4 | 1 | 0 | 10 | 0 | 11 | 30 | 44 | 10 | 34 | 16 | 21 | 0 | 33 | 15 | 21 | 2 | 1 | 1 | 14 |
143,418 |
Koed00/django-q
|
django_q/admin.py
|
django_q.admin.TaskAdmin
|
class TaskAdmin(admin.ModelAdmin):
"""model admin for success tasks."""
list_display = ("name", "func", "started", "stopped", "time_taken", "group")
def has_add_permission(self, request):
"""Don't allow adds."""
return False
def get_queryset(self, request):
"""Only show successes."""
qs = super(TaskAdmin, self).get_queryset(request)
return qs.filter(success=True)
search_fields = ("name", "func", "group")
readonly_fields = []
list_filter = ("group",)
def get_readonly_fields(self, request, obj=None):
"""Set all fields readonly."""
return list(self.readonly_fields) + [field.name for field in obj._meta.fields]
|
class TaskAdmin(admin.ModelAdmin):
'''model admin for success tasks.'''
def has_add_permission(self, request):
'''Don't allow adds.'''
pass
def get_queryset(self, request):
'''Only show successes.'''
pass
def get_readonly_fields(self, request, obj=None):
'''Set all fields readonly.'''
pass
| 4 | 4 | 3 | 0 | 2 | 1 | 1 | 0.33 | 1 | 2 | 0 | 0 | 3 | 0 | 3 | 3 | 21 | 5 | 12 | 9 | 8 | 4 | 12 | 9 | 8 | 1 | 1 | 0 | 3 |
143,419 |
Koed00/django-rq-jobs
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Koed00_django-rq-jobs/django_rq_jobs/models.py
|
django_rq_jobs.models.Job.Meta
|
class Meta(object):
ordering = ['next_run']
verbose_name_plural = _("Scheduled jobs")
|
class Meta(object):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,420 |
Koed00/django-rq-jobs
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Koed00_django-rq-jobs/django_rq_jobs/tests/tests.py
|
django_rq_jobs.tests.tests.RQJobsTestCase
|
class RQJobsTestCase(TestCase):
@override_settings(RQ_JOBS_MODULE='django_rq_jobs.tests.tasks')
def setUp(self):
activate('en-en')
Job.objects.create(
task='django_rq_jobs.tests.tasks.django_check', schedule_type=Job.HOURLY)
Job.objects.create(
task='django_rq_jobs.tests.tasks.django_check', schedule_type=Job.WEEKLY)
Job.objects.create(
task='django_rq_jobs.tests.tasks.django_check', schedule_type=Job.MONTHLY)
Job.objects.create(
task='django_rq_jobs.tests.tasks.django_check', schedule_type=Job.QUARTERLY)
Job.objects.create(
task='django_rq_jobs.tests.tasks.django_check', schedule_type=Job.YEARLY)
Job.objects.create(task='django_rq_jobs.tests.tasks.django_arg_check',
schedule_type=Job.ONCE, args={'verbosity': 3})
def test_camel_case(self):
self.assertEqual(underscore_to_camelcase(
'this_is_a_function'), 'This Is A Function')
@override_settings(RQ_JOBS_MODULE='django_rq_jobs.tests.tasks')
def test_task_list(self):
self.assertEqual(task_list(), [
('django_rq_jobs.tests.tasks.django_arg_check', 'Django Arg Check'),
('django_rq_jobs.tests.tasks.django_check', 'Django Check')
])
with self.settings(RQ_JOBS_MODULE=None):
self.assertRaises(ImproperlyConfigured, task_list)
@override_settings(RQ_QUEUES="{'high': {},'default': {}, 'low': {}")
def test_queue_index_by_name(self):
self.assertTrue(queue_index_by_name('default') >= 0)
@override_settings(RQ_JOBS_MODULE='django_rq_jobs.tests.tasks')
def test_create_job(self):
"""test simple job creation """
test_job = Job.objects.get(
task='django_rq_jobs.tests.tasks.django_check', schedule_type=Job.HOURLY)
self.assertEqual(
test_job.task, 'django_rq_jobs.tests.tasks.django_check')
self.assertEqual(test_job.args, None)
self.assertEqual(test_job.schedule_type, Job.HOURLY)
self.assertEqual(test_job.repeats, -1)
self.assertEqual(test_job.last_run, None)
self.assertTrue(test_job.next_run < timezone.now())
self.assertEqual(test_job.rq_id, None)
self.assertEqual(test_job.rq_job, None)
self.assertEqual(test_job.rq_link(), None)
self.assertEqual(test_job.rq_origin, None)
self.assertEqual(test_job.rq_status(), None)
@override_settings(RQ_JOBS_MODULE='django_rq_jobs.tests.tasks', RQ={'AUTOCOMMIT': True})
def test_run_job(self):
"""run a job and check if it's rescheduled properly"""
management.call_command('rqjobs')
get_worker('default').work(burst=True)
test_job = Job.objects.get(
task='django_rq_jobs.tests.tasks.django_check', schedule_type=Job.HOURLY)
self.assertNotEqual(test_job, None)
self.assertNotEqual(test_job.rq_id, None)
self.assertNotEqual(test_job.rq_origin, None)
self.assertIsNot(test_job.rq_job, None)
self.assertNotEqual(test_job.rq_status(), None)
self.assertNotEqual(test_job.rq_origin, None)
self.assertNotEqual(test_job.rq_link(), None)
self.assertNotEqual(test_job.last_run, None)
self.assertTrue(test_job.next_run > timezone.now())
@override_settings(RQ_JOBS_MODULE='django_rq_jobs.tests.tasks')
def test_run_once_job(self):
"""run an single run job with arguments and check if it gets deleted"""
test_job = Job.objects.get(
task='django_rq_jobs.tests.tasks.django_arg_check')
management.call_command('rqjobs')
get_worker('default').work(burst=True)
self.assertFalse(Job.objects.filter(pk=test_job.pk).exists())
@override_settings(RQ_JOBS_MODULE='django_rq_jobs.tests.tasks')
def test_run_limited_job(self):
"""run a limited run job twice to see if it counts down and gets deleted"""
test_job = Job.objects.create(task='django_rq_jobs.tests.tasks.django_check', schedule_type=Job.HOURLY, repeats=2,
next_run=timezone.now() + timedelta(hours=-2))
management.call_command('rqjobs')
get_worker('default').work(burst=True)
self.assertEqual(Job.objects.get(pk=test_job.pk).repeats, 1)
management.call_command('rqjobs')
get_worker('default').work(burst=True)
self.assertFalse(Job.objects.filter(pk=test_job.pk).exists())
def tearDown(self):
q = get_failed_queue()
q.empty()
|
class RQJobsTestCase(TestCase):
@override_settings(RQ_JOBS_MODULE='django_rq_jobs.tests.tasks')
def setUp(self):
pass
def test_camel_case(self):
pass
@override_settings(RQ_JOBS_MODULE='django_rq_jobs.tests.tasks')
def test_task_list(self):
pass
@override_settings(RQ_QUEUES="{'high': {},'default': {}, 'low': {}")
def test_queue_index_by_name(self):
pass
@override_settings(RQ_JOBS_MODULE='django_rq_jobs.tests.tasks')
def test_create_job(self):
'''test simple job creation '''
pass
@override_settings(RQ_JOBS_MODULE='django_rq_jobs.tests.tasks', RQ={'AUTOCOMMIT': True})
def test_run_job(self):
'''run a job and check if it's rescheduled properly'''
pass
@override_settings(RQ_JOBS_MODULE='django_rq_jobs.tests.tasks')
def test_run_once_job(self):
'''run an single run job with arguments and check if it gets deleted'''
pass
@override_settings(RQ_JOBS_MODULE='django_rq_jobs.tests.tasks')
def test_run_limited_job(self):
'''run a limited run job twice to see if it counts down and gets deleted'''
pass
def tearDown(self):
pass
| 17 | 4 | 7 | 0 | 7 | 0 | 1 | 0.06 | 1 | 1 | 0 | 0 | 9 | 0 | 9 | 9 | 82 | 8 | 70 | 22 | 53 | 4 | 59 | 15 | 49 | 1 | 1 | 1 | 9 |
143,421 |
Koed00/django-rq-jobs
|
Koed00_django-rq-jobs/django_rq_jobs/management/commands/rqjobs.py
|
django_rq_jobs.management.commands.rqjobs.Command
|
class Command(BaseCommand):
help = _("Queues scheduled jobs")
BaseCommand.can_import_settings = True
BaseCommand.requires_system_checks = True
BaseCommand.leave_locale_alone = True
def handle(self, *args, **options):
for job in Job.objects.exclude(repeats=0).filter(next_run__lt=arrow.utcnow().datetime):
if '.' not in job.task:
job = fix_module(job)
if job.args:
rq = django_rq.enqueue(job.rq_task, **literal_eval(job.args))
else:
rq = django_rq.enqueue(job.rq_task)
job.rq_id = rq.id
job.rq_origin = rq.origin
job.last_run = arrow.utcnow().datetime
self.stdout.write(_('* Queueing {} on {}.').format(job.get_task_display(), job.rq_origin), ending=' ')
if job.schedule_type != Job.ONCE:
if job.repeats < 0 or job.repeats > 1:
next_run = arrow.get(job.next_run)
if job.schedule_type == Job.HOURLY:
next_run = next_run.replace(hours=+1)
elif job.schedule_type == Job.DAILY:
next_run = next_run.replace(days=+1)
elif job.schedule_type == Job.WEEKLY:
next_run = next_run.replace(weeks=+1)
elif job.schedule_type == Job.MONTHLY:
next_run = next_run.replace(months=+1)
elif job.schedule_type == Job.QUARTERLY:
next_run = next_run.replace(months=+3)
elif job.schedule_type == Job.YEARLY:
next_run = next_run.replace(years=+1)
job.next_run = next_run.datetime
if job.repeats > 1:
job.repeats += -1
self.stdout.write(_('Next run {}.').format(next_run.humanize()))
job.save()
else:
job.delete()
self.stdout.write(_('Deleting limited run task'))
else:
self.stdout.write(_('Deleting run once task'))
job.delete()
|
class Command(BaseCommand):
def handle(self, *args, **options):
pass
| 2 | 0 | 38 | 0 | 38 | 0 | 13 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 44 | 1 | 43 | 6 | 41 | 0 | 35 | 6 | 33 | 13 | 1 | 4 | 13 |
143,422 |
Koed00/django-rq-jobs
|
Koed00_django-rq-jobs/django_rq_jobs/models.py
|
django_rq_jobs.models.Job
|
class Job(models.Model):
task = models.CharField(max_length=200, choices=task_list())
args = models.CharField(max_length=255, null=True, blank=True)
ONCE = 'O'
HOURLY = 'H'
DAILY = 'D'
WEEKLY = 'W'
MONTHLY = 'M'
QUARTERLY = 'Q'
YEARLY = 'Y'
TYPE = (
(ONCE, _('Once')),
(HOURLY, _('Hourly')),
(DAILY, _('Daily')),
(WEEKLY, _('Weekly')),
(MONTHLY, _('Monthly')),
(QUARTERLY, _('Quarterly')),
(YEARLY, _('Yearly')),
)
schedule_type = models.CharField(max_length=1, choices=TYPE, default=TYPE[0][0], verbose_name=_('Schedule Type'))
repeats = models.SmallIntegerField(default=-1, verbose_name=_('Repeats'))
next_run = models.DateTimeField(verbose_name=_('Next Run'), default=timezone.now, null=True)
last_run = models.DateTimeField(verbose_name=_('Last Run'), editable=False, null=True)
rq_id = models.CharField(max_length=64, editable=False, null=True)
rq_origin = models.CharField(max_length=64, editable=False, null=True)
@property
def rq_job(self):
"""The last RQ Job this ran on"""
if not self.rq_id or not self.rq_origin:
return
try:
return RQJob.fetch(self.rq_id, connection=get_connection(self.rq_origin))
except NoSuchJobError:
return
def rq_status(self):
"""Proxy for status so we can include it in the admin"""
if self.rq_job:
return self.rq_job.status
def rq_link(self):
"""Link to Django-RQ status page for this job"""
if self.rq_job:
url = reverse('rq_job_detail',
kwargs={'job_id': self.rq_id, 'queue_index': queue_index_by_name(self.rq_origin)})
return '<a href="{}">{}</a>'.format(url, self.rq_id)
@property
def rq_task(self):
"""
The function to call for this task.
Config errors are caught by tasks_list() already.
"""
task_path = self.task.split('.')
module_name = '.'.join(task_path[:-1])
task_name = task_path[-1]
module = importlib.import_module(module_name)
return getattr(module, task_name)
rq_link.allow_tags = True
class Meta(object):
ordering = ['next_run']
verbose_name_plural = _("Scheduled jobs")
def __unicode__(self):
return self.get_task_display()
|
class Job(models.Model):
@property
def rq_job(self):
'''The last RQ Job this ran on'''
pass
def rq_status(self):
'''Proxy for status so we can include it in the admin'''
pass
def rq_link(self):
'''Link to Django-RQ status page for this job'''
pass
@property
def rq_task(self):
'''
The function to call for this task.
Config errors are caught by tasks_list() already.
'''
pass
class Meta(object):
def __unicode__(self):
pass
| 9 | 4 | 6 | 0 | 5 | 1 | 2 | 0.13 | 1 | 0 | 0 | 0 | 5 | 0 | 5 | 5 | 69 | 8 | 54 | 32 | 45 | 7 | 43 | 30 | 36 | 3 | 1 | 1 | 9 |
143,423 |
Koed00/django-rq-jobs
|
Koed00_django-rq-jobs/django_rq_jobs/migrations/0002_auto_20150721_1255.py
|
django_rq_jobs.migrations.0002_auto_20150721_1255.Migration
|
class Migration(migrations.Migration):
dependencies = [
('django_rq_jobs', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='job',
name='task',
field=models.CharField(max_length=200, choices=[]),
),
]
|
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 13 | 2 | 11 | 3 | 10 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,424 |
Koed00/django-rq-jobs
|
Koed00_django-rq-jobs/django_rq_jobs/migrations/0001_initial.py
|
django_rq_jobs.migrations.0001_initial.Migration
|
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Job',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('task', models.CharField(choices=[], max_length=100)),
('args', models.CharField(blank=True, max_length=255, null=True)),
('schedule_type', models.CharField(choices=[('O', 'Once'), ('H', 'Hourly'), ('D', 'Daily'), ('W', 'Weekly'), ('M', 'Monthly'), ('Q', 'Quarterly'), ('Y', 'Yearly')], verbose_name='Schedule Type', max_length=1, default='O')),
('repeats', models.SmallIntegerField(verbose_name='Repeats', default=-1)),
('next_run', models.DateTimeField(verbose_name='Next Run', null=True, default=django.utils.timezone.now)),
('last_run', models.DateTimeField(verbose_name='Last Run', null=True, editable=False)),
('rq_id', models.CharField(editable=False, max_length=64, null=True)),
('rq_origin', models.CharField(editable=False, max_length=64, null=True)),
],
options={
'verbose_name_plural': 'Scheduled jobs',
'ordering': ['next_run'],
},
),
]
|
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 25 | 2 | 23 | 3 | 22 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,425 |
Koed00/django-rq-jobs
|
Koed00_django-rq-jobs/django_rq_jobs/apps.py
|
django_rq_jobs.apps.RQJobsConfig
|
class RQJobsConfig(AppConfig):
name = 'django_rq_jobs'
verbose_name = 'RQ Jobs'
|
class RQJobsConfig(AppConfig):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
143,426 |
Koed00/django-rq-jobs
|
Koed00_django-rq-jobs/django_rq_jobs/admin.py
|
django_rq_jobs.admin.ScheduleAdmin
|
class ScheduleAdmin(admin.ModelAdmin):
list_display = (
u'id',
'task',
'schedule_type',
'repeats',
'last_run',
'next_run',
'rq_link',
'rq_status'
)
list_filter = ('last_run', 'next_run', 'schedule_type')
list_display_links = ('id', 'task')
|
class ScheduleAdmin(admin.ModelAdmin):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 13 | 0 | 13 | 4 | 12 | 0 | 4 | 4 | 3 | 0 | 1 | 0 | 0 |
143,427 |
KoffeinFlummi/Chronyk
|
KoffeinFlummi_Chronyk/chronyk/chronyk.py
|
chronyk.chronyk.DateRangeError
|
class DateRangeError(Exception):
"""Exception thrown when the value passed to the chronyk.Chronyk
constructor exceeds the range permitted with allowpast and allowfuture.
"""
pass
|
class DateRangeError(Exception):
'''Exception thrown when the value passed to the chronyk.Chronyk
constructor exceeds the range permitted with allowpast and allowfuture.
'''
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 5 | 0 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
143,428 |
KoffeinFlummi/Chronyk
|
KoffeinFlummi_Chronyk/chronyk/chronyk.py
|
chronyk.chronyk.Chronyk
|
class Chronyk:
"""Class containing methods for parsing and outputting times and dates for
humans. For usage information, consule the module documentation.
:param timestr = current time
This can be either a timestamp, a datetime object or a string
describing a time or date.
:param timezone = local timezone
The timezone (in seconds west of UTC) the given time is in. By default,
the local tz is used. To use UTC, use timezone=0.
:param allowpast = True
Determines if values from the past are allowed. This can be handy when
parsing direct user input.
:param allowfuture = True
Determines if values from the future are allowed. This can be handy
when parsing direct user input.
If the passed values exceeds the bounds set by allowpast and allowfuture,
a chronyk.DateRangeError is raised. If the type of the value is unknown to
Chronyk, a TypeError is raised. If Chronyk fails to parse a given string,
a ValueError is raised.
Subtracting Chronyk instances from another will yield a ChronykDelta
object, which in turn can be added to other Chronyk instances.
"""
def __init__(
self, timestr=None, timezone=LOCALTZ,
allowpast=True, allowfuture=True):
""" Converts input to UTC timestamp. """
if timestr is None:
timestr = time.time()
self.timezone = timezone
if type(timestr) == str:
self.__timestamp__ = self.__fromstring__(timestr)
elif type(timestr) in [int, float]:
self.__timestamp__ = timestr + self.timezone
elif type(timestr) in [
datetime.datetime, datetime.date, datetime.time]:
self.__timestamp__ = _mktime(timestr.timetuple()) + self.timezone
elif type(timestr) == time.struct_time:
self.__timestamp__ = _mktime(timestr) + self.timezone
else:
raise TypeError("Failed to recognize given type.")
if not allowpast and self.__timestamp__ < currentutc():
raise DateRangeError("Values from the past are not allowed.")
if not allowfuture and self.__timestamp__ > currentutc():
raise DateRangeError("Values from the future are not allowed.")
def __repr__(self):
return "Chronyk({})".format(self.timestring())
# Type Conversions
def __str__(self):
return self.timestring()
def __int__(self):
return int(self.timestamp(timezone=0))
def __float__(self):
return float(self.timestamp(timezone=0))
# Comparison Operators
def __eq__(self, other):
if type(other) == Chronyk:
return self.__timestamp__ == other.timestamp(timezone=0)
if type(other) in [int, float]:
return self.__timestamp__ == other
return NotImplemented
def __ne__(self, other):
return not self.__eq__(other)
def __gt__(self, other):
if type(other) == Chronyk:
return self.__timestamp__ > other.timestamp(timezone=0)
if type(other) in [int, float]:
return self.__timestamp__ > other
return NotImplemented
def __le__(self, other):
return not self.__gt__(other)
def __lt__(self, other):
if type(other) == Chronyk:
return self.__timestamp__ < other.timestamp(timezone=0)
if type(other) in [int, float]:
return self.__timestamp__ < other
return NotImplemented
def __ge__(self, other):
return not self.__lt__(other)
# Arithmetic Operators
def __add__(self, other):
if type(other) == ChronykDelta:
newtimest = self.timestamp() + other.seconds
return Chronyk(newtimest, timezone=self.timezone)
if type(other) in [int, float]:
newtimest = self.timestamp() + other
return Chronyk(newtimest, timezone=self.timezone)
return NotImplemented
def __sub__(self, other):
if type(other) == Chronyk:
delta = self.__timestamp__ - other.timestamp(timezone=0)
return ChronykDelta(delta)
if type(other) == ChronykDelta:
newtimest = self.timestamp() - other.seconds
return Chronyk(newtimest, timezone=self.timezone)
if type(other) in [int, float]:
newtimest = self.timestamp() - other
return Chronyk(newtimest, timezone=self.timezone)
return NotImplemented
# Helpers
def __fromrelative__(self, timestr):
timestr = " {} ".format(timestr)
if timestr.find(" ago ") == -1 and timestr.find(" in ") == -1:
return None
future = timestr.find(" in ") != -1
coef = 1 if future else -1
dati = datetime.datetime.utcnow()
# timedelta does not support years
if timestr.find(" year") != -1:
try:
match = re.match(r".*?([0-9]+?) year", timestr)
assert match is not None
dati = dati.replace(
year=dati.year + int(match.group(1)) * coef
)
except AssertionError:
pass
# ... or months
if timestr.find(" month") != -1:
try:
match = re.match(r".*?([0-9]+?) month", timestr)
assert match is not None
months = int(match.group(1))
newyear = dati.year + int(((dati.month - 1) + months * coef) / 12)
newmonth = (((dati.month - 1) + months * coef) % 12) + 1
newday = dati.day
while newday > calendar.monthrange(newyear, newmonth)[1]:
newday -= 1
dati = dati.replace(year=newyear, month=newmonth, day=newday)
except AssertionError:
pass
delta = {
"weeks": 0,
"days": 0,
"hours": 0,
"minutes": 0,
"seconds": 0
}
for key in delta.keys():
if timestr.find(" " + key[:-1]) != -1:
try:
match = re.match(
re.compile(".*?([0-9]+?) " + key[:-1]),
timestr
)
assert match is not None
delta[key] += int(match.group(1))
except AssertionError:
pass
if not future:
for key in delta.keys():
delta[key] *= -1
dati = dati + datetime.timedelta(**delta)
return _mktime(dati.timetuple())
def __fromabsolute__(self, timestr):
# http://en.wikipedia.org/wiki/Date_format_by_country
datetimeformats = [
"%Y-%m-%dT%H:%M:%SZ",
"%Y-%m-%dT%H:%M:%S%z",
"%Y-%m-%dT%H:%M:%S%Z",
"%c",
"%s"
]
dateformats = [
# ISO
"%Y-%m-%d",
# YMD other than ISO
"%Y%m%d",
"%Y.%m.%d",
# Popular MDY formats
"%m/%d/%Y",
"%m/%d/%y",
# DMY with full year
"%d %m %Y",
"%d-%m-%Y",
"%d/%m/%Y",
"%d/%m %Y",
"%d.%m.%Y",
"%d. %m. %Y",
"%d %b %Y",
"%d %B %Y",
"%d. %b %Y",
"%d. %B %Y",
# MDY with full year
"%b %d %Y",
"%b %dst %Y",
"%b %dnd %Y",
"%b %drd %Y",
"%b %dth %Y",
"%b %d, %Y",
"%b %dst, %Y",
"%b %dnd, %Y",
"%b %drd, %Y",
"%b %dth, %Y",
"%B %d %Y",
"%B %dst %Y",
"%B %dnd %Y",
"%B %drd %Y",
"%B %dth %Y",
"%B %d, %Y",
"%B %dst, %Y",
"%B %dnd, %Y",
"%B %drd, %Y",
"%B %dth, %Y",
# DMY with 2-digit year
"%d %m %y",
"%d-%m-%y",
"%d/%m/%y",
"%d/%m-%y", # why denmark?
"%d.%m.%y",
"%d. %m. %y",
"%d %b %y",
"%d %B %y",
"%d. %b %y",
"%d. %B %y",
# MDY with 2-digit year
"%b %dst %y",
"%b %dnd %y",
"%b %drd %y",
"%b %dth %y",
"%B %dst %y",
"%B %dnd %y",
"%B %drd %y",
"%B %dth %y",
]
timeformats = [
# 24 hour clock with seconds
"%H:%M:%S %z",
"%H:%M:%S %z",
"%H:%M:%S",
# 24 hour clock without seconds
"%H:%M %z",
"%H:%M %Z",
"%H:%M",
# 12 hour clock with seconds
"%I:%M:%S %p %z",
"%I:%M:%S %p %Z",
"%I:%M:%S %p",
# 12 hour clock without seconds
"%I:%M %p %z",
"%I:%M %p %Z",
"%I:%M %p"
]
# Prepare combinations
for dateformat in dateformats:
for timeformat in timeformats:
datetimeformats.append("{} {}".format(dateformat, timeformat))
datetimeformats.append(dateformat)
# Date / Datetime
for dateformat in datetimeformats:
try:
struct = time.strptime(timestr, dateformat)
except ValueError:
pass
else:
timestamp = _mktime(struct)
if "z" not in dateformat.lower():
# string doesn't contains timezone information.
timestamp += self.timezone
return timestamp
# Time (using today as date)
for timeformat in timeformats:
timestr_full = _strftime("%Y-%m-%d") + " " + timestr
format_full = "%Y-%m-%d {}".format(timeformat)
try:
struct = time.strptime(timestr_full, format_full)
except ValueError:
pass
else:
timestamp = _mktime(struct)
if "z" not in timeformat.lower():
# string doesn't contains timezone information.
timestamp += self.timezone
return timestamp
def __fromstring__(self, timestr):
timestr = timestr.lower().strip().replace(". ", " ")
# COMMON NAMES FOR TIMES
if timestr in ["today", "now", "this week", "this month", "this day"]:
return currentutc()
if timestr in ["yesterday", "yester day"]:
return currentutc() - 24 * 3600
if timestr in ["yesteryear", "yester year"]:
dati = datetime.datetime.utcnow()
return _mktime(dati.replace(year=dati.year - 1).timetuple())
# RELATIVE TIMES
relative = self.__fromrelative__(timestr)
if relative is not None:
return relative
# ABSOLUTE TIMES
absolute = self.__fromabsolute__(timestr)
if absolute is not None:
return absolute
raise ValueError("Failed to parse time string.")
# Methods
def datetime(self, timezone=None):
"""Returns a datetime object.
This object retains all information, including timezones.
:param timezone = self.timezone
The timezone (in seconds west of UTC) to return the value in. By
default, the timezone used when constructing the class is used
(local one by default). To use UTC, use timezone = 0. To use the
local tz, use timezone = chronyk.LOCALTZ.
"""
if timezone is None:
timezone = self.timezone
return _dtfromtimestamp(self.__timestamp__ - timezone)
def date(self, timezone=None):
"""Returns a datetime.date object.
This object retains all information, including timezones.
:param timezone = self.timezone
The timezone (in seconds west of UTC) to return the value in. By
default, the timezone used when constructing the class is used
(local one by default). To use UTC, use timezone = 0. To use the
local tz, use timezone = chronyk.LOCALTZ.
"""
if timezone is None:
timezone = self.timezone
return _dfromtimestamp(self.__timestamp__ - timezone)
def timestamp(self, timezone=None):
"""Returns a timestamp (seconds since the epoch).
:param timezone = self.timezone
The timezone (in seconds west of UTC) to return the value in. By
default, the timezone used when constructing the class is used
(local one by default). To use UTC, use timezone = 0. To use the
local tz, use timezone = chronyk.LOCALTZ.
"""
if timezone is None:
timezone = self.timezone
return self.__timestamp__ - timezone
def ctime(self, timezone=None):
"""Returns a ctime string.
:param timezone = self.timezone
The timezone (in seconds west of UTC) to return the value in. By
default, the timezone used when constructing the class is used
(local one by default). To use UTC, use timezone = 0. To use the
local tz, use timezone = chronyk.LOCALTZ.
"""
if timezone is None:
timezone = self.timezone
return time.ctime(self.__timestamp__ - timezone)
def timestring(self, pattern="%Y-%m-%d %H:%M:%S", timezone=None):
"""Returns a time string.
:param pattern = "%Y-%m-%d %H:%M:%S"
The format used. By default, an ISO-type format is used. The
syntax here is identical to the one used by time.strftime() and
time.strptime().
:param timezone = self.timezone
The timezone (in seconds west of UTC) to return the value in. By
default, the timezone used when constructing the class is used
(local one by default). To use UTC, use timezone = 0. To use the
local tz, use timezone = chronyk.LOCALTZ.
"""
if timezone is None:
timezone = self.timezone
timestamp = self.__timestamp__ - timezone
timestamp -= LOCALTZ
return _strftime(pattern, _gmtime(timestamp))
def relativestring(
self, now=None, minimum=10, maximum=3600 * 24 * 30,
pattern="%Y-%m-%d", timezone=None, maxunits=1):
"""Returns a relative time string (e.g. "10 seconds ago").
:param now = time.time()
The timestamp to compare this time to. By default, the current
local time is used.
:param minimum = 10
Amount in seconds under which "just now" is returned instead of a
numerical description. A value <= 0 disables this.
:param maximum = 3600 * 24 * 30 (30 days)
Amount in seconds above which the actual date is returned instead
of a numerical description. A value < 0 disables this.
:param pattern = "%Y-%m-%d"
The string format to use when maximum is exceeded. The syntax here
is identical to the one used by Chronyk.timestring(), which in turn
is the same as the one used by time.strptime() and time.strftime().
:param timezone = self.timezone
The timezone (in seconds west of UTC) to return the result in. By
default, the value used when constructing the class (local tz by
default) is used. To use UTC, use timezone=0. To use the local TZ,
use timezone=chronyk.LOCALTZ.
:param maxunits = 1
The maximum amount of units to return. This is identical to the
parameter of the same name of ChronykDelta's timestring method.
"""
if now is None:
now = time.time()
if timezone is None:
timezone = self.timezone
diff = now - (self.__timestamp__ - timezone)
future = diff < 0
diff = abs(diff)
if diff < minimum:
return "just now"
if diff > maximum and maximum > 0:
return self.timestring(pattern)
timestring = ChronykDelta(diff).timestring(maxunits=maxunits)
if timestring == "1 day":
return "tomorrow" if future else "yesterday"
if future:
return "in {}".format(timestring)
else:
return "{} ago".format(timestring)
|
class Chronyk:
'''Class containing methods for parsing and outputting times and dates for
humans. For usage information, consule the module documentation.
:param timestr = current time
This can be either a timestamp, a datetime object or a string
describing a time or date.
:param timezone = local timezone
The timezone (in seconds west of UTC) the given time is in. By default,
the local tz is used. To use UTC, use timezone=0.
:param allowpast = True
Determines if values from the past are allowed. This can be handy when
parsing direct user input.
:param allowfuture = True
Determines if values from the future are allowed. This can be handy
when parsing direct user input.
If the passed values exceeds the bounds set by allowpast and allowfuture,
a chronyk.DateRangeError is raised. If the type of the value is unknown to
Chronyk, a TypeError is raised. If Chronyk fails to parse a given string,
a ValueError is raised.
Subtracting Chronyk instances from another will yield a ChronykDelta
object, which in turn can be added to other Chronyk instances.
'''
def __init__(
self, timestr=None, timezone=LOCALTZ,
allowpast=True, allowfuture=True):
''' Converts input to UTC timestamp. '''
pass
def __repr__(self):
pass
def __str__(self):
pass
def __int__(self):
pass
def __float__(self):
pass
def __eq__(self, other):
pass
def __ne__(self, other):
pass
def __gt__(self, other):
pass
def __le__(self, other):
pass
def __lt__(self, other):
pass
def __ge__(self, other):
pass
def __add__(self, other):
pass
def __sub__(self, other):
pass
def __fromrelative__(self, timestr):
pass
def __fromabsolute__(self, timestr):
pass
def __fromstring__(self, timestr):
pass
def datetime(self, timezone=None):
'''Returns a datetime object.
This object retains all information, including timezones.
:param timezone = self.timezone
The timezone (in seconds west of UTC) to return the value in. By
default, the timezone used when constructing the class is used
(local one by default). To use UTC, use timezone = 0. To use the
local tz, use timezone = chronyk.LOCALTZ.
'''
pass
def datetime(self, timezone=None):
'''Returns a datetime.date object.
This object retains all information, including timezones.
:param timezone = self.timezone
The timezone (in seconds west of UTC) to return the value in. By
default, the timezone used when constructing the class is used
(local one by default). To use UTC, use timezone = 0. To use the
local tz, use timezone = chronyk.LOCALTZ.
'''
pass
def timestamp(self, timezone=None):
'''Returns a timestamp (seconds since the epoch).
:param timezone = self.timezone
The timezone (in seconds west of UTC) to return the value in. By
default, the timezone used when constructing the class is used
(local one by default). To use UTC, use timezone = 0. To use the
local tz, use timezone = chronyk.LOCALTZ.
'''
pass
def ctime(self, timezone=None):
'''Returns a ctime string.
:param timezone = self.timezone
The timezone (in seconds west of UTC) to return the value in. By
default, the timezone used when constructing the class is used
(local one by default). To use UTC, use timezone = 0. To use the
local tz, use timezone = chronyk.LOCALTZ.
'''
pass
def timestring(self, pattern="%Y-%m-%d %H:%M:%S", timezone=None):
'''Returns a time string.
:param pattern = "%Y-%m-%d %H:%M:%S"
The format used. By default, an ISO-type format is used. The
syntax here is identical to the one used by time.strftime() and
time.strptime().
:param timezone = self.timezone
The timezone (in seconds west of UTC) to return the value in. By
default, the timezone used when constructing the class is used
(local one by default). To use UTC, use timezone = 0. To use the
local tz, use timezone = chronyk.LOCALTZ.
'''
pass
def relativestring(
self, now=None, minimum=10, maximum=3600 * 24 * 30,
pattern="%Y-%m-%d", timezone=None, maxunits=1):
'''Returns a relative time string (e.g. "10 seconds ago").
:param now = time.time()
The timestamp to compare this time to. By default, the current
local time is used.
:param minimum = 10
Amount in seconds under which "just now" is returned instead of a
numerical description. A value <= 0 disables this.
:param maximum = 3600 * 24 * 30 (30 days)
Amount in seconds above which the actual date is returned instead
of a numerical description. A value < 0 disables this.
:param pattern = "%Y-%m-%d"
The string format to use when maximum is exceeded. The syntax here
is identical to the one used by Chronyk.timestring(), which in turn
is the same as the one used by time.strptime() and time.strftime().
:param timezone = self.timezone
The timezone (in seconds west of UTC) to return the result in. By
default, the value used when constructing the class (local tz by
default) is used. To use UTC, use timezone=0. To use the local TZ,
use timezone=chronyk.LOCALTZ.
:param maxunits = 1
The maximum amount of units to return. This is identical to the
parameter of the same name of ChronykDelta's timestring method.
'''
pass
| 23 | 8 | 19 | 2 | 13 | 4 | 4 | 0.4 | 0 | 10 | 2 | 0 | 22 | 2 | 22 | 22 | 474 | 74 | 287 | 58 | 260 | 114 | 193 | 54 | 170 | 13 | 0 | 3 | 77 |
143,429 |
KoffeinFlummi/Chronyk
|
KoffeinFlummi_Chronyk/chronyk/chronyk.py
|
chronyk.chronyk.ChronykDelta
|
class ChronykDelta:
"""Abstraction for a certain amount of time.
:param timestr (required)
The amount of time to represent. This can be either a number
(int / float) or a string, which will be parsed accordingly.
If you supply an unknown type, a TypeError is raised. If the string you
passed cannot be parsed, a ValueError is raised.
"""
def __init__(self, timestr):
if type(timestr) == str:
self.seconds = self.__fromstring__(timestr)
elif type(timestr) in [int, float]:
self.seconds = timestr
else:
raise TypeError("Failed to recognize given type.")
def __repr__(self):
return "ChronykDelta({})".format(self.timestring())
# Type Conversions
def __str__(self):
return self.timestring()
def __int__(self):
return int(self.seconds)
def __float__(self):
return float(self.seconds)
# Comparison Operators
def __eq__(self, other):
if type(other) == ChronykDelta:
return self.seconds == other.seconds
if type(other) in [int, float]:
return self.seconds == other
return NotImplemented
def __ne__(self, other):
return not self.__eq__(other)
def __gt__(self, other):
if type(other) == ChronykDelta:
return self.seconds > other.seconds
if type(other) in [int, float]:
return self.seconds > other
return NotImplemented
def __le__(self, other):
return not self.__gt__(other)
def __lt__(self, other):
if type(other) == ChronykDelta:
return self.seconds < other.seconds
if type(other) in [int, float]:
return self.seconds < other
return NotImplemented
def __ge__(self, other):
return not self.__lt__(other)
# Arithmetic Operators
def __add__(self, other):
if type(other) == ChronykDelta:
return ChronykDelta(self.seconds + other.seconds)
if type(other) == Chronyk:
return other + self
if type(other) in [int, float]:
return ChronykDelta(self.seconds + other)
return NotImplemented
def __sub__(self, other):
if type(other) == ChronykDelta:
return ChronykDelta(self.seconds - other.seconds)
if type(other) in [int, float]:
return ChronykDelta(self.seconds - other)
return NotImplemented
def __mul__(self, other):
if type(other) in [int, float]:
return ChronykDelta(self.seconds * other)
return NotImplemented
def __truediv__(self, other):
if type(other) in [int, float]:
return ChronykDelta(self.seconds / other)
return NotImplemented
def __floordiv__(self, other):
if type(other) in [int, float]:
return int(self.__truediv__(other))
return NotImplemented
# Methods
def __fromstring__(self, timestr):
seconds = 0
comps = {
"second": 1,
"minute": 60,
"hour": 3600,
"day": 3600 * 24,
"week": 3600 * 24 * 7,
"month": 3600 * 24 * 30,
"year": 3600 * 24 * 365
}
for k, v in comps.items():
try:
match = re.match(
re.compile(".*?([0-9]+?) "+k),
timestr
)
assert match is not None
seconds += float(match.group(1)) * v
except AssertionError:
pass
return seconds
def timestring(self, maxunits=3):
"""Returns a string representation of this amount of time, like:
"2 hours and 30 minutes" or "4 days, 2 hours and 40 minutes"
:param maxunits = 3
The maximum amount of units to use.
1: "2 hours"
4: "4 days, 2 hours, 5 minuts and 46 seconds"
This method ignores the sign of the amount of time (that rhimes).
"""
try:
assert maxunits >= 1
except:
raise ValueError("Values < 1 for maxunits are not supported.")
values = collections.OrderedDict()
seconds = abs(self.seconds)
values["year"] = _round(seconds / (3600 * 24 * 365))
values["year"] = values["year"] if values["year"] > 0 else 0
seconds -= values["year"] * 3600 * 24 * 365
values["month"] = _round(seconds / (3600 * 24 * 30))
values["month"] = values["month"] if values["month"] > 0 else 0
seconds -= values["month"] * 3600 * 24 * 30
values["day"] = _round(seconds / (3600 * 24))
values["day"] = values["day"] if values["day"] > 0 else 0
seconds -= values["day"] * 3600 * 24
values["hour"] = _round(seconds / 3600)
values["hour"] = values["hour"] if values["hour"] > 0 else 0
seconds -= values["hour"] * 3600
values["minute"] = _round(seconds / 60)
values["minute"] = values["minute"] if values["minute"] > 0 else 0
values["second"] = _round(seconds - values["minute"] * 60)
for k, v in values.items():
if v == 0:
values.pop(k)
else:
break
textsegs = []
for k, v in list(values.items())[:maxunits]:
if v > 0:
textsegs.append(_pluralstr(k, v))
if len(textsegs) == 0:
return ""
if len(textsegs) == 1:
return textsegs[0]
return ", ".join(textsegs[:-1]) + " and " + textsegs[-1]
|
class ChronykDelta:
'''Abstraction for a certain amount of time.
:param timestr (required)
The amount of time to represent. This can be either a number
(int / float) or a string, which will be parsed accordingly.
If you supply an unknown type, a TypeError is raised. If the string you
passed cannot be parsed, a ValueError is raised.
'''
def __init__(self, timestr):
pass
def __repr__(self):
pass
def __str__(self):
pass
def __int__(self):
pass
def __float__(self):
pass
def __eq__(self, other):
pass
def __ne__(self, other):
pass
def __gt__(self, other):
pass
def __le__(self, other):
pass
def __lt__(self, other):
pass
def __ge__(self, other):
pass
def __add__(self, other):
pass
def __sub__(self, other):
pass
def __mul__(self, other):
pass
def __truediv__(self, other):
pass
def __floordiv__(self, other):
pass
def __fromstring__(self, timestr):
pass
def timestring(self, maxunits=3):
'''Returns a string representation of this amount of time, like:
"2 hours and 30 minutes" or "4 days, 2 hours and 40 minutes"
:param maxunits = 3
The maximum amount of units to use.
1: "2 hours"
4: "4 days, 2 hours, 5 minuts and 46 seconds"
This method ignores the sign of the amount of time (that rhimes).
'''
pass
| 19 | 2 | 9 | 1 | 7 | 0 | 3 | 0.15 | 0 | 10 | 1 | 0 | 18 | 1 | 18 | 18 | 189 | 46 | 124 | 28 | 105 | 19 | 110 | 28 | 91 | 13 | 0 | 2 | 48 |
143,430 |
KokocGroup/elastic-dict
|
KokocGroup_elastic-dict/elasticdict/ElasticDict.py
|
elasticdict.ElasticDict.ElasticDict
|
class ElasticDict(dict):
"""Dict which could adjust to you current needs
It eliminates neccessity of using brackets when filling a dictionary:
>>> a = ElasticDict()
>>> a.x = 3
>>> print a
{'x': 3}
>>> a.y.z = (1,2,3,)
>>> print a
{'y': {'z': (1, 2, 3)}, 'x': 3}
Any address to non-existed keys will automaticall create it with value of type ElasticDict.
So it could be used recursively.
>>> print a.b.c.d
{}
>>> print a
{'y': {'z': (1, 2, 3)}, 'x': 3, 'b': {'c': {'d': {}}}}
Following expression violates python syntax:
>>> print a.01234
SyntaxError: invalid syntax
But such elements can still be addressed by usual way using brackets.
>>> a['01234'] = 7
>>> print a
{'y': {'z': (1, 2, 3)}, 'x': 3, 'b': {'c': {'d': {}}}, '01234': 7}
It is possible to mix both ways of addressing for your taste.
>>> a['qwer'].d.x.e[234] = 14
>>> print a
{'qwer': {'d': {'x': {'e': {234: 14}}}}}
>>> print a.qwer.d.x.e[234]
14
"""
def __init__(self, *args, **kwargs):
super(ElasticDict, self).__init__(*args, **kwargs)
self.__dict__ = self
def __getitem__(self, item):
try:
return dict.__getitem__(self, item)
except KeyError:
if item in dir(dict):
return getattr(self, item)
value = self[item] = type(self)()
return value
def __getattr__(self, name):
if name in dir(dict) or name.startswith('_') or name in ('trait_names', 'resolve_expression', 'prepare_database_save'): # can not be accessed by dot
raise AttributeError()
else:
value = self.__dict__[name] = type(self)()
return value
def get_as_dict(self):
u"""
Exports self as ordinary dict(), replacing recursively all instances of ElasticDict() to dict()
:rtype: dict()
"""
def convert(val):
if isinstance(val, tuple):
return tuple(convert(v) for v in val)
elif isinstance(val, list):
return [convert(v) for v in val]
elif isinstance(val, (dict, ElasticDict)):
return {k: convert(v) for k, v in val.iteritems()}
else:
return val
return convert(self.__dict__)
@staticmethod
def create_from(value):
u"""
Create an instance of ElasticDict() where all nested dict()'s are replaced to ElasticDict()
:rtype: ElasticDict (if value is dict()), else type(value)
"""
def convert(val):
if isinstance(val, tuple):
return tuple(convert(v) for v in val)
elif isinstance(val, list):
return [convert(v) for v in val]
elif isinstance(val, (dict, ElasticDict)):
return ElasticDict({k: convert(v) for k, v in val.iteritems()})
else:
return val
return convert(value)
|
class ElasticDict(dict):
'''Dict which could adjust to you current needs
It eliminates neccessity of using brackets when filling a dictionary:
>>> a = ElasticDict()
>>> a.x = 3
>>> print a
{'x': 3}
>>> a.y.z = (1,2,3,)
>>> print a
{'y': {'z': (1, 2, 3)}, 'x': 3}
Any address to non-existed keys will automaticall create it with value of type ElasticDict.
So it could be used recursively.
>>> print a.b.c.d
{}
>>> print a
{'y': {'z': (1, 2, 3)}, 'x': 3, 'b': {'c': {'d': {}}}}
Following expression violates python syntax:
>>> print a.01234
SyntaxError: invalid syntax
But such elements can still be addressed by usual way using brackets.
>>> a['01234'] = 7
>>> print a
{'y': {'z': (1, 2, 3)}, 'x': 3, 'b': {'c': {'d': {}}}, '01234': 7}
It is possible to mix both ways of addressing for your taste.
>>> a['qwer'].d.x.e[234] = 14
>>> print a
{'qwer': {'d': {'x': {'e': {234: 14}}}}}
>>> print a.qwer.d.x.e[234]
14
'''
def __init__(self, *args, **kwargs):
pass
def __getitem__(self, item):
pass
def __getattr__(self, name):
pass
def get_as_dict(self):
'''
Exports self as ordinary dict(), replacing recursively all instances of ElasticDict() to dict()
:rtype: dict()
'''
pass
def convert(val):
pass
@staticmethod
def create_from(value):
'''
Create an instance of ElasticDict() where all nested dict()'s are replaced to ElasticDict()
:rtype: ElasticDict (if value is dict()), else type(value)
'''
pass
def convert(val):
pass
| 9 | 3 | 10 | 0 | 8 | 1 | 2 | 0.93 | 1 | 6 | 0 | 0 | 4 | 1 | 5 | 32 | 91 | 13 | 41 | 12 | 32 | 38 | 33 | 11 | 25 | 4 | 2 | 2 | 16 |
143,431 |
Komnomnomnom/swigibpy
|
Komnomnomnom_swigibpy/examples/customerror.py
|
customerror.CustomErrorExample
|
class CustomErrorExample(EWrapper):
'''Callback object passed to TWS, these functions will be called directly
by TWS.
'''
def __init__(self):
super(CustomErrorExample, self).__init__()
self.got_err = Event()
def orderStatus(self, id, status, filled, remaining, avgFillPrice, permId,
parentId, lastFilledPrice, clientId, whyHeld):
pass
def openOrder(self, orderID, contract, order, orderState):
pass
def nextValidId(self, orderId):
'''Always called by TWS but not relevant for our example'''
pass
def openOrderEnd(self):
'''Always called by TWS but not relevant for our example'''
pass
def managedAccounts(self, openOrderEnd):
'''Called by TWS but not relevant for our example'''
pass
def historicalData(self, reqId, date, open, high,
low, close, volume,
barCount, WAP, hasGaps):
pass
def error(self, id, errCode, errString):
if errCode == 165 or (errCode >= 2100 and errCode <= 2110):
print("TWS warns %s" % errString)
elif errCode == 502:
print('Looks like TWS is not running, '
'start it up and try again')
sys.exit()
elif errCode == 501:
print("TWS reports error in client: %s" % errString)
elif errCode >= 1100 and errCode < 2100:
print("TWS reports system error: %s" % errString)
elif errCode == 321:
print("TWS complaining about bad request: %s" % errString)
else:
super(CustomErrorExample, self).error(id, errCode, errString)
self.got_err.set()
def winError(self, msg, lastError):
print("TWS reports API error: %s" % msg)
self.got_err.set()
def pyError(self, type, val, tb):
sys.print_exception(type, val, tb)
|
class CustomErrorExample(EWrapper):
'''Callback object passed to TWS, these functions will be called directly
by TWS.
'''
def __init__(self):
pass
def orderStatus(self, id, status, filled, remaining, avgFillPrice, permId,
parentId, lastFilledPrice, clientId, whyHeld):
pass
def openOrder(self, orderID, contract, order, orderState):
pass
def nextValidId(self, orderId):
'''Always called by TWS but not relevant for our example'''
pass
def openOrderEnd(self):
'''Always called by TWS but not relevant for our example'''
pass
def managedAccounts(self, openOrderEnd):
'''Called by TWS but not relevant for our example'''
pass
def historicalData(self, reqId, date, open, high,
low, close, volume,
barCount, WAP, hasGaps):
pass
def error(self, id, errCode, errString):
pass
def winError(self, msg, lastError):
pass
def pyError(self, type, val, tb):
pass
| 11 | 4 | 4 | 0 | 4 | 0 | 2 | 0.15 | 1 | 2 | 0 | 0 | 10 | 1 | 10 | 59 | 59 | 13 | 40 | 15 | 26 | 6 | 31 | 12 | 20 | 6 | 2 | 1 | 15 |
143,432 |
Komnomnomnom/swigibpy
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Komnomnomnom_swigibpy/swigibpy.py
|
swigibpy._object
|
class _object:
pass
|
class _object:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 0 | 0 | 0 |
143,433 |
Komnomnomnom/swigibpy
|
Komnomnomnom_swigibpy/examples/contractdetails.py
|
contractdetails.ContractDetailsExample
|
class ContractDetailsExample(EWrapper):
'''Callback object passed to TWS, these functions will be called directly
by TWS.
'''
def __init__(self):
super(ContractDetailsExample, self).__init__()
self.got_contract = Event()
def orderStatus(self, id, status, filled, remaining, avgFillPrice, permId,
parentId, lastFilledPrice, clientId, whyHeld):
pass
def openOrder(self, orderID, contract, order, orderState):
pass
def nextValidId(self, orderId):
'''Always called by TWS but not relevant for our example'''
pass
def openOrderEnd(self):
'''Always called by TWS but not relevant for our example'''
pass
def managedAccounts(self, openOrderEnd):
'''Called by TWS but not relevant for our example'''
pass
def contractDetailsEnd(self, reqId):
print("Contract details request complete, (request id %i)" % reqId)
def contractDetails(self, reqId, contractDetails):
print("Contract details received (request id %i):" % reqId)
print("callable: %s" % contractDetails.callable)
print("category: %s" % contractDetails.category)
print("contractMonth: %s" % contractDetails.contractMonth)
print("convertible: %s" % contractDetails.convertible)
print("coupon: %s" % contractDetails.coupon)
print("industry: %s" % contractDetails.industry)
print("liquidHours: %s" % contractDetails.liquidHours)
print("longName: %s" % contractDetails.longName)
print("marketName: %s" % contractDetails.marketName)
print("minTick: %s" % contractDetails.minTick)
print("nextOptionPartial: %s" % contractDetails.nextOptionPartial)
print("orderTypes: %s" % contractDetails.orderTypes)
print("priceMagnifier: %s" % contractDetails.priceMagnifier)
print("putable: %s" % contractDetails.putable)
if contractDetails.secIdList is not None:
for secId in contractDetails.secIdList:
print("secIdList: %s" % secId)
else:
print("secIdList: None")
print("subcategory: %s" % contractDetails.subcategory)
print("tradingHours: %s" % contractDetails.tradingHours)
print("timeZoneId: %s" % contractDetails.timeZoneId)
print("underConId: %s" % contractDetails.underConId)
print("evRule: %s" % contractDetails.evRule)
print("evMultiplier: %s" % contractDetails.evMultiplier)
contract = contractDetails.summary
print("\nContract Summary:")
print("exchange: %s" % contract.exchange)
print("symbol: %s" % contract.symbol)
print("secType: %s" % contract.secType)
print("currency: %s" % contract.currency)
print("tradingClass: %s" % contract.tradingClass)
if contract.comboLegs is not None:
for comboLeg in contract.comboLegs:
print("comboLegs: %s - %s" %
(comboLeg.action, comboLeg.exchange))
else:
print("comboLegs: None")
print("\nBond Values:")
print("bondType: %s" % contractDetails.bondType)
print("couponType: %s" % contractDetails.couponType)
print("cusip: %s" % contractDetails.cusip)
print("descAppend: %s" % contractDetails.descAppend)
print("issueDate: %s" % contractDetails.issueDate)
print("maturity: %s" % contractDetails.maturity)
print("nextOptionDate: %s" % contractDetails.nextOptionDate)
print("nextOptionType: %s" % contractDetails.nextOptionType)
print("notes: %s" % contractDetails.notes)
print("ratings: %s" % contractDetails.ratings)
print("validExchanges: %s" % contractDetails.validExchanges)
self.got_contract.set()
|
class ContractDetailsExample(EWrapper):
'''Callback object passed to TWS, these functions will be called directly
by TWS.
'''
def __init__(self):
pass
def orderStatus(self, id, status, filled, remaining, avgFillPrice, permId,
parentId, lastFilledPrice, clientId, whyHeld):
pass
def openOrder(self, orderID, contract, order, orderState):
pass
def nextValidId(self, orderId):
'''Always called by TWS but not relevant for our example'''
pass
def openOrderEnd(self):
'''Always called by TWS but not relevant for our example'''
pass
def managedAccounts(self, openOrderEnd):
'''Called by TWS but not relevant for our example'''
pass
def contractDetailsEnd(self, reqId):
pass
def contractDetailsEnd(self, reqId):
pass
| 9 | 4 | 10 | 1 | 9 | 0 | 2 | 0.09 | 1 | 2 | 0 | 0 | 8 | 1 | 8 | 57 | 90 | 14 | 70 | 14 | 60 | 6 | 66 | 13 | 57 | 5 | 2 | 2 | 12 |
143,434 |
Komnomnomnom/swigibpy
|
Komnomnomnom_swigibpy/swigibpy.py
|
swigibpy.ScannerSubscription
|
class ScannerSubscription(object):
"""Proxy of C++ ScannerSubscription class"""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
def __init__(self):
"""__init__(ScannerSubscription self) -> ScannerSubscription"""
_swigibpy.ScannerSubscription_swiginit(self, _swigibpy.new_ScannerSubscription())
numberOfRows = _swig_property(_swigibpy.ScannerSubscription_numberOfRows_get, _swigibpy.ScannerSubscription_numberOfRows_set)
instrument = _swig_property(_swigibpy.ScannerSubscription_instrument_get, _swigibpy.ScannerSubscription_instrument_set)
locationCode = _swig_property(_swigibpy.ScannerSubscription_locationCode_get, _swigibpy.ScannerSubscription_locationCode_set)
scanCode = _swig_property(_swigibpy.ScannerSubscription_scanCode_get, _swigibpy.ScannerSubscription_scanCode_set)
abovePrice = _swig_property(_swigibpy.ScannerSubscription_abovePrice_get, _swigibpy.ScannerSubscription_abovePrice_set)
belowPrice = _swig_property(_swigibpy.ScannerSubscription_belowPrice_get, _swigibpy.ScannerSubscription_belowPrice_set)
aboveVolume = _swig_property(_swigibpy.ScannerSubscription_aboveVolume_get, _swigibpy.ScannerSubscription_aboveVolume_set)
marketCapAbove = _swig_property(_swigibpy.ScannerSubscription_marketCapAbove_get, _swigibpy.ScannerSubscription_marketCapAbove_set)
marketCapBelow = _swig_property(_swigibpy.ScannerSubscription_marketCapBelow_get, _swigibpy.ScannerSubscription_marketCapBelow_set)
moodyRatingAbove = _swig_property(_swigibpy.ScannerSubscription_moodyRatingAbove_get, _swigibpy.ScannerSubscription_moodyRatingAbove_set)
moodyRatingBelow = _swig_property(_swigibpy.ScannerSubscription_moodyRatingBelow_get, _swigibpy.ScannerSubscription_moodyRatingBelow_set)
spRatingAbove = _swig_property(_swigibpy.ScannerSubscription_spRatingAbove_get, _swigibpy.ScannerSubscription_spRatingAbove_set)
spRatingBelow = _swig_property(_swigibpy.ScannerSubscription_spRatingBelow_get, _swigibpy.ScannerSubscription_spRatingBelow_set)
maturityDateAbove = _swig_property(_swigibpy.ScannerSubscription_maturityDateAbove_get, _swigibpy.ScannerSubscription_maturityDateAbove_set)
maturityDateBelow = _swig_property(_swigibpy.ScannerSubscription_maturityDateBelow_get, _swigibpy.ScannerSubscription_maturityDateBelow_set)
couponRateAbove = _swig_property(_swigibpy.ScannerSubscription_couponRateAbove_get, _swigibpy.ScannerSubscription_couponRateAbove_set)
couponRateBelow = _swig_property(_swigibpy.ScannerSubscription_couponRateBelow_get, _swigibpy.ScannerSubscription_couponRateBelow_set)
excludeConvertible = _swig_property(_swigibpy.ScannerSubscription_excludeConvertible_get, _swigibpy.ScannerSubscription_excludeConvertible_set)
averageOptionVolumeAbove = _swig_property(_swigibpy.ScannerSubscription_averageOptionVolumeAbove_get, _swigibpy.ScannerSubscription_averageOptionVolumeAbove_set)
scannerSettingPairs = _swig_property(_swigibpy.ScannerSubscription_scannerSettingPairs_get, _swigibpy.ScannerSubscription_scannerSettingPairs_set)
stockTypeFilter = _swig_property(_swigibpy.ScannerSubscription_stockTypeFilter_get, _swigibpy.ScannerSubscription_stockTypeFilter_set)
__swig_destroy__ = _swigibpy.delete_ScannerSubscription
|
class ScannerSubscription(object):
'''Proxy of C++ ScannerSubscription class'''
def __init__(self):
'''__init__(ScannerSubscription self) -> ScannerSubscription'''
pass
| 2 | 2 | 3 | 0 | 2 | 1 | 1 | 0.07 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 30 | 1 | 27 | 26 | 25 | 2 | 27 | 26 | 25 | 1 | 1 | 0 | 1 |
143,435 |
Komnomnomnom/swigibpy
|
Komnomnomnom_swigibpy/swigibpy.py
|
swigibpy.TagValue
|
class TagValue(object):
"""Proxy of C++ TagValue class"""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
def __init__(self, *args):
"""
__init__(TagValue self) -> TagValue
__init__(TagValue self, IBString const & p_tag, IBString const & p_value) -> TagValue
"""
_swigibpy.TagValue_swiginit(self, _swigibpy.new_TagValue(*args))
tag = _swig_property(_swigibpy.TagValue_tag_get, _swigibpy.TagValue_tag_set)
value = _swig_property(_swigibpy.TagValue_value_get, _swigibpy.TagValue_value_set)
__swig_destroy__ = _swigibpy.delete_TagValue
|
class TagValue(object):
'''Proxy of C++ TagValue class'''
def __init__(self, *args):
'''
__init__(TagValue self) -> TagValue
__init__(TagValue self, IBString const & p_tag, IBString const & p_value) -> TagValue
'''
pass
| 2 | 2 | 6 | 0 | 2 | 4 | 1 | 0.63 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 14 | 1 | 8 | 7 | 6 | 5 | 8 | 7 | 6 | 1 | 1 | 0 | 1 |
143,436 |
Komnomnomnom/swigibpy
|
Komnomnomnom_swigibpy/swigibpy.py
|
swigibpy.TWSPoller
|
class TWSPoller(threading.Thread):
'''Continually polls TWS for any outstanding messages.
Loops indefinitely until killed or a system error occurs.
Uses socket select to poll for input and calls TWS's
`EClientSocketBase::checkMessages` function.
'''
MAX_BACKOFF = 5000
def __init__(self, tws, wrapper):
super(TWSPoller, self).__init__()
self.daemon = True
self._tws = tws
self._wrapper = wrapper
self._stop_evt = threading.Event()
self._connected_evt = threading.Event()
self.tws_connected(tws.isConnected())
def stop_poller(self):
self._stop_evt.set()
def tws_connected(self, flag):
if flag:
self._connected_evt.set()
else:
self._connected_evt.clear()
def run(self):
modules = sys.modules
try:
self._run()
except:
# ignore errors raised during interpreter shutdown.
if modules:
raise
def _run(self):
'''Continually poll TWS'''
stop = self._stop_evt
connected = self._connected_evt
tws = self._tws
fd = tws.fd()
pollfd = [fd]
while not stop.is_set():
while (not connected.is_set() or not tws.isConnected()) and not stop.is_set():
connected.clear()
backoff = 0
retries = 0
while not connected.is_set() and not stop.is_set():
if tws.reconnect_auto and not tws.reconnect():
if backoff < self.MAX_BACKOFF:
retries += 1
backoff = min(2**(retries + 1), self.MAX_BACKOFF)
connected.wait(backoff / 1000.)
else:
connected.wait(1)
fd = tws.fd()
pollfd = [fd]
if fd > 0:
try:
evtin, _evtout, evterr = select.select(pollfd, [], pollfd, 1)
except select.error:
connected.clear()
continue
else:
if fd in evtin:
try:
if not tws.checkMessages():
tws.eDisconnect(stop_polling=False)
continue
except (SystemExit, SystemError, KeyboardInterrupt):
break
except:
try:
self._wrapper.pyError(*sys.exc_info())
except:
print_exc()
elif fd in evterr:
connected.clear()
continue
|
class TWSPoller(threading.Thread):
'''Continually polls TWS for any outstanding messages.
Loops indefinitely until killed or a system error occurs.
Uses socket select to poll for input and calls TWS's
`EClientSocketBase::checkMessages` function.
'''
def __init__(self, tws, wrapper):
pass
def stop_poller(self):
pass
def tws_connected(self, flag):
pass
def run(self):
pass
def _run(self):
'''Continually poll TWS'''
pass
| 6 | 2 | 14 | 1 | 13 | 0 | 4 | 0.1 | 1 | 5 | 0 | 0 | 5 | 5 | 5 | 30 | 87 | 13 | 67 | 21 | 61 | 7 | 64 | 21 | 58 | 14 | 1 | 6 | 21 |
143,437 |
Komnomnomnom/swigibpy
|
Komnomnomnom_swigibpy/swigibpy.py
|
swigibpy.SwigPyIterator
|
class SwigPyIterator(object):
"""Proxy of C++ swig::SwigPyIterator class"""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined - class is abstract")
__repr__ = _swig_repr
__swig_destroy__ = _swigibpy.delete_SwigPyIterator
def value(self):
"""value(SwigPyIterator self) -> PyObject *"""
return _swigibpy.SwigPyIterator_value(self)
def incr(self, n=1):
"""incr(SwigPyIterator self, size_t n=1) -> SwigPyIterator"""
return _swigibpy.SwigPyIterator_incr(self, n)
def decr(self, n=1):
"""decr(SwigPyIterator self, size_t n=1) -> SwigPyIterator"""
return _swigibpy.SwigPyIterator_decr(self, n)
def distance(self, x):
"""distance(SwigPyIterator self, SwigPyIterator x) -> ptrdiff_t"""
return _swigibpy.SwigPyIterator_distance(self, x)
def equal(self, x):
"""equal(SwigPyIterator self, SwigPyIterator x) -> bool"""
return _swigibpy.SwigPyIterator_equal(self, x)
def copy(self):
"""copy(SwigPyIterator self) -> SwigPyIterator"""
return _swigibpy.SwigPyIterator_copy(self)
def next(self):
"""next(SwigPyIterator self) -> PyObject *"""
return _swigibpy.SwigPyIterator_next(self)
def __next__(self):
"""__next__(SwigPyIterator self) -> PyObject *"""
return _swigibpy.SwigPyIterator___next__(self)
def previous(self):
"""previous(SwigPyIterator self) -> PyObject *"""
return _swigibpy.SwigPyIterator_previous(self)
def advance(self, n):
"""advance(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator"""
return _swigibpy.SwigPyIterator_advance(self, n)
def __eq__(self, x):
"""__eq__(SwigPyIterator self, SwigPyIterator x) -> bool"""
return _swigibpy.SwigPyIterator___eq__(self, x)
def __ne__(self, x):
"""__ne__(SwigPyIterator self, SwigPyIterator x) -> bool"""
return _swigibpy.SwigPyIterator___ne__(self, x)
def __iadd__(self, n):
"""__iadd__(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator"""
return _swigibpy.SwigPyIterator___iadd__(self, n)
def __isub__(self, n):
"""__isub__(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator"""
return _swigibpy.SwigPyIterator___isub__(self, n)
def __add__(self, n):
"""__add__(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator"""
return _swigibpy.SwigPyIterator___add__(self, n)
def __sub__(self, *args):
"""
__sub__(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator
__sub__(SwigPyIterator self, SwigPyIterator x) -> ptrdiff_t
"""
return _swigibpy.SwigPyIterator___sub__(self, *args)
def __iter__(self):
return self
|
class SwigPyIterator(object):
'''Proxy of C++ swig::SwigPyIterator class'''
def __init__(self, *args, **kwargs):
pass
def value(self):
'''value(SwigPyIterator self) -> PyObject *'''
pass
def incr(self, n=1):
'''incr(SwigPyIterator self, size_t n=1) -> SwigPyIterator'''
pass
def decr(self, n=1):
'''decr(SwigPyIterator self, size_t n=1) -> SwigPyIterator'''
pass
def distance(self, x):
'''distance(SwigPyIterator self, SwigPyIterator x) -> ptrdiff_t'''
pass
def equal(self, x):
'''equal(SwigPyIterator self, SwigPyIterator x) -> bool'''
pass
def copy(self):
'''copy(SwigPyIterator self) -> SwigPyIterator'''
pass
def next(self):
'''next(SwigPyIterator self) -> PyObject *'''
pass
def __next__(self):
'''__next__(SwigPyIterator self) -> PyObject *'''
pass
def previous(self):
'''previous(SwigPyIterator self) -> PyObject *'''
pass
def advance(self, n):
'''advance(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator'''
pass
def __eq__(self, x):
'''__eq__(SwigPyIterator self, SwigPyIterator x) -> bool'''
pass
def __ne__(self, x):
'''__ne__(SwigPyIterator self, SwigPyIterator x) -> bool'''
pass
def __iadd__(self, n):
'''__iadd__(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator'''
pass
def __isub__(self, n):
'''__isub__(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator'''
pass
def __add__(self, n):
'''__add__(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator'''
pass
def __sub__(self, *args):
'''
__sub__(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator
__sub__(SwigPyIterator self, SwigPyIterator x) -> ptrdiff_t
'''
pass
def __iter__(self):
pass
| 19 | 17 | 3 | 0 | 2 | 1 | 1 | 0.5 | 1 | 1 | 0 | 0 | 18 | 0 | 18 | 18 | 93 | 33 | 40 | 22 | 21 | 20 | 40 | 22 | 21 | 1 | 1 | 0 | 18 |
143,438 |
Komnomnomnom/swigibpy
|
Komnomnomnom_swigibpy/swigibpy.py
|
swigibpy.OrderState
|
class OrderState(object):
"""Proxy of C++ OrderState class"""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
def __init__(self):
"""__init__(OrderState self) -> OrderState"""
_swigibpy.OrderState_swiginit(self, _swigibpy.new_OrderState())
status = _swig_property(_swigibpy.OrderState_status_get, _swigibpy.OrderState_status_set)
initMargin = _swig_property(_swigibpy.OrderState_initMargin_get, _swigibpy.OrderState_initMargin_set)
maintMargin = _swig_property(_swigibpy.OrderState_maintMargin_get, _swigibpy.OrderState_maintMargin_set)
equityWithLoan = _swig_property(_swigibpy.OrderState_equityWithLoan_get, _swigibpy.OrderState_equityWithLoan_set)
commission = _swig_property(_swigibpy.OrderState_commission_get, _swigibpy.OrderState_commission_set)
minCommission = _swig_property(_swigibpy.OrderState_minCommission_get, _swigibpy.OrderState_minCommission_set)
maxCommission = _swig_property(_swigibpy.OrderState_maxCommission_get, _swigibpy.OrderState_maxCommission_set)
commissionCurrency = _swig_property(_swigibpy.OrderState_commissionCurrency_get, _swigibpy.OrderState_commissionCurrency_set)
warningText = _swig_property(_swigibpy.OrderState_warningText_get, _swigibpy.OrderState_warningText_set)
__swig_destroy__ = _swigibpy.delete_OrderState
|
class OrderState(object):
'''Proxy of C++ OrderState class'''
def __init__(self):
'''__init__(OrderState self) -> OrderState'''
pass
| 2 | 2 | 3 | 0 | 2 | 1 | 1 | 0.13 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 18 | 1 | 15 | 14 | 13 | 2 | 15 | 14 | 13 | 1 | 1 | 0 | 1 |
143,439 |
Komnomnomnom/swigibpy
|
Komnomnomnom_swigibpy/swigibpy.py
|
swigibpy.OrderComboLegList
|
class OrderComboLegList(object):
"""Proxy of C++ std::vector<(shared_ptr<(OrderComboLeg)>)> class"""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
def iterator(self):
"""iterator(OrderComboLegList self) -> SwigPyIterator"""
return _swigibpy.OrderComboLegList_iterator(self)
def __iter__(self):
return self.iterator()
def __nonzero__(self):
"""__nonzero__(OrderComboLegList self) -> bool"""
return _swigibpy.OrderComboLegList___nonzero__(self)
def __bool__(self):
"""__bool__(OrderComboLegList self) -> bool"""
return _swigibpy.OrderComboLegList___bool__(self)
def __len__(self):
"""__len__(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::size_type"""
return _swigibpy.OrderComboLegList___len__(self)
def pop(self):
"""pop(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::value_type"""
return _swigibpy.OrderComboLegList_pop(self)
def __getslice__(self, i, j):
"""__getslice__(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::difference_type i, std::vector< shared_ptr< OrderComboLeg > >::difference_type j) -> OrderComboLegList"""
return _swigibpy.OrderComboLegList___getslice__(self, i, j)
def __setslice__(self, *args, **kwargs):
"""__setslice__(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::difference_type i, std::vector< shared_ptr< OrderComboLeg > >::difference_type j, OrderComboLegList v)"""
return _swigibpy.OrderComboLegList___setslice__(self, *args, **kwargs)
def __delslice__(self, i, j):
"""__delslice__(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::difference_type i, std::vector< shared_ptr< OrderComboLeg > >::difference_type j)"""
return _swigibpy.OrderComboLegList___delslice__(self, i, j)
def __delitem__(self, *args):
"""
__delitem__(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::difference_type i)
__delitem__(OrderComboLegList self, PySliceObject * slice)
"""
return _swigibpy.OrderComboLegList___delitem__(self, *args)
def __getitem__(self, *args):
"""
__getitem__(OrderComboLegList self, PySliceObject * slice) -> OrderComboLegList
__getitem__(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::difference_type i) -> std::vector< shared_ptr< OrderComboLeg > >::value_type const &
"""
return _swigibpy.OrderComboLegList___getitem__(self, *args)
def __setitem__(self, *args):
"""
__setitem__(OrderComboLegList self, PySliceObject * slice, OrderComboLegList v)
__setitem__(OrderComboLegList self, PySliceObject * slice)
__setitem__(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::difference_type i, std::vector< shared_ptr< OrderComboLeg > >::value_type const & x)
"""
return _swigibpy.OrderComboLegList___setitem__(self, *args)
def append(self, x):
"""append(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::value_type const & x)"""
return _swigibpy.OrderComboLegList_append(self, x)
def empty(self):
"""empty(OrderComboLegList self) -> bool"""
return _swigibpy.OrderComboLegList_empty(self)
def size(self):
"""size(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::size_type"""
return _swigibpy.OrderComboLegList_size(self)
def clear(self):
"""clear(OrderComboLegList self)"""
return _swigibpy.OrderComboLegList_clear(self)
def swap(self, v):
"""swap(OrderComboLegList self, OrderComboLegList v)"""
return _swigibpy.OrderComboLegList_swap(self, v)
def get_allocator(self):
"""get_allocator(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::allocator_type"""
return _swigibpy.OrderComboLegList_get_allocator(self)
def begin(self):
"""begin(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::iterator"""
return _swigibpy.OrderComboLegList_begin(self)
def end(self):
"""end(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::iterator"""
return _swigibpy.OrderComboLegList_end(self)
def rbegin(self):
"""rbegin(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::reverse_iterator"""
return _swigibpy.OrderComboLegList_rbegin(self)
def rend(self):
"""rend(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::reverse_iterator"""
return _swigibpy.OrderComboLegList_rend(self)
def pop_back(self):
"""pop_back(OrderComboLegList self)"""
return _swigibpy.OrderComboLegList_pop_back(self)
def erase(self, *args):
"""
erase(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::iterator pos) -> std::vector< shared_ptr< OrderComboLeg > >::iterator
erase(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::iterator first, std::vector< shared_ptr< OrderComboLeg > >::iterator last) -> std::vector< shared_ptr< OrderComboLeg > >::iterator
"""
return _swigibpy.OrderComboLegList_erase(self, *args)
def __init__(self, *args):
"""
__init__(std::vector<(shared_ptr<(OrderComboLeg)>)> self) -> OrderComboLegList
__init__(std::vector<(shared_ptr<(OrderComboLeg)>)> self, OrderComboLegList arg2) -> OrderComboLegList
__init__(std::vector<(shared_ptr<(OrderComboLeg)>)> self, std::vector< shared_ptr< OrderComboLeg > >::size_type size) -> OrderComboLegList
__init__(std::vector<(shared_ptr<(OrderComboLeg)>)> self, std::vector< shared_ptr< OrderComboLeg > >::size_type size, std::vector< shared_ptr< OrderComboLeg > >::value_type const & value) -> OrderComboLegList
"""
_swigibpy.OrderComboLegList_swiginit(self, _swigibpy.new_OrderComboLegList(*args))
def push_back(self, x):
"""push_back(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::value_type const & x)"""
return _swigibpy.OrderComboLegList_push_back(self, x)
def front(self):
"""front(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::value_type const &"""
return _swigibpy.OrderComboLegList_front(self)
def back(self):
"""back(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::value_type const &"""
return _swigibpy.OrderComboLegList_back(self)
def assign(self, n, x):
"""assign(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::size_type n, std::vector< shared_ptr< OrderComboLeg > >::value_type const & x)"""
return _swigibpy.OrderComboLegList_assign(self, n, x)
def resize(self, *args):
"""
resize(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::size_type new_size)
resize(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::size_type new_size, std::vector< shared_ptr< OrderComboLeg > >::value_type const & x)
"""
return _swigibpy.OrderComboLegList_resize(self, *args)
def insert(self, *args):
"""
insert(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::iterator pos, std::vector< shared_ptr< OrderComboLeg > >::value_type const & x) -> std::vector< shared_ptr< OrderComboLeg > >::iterator
insert(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::iterator pos, std::vector< shared_ptr< OrderComboLeg > >::size_type n, std::vector< shared_ptr< OrderComboLeg > >::value_type const & x)
"""
return _swigibpy.OrderComboLegList_insert(self, *args)
def reserve(self, n):
"""reserve(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::size_type n)"""
return _swigibpy.OrderComboLegList_reserve(self, n)
def capacity(self):
"""capacity(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::size_type"""
return _swigibpy.OrderComboLegList_capacity(self)
__swig_destroy__ = _swigibpy.delete_OrderComboLegList
|
class OrderComboLegList(object):
'''Proxy of C++ std::vector<(shared_ptr<(OrderComboLeg)>)> class'''
def iterator(self):
'''iterator(OrderComboLegList self) -> SwigPyIterator'''
pass
def __iter__(self):
pass
def __nonzero__(self):
'''__nonzero__(OrderComboLegList self) -> bool'''
pass
def __bool__(self):
'''__bool__(OrderComboLegList self) -> bool'''
pass
def __len__(self):
'''__len__(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::size_type'''
pass
def pop(self):
'''pop(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::value_type'''
pass
def __getslice__(self, i, j):
'''__getslice__(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::difference_type i, std::vector< shared_ptr< OrderComboLeg > >::difference_type j) -> OrderComboLegList'''
pass
def __setslice__(self, *args, **kwargs):
'''__setslice__(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::difference_type i, std::vector< shared_ptr< OrderComboLeg > >::difference_type j, OrderComboLegList v)'''
pass
def __delslice__(self, i, j):
'''__delslice__(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::difference_type i, std::vector< shared_ptr< OrderComboLeg > >::difference_type j)'''
pass
def __delitem__(self, *args):
'''
__delitem__(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::difference_type i)
__delitem__(OrderComboLegList self, PySliceObject * slice)
'''
pass
def __getitem__(self, *args):
'''
__getitem__(OrderComboLegList self, PySliceObject * slice) -> OrderComboLegList
__getitem__(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::difference_type i) -> std::vector< shared_ptr< OrderComboLeg > >::value_type const &
'''
pass
def __setitem__(self, *args):
'''
__setitem__(OrderComboLegList self, PySliceObject * slice, OrderComboLegList v)
__setitem__(OrderComboLegList self, PySliceObject * slice)
__setitem__(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::difference_type i, std::vector< shared_ptr< OrderComboLeg > >::value_type const & x)
'''
pass
def append(self, x):
'''append(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::value_type const & x)'''
pass
def empty(self):
'''empty(OrderComboLegList self) -> bool'''
pass
def size(self):
'''size(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::size_type'''
pass
def clear(self):
'''clear(OrderComboLegList self)'''
pass
def swap(self, v):
'''swap(OrderComboLegList self, OrderComboLegList v)'''
pass
def get_allocator(self):
'''get_allocator(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::allocator_type'''
pass
def begin(self):
'''begin(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::iterator'''
pass
def end(self):
'''end(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::iterator'''
pass
def rbegin(self):
'''rbegin(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::reverse_iterator'''
pass
def rend(self):
'''rend(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::reverse_iterator'''
pass
def pop_back(self):
'''pop_back(OrderComboLegList self)'''
pass
def erase(self, *args):
'''
erase(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::iterator pos) -> std::vector< shared_ptr< OrderComboLeg > >::iterator
erase(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::iterator first, std::vector< shared_ptr< OrderComboLeg > >::iterator last) -> std::vector< shared_ptr< OrderComboLeg > >::iterator
'''
pass
def __init__(self, *args):
'''
__init__(std::vector<(shared_ptr<(OrderComboLeg)>)> self) -> OrderComboLegList
__init__(std::vector<(shared_ptr<(OrderComboLeg)>)> self, OrderComboLegList arg2) -> OrderComboLegList
__init__(std::vector<(shared_ptr<(OrderComboLeg)>)> self, std::vector< shared_ptr< OrderComboLeg > >::size_type size) -> OrderComboLegList
__init__(std::vector<(shared_ptr<(OrderComboLeg)>)> self, std::vector< shared_ptr< OrderComboLeg > >::size_type size, std::vector< shared_ptr< OrderComboLeg > >::value_type const & value) -> OrderComboLegList
'''
pass
def push_back(self, x):
'''push_back(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::value_type const & x)'''
pass
def front(self):
'''front(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::value_type const &'''
pass
def back(self):
'''back(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::value_type const &'''
pass
def assign(self, n, x):
'''assign(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::size_type n, std::vector< shared_ptr< OrderComboLeg > >::value_type const & x)'''
pass
def resize(self, *args):
'''
resize(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::size_type new_size)
resize(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::size_type new_size, std::vector< shared_ptr< OrderComboLeg > >::value_type const & x)
'''
pass
def insert(self, *args):
'''
insert(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::iterator pos, std::vector< shared_ptr< OrderComboLeg > >::value_type const & x) -> std::vector< shared_ptr< OrderComboLeg > >::iterator
insert(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::iterator pos, std::vector< shared_ptr< OrderComboLeg > >::size_type n, std::vector< shared_ptr< OrderComboLeg > >::value_type const & x)
'''
pass
def reserve(self, n):
'''reserve(OrderComboLegList self, std::vector< shared_ptr< OrderComboLeg > >::size_type n)'''
pass
def capacity(self):
'''capacity(OrderComboLegList self) -> std::vector< shared_ptr< OrderComboLeg > >::size_type'''
pass
| 34 | 33 | 4 | 0 | 2 | 2 | 1 | 0.81 | 1 | 0 | 0 | 0 | 33 | 0 | 33 | 33 | 190 | 63 | 70 | 37 | 36 | 57 | 70 | 37 | 36 | 1 | 1 | 0 | 33 |
143,440 |
Komnomnomnom/swigibpy
|
Komnomnomnom_swigibpy/swigibpy.py
|
swigibpy.OrderComboLeg
|
class OrderComboLeg(object):
"""Proxy of C++ OrderComboLeg class"""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
def __init__(self):
"""__init__(OrderComboLeg self) -> OrderComboLeg"""
_swigibpy.OrderComboLeg_swiginit(self, _swigibpy.new_OrderComboLeg())
price = _swig_property(_swigibpy.OrderComboLeg_price_get, _swigibpy.OrderComboLeg_price_set)
def __eq__(self, other):
"""__eq__(OrderComboLeg self, OrderComboLeg other) -> bool"""
return _swigibpy.OrderComboLeg___eq__(self, other)
__swig_destroy__ = _swigibpy.delete_OrderComboLeg
|
class OrderComboLeg(object):
'''Proxy of C++ OrderComboLeg class'''
def __init__(self):
'''__init__(OrderComboLeg self) -> OrderComboLeg'''
pass
def __eq__(self, other):
'''__eq__(OrderComboLeg self, OrderComboLeg other) -> bool'''
pass
| 3 | 3 | 3 | 0 | 2 | 1 | 1 | 0.33 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 15 | 3 | 9 | 7 | 6 | 3 | 9 | 7 | 6 | 1 | 1 | 0 | 2 |
143,441 |
Komnomnomnom/swigibpy
|
Komnomnomnom_swigibpy/swigibpy.py
|
swigibpy.Order
|
class Order(object):
"""Proxy of C++ Order class"""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
def __init__(self):
"""__init__(Order self) -> Order"""
_swigibpy.Order_swiginit(self, _swigibpy.new_Order())
orderId = _swig_property(_swigibpy.Order_orderId_get, _swigibpy.Order_orderId_set)
clientId = _swig_property(_swigibpy.Order_clientId_get, _swigibpy.Order_clientId_set)
permId = _swig_property(_swigibpy.Order_permId_get, _swigibpy.Order_permId_set)
action = _swig_property(_swigibpy.Order_action_get, _swigibpy.Order_action_set)
totalQuantity = _swig_property(_swigibpy.Order_totalQuantity_get, _swigibpy.Order_totalQuantity_set)
orderType = _swig_property(_swigibpy.Order_orderType_get, _swigibpy.Order_orderType_set)
lmtPrice = _swig_property(_swigibpy.Order_lmtPrice_get, _swigibpy.Order_lmtPrice_set)
auxPrice = _swig_property(_swigibpy.Order_auxPrice_get, _swigibpy.Order_auxPrice_set)
tif = _swig_property(_swigibpy.Order_tif_get, _swigibpy.Order_tif_set)
activeStartTime = _swig_property(_swigibpy.Order_activeStartTime_get, _swigibpy.Order_activeStartTime_set)
activeStopTime = _swig_property(_swigibpy.Order_activeStopTime_get, _swigibpy.Order_activeStopTime_set)
ocaGroup = _swig_property(_swigibpy.Order_ocaGroup_get, _swigibpy.Order_ocaGroup_set)
ocaType = _swig_property(_swigibpy.Order_ocaType_get, _swigibpy.Order_ocaType_set)
orderRef = _swig_property(_swigibpy.Order_orderRef_get, _swigibpy.Order_orderRef_set)
transmit = _swig_property(_swigibpy.Order_transmit_get, _swigibpy.Order_transmit_set)
parentId = _swig_property(_swigibpy.Order_parentId_get, _swigibpy.Order_parentId_set)
blockOrder = _swig_property(_swigibpy.Order_blockOrder_get, _swigibpy.Order_blockOrder_set)
sweepToFill = _swig_property(_swigibpy.Order_sweepToFill_get, _swigibpy.Order_sweepToFill_set)
displaySize = _swig_property(_swigibpy.Order_displaySize_get, _swigibpy.Order_displaySize_set)
triggerMethod = _swig_property(_swigibpy.Order_triggerMethod_get, _swigibpy.Order_triggerMethod_set)
outsideRth = _swig_property(_swigibpy.Order_outsideRth_get, _swigibpy.Order_outsideRth_set)
hidden = _swig_property(_swigibpy.Order_hidden_get, _swigibpy.Order_hidden_set)
goodAfterTime = _swig_property(_swigibpy.Order_goodAfterTime_get, _swigibpy.Order_goodAfterTime_set)
goodTillDate = _swig_property(_swigibpy.Order_goodTillDate_get, _swigibpy.Order_goodTillDate_set)
rule80A = _swig_property(_swigibpy.Order_rule80A_get, _swigibpy.Order_rule80A_set)
allOrNone = _swig_property(_swigibpy.Order_allOrNone_get, _swigibpy.Order_allOrNone_set)
minQty = _swig_property(_swigibpy.Order_minQty_get, _swigibpy.Order_minQty_set)
percentOffset = _swig_property(_swigibpy.Order_percentOffset_get, _swigibpy.Order_percentOffset_set)
overridePercentageConstraints = _swig_property(_swigibpy.Order_overridePercentageConstraints_get, _swigibpy.Order_overridePercentageConstraints_set)
trailStopPrice = _swig_property(_swigibpy.Order_trailStopPrice_get, _swigibpy.Order_trailStopPrice_set)
trailingPercent = _swig_property(_swigibpy.Order_trailingPercent_get, _swigibpy.Order_trailingPercent_set)
faGroup = _swig_property(_swigibpy.Order_faGroup_get, _swigibpy.Order_faGroup_set)
faProfile = _swig_property(_swigibpy.Order_faProfile_get, _swigibpy.Order_faProfile_set)
faMethod = _swig_property(_swigibpy.Order_faMethod_get, _swigibpy.Order_faMethod_set)
faPercentage = _swig_property(_swigibpy.Order_faPercentage_get, _swigibpy.Order_faPercentage_set)
openClose = _swig_property(_swigibpy.Order_openClose_get, _swigibpy.Order_openClose_set)
origin = _swig_property(_swigibpy.Order_origin_get, _swigibpy.Order_origin_set)
shortSaleSlot = _swig_property(_swigibpy.Order_shortSaleSlot_get, _swigibpy.Order_shortSaleSlot_set)
designatedLocation = _swig_property(_swigibpy.Order_designatedLocation_get, _swigibpy.Order_designatedLocation_set)
exemptCode = _swig_property(_swigibpy.Order_exemptCode_get, _swigibpy.Order_exemptCode_set)
discretionaryAmt = _swig_property(_swigibpy.Order_discretionaryAmt_get, _swigibpy.Order_discretionaryAmt_set)
eTradeOnly = _swig_property(_swigibpy.Order_eTradeOnly_get, _swigibpy.Order_eTradeOnly_set)
firmQuoteOnly = _swig_property(_swigibpy.Order_firmQuoteOnly_get, _swigibpy.Order_firmQuoteOnly_set)
nbboPriceCap = _swig_property(_swigibpy.Order_nbboPriceCap_get, _swigibpy.Order_nbboPriceCap_set)
optOutSmartRouting = _swig_property(_swigibpy.Order_optOutSmartRouting_get, _swigibpy.Order_optOutSmartRouting_set)
auctionStrategy = _swig_property(_swigibpy.Order_auctionStrategy_get, _swigibpy.Order_auctionStrategy_set)
startingPrice = _swig_property(_swigibpy.Order_startingPrice_get, _swigibpy.Order_startingPrice_set)
stockRefPrice = _swig_property(_swigibpy.Order_stockRefPrice_get, _swigibpy.Order_stockRefPrice_set)
delta = _swig_property(_swigibpy.Order_delta_get, _swigibpy.Order_delta_set)
stockRangeLower = _swig_property(_swigibpy.Order_stockRangeLower_get, _swigibpy.Order_stockRangeLower_set)
stockRangeUpper = _swig_property(_swigibpy.Order_stockRangeUpper_get, _swigibpy.Order_stockRangeUpper_set)
volatility = _swig_property(_swigibpy.Order_volatility_get, _swigibpy.Order_volatility_set)
volatilityType = _swig_property(_swigibpy.Order_volatilityType_get, _swigibpy.Order_volatilityType_set)
deltaNeutralOrderType = _swig_property(_swigibpy.Order_deltaNeutralOrderType_get, _swigibpy.Order_deltaNeutralOrderType_set)
deltaNeutralAuxPrice = _swig_property(_swigibpy.Order_deltaNeutralAuxPrice_get, _swigibpy.Order_deltaNeutralAuxPrice_set)
deltaNeutralConId = _swig_property(_swigibpy.Order_deltaNeutralConId_get, _swigibpy.Order_deltaNeutralConId_set)
deltaNeutralSettlingFirm = _swig_property(_swigibpy.Order_deltaNeutralSettlingFirm_get, _swigibpy.Order_deltaNeutralSettlingFirm_set)
deltaNeutralClearingAccount = _swig_property(_swigibpy.Order_deltaNeutralClearingAccount_get, _swigibpy.Order_deltaNeutralClearingAccount_set)
deltaNeutralClearingIntent = _swig_property(_swigibpy.Order_deltaNeutralClearingIntent_get, _swigibpy.Order_deltaNeutralClearingIntent_set)
deltaNeutralOpenClose = _swig_property(_swigibpy.Order_deltaNeutralOpenClose_get, _swigibpy.Order_deltaNeutralOpenClose_set)
deltaNeutralShortSale = _swig_property(_swigibpy.Order_deltaNeutralShortSale_get, _swigibpy.Order_deltaNeutralShortSale_set)
deltaNeutralShortSaleSlot = _swig_property(_swigibpy.Order_deltaNeutralShortSaleSlot_get, _swigibpy.Order_deltaNeutralShortSaleSlot_set)
deltaNeutralDesignatedLocation = _swig_property(_swigibpy.Order_deltaNeutralDesignatedLocation_get, _swigibpy.Order_deltaNeutralDesignatedLocation_set)
continuousUpdate = _swig_property(_swigibpy.Order_continuousUpdate_get, _swigibpy.Order_continuousUpdate_set)
referencePriceType = _swig_property(_swigibpy.Order_referencePriceType_get, _swigibpy.Order_referencePriceType_set)
basisPoints = _swig_property(_swigibpy.Order_basisPoints_get, _swigibpy.Order_basisPoints_set)
basisPointsType = _swig_property(_swigibpy.Order_basisPointsType_get, _swigibpy.Order_basisPointsType_set)
scaleInitLevelSize = _swig_property(_swigibpy.Order_scaleInitLevelSize_get, _swigibpy.Order_scaleInitLevelSize_set)
scaleSubsLevelSize = _swig_property(_swigibpy.Order_scaleSubsLevelSize_get, _swigibpy.Order_scaleSubsLevelSize_set)
scalePriceIncrement = _swig_property(_swigibpy.Order_scalePriceIncrement_get, _swigibpy.Order_scalePriceIncrement_set)
scalePriceAdjustValue = _swig_property(_swigibpy.Order_scalePriceAdjustValue_get, _swigibpy.Order_scalePriceAdjustValue_set)
scalePriceAdjustInterval = _swig_property(_swigibpy.Order_scalePriceAdjustInterval_get, _swigibpy.Order_scalePriceAdjustInterval_set)
scaleProfitOffset = _swig_property(_swigibpy.Order_scaleProfitOffset_get, _swigibpy.Order_scaleProfitOffset_set)
scaleAutoReset = _swig_property(_swigibpy.Order_scaleAutoReset_get, _swigibpy.Order_scaleAutoReset_set)
scaleInitPosition = _swig_property(_swigibpy.Order_scaleInitPosition_get, _swigibpy.Order_scaleInitPosition_set)
scaleInitFillQty = _swig_property(_swigibpy.Order_scaleInitFillQty_get, _swigibpy.Order_scaleInitFillQty_set)
scaleRandomPercent = _swig_property(_swigibpy.Order_scaleRandomPercent_get, _swigibpy.Order_scaleRandomPercent_set)
scaleTable = _swig_property(_swigibpy.Order_scaleTable_get, _swigibpy.Order_scaleTable_set)
hedgeType = _swig_property(_swigibpy.Order_hedgeType_get, _swigibpy.Order_hedgeType_set)
hedgeParam = _swig_property(_swigibpy.Order_hedgeParam_get, _swigibpy.Order_hedgeParam_set)
account = _swig_property(_swigibpy.Order_account_get, _swigibpy.Order_account_set)
settlingFirm = _swig_property(_swigibpy.Order_settlingFirm_get, _swigibpy.Order_settlingFirm_set)
clearingAccount = _swig_property(_swigibpy.Order_clearingAccount_get, _swigibpy.Order_clearingAccount_set)
clearingIntent = _swig_property(_swigibpy.Order_clearingIntent_get, _swigibpy.Order_clearingIntent_set)
algoStrategy = _swig_property(_swigibpy.Order_algoStrategy_get, _swigibpy.Order_algoStrategy_set)
algoParams = _swig_property(_swigibpy.Order_algoParams_get, _swigibpy.Order_algoParams_set)
smartComboRoutingParams = _swig_property(_swigibpy.Order_smartComboRoutingParams_get, _swigibpy.Order_smartComboRoutingParams_set)
whatIf = _swig_property(_swigibpy.Order_whatIf_get, _swigibpy.Order_whatIf_set)
notHeld = _swig_property(_swigibpy.Order_notHeld_get, _swigibpy.Order_notHeld_set)
orderComboLegs = _swig_property(_swigibpy.Order_orderComboLegs_get, _swigibpy.Order_orderComboLegs_set)
orderMiscOptions = _swig_property(_swigibpy.Order_orderMiscOptions_get, _swigibpy.Order_orderMiscOptions_set)
def CloneOrderComboLegs(dst, src):
"""CloneOrderComboLegs(Order::OrderComboLegListSPtr & dst, Order::OrderComboLegListSPtr const & src)"""
return _swigibpy.Order_CloneOrderComboLegs(dst, src)
CloneOrderComboLegs = staticmethod(CloneOrderComboLegs)
__swig_destroy__ = _swigibpy.delete_Order
|
class Order(object):
'''Proxy of C++ Order class'''
def __init__(self):
'''__init__(Order self) -> Order'''
pass
def CloneOrderComboLegs(dst, src):
'''CloneOrderComboLegs(Order::OrderComboLegListSPtr & dst, Order::OrderComboLegListSPtr const & src)'''
pass
| 3 | 3 | 3 | 0 | 2 | 1 | 1 | 0.03 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 106 | 3 | 100 | 97 | 97 | 3 | 100 | 97 | 97 | 1 | 1 | 0 | 2 |
143,442 |
Komnomnomnom/swigibpy
|
Komnomnomnom_swigibpy/swigibpy.py
|
swigibpy.ExecutionFilter
|
class ExecutionFilter(object):
"""Proxy of C++ ExecutionFilter class"""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
def __init__(self):
"""__init__(ExecutionFilter self) -> ExecutionFilter"""
_swigibpy.ExecutionFilter_swiginit(self, _swigibpy.new_ExecutionFilter())
m_clientId = _swig_property(_swigibpy.ExecutionFilter_m_clientId_get, _swigibpy.ExecutionFilter_m_clientId_set)
m_acctCode = _swig_property(_swigibpy.ExecutionFilter_m_acctCode_get, _swigibpy.ExecutionFilter_m_acctCode_set)
m_time = _swig_property(_swigibpy.ExecutionFilter_m_time_get, _swigibpy.ExecutionFilter_m_time_set)
m_symbol = _swig_property(_swigibpy.ExecutionFilter_m_symbol_get, _swigibpy.ExecutionFilter_m_symbol_set)
m_secType = _swig_property(_swigibpy.ExecutionFilter_m_secType_get, _swigibpy.ExecutionFilter_m_secType_set)
m_exchange = _swig_property(_swigibpy.ExecutionFilter_m_exchange_get, _swigibpy.ExecutionFilter_m_exchange_set)
m_side = _swig_property(_swigibpy.ExecutionFilter_m_side_get, _swigibpy.ExecutionFilter_m_side_set)
__swig_destroy__ = _swigibpy.delete_ExecutionFilter
|
class ExecutionFilter(object):
'''Proxy of C++ ExecutionFilter class'''
def __init__(self):
'''__init__(ExecutionFilter self) -> ExecutionFilter'''
pass
| 2 | 2 | 3 | 0 | 2 | 1 | 1 | 0.15 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 16 | 1 | 13 | 12 | 11 | 2 | 13 | 12 | 11 | 1 | 1 | 0 | 1 |
143,443 |
Komnomnomnom/swigibpy
|
Komnomnomnom_swigibpy/swigibpy.py
|
swigibpy.TagValueList
|
class TagValueList(object):
    """Proxy of C++ std::vector<(shared_ptr<(TagValue)>)> class"""
    # Auto-generated SWIG proxy: every method below delegates directly to the
    # matching C-level function in the _swigibpy extension module.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def iterator(self):
        """iterator(TagValueList self) -> SwigPyIterator"""
        return _swigibpy.TagValueList_iterator(self)
    def __iter__(self):
        # Python iteration protocol delegates to the SWIG-generated iterator.
        return self.iterator()
    def __nonzero__(self):
        """__nonzero__(TagValueList self) -> bool"""
        return _swigibpy.TagValueList___nonzero__(self)
    def __bool__(self):
        """__bool__(TagValueList self) -> bool"""
        return _swigibpy.TagValueList___bool__(self)
    def __len__(self):
        """__len__(TagValueList self) -> std::vector< shared_ptr< TagValue > >::size_type"""
        return _swigibpy.TagValueList___len__(self)
    def pop(self):
        """pop(TagValueList self) -> std::vector< shared_ptr< TagValue > >::value_type"""
        return _swigibpy.TagValueList_pop(self)
    def __getslice__(self, i, j):
        """__getslice__(TagValueList self, std::vector< shared_ptr< TagValue > >::difference_type i, std::vector< shared_ptr< TagValue > >::difference_type j) -> TagValueList"""
        return _swigibpy.TagValueList___getslice__(self, i, j)
    def __setslice__(self, *args, **kwargs):
        """__setslice__(TagValueList self, std::vector< shared_ptr< TagValue > >::difference_type i, std::vector< shared_ptr< TagValue > >::difference_type j, TagValueList v)"""
        return _swigibpy.TagValueList___setslice__(self, *args, **kwargs)
    def __delslice__(self, i, j):
        """__delslice__(TagValueList self, std::vector< shared_ptr< TagValue > >::difference_type i, std::vector< shared_ptr< TagValue > >::difference_type j)"""
        return _swigibpy.TagValueList___delslice__(self, i, j)
    def __delitem__(self, *args):
        """
        __delitem__(TagValueList self, std::vector< shared_ptr< TagValue > >::difference_type i)
        __delitem__(TagValueList self, PySliceObject * slice)
        """
        # Overloaded on index vs. slice; resolution happens in the C extension.
        return _swigibpy.TagValueList___delitem__(self, *args)
    def __getitem__(self, *args):
        """
        __getitem__(TagValueList self, PySliceObject * slice) -> TagValueList
        __getitem__(TagValueList self, std::vector< shared_ptr< TagValue > >::difference_type i) -> std::vector< shared_ptr< TagValue > >::value_type const &
        """
        return _swigibpy.TagValueList___getitem__(self, *args)
    def __setitem__(self, *args):
        """
        __setitem__(TagValueList self, PySliceObject * slice, TagValueList v)
        __setitem__(TagValueList self, PySliceObject * slice)
        __setitem__(TagValueList self, std::vector< shared_ptr< TagValue > >::difference_type i, std::vector< shared_ptr< TagValue > >::value_type const & x)
        """
        return _swigibpy.TagValueList___setitem__(self, *args)
    def append(self, x):
        """append(TagValueList self, std::vector< shared_ptr< TagValue > >::value_type const & x)"""
        return _swigibpy.TagValueList_append(self, x)
    def empty(self):
        """empty(TagValueList self) -> bool"""
        return _swigibpy.TagValueList_empty(self)
    def size(self):
        """size(TagValueList self) -> std::vector< shared_ptr< TagValue > >::size_type"""
        return _swigibpy.TagValueList_size(self)
    def clear(self):
        """clear(TagValueList self)"""
        return _swigibpy.TagValueList_clear(self)
    def swap(self, v):
        """swap(TagValueList self, TagValueList v)"""
        return _swigibpy.TagValueList_swap(self, v)
    def get_allocator(self):
        """get_allocator(TagValueList self) -> std::vector< shared_ptr< TagValue > >::allocator_type"""
        return _swigibpy.TagValueList_get_allocator(self)
    def begin(self):
        """begin(TagValueList self) -> std::vector< shared_ptr< TagValue > >::iterator"""
        return _swigibpy.TagValueList_begin(self)
    def end(self):
        """end(TagValueList self) -> std::vector< shared_ptr< TagValue > >::iterator"""
        return _swigibpy.TagValueList_end(self)
    def rbegin(self):
        """rbegin(TagValueList self) -> std::vector< shared_ptr< TagValue > >::reverse_iterator"""
        return _swigibpy.TagValueList_rbegin(self)
    def rend(self):
        """rend(TagValueList self) -> std::vector< shared_ptr< TagValue > >::reverse_iterator"""
        return _swigibpy.TagValueList_rend(self)
    def pop_back(self):
        """pop_back(TagValueList self)"""
        return _swigibpy.TagValueList_pop_back(self)
    def erase(self, *args):
        """
        erase(TagValueList self, std::vector< shared_ptr< TagValue > >::iterator pos) -> std::vector< shared_ptr< TagValue > >::iterator
        erase(TagValueList self, std::vector< shared_ptr< TagValue > >::iterator first, std::vector< shared_ptr< TagValue > >::iterator last) -> std::vector< shared_ptr< TagValue > >::iterator
        """
        return _swigibpy.TagValueList_erase(self, *args)
    def __init__(self, *args):
        """
        __init__(std::vector<(shared_ptr<(TagValue)>)> self) -> TagValueList
        __init__(std::vector<(shared_ptr<(TagValue)>)> self, TagValueList arg2) -> TagValueList
        __init__(std::vector<(shared_ptr<(TagValue)>)> self, std::vector< shared_ptr< TagValue > >::size_type size) -> TagValueList
        __init__(std::vector<(shared_ptr<(TagValue)>)> self, std::vector< shared_ptr< TagValue > >::size_type size, std::vector< shared_ptr< TagValue > >::value_type const & value) -> TagValueList
        """
        # Four C++ constructor overloads; the C extension selects one by *args.
        _swigibpy.TagValueList_swiginit(self, _swigibpy.new_TagValueList(*args))
    def push_back(self, x):
        """push_back(TagValueList self, std::vector< shared_ptr< TagValue > >::value_type const & x)"""
        return _swigibpy.TagValueList_push_back(self, x)
    def front(self):
        """front(TagValueList self) -> std::vector< shared_ptr< TagValue > >::value_type const &"""
        return _swigibpy.TagValueList_front(self)
    def back(self):
        """back(TagValueList self) -> std::vector< shared_ptr< TagValue > >::value_type const &"""
        return _swigibpy.TagValueList_back(self)
    def assign(self, n, x):
        """assign(TagValueList self, std::vector< shared_ptr< TagValue > >::size_type n, std::vector< shared_ptr< TagValue > >::value_type const & x)"""
        return _swigibpy.TagValueList_assign(self, n, x)
    def resize(self, *args):
        """
        resize(TagValueList self, std::vector< shared_ptr< TagValue > >::size_type new_size)
        resize(TagValueList self, std::vector< shared_ptr< TagValue > >::size_type new_size, std::vector< shared_ptr< TagValue > >::value_type const & x)
        """
        return _swigibpy.TagValueList_resize(self, *args)
    def insert(self, *args):
        """
        insert(TagValueList self, std::vector< shared_ptr< TagValue > >::iterator pos, std::vector< shared_ptr< TagValue > >::value_type const & x) -> std::vector< shared_ptr< TagValue > >::iterator
        insert(TagValueList self, std::vector< shared_ptr< TagValue > >::iterator pos, std::vector< shared_ptr< TagValue > >::size_type n, std::vector< shared_ptr< TagValue > >::value_type const & x)
        """
        return _swigibpy.TagValueList_insert(self, *args)
    def reserve(self, n):
        """reserve(TagValueList self, std::vector< shared_ptr< TagValue > >::size_type n)"""
        return _swigibpy.TagValueList_reserve(self, n)
    def capacity(self):
        """capacity(TagValueList self) -> std::vector< shared_ptr< TagValue > >::size_type"""
        return _swigibpy.TagValueList_capacity(self)
    # Destructor hook invoked by SWIG when the proxy owns the C++ object.
    __swig_destroy__ = _swigibpy.delete_TagValueList
|
class TagValueList(object):
    """Proxy of C++ std::vector<(shared_ptr<(TagValue)>)> class"""
    # Skeleton: every method is a no-op stub whose body is just its docstring.

    def iterator(self):
        """iterator(TagValueList self) -> SwigPyIterator"""

    def __iter__(self):
        """Stub iteration hook (no behaviour in the skeleton)."""

    def __nonzero__(self):
        """__nonzero__(TagValueList self) -> bool"""

    def __bool__(self):
        """__bool__(TagValueList self) -> bool"""

    def __len__(self):
        """__len__(TagValueList self) -> std::vector< shared_ptr< TagValue > >::size_type"""

    def pop(self):
        """pop(TagValueList self) -> std::vector< shared_ptr< TagValue > >::value_type"""

    def __getslice__(self, i, j):
        """__getslice__(TagValueList self, std::vector< shared_ptr< TagValue > >::difference_type i, std::vector< shared_ptr< TagValue > >::difference_type j) -> TagValueList"""

    def __setslice__(self, *args, **kwargs):
        """__setslice__(TagValueList self, std::vector< shared_ptr< TagValue > >::difference_type i, std::vector< shared_ptr< TagValue > >::difference_type j, TagValueList v)"""

    def __delslice__(self, i, j):
        """__delslice__(TagValueList self, std::vector< shared_ptr< TagValue > >::difference_type i, std::vector< shared_ptr< TagValue > >::difference_type j)"""

    def __delitem__(self, *args):
        """
        __delitem__(TagValueList self, std::vector< shared_ptr< TagValue > >::difference_type i)
        __delitem__(TagValueList self, PySliceObject * slice)
        """

    def __getitem__(self, *args):
        """
        __getitem__(TagValueList self, PySliceObject * slice) -> TagValueList
        __getitem__(TagValueList self, std::vector< shared_ptr< TagValue > >::difference_type i) -> std::vector< shared_ptr< TagValue > >::value_type const &
        """

    def __setitem__(self, *args):
        """
        __setitem__(TagValueList self, PySliceObject * slice, TagValueList v)
        __setitem__(TagValueList self, PySliceObject * slice)
        __setitem__(TagValueList self, std::vector< shared_ptr< TagValue > >::difference_type i, std::vector< shared_ptr< TagValue > >::value_type const & x)
        """

    def append(self, x):
        """append(TagValueList self, std::vector< shared_ptr< TagValue > >::value_type const & x)"""

    def empty(self):
        """empty(TagValueList self) -> bool"""

    def size(self):
        """size(TagValueList self) -> std::vector< shared_ptr< TagValue > >::size_type"""

    def clear(self):
        """clear(TagValueList self)"""

    def swap(self, v):
        """swap(TagValueList self, TagValueList v)"""

    def get_allocator(self):
        """get_allocator(TagValueList self) -> std::vector< shared_ptr< TagValue > >::allocator_type"""

    def begin(self):
        """begin(TagValueList self) -> std::vector< shared_ptr< TagValue > >::iterator"""

    def end(self):
        """end(TagValueList self) -> std::vector< shared_ptr< TagValue > >::iterator"""

    def rbegin(self):
        """rbegin(TagValueList self) -> std::vector< shared_ptr< TagValue > >::reverse_iterator"""

    def rend(self):
        """rend(TagValueList self) -> std::vector< shared_ptr< TagValue > >::reverse_iterator"""

    def pop_back(self):
        """pop_back(TagValueList self)"""

    def erase(self, *args):
        """
        erase(TagValueList self, std::vector< shared_ptr< TagValue > >::iterator pos) -> std::vector< shared_ptr< TagValue > >::iterator
        erase(TagValueList self, std::vector< shared_ptr< TagValue > >::iterator first, std::vector< shared_ptr< TagValue > >::iterator last) -> std::vector< shared_ptr< TagValue > >::iterator
        """

    def __init__(self, *args):
        """
        __init__(std::vector<(shared_ptr<(TagValue)>)> self) -> TagValueList
        __init__(std::vector<(shared_ptr<(TagValue)>)> self, TagValueList arg2) -> TagValueList
        __init__(std::vector<(shared_ptr<(TagValue)>)> self, std::vector< shared_ptr< TagValue > >::size_type size) -> TagValueList
        __init__(std::vector<(shared_ptr<(TagValue)>)> self, std::vector< shared_ptr< TagValue > >::size_type size, std::vector< shared_ptr< TagValue > >::value_type const & value) -> TagValueList
        """

    def push_back(self, x):
        """push_back(TagValueList self, std::vector< shared_ptr< TagValue > >::value_type const & x)"""

    def front(self):
        """front(TagValueList self) -> std::vector< shared_ptr< TagValue > >::value_type const &"""

    def back(self):
        """back(TagValueList self) -> std::vector< shared_ptr< TagValue > >::value_type const &"""

    def assign(self, n, x):
        """assign(TagValueList self, std::vector< shared_ptr< TagValue > >::size_type n, std::vector< shared_ptr< TagValue > >::value_type const & x)"""

    def resize(self, *args):
        """
        resize(TagValueList self, std::vector< shared_ptr< TagValue > >::size_type new_size)
        resize(TagValueList self, std::vector< shared_ptr< TagValue > >::size_type new_size, std::vector< shared_ptr< TagValue > >::value_type const & x)
        """

    def insert(self, *args):
        """
        insert(TagValueList self, std::vector< shared_ptr< TagValue > >::iterator pos, std::vector< shared_ptr< TagValue > >::value_type const & x) -> std::vector< shared_ptr< TagValue > >::iterator
        insert(TagValueList self, std::vector< shared_ptr< TagValue > >::iterator pos, std::vector< shared_ptr< TagValue > >::size_type n, std::vector< shared_ptr< TagValue > >::value_type const & x)
        """

    def reserve(self, n):
        """reserve(TagValueList self, std::vector< shared_ptr< TagValue > >::size_type n)"""

    def capacity(self):
        """capacity(TagValueList self) -> std::vector< shared_ptr< TagValue > >::size_type"""
| 34 | 33 | 4 | 0 | 2 | 2 | 1 | 0.81 | 1 | 0 | 0 | 0 | 33 | 0 | 33 | 33 | 190 | 63 | 70 | 37 | 36 | 57 | 70 | 37 | 36 | 1 | 1 | 0 | 33 |
143,444 |
Komnomnomnom/swigibpy
|
Komnomnomnom_swigibpy/swigibpy.py
|
swigibpy.Execution
|
class Execution(object):
    """Proxy of C++ Execution class"""
    # Auto-generated SWIG proxy: each attribute below maps onto a field of the
    # underlying C++ Execution object via a _swig_property getter/setter pair.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self):
        """__init__(Execution self) -> Execution"""
        _swigibpy.Execution_swiginit(self, _swigibpy.new_Execution())
    # Execution-record fields exposed through the SWIG property wrappers.
    execId = _swig_property(_swigibpy.Execution_execId_get, _swigibpy.Execution_execId_set)
    time = _swig_property(_swigibpy.Execution_time_get, _swigibpy.Execution_time_set)
    acctNumber = _swig_property(_swigibpy.Execution_acctNumber_get, _swigibpy.Execution_acctNumber_set)
    exchange = _swig_property(_swigibpy.Execution_exchange_get, _swigibpy.Execution_exchange_set)
    side = _swig_property(_swigibpy.Execution_side_get, _swigibpy.Execution_side_set)
    shares = _swig_property(_swigibpy.Execution_shares_get, _swigibpy.Execution_shares_set)
    price = _swig_property(_swigibpy.Execution_price_get, _swigibpy.Execution_price_set)
    permId = _swig_property(_swigibpy.Execution_permId_get, _swigibpy.Execution_permId_set)
    clientId = _swig_property(_swigibpy.Execution_clientId_get, _swigibpy.Execution_clientId_set)
    orderId = _swig_property(_swigibpy.Execution_orderId_get, _swigibpy.Execution_orderId_set)
    liquidation = _swig_property(_swigibpy.Execution_liquidation_get, _swigibpy.Execution_liquidation_set)
    cumQty = _swig_property(_swigibpy.Execution_cumQty_get, _swigibpy.Execution_cumQty_set)
    avgPrice = _swig_property(_swigibpy.Execution_avgPrice_get, _swigibpy.Execution_avgPrice_set)
    orderRef = _swig_property(_swigibpy.Execution_orderRef_get, _swigibpy.Execution_orderRef_set)
    evRule = _swig_property(_swigibpy.Execution_evRule_get, _swigibpy.Execution_evRule_set)
    evMultiplier = _swig_property(_swigibpy.Execution_evMultiplier_get, _swigibpy.Execution_evMultiplier_set)
    # Destructor hook invoked by SWIG when the proxy owns the C++ object.
    __swig_destroy__ = _swigibpy.delete_Execution
|
class Execution(object):
    """Skeleton proxy of the C++ Execution class (stub body only)."""

    def __init__(self):
        """__init__(Execution self) -> Execution"""
        # Stub: the real initialisation lives in the generated SWIG wrapper.
| 2 | 2 | 3 | 0 | 2 | 1 | 1 | 0.09 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 25 | 1 | 22 | 21 | 20 | 2 | 22 | 21 | 20 | 1 | 1 | 0 | 1 |
143,445 |
Komnomnomnom/swigibpy
|
Komnomnomnom_swigibpy/swigibpy.py
|
swigibpy.EWrapperQuiet
|
class EWrapperQuiet(EWrapper):
    """Implements all EWrapper methods and ignores method calls."""

    def _ignore_call(self, *args, **kwargs):
        """Swallow any EWrapper callback without acting on it."""
|
class EWrapperQuiet(EWrapper):
    """Implements all EWrapper methods and ignores method calls."""

    def _ignore_call(self, *args, **kwargs):
        # Accept any arguments and silently discard the callback.
        return None
| 2 | 1 | 2 | 0 | 2 | 0 | 1 | 0.33 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 50 | 5 | 1 | 3 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 2 | 0 | 1 |
143,446 |
Komnomnomnom/swigibpy
|
Komnomnomnom_swigibpy/swigibpy.py
|
swigibpy.EWrapper
|
class EWrapper(object):
    """Proxy of C++ EWrapper class"""
    # Auto-generated SWIG director proxy for the TWS callback interface, with
    # hand-written Python error handlers (winError, error, pyError) mixed in.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    __swig_destroy__ = _swigibpy.delete_EWrapper
    def tickPrice(self, tickerId, field, price, canAutoExecute):
        """tickPrice(EWrapper self, TickerId tickerId, TickType field, double price, int canAutoExecute)"""
        return _swigibpy.EWrapper_tickPrice(self, tickerId, field, price, canAutoExecute)
    def tickSize(self, tickerId, field, size):
        """tickSize(EWrapper self, TickerId tickerId, TickType field, int size)"""
        return _swigibpy.EWrapper_tickSize(self, tickerId, field, size)
    def tickOptionComputation(self, tickerId, tickType, impliedVol, delta, optPrice, pvDividend, gamma, vega, theta, undPrice):
        """tickOptionComputation(EWrapper self, TickerId tickerId, TickType tickType, double impliedVol, double delta, double optPrice, double pvDividend, double gamma, double vega, double theta, double undPrice)"""
        return _swigibpy.EWrapper_tickOptionComputation(self, tickerId, tickType, impliedVol, delta, optPrice, pvDividend, gamma, vega, theta, undPrice)
    def tickGeneric(self, tickerId, tickType, value):
        """tickGeneric(EWrapper self, TickerId tickerId, TickType tickType, double value)"""
        return _swigibpy.EWrapper_tickGeneric(self, tickerId, tickType, value)
    def tickString(self, tickerId, tickType, value):
        """tickString(EWrapper self, TickerId tickerId, TickType tickType, IBString const & value)"""
        return _swigibpy.EWrapper_tickString(self, tickerId, tickType, value)
    def tickEFP(self, tickerId, tickType, basisPoints, formattedBasisPoints, totalDividends, holdDays, futureExpiry, dividendImpact, dividendsToExpiry):
        """tickEFP(EWrapper self, TickerId tickerId, TickType tickType, double basisPoints, IBString const & formattedBasisPoints, double totalDividends, int holdDays, IBString const & futureExpiry, double dividendImpact, double dividendsToExpiry)"""
        return _swigibpy.EWrapper_tickEFP(self, tickerId, tickType, basisPoints, formattedBasisPoints, totalDividends, holdDays, futureExpiry, dividendImpact, dividendsToExpiry)
    def orderStatus(self, orderId, status, filled, remaining, avgFillPrice, permId, parentId, lastFillPrice, clientId, whyHeld):
        """orderStatus(EWrapper self, OrderId orderId, IBString const & status, int filled, int remaining, double avgFillPrice, int permId, int parentId, double lastFillPrice, int clientId, IBString const & whyHeld)"""
        return _swigibpy.EWrapper_orderStatus(self, orderId, status, filled, remaining, avgFillPrice, permId, parentId, lastFillPrice, clientId, whyHeld)
    def openOrder(self, orderId, arg0, arg1, arg2):
        """openOrder(EWrapper self, OrderId orderId, Contract arg0, Order arg1, OrderState arg2)"""
        return _swigibpy.EWrapper_openOrder(self, orderId, arg0, arg1, arg2)
    def openOrderEnd(self):
        """openOrderEnd(EWrapper self)"""
        return _swigibpy.EWrapper_openOrderEnd(self)
    def winError(self, str, lastError):
        '''Error in TWS API library'''
        # Hand-written handler: report library-level errors on stderr.
        sys.stderr.write("TWS ERROR - %s: %s\n" % (lastError, str))
    def connectionClosed(self):
        """connectionClosed(EWrapper self)"""
        return _swigibpy.EWrapper_connectionClosed(self)
    def updateAccountValue(self, key, val, currency, accountName):
        """updateAccountValue(EWrapper self, IBString const & key, IBString const & val, IBString const & currency, IBString const & accountName)"""
        return _swigibpy.EWrapper_updateAccountValue(self, key, val, currency, accountName)
    def updatePortfolio(self, contract, position, marketPrice, marketValue, averageCost, unrealizedPNL, realizedPNL, accountName):
        """updatePortfolio(EWrapper self, Contract contract, int position, double marketPrice, double marketValue, double averageCost, double unrealizedPNL, double realizedPNL, IBString const & accountName)"""
        return _swigibpy.EWrapper_updatePortfolio(self, contract, position, marketPrice, marketValue, averageCost, unrealizedPNL, realizedPNL, accountName)
    def updateAccountTime(self, timeStamp):
        """updateAccountTime(EWrapper self, IBString const & timeStamp)"""
        return _swigibpy.EWrapper_updateAccountTime(self, timeStamp)
    def accountDownloadEnd(self, accountName):
        """accountDownloadEnd(EWrapper self, IBString const & accountName)"""
        return _swigibpy.EWrapper_accountDownloadEnd(self, accountName)
    def nextValidId(self, orderId):
        """nextValidId(EWrapper self, OrderId orderId)"""
        return _swigibpy.EWrapper_nextValidId(self, orderId)
    def contractDetails(self, reqId, contractDetails):
        """contractDetails(EWrapper self, int reqId, ContractDetails contractDetails)"""
        return _swigibpy.EWrapper_contractDetails(self, reqId, contractDetails)
    def bondContractDetails(self, reqId, contractDetails):
        """bondContractDetails(EWrapper self, int reqId, ContractDetails contractDetails)"""
        return _swigibpy.EWrapper_bondContractDetails(self, reqId, contractDetails)
    def contractDetailsEnd(self, reqId):
        """contractDetailsEnd(EWrapper self, int reqId)"""
        return _swigibpy.EWrapper_contractDetailsEnd(self, reqId)
    def execDetails(self, reqId, contract, execution):
        """execDetails(EWrapper self, int reqId, Contract contract, Execution execution)"""
        return _swigibpy.EWrapper_execDetails(self, reqId, contract, execution)
    def execDetailsEnd(self, reqId):
        """execDetailsEnd(EWrapper self, int reqId)"""
        return _swigibpy.EWrapper_execDetailsEnd(self, reqId)
    def error(self, id, errorCode, errorString):
        '''Error during communication with TWS'''
        # Hand-written handler: classify the TWS error code into a severity
        # bucket and write a tagged message to stderr.  Note the 501-599 and
        # (2104, 2106, 2108) checks deliberately precede the broader ranges
        # that would otherwise absorb them.
        if errorCode == 165: # Historical data service message
            sys.stderr.write("TWS INFO - %s: %s\n" % (errorCode, errorString))
        elif errorCode >= 501 and errorCode < 600: # Socket read failed
            sys.stderr.write("TWS CLIENT-ERROR - %s: %s\n" % (errorCode, errorString))
        elif errorCode >= 100 and errorCode < 1100:
            sys.stderr.write("TWS ERROR - %s: %s\n" % (errorCode, errorString))
        elif errorCode >= 1100 and errorCode < 2100:
            sys.stderr.write("TWS SYSTEM-ERROR - %s: %s\n" % (errorCode, errorString))
        elif errorCode in (2104, 2106, 2108):
            sys.stderr.write("TWS INFO - %s: %s\n" % (errorCode, errorString))
        elif errorCode >= 2100 and errorCode <= 2110:
            sys.stderr.write("TWS WARNING - %s: %s\n" % (errorCode, errorString))
        else:
            sys.stderr.write("TWS ERROR - %s: %s\n" % (errorCode, errorString))
    def updateMktDepth(self, id, position, operation, side, price, size):
        """updateMktDepth(EWrapper self, TickerId id, int position, int operation, int side, double price, int size)"""
        return _swigibpy.EWrapper_updateMktDepth(self, id, position, operation, side, price, size)
    def updateMktDepthL2(self, id, position, marketMaker, operation, side, price, size):
        """updateMktDepthL2(EWrapper self, TickerId id, int position, IBString marketMaker, int operation, int side, double price, int size)"""
        return _swigibpy.EWrapper_updateMktDepthL2(self, id, position, marketMaker, operation, side, price, size)
    def updateNewsBulletin(self, msgId, msgType, newsMessage, originExch):
        """updateNewsBulletin(EWrapper self, int msgId, int msgType, IBString const & newsMessage, IBString const & originExch)"""
        return _swigibpy.EWrapper_updateNewsBulletin(self, msgId, msgType, newsMessage, originExch)
    def managedAccounts(self, accountsList):
        """managedAccounts(EWrapper self, IBString const & accountsList)"""
        return _swigibpy.EWrapper_managedAccounts(self, accountsList)
    def receiveFA(self, pFaDataType, cxml):
        """receiveFA(EWrapper self, faDataType pFaDataType, IBString const & cxml)"""
        return _swigibpy.EWrapper_receiveFA(self, pFaDataType, cxml)
    def historicalData(self, reqId, date, open, high, low, close, volume, barCount, WAP, hasGaps):
        """historicalData(EWrapper self, TickerId reqId, IBString const & date, double open, double high, double low, double close, int volume, int barCount, double WAP, int hasGaps)"""
        return _swigibpy.EWrapper_historicalData(self, reqId, date, open, high, low, close, volume, barCount, WAP, hasGaps)
    def scannerParameters(self, xml):
        """scannerParameters(EWrapper self, IBString const & xml)"""
        return _swigibpy.EWrapper_scannerParameters(self, xml)
    def scannerData(self, reqId, rank, contractDetails, distance, benchmark, projection, legsStr):
        """scannerData(EWrapper self, int reqId, int rank, ContractDetails contractDetails, IBString const & distance, IBString const & benchmark, IBString const & projection, IBString const & legsStr)"""
        return _swigibpy.EWrapper_scannerData(self, reqId, rank, contractDetails, distance, benchmark, projection, legsStr)
    def scannerDataEnd(self, reqId):
        """scannerDataEnd(EWrapper self, int reqId)"""
        return _swigibpy.EWrapper_scannerDataEnd(self, reqId)
    def realtimeBar(self, reqId, time, open, high, low, close, volume, wap, count):
        """realtimeBar(EWrapper self, TickerId reqId, long time, double open, double high, double low, double close, long volume, double wap, int count)"""
        return _swigibpy.EWrapper_realtimeBar(self, reqId, time, open, high, low, close, volume, wap, count)
    def currentTime(self, time):
        """currentTime(EWrapper self, long time)"""
        return _swigibpy.EWrapper_currentTime(self, time)
    def fundamentalData(self, reqId, data):
        """fundamentalData(EWrapper self, TickerId reqId, IBString const & data)"""
        return _swigibpy.EWrapper_fundamentalData(self, reqId, data)
    def deltaNeutralValidation(self, reqId, underComp):
        """deltaNeutralValidation(EWrapper self, int reqId, UnderComp underComp)"""
        return _swigibpy.EWrapper_deltaNeutralValidation(self, reqId, underComp)
    def tickSnapshotEnd(self, reqId):
        """tickSnapshotEnd(EWrapper self, int reqId)"""
        return _swigibpy.EWrapper_tickSnapshotEnd(self, reqId)
    def marketDataType(self, reqId, marketDataType):
        """marketDataType(EWrapper self, TickerId reqId, int marketDataType)"""
        return _swigibpy.EWrapper_marketDataType(self, reqId, marketDataType)
    def commissionReport(self, commissionReport):
        """commissionReport(EWrapper self, CommissionReport commissionReport)"""
        return _swigibpy.EWrapper_commissionReport(self, commissionReport)
    def position(self, account, contract, position, avgCost):
        """position(EWrapper self, IBString const & account, Contract contract, int position, double avgCost)"""
        return _swigibpy.EWrapper_position(self, account, contract, position, avgCost)
    def positionEnd(self):
        """positionEnd(EWrapper self)"""
        return _swigibpy.EWrapper_positionEnd(self)
    def accountSummary(self, reqId, account, tag, value, curency):
        """accountSummary(EWrapper self, int reqId, IBString const & account, IBString const & tag, IBString const & value, IBString const & curency)"""
        return _swigibpy.EWrapper_accountSummary(self, reqId, account, tag, value, curency)
    def accountSummaryEnd(self, reqId):
        """accountSummaryEnd(EWrapper self, int reqId)"""
        return _swigibpy.EWrapper_accountSummaryEnd(self, reqId)
    def verifyMessageAPI(self, apiData):
        """verifyMessageAPI(EWrapper self, IBString const & apiData)"""
        return _swigibpy.EWrapper_verifyMessageAPI(self, apiData)
    def verifyCompleted(self, isSuccessful, errorText):
        """verifyCompleted(EWrapper self, bool isSuccessful, IBString const & errorText)"""
        return _swigibpy.EWrapper_verifyCompleted(self, isSuccessful, errorText)
    def displayGroupList(self, reqId, groups):
        """displayGroupList(EWrapper self, int reqId, IBString const & groups)"""
        return _swigibpy.EWrapper_displayGroupList(self, reqId, groups)
    def displayGroupUpdated(self, reqId, contractInfo):
        """displayGroupUpdated(EWrapper self, int reqId, IBString const & contractInfo)"""
        return _swigibpy.EWrapper_displayGroupUpdated(self, reqId, contractInfo)
    def pyError(self, type, value, traceback):
        '''Handles an error thrown during invocation of an EWrapper method.
        Arguments are those provided by sys.exc_info()
        '''
        sys.stderr.write("Exception thrown during EWrapper method dispatch:\n")
        print_exception(type, value, traceback)
    def __init__(self):
        """__init__(EWrapper self) -> EWrapper"""
        # NOTE(review): when instantiated directly (not subclassed), None is
        # passed as the director target — presumably so the SWIG director does
        # not dispatch virtual calls back into Python; confirm against the
        # SWIG director documentation.
        if self.__class__ == EWrapper:
            _self = None
        else:
            _self = self
        _swigibpy.EWrapper_swiginit(self, _swigibpy.new_EWrapper(_self, ))
    def __disown__(self):
        # Hand ownership of the underlying C++ object over to SWIG and return
        # a weak proxy so this Python wrapper no longer keeps it alive.
        self.this.disown()
        _swigibpy.disown_EWrapper(self)
        return weakref_proxy(self)
|
class EWrapper(object):
    """Proxy of C++ EWrapper class"""
    # Skeleton: every callback is a no-op stub whose body is just its docstring.

    def tickPrice(self, tickerId, field, price, canAutoExecute):
        """tickPrice(EWrapper self, TickerId tickerId, TickType field, double price, int canAutoExecute)"""

    def tickSize(self, tickerId, field, size):
        """tickSize(EWrapper self, TickerId tickerId, TickType field, int size)"""

    def tickOptionComputation(self, tickerId, tickType, impliedVol, delta, optPrice, pvDividend, gamma, vega, theta, undPrice):
        """tickOptionComputation(EWrapper self, TickerId tickerId, TickType tickType, double impliedVol, double delta, double optPrice, double pvDividend, double gamma, double vega, double theta, double undPrice)"""

    def tickGeneric(self, tickerId, tickType, value):
        """tickGeneric(EWrapper self, TickerId tickerId, TickType tickType, double value)"""

    def tickString(self, tickerId, tickType, value):
        """tickString(EWrapper self, TickerId tickerId, TickType tickType, IBString const & value)"""

    def tickEFP(self, tickerId, tickType, basisPoints, formattedBasisPoints, totalDividends, holdDays, futureExpiry, dividendImpact, dividendsToExpiry):
        """tickEFP(EWrapper self, TickerId tickerId, TickType tickType, double basisPoints, IBString const & formattedBasisPoints, double totalDividends, int holdDays, IBString const & futureExpiry, double dividendImpact, double dividendsToExpiry)"""

    def orderStatus(self, orderId, status, filled, remaining, avgFillPrice, permId, parentId, lastFillPrice, clientId, whyHeld):
        """orderStatus(EWrapper self, OrderId orderId, IBString const & status, int filled, int remaining, double avgFillPrice, int permId, int parentId, double lastFillPrice, int clientId, IBString const & whyHeld)"""

    def openOrder(self, orderId, arg0, arg1, arg2):
        """openOrder(EWrapper self, OrderId orderId, Contract arg0, Order arg1, OrderState arg2)"""

    def openOrderEnd(self):
        """openOrderEnd(EWrapper self)"""

    def winError(self, str, lastError):
        """Error in TWS API library"""

    def connectionClosed(self):
        """connectionClosed(EWrapper self)"""

    def updateAccountValue(self, key, val, currency, accountName):
        """updateAccountValue(EWrapper self, IBString const & key, IBString const & val, IBString const & currency, IBString const & accountName)"""

    def updatePortfolio(self, contract, position, marketPrice, marketValue, averageCost, unrealizedPNL, realizedPNL, accountName):
        """updatePortfolio(EWrapper self, Contract contract, int position, double marketPrice, double marketValue, double averageCost, double unrealizedPNL, double realizedPNL, IBString const & accountName)"""

    def updateAccountTime(self, timeStamp):
        """updateAccountTime(EWrapper self, IBString const & timeStamp)"""

    def accountDownloadEnd(self, accountName):
        """accountDownloadEnd(EWrapper self, IBString const & accountName)"""

    def nextValidId(self, orderId):
        """nextValidId(EWrapper self, OrderId orderId)"""

    def contractDetails(self, reqId, contractDetails):
        """contractDetails(EWrapper self, int reqId, ContractDetails contractDetails)"""

    def bondContractDetails(self, reqId, contractDetails):
        """bondContractDetails(EWrapper self, int reqId, ContractDetails contractDetails)"""

    def contractDetailsEnd(self, reqId):
        """contractDetailsEnd(EWrapper self, int reqId)"""

    def execDetails(self, reqId, contract, execution):
        """execDetails(EWrapper self, int reqId, Contract contract, Execution execution)"""

    def execDetailsEnd(self, reqId):
        """execDetailsEnd(EWrapper self, int reqId)"""

    def error(self, id, errorCode, errorString):
        """Error during communication with TWS"""

    def updateMktDepth(self, id, position, operation, side, price, size):
        """updateMktDepth(EWrapper self, TickerId id, int position, int operation, int side, double price, int size)"""

    def updateMktDepthL2(self, id, position, marketMaker, operation, side, price, size):
        """updateMktDepthL2(EWrapper self, TickerId id, int position, IBString marketMaker, int operation, int side, double price, int size)"""

    def updateNewsBulletin(self, msgId, msgType, newsMessage, originExch):
        """updateNewsBulletin(EWrapper self, int msgId, int msgType, IBString const & newsMessage, IBString const & originExch)"""

    def managedAccounts(self, accountsList):
        """managedAccounts(EWrapper self, IBString const & accountsList)"""

    def receiveFA(self, pFaDataType, cxml):
        """receiveFA(EWrapper self, faDataType pFaDataType, IBString const & cxml)"""

    def historicalData(self, reqId, date, open, high, low, close, volume, barCount, WAP, hasGaps):
        """historicalData(EWrapper self, TickerId reqId, IBString const & date, double open, double high, double low, double close, int volume, int barCount, double WAP, int hasGaps)"""

    def scannerParameters(self, xml):
        """scannerParameters(EWrapper self, IBString const & xml)"""

    def scannerData(self, reqId, rank, contractDetails, distance, benchmark, projection, legsStr):
        """scannerData(EWrapper self, int reqId, int rank, ContractDetails contractDetails, IBString const & distance, IBString const & benchmark, IBString const & projection, IBString const & legsStr)"""

    def scannerDataEnd(self, reqId):
        """scannerDataEnd(EWrapper self, int reqId)"""

    def realtimeBar(self, reqId, time, open, high, low, close, volume, wap, count):
        """realtimeBar(EWrapper self, TickerId reqId, long time, double open, double high, double low, double close, long volume, double wap, int count)"""

    def currentTime(self, time):
        """currentTime(EWrapper self, long time)"""

    def fundamentalData(self, reqId, data):
        """fundamentalData(EWrapper self, TickerId reqId, IBString const & data)"""

    def deltaNeutralValidation(self, reqId, underComp):
        """deltaNeutralValidation(EWrapper self, int reqId, UnderComp underComp)"""

    def tickSnapshotEnd(self, reqId):
        """tickSnapshotEnd(EWrapper self, int reqId)"""

    def marketDataType(self, reqId, marketDataType):
        """marketDataType(EWrapper self, TickerId reqId, int marketDataType)"""

    def commissionReport(self, commissionReport):
        """commissionReport(EWrapper self, CommissionReport commissionReport)"""

    def position(self, account, contract, position, avgCost):
        """position(EWrapper self, IBString const & account, Contract contract, int position, double avgCost)"""

    def positionEnd(self):
        """positionEnd(EWrapper self)"""

    def accountSummary(self, reqId, account, tag, value, curency):
        """accountSummary(EWrapper self, int reqId, IBString const & account, IBString const & tag, IBString const & value, IBString const & curency)"""

    def accountSummaryEnd(self, reqId):
        """accountSummaryEnd(EWrapper self, int reqId)"""

    def verifyMessageAPI(self, apiData):
        """verifyMessageAPI(EWrapper self, IBString const & apiData)"""

    def verifyCompleted(self, isSuccessful, errorText):
        """verifyCompleted(EWrapper self, bool isSuccessful, IBString const & errorText)"""

    def displayGroupList(self, reqId, groups):
        """displayGroupList(EWrapper self, int reqId, IBString const & groups)"""

    def displayGroupUpdated(self, reqId, contractInfo):
        """displayGroupUpdated(EWrapper self, int reqId, IBString const & contractInfo)"""

    def pyError(self, type, value, traceback):
        """Handles an error thrown during invocation of an EWrapper method.
        Arguments are those provided by sys.exc_info()
        """

    def __init__(self):
        """__init__(EWrapper self) -> EWrapper"""

    def __disown__(self):
        """Stub ownership-transfer hook (no behaviour in the skeleton)."""
| 50 | 49 | 3 | 0 | 2 | 1 | 1 | 0.43 | 1 | 0 | 0 | 6 | 49 | 0 | 49 | 49 | 271 | 98 | 122 | 54 | 72 | 53 | 115 | 54 | 65 | 7 | 1 | 1 | 56 |
143,447 |
Komnomnomnom/swigibpy
|
Komnomnomnom_swigibpy/swigibpy.py
|
swigibpy.EPosixClientSocket
|
class EPosixClientSocket(EClientSocketBase):
"""Proxy of C++ EPosixClientSocket class"""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
__repr__ = _swig_repr
def __init__(self, ewrapper, poll_auto=True, reconnect_auto=False):
'''Create an EPosixClientSocket to comunicate with Interactive Brokers.
Parameters
----------
ewrapper : EWrapper subclass to which responses will be dispatched.
poll_auto : boolean, if True automatically poll for messages with a
background thread. Default True
reconnect_auto : boolean, if True automatically reconnect to TWS if
the connection is lost. Default False
'''
_swigibpy.EPosixClientSocket_swiginit(self, _swigibpy.new_EPosixClientSocket(ewrapper))
# store a reference to EWrapper on the Python side (C++ member is protected so inaccessible from Python).
self._ewrapper = ewrapper
self._connect_lock = threading.Lock()
self.poller = None
self._poll_auto = poll_auto
self.reconnect_auto = reconnect_auto
self._connect_args = None
__swig_destroy__ = _swigibpy.delete_EPosixClientSocket
def eConnect(self, host, port, clientId=0, extraAuth=False, **kwargs):
if "poll_auto" in kwargs:
warnings.warn("eConnect argument 'poll_auto' is deprecated, use 'poll_auto' arg in constructor instead", DeprecationWarning)
self.poll_auto = kwargs.pop('poll_auto')
with self._connect_lock:
success = _swigibpy.EPosixClientSocket_eConnect(self, host, port, clientId, extraAuth)
if success:
self._connect_args = ((host, port, clientId, extraAuth), kwargs)
if self.isConnected():
self._startPolling()
if self.poller is not None:
self.poller.tws_connected(True)
return success
def eDisconnect(self, stop_polling=True):
if stop_polling:
self._stopPolling()
val = _swigibpy.EPosixClientSocket_eDisconnect(self)
if self.poller is not None:
self.poller.tws_connected(False)
return val
def isSocketOK(self):
"""isSocketOK(EPosixClientSocket self) -> bool"""
return _swigibpy.EPosixClientSocket_isSocketOK(self)
def fd(self):
"""fd(EPosixClientSocket self) -> int"""
return _swigibpy.EPosixClientSocket_fd(self)
def onReceive(self):
"""onReceive(EPosixClientSocket self)"""
return _swigibpy.EPosixClientSocket_onReceive(self)
def onSend(self):
"""onSend(EPosixClientSocket self)"""
return _swigibpy.EPosixClientSocket_onSend(self)
def onError(self):
"""onError(EPosixClientSocket self)"""
return _swigibpy.EPosixClientSocket_onError(self)
def handleSocketError(self):
"""handleSocketError(EPosixClientSocket self) -> bool"""
return _swigibpy.EPosixClientSocket_handleSocketError(self)
def reconnect(self):
if self._connect_args is None:
return
return self.eConnect(*self._connect_args[0], **self._connect_args[1])
def _startPolling(self):
if not self.poll_auto:
return
if self.poller is None or not self.poller.is_alive():
self.poller = TWSPoller(self, self._ewrapper)
self.poller.start()
def _stopPolling(self):
if self.poller is not None:
self.poller.stop_poller()
@property
def poll_auto(self):
return self._poll_auto
@poll_auto.setter
def poll_auto(self, val):
self._poll_auto = val
if val:
self._startPolling()
else:
self._stopPolling()
|
class EPosixClientSocket(EClientSocketBase):
'''Proxy of C++ EPosixClientSocket class'''
def __init__(self, ewrapper, poll_auto=True, reconnect_auto=False):
'''Create an EPosixClientSocket to comunicate with Interactive Brokers.
Parameters
----------
ewrapper : EWrapper subclass to which responses will be dispatched.
poll_auto : boolean, if True automatically poll for messages with a
background thread. Default True
reconnect_auto : boolean, if True automatically reconnect to TWS if
the connection is lost. Default False
'''
pass
def eConnect(self, host, port, clientId=0, extraAuth=False, **kwargs):
pass
def eDisconnect(self, stop_polling=True):
pass
def isSocketOK(self):
'''isSocketOK(EPosixClientSocket self) -> bool'''
pass
def fd(self):
'''fd(EPosixClientSocket self) -> int'''
pass
def onReceive(self):
'''onReceive(EPosixClientSocket self)'''
pass
def onSend(self):
'''onSend(EPosixClientSocket self)'''
pass
def onError(self):
'''onError(EPosixClientSocket self)'''
pass
def handleSocketError(self):
'''handleSocketError(EPosixClientSocket self) -> bool'''
pass
def reconnect(self):
pass
def _startPolling(self):
pass
def _stopPolling(self):
pass
@property
def poll_auto(self):
pass
@poll_auto.setter
def poll_auto(self):
pass
| 17 | 8 | 6 | 0 | 4 | 1 | 2 | 0.25 | 1 | 2 | 1 | 0 | 14 | 6 | 14 | 122 | 116 | 32 | 67 | 28 | 50 | 17 | 64 | 26 | 49 | 5 | 3 | 2 | 25 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.