Dataset schema (one row per column; for string columns, min/max are string lengths):

| column | dtype | min | max |
|---|---|---|---|
| id | int64 | 0 | 843k |
| repository_name | string | 7 | 55 |
| file_path | string | 9 | 332 |
| class_name | string | 3 | 290 |
| human_written_code | string | 12 | 4.36M |
| class_skeleton | string | 19 | 2.2M |
| total_program_units | int64 | 1 | 9.57k |
| total_doc_str | int64 | 0 | 4.2k |
| AvgCountLine | float64 | 0 | 7.89k |
| AvgCountLineBlank | float64 | 0 | 300 |
| AvgCountLineCode | float64 | 0 | 7.89k |
| AvgCountLineComment | float64 | 0 | 7.89k |
| AvgCyclomatic | float64 | 0 | 130 |
| CommentToCodeRatio | float64 | 0 | 176 |
| CountClassBase | float64 | 0 | 48 |
| CountClassCoupled | float64 | 0 | 589 |
| CountClassCoupledModified | float64 | 0 | 581 |
| CountClassDerived | float64 | 0 | 5.37k |
| CountDeclInstanceMethod | float64 | 0 | 4.2k |
| CountDeclInstanceVariable | float64 | 0 | 299 |
| CountDeclMethod | float64 | 0 | 4.2k |
| CountDeclMethodAll | float64 | 0 | 4.2k |
| CountLine | float64 | 1 | 115k |
| CountLineBlank | float64 | 0 | 9.01k |
| CountLineCode | float64 | 0 | 94.4k |
| CountLineCodeDecl | float64 | 0 | 46.1k |
| CountLineCodeExe | float64 | 0 | 91.3k |
| CountLineComment | float64 | 0 | 27k |
| CountStmt | float64 | 1 | 93.2k |
| CountStmtDecl | float64 | 0 | 46.1k |
| CountStmtExe | float64 | 0 | 90.2k |
| MaxCyclomatic | float64 | 0 | 759 |
| MaxInheritanceTree | float64 | 0 | 16 |
| MaxNesting | float64 | 0 | 34 |
| SumCyclomatic | float64 | 0 | 6k |
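This schema matches a Hugging Face dataset-viewer dump, so as a minimal sketch (assuming the `datasets` library; the hub id below is a hypothetical placeholder, since the actual dataset id is not given here) the records could be loaded and filtered like this:

```python
# Minimal loading sketch. "your-org/class-metrics" is a hypothetical
# placeholder; the real hub id is not stated in this dump.
from datasets import load_dataset

ds = load_dataset("your-org/class-metrics", split="train")

# Each record pairs a class's source (human_written_code) and its stubbed
# outline (class_skeleton) with per-class static metrics.
small = ds.filter(lambda r: r["CountLine"] <= 250 and r["total_doc_str"] > 0)
print(small[0]["class_name"], small[0]["MaxCyclomatic"])
```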
id: 2600
repository_name: ARMmbed/mbed-cloud-sdk-python
file_path: ARMmbed_mbed-cloud-sdk-python/src/mbed_cloud/_backends/mds/models/notification_data.py
class_name: mbed_cloud._backends.mds.models.notification_data.NotificationData
human_written_code, then class_skeleton:
class NotificationData(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'ct': 'str', 'ep': 'str', 'max_age': 'str', 'path': 'str', 'payload': 'str' } attribute_map = { 'ct': 'ct', 'ep': 'ep', 'max_age': 'max-age', 'path': 'path', 'payload': 'payload' } def __init__(self, ct=None, ep=None, max_age=None, path=None, payload=None): """ NotificationData - a model defined in Swagger """ self._ct = ct self._ep = ep self._max_age = max_age self._path = path self._payload = payload self.discriminator = None @property def ct(self): """ Gets the ct of this NotificationData. Content type. :return: The ct of this NotificationData. :rtype: str """ return self._ct @ct.setter def ct(self, ct): """ Sets the ct of this NotificationData. Content type. :param ct: The ct of this NotificationData. :type: str """ self._ct = ct @property def ep(self): """ Gets the ep of this NotificationData. Device Management Device ID. :return: The ep of this NotificationData. :rtype: str """ return self._ep @ep.setter def ep(self, ep): """ Sets the ep of this NotificationData. Device Management Device ID. :param ep: The ep of this NotificationData. :type: str """ self._ep = ep @property def max_age(self): """ Gets the max_age of this NotificationData. Max age value is an integer number of seconds between 0 and 2^32-1 but the actual maximum cache time is limited to 3 days. A default value of 60 seconds is assumed in the absence of the option. :return: The max_age of this NotificationData. :rtype: str """ return self._max_age @max_age.setter def max_age(self, max_age): """ Sets the max_age of this NotificationData. Max age value is an integer number of seconds between 0 and 2^32-1 but the actual maximum cache time is limited to 3 days. A default value of 60 seconds is assumed in the absence of the option. :param max_age: The max_age of this NotificationData. :type: str """ self._max_age = max_age @property def path(self): """ Gets the path of this NotificationData. URI path. :return: The path of this NotificationData. :rtype: str """ return self._path @path.setter def path(self, path): """ Sets the path of this NotificationData. URI path. :param path: The path of this NotificationData. :type: str """ self._path = path @property def payload(self): """ Gets the payload of this NotificationData. Base64 encoded payload. :return: The payload of this NotificationData. :rtype: str """ return self._payload @payload.setter def payload(self, payload): """ Sets the payload of this NotificationData. Base64 encoded payload. :param payload: The payload of this NotificationData. 
:type: str """ self._payload = payload def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, NotificationData): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
class NotificationData(object): ''' NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. ''' def __init__(self, ct=None, ep=None, max_age=None, path=None, payload=None): ''' NotificationData - a model defined in Swagger ''' pass @property def ct(self): ''' Gets the ct of this NotificationData. Content type. :return: The ct of this NotificationData. :rtype: str ''' pass @ct.setter def ct(self): ''' Sets the ct of this NotificationData. Content type. :param ct: The ct of this NotificationData. :type: str ''' pass @property def ep(self): ''' Gets the ep of this NotificationData. Device Management Device ID. :return: The ep of this NotificationData. :rtype: str ''' pass @ep.setter def ep(self): ''' Sets the ep of this NotificationData. Device Management Device ID. :param ep: The ep of this NotificationData. :type: str ''' pass @property def max_age(self): ''' Gets the max_age of this NotificationData. Max age value is an integer number of seconds between 0 and 2^32-1 but the actual maximum cache time is limited to 3 days. A default value of 60 seconds is assumed in the absence of the option. :return: The max_age of this NotificationData. :rtype: str ''' pass @max_age.setter def max_age(self): ''' Sets the max_age of this NotificationData. Max age value is an integer number of seconds between 0 and 2^32-1 but the actual maximum cache time is limited to 3 days. A default value of 60 seconds is assumed in the absence of the option. :param max_age: The max_age of this NotificationData. :type: str ''' pass @property def path(self): ''' Gets the path of this NotificationData. URI path. :return: The path of this NotificationData. :rtype: str ''' pass @path.setter def path(self): ''' Sets the path of this NotificationData. URI path. :param path: The path of this NotificationData. :type: str ''' pass @property def payload(self): ''' Gets the payload of this NotificationData. Base64 encoded payload. :return: The payload of this NotificationData. :rtype: str ''' pass @payload.setter def payload(self): ''' Sets the payload of this NotificationData. Base64 encoded payload. :param payload: The payload of this NotificationData. :type: str ''' pass def to_dict(self): ''' Returns the model properties as a dict ''' pass def to_str(self): ''' Returns the string representation of the model ''' pass def __repr__(self): ''' For `print` and `pprint` ''' pass def __eq__(self, other): ''' Returns true if both objects are equal ''' pass def __ne__(self, other): ''' Returns true if both objects are not equal ''' pass
metrics:
total_program_units=27, total_doc_str=17
AvgCountLine=10, AvgCountLineBlank=1, AvgCountLineCode=4, AvgCountLineComment=5, AvgCyclomatic=1, CommentToCodeRatio=1.09
CountClassBase=1, CountClassCoupled=3, CountClassCoupledModified=0, CountClassDerived=0
CountDeclInstanceMethod=16, CountDeclInstanceVariable=6, CountDeclMethod=16, CountDeclMethodAll=16
CountLine=209, CountLineBlank=38, CountLineCode=82, CountLineCodeDecl=38, CountLineCodeExe=55, CountLineComment=89
CountStmt=50, CountStmtDecl=28, CountStmtExe=33
MaxCyclomatic=5, MaxInheritanceTree=1, MaxNesting=2, SumCyclomatic=21
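The record above shows the typical swagger-codegen model shape: `to_dict` walks `swagger_types`, `__repr__` pretty-prints that dict, and `__eq__` compares instance dictionaries. A minimal usage sketch, assuming the module's own imports (`from six import iteritems`, `from pprint import pformat`) are in scope as in a standard generated file:

```python
from mbed_cloud._backends.mds.models.notification_data import NotificationData

a = NotificationData(ep="device-01", path="/3/0/0", payload="dGVzdA==")
b = NotificationData(ep="device-01", path="/3/0/0", payload="dGVzdA==")

assert a == b                    # __eq__ compares __dict__
print(a.to_dict()["payload"])    # 'dGVzdA==' (base64 of "test")
print(repr(a))                   # pformat of to_dict(), via to_str/__repr__
```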
id: 2601
repository_name: ARMmbed/mbed-cloud-sdk-python
file_path: ARMmbed_mbed-cloud-sdk-python/src/mbed_cloud/_backends/mds/models/endpoint_data.py
class_name: mbed_cloud._backends.mds.models.endpoint_data.EndpointData
human_written_code, then class_skeleton:
class EndpointData(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'ep': 'str', 'ept': 'str', 'original_ep': 'str', 'q': 'bool', 'resources': 'list[ResourcesData]' } attribute_map = { 'ep': 'ep', 'ept': 'ept', 'original_ep': 'original-ep', 'q': 'q', 'resources': 'resources' } def __init__(self, ep=None, ept=None, original_ep=None, q=None, resources=None): """ EndpointData - a model defined in Swagger """ self._ep = ep self._ept = ept self._original_ep = original_ep self._q = q self._resources = resources self.discriminator = None @property def ep(self): """ Gets the ep of this EndpointData. Unique Device Management device ID. :return: The ep of this EndpointData. :rtype: str """ return self._ep @ep.setter def ep(self, ep): """ Sets the ep of this EndpointData. Unique Device Management device ID. :param ep: The ep of this EndpointData. :type: str """ self._ep = ep @property def ept(self): """ Gets the ept of this EndpointData. Endpoint type. :return: The ept of this EndpointData. :rtype: str """ return self._ept @ept.setter def ept(self, ept): """ Sets the ept of this EndpointData. Endpoint type. :param ept: The ept of this EndpointData. :type: str """ self._ept = ept @property def original_ep(self): """ Gets the original_ep of this EndpointData. In case of a self-provided endpoint name that is used to initiate the device registration, Device Management provides a new device ID to be used from that point on. The new Pelion platform provided Device ID is forwarded as the 'ep' property and the original self-provided one as the optional 'original-ep' property in a registration notification. The name and ID can then be mapped accordingly. Device Management saves the original endpoint name in the Device Directory for future device registrations so that you don't need to do the mapping again. :return: The original_ep of this EndpointData. :rtype: str """ return self._original_ep @original_ep.setter def original_ep(self, original_ep): """ Sets the original_ep of this EndpointData. In case of a self-provided endpoint name that is used to initiate the device registration, Device Management provides a new device ID to be used from that point on. The new Pelion platform provided Device ID is forwarded as the 'ep' property and the original self-provided one as the optional 'original-ep' property in a registration notification. The name and ID can then be mapped accordingly. Device Management saves the original endpoint name in the Device Directory for future device registrations so that you don't need to do the mapping again. :param original_ep: The original_ep of this EndpointData. :type: str """ self._original_ep = original_ep @property def q(self): """ Gets the q of this EndpointData. Queue mode (default value is false). :return: The q of this EndpointData. :rtype: bool """ return self._q @q.setter def q(self, q): """ Sets the q of this EndpointData. Queue mode (default value is false). :param q: The q of this EndpointData. :type: bool """ self._q = q @property def resources(self): """ Gets the resources of this EndpointData. :return: The resources of this EndpointData. :rtype: list[ResourcesData] """ return self._resources @resources.setter def resources(self, resources): """ Sets the resources of this EndpointData. 
:param resources: The resources of this EndpointData. :type: list[ResourcesData] """ self._resources = resources def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, EndpointData): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
class EndpointData(object): ''' NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. ''' def __init__(self, ep=None, ept=None, original_ep=None, q=None, resources=None): ''' EndpointData - a model defined in Swagger ''' pass @property def ep(self): ''' Gets the ep of this EndpointData. Unique Device Management device ID. :return: The ep of this EndpointData. :rtype: str ''' pass @ep.setter def ep(self): ''' Sets the ep of this EndpointData. Unique Device Management device ID. :param ep: The ep of this EndpointData. :type: str ''' pass @property def ept(self): ''' Gets the ept of this EndpointData. Endpoint type. :return: The ept of this EndpointData. :rtype: str ''' pass @ept.setter def ept(self): ''' Sets the ept of this EndpointData. Endpoint type. :param ept: The ept of this EndpointData. :type: str ''' pass @property def original_ep(self): ''' Gets the original_ep of this EndpointData. In case of a self-provided endpoint name that is used to initiate the device registration, Device Management provides a new device ID to be used from that point on. The new Pelion platform provided Device ID is forwarded as the 'ep' property and the original self-provided one as the optional 'original-ep' property in a registration notification. The name and ID can then be mapped accordingly. Device Management saves the original endpoint name in the Device Directory for future device registrations so that you don't need to do the mapping again. :return: The original_ep of this EndpointData. :rtype: str ''' pass @original_ep.setter def original_ep(self): ''' Sets the original_ep of this EndpointData. In case of a self-provided endpoint name that is used to initiate the device registration, Device Management provides a new device ID to be used from that point on. The new Pelion platform provided Device ID is forwarded as the 'ep' property and the original self-provided one as the optional 'original-ep' property in a registration notification. The name and ID can then be mapped accordingly. Device Management saves the original endpoint name in the Device Directory for future device registrations so that you don't need to do the mapping again. :param original_ep: The original_ep of this EndpointData. :type: str ''' pass @property def q(self): ''' Gets the q of this EndpointData. Queue mode (default value is false). :return: The q of this EndpointData. :rtype: bool ''' pass @q.setter def q(self): ''' Sets the q of this EndpointData. Queue mode (default value is false). :param q: The q of this EndpointData. :type: bool ''' pass @property def resources(self): ''' Gets the resources of this EndpointData. :return: The resources of this EndpointData. :rtype: list[ResourcesData] ''' pass @resources.setter def resources(self): ''' Sets the resources of this EndpointData. :param resources: The resources of this EndpointData. :type: list[ResourcesData] ''' pass def to_dict(self): ''' Returns the model properties as a dict ''' pass def to_str(self): ''' Returns the string representation of the model ''' pass def __repr__(self): ''' For `print` and `pprint` ''' pass def __eq__(self, other): ''' Returns true if both objects are equal ''' pass def __ne__(self, other): ''' Returns true if both objects are not equal ''' pass
metrics:
total_program_units=27, total_doc_str=17
AvgCountLine=10, AvgCountLineBlank=1, AvgCountLineCode=4, AvgCountLineComment=5, AvgCyclomatic=1, CommentToCodeRatio=1.06
CountClassBase=1, CountClassCoupled=3, CountClassCoupledModified=0, CountClassDerived=0
CountDeclInstanceMethod=16, CountDeclInstanceVariable=6, CountDeclMethod=16, CountDeclMethodAll=16
CountLine=207, CountLineBlank=38, CountLineCode=82, CountLineCodeDecl=38, CountLineCodeExe=55, CountLineComment=87
CountStmt=50, CountStmtDecl=28, CountStmtExe=33
MaxCyclomatic=5, MaxInheritanceTree=1, MaxNesting=2, SumCyclomatic=21
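EndpointData declares `resources` as `list[ResourcesData]`, and its `to_dict` maps `to_dict` over list members, so nested models serialize recursively. A sketch, under the same assumption that the generated modules' imports are in scope:

```python
from mbed_cloud._backends.mds.models.endpoint_data import EndpointData
from mbed_cloud._backends.mds.models.resources_data import ResourcesData

ep = EndpointData(
    ep="device-01",
    ept="default",
    q=False,
    resources=[ResourcesData(path="/3/0/0", obs=True)],
)
print(ep.to_dict()["resources"][0]["path"])  # '/3/0/0': nested model became a dict
```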
id: 2602
repository_name: ARMmbed/mbed-cloud-sdk-python
file_path: ARMmbed_mbed-cloud-sdk-python/src/mbed_cloud/_backends/mds/models/endpoint.py
class_name: mbed_cloud._backends.mds.models.endpoint.Endpoint
human_written_code, then class_skeleton:
class Endpoint(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'name': 'str', 'q': 'bool', 'status': 'str', 'type': 'str' } attribute_map = { 'name': 'name', 'q': 'q', 'status': 'status', 'type': 'type' } def __init__(self, name=None, q=None, status=None, type=None): """ Endpoint - a model defined in Swagger """ self._name = name self._q = q self._status = status self._type = type self.discriminator = None @property def name(self): """ Gets the name of this Endpoint. Unique Device Management Device ID representing the endpoint. :return: The name of this Endpoint. :rtype: str """ return self._name @name.setter def name(self, name): """ Sets the name of this Endpoint. Unique Device Management Device ID representing the endpoint. :param name: The name of this Endpoint. :type: str """ self._name = name @property def q(self): """ Gets the q of this Endpoint. Determines whether the device is in queue mode. <br/><br/><b>Queue mode</b><br/> When an endpoint is in queue mode, messages sent to the endpoint do not wake up the physical device. The messages are queued and delivered when the device wakes up and connects to Device Management Connect itself. You can also use the queue mode when the device is behind a NAT and cannot be reached directly by Device Management Connect. :return: The q of this Endpoint. :rtype: bool """ return self._q @q.setter def q(self, q): """ Sets the q of this Endpoint. Determines whether the device is in queue mode. <br/><br/><b>Queue mode</b><br/> When an endpoint is in queue mode, messages sent to the endpoint do not wake up the physical device. The messages are queued and delivered when the device wakes up and connects to Device Management Connect itself. You can also use the queue mode when the device is behind a NAT and cannot be reached directly by Device Management Connect. :param q: The q of this Endpoint. :type: bool """ self._q = q @property def status(self): """ Gets the status of this Endpoint. Deprecated and the value is always ACTIVE. Only used for API backwards compatibility reasons. :return: The status of this Endpoint. :rtype: str """ return self._status @status.setter def status(self, status): """ Sets the status of this Endpoint. Deprecated and the value is always ACTIVE. Only used for API backwards compatibility reasons. :param status: The status of this Endpoint. :type: str """ self._status = status @property def type(self): """ Gets the type of this Endpoint. Type of endpoint. (Free text) :return: The type of this Endpoint. :rtype: str """ return self._type @type.setter def type(self, type): """ Sets the type of this Endpoint. Type of endpoint. (Free text) :param type: The type of this Endpoint. 
:type: str """ self._type = type def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, Endpoint): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
class Endpoint(object): ''' NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. ''' def __init__(self, name=None, q=None, status=None, type=None): ''' Endpoint - a model defined in Swagger ''' pass @property def name(self): ''' Gets the name of this Endpoint. Unique Device Management Device ID representing the endpoint. :return: The name of this Endpoint. :rtype: str ''' pass @name.setter def name(self): ''' Sets the name of this Endpoint. Unique Device Management Device ID representing the endpoint. :param name: The name of this Endpoint. :type: str ''' pass @property def q(self): ''' Gets the q of this Endpoint. Determines whether the device is in queue mode. <br/><br/><b>Queue mode</b><br/> When an endpoint is in queue mode, messages sent to the endpoint do not wake up the physical device. The messages are queued and delivered when the device wakes up and connects to Device Management Connect itself. You can also use the queue mode when the device is behind a NAT and cannot be reached directly by Device Management Connect. :return: The q of this Endpoint. :rtype: bool ''' pass @q.setter def q(self): ''' Sets the q of this Endpoint. Determines whether the device is in queue mode. <br/><br/><b>Queue mode</b><br/> When an endpoint is in queue mode, messages sent to the endpoint do not wake up the physical device. The messages are queued and delivered when the device wakes up and connects to Device Management Connect itself. You can also use the queue mode when the device is behind a NAT and cannot be reached directly by Device Management Connect. :param q: The q of this Endpoint. :type: bool ''' pass @property def status(self): ''' Gets the status of this Endpoint. Deprecated and the value is always ACTIVE. Only used for API backwards compatibility reasons. :return: The status of this Endpoint. :rtype: str ''' pass @status.setter def status(self): ''' Sets the status of this Endpoint. Deprecated and the value is always ACTIVE. Only used for API backwards compatibility reasons. :param status: The status of this Endpoint. :type: str ''' pass @property def type(self): ''' Gets the type of this Endpoint. Type of endpoint. (Free text) :return: The type of this Endpoint. :rtype: str ''' pass @type.setter def type(self): ''' Sets the type of this Endpoint. Type of endpoint. (Free text) :param type: The type of this Endpoint. :type: str ''' pass def to_dict(self): ''' Returns the model properties as a dict ''' pass def to_str(self): ''' Returns the string representation of the model ''' pass def __repr__(self): ''' For `print` and `pprint` ''' pass def __eq__(self, other): ''' Returns true if both objects are equal ''' pass def __ne__(self, other): ''' Returns true if both objects are not equal ''' pass
metrics:
total_program_units=23, total_doc_str=15
AvgCountLine=10, AvgCountLineBlank=1, AvgCountLineCode=4, AvgCountLineComment=5, AvgCyclomatic=1, CommentToCodeRatio=1.05
CountClassBase=1, CountClassCoupled=3, CountClassCoupledModified=0, CountClassDerived=0
CountDeclInstanceMethod=14, CountDeclInstanceVariable=5, CountDeclMethod=14, CountDeclMethodAll=14
CountLine=183, CountLineBlank=33, CountLineCode=73, CountLineCodeDecl=33, CountLineCodeExe=50, CountLineComment=77
CountStmt=45, CountStmtDecl=25, CountStmtExe=30
MaxCyclomatic=5, MaxInheritanceTree=1, MaxNesting=2, SumCyclomatic=19
id: 2603
repository_name: ARMmbed/mbed-cloud-sdk-python
file_path: ARMmbed_mbed-cloud-sdk-python/src/mbed_cloud/_backends/mds/models/device_request.py
class_name: mbed_cloud._backends.mds.models.device_request.DeviceRequest
human_written_code, then class_skeleton:
class DeviceRequest(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'accept': 'str', 'content_type': 'str', 'method': 'str', 'payload_b64': 'str', 'uri': 'str' } attribute_map = { 'accept': 'accept', 'content_type': 'content-type', 'method': 'method', 'payload_b64': 'payload-b64', 'uri': 'uri' } def __init__(self, accept=None, content_type=None, method=None, payload_b64=None, uri=None): """ DeviceRequest - a model defined in Swagger """ self._accept = accept self._content_type = content_type self._method = method self._payload_b64 = payload_b64 self._uri = uri self.discriminator = None @property def accept(self): """ Gets the accept of this DeviceRequest. The content type of an accepted response. :return: The accept of this DeviceRequest. :rtype: str """ return self._accept @accept.setter def accept(self, accept): """ Sets the accept of this DeviceRequest. The content type of an accepted response. :param accept: The accept of this DeviceRequest. :type: str """ self._accept = accept @property def content_type(self): """ Gets the content_type of this DeviceRequest. The content type of the payload. :return: The content_type of this DeviceRequest. :rtype: str """ return self._content_type @content_type.setter def content_type(self, content_type): """ Sets the content_type of this DeviceRequest. The content type of the payload. :param content_type: The content_type of this DeviceRequest. :type: str """ self._content_type = content_type @property def method(self): """ Gets the method of this DeviceRequest. The CoAP request method. Allowed values are GET, POST, PUT and DELETE. :return: The method of this DeviceRequest. :rtype: str """ return self._method @method.setter def method(self, method): """ Sets the method of this DeviceRequest. The CoAP request method. Allowed values are GET, POST, PUT and DELETE. :param method: The method of this DeviceRequest. :type: str """ if method is None: raise ValueError("Invalid value for `method`, must not be `None`") self._method = method @property def payload_b64(self): """ Gets the payload_b64 of this DeviceRequest. The base64 encoded payload to be sent to the device. :return: The payload_b64 of this DeviceRequest. :rtype: str """ return self._payload_b64 @payload_b64.setter def payload_b64(self, payload_b64): """ Sets the payload_b64 of this DeviceRequest. The base64 encoded payload to be sent to the device. :param payload_b64: The payload_b64 of this DeviceRequest. :type: str """ self._payload_b64 = payload_b64 @property def uri(self): """ Gets the uri of this DeviceRequest. The URI path of the requested resource. :return: The uri of this DeviceRequest. :rtype: str """ return self._uri @uri.setter def uri(self, uri): """ Sets the uri of this DeviceRequest. The URI path of the requested resource. :param uri: The uri of this DeviceRequest. 
:type: str """ if uri is None: raise ValueError("Invalid value for `uri`, must not be `None`") self._uri = uri def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, DeviceRequest): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
class DeviceRequest(object): ''' NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. ''' def __init__(self, accept=None, content_type=None, method=None, payload_b64=None, uri=None): ''' DeviceRequest - a model defined in Swagger ''' pass @property def accept(self): ''' Gets the accept of this DeviceRequest. The content type of an accepted response. :return: The accept of this DeviceRequest. :rtype: str ''' pass @accept.setter def accept(self): ''' Sets the accept of this DeviceRequest. The content type of an accepted response. :param accept: The accept of this DeviceRequest. :type: str ''' pass @property def content_type(self): ''' Gets the content_type of this DeviceRequest. The content type of the payload. :return: The content_type of this DeviceRequest. :rtype: str ''' pass @content_type.setter def content_type(self): ''' Sets the content_type of this DeviceRequest. The content type of the payload. :param content_type: The content_type of this DeviceRequest. :type: str ''' pass @property def method(self): ''' Gets the method of this DeviceRequest. The CoAP request method. Allowed values are GET, POST, PUT and DELETE. :return: The method of this DeviceRequest. :rtype: str ''' pass @method.setter def method(self): ''' Sets the method of this DeviceRequest. The CoAP request method. Allowed values are GET, POST, PUT and DELETE. :param method: The method of this DeviceRequest. :type: str ''' pass @property def payload_b64(self): ''' Gets the payload_b64 of this DeviceRequest. The base64 encoded payload to be sent to the device. :return: The payload_b64 of this DeviceRequest. :rtype: str ''' pass @payload_b64.setter def payload_b64(self): ''' Sets the payload_b64 of this DeviceRequest. The base64 encoded payload to be sent to the device. :param payload_b64: The payload_b64 of this DeviceRequest. :type: str ''' pass @property def uri(self): ''' Gets the uri of this DeviceRequest. The URI path of the requested resource. :return: The uri of this DeviceRequest. :rtype: str ''' pass @uri.setter def uri(self): ''' Sets the uri of this DeviceRequest. The URI path of the requested resource. :param uri: The uri of this DeviceRequest. :type: str ''' pass def to_dict(self): ''' Returns the model properties as a dict ''' pass def to_str(self): ''' Returns the string representation of the model ''' pass def __repr__(self): ''' For `print` and `pprint` ''' pass def __eq__(self, other): ''' Returns true if both objects are equal ''' pass def __ne__(self, other): ''' Returns true if both objects are not equal ''' pass
metrics:
total_program_units=27, total_doc_str=17
AvgCountLine=10, AvgCountLineBlank=1, AvgCountLineCode=4, AvgCountLineComment=5, AvgCyclomatic=1, CommentToCodeRatio=1.03
CountClassBase=1, CountClassCoupled=4, CountClassCoupledModified=0, CountClassDerived=0
CountDeclInstanceMethod=16, CountDeclInstanceVariable=6, CountDeclMethod=16, CountDeclMethodAll=16
CountLine=213, CountLineBlank=38, CountLineCode=86, CountLineCodeDecl=38, CountLineCodeExe=59, CountLineComment=89
CountStmt=54, CountStmtDecl=28, CountStmtExe=37
MaxCyclomatic=5, MaxInheritanceTree=1, MaxNesting=2, SumCyclomatic=23
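DeviceRequest is the first record here whose setters validate: `method` and `uri` raise ValueError on None. Note that `__init__` writes the private attributes directly, so the checks fire only on property assignment, not at construction time. A sketch:

```python
from mbed_cloud._backends.mds.models.device_request import DeviceRequest

req = DeviceRequest(method="GET", uri="/3/0/0")
req.method = "POST"   # passes the setter's None-check

try:
    req.uri = None    # required field: the setter rejects None
except ValueError as exc:
    print(exc)        # Invalid value for `uri`, must not be `None`
```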
id: 2604
repository_name: ARMmbed/mbed-cloud-sdk-python
file_path: ARMmbed_mbed-cloud-sdk-python/src/mbed_cloud/_backends/mds/models/async_id_response.py
class_name: mbed_cloud._backends.mds.models.async_id_response.AsyncIDResponse
human_written_code, then class_skeleton:
class AsyncIDResponse(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'ct': 'str', 'error': 'str', 'id': 'str', 'max_age': 'str', 'payload': 'str', 'status': 'int' } attribute_map = { 'ct': 'ct', 'error': 'error', 'id': 'id', 'max_age': 'max-age', 'payload': 'payload', 'status': 'status' } def __init__(self, ct=None, error=None, id=None, max_age=None, payload=None, status=None): """ AsyncIDResponse - a model defined in Swagger """ self._ct = ct self._error = error self._id = id self._max_age = max_age self._payload = payload self._status = status self.discriminator = None @property def ct(self): """ Gets the ct of this AsyncIDResponse. The content type. :return: The ct of this AsyncIDResponse. :rtype: str """ return self._ct @ct.setter def ct(self, ct): """ Sets the ct of this AsyncIDResponse. The content type. :param ct: The ct of this AsyncIDResponse. :type: str """ self._ct = ct @property def error(self): """ Gets the error of this AsyncIDResponse. An optional error message describing the error. :return: The error of this AsyncIDResponse. :rtype: str """ return self._error @error.setter def error(self, error): """ Sets the error of this AsyncIDResponse. An optional error message describing the error. :param error: The error of this AsyncIDResponse. :type: str """ self._error = error @property def id(self): """ Gets the id of this AsyncIDResponse. The unique ID of the asynchronous response. :return: The id of this AsyncIDResponse. :rtype: str """ return self._id @id.setter def id(self, id): """ Sets the id of this AsyncIDResponse. The unique ID of the asynchronous response. :param id: The id of this AsyncIDResponse. :type: str """ self._id = id @property def max_age(self): """ Gets the max_age of this AsyncIDResponse. Determines how long this value stays valid in the cache, in seconds. 0 means that the value is not stored in the cache. :return: The max_age of this AsyncIDResponse. :rtype: str """ return self._max_age @max_age.setter def max_age(self, max_age): """ Sets the max_age of this AsyncIDResponse. Determines how long this value stays valid in the cache, in seconds. 0 means that the value is not stored in the cache. :param max_age: The max_age of this AsyncIDResponse. :type: str """ self._max_age = max_age @property def payload(self): """ Gets the payload of this AsyncIDResponse. Requested data, base64 encoded. :return: The payload of this AsyncIDResponse. :rtype: str """ return self._payload @payload.setter def payload(self, payload): """ Sets the payload of this AsyncIDResponse. Requested data, base64 encoded. :param payload: The payload of this AsyncIDResponse. :type: str """ self._payload = payload @property def status(self): """ Gets the status of this AsyncIDResponse. The asynchronous response status code for a device operation related to a proxy request or manual subscription. :return: The status of this AsyncIDResponse. :rtype: int """ return self._status @status.setter def status(self, status): """ Sets the status of this AsyncIDResponse. The asynchronous response status code for a device operation related to a proxy request or manual subscription. :param status: The status of this AsyncIDResponse. 
:type: int """ self._status = status def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, AsyncIDResponse): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
class AsyncIDResponse(object): ''' NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. ''' def __init__(self, ct=None, error=None, id=None, max_age=None, payload=None, status=None): ''' AsyncIDResponse - a model defined in Swagger ''' pass @property def ct(self): ''' Gets the ct of this AsyncIDResponse. The content type. :return: The ct of this AsyncIDResponse. :rtype: str ''' pass @ct.setter def ct(self): ''' Sets the ct of this AsyncIDResponse. The content type. :param ct: The ct of this AsyncIDResponse. :type: str ''' pass @property def error(self): ''' Gets the error of this AsyncIDResponse. An optional error message describing the error. :return: The error of this AsyncIDResponse. :rtype: str ''' pass @error.setter def error(self): ''' Sets the error of this AsyncIDResponse. An optional error message describing the error. :param error: The error of this AsyncIDResponse. :type: str ''' pass @property def id(self): ''' Gets the id of this AsyncIDResponse. The unique ID of the asynchronous response. :return: The id of this AsyncIDResponse. :rtype: str ''' pass @id.setter def id(self): ''' Sets the id of this AsyncIDResponse. The unique ID of the asynchronous response. :param id: The id of this AsyncIDResponse. :type: str ''' pass @property def max_age(self): ''' Gets the max_age of this AsyncIDResponse. Determines how long this value stays valid in the cache, in seconds. 0 means that the value is not stored in the cache. :return: The max_age of this AsyncIDResponse. :rtype: str ''' pass @max_age.setter def max_age(self): ''' Sets the max_age of this AsyncIDResponse. Determines how long this value stays valid in the cache, in seconds. 0 means that the value is not stored in the cache. :param max_age: The max_age of this AsyncIDResponse. :type: str ''' pass @property def payload(self): ''' Gets the payload of this AsyncIDResponse. Requested data, base64 encoded. :return: The payload of this AsyncIDResponse. :rtype: str ''' pass @payload.setter def payload(self): ''' Sets the payload of this AsyncIDResponse. Requested data, base64 encoded. :param payload: The payload of this AsyncIDResponse. :type: str ''' pass @property def status(self): ''' Gets the status of this AsyncIDResponse. The asynchronous response status code for a device operation related to a proxy request or manual subscription. :return: The status of this AsyncIDResponse. :rtype: int ''' pass @status.setter def status(self): ''' Sets the status of this AsyncIDResponse. The asynchronous response status code for a device operation related to a proxy request or manual subscription. :param status: The status of this AsyncIDResponse. :type: int ''' pass def to_dict(self): ''' Returns the model properties as a dict ''' pass def to_str(self): ''' Returns the string representation of the model ''' pass def __repr__(self): ''' For `print` and `pprint` ''' pass def __eq__(self, other): ''' Returns true if both objects are equal ''' pass def __ne__(self, other): ''' Returns true if both objects are not equal ''' pass
metrics:
total_program_units=31, total_doc_str=19
AvgCountLine=10, AvgCountLineBlank=1, AvgCountLineCode=3, AvgCountLineComment=5, AvgCyclomatic=1, CommentToCodeRatio=1.11
CountClassBase=1, CountClassCoupled=3, CountClassCoupledModified=0, CountClassDerived=0
CountDeclInstanceMethod=18, CountDeclInstanceVariable=7, CountDeclMethod=18, CountDeclMethodAll=18
CountLine=235, CountLineBlank=43, CountLineCode=91, CountLineCodeDecl=43, CountLineCodeExe=60, CountLineComment=101
CountStmt=55, CountStmtDecl=31, CountStmtExe=36
MaxCyclomatic=5, MaxInheritanceTree=1, MaxNesting=2, SumCyclomatic=23
id: 2605
repository_name: ARMmbed/mbed-cloud-sdk-python
file_path: ARMmbed_mbed-cloud-sdk-python/src/mbed_cloud/_backends/mds/models/async_id.py
class_name: mbed_cloud._backends.mds.models.async_id.AsyncID
human_written_code, then class_skeleton:
class AsyncID(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'async_response_id': 'str' } attribute_map = { 'async_response_id': 'async-response-id' } def __init__(self, async_response_id=None): """ AsyncID - a model defined in Swagger """ self._async_response_id = async_response_id self.discriminator = None @property def async_response_id(self): """ Gets the async_response_id of this AsyncID. Asynchronous response unique ID. :return: The async_response_id of this AsyncID. :rtype: str """ return self._async_response_id @async_response_id.setter def async_response_id(self, async_response_id): """ Sets the async_response_id of this AsyncID. Asynchronous response unique ID. :param async_response_id: The async_response_id of this AsyncID. :type: str """ self._async_response_id = async_response_id def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, AsyncID): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
class AsyncID(object): ''' NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. ''' def __init__(self, async_response_id=None): ''' AsyncID - a model defined in Swagger ''' pass @property def async_response_id(self): ''' Gets the async_response_id of this AsyncID. Asynchronous response unique ID. :return: The async_response_id of this AsyncID. :rtype: str ''' pass @async_response_id.setter def async_response_id(self): ''' Sets the async_response_id of this AsyncID. Asynchronous response unique ID. :param async_response_id: The async_response_id of this AsyncID. :type: str ''' pass def to_dict(self): ''' Returns the model properties as a dict ''' pass def to_str(self): ''' Returns the string representation of the model ''' pass def __repr__(self): ''' For `print` and `pprint` ''' pass def __eq__(self, other): ''' Returns true if both objects are equal ''' pass def __ne__(self, other): ''' Returns true if both objects are not equal ''' pass
metrics:
total_program_units=11, total_doc_str=9
AvgCountLine=9, AvgCountLineBlank=1, AvgCountLineCode=5, AvgCountLineComment=4, AvgCyclomatic=2, CommentToCodeRatio=0.89
CountClassBase=1, CountClassCoupled=3, CountClassCoupledModified=0, CountClassDerived=0
CountDeclInstanceMethod=8, CountDeclInstanceVariable=2, CountDeclMethod=8, CountDeclMethodAll=8
CountLine=105, CountLineBlank=18, CountLineCode=46, CountLineCodeDecl=18, CountLineCodeExe=35, CountLineComment=41
CountStmt=30, CountStmtDecl=16, CountStmtExe=21
MaxCyclomatic=5, MaxInheritanceTree=1, MaxNesting=2, SumCyclomatic=13
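AsyncID makes the name-mapping split easy to see: `to_dict()` keys use the Python names from `swagger_types`, while the hyphenated JSON names live in `attribute_map` and are applied elsewhere (by the generated API client's serializer in standard swagger-codegen layouts; that client is not part of this dump, so this is an assumption). A sketch:

```python
from mbed_cloud._backends.mds.models.async_id import AsyncID

resp = AsyncID(async_response_id="abc123")
print(resp.to_dict())         # {'async_response_id': 'abc123'}
print(AsyncID.attribute_map)  # {'async_response_id': 'async-response-id'}
```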
id: 2606
repository_name: ARMmbed/mbed-cloud-sdk-python
file_path: ARMmbed_mbed-cloud-sdk-python/src/mbed_cloud/_backends/mds/models/resources_data.py
class_name: mbed_cloud._backends.mds.models.resources_data.ResourcesData
human_written_code, then class_skeleton:
class ResourcesData(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'ct': 'str', '_if': 'str', 'obs': 'bool', 'path': 'str', 'rt': 'str' } attribute_map = { 'ct': 'ct', '_if': 'if', 'obs': 'obs', 'path': 'path', 'rt': 'rt' } def __init__(self, ct=None, _if=None, obs=None, path=None, rt=None): """ ResourcesData - a model defined in Swagger """ self._ct = ct self.__if = _if self._obs = obs self._path = path self._rt = rt self.discriminator = None @property def ct(self): """ Gets the ct of this ResourcesData. Content type. :return: The ct of this ResourcesData. :rtype: str """ return self._ct @ct.setter def ct(self, ct): """ Sets the ct of this ResourcesData. Content type. :param ct: The ct of this ResourcesData. :type: str """ self._ct = ct @property def _if(self): """ Gets the _if of this ResourcesData. Interface description that defines a name or URI that indicates how to interact with the target resource. It describes a generic interface type, such as a \"sensor\". :return: The _if of this ResourcesData. :rtype: str """ return self.__if @_if.setter def _if(self, _if): """ Sets the _if of this ResourcesData. Interface description that defines a name or URI that indicates how to interact with the target resource. It describes a generic interface type, such as a \"sensor\". :param _if: The _if of this ResourcesData. :type: str """ self.__if = _if @property def obs(self): """ Gets the obs of this ResourcesData. Whether the resource is observable or not (true/false). :return: The obs of this ResourcesData. :rtype: bool """ return self._obs @obs.setter def obs(self, obs): """ Sets the obs of this ResourcesData. Whether the resource is observable or not (true/false). :param obs: The obs of this ResourcesData. :type: bool """ self._obs = obs @property def path(self): """ Gets the path of this ResourcesData. Resource's URI path. :return: The path of this ResourcesData. :rtype: str """ return self._path @path.setter def path(self, path): """ Sets the path of this ResourcesData. Resource's URI path. :param path: The path of this ResourcesData. :type: str """ self._path = path @property def rt(self): """ Gets the rt of this ResourcesData. Application-specific resource type that describes this resource. [It is created by the client side application](/docs/current/connecting/resource-setup-in-mbed-cloud-client.html). Not meant to be a human-readable name for the resource. Multiple resource types may be included, they are separated by a space. :return: The rt of this ResourcesData. :rtype: str """ return self._rt @rt.setter def rt(self, rt): """ Sets the rt of this ResourcesData. Application-specific resource type that describes this resource. [It is created by the client side application](/docs/current/connecting/resource-setup-in-mbed-cloud-client.html). Not meant to be a human-readable name for the resource. Multiple resource types may be included, they are separated by a space. :param rt: The rt of this ResourcesData. 
:type: str """ self._rt = rt def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, ResourcesData): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
class ResourcesData(object): ''' NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. ''' def __init__(self, ct=None, _if=None, obs=None, path=None, rt=None): ''' ResourcesData - a model defined in Swagger ''' pass @property def ct(self): ''' Gets the ct of this ResourcesData. Content type. :return: The ct of this ResourcesData. :rtype: str ''' pass @ct.setter def ct(self): ''' Sets the ct of this ResourcesData. Content type. :param ct: The ct of this ResourcesData. :type: str ''' pass @property def _if(self): ''' Gets the _if of this ResourcesData. Interface description that defines a name or URI that indicates how to interact with the target resource. It describes a generic interface type, such as a "sensor". :return: The _if of this ResourcesData. :rtype: str ''' pass @_if.setter def _if(self): ''' Sets the _if of this ResourcesData. Interface description that defines a name or URI that indicates how to interact with the target resource. It describes a generic interface type, such as a "sensor". :param _if: The _if of this ResourcesData. :type: str ''' pass @property def obs(self): ''' Gets the obs of this ResourcesData. Whether the resource is observable or not (true/false). :return: The obs of this ResourcesData. :rtype: bool ''' pass @obs.setter def obs(self): ''' Sets the obs of this ResourcesData. Whether the resource is observable or not (true/false). :param obs: The obs of this ResourcesData. :type: bool ''' pass @property def path(self): ''' Gets the path of this ResourcesData. Resource's URI path. :return: The path of this ResourcesData. :rtype: str ''' pass @path.setter def path(self): ''' Sets the path of this ResourcesData. Resource's URI path. :param path: The path of this ResourcesData. :type: str ''' pass @property def rt(self): ''' Gets the rt of this ResourcesData. Application-specific resource type that describes this resource. [It is created by the client side application](/docs/current/connecting/resource-setup-in-mbed-cloud-client.html). Not meant to be a human-readable name for the resource. Multiple resource types may be included, they are separated by a space. :return: The rt of this ResourcesData. :rtype: str ''' pass @rt.setter def rt(self): ''' Sets the rt of this ResourcesData. Application-specific resource type that describes this resource. [It is created by the client side application](/docs/current/connecting/resource-setup-in-mbed-cloud-client.html). Not meant to be a human-readable name for the resource. Multiple resource types may be included, they are separated by a space. :param rt: The rt of this ResourcesData. :type: str ''' pass def to_dict(self): ''' Returns the model properties as a dict ''' pass def to_str(self): ''' Returns the string representation of the model ''' pass def __repr__(self): ''' For `print` and `pprint` ''' pass def __eq__(self, other): ''' Returns true if both objects are equal ''' pass def __ne__(self, other): ''' Returns true if both objects are not equal ''' pass
metrics:
total_program_units=27, total_doc_str=17
AvgCountLine=10, AvgCountLineBlank=1, AvgCountLineCode=4, AvgCountLineComment=5, AvgCyclomatic=1, CommentToCodeRatio=1.09
CountClassBase=1, CountClassCoupled=3, CountClassCoupledModified=0, CountClassDerived=0
CountDeclInstanceMethod=16, CountDeclInstanceVariable=6, CountDeclMethod=16, CountDeclMethodAll=16
CountLine=209, CountLineBlank=38, CountLineCode=82, CountLineCodeDecl=38, CountLineCodeExe=55, CountLineComment=89
CountStmt=50, CountStmtDecl=28, CountStmtExe=33
MaxCyclomatic=5, MaxInheritanceTree=1, MaxNesting=2, SumCyclomatic=21
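ResourcesData shows the reserved-word workaround: the JSON key `if` collides with a Python keyword, so the generator exposes it as `_if` and stores it on `self.__if`, which Python name-mangles to `_ResourcesData__if`. A sketch of the resulting behavior:

```python
from mbed_cloud._backends.mds.models.resources_data import ResourcesData

res = ResourcesData(path="/3/0", _if="sensor", obs=True)
print(res._if)                             # 'sensor', via the _if property
print("_ResourcesData__if" in vars(res))   # True: the mangled storage slot
print(res.to_dict()["_if"])                # to_dict keys follow swagger_types
```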
id: 2607
repository_name: ARMmbed/mbed-cloud-sdk-python
file_path: ARMmbed_mbed-cloud-sdk-python/src/mbed_cloud/_backends/device_directory/models/device_data_patch_request.py
class_name: mbed_cloud._backends.device_directory.models.device_data_patch_request.DeviceDataPatchRequest
human_written_code, then class_skeleton:
class DeviceDataPatchRequest(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'auto_update': 'bool', 'ca_id': 'str', 'custom_attributes': 'dict(str, str)', 'description': 'str', 'device_key': 'str', 'endpoint_name': 'str', 'endpoint_type': 'str', 'groups': 'list[str]', 'host_gateway': 'str', 'name': 'str', 'object': 'str' } attribute_map = { 'auto_update': 'auto_update', 'ca_id': 'ca_id', 'custom_attributes': 'custom_attributes', 'description': 'description', 'device_key': 'device_key', 'endpoint_name': 'endpoint_name', 'endpoint_type': 'endpoint_type', 'groups': 'groups', 'host_gateway': 'host_gateway', 'name': 'name', 'object': 'object' } def __init__(self, auto_update=None, ca_id=None, custom_attributes=None, description=None, device_key=None, endpoint_name=None, endpoint_type=None, groups=None, host_gateway=None, name=None, object=None): """ DeviceDataPatchRequest - a model defined in Swagger """ self._auto_update = auto_update self._ca_id = ca_id self._custom_attributes = custom_attributes self._description = description self._device_key = device_key self._endpoint_name = endpoint_name self._endpoint_type = endpoint_type self._groups = groups self._host_gateway = host_gateway self._name = name self._object = object self.discriminator = None @property def auto_update(self): """ Gets the auto_update of this DeviceDataPatchRequest. DEPRECATED: Mark this device for automatic firmware update. :return: The auto_update of this DeviceDataPatchRequest. :rtype: bool """ return self._auto_update @auto_update.setter def auto_update(self, auto_update): """ Sets the auto_update of this DeviceDataPatchRequest. DEPRECATED: Mark this device for automatic firmware update. :param auto_update: The auto_update of this DeviceDataPatchRequest. :type: bool """ self._auto_update = auto_update @property def ca_id(self): """ Gets the ca_id of this DeviceDataPatchRequest. The certificate issuer's ID. :return: The ca_id of this DeviceDataPatchRequest. :rtype: str """ return self._ca_id @ca_id.setter def ca_id(self, ca_id): """ Sets the ca_id of this DeviceDataPatchRequest. The certificate issuer's ID. :param ca_id: The ca_id of this DeviceDataPatchRequest. :type: str """ if ca_id is not None and len(ca_id) > 500: raise ValueError("Invalid value for `ca_id`, length must be less than or equal to `500`") self._ca_id = ca_id @property def custom_attributes(self): """ Gets the custom_attributes of this DeviceDataPatchRequest. Up to five custom key-value attributes. Note that keys cannot start with a number. Both keys and values are limited to 128 characters. :return: The custom_attributes of this DeviceDataPatchRequest. :rtype: dict(str, str) """ return self._custom_attributes @custom_attributes.setter def custom_attributes(self, custom_attributes): """ Sets the custom_attributes of this DeviceDataPatchRequest. Up to five custom key-value attributes. Note that keys cannot start with a number. Both keys and values are limited to 128 characters. :param custom_attributes: The custom_attributes of this DeviceDataPatchRequest. :type: dict(str, str) """ self._custom_attributes = custom_attributes @property def description(self): """ Gets the description of this DeviceDataPatchRequest. The description of the device. 
:return: The description of this DeviceDataPatchRequest. :rtype: str """ return self._description @description.setter def description(self, description): """ Sets the description of this DeviceDataPatchRequest. The description of the device. :param description: The description of this DeviceDataPatchRequest. :type: str """ if description is not None and len(description) > 2000: raise ValueError("Invalid value for `description`, length must be less than or equal to `2000`") self._description = description @property def device_key(self): """ Gets the device_key of this DeviceDataPatchRequest. The fingerprint of the device certificate. :return: The device_key of this DeviceDataPatchRequest. :rtype: str """ return self._device_key @device_key.setter def device_key(self, device_key): """ Sets the device_key of this DeviceDataPatchRequest. The fingerprint of the device certificate. :param device_key: The device_key of this DeviceDataPatchRequest. :type: str """ if device_key is not None and len(device_key) > 512: raise ValueError("Invalid value for `device_key`, length must be less than or equal to `512`") self._device_key = device_key @property def endpoint_name(self): """ Gets the endpoint_name of this DeviceDataPatchRequest. The endpoint name given to the device. :return: The endpoint_name of this DeviceDataPatchRequest. :rtype: str """ return self._endpoint_name @endpoint_name.setter def endpoint_name(self, endpoint_name): """ Sets the endpoint_name of this DeviceDataPatchRequest. The endpoint name given to the device. :param endpoint_name: The endpoint_name of this DeviceDataPatchRequest. :type: str """ if endpoint_name is not None and len(endpoint_name) > 64: raise ValueError("Invalid value for `endpoint_name`, length must be less than or equal to `64`") self._endpoint_name = endpoint_name @property def endpoint_type(self): """ Gets the endpoint_type of this DeviceDataPatchRequest. The endpoint type of the device. For example, the device is a gateway. :return: The endpoint_type of this DeviceDataPatchRequest. :rtype: str """ return self._endpoint_type @endpoint_type.setter def endpoint_type(self, endpoint_type): """ Sets the endpoint_type of this DeviceDataPatchRequest. The endpoint type of the device. For example, the device is a gateway. :param endpoint_type: The endpoint_type of this DeviceDataPatchRequest. :type: str """ if endpoint_type is not None and len(endpoint_type) > 64: raise ValueError("Invalid value for `endpoint_type`, length must be less than or equal to `64`") self._endpoint_type = endpoint_type @property def groups(self): """ Gets the groups of this DeviceDataPatchRequest. An array containing an id of each group this device belongs to :return: The groups of this DeviceDataPatchRequest. :rtype: list[str] """ return self._groups @groups.setter def groups(self, groups): """ Sets the groups of this DeviceDataPatchRequest. An array containing an id of each group this device belongs to :param groups: The groups of this DeviceDataPatchRequest. :type: list[str] """ self._groups = groups @property def host_gateway(self): """ Gets the host_gateway of this DeviceDataPatchRequest. The `endpoint_name` of the host gateway, if appropriate. :return: The host_gateway of this DeviceDataPatchRequest. :rtype: str """ return self._host_gateway @host_gateway.setter def host_gateway(self, host_gateway): """ Sets the host_gateway of this DeviceDataPatchRequest. The `endpoint_name` of the host gateway, if appropriate. :param host_gateway: The host_gateway of this DeviceDataPatchRequest. 
:type: str """ self._host_gateway = host_gateway @property def name(self): """ Gets the name of this DeviceDataPatchRequest. The name of the device. :return: The name of this DeviceDataPatchRequest. :rtype: str """ return self._name @name.setter def name(self, name): """ Sets the name of this DeviceDataPatchRequest. The name of the device. :param name: The name of this DeviceDataPatchRequest. :type: str """ if name is not None and len(name) > 128: raise ValueError("Invalid value for `name`, length must be less than or equal to `128`") self._name = name @property def object(self): """ Gets the object of this DeviceDataPatchRequest. The API resource entity. :return: The object of this DeviceDataPatchRequest. :rtype: str """ return self._object @object.setter def object(self, object): """ Sets the object of this DeviceDataPatchRequest. The API resource entity. :param object: The object of this DeviceDataPatchRequest. :type: str """ self._object = object def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ if not isinstance(other, DeviceDataPatchRequest): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
class DeviceDataPatchRequest(object): ''' NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. ''' def __init__(self, auto_update=None, ca_id=None, custom_attributes=None, description=None, device_key=None, endpoint_name=None, endpoint_type=None, groups=None, host_gateway=None, name=None, object=None): ''' DeviceDataPatchRequest - a model defined in Swagger ''' pass @property def auto_update(self): ''' Gets the auto_update of this DeviceDataPatchRequest. DEPRECATED: Mark this device for automatic firmware update. :return: The auto_update of this DeviceDataPatchRequest. :rtype: bool ''' pass @auto_update.setter def auto_update(self): ''' Sets the auto_update of this DeviceDataPatchRequest. DEPRECATED: Mark this device for automatic firmware update. :param auto_update: The auto_update of this DeviceDataPatchRequest. :type: bool ''' pass @property def ca_id(self): ''' Gets the ca_id of this DeviceDataPatchRequest. The certificate issuer's ID. :return: The ca_id of this DeviceDataPatchRequest. :rtype: str ''' pass @ca_id.setter def ca_id(self): ''' Sets the ca_id of this DeviceDataPatchRequest. The certificate issuer's ID. :param ca_id: The ca_id of this DeviceDataPatchRequest. :type: str ''' pass @property def custom_attributes(self): ''' Gets the custom_attributes of this DeviceDataPatchRequest. Up to five custom key-value attributes. Note that keys cannot start with a number. Both keys and values are limited to 128 characters. :return: The custom_attributes of this DeviceDataPatchRequest. :rtype: dict(str, str) ''' pass @custom_attributes.setter def custom_attributes(self): ''' Sets the custom_attributes of this DeviceDataPatchRequest. Up to five custom key-value attributes. Note that keys cannot start with a number. Both keys and values are limited to 128 characters. :param custom_attributes: The custom_attributes of this DeviceDataPatchRequest. :type: dict(str, str) ''' pass @property def description(self): ''' Gets the description of this DeviceDataPatchRequest. The description of the device. :return: The description of this DeviceDataPatchRequest. :rtype: str ''' pass @description.setter def description(self): ''' Sets the description of this DeviceDataPatchRequest. The description of the device. :param description: The description of this DeviceDataPatchRequest. :type: str ''' pass @property def device_key(self): ''' Gets the device_key of this DeviceDataPatchRequest. The fingerprint of the device certificate. :return: The device_key of this DeviceDataPatchRequest. :rtype: str ''' pass @device_key.setter def device_key(self): ''' Sets the device_key of this DeviceDataPatchRequest. The fingerprint of the device certificate. :param device_key: The device_key of this DeviceDataPatchRequest. :type: str ''' pass @property def endpoint_name(self): ''' Gets the endpoint_name of this DeviceDataPatchRequest. The endpoint name given to the device. :return: The endpoint_name of this DeviceDataPatchRequest. :rtype: str ''' pass @endpoint_name.setter def endpoint_name(self): ''' Sets the endpoint_name of this DeviceDataPatchRequest. The endpoint name given to the device. :param endpoint_name: The endpoint_name of this DeviceDataPatchRequest. :type: str ''' pass @property def endpoint_type(self): ''' Gets the endpoint_type of this DeviceDataPatchRequest. The endpoint type of the device. For example, the device is a gateway. :return: The endpoint_type of this DeviceDataPatchRequest. 
:rtype: str ''' pass @endpoint_type.setter def endpoint_type(self): ''' Sets the endpoint_type of this DeviceDataPatchRequest. The endpoint type of the device. For example, the device is a gateway. :param endpoint_type: The endpoint_type of this DeviceDataPatchRequest. :type: str ''' pass @property def groups(self): ''' Gets the groups of this DeviceDataPatchRequest. An array containing an id of each group this device belongs to :return: The groups of this DeviceDataPatchRequest. :rtype: list[str] ''' pass @groups.setter def groups(self): ''' Sets the groups of this DeviceDataPatchRequest. An array containing an id of each group this device belongs to :param groups: The groups of this DeviceDataPatchRequest. :type: list[str] ''' pass @property def host_gateway(self): ''' Gets the host_gateway of this DeviceDataPatchRequest. The `endpoint_name` of the host gateway, if appropriate. :return: The host_gateway of this DeviceDataPatchRequest. :rtype: str ''' pass @host_gateway.setter def host_gateway(self): ''' Sets the host_gateway of this DeviceDataPatchRequest. The `endpoint_name` of the host gateway, if appropriate. :param host_gateway: The host_gateway of this DeviceDataPatchRequest. :type: str ''' pass @property def name(self): ''' Gets the name of this DeviceDataPatchRequest. The name of the device. :return: The name of this DeviceDataPatchRequest. :rtype: str ''' pass @name.setter def name(self): ''' Sets the name of this DeviceDataPatchRequest. The name of the device. :param name: The name of this DeviceDataPatchRequest. :type: str ''' pass @property def object(self): ''' Gets the object of this DeviceDataPatchRequest. The API resource entity. :return: The object of this DeviceDataPatchRequest. :rtype: str ''' pass @object.setter def object(self): ''' Sets the object of this DeviceDataPatchRequest. The API resource entity. :param object: The object of this DeviceDataPatchRequest. :type: str ''' pass def to_dict(self): ''' Returns the model properties as a dict ''' pass def to_str(self): ''' Returns the string representation of the model ''' pass def __repr__(self): ''' For `print` and `pprint` ''' pass def __eq__(self, other): ''' Returns true if both objects are equal ''' pass def __ne__(self, other): ''' Returns true if both objects are not equal ''' pass
51
29
10
1
4
5
1
1.09
1
4
0
0
28
12
28
28
377
68
148
68
97
161
92
46
63
5
1
2
39
2,608
ARMmbed/mbed-cloud-sdk-python
ARMmbed_mbed-cloud-sdk-python/src/mbed_cloud/foundation/entities/device_update/enums.py
mbed_cloud.foundation.entities.device_update.enums.CampaignDeviceMetadataDeploymentStateEnum
class CampaignDeviceMetadataDeploymentStateEnum(BaseEnum): """Represents expected values of `CampaignDeviceMetadataDeploymentStateEnum` This is used by Entities in the "device_update" category. .. note:: If new values are added to the enum in the API they will be passed through unchanged by the SDK, but will not be on this list. If this occurs please update the SDK to the most recent version. """ DEPLOYED = "deployed" DEREGISTERED = "deregistered" FAILED_CONNECTOR_CHANNEL_UPDATE = "failed_connector_channel_update" MANIFESTREMOVED = "manifestremoved" PENDING = "pending" UPDATED_CONNECTOR_CHANNEL = "updated_connector_channel" values = frozenset( ( "deployed", "deregistered", "failed_connector_channel_update", "manifestremoved", "pending", "updated_connector_channel", ) )
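A short sketch of how this enum is typically consumed; the `state` value is illustrative.

# Usage sketch: membership is checked against the `values` frozenset.
state = "deployed"
if state in CampaignDeviceMetadataDeploymentStateEnum.values:
    print("known state:", state == CampaignDeviceMetadataDeploymentStateEnum.DEPLOYED)
else:
    # per the docstring, new API values pass through the SDK unchanged
    print("unrecognised state from a newer API:", state)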
class CampaignDeviceMetadataDeploymentStateEnum(BaseEnum): '''Represents expected values of `CampaignDeviceMetadataDeploymentStateEnum` This is used by Entities in the "device_update" category. .. note:: If new values are added to the enum in the API they will be passed through unchanged by the SDK, but will not be on this list. If this occurs please update the SDK to the most recent version. '''
1
1
0
0
0
0
0
0.35
1
0
0
0
0
0
0
1
27
4
17
8
16
6
8
8
7
0
2
0
0
2,609
ARMmbed/mbed-connector-api-python
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/ARMmbed_mbed-connector-api-python/mbed_connector_api/mbed_connector_api.py
mbed_connector_api.mbed_connector_api.connector.vividict
class vividict(dict): def __missing__(self, key): value = self[key] = type(self)() return value
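A minimal demonstration of the auto-vivifying behaviour:

# Missing keys are created on demand as nested vividicts, at any depth.
d = vividict()
d['async-responses']['some-id'] = {'status': 200}  # no KeyError on the first level
print(d['registrations'])  # {} - created by __missing__ on first access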
class vividict(dict): def __missing__(self, key): pass
2
0
3
0
3
0
1
0
1
1
0
0
1
0
1
28
4
0
4
3
2
0
4
3
2
1
2
0
1
2,610
ARMmbed/mbed-connector-api-python
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/ARMmbed_mbed-connector-api-python/mbed_connector_api/tests/Test_connector_live.py
Test_connector_live.test_connector_live
class test_connector_live: # this function is called before every test function in this class # Initialize the mbed connector object and start longpolling def setUp(self): self.connector = mbed_connector_api.connector(token) self.longPollThread = self.connector.startLongPolling() # self.connector.debug(True,level='INFO') # this function is called after every test function in this class # stop longpolling def tearDown(self): self.connector.stopLongPolling() # This function takes an async object and waits until it is completed def waitOnAsync(self, asyncObject): while not asyncObject.isDone(): pass return # test the getLimits function @timed(10) def test_getLimits(self): x = self.connector.getLimits() self.waitOnAsync(x) ok_(x.error == False, msg=("Error: "+x.error.error + " errType: "+x.error.errType if x.error else "")) # test the getConnectorVersion function @timed(10) def test_getConnectorVersion(self): x = self.connector.getConnectorVersion() self.waitOnAsync(x) ok_(x.error == False, msg=("Error: "+x.error.error + " errType: "+x.error.errType if x.error else "")) # test the getApiVersion function @timed(10) def test_getApiVersion(self): x = self.connector.getApiVersions() self.waitOnAsync(x) ok_(x.error == False, msg=("Error: "+x.error.error + " errType: "+x.error.errType if x.error else "")) # test the getEndpoints function @timed(10) def test_getEndpoints(self): x = self.connector.getEndpoints() self.waitOnAsync(x) ok_(x.error == False, msg=("Error: "+x.error.error + " errType: "+x.error.errType if x.error else "")) # test the getEndpoints function, subfunction typeFilter @timed(10) def test_getEndpointsByType(self): x = self.connector.getEndpoints(typeOfEndpoint="ci-endpoint") self.waitOnAsync(x) ok_(x.error == False, msg=("Error: "+x.error.error + " errType: "+x.error.errType if x.error else "")) # test the getResources function @timed(10) def test_getResources(self): x = self.connector.getResources(_ep) # use first endpoint returned self.waitOnAsync(x) ok_(x.error == False, msg=("Error: "+x.error.error + " errType: "+x.error.errType if x.error else "")) @timed(10) def test_getResourceValue(self): x = self.connector.getResourceValue(_ep, _res) self.waitOnAsync(x) ok_(x.error == False, msg=("Error: "+x.error.error + " errType: "+x.error.errType if x.error else "")) @timed(10) def test_postResource(self): # test POST without data x = self.connector.postResource(_ep, _res) self.waitOnAsync(x) ok_(x.error == False, msg=("Error: "+x.error.error + " errType: "+x.error.errType if x.error else "")) # test POST with data x = self.connector.postResource(_ep, _res, "Hello World from the CI") self.waitOnAsync(x) ok_(x.error == False, msg=("Error: "+x.error.error + " errType: "+x.error.errType if x.error else "")) @timed(10) def test_deleteEndpoint(self): # TODO return @timed(10) def test_putResourceSubscription(self): x = self.connector.putResourceSubscription(_ep, _res) self.waitOnAsync(x) ok_(x.error == False, msg=("Error: "+x.error.error + " errType: "+x.error.errType if x.error else "")) @timed(10) def test_getEndpointSubscriptions(self): x = self.connector.putResourceSubscription(_ep, _res) self.waitOnAsync(x) assert x.error == False x = self.connector.getEndpointSubscriptions(_ep) self.waitOnAsync(x) ok_(x.error == False, msg=("Error: "+x.error.error + " errType: "+x.error.errType if x.error else "")) @timed(10) def test_getResourceSubscription(self): x = self.connector.putResourceSubscription(_ep, _res) self.waitOnAsync(x) assert x.error == False x = 
self.connector.getResourceSubscription(_ep, _res) self.waitOnAsync(x) ok_(x.error == False, msg=("Error: "+x.error.error + " errType: "+x.error.errType if x.error else "")) @timed(10) def test_deleteResourceSubscription(self): # TODO, may need to first subscribe, then unsubscribe? x = self.connector.putResourceSubscription(_ep, _res) self.waitOnAsync(x) assert x.error == False x = self.connector.deleteResourceSubscription(_ep, _res) self.waitOnAsync(x) ok_(x.error == False, msg=("Error: "+x.error.error + " errType: "+x.error.errType if x.error else "")) @timed(10) def test_deleteEndpointSubscriptions(self): x = self.connector.putResourceSubscription(_ep, _res) self.waitOnAsync(x) assert x.error == False x = self.connector.deleteEndpointSubscriptions(_ep) self.waitOnAsync(x) ok_(x.error == False, msg=("Error: "+x.error.error + " errType: "+x.error.errType if x.error else "")) @timed(10) def test_deleteAllSubscriptions(self): x = self.connector.deleteAllSubscriptions() self.waitOnAsync(x) ok_(x.error == False, msg=("Error: "+x.error.error + " errType: "+x.error.errType if x.error else "")) @timed(10) def test_putPreSubscription(self): # check subscription is put-able j = [{ 'endpoint-name': "node-001", 'resource-path': ["/dev"]}, { 'endpoint-type': "Light", 'resource-path': ["/sen/*"]}, { 'resource-path': ["/dev/temp", "/dev/hum"] }] x = self.connector.putPreSubscription(j) self.waitOnAsync(x) ok_(x.error == False, msg=("Error: "+x.error.error + " errType: "+x.error.errType if x.error else "")) @timed(10) def test_getPreSubscription(self): # Check subscription put can be retrieved j = [{ 'endpoint-name': "node-001", 'resource-path': ["/dev"]}, { 'endpoint-type': "Light", 'resource-path': ["/sen/*"]}, { 'resource-path': ["/dev/temp", "/dev/hum"] }] e = self.connector.putPreSubscription(j) self.waitOnAsync(e) ok_(e.error == False, msg="There was an error putting the pre-subscription ") e = self.connector.getPreSubscription() self.waitOnAsync(e) ok_(e.error == False, msg="There was an error getting the pre-subscription ") assert e.result == j @timed(10) def test_putCallbackURL(self): # TODO return @timed(10) def test_getCallbackURL(self): # TODO return @timed(10) def test_deleteCallbackURL(self): # TODO return
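Every test above follows the same call / wait / assert pattern; here is a standalone sketch of that pattern (the token, endpoint, and resource names are placeholders).

# Sketch of the pattern the live tests rely on; token/_ep/_res are placeholders.
import mbed_connector_api

connector = mbed_connector_api.connector("<api-token>")
connector.startLongPolling()  # async responses complete via the notification channel
x = connector.getResourceValue("<endpoint>", "<resource-path>")
while not x.isDone():  # same busy-wait as waitOnAsync()
    pass
assert x.error == False, (x.error.error if x.error else "")
connector.stopLongPolling()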
class test_connector_live: def setUp(self): pass def tearDown(self): pass def waitOnAsync(self, asyncObject): pass @timed(10) def test_getLimits(self): pass @timed(10) def test_getConnectorVersion(self): pass @timed(10) def test_getApiVersion(self): pass @timed(10) def test_getEndpoints(self): pass @timed(10) def test_getEndpointsByType(self): pass @timed(10) def test_getResources(self): pass @timed(10) def test_getResourceValue(self): pass @timed(10) def test_postResource(self): pass @timed(10) def test_deleteEndpoint(self): pass @timed(10) def test_putResourceSubscription(self): pass @timed(10) def test_getEndpointSubscriptions(self): pass @timed(10) def test_getResourceSubscription(self): pass @timed(10) def test_deleteResourceSubscription(self): pass @timed(10) def test_deleteEndpointSubscriptions(self): pass @timed(10) def test_deleteAllSubscriptions(self): pass @timed(10) def test_putPreSubscription(self): pass @timed(10) def test_getPreSubscription(self): pass @timed(10) def test_putCallbackURL(self): pass @timed(10) def test_getCallbackURL(self): pass @timed(10) def test_deleteCallbackURL(self): pass
44
0
6
0
5
0
2
0.16
0
1
1
0
23
2
23
23
182
22
139
64
95
22
103
44
79
3
0
1
40
2,611
ARMmbed/mbed-connector-api-python
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/ARMmbed_mbed-connector-api-python/mbed_connector_api/connectorError.py
mbed_connector_api.connectorError.response_codes
class response_codes: """ Error class for connector L1 library. Contains the error type, and error string. :var status_code: status code returned by connector request :var errType: combination of parent calling function and status code :var error: error given by the https://docs.mbed.com/docs/mbed-device-connector-web-interface docs. """ # dictionary of all possible error strings __errList = { # GET Errors # GET / "get_mdc_version200": "Successful response containing version of mbed Device Connector and recent REST API version it supports.", # GET /rest-versions "get_rest_version200": "Successful response with a list of version(s) supported by the server.", # GET /endpoints "get_endpoints200": "Successful response with a list of endpoints.", # GET /endpoint/{endpoint-name} "get_resources200": "Successful response with a list of metainformation.", "get_resources404": "Endpoint not found.", # Resource Errors # GET, PUT, POST, DELETE /endpoints/{endpoint-name}/{resource-path} "resource200": "Successful GET, PUT, DELETE operation.", "resource201": "Successful POST operation.", "resource202": "Accepted. Asynchronous response ID.", "resource204": "Non confirmable request made, this may or may not reach the endpoint. No Content given as response.", "resource205": "No cache available for resource.", "resource404": "Requested endpoint's resource is not found.", "resource409": "Conflict. Endpoint is in queue mode and synchronous request can not be made. If noResp=true, the request is not supported.", "resource410": "Gone. Endpoint not found.", "resource412": "Request payload has been incomplete.", "resource413": "Precondition failed.", "resource415": "Media type is not supported by the endpoint.", "resource429": "Cannot make a request at the moment, already ongoing other request for this endpoint or queue is full (for endpoints in queue mode).", "resource502": "TCP or TLS connection to endpoint is not established.", "resource503": "Operation cannot be executed because endpoint is currently unavailable.", "resource504": "Operation cannot be executed due to a time-out from the endpoint.", # Subscription / Notification Errors # PUT /subscriptions/{endpoint-name}/{resource-path} "subscribe200": "Successfully subscribed.", "subscribe202": "Accepted. Asynchronous response ID.", "subscribe404": "Endpoint or its resource not found.", "subscribe412": "Cannot make a subscription for a non-observable resource.", "subscribe413": "Cannot make a subscription due to failed precondition.", "subscribe415": "Media type is not supported by the endpoint.", "subscribe429": "Cannot make subscription request at the moment due to already ongoing other request for this endpoint or (for endpoints in queue mode) queue is full or queue was cleared because endpoint made full registration.", "subscribe502": "Subscription failed.", "subscribe503": "Subscription could not be established because endpoint is currently unavailable.", "subscribe504": "Subscription could not be established due to a time-out from the endpoint.", # DELETE /subscriptions/{endpoint-name}/{resource-path} # DELETE /subscriptions "unsubscribe204": "Successfully removed subscription.", "unsubscribe404": "Endpoint or endpoint's resource not found.", # GET /subscriptions/{endpoint-name}/{resource-path} "get_resource_subscription200": "Resource is subscribed.", "get_resource_subscription404": "Resource is not subscribed.", # GET /subscriptions/{endpoint-name} "get_endpoint_subscription200": "List of subscribed resources.", "get_endpoint_subscription404": "Endpoint not found or there are no subscriptions for that endpoint.", # DELETE /subscriptions/{endpoint-name} "delete_endpoint_subscription204": "Successfully removed.", "delete_endpoint_subscription404": "Endpoint not found.", # PUT /subscriptions - Presubscription Data "presubscription204": "Successfully set pre-subscription data.", "presubscription400": "Malformed content.", # Callback # PUT /notification/callback "put_callback_url204": "Successfully subscribed.", "put_callback_url400": "Given URL is not accessible.", # GET /notification/callback "get_callback_url200": "URL found.", "get_callback_url404": "Callback URL does not exist.", # DELETE /notification/callback "delete_callback_url204": "Successfully removed.", "delete_callback_url404": "Callback URL does not exist.", # Long polling # GET /notification/pull "longpoll200": "OK.", "longpoll204": "No new notifications.", # Limits # GET /limits "limit200": "OK.", } # set the error type by querying the __errList def _setError(self, errType): if errType in self.__errList.keys(): return self.__errList[errType] else: return "ERROR: Unknown error." def __init__(self, errParent, status_code): self.status_code = status_code self.errType = str(errParent)+str(status_code) self.error = self._setError(self.errType)
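A quick illustration of how the lookup behaves, using values taken from the __errList above:

# errType is the caller context concatenated with the HTTP status code.
err = response_codes("resource", 404)
print(err.errType)  # "resource404"
print(err.error)    # "Requested endpoint's resource is not found."
print(response_codes("resource", 999).error)  # "ERROR: Unknown error."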
class response_codes: ''' Error class for connector L1 library. Contains the error type, and error string. :var status_code: status code returned by connector request :var errType: combination of parent calling function and status code :var error: error given by the https://docs.mbed.com/docs/mbed-device-connector-web-interface docs. ''' def _setError(self, errType): pass def __init__(self, errParent, status_code): pass
3
1
5
0
5
0
2
0.52
0
1
0
0
2
3
2
2
112
19
61
7
58
32
10
7
7
2
0
1
3
2,612
ARMmbed/mbed-connector-api-python
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/ARMmbed_mbed-connector-api-python/mbed_connector_api/mbed_connector_api.py
mbed_connector_api.mbed_connector_api.asyncResult
class asyncResult: """ AsyncResult objects returned by all mbed_connector_api library calls. Make sure to check the ``.isDone()`` function and the ``.error`` variable before accessing the ``.result`` variable. :var error: False if no error, if error then populated by :class:'connectorError.response_codes` object :var result: initial value: {} :var status_code: status code returned from REST request :var raw_data: raw returned object from the request """ def isDone(self): """ :returns: True / False based on completion of async operation :rtype: bool """ return self.is_done def fill(self, data): if type(data) == r.models.Response: try: self.result = json.loads(data.content) except ValueError: self.result = [] if isinstance(data.content, str): # string handler self.result = data.content elif isinstance(data.content, int): # int handler self.log.debug( "data returned is an integer, not sure what to do with that") else: # all other handler self.log.debug( "unhandled data type, type of content : %s" % type(data.content)) self.status_code = data.status_code self.raw_data = data.content else: # error self.log.error("type not found : %s" % type(data)) return def __init__(self, callback=""): self.log = logging.getLogger(name="mdc-api-logger") # logger is used by fill() self.is_done = False self.result = {} self.status_code = '' self.raw_data = {} self.callback = callback self.next_step = "" self.extra = {} self.error = "" self.endpoint = "" self.resource = ""
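A small sketch of the callback flow this class supports; filling the object by hand here only stands in for what the connector's async-response handler does.

# Hypothetical completion flow: the async handler fills the object,
# then invokes the stored callback with it.
def on_done(res):
    print("done:", res.status_code, "error:", bool(res.error))

result = asyncResult(callback=on_done)
result.status_code = 200
result.error = False
result.is_done = True
result.callback(result)  # prints: done: 200 error: False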
class asyncResult: ''' AsyncResult objects returned by all mbed_connector_api library calls. Make sure to check the ``.isDone()`` function and the ``.error`` variable before accessing the ``.result`` variable. :var error: False if no error, if error then populated by :class:'connectorError.response_codes` object :var result: initial value: {} :var status_code: status code returned from REST request :var raw_data: raw returned object form the request ''' def isDone(self): ''' :returns: True / False based on completion of async operation :rtype: bool ''' pass def fill(self, data): pass def __init__(self, callback=""): pass
4
2
12
0
10
3
2
0.52
0
3
0
0
3
10
3
3
49
5
31
14
27
16
28
14
24
5
0
3
7
2,613
ARMmbed/mbed-connector-api-python
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/ARMmbed_mbed-connector-api-python/mbed_connector_api/mbed_connector_api.py
mbed_connector_api.mbed_connector_api.connector
class connector: """ Interface class to use the connector.mbed.com REST API. This class will by default handle asynchronous events. All functions return :class:'.asyncResult' objects """ # Return connector version number and recent rest API version number it supports def getConnectorVersion(self): """ GET the current Connector version. :returns: asyncResult object, populates error and result fields :rtype: asyncResult """ result = asyncResult() data = self._getURL("/", versioned=False) result.fill(data) if data.status_code == 200: result.error = False else: result.error = response_codes("get_mdc_version", data.status_code) result.is_done = True return result # Return API version of connector def getApiVersions(self): """ Get the REST API versions that connector accepts. :returns: :class:asyncResult object, populates error and result fields :rtype: asyncResult """ result = asyncResult() data = self._getURL("/rest-versions", versioned=False) result.fill(data) if data.status_code == 200: result.error = False else: result.error = response_codes("get_rest_version", data.status_code) result.is_done = True return result # Returns metadata about connector limits as JSON blob def getLimits(self): """return limits of account in async result object. :returns: asyncResult object, populates error and result fields :rtype: asyncResult """ result = asyncResult() data = self._getURL("/limits") result.fill(data) if data.status_code == 200: result.error = False else: result.error = response_codes("limit", data.status_code) result.is_done = True return result # return json list of all endpoints. # optional type field can be used to match all endpoints of a certain type. def getEndpoints(self, typeOfEndpoint=""): """ Get list of all endpoints on the domain. :param str typeOfEndpoint: Optional filter endpoints returned by type :return: list of all endpoints :rtype: asyncResult """ q = {} result = asyncResult() if typeOfEndpoint: q['type'] = typeOfEndpoint result.extra['type'] = typeOfEndpoint data = self._getURL("/endpoints", query=q) result.fill(data) if data.status_code == 200: result.error = False else: result.error = response_codes("get_endpoints", data.status_code) result.is_done = True return result # return json list of all resources on an endpoint def getResources(self, ep, noResp=False, cacheOnly=False): """ Get list of resources on an endpoint. :param str ep: Endpoint to get the resources of :param bool noResp: Optional - specify no response necessary from endpoint :param bool cacheOnly: Optional - get results from cache on connector, do not wake up endpoint :return: list of resources :rtype: asyncResult """ # load query params if set to other than defaults q = {} result = asyncResult() result.endpoint = ep if noResp or cacheOnly: q['noResp'] = 'true' if noResp == True else 'false' q['cacheOnly'] = 'true' if cacheOnly == True else 'false' # make query self.log.debug("ep = %s, query=%s", ep, q) data = self._getURL("/endpoints/"+ep, query=q) result.fill(data) # check success of call if data.status_code == 200: # success result.error = False self.log.debug("getResources success, status_code = `%s`, content = `%s`", str( data.status_code), data.content) else: # fail result.error = response_codes("get_resources", data.status_code) self.log.debug("getResources failed with error code `%s`" % str(data.status_code)) result.is_done = True return result # return async object def getResourceValue(self, ep, res, cbfn="", noResp=False, cacheOnly=False): """ Get value of a specific resource on a specific endpoint. 
:param str ep: name of endpoint :param str res: name of resource :param fnptr cbfn: Optional - callback function to be called on completion :param bool noResp: Optional - specify no response necessary from endpoint :param bool cacheOnly: Optional - get results from cache on connector, do not wake up endpoint :return: value of the resource, usually a string :rtype: asyncResult """ q = {} # set callback fn for use in async handler result = asyncResult(callback=cbfn) result.endpoint = ep result.resource = res if noResp or cacheOnly: q['noResp'] = 'true' if noResp == True else 'false' q['cacheOnly'] = 'true' if cacheOnly == True else 'false' # make query data = self._getURL("/endpoints/"+ep+res, query=q) result.fill(data) if data.status_code == 200: # immediate success result.error = False result.is_done = True if cbfn: cbfn(result) return result elif data.status_code == 202: self.database['async-responses'][json.loads( data.content)["async-response-id"]] = result else: # fail result.error = response_codes("resource", data.status_code) result.is_done = True result.raw_data = data.content result.status_code = data.status_code return result # return async object def putResourceValue(self, ep, res, data, cbfn=""): """ Put a value to a resource on an endpoint :param str ep: name of endpoint :param str res: name of resource :param str data: data to send via PUT :param fnptr cbfn: Optional - callback function to call when operation is completed :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult """ result = asyncResult(callback=cbfn) result.endpoint = ep result.resource = res data = self._putURL("/endpoints/"+ep+res, payload=data) if data.status_code == 200: # immediate success result.error = False result.is_done = True elif data.status_code == 202: self.database['async-responses'][json.loads( data.content)["async-response-id"]] = result else: result.error = response_codes("resource", data.status_code) result.is_done = True result.raw_data = data.content result.status_code = data.status_code return result # return async object def postResource(self, ep, res, data="", cbfn=""): ''' POST data to a resource on an endpoint. :param str ep: name of endpoint :param str res: name of resource :param str data: Optional - data to send via POST :param fnptr cbfn: Optional - callback function to call when operation is completed :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' result = asyncResult(callback=cbfn) result.endpoint = ep result.resource = res data = self._postURL("/endpoints/"+ep+res, data) if data.status_code == 201: # immediate success result.error = False result.is_done = True elif data.status_code == 202: self.database['async-responses'][json.loads( data.content)["async-response-id"]] = result else: result.error = response_codes("resource", data.status_code) result.is_done = True result.raw_data = data.content result.status_code = data.status_code return result # return async object def deleteEndpoint(self, ep, cbfn=""): ''' Send DELETE message to an endpoint. :param str ep: name of endpoint :param fnptr cbfn: Optional - callback function to call when operation is completed :return: successful ``.status_code`` / ``.is_done``. 
Check the ``.error`` :rtype: asyncResult ''' result = asyncResult(callback=cbfn) result.endpoint = ep data = self._deleteURL("/endpoints/"+ep) if data.status_code == 200: # immediate success result.error = False result.is_done = True elif data.status_code == 202: self.database['async-responses'][json.loads( data.content)["async-response-id"]] = result else: result.error = response_codes("resource", data.status_code) result.is_done = True result.raw_data = data.content result.status_code = data.status_code return result # subscribe to endpoint/resource, the cbfn is given an asynch object that # represents the result. it is up to the user to implement the notification # channel callback in a higher level library. def putResourceSubscription(self, ep, res, cbfn=""): ''' Subscribe to changes in a specific resource ``res`` on an endpoint ``ep`` :param str ep: name of endpoint :param str res: name of resource :param fnptr cbfn: Optional - callback function to call when operation is completed :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' result = asyncResult(callback=cbfn) result.endpoint = ep result.resource = res data = self._putURL("/subscriptions/"+ep+res) if data.status_code == 200: # immediate success result.error = False result.is_done = True elif data.status_code == 202: self.database['async-responses'][json.loads( data.content)["async-response-id"]] = result else: result.error = response_codes("subscribe", data.status_code) result.is_done = True result.raw_data = data.content result.status_code = data.status_code return result def deleteEndpointSubscriptions(self, ep): ''' Delete all subscriptions on specified endpoint ``ep`` :param str ep: name of endpoint :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' result = asyncResult() result.endpoint = ep data = self._deleteURL("/subscriptions/"+ep) if data.status_code == 204: # immediate success result.error = False result.is_done = True else: result.error = response_codes( "delete_endpoint_subscription", data.status_code) result.is_done = True result.raw_data = data.content result.status_code = data.status_code return result def deleteResourceSubscription(self, ep, res): ''' Delete subscription to a resource ``res`` on an endpoint ``ep`` :param str ep: name of endpoint :param str res: name of resource :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' result = asyncResult() result.endpoint = ep result.resource = res data = self._deleteURL("/subscriptions/"+ep+res) if data.status_code == 204: # immediate success result.error = False result.is_done = True else: result.error = response_codes("unsubscribe", data.status_code) result.is_done = True result.raw_data = data.content result.status_code = data.status_code return result def deleteAllSubscriptions(self): ''' Delete all subscriptions on the domain (all endpoints, all resources) :return: successful ``.status_code`` / ``.is_done``. 
Check the ``.error`` :rtype: asyncResult ''' result = asyncResult() data = self._deleteURL("/subscriptions/") if data.status_code == 204: # immediate success result.error = False result.is_done = True else: result.error = response_codes("unsubscribe", data.status_code) result.is_done = True result.raw_data = data.content result.status_code = data.status_code return result # return async object # result field is a string def getEndpointSubscriptions(self, ep): ''' Get list of all subscriptions on a given endpoint ``ep`` :param str ep: name of endpoint :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' result = asyncResult() result.endpoint = ep data = self._getURL("/subscriptions/"+ep) if data.status_code == 200: # immediate success result.error = False result.is_done = True result.result = data.content else: result.error = response_codes("unsubscribe", data.status_code) result.is_done = True result.raw_data = data.content result.status_code = data.status_code return result # return async object # result field is a string def getResourceSubscription(self, ep, res): ''' Get list of all subscriptions for a resource ``res`` on an endpoint ``ep`` :param str ep: name of endpoint :param str res: name of resource :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' result = asyncResult() result.endpoint = ep result.resource = res data = self._getURL("/subscriptions/"+ep+res) if data.status_code == 200: # immediate success result.error = False result.is_done = True result.result = data.content else: result.error = response_codes("unsubscribe", data.status_code) result.is_done = True result.raw_data = data.content result.status_code = data.status_code return result def putPreSubscription(self, JSONdata): ''' Set pre-subscription rules for all endpoints / resources on the domain. This can be useful for all current and future endpoints/resources. :param json JSONdata: data to use as pre-subscription data. Wildcards are permitted :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' if isinstance(JSONdata, str) and self._isJSON(JSONdata): self.log.warn( "pre-subscription data was a string, converting to a list : %s", JSONdata) JSONdata = json.loads(JSONdata) # convert json string to list if not (isinstance(JSONdata, list) and self._isJSON(JSONdata)): self.log.error( "pre-subscription data is not valid. Please make sure it is a valid JSON list") result = asyncResult() data = self._putURL("/subscriptions", JSONdata, versioned=False) if data.status_code == 204: # immediate success with no response result.error = False result.is_done = True result.result = [] else: result.error = response_codes("presubscription", data.status_code) result.is_done = True result.raw_data = data.content result.status_code = data.status_code return result def getPreSubscription(self): ''' Get the current pre-subscription data from connector :return: JSON that represents the pre-subscription data in the ``.result`` field :rtype: asyncResult ''' result = asyncResult() data = self._getURL("/subscriptions") if data.status_code == 200: # immediate success result.error = False result.is_done = True result.result = data.json() else: result.error = response_codes("presubscription", data.status_code) result.is_done = True result.raw_data = data.content result.status_code = data.status_code return result def putCallback(self, url, headers=""): ''' Set the callback URL. 
To be used in place of LongPolling when deploying a webapp. **note**: make sure you set up a callback URL in your web app :param str url: complete url, including port, where the callback url is located :param str headers: Optional - Headers to have Connector send back with all calls :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' result = asyncResult() payloadToSend = {"url": url} if headers: payloadToSend['headers'] = headers data = self._putURL(url="/notification/callback", payload=payloadToSend, versioned=False) if data.status_code == 204: # immediate success result.error = False result.result = data.content else: result.error = response_codes("put_callback_url", data.status_code) result.raw_data = data.content result.status_code = data.status_code result.is_done = True return result def getCallback(self): ''' Get the callback URL currently registered with Connector. :return: callback url in ``.result``, error if applicable in ``.error`` :rtype: asyncResult ''' result = asyncResult() data = self._getURL("/notification/callback", versioned=False) if data.status_code == 200: # immediate success result.error = False result.result = data.json() else: result.error = response_codes("get_callback_url", data.status_code) result.raw_data = data.content result.status_code = data.status_code result.is_done = True return result def deleteCallback(self): ''' Delete the Callback URL currently registered with Connector. :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' result = asyncResult() data = self._deleteURL("/notification/callback") if data.status_code == 204: # immediate success result.result = data.content result.error = False else: result.error = response_codes( "delete_callback_url", data.status_code) result.raw_data = data.content result.status_code = data.status_code result.is_done = True return result # set a specific handler to call the cbfn def setHandler(self, handler, cbfn): ''' Register a handler for a particular notification type. These are the types of notifications that are acceptable. | 'async-responses' | 'registrations-expired' | 'de-registrations' | 'reg-updates' | 'registrations' | 'notifications' :param str handler: name of the notification type :param fnptr cbfn: function to pass the notification channel messages to. :return: Nothing. ''' if handler == "async-responses": self.async_responses_callback = cbfn elif handler == "registrations-expired": self.registrations_expired_callback = cbfn elif handler == "de-registrations": self.de_registrations_callback = cbfn elif handler == "reg-updates": self.reg_updates_callback = cbfn elif handler == "registrations": self.registrations_callback = cbfn elif handler == "notifications": self.notifications_callback = cbfn else: self.log.warn( "'%s' is not a legitimate notification channel option. Please check your spelling.", handler) # this function needs to spin off a thread that is constantly polling, # should match asynch ID's to values and call their function def startLongPolling(self, noWait=False): ''' Start LongPolling Connector for notifications. :param bool noWait: Optional - use the cached values in connector, do not wait for the device to respond :return: Thread of constantly running LongPoll. To be used to kill the thread if necessary. :rtype: pythonThread ''' # check Asynch ID's against internal database of ID's # Call return function with the value given, maybe decode from base64? 
wait = '' if (noWait == True): wait = "?noWait=true" # check that there isn't another thread already running, only one longPolling instance per is acceptable if (self.longPollThread.isAlive()): self.log.warn("LongPolling is already active.") else: # start infinite longpolling thread self._stopLongPolling.clear() self.longPollThread.start() self.log.info("Spun off LongPolling thread") # return thread instance so user can manually intervene if necessary return self.longPollThread # stop longpolling by switching the flag off. def stopLongPolling(self): ''' Stop LongPolling thread :return: none ''' if (self.longPollThread.isAlive()): self._stopLongPolling.set() self.log.debug("set stop longpolling flag") else: self.log.warn("LongPolling thread already stopped") return # Thread to constantly long poll connector and process the feedback. # TODO: pass wait / noWait on to long polling thread, currently the user can set it but it doesn't actually affect anything. def longPoll(self, versioned=True): self.log.debug("LongPolling Started, self.address = %s" % self.address) while (not self._stopLongPolling.is_set()): try: if versioned: data = r.get(self.address+self.apiVersion+'/notification/pull', headers={ "Authorization": "Bearer "+self.bearer, "Connection": "keep-alive", "accept": "application/json"}) else: data = r.get(self.address+'/notification/pull', headers={ "Authorization": "Bearer "+self.bearer, "Connection": "keep-alive", "accept": "application/json"}) self.log.debug("Longpoll Returned, len = %d, statuscode=%d", len( data.text), data.status_code) # process callbacks if data.status_code == 200: # 204 means no content, do nothing self.handler(data.content) self.log.debug("Longpoll data = "+data.content) except Exception: self.log.error( "longPolling had an issue and threw an exception") ex_type, ex, tb = sys.exc_info() traceback.print_tb(tb) self.log.error(sys.exc_info()) del tb self.log.info("Killing Longpolling Thread") # parse the notification channel responses and call appropriate handlers def handler(self, data): ''' Function to handle notification data as part of Callback URL handler. :param str data: data posted to Callback URL by connector. 
:return: nothing ''' if isinstance(data, r.models.Response): self.log.debug("data is request object = %s", str(data.content)) data = data.content elif isinstance(data, str): self.log.info("data is json string with len %d", len(data)) if len(data) == 0: self.log.warn( "Handler received data of 0 length, exiting handler.") return else: self.log.error( "Input is not valid request object or json string : %s" % str(data)) return False try: data = json.loads(data) if 'async-responses' in data.keys(): self.async_responses_callback(data) if 'notifications' in data.keys(): self.notifications_callback(data) if 'registrations' in data.keys(): self.registrations_callback(data) if 'reg-updates' in data.keys(): self.reg_updates_callback(data) if 'de-registrations' in data.keys(): self.de_registrations_callback(data) if 'registrations-expired' in data.keys(): self.registrations_expired_callback(data) except Exception: self.log.error("handle router had an issue and threw an exception") ex_type, ex, tb = sys.exc_info() traceback.print_tb(tb) self.log.error(sys.exc_info()) del tb # Turn on / off debug messages based on the onOff variable def debug(self, onOff, level='DEBUG'): ''' Enable / Disable debugging :param bool onOff: turn debugging on / off :return: none ''' if onOff: if level == 'DEBUG': self.log.setLevel(logging.DEBUG) self._ch.setLevel(logging.DEBUG) self.log.debug("Debugging level DEBUG enabled") elif level == "INFO": self.log.setLevel(logging.INFO) self._ch.setLevel(logging.INFO) self.log.info("Debugging level INFO enabled") elif level == "WARN": self.log.setLevel(logging.WARN) self._ch.setLevel(logging.WARN) self.log.warn("Debugging level WARN enabled") elif level == "ERROR": self.log.setLevel(logging.ERROR) self._ch.setLevel(logging.ERROR) self.log.error("Debugging level ERROR enabled") else: self.log.setLevel(logging.ERROR) self._ch.setLevel(logging.ERROR) self.log.error( "Unrecognized debug level `%s`, set to default level `ERROR` instead", level) # internal async-requests handler. # data input is json data def _asyncHandler(self, data): try: responses = data['async-responses'] for entry in responses: if entry['id'] in self.database['async-responses'].keys(): # get the asynch object out of database result = self.database['async-responses'].pop(entry['id']) # fill in async-result object if 'error' in entry.keys(): # error happened, handle it result.error = response_codes( 'async-responses-handler', entry['status']) result.error.error = entry['error'] result.is_done = True if result.callback: result.callback(result) else: return result else: # everything is good, fill it out result.result = b64decode(entry['payload']) result.raw_data = entry result.status = entry['status'] result.error = False for thing in entry.keys(): result.extra[thing] = entry[thing] result.is_done = True # call associated callback function if result.callback: result.callback(result) else: self.log.warn("No callback function given") else: # TODO : object not found in asynch database self.log.warn( "No asynch entry for '%s' found in database", entry['id']) except Exception: # TODO error handling here self.log.error( "Bad data encountered and failed to elegantly handle it. ") ex_type, ex, tb = sys.exc_info() traceback.print_tb(tb) self.log.error(sys.exc_info()) del tb return # default handler for notifications. User should implement all of these in # an L2 implementation or in their webapp. 
# @input data is a dictionary def _defaultHandler(self, data): if 'async-responses' in data.keys(): self.log.info("async-responses detected : len = %d", len(data["async-responses"])) self.log.debug(data["async-responses"]) if 'notifications' in data.keys(): self.log.info("notifications' detected : len = %d", len(data["notifications"])) self.log.debug(data["notifications"]) if 'registrations' in data.keys(): self.log.info("registrations' detected : len = %d", len(data["registrations"])) self.log.debug(data["registrations"]) if 'reg-updates' in data.keys(): # removed because this happens every 10s or so, spamming the output self.log.info("reg-updates detected : len = %d", len(data["reg-updates"])) self.log.debug(data["reg-updates"]) if 'de-registrations' in data.keys(): self.log.info("de-registrations detected : len = %d", len(data["de-registrations"])) self.log.debug(data["de-registrations"]) if 'registrations-expired' in data.keys(): self.log.info("registrations-expired detected : len = %d", len(data["registrations-expired"])) self.log.debug(data["registrations-expired"]) # make the requests. # url is the API url to hit # query are the optional get params # versioned tells the API whether to hit the /v#/ version. set to false for # commands that break with this, like the API and Connector version calls # TODO: spin this off to be non-blocking def _getURL(self, url, query={}, versioned=True): if versioned: return r.get(self.address+self.apiVersion+url, headers={"Authorization": "Bearer "+self.bearer}, params=query) else: return r.get(self.address+url, headers={"Authorization": "Bearer "+self.bearer}, params=query) # put data to URL with json payload in dataIn def _putURL(self, url, payload=None, versioned=True): if self._isJSON(payload): self.log.debug("PUT payload is json") if versioned: return r.put(self.address+self.apiVersion+url, json=payload, headers={"Authorization": "Bearer "+self.bearer}) else: return r.put(self.address+url, json=payload, headers={"Authorization": "Bearer "+self.bearer}) else: self.log.debug("PUT payload is NOT json") if versioned: return r.put(self.address+self.apiVersion+url, data=payload, headers={"Authorization": "Bearer "+self.bearer}) else: return r.put(self.address+url, data=payload, headers={"Authorization": "Bearer "+self.bearer}) # post data to URL with json payload in dataIn def _postURL(self, url, payload="", versioned=True): addr = self.address+self.apiVersion+url if versioned else self.address+url h = {"Authorization": "Bearer "+self.bearer} if payload: self.log.info("POSTing with payload: %s ", payload) return r.post(addr, data=payload, headers=h) else: self.log.info("POSTing") return r.post(addr, headers=h) # delete endpoint def _deleteURL(self, url, versioned=True): if versioned: return r.delete(self.address+self.apiVersion+url, headers={"Authorization": "Bearer "+self.bearer}) else: return r.delete(self.address+url, headers={"Authorization": "Bearer "+self.bearer}) # check if input is json-serializable, return true or false accordingly def _isJSON(self, dataIn): try: json.dumps(dataIn) return True except (TypeError, ValueError): self.log.debug("[_isJSON] exception triggered, input is not json") return False # extend dictionary class so we can instantiate multiple levels at once class vividict(dict): def __missing__(self, key): value = self[key] = type(self)() return value # Initialization function, set the token used by this object. 
def __init__(self, token, webAddress="https://api.connector.mbed.com", port="80",): # set token self.bearer = token # set version of REST API self.apiVersion = "/v2" # Init database, used for callback fn's for various tasks (asynch, subscriptions...etc) self.database = self.vividict() self.database['notifications'] self.database['registrations'] self.database['reg-updates'] self.database['de-registrations'] self.database['registrations-expired'] self.database['async-responses'] # longpolling variable # must initialize false to avoid race condition self._stopLongPolling = threading.Event() self._stopLongPolling.clear() # create thread for long polling self.longPollThread = threading.Thread( target=self.longPoll, name="mdc-api-longpoll") # Do this so the thread exits when the overall process does self.longPollThread.daemon = True # set default webAddress and port to mbed connector self.address = webAddress self.port = port # Initialize the callbacks self.async_responses_callback = self._asyncHandler self.registrations_expired_callback = self._defaultHandler self.de_registrations_callback = self._defaultHandler self.reg_updates_callback = self._defaultHandler self.registrations_callback = self._defaultHandler self.notifications_callback = self._defaultHandler # add logger self.log = logging.getLogger(name="mdc-api-logger") self.log.setLevel(logging.ERROR) self._ch = logging.StreamHandler() self._ch.setLevel(logging.ERROR) formatter = logging.Formatter( "\r\n[%(levelname)s \t %(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s") self._ch.setFormatter(formatter) self.log.addHandler(self._ch)
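An end-to-end sketch tying the pieces above together; the token and resource path are placeholders.

# End-to-end usage sketch of the connector class; token and paths are placeholders.
import mbed_connector_api

connector = mbed_connector_api.connector("<api-token>")
connector.startLongPolling()

def print_value(res):
    if not res.error:
        print(res.endpoint, res.resource, "=", res.result)

eps = connector.getEndpoints()
while not eps.isDone():
    pass
if not eps.error:
    name = eps.result[0]['name']  # result is the parsed JSON list of endpoints
    connector.getResourceValue(name, "/dev/temp", cbfn=print_value)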
class connector: ''' Interface class to use the connector.mbed.com REST API. This class will by default handle asyncronous events. All function return :class:'.asyncResult' objects ''' def getConnectorVersion(self): ''' GET the current Connector version. :returns: asyncResult object, populates error and result fields :rtype: asyncResult ''' pass def getApiVersions(self): ''' Get the REST API versions that connector accepts. :returns: :class:asyncResult object, populates error and result fields :rtype: asyncResult ''' pass def getLimits(self): '''return limits of account in async result object. :returns: asyncResult object, populates error and result fields :rtype: asyncResult ''' pass def getEndpoints(self, typeOfEndpoint=""): ''' Get list of all endpoints on the domain. :param str typeOfEndpoint: Optional filter endpoints returned by type :return: list of all endpoints :rtype: asyncResult ''' pass def getResources(self, ep, noResp=False, cacheOnly=False): ''' Get list of resources on an endpoint. :param str ep: Endpoint to get the resources of :param bool noResp: Optional - specify no response necessary from endpoint :param bool cacheOnly: Optional - get results from cache on connector, do not wake up endpoint :return: list of resources :rtype: asyncResult ''' pass def getResourceValue(self, ep, res, cbfn="", noResp=False, cacheOnly=False): ''' Get value of a specific resource on a specific endpoint. :param str ep: name of endpoint :param str res: name of resource :param fnptr cbfn: Optional - callback function to be called on completion :param bool noResp: Optional - specify no response necessary from endpoint :param bool cacheOnly: Optional - get results from cache on connector, do not wake up endpoint :return: value of the resource, usually a string :rtype: asyncResult ''' pass def putResourceValue(self, ep, res, data, cbfn=""): ''' Put a value to a resource on an endpoint :param str ep: name of endpoint :param str res: name of resource :param str data: data to send via PUT :param fnptr cbfn: Optional - callback funtion to call when operation is completed :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' pass def postResource(self, ep, res, data="", cbfn=""): ''' POST data to a resource on an endpoint. :param str ep: name of endpoint :param str res: name of resource :param str data: Optional - data to send via POST :param fnptr cbfn: Optional - callback funtion to call when operation is completed :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' pass def deleteEndpoint(self, ep, cbfn=""): ''' Send DELETE message to an endpoint. :param str ep: name of endpoint :param fnptr cbfn: Optional - callback funtion to call when operation is completed :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' pass def putResourceSubscription(self, ep, res, cbfn=""): ''' Subscribe to changes in a specific resource ``res`` on an endpoint ``ep`` :param str ep: name of endpoint :param str res: name of resource :param fnptr cbfn: Optional - callback funtion to call when operation is completed :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' pass def deleteEndpointSubscriptions(self, ep): ''' Delete all subscriptions on specified endpoint ``ep`` :param str ep: name of endpoint :return: successful ``.status_code`` / ``.is_done``. 
Check the ``.error`` :rtype: asyncResult ''' pass def deleteResourceSubscription(self, ep, res): ''' Delete subscription to a resource ``res`` on an endpoint ``ep`` :param str ep: name of endpoint :param str res: name of resource :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' pass def deleteAllSubscriptions(self): ''' Delete all subscriptions on the domain (all endpoints, all resources) :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' pass def getEndpointSubscriptions(self, ep): ''' Get list of all subscriptions on a given endpoint ``ep`` :param str ep: name of endpoint :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' pass def getResourceSubscription(self, ep, res): ''' Get list of all subscriptions for a resource ``res`` on an endpoint ``ep`` :param str ep: name of endpoint :param str res: name of resource :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' pass def putPreSubscription(self, JSONdata): ''' Set pre-subscription rules for all endpoints / resources on the domain. This can be useful for all current and future endpoints/resources. :param json JSONdata: data to use as pre-subscription data. Wildcards are permitted :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' pass def getPreSubscription(self): ''' Get the current pre-subscription data from connector :return: JSON that represents the pre-subscription data in the ``.result`` field :rtype: asyncResult ''' pass def putCallback(self, url, headers=""): ''' Set the callback URL. To be used in place of LongPolling when deploying a webapp. **note**: make sure you set up a callback URL in your web app :param str url: complete url, including port, where the callback url is located :param str headers: Optional - Headers to have Connector send back with all calls :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' pass def getCallback(self): ''' Get the callback URL currently registered with Connector. :return: callback url in ``.result``, error if applicable in ``.error`` :rtype: asyncResult ''' pass def deleteCallback(self): ''' Delete the Callback URL currently registered with Connector. :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' pass def setHandler(self, handler, cbfn): ''' Register a handler for a particular notification type. These are the types of notifications that are acceptable. | 'async-responses' | 'registrations-expired' | 'de-registrations' | 'reg-updates' | 'registrations' | 'notifications' :param str handler: name of the notification type :param fnptr cbfn: function to pass the notification channel messages to. :return: Nothing. ''' pass def startLongPolling(self, noWait=False): ''' Start LongPolling Connector for notifications. :param bool noWait: Optional - use the cached values in connector, do not wait for the device to respond :return: Thread of constantly running LongPoll. To be used to kill the thred if necessary. :rtype: pythonThread ''' pass def stopLongPolling(self): ''' Stop LongPolling thread :return: none ''' pass def longPoll(self, versioned=True): pass def handler(self, data): ''' Function to handle notification data as part of Callback URL handler. :param str data: data posted to Callback URL by connector. 
:return: nothing ''' pass def debug(self, onOff, level='DEBUG'): ''' Enable / Disable debugging :param bool onOff: turn debugging on / off :return: none ''' pass def _asyncHandler(self, data): pass def _defaultHandler(self, data): pass def _getURL(self, url, query={}, versioned=True): pass def _putURL(self, url, payload=None, versioned=True): pass def _postURL(self, url, payload="", versioned=True): pass def _deleteURL(self, url, versioned=True): pass def _isJSON(self, dataIn): pass class vividict(dict): def __missing__(self, key): pass def __init__(self, token, webAddress="https://api.connector.mbed.com", port="80",): pass
37
26
21
1
15
6
3
0.5
0
9
3
0
34
15
34
34
823
64
523
110
483
261
470
107
433
11
0
5
120
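The connector record above documents every method in terms of an asyncResult object (.is_done, .error, .result, .status_code) without including that class itself. Below is a minimal sketch of such an object, written only to make the docstring contract concrete; the fill() helper and the threading.Event are assumptions for illustration, not the real mbed-connector-api implementation.

import threading

class AsyncResult(object):
    # illustrative stand-in for the asyncResult the docstrings above describe
    def __init__(self, callback=None):
        self.is_done = False
        self.error = False
        self.result = None
        self.status_code = None
        self.callback = callback
        self._done = threading.Event()

    def fill(self, result, status_code=200, error=False):
        # hypothetical helper: the notification channel would call this
        # when the matching async-response arrives
        self.result = result
        self.status_code = status_code
        self.error = error
        self.is_done = True
        self._done.set()
        if self.callback:
            self.callback(self)

    def wait(self, timeout=None):
        # block the caller until the response has been filled in
        self._done.wait(timeout)
        return self.result

# usage sketch: poll .is_done, block on wait(), or pass a callback
r = AsyncResult(callback=lambda ar: print("status:", ar.status_code))
r.fill(["endpoint-1", "endpoint-2"])
assert r.is_done and not r.error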
2,614
ARMmbed/mbed-connector-api-python
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/ARMmbed_mbed-connector-api-python/mbed_connector_api/tests/mock_data.py
mock_data.mockData
class mockData: """dictionary of mocking data for the mocking tests""" # dictionary to hold the mock data _data = {} # function to add mock data to the _data dictionary def _add(self, uri, status, payload): self._data[uri] = {"status": status, "payload": payload } return def getPayload(self, input): return self._data[input]['payload'] def getStatusCode(self, input): return self._data[input]['status'] # initialize the _data dictionary with all the appropriate mocking data def __init__(self): self._add(uri="limits", status=200, payload='{"transaction-quota":10000,"transaction-count":259,"endpoint-quota":100,"endpoint-count":1}') self._add(uri="connectorVersion", status=200, payload='DeviceServer v3.0.0-520\nREST version = v2') self._add(uri="apiVersion", status=200, payload='["v1","v2"]') self._add(uri="endpoints", status=200, payload='[{"name":"51f540a2-3113-46e2-aef4-96e94a637b31","type":"test","status":"ACTIVE"}]') self._add(uri="resources", status=200, payload='[{"uri":"/Test/0/S","rt":"Static","obs":false,"type":""},{"uri":"/Test/0/D","rt":"Dynamic","obs":true,"type":""},{"uri":"/3/0/2","obs":false,"type":""},{"uri":"/3/0/1","obs":false,"type":""},{"uri":"/3/0/17","obs":false,"type":""},{"uri":"/3/0/0","obs":false,"type":""},{"uri":"/3/0/16","obs":false,"type":""},{"uri":"/3/0/11","obs":false,"type":""},{"uri":"/3/0/11/0","obs":false,"type":""},{"uri":"/3/0/4","obs":false,"type":""}]')
class mockData: '''dictionary of mocking data for the mocking tests''' def _add(self, uri, status, payload): pass def getPayload(self, input): pass def getStatusCode(self, input): pass def __init__(self): pass
5
1
5
0
5
0
1
0.18
0
0
0
0
4
0
4
4
31
5
22
6
17
4
15
6
10
1
0
0
4
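A short usage sketch for the mockData fixture above: tests look up a canned status code and payload by URI instead of hitting the live Connector service. The import path is inferred from the file_path recorded for this class; treat it as an assumption.

import json
from mbed_connector_api.tests.mock_data import mockData  # path per the record above

m = mockData()
assert m.getStatusCode('limits') == 200
limits = json.loads(m.getPayload('limits'))  # payloads are stored as raw strings
assert limits['endpoint-count'] == 1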
2,615
ARMmbed/yotta
ARMmbed_yotta/yotta/test/test_target.py
yotta.test.test_target.ComponentTestCase
class ComponentTestCase(unittest.TestCase): def test_mergeDictionaries(self): a = OrderedDict([('a', 1), ('b', 2), ('c', 3), ('subdict', OrderedDict([('a',7), ('c',0)]))]) b = OrderedDict([('a', 2), ('d', 4), ('e', 5), ('subdict', OrderedDict([('a',12), ('b',8), ('subsubdict', OrderedDict([(1,'a'), (2,'b')]))]))]) c = OrderedDict([('subdict', {'subsubdict':{3:'c'}})]) self.assertEqual(target._mergeDictionaries(a, {}), a) self.assertEqual(target._mergeDictionaries(b, {}), b) self.assertEqual(target._mergeDictionaries(c, {}), c) self.assertEqual( target._mergeDictionaries(a, b), OrderedDict([ ('a', 1), ('b', 2), ('c', 3), ('subdict', OrderedDict([('a', 7), ('c', 0), ('b',8), ('subsubdict', OrderedDict([(1,'a'), (2,'b')]))])), ('d', 4), ('e', 5) ]) ) self.assertEqual( target._mergeDictionaries(target._mergeDictionaries(a, b), c), OrderedDict([ ('a', 1), ('b', 2), ('c', 3), ('subdict', OrderedDict([('a', 7), ('c', 0), ('b',8), ('subsubdict', OrderedDict([(1,'a'), (2,'b'), (3,'c')]))])), ('d', 4), ('e', 5) ]) )
class ComponentTestCase(unittest.TestCase): def test_mergeDictionaries(self): pass
2
0
31
2
29
0
1
0
1
1
0
0
1
0
1
73
32
2
30
5
28
0
10
5
8
1
2
0
1
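The expected OrderedDicts in test_mergeDictionaries pin down the merge semantics: the left argument wins on conflicting keys, and nested dictionaries are merged recursively. A minimal re-implementation consistent with those assertions (an assumption, since the real target._mergeDictionaries body is not shown in this record; the OrderedDict ordering the real code preserves is ignored here):

def merge_dictionaries(a, b):
    # left-biased recursive merge: values already in `a` win,
    # dict-valued keys present on both sides are merged recursively
    result = dict(a)
    for key, value in b.items():
        if key not in result:
            result[key] = value
        elif isinstance(result[key], dict) and isinstance(value, dict):
            result[key] = merge_dictionaries(result[key], value)
    return result

merged = merge_dictionaries({'a': 1, 'sub': {'x': 7}}, {'a': 2, 'sub': {'y': 8}})
assert merged == {'a': 1, 'sub': {'x': 7, 'y': 8}}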
2,616
ARMmbed/yotta
ARMmbed_yotta/yotta/test/test_test_subcommand.py
yotta.test.test_test_subcommand.TestTestSubcommandModule
class TestTestSubcommandModule(unittest.TestCase): def test_moduleFromDirname(self): self.assertTrue(test_subcommand.moduleFromDirname('ym/b/ym/c/d', {'b':'b', 'c':'c'}, 'a') == 'c') self.assertTrue(test_subcommand.moduleFromDirname('ym/b/q/c/d', {'b':'b', 'c':'c'}, 'a') == 'b') self.assertTrue(test_subcommand.moduleFromDirname('z/b/q/c/d', {'b':'b', 'c':'c'}, 'a') == 'a') self.assertTrue(test_subcommand.moduleFromDirname('ym/e/d', {'b':'b', 'c':'c'}, 'a') == 'a') self.assertTrue(test_subcommand.moduleFromDirname('ym/e/d', {'b':'b', 'c':'c', 'e':'e'}, 'a') == 'e')
class TestTestSubcommandModule(unittest.TestCase): def test_moduleFromDirname(self): pass
2
0
6
0
6
0
1
0
1
0
0
0
1
0
1
73
7
0
7
2
5
0
7
2
5
1
2
0
1
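The five assertions above fully determine moduleFromDirname's behaviour: walking a build path, the component that follows the deepest 'ym' directory and names a known module decides ownership; if no such component exists, the default module wins. A sketch reproducing exactly the asserted behaviour (an assumption, as the real implementation is not included in this record):

def module_from_dirname(dirname, known_modules, default_module):
    # the path component after the deepest 'ym' directory that names a
    # known module decides ownership; otherwise fall back to the default
    parts = dirname.split('/')
    result = default_module
    for i, part in enumerate(parts[:-1]):
        if part == 'ym' and parts[i + 1] in known_modules:
            result = known_modules[parts[i + 1]]
    return result

assert module_from_dirname('ym/b/ym/c/d', {'b': 'b', 'c': 'c'}, 'a') == 'c'
assert module_from_dirname('ym/b/q/c/d', {'b': 'b', 'c': 'c'}, 'a') == 'b'
assert module_from_dirname('z/b/q/c/d', {'b': 'b', 'c': 'c'}, 'a') == 'a'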
2,617
ARMmbed/yotta
ARMmbed_yotta/yotta/test/test_validation.py
yotta.test.test_validation.TestValidation
class TestValidation(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def test_validateSourceDirNames(self): self.assertTrue(validate.sourceDirValidationError('Source', 'testcomponent')) self.assertTrue(validate.sourceDirValidationError('src', 'testcomponent')) self.assertTrue(validate.sourceDirValidationError('Src', 'testcomponent')) self.assertTrue(validate.sourceDirValidationError('Test', 'testcomponent')) self.assertTrue(validate.sourceDirValidationError('with space', 'testcomponent')) self.assertTrue(validate.sourceDirValidationError('with nonvalid!', 'testcomponent')) def test_validateSourceDirSuggestions(self): self.assertTrue('abcde' in validate.sourceDirValidationError('a b c!%^& d e', 'testcomponent')) self.assertTrue('source' in validate.sourceDirValidationError('Source', 'testcomponent')) self.assertTrue('source' in validate.sourceDirValidationError('src', 'testcomponent')) self.assertTrue('test' in validate.sourceDirValidationError('Test', 'testcomponent')) def test_componentNameCoerced(self): self.assertTrue('some-name' == validate.componentNameCoerced('Some Name')) self.assertTrue('some-name' == validate.componentNameCoerced('Some Name')) self.assertTrue('moo-moo-moo' == validate.componentNameCoerced('MOO!!!!MOO-----Moo')) def test_looksLikeAnEmail(self): self.assertTrue(validate.looksLikeAnEmail('test@example.com')) self.assertTrue(validate.looksLikeAnEmail('test.testytest@test.com')) self.assertFalse(validate.looksLikeAnEmail('@.com')) self.assertFalse(validate.looksLikeAnEmail('moo.moo')) self.assertFalse(validate.looksLikeAnEmail('thingy')) self.assertFalse(validate.looksLikeAnEmail('thingy@thingy')) self.assertFalse(validate.looksLikeAnEmail(''))
class TestValidation(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def test_validateSourceDirNames(self): pass def test_validateSourceDirSuggestions(self): pass def test_componentNameCoerced(self): pass def test_looksLikeAnEmail(self): pass
7
0
5
0
5
0
1
0
1
0
0
0
6
0
6
78
34
5
29
7
22
0
29
7
22
1
2
0
6
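test_componentNameCoerced implies that validate.componentNameCoerced lowercases the input and collapses every run of non-alphanumeric characters into a single hyphen ('MOO!!!!MOO-----Moo' becomes 'moo-moo-moo'). A regex sketch matching those assertions (an assumption — the real function body is not shown here):

import re

def component_name_coerced(name):
    # lowercase, then squash each run of non-alphanumerics into one hyphen
    return re.sub(r'[^a-z0-9]+', '-', name.lower()).strip('-')

assert component_name_coerced('Some Name') == 'some-name'
assert component_name_coerced('MOO!!!!MOO-----Moo') == 'moo-moo-moo'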
2,618
ARMmbed/yotta
ARMmbed_yotta/yotta/test/test_vcs.py
yotta.test.test_vcs.TestGit
class TestGit(unittest.TestCase): @classmethod def setUpClass(cls): # set up the git user environment variables so that git doesn't barf # if we try to commit without a user: util.setupGitUser() cls.working_copy = vcs.Git.cloneToTemporaryDir(Test_Repo_git) @classmethod def tearDownClass(cls): cls.working_copy.remove() def test_creation(self): self.assertTrue(self.working_copy) def test_getCommitId(self): commit_id = self.working_copy.getCommitId() self.assertTrue(len(commit_id) >= 6) def test_getDescription(self): description = self.working_copy.getDescription() self.assertTrue(len(description) >= 1) def test_isClean(self): self.assertTrue(self.working_copy.isClean()) fsutils.rmF(os.path.join(self.working_copy.workingDirectory(), 'module.json')) self.assertFalse(self.working_copy.isClean()) def test_commit(self): with open(os.path.join(self.working_copy.workingDirectory(), 'module.json'), "a") as f: f.write("\n") self.working_copy.markForCommit('module.json') self.working_copy.commit('test commit: DO NOT PUSH') self.assertTrue(self.working_copy.isClean())
class TestGit(unittest.TestCase): @classmethod def setUpClass(cls): pass @classmethod def tearDownClass(cls): pass def test_creation(self): pass def test_getCommitId(self): pass def test_getDescription(self): pass def test_isClean(self): pass def test_commit(self): pass
10
0
4
0
3
0
1
0.08
1
1
1
0
5
0
7
79
34
6
26
13
16
2
24
10
16
1
2
1
7
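TestGit above (and TestHg in the next record) uses one fixture shape throughout: a single expensive clone in setUpClass shared by every test method, removed once in tearDownClass. A generic, self-contained sketch of that pattern — the temporary directory merely stands in for cloneToTemporaryDir; nothing below is yotta code:

import os
import shutil
import tempfile
import unittest

class SharedWorkingCopy(unittest.TestCase):
    # expensive setup runs once per class rather than once per test method
    @classmethod
    def setUpClass(cls):
        cls.workdir = tempfile.mkdtemp()  # stand-in for cloneToTemporaryDir

    @classmethod
    def tearDownClass(cls):
        shutil.rmtree(cls.workdir)

    def test_working_copy_exists(self):
        self.assertTrue(os.path.isdir(self.workdir))

if __name__ == '__main__':
    unittest.main()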
2,619
ARMmbed/yotta
ARMmbed_yotta/yotta/test/test_vcs.py
yotta.test.test_vcs.TestHg
class TestHg(unittest.TestCase): @classmethod def setUpClass(cls): # test if we have an hg user set up; if not, we need to set one info = hgapi.Repo.command(".", os.environ, "showconfig") if info.find("ui.username") == -1: # hg doesn't provide a way to set the username from the command line. # The HGUSER environment variable can be used for that purpose. os.environ['HGUSER'] = 'Yotta Test <test@yottabuild.org>' cls.working_copy = vcs.HG.cloneToTemporaryDir(Test_Repo_hg) @classmethod def tearDownClass(cls): cls.working_copy.remove() def test_creation(self): self.assertTrue(self.working_copy) def test_getCommitId(self): commit_id = self.working_copy.getCommitId() self.assertTrue(len(commit_id) >= 6) def test_getDescription(self): description = self.working_copy.getDescription() self.assertTrue(len(description) >= 1) def test_isClean(self): self.assertTrue(self.working_copy.isClean()) fsutils.rmF(os.path.join(self.working_copy.workingDirectory(), 'module.json')) self.assertFalse(self.working_copy.isClean()) def test_commit(self): with open(os.path.join(self.working_copy.workingDirectory(), 'module.json'), "a") as f: f.write("\n") self.working_copy.markForCommit('module.json') self.working_copy.commit('test commit: DO NOT PUSH') self.assertTrue(self.working_copy.isClean())
class TestHg(unittest.TestCase): @classmethod def setUpClass(cls): pass @classmethod def tearDownClass(cls): pass def test_creation(self): pass def test_getCommitId(self): pass def test_getDescription(self): pass def test_isClean(self): pass def test_commit(self): pass
10
0
4
0
4
0
1
0.11
1
1
1
0
5
0
7
79
37
6
28
14
18
3
26
11
18
2
2
1
8
2,620
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/access_common.py
yotta.lib.access_common.NotInCache
class NotInCache(KeyError): pass
class NotInCache(KeyError): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
13
2
0
2
1
1
0
2
1
1
0
5
0
0
2,621
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/access_common.py
yotta.lib.access_common.SpecificationNotMet
class SpecificationNotMet(AccessException): pass
class SpecificationNotMet(AccessException): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
10
2
0
2
1
1
0
2
1
1
0
4
0
0
2,622
ARMmbed/yotta
ARMmbed_yotta/yotta/test/test_sourceparse.py
yotta.test.test_sourceparse.TestParseSourceURL
class TestParseSourceURL(unittest.TestCase): def test_registryURLs(self): for url in Registry_Specs: sv = sourceparse.parseSourceURL(url) self.assertEqual(sv.source_type, 'registry') def test_shorthandURLs(self): for url in ShortHand_URLs: for s in Git_Specs: if len(s): # Shorthand URLs support '@' and ' ' as well as '#' for m in ['#', '@', ' ']: sv = sourceparse.parseSourceURL(url + m + s) self.assertEqual(sv.source_type, 'github') self.assertEqual(sv.spec, s) else: sv = sourceparse.parseSourceURL(url) self.assertEqual(sv.source_type, 'github') self.assertEqual(sv.spec, s) def test_githubURLs(self): for url in Github_URLs: for s in Git_Specs: if len(s): source = url + '#' + s else: source = url sv = sourceparse.parseSourceURL(source) self.assertEqual(sv.source_type, 'github') self.assertEqual(sv.spec, s) def test_gitURLs(self): for url in Git_URLs: for s in Git_Specs: if len(s): source = url + '#' + s else: source = url sv = sourceparse.parseSourceURL(source) self.assertEqual(sv.source_type, 'git') self.assertEqual(sv.spec, s) def test_hgURLs(self): for url in HG_URLs: for s in HG_Specs: if len(s): source = url + '#' + s else: source = url sv = sourceparse.parseSourceURL(source) self.assertEqual(sv.source_type, 'hg') self.assertEqual(sv.spec, s) def test_invalid(self): for url in test_invalid_urls: self.assertRaises(ValueError, sourceparse.parseSourceURL, url)
class TestParseSourceURL(unittest.TestCase): def test_registryURLs(self): pass def test_shorthandURLs(self): pass def test_githubURLs(self): pass def test_gitURLs(self): pass def test_hgURLs(self): pass def test_invalid(self): pass
7
0
8
0
8
1
4
0.1
1
1
0
0
6
0
6
78
56
5
50
26
43
5
46
26
39
5
2
4
21
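The shorthand cases in test_shorthandURLs accept '#', '@' or ' ' as the separator before a version spec, while the full GitHub/git/hg URL cases use only '#'. A splitting sketch consistent with those assertions (an assumption — parseSourceURL's internals are not included in this record):

def split_shorthand_spec(url):
    # shorthand GitHub refs allow '#', '@' or ' ' before the version spec;
    # full URLs in the tests above only ever use '#'
    for separator in ('#', '@', ' '):
        if separator in url:
            base, spec = url.split(separator, 1)
            return base, spec
    return url, ''

assert split_shorthand_spec('username/reponame#v0.1.2') == ('username/reponame', 'v0.1.2')
assert split_shorthand_spec('username/reponame@master') == ('username/reponame', 'master')
assert split_shorthand_spec('username/reponame') == ('username/reponame', '')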
2,623
ARMmbed/yotta
ARMmbed_yotta/yotta/options/plain.py
yotta.options.plain.ColourfulAction
class ColourfulAction(PlainAction): def __call__(self, parser, namespace, values, option_string=None): setattr(namespace, self.dest, False) logging_setup.setPlain(False)
class ColourfulAction(PlainAction): def __call__(self, parser, namespace, values, option_string=None): pass
2
0
3
0
3
0
1
0
1
0
0
0
1
0
1
10
4
0
4
2
2
0
4
2
2
1
4
0
1
2,624
ARMmbed/yotta
ARMmbed_yotta/yotta/options/noninteractive.py
yotta.options.noninteractive.Noninteractive
class Noninteractive(Action): def __init__(self, *args, **kwargs): kwargs['nargs'] = 0 kwargs['metavar'] = None self.dest = kwargs['dest'] super(Noninteractive, self).__init__(*args, **kwargs) def __call__(self, parser, namespace, values, option_string=None): setattr(namespace, self.dest, False)
class Noninteractive(Action): def __init__(self, *args, **kwargs): pass def __call__(self, parser, namespace, values, option_string=None): pass
3
0
4
0
4
0
1
0
1
1
0
0
2
1
2
9
9
1
8
4
5
0
8
4
5
1
3
0
2
2,625
ARMmbed/yotta
ARMmbed_yotta/yotta/options/debug.py
yotta.options.debug.DebugSubsystemsAction
class DebugSubsystemsAction(Action): def __init__(self, *args, **kwargs): self.subsystems = [] kwargs['nargs'] = 1 super(DebugSubsystemsAction, self).__init__(*args, **kwargs) def __call__(self, parser, namespace, values, option_string=None): self.subsystems += values logging_setup.setEnabledModules(self.subsystems)
class DebugSubsystemsAction(Action): def __init__(self, *args, **kwargs): pass def __call__(self, parser, namespace, values, option_string=None): pass
3
0
4
0
4
0
1
0
1
1
0
0
2
1
2
9
9
1
8
4
5
0
8
4
5
1
3
0
2
2,626
ARMmbed/yotta
ARMmbed_yotta/yotta/options/config.py
yotta.options.config.ConfigAction
class ConfigAction(Action): def __init__(self, *args, **kwargs): kwargs['nargs'] = 1 self.dest = kwargs['dest'] super(ConfigAction, self).__init__(*args, **kwargs) def __call__(self, parser, namespace, values, option_string=None): # delay importing target module until this option is used, as the # import would otherwise be unnecessary, and drag the target module # into being synchronously imported into main from yotta.lib import target error, config = target.loadAdditionalConfig(values[0]) if error: raise ArgumentError(self, error) else: setattr(namespace, self.dest, config)
class ConfigAction(Action): def __init__(self, *args, **kwargs): pass def __call__(self, parser, namespace, values, option_string=None): pass
3
0
7
0
6
2
2
0.25
1
2
0
0
2
1
2
9
16
1
12
6
8
3
11
6
7
2
3
1
3
2,627
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/access_common.py
yotta.lib.access_common.AccessException
class AccessException(Exception): pass
class AccessException(Exception): pass
1
0
0
0
0
0
0
0
1
0
0
2
0
0
0
10
2
0
2
1
1
0
2
1
1
0
3
0
0
2,628
ARMmbed/yotta
ARMmbed_yotta/yotta/test/test_sourceparse.py
yotta.test.test_sourceparse.TestParseModuleNameAndSpec
class TestParseModuleNameAndSpec(unittest.TestCase): def test_validNames(self): for name in Valid_Names: n, s = sourceparse.parseModuleNameAndSpec(name) self.assertEqual(n, name) self.assertEqual(s, '*') def test_ShorthandRefs(self): for url in ShortHand_URLs: for spec in Git_Specs: if len(spec): # Shorthand URLs support '@' and ' ' as well as '#' for m in ['#', '@', ' ']: ns = url + m + spec n, s = sourceparse.parseModuleNameAndSpec(ns) self.assertEqual(n, 'reponame') self.assertEqual(s, ns) else: n, s = sourceparse.parseModuleNameAndSpec(url) self.assertEqual(n, 'reponame') self.assertEqual(s, url) def test_GithubRefs(self): for url in Github_URLs: for spec in Git_Specs: if len(spec): ns = url + '#' + spec else: ns = url n, s = sourceparse.parseModuleNameAndSpec(ns) self.assertEqual(n, 'reponame') self.assertEqual(s, ns) def test_GitRefs(self): for url in Git_URLs: for spec in Git_Specs: if len(spec): ns = url + '#' + spec else: ns = url n, s = sourceparse.parseModuleNameAndSpec(ns) self.assertEqual(n, 'reponame') self.assertEqual(s, ns) def test_HGRefs(self): for url in HG_URLs: for spec in HG_Specs: if len(spec): ns = url + '#' + spec else: ns = url n, s = sourceparse.parseModuleNameAndSpec(ns) self.assertEqual(n, 'reponame') self.assertEqual(s, ns) def test_atVersion(self): for name in Valid_Names: for v in Registry_Specs: if len(v): nv = name + '@' + v n, s = sourceparse.parseModuleNameAndSpec(nv) self.assertEqual(n, name) self.assertEqual(s, v)
class TestParseModuleNameAndSpec(unittest.TestCase): def test_validNames(self): pass def test_ShorthandRefs(self): pass def test_GithubRefs(self): pass def test_GitRefs(self): pass def test_HGRefs(self): pass def test_atVersion(self): pass
7
0
10
0
9
1
4
0.09
1
0
0
0
6
0
6
78
63
5
57
30
50
5
53
30
46
5
2
4
23
2,629
ARMmbed/yotta
ARMmbed_yotta/yotta/test/test_settings.py
yotta.test.test_settings.TestSettings
class TestSettings(unittest.TestCase): @classmethod def setUpClass(cls): cls.test_dir = tempfile.mkdtemp() test_files = [ ('1.json', '{"a":{"b":{"c":"1-value"}}}'), ('2.json', '{"a":{"b":{"c":"2-value"}, "b2":"2-value"}}'), ('3.json', '{"a":{"b":{"c":"3-value"}, "b2":"3-value"}, "a2":"3-value"}') ] cls.filenames = [] for fn, s in test_files: cls.filenames.append(os.path.join(cls.test_dir, fn)) with open(cls.filenames[-1], 'w') as f: f.write(s) @classmethod def tearDownClass(cls): rmRf(cls.test_dir) def test_merging(self): p = settings._JSONConfigParser() p.read(self.filenames) self.assertEqual(p.get('a.b.c'), '1-value') self.assertEqual(p.get('a.b2'), '2-value') self.assertEqual(p.get('a2'), '3-value') def test_setting(self): p = settings._JSONConfigParser() p.read(self.filenames) p.set('foo', 'xxx') self.assertEqual(p.get('foo'), 'xxx') p.set('someLongNameHere_etc_etc', 'xxx') self.assertEqual(p.get('someLongNameHere_etc_etc'), 'xxx') p.set('someLongNameHere_etc_etc.with.a.path', True, filename=self.filenames[1]) self.assertEqual(p.get('someLongNameHere_etc_etc.with.a.path'), True) p.set('someLongNameHere_etc_etc.with.a.path', False, filename=self.filenames[1]) self.assertEqual(p.get('someLongNameHere_etc_etc.with.a.path'), False) # NB: don't expect it to change when we set a value that's shadowed by # an earlier file: p.set('someLongNameHere_etc_etc.with.a.path', 7, filename=self.filenames[2]) self.assertEqual(p.get('someLongNameHere_etc_etc.with.a.path'), False) p.set('someLongNameHere_etc_etc.with.another.path', 7, filename=self.filenames[2]) self.assertEqual(p.get('someLongNameHere_etc_etc.with.another.path'), 7) def test_writing(self): p = settings._JSONConfigParser() p.read(self.filenames) p.set('foo', 'xxx') p.set('someLongNameHere_etc_etc', 'xxx') p.set('someLongNameHere_etc_etc.with.a.path', True, filename=self.filenames[1]) p.set('someLongNameHere_etc_etc.with.a.path', False, filename=self.filenames[1]) p.set('someLongNameHere_etc_etc.with.a.path', 7, filename=self.filenames[2]) p.set('someLongNameHere_etc_etc.with.another.path', 7, filename=self.filenames[2]) # NB: only write settings to the first file p.write() self.assertEqual(p.get('foo'), 'xxx') self.assertEqual(p.get('someLongNameHere_etc_etc'), 'xxx') self.assertEqual(p.get('someLongNameHere_etc_etc.with.a.path'), False) self.assertEqual(p.get('someLongNameHere_etc_etc.with.another.path'), 7) p2 = settings._JSONConfigParser() p2.read(self.filenames) self.assertEqual(p2.get('foo'), 'xxx') self.assertEqual(p2.get('someLongNameHere_etc_etc'), 'xxx') # check that we only wrote settings to the first file self.assertEqual(p2.get('someLongNameHere_etc_etc.with.a.path'), None) # now write settings for the other files, and continue p.write(self.filenames[1]) p.write(self.filenames[2]) p3 = settings._JSONConfigParser() p3.read(self.filenames) self.assertEqual(p3.get('someLongNameHere_etc_etc.with.a.path'), False) self.assertEqual(p3.get('someLongNameHere_etc_etc.with.another.path'), 7) p4 = settings._JSONConfigParser() p4.read([self.filenames[1]]) self.assertEqual(p4.get('foo'), None) self.assertEqual(p4.get('someLongNameHere_etc_etc.with.a.path'), False) self.assertEqual(p4.get('someLongNameHere_etc_etc.with.another.path'), None) p5 = settings._JSONConfigParser() p5.read([self.filenames[2]]) self.assertEqual(p5.get('foo'), None) self.assertEqual(p5.get('someLongNameHere_etc_etc.with.a.path'), 7) self.assertEqual(p5.get('someLongNameHere_etc_etc.with.another.path'), 7) def test_envvars(self): testval = str(random.randint(1,100000)) 
os.environ['YOTTA_SETTINGS_TEST_VARIABLE'] = testval self.assertEqual(settings.get('settings.TEST.Variable'), testval)
class TestSettings(unittest.TestCase): @classmethod def setUpClass(cls): pass @classmethod def tearDownClass(cls): pass def test_merging(self): pass def test_setting(self): pass def test_writing(self): pass def test_envvars(self): pass
9
0
16
3
12
1
1
0.06
1
2
1
0
4
0
6
78
104
22
77
20
68
5
71
17
64
2
2
2
7
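The TestSettings assertions characterise a layered JSON config: files are read in priority order, earlier files shadow later ones, and a dotted path walks nested objects. A toy model of just the lookup behaviour those tests exercise (an assumption about _JSONConfigParser's internals; write-back routing is omitted):

class LayeredConfig(object):
    # earlier layers shadow later ones; a dotted path walks nested dicts
    def __init__(self, layers):
        self.layers = layers  # list of dicts, highest priority first

    def get(self, dotted_path):
        for layer in self.layers:
            value = layer
            for part in dotted_path.split('.'):
                if not isinstance(value, dict) or part not in value:
                    value = None
                    break
                value = value[part]
            if value is not None:
                return value
        return None

cfg = LayeredConfig([
    {'a': {'b': {'c': '1-value'}}},
    {'a': {'b': {'c': '2-value'}, 'b2': '2-value'}},
])
assert cfg.get('a.b.c') == '1-value'   # first file wins
assert cfg.get('a.b2') == '2-value'    # falls through to the second file
assert cfg.get('missing') is None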
2,630
ARMmbed/yotta
ARMmbed_yotta/yotta/test/test_hg_access.py
yotta.test.test_hg_access.TestHGAccess
class TestHGAccess(unittest.TestCase): def setUp(self): ensureHGConfig() vs = sourceparse.parseSourceURL(Test_Repo) self.remote_component = hg_access.HGComponent.createFromSource(vs, Test_Name) self.assertTrue(self.remote_component) self.working_copy = self.remote_component.clone() self.assertTrue(self.working_copy) def tearDown(self): fsutils.rmRf(self.working_copy.directory) def test_installDeps(self): Args = namedtuple('Args', ['component', 'target', 'act_globally', 'install_linked', 'install_test_deps', 'config']) install.installComponent(Args(Test_Deps_Name, Test_Deps_Target, False, False, 'own', {})) def test_availableVersions(self): versions = self.working_copy.availableVersions() self.assertIn(version.Version('v0.0.1'), versions) def test_versionSpec(self): vs = sourceparse.parseSourceURL(Test_Repo_With_Spec) spec = hg_access.HGComponent.createFromSource(vs, Test_Name).versionSpec() v = spec.select(self.working_copy.availableVersions()) self.assertTrue(v)
class TestHGAccess(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def test_installDeps(self): pass def test_availableVersions(self): pass def test_versionSpec(self): pass
6
0
4
0
4
0
1
0
1
2
2
0
5
2
5
77
25
4
21
14
15
0
21
14
15
1
2
0
5
2,631
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/access_common.py
yotta.lib.access_common.TargetUnavailable
class TargetUnavailable(Unavailable): pass
class TargetUnavailable(Unavailable): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
10
2
0
2
1
1
0
2
1
1
0
5
0
0
2,632
ARMmbed/yotta
ARMmbed_yotta/yotta/options/target.py
yotta.options.target.TargetAction
class TargetAction(Action): def __init__(self, *args, **kwargs): kwargs['nargs'] = 1 self.dest = kwargs['dest'] super(TargetAction, self).__init__(*args, **kwargs) def __call__(self, parser, namespace, values, option_string=None): setattr(namespace, self.dest, values[0]) setattr(namespace, '_target_set_explicitly', True)
class TargetAction(Action): def __init__(self, *args, **kwargs): pass def __call__(self, parser, namespace, values, option_string=None): pass
3
0
4
0
4
0
1
0
1
1
0
0
2
1
2
9
9
1
8
4
5
0
8
4
5
1
3
0
2
2,633
ARMmbed/yotta
ARMmbed_yotta/yotta/options/registry.py
yotta.options.registry.RegistryAction
class RegistryAction(Action): def __init__(self, *args, **kwargs): kwargs['nargs'] = 1 self.dest = kwargs['dest'] super(RegistryAction, self).__init__(*args, **kwargs) def __call__(self, parser, namespace, values, option_string=None): setattr(namespace, self.dest, values[0])
class RegistryAction(Action): def __init__(self, *args, **kwargs): pass def __call__(self, parser, namespace, values, option_string=None): pass
3
0
3
0
3
0
1
0
1
1
0
0
2
1
2
9
8
1
7
4
4
0
7
4
4
1
3
0
2
2,634
ARMmbed/yotta
ARMmbed_yotta/yotta/options/plain.py
yotta.options.plain.PlainAction
class PlainAction(Action): def __init__(self, *args, **kwargs): kwargs['nargs'] = 0 kwargs['metavar'] = None self.dest = kwargs['dest'] super(PlainAction, self).__init__(*args, **kwargs) def __call__(self, parser, namespace, values, option_string=None): setattr(namespace, self.dest, True) logging_setup.setPlain(True)
class PlainAction(Action): def __init__(self, *args, **kwargs): pass def __call__(self, parser, namespace, values, option_string=None): pass
3
0
4
0
4
0
1
0
1
1
0
1
2
1
2
9
10
1
9
4
6
0
9
4
6
1
3
0
2
2,635
ARMmbed/yotta
ARMmbed_yotta/yotta/options/verbosity.py
yotta.options.verbosity.VerbosityAction
class VerbosityAction(Action): def __init__(self, *args, **kwargs): self.level = 0 kwargs['nargs'] = 0 kwargs['dest'] = SUPPRESS super(VerbosityAction, self).__init__(*args, **kwargs) def __call__(self, parser, namespace, values, option_string=None): self.level += 1 loglevel = logLevelFromVerbosity(self.level) logging_setup.setLevel(loglevel)
class VerbosityAction(Action): def __init__(self, *args, **kwargs): pass def __call__(self, parser, namespace, values, option_string=None): pass
3
0
5
0
5
0
1
0
1
1
0
0
2
1
2
9
11
1
10
5
7
0
10
5
7
1
3
0
2
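VerbosityAction illustrates the custom argparse Action idiom used throughout yotta/options: adjust kwargs in __init__, then react in __call__ every time the flag appears. A self-contained sketch of the same idiom (the CountVerbosity class below is hypothetical, not yotta code):

from argparse import SUPPRESS, Action, ArgumentParser

class CountVerbosity(Action):
    # illustrative equivalent of VerbosityAction above: every occurrence
    # of the flag bumps a counter held on the action instance itself
    def __init__(self, *args, **kwargs):
        self.level = 0
        kwargs['nargs'] = 0
        kwargs['dest'] = SUPPRESS  # nothing is stored on the namespace
        super(CountVerbosity, self).__init__(*args, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        self.level += 1
        print('verbosity raised to', self.level)

parser = ArgumentParser()
parser.add_argument('-v', '--verbose', action=CountVerbosity, help='increase verbosity')
parser.parse_args(['-v', '-v', '-v'])  # prints 1, 2, 3

Note that the counter lives on the action instance rather than the namespace, so repeated flags accumulate within a single parse.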
2,636
ARMmbed/yotta
ARMmbed_yotta/yotta/test/cli/test_account.py
yotta.test.cli.test_account.TestCLIAccount
class TestCLIAccount(unittest.TestCase): @classmethod def setUpClass(cls): cls.test_dir = tempfile.mkdtemp() with open(os.path.join(cls.test_dir, 'module.json'), 'w') as f: f.write(Test_Module_JSON) cls.saved_settings_dir = None # override the settings directory, so we don't clobber any actual # user's settings if 'YOTTA_USER_SETTINGS_DIR' in os.environ: cls.saved_settings_dir = os.environ['YOTTA_USER_SETTINGS_DIR'] # use a directory called tmp_yotta_settings in the working directory: os.environ['YOTTA_USER_SETTINGS_DIR'] = 'tmp_yotta_settings' @classmethod def tearDownClass(cls): rmRf(cls.test_dir) cls.test_dir = None if cls.saved_settings_dir is not None: os.environ['YOTTA_USER_SETTINGS_DIR'] = cls.saved_settings_dir cls.saved_settings_dir = None else: del os.environ['YOTTA_USER_SETTINGS_DIR'] @loggedout def test_logoutLoggedOut(self): # test logging out when already logged out: should be a no-op stdout, stderr, status = cli.run(['logout']) self.assertEqual(status, 0) # check that we're still logged out: stdout, stderr, status = cli.run(['whoami']) self.assertIn('not logged in', stdout+stderr) # !!! FIXME: adding a test for interactive login is somewhat difficult. # Non-interactive login is implicitly tested by all the @loggedin tests #@loggedout #def test_loginLoggedOut(self): # # test logging in when logged out # pass @loggedout def test_whoamiLoggedOut(self): # test whoami when loggedout stdout, stderr, status = cli.run(['whoami']) self.assertIn('not logged in', stdout+stderr) self.assertNotEqual(status, 0)
class TestCLIAccount(unittest.TestCase): @classmethod def setUpClass(cls): pass @classmethod def tearDownClass(cls): pass @loggedout def test_logoutLoggedOut(self): pass @loggedout def test_whoamiLoggedOut(self): pass
9
0
8
0
6
2
2
0.4
1
0
0
0
2
0
4
76
46
4
30
12
21
12
25
7
20
2
2
1
6
2,637
ARMmbed/yotta
ARMmbed_yotta/yotta/test/cli/test_outdated.py
yotta.test.cli.test_outdated.TestCLIOutdated
class TestCLIOutdated(unittest.TestCase): def test_outdated(self): path = util.writeTestFiles(Test_Outdated, True) stdout, stderr, statuscode = cli.run(['-t', 'x86-linux-native', 'outdated'], cwd=path) self.assertNotEqual(statuscode, 0) self.assertIn('test-testing-dummy', stdout + stderr) util.rmRf(path) def test_notOutdated(self): path = util.writeTestFiles(Test_Outdated, True) stdout, stderr, statuscode = cli.run(['-t', 'x86-linux-native', 'up'], cwd=path) self.assertEqual(statuscode, 0) stdout, stderr, statuscode = cli.run(['-t', 'x86-linux-native', 'outdated'], cwd=path) self.assertEqual(statuscode, 0) self.assertNotIn('test-testing-dummy', stdout + stderr) util.rmRf(path)
class TestCLIOutdated(unittest.TestCase): def test_outdated(self): pass def test_notOutdated(self): pass
3
0
10
3
7
0
1
0
1
0
0
0
2
0
2
74
21
6
15
7
12
0
15
7
12
1
2
0
2
2,638
ARMmbed/yotta
ARMmbed_yotta/yotta/test/cli/test_owners.py
yotta.test.cli.test_owners.TestCLIOwners
class TestCLIOwners(unittest.TestCase): @classmethod def setUpClass(cls): cls.test_dir = tempfile.mkdtemp() with open(os.path.join(cls.test_dir, 'module.json'), 'w') as f: f.write(Test_Module_JSON) cls.saved_settings_dir = None # override the settings directory, so that we can be sure we're not # logged in if 'YOTTA_USER_SETTINGS_DIR' in os.environ: cls.saved_settings_dir = os.environ['YOTTA_USER_SETTINGS_DIR'] # use a directory called tmp_yotta_settings in the working directory: os.environ['YOTTA_USER_SETTINGS_DIR'] = 'tmp_yotta_settings' @classmethod def tearDownClass(cls): rmRf(cls.test_dir) cls.test_dir = None if cls.saved_settings_dir is not None: os.environ['YOTTA_USER_SETTINGS_DIR'] = cls.saved_settings_dir cls.saved_settings_dir = None else: del os.environ['YOTTA_USER_SETTINGS_DIR'] # you have to be authenticated to list owners, so currently we only # test that the commands fail correctly in noninteractive mode: def test_listOwners(self): stdout, stderr, statuscode = cli.run(['-n', 'owners', 'ls'], cwd=self.test_dir) if statuscode != 0: self.assertTrue((stdout+stderr).find('login required') != -1) def test_addOwner(self): stdout, stderr, statuscode = cli.run(['-n', 'owners', 'add', 'friend@example.com'], cwd=self.test_dir) if statuscode != 0: self.assertTrue((stdout+stderr).find('login required') != -1) def test_rmOwner(self): stdout, stderr, statuscode = cli.run(['-n', 'owners', 'rm', 'friend@example.com'], cwd=self.test_dir) if statuscode != 0: self.assertTrue((stdout+stderr).find('login required') != -1)
class TestCLIOwners(unittest.TestCase): @classmethod def setUpClass(cls): pass @classmethod def tearDownClass(cls): pass def test_listOwners(self): pass def test_addOwner(self): pass def test_rmOwner(self): pass
8
0
6
0
6
1
2
0.16
1
0
0
0
3
0
5
77
41
5
31
12
23
5
28
9
22
2
2
1
10
2,639
ARMmbed/yotta
ARMmbed_yotta/yotta/list.py
yotta.list.ComponentDepsFormatter
class ComponentDepsFormatter(object): def __init__(self, target=None, available_components=None, list_all=False, plain=False, display_origin=False): # don't even try to do Unicode on windows. Even if we can encode it # correctly, the default terminal fonts don't support Unicode # characters :( self.use_unicode = not ((os.name == 'nt') or plain) self.use_colours = not plain self.target = target self.list_all = list_all self.available = available_components self.display_origin = display_origin if plain: self.L_Char = u' ' self.T_Char = u' ' self.Dash_Char = u' ' self.Pipe_Char = u' ' elif self.use_unicode: self.L_Char = u'\u2517' self.T_Char = u'\u2523' self.Dash_Char = u'\u2501' self.Pipe_Char = u'\u2503' else: self.L_Char = u'\\' self.T_Char = u'|' self.Dash_Char = u'_' self.Pipe_Char = u'|' super(ComponentDepsFormatter, self).__init__() def format( self, component, processed, indent=u'', tee=u'', installed_at=u'', test_dep=False, spec=None ): r = u'' if self.use_colours: DIM = colorama.Style.DIM #pylint: disable=no-member BRIGHT = colorama.Style.BRIGHT #pylint: disable=no-member GREEN = colorama.Fore.GREEN #pylint: disable=no-member RED = colorama.Fore.RED #pylint: disable=no-member RESET = colorama.Style.RESET_ALL #pylint: disable=no-member else: DIM = BRIGHT = GREEN = RED = RESET = u'' mods_path = component.modulesPath() deps = component.getDependencies( available_components = self.available, target = self.target, test = True, warnings = False ) specs = dict([(x.name, x) for x in component.getDependencySpecs(target=self.target)]) def isTestOnly(name): return specs[name].is_test_dependency def shouldDisplay(x): if self.list_all: # list everything everywhere (apart from test dependencies of test # dependencies, which should be considered irrelevant) if component.isTestDependency() and isTestOnly(x[0]): return False else: return True if (not isTestOnly(x[0]) or not len(indent)): # this is non-test dependency, or a top-level test dependency if not x[1]: # if it's missing, display it return True if x[1].path == os.path.join(mods_path, x[0]): # if it's installed in this module, display it return True if x[0] in deps_here: # if it's first depended on by this module, then display it return True # everything else shouldn't be displayed here return False origin_descr = '' if self.display_origin: origin = component.origin() if origin is not None: if origin.startswith('github://'): origin_descr = ' (' + origin[9:] + ')' else: origin_descr = ' (' + friendlyRegistryName(origin, short=True) + ')' line = indent[:-2] + tee + component.getName() + u' ' + DIM + str(component.getVersion()) + origin_descr + RESET if spec and not spec.match(component.getVersion()): line += u' ' + RESET + BRIGHT + RED + str(spec) + RESET if test_dep: line += u' ' + DIM + u'(test dependency)' + RESET if len(installed_at): line += u' ' + DIM + installed_at + RESET if component.installedLinked(): line += GREEN + BRIGHT + u' -> ' + RESET + GREEN + fsutils.realpath(component.path) + RESET r += line + '\n' deps_here = [x for x in list(deps.keys()) if (x not in processed)] print_deps = [x for x in list(deps.items()) if shouldDisplay(x)] processed += [x[0] for x in print_deps] for (name, dep), last in islast(print_deps): if last: next_indent = indent + u' ' tee = self.L_Char + self.Dash_Char + u' ' next_tee = self.L_Char + self.Dash_Char + u' ' else: next_indent = indent + self.Pipe_Char + u' ' tee = self.T_Char + self.Dash_Char + u' ' next_tee = self.T_Char + self.Dash_Char + u' ' test_dep_status = u'' if isTestOnly(name): 
test_dep_status = u' (test dependency)' version_req = specs[name].nonShrinkwrappedVersionReq() if not dep: r += indent + tee + name + u' ' + version_req + test_dep_status + BRIGHT + RED + ' missing' + RESET + '\n' else: spec = access.remoteComponentFor(name, version_req, 'modules').versionSpec() if not spec: spec_descr = u'' elif spec.match(dep.getVersion()): spec_descr = u' ' + str(spec) else: spec_descr = u' ' + RESET + BRIGHT + RED + str(spec) spec_descr += test_dep_status if name in deps_here: # dependencies that are first used here may actually be # installed higher up our dependency tree, if they are, # illustrate that: if dep.path == os.path.join(mods_path, name): r += self.format( dep, processed, next_indent, next_tee, test_dep = isTestOnly(name), spec = spec ) else: r += self.format( dep, processed, next_indent, next_tee, installed_at = relpathIfSubdir(dep.unresolved_path), test_dep = isTestOnly(name), spec = spec ) else: r += indent + tee + DIM + name + spec_descr + RESET + '\n' return r
class ComponentDepsFormatter(object): def __init__(self, target=None, available_components=None, list_all=False, plain=False, display_origin=False): pass def format( self, component, processed, indent=u'', tee=u'', installed_at=u'', test_dep=False, spec=None ): pass def isTestOnly(name): pass def shouldDisplay(x): pass
5
0
46
4
38
6
7
0.13
1
4
0
0
2
10
2
2
163
15
135
44
121
18
95
35
90
17
1
4
28
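ComponentDepsFormatter's format() is largely colour and version-spec bookkeeping around a recursive box-drawing walk: '┗━' for a last child, '┣━' for a middle child, '┃' to continue a parent branch. The sketch below keeps only that drawing logic (an assumed simplification: no colours, specs, or installed-at annotations):

def print_dependency_tree(name, deps, indent=u'', last=True):
    # same box-drawing vocabulary as ComponentDepsFormatter above
    tee = u'\u2517\u2501 ' if last else u'\u2523\u2501 '
    print(indent + tee + name)
    children = deps.get(name, [])
    for i, child in enumerate(children):
        child_indent = indent + (u'   ' if last else u'\u2503  ')
        print_dependency_tree(child, deps, child_indent, i == len(children) - 1)

print_dependency_tree('app', {'app': ['libfoo', 'libbar'], 'libfoo': ['libbaz']})
# ┗━ app
#    ┣━ libfoo
#    ┃  ┗━ libbaz
#    ┗━ libbar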
2,640
ARMmbed/yotta
ARMmbed_yotta/yotta/test/cli/test_publish.py
yotta.test.cli.test_publish.TestCLIPublish
class TestCLIPublish(unittest.TestCase): @classmethod def setUpClass(cls): cls.test_dir = tempfile.mkdtemp() @classmethod def tearDownClass(cls): rmRf(cls.test_dir) def test_publishPrivate(self): with open(os.path.join(self.test_dir, 'module.json'), 'w') as f: f.write(Private_Module_JSON) stdout, stderr, status = cli.run(['--target', Test_Target, 'publish'], cwd=self.test_dir) self.assertNotEqual(status, 0) self.assertTrue('is private and cannot be published' in ('%s %s' % (stdout, stderr))) def test_publishNotAuthed(self): # ensure we're not logged in by setting a different settings directory: saved_settings_dir = None if 'YOTTA_USER_SETTINGS_DIR' in os.environ: saved_settings_dir = os.environ['YOTTA_USER_SETTINGS_DIR'] os.environ['YOTTA_USER_SETTINGS_DIR'] = 'tmp_yotta_settings' try: with open(os.path.join(self.test_dir, 'module.json'), 'w') as f: f.write(Public_Module_JSON) stdout, stderr, status = cli.run(['-n', '--target', Test_Target, 'publish'], cwd=self.test_dir) if status != 0: out = stdout+stderr self.assertTrue(out.find('login required') != -1 or out.find('not module owner') != -1) finally: if saved_settings_dir is not None: os.environ['YOTTA_USER_SETTINGS_DIR'] = saved_settings_dir else: del os.environ['YOTTA_USER_SETTINGS_DIR'] def test_prePublishPreventsPublish(self): path = util.writeTestFiles(Test_prePublish_Prevents_Publish, True) stdout, stderr, statuscode = cli.run(['-t', 'x86-linux-native', '--noninteractive', 'publish'], cwd=path) self.assertNotEqual(statuscode, 0) self.assertIn('prePublish script error code 1 prevents publishing', stdout + stderr) util.rmRf(path) def test_warnOfficialKeywords(self): path = util.writeTestFiles(Test_Publish, True) stdout, stderr, statuscode = cli.run(['-t', 'x86-linux-native', '--noninteractive', 'publish'], cwd=path) self.assertNotEqual(statuscode, 0) self.assertIn('Is this really an officially supported mbed module', stdout + stderr) util.rmRf(path)
class TestCLIPublish(unittest.TestCase): @classmethod def setUpClass(cls): pass @classmethod def tearDownClass(cls): pass def test_publishPrivate(self): pass def test_publishNotAuthed(self): pass def test_prePublishPreventsPublish(self): pass def test_warnOfficialKeywords(self): pass
9
0
7
1
7
0
2
0.02
1
0
0
0
4
0
6
78
52
9
42
19
33
1
38
15
31
4
2
2
9
2,641
ARMmbed/yotta
ARMmbed_yotta/yotta/test/cli/test_shrinkwrap.py
yotta.test.cli.test_shrinkwrap.TestCLIShrinkwrap
class TestCLIShrinkwrap(unittest.TestCase): def testCreateShrinkwrap(self): test_dir = util.writeTestFiles(Test_Shrinkwrap, True) stdout, stderr, statuscode = cli.run(['-t', Test_Target, '--plain', 'shrinkwrap'], cwd=test_dir) self.assertEqual(statuscode, 0) self.assertTrue(os.path.exists(os.path.join(test_dir, 'yotta-shrinkwrap.json'))) util.rmRf(test_dir) def testMissingDependenciesShrinkwrap(self): test_dir = util.writeTestFiles(Test_Shrinkwrap_Missing_Dependency, True) stdout, stderr, statuscode = cli.run(['-t', Test_Target, '--plain', 'shrinkwrap'], cwd=test_dir) self.assertNotEqual(statuscode, 0) self.assertFalse(os.path.exists(os.path.join(test_dir, 'yotta-shrinkwrap.json'))) self.assertIn('is missing', stdout+stderr) util.rmRf(test_dir) def testInstallWithShrinkwrap(self): test_dir = util.writeTestFiles(Test_Existing_Shrinkwrap_Missing_Dependency, True) stdout, stderr, statuscode = cli.run(['-t', Test_Target, '--plain', 'install'], cwd=test_dir) self.assertEqual(statuscode, 0) stdout, stderr, statuscode = cli.run(['-t', Test_Target, '--plain', 'list'], cwd=test_dir) self.assertEqual(statuscode, 0) # as opposed to 0.0.2 which is the latest self.assertIn('test-testing-dummy 0.0.1', stdout+stderr) stdout, stderr, statuscode = cli.run(['-t', Test_Target, '--plain', 'target'], cwd=test_dir) self.assertEqual(statuscode, 0) self.assertIn('%s %s' % (Test_Target_Name, Test_Target_Old_Version), stdout+stderr) util.rmRf(test_dir) def testBaseTargetInstallWithShrinkwrap(self): test_dir = util.writeTestFiles(Test_Existing_Shrinkwrap, True) stdout, stderr, statuscode = cli.run(['-t', 'inherits-from-test-target', '--plain', 'install'], cwd=test_dir) self.assertEqual(statuscode, 0) stdout, stderr, statuscode = cli.run(['-t', 'inherits-from-test-target', '--plain', 'target'], cwd=test_dir) self.assertEqual(statuscode, 0) self.assertIn('%s %s' % (Test_Target_Name, Test_Target_Old_Version), stdout+stderr) util.rmRf(test_dir) def testUpdateWithShrinkwrap(self): test_dir = util.writeTestFiles(Test_Existing_Shrinkwrap, True) stdout, stderr, statuscode = cli.run(['-t', Test_Target, '--plain', 'update'], cwd=test_dir) self.assertEqual(statuscode, 0) stdout, stderr, statuscode = cli.run(['-t', Test_Target, '--plain', 'list'], cwd=test_dir) self.assertEqual(statuscode, 0) # as opposed to 0.0.2 which is the latest self.assertIn('test-testing-dummy 0.0.1', stdout+stderr) util.rmRf(test_dir)
class TestCLIShrinkwrap(unittest.TestCase): def testCreateShrinkwrap(self): pass def testMissingDependenciesShrinkwrap(self): pass def testInstallWithShrinkwrap(self): pass def testBaseTargetInstallWithShrinkwrap(self): pass def testUpdateWithShrinkwrap(self): pass
6
0
10
2
8
0
1
0.05
1
0
0
0
5
0
5
77
57
14
41
16
35
2
41
16
35
1
2
0
5
2,642
ARMmbed/yotta
ARMmbed_yotta/yotta/test/cli/test_target.py
yotta.test.cli.test_target.TestCLITarget
class TestCLITarget(unittest.TestCase): def setUp(self): self.test_dir = tempfile.mkdtemp() with open(os.path.join(self.test_dir, 'module.json'), 'w') as f: f.write(Test_Module_JSON) def tearDown(self): rmRf(self.test_dir) def test_setTarget(self): rmRf(os.path.join(self.test_dir, '.yotta.json')) stdout = self.runCheckCommand(['target', 'testtarget', '-g', '-n']) stdout = self.runCheckCommand(['target']) self.assertTrue(stdout.find('testtarget') != -1) stdout = self.runCheckCommand(['target', 'x86-linux-native', '-g']) if os.name == 'posix': # check that the settings file was created with the right permissions self.assertFalse( os.stat(os.path.join(os.path.expanduser('~'), '.yotta', 'config.json')).st_mode & Check_Not_Stat ) def test_setTargetLocal(self): stdout = self.runCheckCommand(['target', 'testtarget', '-n']) stdout = self.runCheckCommand(['target']) self.assertTrue(stdout.find('testtarget') != -1) stdout = self.runCheckCommand(['target', 'x86-linux-native']) if os.name == 'posix': # check that the settings file was created with the right permissions self.assertFalse( os.stat(os.path.join(self.test_dir, '.yotta.json')).st_mode & Check_Not_Stat ) def test_setNonexistentTarget(self): stdout, stderr, statuscode = cli.run(['target', 'thisdoesnotexist'], cwd=self.test_dir) self.assertNotEqual(statuscode, 0) self.assertNotIn('Exception', stdout+stderr) self.assertIn('does not exist in the targets registry', stdout+stderr) def runCheckCommand(self, args): stdout, stderr, statuscode = cli.run(args, cwd=self.test_dir) self.assertEqual(statuscode, 0) return stdout or stderr
class TestCLITarget(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def test_setTarget(self): pass def test_setTargetLocal(self): pass def test_setNonexistentTarget(self): pass def runCheckCommand(self, args): pass
7
0
6
0
6
0
1
0.06
1
0
0
0
6
1
6
78
42
5
35
13
28
2
31
12
24
2
2
1
8
2,643
ARMmbed/yotta
ARMmbed_yotta/yotta/test/cli/test_test.py
yotta.test.cli.test_test.TestCLITestGenerated
class TestCLITestGenerated(TestCLITest): @classmethod def setUpClass(cls): cls.test_dir = tempfile.mkdtemp() + 'spaces in path' @classmethod def tearDownClass(cls): util.rmRf(cls.test_dir)
class TestCLITestGenerated(TestCLITest): @classmethod def setUpClass(cls): pass @classmethod def tearDownClass(cls): pass
5
0
2
0
2
0
1
0
1
0
0
0
0
0
2
81
8
1
7
5
2
0
5
3
2
1
3
0
2
2,644
ARMmbed/yotta
ARMmbed_yotta/yotta/test/cli/test_update.py
yotta.test.cli.test_update.TestCLIUpdate
class TestCLIUpdate(unittest.TestCase): def test_update(self): path = util.writeTestFiles(Test_Outdated, True) stdout, stderr, statuscode = cli.run(['-t', 'x86-linux-native', 'update'], cwd=path) self.assertEqual(statuscode, 0) self.assertIn('download test-testing-dummy', stdout + stderr) util.rmRf(path) def test_updateExplicit(self): path = util.writeTestFiles(Test_Outdated, True) stdout, stderr, statuscode = cli.run(['-t', 'x86-linux-native', 'update', 'test-testing-dummy'], cwd=path) self.assertEqual(statuscode, 0) self.assertIn('download test-testing-dummy', stdout + stderr) util.rmRf(path) def test_updateNothing(self): path = util.writeTestFiles(Test_Outdated, True) stdout, stderr, statuscode = cli.run(['-t', 'x86-linux-native', 'up'], cwd=path) self.assertEqual(statuscode, 0) self.assertIn('download test-testing-dummy', stdout + stderr) stdout, stderr, statuscode = cli.run(['-t', 'x86-linux-native', 'up'], cwd=path) self.assertEqual(statuscode, 0) self.assertNotIn('download test-testing-dummy', stdout + stderr) util.rmRf(path)
class TestCLIUpdate(unittest.TestCase): def test_update(self): pass def test_updateExplicit(self): pass def test_updateNothing(self): pass
4
0
9
2
7
0
1
0
1
0
0
0
3
0
3
75
31
9
22
10
18
0
22
10
18
1
2
0
3
2,645
ARMmbed/yotta
ARMmbed_yotta/yotta/test/cli/test_version.py
yotta.test.cli.test_version.TestCLIVersion
class TestCLIVersion(unittest.TestCase): def setUp(self): self.test_dir = tempfile.mkdtemp() with open(os.path.join(self.test_dir, 'module.json'), 'w') as f: f.write(Test_Module_JSON) def tearDown(self): rmRf(self.test_dir) def test_displayVersion(self): stdout = self.runCheckCommand(['version']) self.assertTrue(stdout.find('0.0.0') != -1) def test_bumpVersion(self): stdout = self.runCheckCommand(['version', 'patch']) stdout = self.runCheckCommand(['version']) self.assertTrue(stdout.find('0.0.1') != -1) stdout = self.runCheckCommand(['version', 'major']) stdout = self.runCheckCommand(['version']) self.assertTrue(stdout.find('1.0.0') != -1) stdout = self.runCheckCommand(['version', 'minor']) stdout = self.runCheckCommand(['version']) self.assertTrue(stdout.find('1.1.0') != -1) stdout = self.runCheckCommand(['version', '1.2.3-alpha1']) stdout = self.runCheckCommand(['version']) self.assertTrue(stdout.find('1.2.3-alpha1') != -1) def test_prePostVersion(self): stdout = self.runCheckCommand(['version', 'patch']) self.assertIn('pre-version!', stdout) self.assertIn('post-version!', stdout) def test_preVersionPreventsBump(self): with open(os.path.join(self.test_dir, 'module.json'), 'w') as f: f.write(Test_PreventVersion_JSON) stdout, stderr, statuscode = cli.run(['version', '1.2.3'], cwd=self.test_dir) self.assertNotEqual(statuscode, 0) stdout = self.runCheckCommand(['version']) self.assertNotIn('1.2.3', stdout) def runCheckCommand(self, args): stdout, stderr, statuscode = cli.run(args, cwd=self.test_dir) self.assertEqual(statuscode, 0) return stdout or stderr
class TestCLIVersion(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def test_displayVersion(self): pass def test_bumpVersion(self): pass def test_prePostVersion(self): pass def test_preVersionPreventsBump(self): pass def runCheckCommand(self, args): pass
8
0
6
0
5
0
1
0
1
0
0
0
7
1
7
79
48
10
38
16
30
0
38
14
30
1
2
1
7
2,646
ARMmbed/yotta
ARMmbed_yotta/yotta/test/test_components.py
yotta.test.test_components.ComponentTestCase
class ComponentTestCase(unittest.TestCase): def setUp(self): self.test_dir = tempfile.mkdtemp() def tearDown(self): rmRf(self.test_dir) def test_creation(self): # test things about components that don't (and shouldn't) require # hitting the network with open(os.path.join(self.test_dir, 'module.json'), 'w') as f: f.write(test_json) c = component.Component(self.test_dir) self.assertTrue(c) self.assertEqual(c.getName(), 'something') self.assertEqual(str(c.getVersion()), '0.0.7') deps = c.getDependencies() self.assertEqual(list(deps.keys()), deps_in_order) test_deps = c.getDependencies(test=True) self.assertEqual(list(test_deps.keys()), test_deps_in_order)
class ComponentTestCase(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def test_creation(self): pass
4
0
7
2
5
1
1
0.13
1
3
1
0
3
1
3
75
27
9
16
9
12
2
16
8
12
1
2
1
3
2,647
ARMmbed/yotta
ARMmbed_yotta/yotta/test/test_config.py
yotta.test.test_config.ConfigTest
class ConfigTest(unittest.TestCase): def setUp(self): self.restore_cwd = os.getcwd() def tearDown(self): os.chdir(self.restore_cwd) def test_targetConfigMerge(self): test_dir = util.writeTestFiles(Test_Target_Config_Merge, True) os.chdir(test_dir) c = validate.currentDirectoryModule() target, errors = c.satisfyTarget('bar,') merged_config = target.getMergedConfig() self.assertIn("foo", merged_config) self.assertIn("bar", merged_config) self.assertIn("a", merged_config['foo']) self.assertIn("b", merged_config['foo']) self.assertIn("c", merged_config['foo']) self.assertEqual(merged_config['foo']['a'], 321) self.assertEqual(merged_config['foo']['b'], 456) self.assertEqual(merged_config['foo']['c'], 789) self.assertIn("bar", merged_config) self.assertIn("d", merged_config['bar']) self.assertEqual(merged_config['bar']['d'], "def") os.chdir(self.restore_cwd) util.rmRf(test_dir) def test_targetAppConfigMerge(self): test_dir = util.writeTestFiles(Test_Target_Config_Merge_App, True) os.chdir(test_dir) c = validate.currentDirectoryModule() target, errors = c.satisfyTarget('bar,') merged_config = target.getMergedConfig() self.assertIn("foo", merged_config) self.assertIn("bar", merged_config) self.assertIn("new", merged_config) self.assertIn("a", merged_config['foo']) self.assertIn("b", merged_config['foo']) self.assertIn("c", merged_config['foo']) self.assertEqual(merged_config['foo']['a'], 321) self.assertEqual(merged_config['foo']['b'], 456) self.assertEqual(merged_config['foo']['c'], 112233) self.assertIn("bar", merged_config) self.assertIn("d", merged_config['bar']) self.assertEqual(merged_config['bar']['d'], "ghi") self.assertIn("new", merged_config) self.assertEqual(merged_config['new'], 123) os.chdir(self.restore_cwd) util.rmRf(test_dir) def test_moduleConfigIgnored(self): test_dir = util.writeTestFiles(Test_Module_Config_Ignored, True) os.chdir(test_dir) c = validate.currentDirectoryModule() target, errors = c.satisfyTarget('bar,') merged_config = target.getMergedConfig() self.assertNotIn("new", merged_config) os.chdir(self.restore_cwd) util.rmRf(test_dir)
class ConfigTest(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def test_targetConfigMerge(self): pass def test_targetAppConfigMerge(self): pass def test_moduleConfigIgnored(self): pass
6
0
13
2
11
0
1
0
1
0
0
0
5
1
5
77
68
13
55
19
49
0
55
19
49
1
2
0
5
2,648
ARMmbed/yotta
ARMmbed_yotta/yotta/test/test_git_access.py
yotta.test.test_git_access.TestGitAccess
class TestGitAccess(unittest.TestCase): def setUp(self): ensureGitConfig() vs = sourceparse.parseSourceURL(Test_Repo) self.remote_component = git_access.GitComponent.createFromSource(vs, Test_Name) self.assertTrue(self.remote_component) self.working_copy = self.remote_component.clone() self.assertTrue(self.working_copy) def tearDown(self): fsutils.rmRf(self.working_copy.directory) def test_availableVersions(self): versions = self.working_copy.availableVersions() self.assertIn(version.Version('v0.0.1'), versions) def test_versionSpec(self): vs = sourceparse.parseSourceURL(Test_Repo_With_Spec) spec = git_access.GitComponent.createFromSource(vs, Test_Name).versionSpec() v = spec.select(self.working_copy.availableVersions()) self.assertTrue(v) def test_installDeps(self): Args = namedtuple('Args', ['component', 'target', 'act_globally', 'install_linked', 'install_test_deps', 'config']) install.installComponent(Args(Test_Deps_Name, Test_Deps_Target, False, False, 'own', {}))
class TestGitAccess(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def test_availableVersions(self): pass def test_versionSpec(self): pass def test_installDeps(self): pass
6
0
4
0
4
0
1
0
1
2
2
0
5
2
5
77
25
4
21
14
15
0
21
14
15
1
2
0
5
2,649
ARMmbed/yotta
ARMmbed_yotta/yotta/test/cli/test_search.py
yotta.test.cli.test_search.TestCLISearch
class TestCLISearch(unittest.TestCase): def test_bothModule(self): stdout = self.runCheckCommand(['search', 'polyfill', '--short']) self.assertTrue(stdout.find('compiler-polyfill') != -1) def test_bothTarget(self): stdout = self.runCheckCommand(['search', 'frdm-k64f', '--short']) self.assertTrue(stdout.find('frdm-k64f-gcc') != -1) def test_both(self): stdout = self.runCheckCommand(['--plain', 'search', 'both', 'polyfill', '--short']) self.assertTrue(stdout.find('compiler-polyfill') != -1) def test_modules(self): stdout = self.runCheckCommand(['search', 'module', 'polyfill', '--short']) self.assertTrue(stdout.find('compiler-polyfill') != -1) def test_targets(self): stdout = self.runCheckCommand(['search', 'target', 'frdm-k64f', '--short']) self.assertTrue(stdout.find('frdm-k64f-gcc') != -1) def test_keywords(self): stdout = self.runCheckCommand(['search', 'module', 'polyfill', '-k', 'polyfill', '--short']) self.assertTrue(stdout.find('compiler-polyfill') != -1) def test_limit(self): stdout = self.runCheckCommand(['search', 'module', 'compiler-polyfill', '-l', '5', '-k', 'polyfill']) self.assertTrue(stdout.find('compiler-polyfill') != -1) def test_author(self): stdout = self.runCheckCommand(['search', 'module', 'compiler-polyfill', '-l', '1', '-k', 'polyfill']) self.assertTrue(stdout.find('james.crosby@arm.com') != -1) def test_keyword_display(self): stdout = self.runCheckCommand(['search', 'module', 'compiler-polyfill', '-l', '1', '-k', 'polyfill']) self.assertTrue(stdout.find('mbed-official') != -1) def runCheckCommand(self, args): stdout, stderr, statuscode = cli.run(args) if statuscode != 0: print('command failed with status %s' % statuscode) print(stdout) print(stderr) self.assertEqual(statuscode, 0) return stdout or stderr
class TestCLISearch(unittest.TestCase): def test_bothModule(self): pass def test_bothTarget(self): pass def test_both(self): pass def test_modules(self): pass def test_targets(self): pass def test_keywords(self): pass def test_limit(self): pass def test_author(self): pass def test_keyword_display(self): pass def runCheckCommand(self, args): pass
11
0
4
0
4
0
1
0
1
0
0
0
10
0
10
82
45
9
36
21
25
0
36
21
25
2
2
1
11
2,650
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/version.py
yotta.lib.version.Version
class Version(object): def __init__(self, version_string, url=None): ''' Wrap the semantic_version Version class so that we can represent 'tip' versions as well as specific versions, and store an optional URL that can represent the location from which we can retrieve this version. Also add some useful methods for manipulating versions. ''' super(Version, self).__init__() self.url = url version_string = str(version_string.strip()) # strip off leading v or = characters; these are permitted in npm's # semver, and npm tags versions as v1.2.3 self.version = None if version_string.startswith('v') or version_string.startswith('='): self.version = semantic_version.Version(version_string[1:], partial=False) elif not version_string: self.version = TipVersion() else: self.version = semantic_version.Version(version_string, partial=False) self.url = url def isTip(self): return isinstance(self.version, TipVersion) def major(self): assert(not isinstance(self.version, TipVersion)) return self.version.major def minor(self): assert(not isinstance(self.version, TipVersion)) return self.version.minor def patch(self): assert(not isinstance(self.version, TipVersion)) return self.version.patch def bump(self, bumptype): if isinstance(self.version, str): raise ValueError('cannot bump generic version "%s"' % self.version) if bumptype == 'major': self.version.major = self.version.major + 1 self.version.minor = 0 self.version.patch = 0 self.version.prerelease = '' self.version.build = '' elif bumptype == 'minor': self.version.minor = self.version.minor + 1 self.version.patch = 0 self.version.prerelease = '' self.version.build = '' elif bumptype == 'patch': self.version.patch = self.version.patch + 1 self.version.prerelease = '' self.version.build = '' else: raise ValueError('bumptype must be "major", "minor" or "patch"') self.version.prerelease = None self.version.build = None def truncate(self, level): return self.version.truncate(level) def __str__(self): return str(self.version) def __repr__(self): return 'Version(%s %s)' % (self.version, self.url) def __cmp__(self, other): # if the other is an unwrapped version (used within the Spec class) if isinstance(other, semantic_version.Version): other_is_specific_ver = True other_is_unwrapped = True elif not hasattr(other, 'version'): return NotImplemented else: other_is_specific_ver = isinstance(other.version, semantic_version.Version) other_is_unwrapped = False self_is_specific_ver = isinstance(self.version, semantic_version.Version) if isinstance(self.version, TipVersion) and other_is_specific_ver: return 1 elif (not other_is_unwrapped) and isinstance(other.version, TipVersion) and self_is_specific_ver: return -1 elif self_is_specific_ver and other_is_specific_ver: if other_is_unwrapped: return semantic_version.Version.__cmp__(self.version, other) else: return semantic_version.Version.__cmp__(self.version, other.version) elif isinstance(self.version, TipVersion) and isinstance(other.version, TipVersion): raise Exception('Comparing two "tip" versions is undefined') else: raise Exception('Unsupported version comparison: "%s" vs. "%s"' % (self.version, other.version)) def __eq__(self, other): return self.__cmp__(other) == 0 def __hash__(self): return hash(self.version) def __ne__(self, other): return self.__cmp__(other) != 0 def __lt__(self, other): return self.__cmp__(other) < 0 def __le__(self, other): return self.__cmp__(other) <= 0 def __gt__(self, other): return self.__cmp__(other) > 0 def __ge__(self, other): return self.__cmp__(other) >= 0
class Version(object):
    def __init__(self, version_string, url=None):
        ''' Wrap the semantic_version Version class so that we can represent
            'tip' versions as well as specific versions, and store an optional
            URL that can represent the location from which we can retrieve this
            version.

            Also add some useful methods for manipulating versions.
        '''
        pass

    def isTip(self):
        pass

    def major(self):
        pass

    def minor(self):
        pass

    def patch(self):
        pass

    def bump(self, bumptype):
        pass

    def truncate(self, level):
        pass

    def __str__(self):
        pass

    def __repr__(self):
        pass

    def __cmp__(self, other):
        pass

    def __eq__(self, other):
        pass

    def __hash__(self):
        pass

    def __ne__(self, other):
        pass

    def __lt__(self, other):
        pass

    def __le__(self, other):
        pass

    def __gt__(self, other):
        pass

    def __ge__(self, other):
        pass
18
1
6
0
5
1
2
0.1
1
5
1
3
17
2
17
17
115
17
89
23
71
9
77
23
59
8
1
2
30
2,651
ARMmbed/yotta
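The bump() method in the record above resets every lower-order field (and clears prerelease/build metadata) when a higher-order field is incremented. A minimal sketch of those semantics, using plain (major, minor, patch) tuples instead of semantic_version.Version; the function name here is illustrative, not part of yotta's API:

# Sketch of the version-bump semantics from yotta.lib.version.Version.bump():
# increment the requested part and zero everything below it.
def bump(version, bumptype):
    major, minor, patch = version
    if bumptype == 'major':
        return (major + 1, 0, 0)
    elif bumptype == 'minor':
        return (major, minor + 1, 0)
    elif bumptype == 'patch':
        return (major, minor, patch + 1)
    raise ValueError('bumptype must be "major", "minor" or "patch"')

assert bump((1, 2, 3), 'minor') == (1, 3, 0)
assert bump((1, 2, 3), 'major') == (2, 0, 0)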
ARMmbed_yotta/yotta/test/test_versions.py
yotta.test.test_versions.VersionTestCase
class VersionTestCase(unittest.TestCase):
    matches = {
        '>=0.1.1,<0.1.2': (
            ['v0.1.1', '0.1.1+4', '0.1.1-alpha'],
            ['0.1.2-alpha', '0.1.2', '1.3.4'],
        ),
        '>=0.1.0,!=0.1.3-rc1,<0.1.4': (
            # 0.1.0-alpha satisfies >=0.1.0, but is lower precedence than 0.1.0
            ['0.1.0-alpha', '0.1.1', 'v0.1.0+b4', '0.1.2', '0.1.3-rc2'],
            ['0.0.1', '0.1.4', '0.1.4-alpha', '0.1.3-rc1+4', 'v0.2.2', '0.2.2', '0.1.4-rc1']
        ),
        '^1.2.3': (
            ['1.2.3', '1.5.1'],
            ['1.2.2', '2.0.0-beta']
        ),
        '^0.1.2': (
            ['0.1.2'],
            ['0.1.3']
        ),
        '~1.2.3': (
            ['1.2.3', '1.2.4'],
            ['1.3.0-beta', '1.4.0']
        ),
        '>4.5.6': (
            ['4.5.7', ''],
            ['4.5.5', '4.5.6-a1', '4.5.6']
        ),
        '>=4.5.6': (
            ['4.5.7', '4.5.6', '4.5.6-a1', ''],
            ['4.5.5']
        ),
        '==0.1.7': (
            ['0.1.7', '0.1.7-a4'],
            ['0.1.6', '0.1.8', ''],
        ),
        '=0.1.7': (
            ['0.1.7', '0.1.7-a4'],
            ['0.1.6', '0.1.8', ''],
        ),
        '': (
            ['0.0.1', 'v0.1.4', '0.1.4-alpha', '0.1.3-rc1+4', '0.1.0-alpha', '0.2.2', '0.1.4-rc1', ''],
            []
        ),
        '*': (
            ['0.0.1', 'v0.1.4', '0.1.4-alpha', '0.1.3-rc1+4', '0.1.0-alpha', '0.2.2', '0.1.4-rc1', ''],
            []
        ),
    }

    def test_matches(self):
        for spec, (matching, failing) in self.matches.items():
            spec = version.Spec(spec)
            for v in [version.Version(v) for v in matching]:
                self.assertTrue(v in spec, "%r should be in %r" % (v, spec))
                self.assertTrue(spec.match(v), "%r should match %r" % (v, spec))
            for v in [version.Version(v) for v in failing]:
                self.assertFalse(v in spec, "%r should not be in %r" % (v, spec))
                self.assertFalse(spec.match(v), "%r should not match %r" % (v, spec))

    def test_hash(self):
        sets = [
            ['0.1.1', '==0.1.1', '=0.1.1'],
            ['', '*'],
        ]
        for s in [set([version.Spec(x) for x in l]) for l in sets]:
            self.assertEqual(1, len(s))
class VersionTestCase(unittest.TestCase):
    def test_matches(self):
        pass

    def test_hash(self):
        pass
3
0
15
1
14
0
3
0.01
1
3
2
0
2
0
2
74
82
4
77
8
74
1
15
8
12
4
2
2
6
2,652
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/version.py
yotta.lib.version.Spec
class Spec(semantic_version.Spec):
    def __init__(self, version_spec):
        if not version_spec:
            version_spec = '*'
        # add support for version specs that are unadorned versions, or a
        # single equals
        if re.match('^[0-9]', version_spec):
            version_spec = '==' + version_spec
        elif re.match('^=[0-9]', version_spec):
            version_spec = '=' + version_spec
        # add support for the ~ and ^ version specifiers:
        #   ~1.2.3 := >=1.2.3-0 <1.3.0-0
        #   ^1.2.3 := >=1.2.3-0 <2.0.0-0
        #   ^0.1.2 := 0.1.2 exactly (for 0.x.x versions)
        elif re.match('^\^', version_spec):
            v = semantic_version.Version(version_spec[1:])
            if v.major == 0:
                # for 0. releases, ^ means exact version only
                version_spec = '==' + str(v)
            else:
                v2 = Version(version_spec[1:])
                v2.bump('major')
                version_spec = '>=' + str(v) + ',<' + str(v2)
        elif re.match('^~', version_spec):
            v = semantic_version.Version(version_spec[1:])
            v2 = Version(version_spec[1:])
            v2.bump('minor')
            version_spec = '>=' + str(v) + ',<' + str(v2)
        super(Spec, self).__init__(version_spec)

    # base type contains function checks the type, so must replace it
    def __contains__(self, version):
        return self.match(version)
class Spec(semantic_version.Spec):
    def __init__(self, version_spec):
        pass

    def __contains__(self, version):
        pass
3
0
15
0
12
4
4
0.33
1
3
1
0
2
0
2
2
33
1
24
5
21
8
20
5
17
7
1
2
8
2,653
ARMmbed/yotta
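The Spec constructor above rewrites shorthand specifiers into the comparator ranges that semantic_version understands ('~1.2.3' becomes '>=1.2.3,<1.3.0'; '^1.2.3' becomes '>=1.2.3,<2.0.0'; '^0.x.y' pins the exact version). A standalone sketch of just that rewriting rule, limited to plain x.y.z versions (the real class also handles prerelease and build suffixes); rewrite_spec is an illustrative name, not yotta API:

import re

# Sketch of the spec-rewriting rules in Spec.__init__: returns the comparator
# string that would be handed to semantic_version.Spec.
def rewrite_spec(version_spec):
    if not version_spec:
        return '*'
    if re.match(r'^[0-9]', version_spec):      # bare version -> exact match
        return '==' + version_spec
    if re.match(r'^=[0-9]', version_spec):     # '=1.2.3' -> '==1.2.3'
        return '=' + version_spec
    m = re.match(r'^[\^~](\d+)\.(\d+)\.(\d+)$', version_spec)
    if m:
        major, minor, patch = (int(g) for g in m.groups())
        if version_spec[0] == '^':
            if major == 0:                     # ^0.x.y pins the exact version
                return '==%d.%d.%d' % (major, minor, patch)
            return '>=%d.%d.%d,<%d.0.0' % (major, minor, patch, major + 1)
        return '>=%d.%d.%d,<%d.%d.0' % (major, minor, patch, major, minor + 1)
    return version_spec

assert rewrite_spec('^1.2.3') == '>=1.2.3,<2.0.0'
assert rewrite_spec('~1.2.3') == '>=1.2.3,<1.3.0'
assert rewrite_spec('^0.1.2') == '==0.1.2'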
ARMmbed_yotta/yotta/test/cli/test_unlink.py
yotta.test.cli.test_unlink.TestCLIUnLink
class TestCLIUnLink(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.prefix_dir = tempfile.mkdtemp()
        os.environ['YOTTA_PREFIX'] = cls.prefix_dir

    @classmethod
    def tearDownClass(cls):
        util.rmRf(cls.prefix_dir)
        cls.prefix_dir = None

    def testUnlinkNonexistentModule(self):
        test_module = util.writeTestFiles(util.Test_Testing_Trivial_Lib_Dep, True)
        stdout, stderr, statuscode = cli.run(['-t', Test_Target, '--plain', 'unlink', 'doesnotexist'], cwd=test_module)
        self.assertNotEqual(statuscode, 0)
        util.rmRf(test_module)

    def testUnlinkNonexistentTarget(self):
        test_module = util.writeTestFiles(util.Test_Testing_Trivial_Lib_Dep, True)
        stdout, stderr, statuscode = cli.run(['-t', Test_Target, '--plain', 'unlink-target', 'doesnotexist'], cwd=test_module)
        self.assertNotEqual(statuscode, 0)
        util.rmRf(test_module)

    def testUnlinkNotLinkedModuleGlobally(self):
        test_module = util.writeTestFiles(util.Test_Testing_Trivial_Lib_Dep, True)
        stdout, stderr, statuscode = cli.run(['-t', Test_Target, '--plain', 'unlink'], cwd=test_module)
        self.assertNotEqual(statuscode, 0)
        util.rmRf(test_module)

    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet")
    def testUnlinkNotLinkedTargetGlobally(self):
        test_target = util.writeTestFiles(util.getNativeTargetDescription(), True)
        stdout, stderr, statuscode = cli.run(['-t', Test_Target, '--plain', 'unlink'], cwd=test_target)
        self.assertNotEqual(statuscode, 0)
        util.rmRf(test_target)

    def testUnlinkModuleGlobally(self):
        test_module = util.writeTestFiles(util.Test_Testing_Trivial_Lib_Dep, True)
        stdout, stderr, statuscode = cli.run(['-t', Test_Target, '--plain', 'link'], cwd=test_module)
        self.assertEqual(statuscode, 0)
        stdout, stderr, statuscode = cli.run(['-t', Test_Target, '--plain', 'unlink'], cwd=test_module)
        self.assertEqual(statuscode, 0)
        util.rmRf(test_module)

    def testUnlinkTargetGlobally(self):
        test_target = util.writeTestFiles(util.getNativeTargetDescription(), True)
        stdout, stderr, statuscode = cli.run(['-t', Test_Target, '--plain', 'link-target'], cwd=test_target)
        self.assertEqual(statuscode, 0)
        stdout, stderr, statuscode = cli.run(['-t', Test_Target, '--plain', 'unlink-target'], cwd=test_target)
        self.assertEqual(statuscode, 0)
        util.rmRf(test_target)

    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet")
    def testUnlinkModule(self):
        linked_in_module = util.writeTestFiles(util.Test_Trivial_Lib, True)
        test_module = util.writeTestFiles(util.Test_Testing_Trivial_Lib_Dep, True)
        stdout, stderr, statuscode = cli.run(['-t', util.nativeTarget(), '--plain', 'link'], cwd=linked_in_module)
        self.assertEqual(statuscode, 0)
        stdout, stderr, statuscode = cli.run(['-t', util.nativeTarget(), '--plain', 'link', 'test-trivial-lib'], cwd=test_module)
        self.assertEqual(statuscode, 0)
        self.assertTrue(os.path.exists(os.path.join(test_module, 'yotta_modules', 'test-trivial-lib')))
        stdout, stderr, statuscode = cli.run(['-t', util.nativeTarget(), '--plain', 'unlink', 'test-trivial-lib'], cwd=test_module)
        self.assertEqual(statuscode, 0)
        self.assertTrue(not os.path.exists(os.path.join(test_module, 'yotta_modules', 'test-trivial-lib')))
        util.rmRf(test_module)
        util.rmRf(linked_in_module)

    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on this platform yet")
    def testUnlinkTarget(self):
        linked_in_target = util.writeTestFiles(util.getNativeTargetDescription(), True)
        test_module = util.writeTestFiles(util.Test_Testing_Trivial_Lib_Dep_Preinstalled, True)
        stdout, stderr, statuscode = cli.run(['-t', 'test-native-target', '--plain', 'link-target'], cwd=linked_in_target)
        self.assertEqual(statuscode, 0)
        stdout, stderr, statuscode = cli.run(['-t', 'test-native-target', '--plain', 'link-target', 'test-native-target'], cwd=test_module)
        self.assertEqual(statuscode, 0)
        self.assertTrue(os.path.exists(os.path.join(test_module, 'yotta_targets', 'test-native-target')))
        stdout, stderr, statuscode = cli.run(['-t', 'test-native-target', '--plain', 'unlink-target', 'test-native-target'], cwd=test_module)
        self.assertEqual(statuscode, 0)
        self.assertTrue(not os.path.exists(os.path.join(test_module, 'yotta_targets', 'test-native-target')))
        util.rmRf(test_module)
        util.rmRf(linked_in_target)
class TestCLIUnLink(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    def testUnlinkNonexistentModule(self):
        pass

    def testUnlinkNonexistentTarget(self):
        pass

    def testUnlinkNotLinkedModuleGlobally(self):
        pass

    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet")
    def testUnlinkNotLinkedTargetGlobally(self):
        pass

    def testUnlinkModuleGlobally(self):
        pass

    def testUnlinkTargetGlobally(self):
        pass

    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet")
    def testUnlinkModule(self):
        pass

    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on this platform yet")
    def testUnlinkTarget(self):
        pass
16
0
7
0
7
0
1
0
1
0
0
0
8
0
10
82
85
13
72
34
56
0
67
29
56
1
2
0
10
2,654
ARMmbed/yotta
ARMmbed_yotta/yotta/test/test_github_access.py
yotta.test.test_github_access.TestGitHubAccess
class TestGitHubAccess(unittest.TestCase):
    def setUp(self):
        pass

    def tearDown(self):
        pass

    @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)")
    def test_installDeps(self):
        Args = namedtuple('Args', ['component', 'target', 'act_globally', 'install_linked', 'install_test_deps', 'config'])
        install.installComponent(Args(Test_Deps_Name, Test_Deps_Target, False, False, 'own', {}))

    @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)")
    def test_branchAccess(self):
        Args = namedtuple('Args', ['component', 'target', 'act_globally', 'install_linked', 'install_test_deps', 'config'])
        install.installComponent(Args(Test_Branch_Name, Test_Deps_Target, False, False, 'own', {}))
class TestGitHubAccess(unittest.TestCase):
    def setUp(self):
        pass

    def tearDown(self):
        pass

    @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)")
    def test_installDeps(self):
        pass

    @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)")
    def test_branchAccess(self):
        pass
7
0
3
0
3
0
1
0
1
0
0
0
4
0
4
76
16
3
13
9
6
0
11
7
6
1
2
0
4
2,655
ARMmbed/yotta
ARMmbed_yotta/yotta/test/test_ignores.py
yotta.test.test_ignores.TestPackIgnores
class TestPackIgnores(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.test_dir = util.writeTestFiles(Test_Files)

    @classmethod
    def tearDownClass(cls):
        util.rmRf(cls.test_dir)

    def test_absolute_ignores(self):
        c = component.Component(self.test_dir)
        self.assertTrue(c.ignores('moo'))
        self.assertTrue(c.ignores('test/foo/ignored.c'))

    def test_glob_ignores(self):
        c = component.Component(self.test_dir)
        self.assertTrue(c.ignores('a/b/c/test.txt'))
        self.assertTrue(c.ignores('a/b/test.txt'))
        self.assertTrue(c.ignores('a/b/test.c'))
        self.assertTrue(c.ignores('source/a/b/c/test.txt'))
        self.assertTrue(c.ignores('source/a/b/test.txt'))
        self.assertTrue(c.ignores('source/a/b/test.c'))

    def test_relative_ignores(self):
        c = component.Component(self.test_dir)
        self.assertTrue(c.ignores('a/b/c/d/e/f/test.txt'))
        self.assertTrue(c.ignores('a/b/test.txt'))
        self.assertTrue(c.ignores('source/a/b/c/d/e/f/test.txt'))
        self.assertTrue(c.ignores('source/a/b/test.txt'))
        self.assertTrue(c.ignores('test/anothertest/ignoredbyfname.c'))
        self.assertTrue(c.ignores('test/someothertest/alsoignored.c'))

    def test_default_ignores(self):
        default_test_dir = util.writeTestFiles(Default_Test_Files)
        c = component.Component(default_test_dir)
        self.assertTrue(c.ignores('.something.c.swp'))
        self.assertTrue(c.ignores('.something.c~'))
        self.assertTrue(c.ignores('path/to/.something.c.swm'))
        self.assertTrue(c.ignores('path/to/.something.c~'))
        self.assertTrue(c.ignores('.DS_Store'))
        self.assertTrue(c.ignores('.git'))
        self.assertTrue(c.ignores('.hg'))
        self.assertTrue(c.ignores('.svn'))
        self.assertTrue(c.ignores('yotta_modules'))
        self.assertTrue(c.ignores('yotta_targets'))
        self.assertTrue(c.ignores('build'))
        self.assertTrue(c.ignores('.yotta.json'))
        util.rmRf(default_test_dir)

    def test_comments(self):
        c = component.Component(self.test_dir)
        self.assertFalse(c.ignores('comment'))

    @unittest.skipIf(isWindows(), "can't build natively on windows yet")
    def test_build(self):
        stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'clean'], self.test_dir)
        stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'build'], self.test_dir)
        self.assertNotIn('ignoredbyfname', stdout)
        self.assertNotIn('someothertest', stdout)
        self.assertNotIn('sometest', stdout)

    @unittest.skipIf(isWindows(), "can't build natively on windows yet")
    def test_test(self):
        stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'clean'], self.test_dir)
        stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'test'], self.test_dir)
        self.assertNotIn('ignoredbyfname', stdout)
        self.assertNotIn('someothertest', stdout)
        self.assertNotIn('sometest', stdout)

    def runCheckCommand(self, args, test_dir):
        stdout, stderr, statuscode = cli.run(args, cwd=self.test_dir)
        if statuscode != 0:
            print('command failed with status %s' % statuscode)
            print(stdout)
            print(stderr)
        self.assertEqual(statuscode, 0)
        return stdout or stderr
class TestPackIgnores(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    def test_absolute_ignores(self):
        pass

    def test_glob_ignores(self):
        pass

    def test_relative_ignores(self):
        pass

    def test_default_ignores(self):
        pass

    def test_comments(self):
        pass

    @unittest.skipIf(isWindows(), "can't build natively on windows yet")
    def test_build(self):
        pass

    @unittest.skipIf(isWindows(), "can't build natively on windows yet")
    def test_test(self):
        pass

    def runCheckCommand(self, args, test_dir):
        pass
15
0
6
0
6
0
1
0
1
1
1
0
8
0
10
82
78
10
68
24
53
0
64
20
53
2
2
1
11
2,656
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/logging_setup.py
yotta.lib.logging_setup.FancyFormatter
class FancyFormatter(logging.Formatter): #pylint: disable=no-member
    def __init__(self):
        super(FancyFormatter, self).__init__()

    def levelStyle(self, record):
        if record.levelno <= logging.DEBUG:
            return colorama.Style.DIM + colorama.Fore.RESET #pylint: disable=no-member
        elif record.levelno >= logging.CRITICAL:
            return colorama.Style.BRIGHT + colorama.Fore.RED #pylint: disable=no-member
        elif record.levelno >= logging.ERROR:
            return colorama.Style.BRIGHT + colorama.Fore.RED #pylint: disable=no-member
        elif record.levelno >= logging.WARNING:
            return colorama.Style.BRIGHT + colorama.Fore.YELLOW #pylint: disable=no-member
        return colorama.Style.NORMAL + colorama.Fore.GREEN #pylint: disable=no-member

    def messageStyle(self, record):
        if record.levelno <= logging.DEBUG:
            return colorama.Style.DIM + colorama.Fore.RESET #pylint: disable=no-member
        elif record.levelno >= logging.CRITICAL:
            return colorama.Style.BRIGHT + colorama.Fore.RED #pylint: disable=no-member
        elif record.levelno >= logging.ERROR:
            return colorama.Style.NORMAL + colorama.Fore.RED #pylint: disable=no-member
        elif record.levelno >= logging.WARNING:
            return colorama.Style.NORMAL + colorama.Fore.YELLOW #pylint: disable=no-member
        return colorama.Style.NORMAL + colorama.Fore.RESET #pylint: disable=no-member

    def format(self, record):
        s = ''
        s += self.levelStyle(record)
        s += record.levelname.lower()
        s += colorama.Fore.RESET + ':' #pylint: disable=no-member
        if record.levelno <= logging.DEBUG:
            s += record.name + ': '
        else:
            s += ' '
        s += self.messageStyle(record)
        s += record.getMessage()
        s += colorama.Style.RESET_ALL #pylint: disable=no-member
        return s
class FancyFormatter(logging.Formatter):
    def __init__(self):
        pass

    def levelStyle(self, record):
        pass

    def messageStyle(self, record):
        pass

    def format(self, record):
        pass
5
0
9
0
9
3
3
0.36
1
1
0
0
4
0
4
11
41
4
36
6
31
13
29
6
24
5
2
1
13
2,657
ARMmbed/yotta
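FancyFormatter colours the level name and message independently according to severity. A simplified, self-contained sketch of the same pattern, showing how such a formatter is attached to a handler (MiniColourFormatter and the 'demo' logger name are illustrative):

import logging
import colorama  # third-party; pip install colorama

colorama.init()  # enable ANSI colours on Windows consoles

class MiniColourFormatter(logging.Formatter):
    # Simplified cousin of FancyFormatter: colour by severity only.
    def format(self, record):
        if record.levelno >= logging.ERROR:
            colour = colorama.Style.BRIGHT + colorama.Fore.RED
        elif record.levelno >= logging.WARNING:
            colour = colorama.Style.BRIGHT + colorama.Fore.YELLOW
        else:
            colour = colorama.Style.NORMAL + colorama.Fore.GREEN
        return '%s%s%s: %s' % (colour, record.levelname.lower(),
                               colorama.Style.RESET_ALL, record.getMessage())

handler = logging.StreamHandler()
handler.setFormatter(MiniColourFormatter())
logging.getLogger('demo').addHandler(handler)
logging.getLogger('demo').warning('this line is yellow')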
ARMmbed_yotta/yotta/lib/lazyregex.py
yotta.lib.lazyregex.ReCompileProxy
class ReCompileProxy(object):
    def __init__(self, *args, **kwargs):
        self._args = args
        self._kwargs = kwargs
        self._real_obj = None

    def __getattribute__(self, name):
        if object.__getattribute__(self, '_real_obj') is None:
            self._real_obj = _original_re_compile(
                *object.__getattribute__(self, '_args'),
                **object.__getattribute__(self, '_kwargs')
            )
            self._args = None
            self._kwargs = None
        return getattr(object.__getattribute__(self, '_real_obj'), name)
class ReCompileProxy(object):
    def __init__(self, *args, **kwargs):
        pass

    def __getattribute__(self, name):
        pass
3
0
7
0
7
0
2
0
1
0
0
0
2
3
2
2
15
1
14
6
11
0
11
6
8
2
1
1
3
2,658
ARMmbed/yotta
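ReCompileProxy defers the actual re.compile call until the compiled pattern is first used, which trims startup cost when many patterns are declared but few are exercised. A minimal sketch of the same lazy-compile idea, using __getattr__ rather than the original's __getattribute__ override (LazyPattern is an illustrative name):

import re

_original_re_compile = re.compile

class LazyPattern(object):
    # Defer the comparatively expensive re.compile until first use.
    def __init__(self, *args, **kwargs):
        self._args, self._kwargs = args, kwargs
        self._compiled = None

    def __getattr__(self, name):
        # only reached for attributes that don't exist on the proxy itself,
        # i.e. the compiled pattern's methods (match, search, ...)
        if self._compiled is None:
            self._compiled = _original_re_compile(*self._args, **self._kwargs)
        return getattr(self._compiled, name)

pat = LazyPattern(r'\d+')   # nothing compiled yet
assert pat.match('42')      # first attribute access triggers compilation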
ARMmbed_yotta/yotta/lib/hg_access.py
yotta.lib.hg_access.HGComponent
class HGComponent(access_common.RemoteComponent):
    def __init__(self, url, version_spec=''):
        self.url = url
        self.spec = version.Spec(version_spec)

    @classmethod
    def createFromSource(cls, vs, name=None):
        ''' returns a hg component for any hg:// url, or None if this is not
            a hg component.

            Normally version will be empty, unless the original url was of the
            form 'hg+ssh://...#version', which can be used to grab a particular
            tagged version.
        '''
        # strip hg of the url scheme:
        if vs.location.startswith('hg+'):
            location = vs.location[3:]
        else:
            location = vs.location
        return HGComponent(location, vs.spec)

    def versionSpec(self):
        return self.spec

    # clone the remote repository: this is necessary to find out what tagged
    # versions are available.
    # The clone is created in /tmp, and is not automatically deleted, but the
    # returned version object maintains a handle to it, so that when a specific
    # version is requested it can be retrieved from the temporary clone,
    # instead of from the remote origin.
    def clone(self):
        clone = vcs.HG.cloneToTemporaryDir(self.url)
        return HGWorkingCopy(clone)

    @classmethod
    def remoteType(cls):
        return 'hg'
class HGComponent(access_common.RemoteComponent):
    def __init__(self, url, version_spec=''):
        pass

    @classmethod
    def createFromSource(cls, vs, name=None):
        ''' returns a hg component for any hg:// url, or None if this is not
            a hg component.

            Normally version will be empty, unless the original url was of the
            form 'hg+ssh://...#version', which can be used to grab a particular
            tagged version.
        '''
        pass

    def versionSpec(self):
        pass

    def clone(self):
        pass

    @classmethod
    def remoteType(cls):
        pass
8
1
5
0
3
1
1
0.68
1
3
3
0
3
2
5
10
37
5
19
12
11
13
16
10
10
2
2
1
6
2,659
ARMmbed/yotta
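createFromSource above accepts URLs in the 'hg+<scheme>://...' form and strips the 'hg+' prefix before cloning. A tiny sketch of that rule in isolation (strip_hg_scheme and the example hosts are illustrative):

# 'hg+ssh://host/repo' becomes 'ssh://host/repo'; plain URLs pass through.
def strip_hg_scheme(location):
    return location[3:] if location.startswith('hg+') else location

assert strip_hg_scheme('hg+ssh://example.com/repo') == 'ssh://example.com/repo'
assert strip_hg_scheme('ssh://example.com/repo') == 'ssh://example.com/repo'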
ARMmbed_yotta/yotta/lib/hg_access.py
yotta.lib.hg_access.HGCloneVersion
class HGCloneVersion(version.Version):
    def __init__(self, tag, working_copy):
        self.working_copy = working_copy
        self.tag = tag
        super(HGCloneVersion, self).__init__(tag)

    def unpackInto(self, directory):
        logger.debug('unpack version %s from hg repo %s to %s' % (self.version, self.working_copy.directory, directory))
        if self.isTip():
            tag = None
        else:
            tag = self.tag
        fsutils.rmRf(directory)
        vcs.HG.cloneToDirectory(self.working_copy.directory, directory, tag)
        # remove temporary files created by the HGWorkingCopy clone
        self.working_copy.remove()
class HGCloneVersion(version.Version):
    def __init__(self, tag, working_copy):
        pass

    def unpackInto(self, directory):
        pass
3
0
8
1
7
1
2
0.07
1
2
1
0
2
2
2
19
17
2
14
6
11
1
13
6
10
2
2
1
3
2,660
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/github_access.py
yotta.lib.github_access.GithubComponentVersion
class GithubComponentVersion(access_common.RemoteVersion):
    def __init__(self, semver, tag, url, name, cache_key=None):
        # if cache key is None, then we won't cache this version
        self.cache_key = cache_key
        self.tag = tag
        github_spec = re.search('/(repos|codeload.github.com)/([^/]*/[^/]*)/', url).group(2)
        self.origin_info = {
            'url': ('github://'+github_spec+'#'+(semver or tag))
        }
        super(GithubComponentVersion, self).__init__(
            semver, url, name=name, friendly_version=(semver or tag), friendly_source=('GitHub %s' % github_spec)
        )

    def unpackInto(self, directory):
        assert(self.url)
        _getTarball(
            self.url, directory, self.cache_key, origin_info=self.origin_info
        )
class GithubComponentVersion(access_common.RemoteVersion):
    def __init__(self, semver, tag, url, name, cache_key=None):
        pass

    def unpackInto(self, directory):
        pass
3
0
8
0
8
1
1
0.13
1
1
0
0
2
4
2
24
18
1
16
8
13
2
10
7
7
1
3
0
2
2,661
ARMmbed/yotta
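The constructor above derives the 'owner/repo' spec from the tarball URL with a regular expression. A quick demonstration of that exact regex (the URL below is illustrative, not taken from the record):

import re

url = 'https://api.github.com/repos/ARMmbed/yotta/tarball/v0.1.0'
github_spec = re.search('/(repos|codeload.github.com)/([^/]*/[^/]*)/', url).group(2)
assert github_spec == 'ARMmbed/yotta'
print('github://' + github_spec + '#v0.1.0')  # the origin_info url form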
ARMmbed_yotta/yotta/test/cli/test_test.py
yotta.test.cli.test_test.TestCLITest
class TestCLITest(unittest.TestCase):
    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet")
    def test_tests(self):
        test_dir = util.writeTestFiles(Test_Tests, True)
        output = self.runCheckCommand(['--target', systemDefaultTarget(), 'build'], test_dir)
        output = self.runCheckCommand(['--target', systemDefaultTarget(), 'test'], test_dir)
        self.assertIn('test-a passed', output)
        self.assertIn('test-c passed', output)
        self.assertIn('test-d passed', output)
        self.assertIn('test-e passed', output)
        self.assertIn('test-f passed', output)
        self.assertIn('test-g passed', output)
        util.rmRf(test_dir)

    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet")
    def test_testOutputFilterPassing(self):
        test_dir = util.writeTestFiles(Test_Fitler_Pass, True)
        stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'test'], test_dir)
        util.rmRf(test_dir)

    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet")
    def test_testOutputFilterFailing(self):
        test_dir = util.writeTestFiles(Test_Fitler_Fail, True)
        stdout, stderr, statuscode = cli.run(['--target', systemDefaultTarget(), 'test'], cwd=test_dir)
        if statuscode == 0:
            print(stdout)
            print(stderr)
        self.assertIn('test-a failed', '%s %s' % (stdout, stderr))
        self.assertIn('test-c failed', '%s %s' % (stdout, stderr))
        self.assertIn('test-d failed', '%s %s' % (stdout, stderr))
        self.assertIn('test-e failed', '%s %s' % (stdout, stderr))
        self.assertIn('test-f failed', '%s %s' % (stdout, stderr))
        self.assertIn('test-g failed', '%s %s' % (stdout, stderr))
        self.assertNotEqual(statuscode, 0)
        util.rmRf(test_dir)

    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet")
    def test_testOutputFilterNotFound(self):
        test_dir = util.writeTestFiles(Test_Fitler_NotFound, True)
        stdout, stderr, statuscode = cli.run(['--target', systemDefaultTarget(), 'test'], cwd=test_dir)
        if statuscode == 0:
            print(stdout)
            print(stderr)
        self.assertNotEqual(statuscode, 0)
        util.rmRf(test_dir)

    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet")
    def test_testCustomCMake(self):
        test_dir = util.writeTestFiles(util.Test_Test_Custom_CMake, True)
        output = self.runCheckCommand(['--target', systemDefaultTarget(), 'test'], test_dir)
        self.assertIn('test-trivial-lib-maintest passed', output)
        util.rmRf(test_dir)

    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet")
    def test_testAdditionalCMake(self):
        test_dir = util.writeTestFiles(util.Test_Test_Extra_CMake, True)
        output = self.runCheckCommand(['--target', systemDefaultTarget(), 'test'], test_dir)
        self.assertIn('test-trivial-lib-test-main passed', output)
        util.rmRf(test_dir)

    def runCheckCommand(self, args, test_dir):
        stdout, stderr, statuscode = cli.run(args, cwd=test_dir)
        if statuscode != 0:
            print('command failed with status %s' % statuscode)
            print(stdout)
            print(stderr)
        self.assertEqual(statuscode, 0)
        return '%s %s' % (stdout, stderr)
class TestCLITest(unittest.TestCase):
    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet")
    def test_tests(self):
        pass

    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet")
    def test_testOutputFilterPassing(self):
        pass

    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet")
    def test_testOutputFilterFailing(self):
        pass

    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet")
    def test_testOutputFilterNotFound(self):
        pass

    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet")
    def test_testCustomCMake(self):
        pass

    @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet")
    def test_testAdditionalCMake(self):
        pass

    def runCheckCommand(self, args, test_dir):
        pass
14
0
8
0
8
0
1
0
1
0
0
1
7
0
7
79
68
6
62
27
48
0
56
21
48
2
2
1
10
2,662
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/github_access.py
yotta.lib.github_access.GithubComponent
class GithubComponent(access_common.RemoteComponent):
    def __init__(self, repo, tag_or_branch=None, semantic_spec=None, name=None):
        logging.debug('create Github component for repo:%s version spec:%s' % (repo, semantic_spec or tag_or_branch))
        self.repo = repo
        self.spec = semantic_spec
        self.tag_or_branch = tag_or_branch
        self.tags = None
        self.name = name

    @classmethod
    def createFromSource(cls, vs, name=None):
        ''' returns a github component for any github url (including
            git+ssh:// git+http:// etc. or None if this is not a Github URL.
            For all of these we use the github api to grab a tarball, because
            that's faster.

            Normally version will be empty, unless the original url was of the
            form: 'owner/repo @version' or 'url://...#version', which can be
            used to grab a particular tagged version.

            (Note that for github components we ignore the component name - it
            doesn't have to match the github module name)
        '''
        return GithubComponent(vs.location, vs.spec, vs.semantic_spec, name)

    def versionSpec(self):
        return self.spec

    def tagOrBranchSpec(self):
        return self.tag_or_branch

    def _getTags(self):
        if self.tags is None:
            try:
                self.tags = _getTags(self.repo).items()
            except github.UnknownObjectException as e:
                raise access_common.Unavailable(
                    'could not locate github component "%s", either the name is misspelt, you do not have access to it, or it does not exist' % self.repo
                )
        return self.tags

    def availableVersions(self):
        ''' return a list of Version objects, each with a tarball URL set '''
        r = []
        for t in self._getTags():
            logger.debug("available version tag: %s", t)
            # ignore empty tags:
            if not len(t[0].strip()):
                continue
            try:
                r.append(GithubComponentVersion(t[0], t[0], url=t[1], name=self.name, cache_key=None))
            except ValueError:
                logger.debug('invalid version tag: %s', t)
        return r

    def availableTags(self):
        ''' return a list of GithubComponentVersion objects for all tags '''
        return [
            GithubComponentVersion(
                '', t[0], t[1], self.name, cache_key=_createCacheKey('tag', t[0], t[1], self.name)
            ) for t in self._getTags()
        ]

    def availableBranches(self):
        ''' return a list of GithubComponentVersion objects for the tip of each branch '''
        return [
            GithubComponentVersion(
                '', b[0], b[1], self.name, cache_key=None
            ) for b in _getBranchHeads(self.repo).items()
        ]

    def tipVersion(self):
        return GithubComponentVersion(
            '', '', _getTipArchiveURL(self.repo), self.name, cache_key=None
        )

    def commitVersion(self):
        ''' return a GithubComponentVersion object for a specific commit if valid '''
        import re
        commit_match = re.match('^[a-f0-9]{7,40}$', self.tagOrBranchSpec(), re.I)
        if commit_match:
            return GithubComponentVersion(
                '', '', _getCommitArchiveURL(self.repo, self.tagOrBranchSpec()), self.name, cache_key=None
            )
        return None

    @classmethod
    def remoteType(cls):
        return 'github'
class GithubComponent(access_common.RemoteComponent):
    def __init__(self, repo, tag_or_branch=None, semantic_spec=None, name=None):
        pass

    @classmethod
    def createFromSource(cls, vs, name=None):
        ''' returns a github component for any github url (including
            git+ssh:// git+http:// etc. or None if this is not a Github URL.
            For all of these we use the github api to grab a tarball, because
            that's faster.

            Normally version will be empty, unless the original url was of the
            form: 'owner/repo @version' or 'url://...#version', which can be
            used to grab a particular tagged version.

            (Note that for github components we ignore the component name - it
            doesn't have to match the github module name)
        '''
        pass

    def versionSpec(self):
        pass

    def tagOrBranchSpec(self):
        pass

    def _getTags(self):
        pass

    def availableVersions(self):
        ''' return a list of Version objects, each with a tarball URL set '''
        pass

    def availableTags(self):
        ''' return a list of GithubComponentVersion objects for all tags '''
        pass

    def availableBranches(self):
        ''' return a list of GithubComponentVersion objects for the tip of each branch '''
        pass

    def tipVersion(self):
        pass

    def commitVersion(self):
        ''' return a GithubComponentVersion object for a specific commit if valid '''
        pass

    @classmethod
    def remoteType(cls):
        pass
14
5
7
0
5
2
2
0.29
1
3
2
0
9
5
11
16
95
15
62
24
47
18
46
21
33
4
2
2
17
2,663
ARMmbed/yotta
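commitVersion above decides whether a tag-or-branch spec names a commit by checking for 7-40 hex characters, case-insensitively. The rule in isolation (looks_like_commit is an illustrative name; the regex is the one from the record):

import re

def looks_like_commit(spec):
    return bool(re.match('^[a-f0-9]{7,40}$', spec, re.I))

assert looks_like_commit('deadbeef')     # 8 hex chars: treated as a commit
assert not looks_like_commit('v1.2.3')   # tag-like specs fall through
assert not looks_like_commit('master')   # branch names fall through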
ARMmbed_yotta/yotta/lib/git_access.py
yotta.lib.git_access.GitComponent
class GitComponent(access_common.RemoteComponent):
    def __init__(self, url, tag_or_branch=None, semantic_spec=None):
        logging.debug('create git component for url:%s version spec:%s' % (url, semantic_spec or tag_or_branch))
        self.url = url
        # !!! TODO: handle non-semantic spec
        self.spec = semantic_spec
        self.tag_or_branch = tag_or_branch

    @classmethod
    def createFromSource(cls, vs, name=None):
        ''' returns a git component for any git:// url, or None if this is not
            a git component.

            Normally version will be empty, unless the original url was of the
            form 'git://...#version', which can be used to grab a particular
            tag or branch, or ...#>=1.2.3, which can be used to specify
            semantic version specifications on tags.
        '''
        return GitComponent(vs.location, vs.spec, vs.semantic_spec)

    def versionSpec(self):
        return self.spec

    def tagOrBranchSpec(self):
        return self.tag_or_branch

    # clone the remote repository: this is necessary to find out what tagged
    # versions are available.
    # The clone is created in /tmp, and is not automatically deleted, but the
    # returned version object maintains a handle to it, so that when a specific
    # version is requested it can be retrieved from the temporary clone,
    # instead of from the remote origin.
    def clone(self):
        # vcs, , represent version controlled directories, internal
        from yotta.lib import vcs
        clone = vcs.Git.cloneToTemporaryDir(self.url)
        clone.fetchAllBranches()
        return GitWorkingCopy(clone)

    @classmethod
    def remoteType(cls):
        return 'git'
class GitComponent(access_common.RemoteComponent):
    def __init__(self, url, tag_or_branch=None, semantic_spec=None):
        pass

    @classmethod
    def createFromSource(cls, vs, name=None):
        ''' returns a git component for any git:// url, or None if this is not
            a git component.

            Normally version will be empty, unless the original url was of the
            form 'git://...#version', which can be used to grab a particular
            tag or branch, or ...#>=1.2.3, which can be used to specify
            semantic version specifications on tags.
        '''
        pass

    def versionSpec(self):
        pass

    def tagOrBranchSpec(self):
        pass

    def clone(self):
        pass

    @classmethod
    def remoteType(cls):
        pass
9
1
5
0
3
2
1
0.71
1
2
2
0
4
3
6
11
42
6
21
14
11
15
19
12
11
1
2
0
6
2,664
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/version.py
yotta.lib.version.TipVersion
class TipVersion(object): pass
class TipVersion(object): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
2
0
2
1
1
0
2
1
1
0
1
0
0
2,665
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/component.py
yotta.lib.component.Component
class Component(pack.Pack):
    def __init__(
            self,
            path,
            installed_linked = False,
            latest_suitable_version = None,
            test_dependency = False,
            inherit_shrinkwrap = None
        ):
        ''' How to use a Component:

            Initialise it with the directory into which the component has been
            downloaded, (or with a symlink that points to a directory
            containing the component)

            Check that 'if component:' is true, which indicates that the
            download is indeed a valid component.

            Check that component.getVersion() returns the version you think
            you've downloaded.

            Use component.getDependencySpecs() to get the names of the
            dependencies of the component, or component.getDependencies() to
            get Component objects (which may not be valid unless the
            dependencies have been installed) for each of the dependencies.
        '''
        self.description = OrderedDict()
        logger.log(VVVERBOSE_DEBUG, "Component: " + path + ' installed_linked=' + str(installed_linked))
        warn_deprecated_filename = False
        if (not os.path.exists(os.path.join(path, Component_Description_File))) and \
           os.path.exists(os.path.join(path, Component_Description_File_Fallback)):
            warn_deprecated_filename = True
            description_filename = Component_Description_File_Fallback
        else:
            description_filename = Component_Description_File
        super(Component, self).__init__(
            path,
            description_filename = description_filename,
            installed_linked = installed_linked,
            schema_filename = Schema_File,
            latest_suitable_version = latest_suitable_version,
            inherit_shrinkwrap = inherit_shrinkwrap
        )
        if self.description and inherit_shrinkwrap is not None:
            # when inheriting a shrinkwrap, check that this module is
            # listed in the shrinkwrap, otherwise emit a warning:
            if next((x for x in inherit_shrinkwrap.get('modules', []) if x['name'] == self.getName()), None) is None:
                logger.warning("%s missing from shrinkwrap", self.getName())
        if warn_deprecated_filename:
            logger.warning(
                "Component %s uses deprecated %s file, use %s instead." % (
                    self.getName(),
                    Component_Description_File_Fallback,
                    Component_Description_File
                )
            )
        if 'bin' in self.description and 'lib' in self.description:
            self.error = 'Both "lib" and "bin" are specified in module.json: '+\
                'only one is allowed. If this is an executable module, then '+\
                'it should not specify a "lib" subdirectory, and if this is '+\
                'a re-usable library module, it should not specify a "bin" '+\
                'subdirectory'
            self.description = OrderedDict()
        # specified in the description
        self.installed_dependencies = False
        self.dependencies_failed = False
        self.is_test_dependency = test_dependency
        # read definitions for applications
        self.defines = {}
        defines_path = os.path.join(path, Component_Definitions_File)
        if os.path.isfile(defines_path):
            if not self.isApplication():
                # only applications can have definitions
                logger.warning("%s ignored in library module '%s'" % (Component_Definitions_File, self.getName()))
            else:
                # TODO: define a schema for this file
                self.defines = pack.tryReadJSON(defines_path, None)

    def getDependencySpecs(self, target=None):
        ''' Returns [DependencySpec]

            These are returned in the order that they are listed in the
            component description file: this is so that dependency resolution
            proceeds in a predictable way.
        '''
        deps = []
        def specForDependency(name, version_spec, istest):
            shrinkwrap = self.getShrinkwrapMapping()
            shrinkwrap_version_req = None
            if name in shrinkwrap:
                # exact version, and pull from registry:
                shrinkwrap_version_req = shrinkwrap[name]
                logger.debug(
                    'respecting %s shrinkwrap version %s for %s', self.getName(), shrinkwrap_version_req, name
                )
            return pack.DependencySpec(
                name,
                version_spec,
                istest,
                shrinkwrap_version_req = shrinkwrap_version_req,
                specifying_module = self.getName()
            )
        deps += [specForDependency(x[0], x[1], False) for x in self.description.get('dependencies', {}).items()]
        target_deps = self.description.get('targetDependencies', {})
        if target is not None:
            for conf_key, target_conf_deps in target_deps.items():
                if _truthyConfValue(target.getConfigValue(conf_key)) or conf_key in target.getSimilarTo_Deprecated():
                    logger.debug(
                        'Adding target-dependent dependency specs for target config %s to component %s' % (conf_key, self.getName())
                    )
                    deps += [specForDependency(x[0], x[1], False) for x in target_conf_deps.items()]
        deps += [specForDependency(x[0], x[1], True) for x in self.description.get('testDependencies', {}).items()]
        target_deps = self.description.get('testTargetDependencies', {})
        if target is not None:
            for conf_key, target_conf_deps in target_deps.items():
                if _truthyConfValue(target.getConfigValue(conf_key)) or conf_key in target.getSimilarTo_Deprecated():
                    logger.debug(
                        'Adding test-target-dependent dependency specs for target config %s to component %s' % (conf_key, self.getName())
                    )
                    deps += [specForDependency(x[0], x[1], True) for x in target_conf_deps.items()]
        # remove duplicates (use the first occurrence)
        seen = set()
        r = []
        for dep in deps:
            if not dep.name in seen:
                r.append(dep)
                seen.add(dep.name)
        return r

    def hasDependency(self, name, target=None, test_dependencies=False):
        ''' Check if this module has any dependencies with the specified name
            in its dependencies list, or in target dependencies for the
            specified target
        '''
        if name in self.description.get('dependencies', {}).keys():
            return True
        target_deps = self.description.get('targetDependencies', {})
        if target is not None:
            for conf_key, target_conf_deps in target_deps.items():
                if _truthyConfValue(target.getConfigValue(conf_key)) or conf_key in target.getSimilarTo_Deprecated():
                    if name in target_conf_deps:
                        return True
        if test_dependencies:
            if name in self.description.get('testDependencies', {}).keys():
                return True
            if target is not None:
                test_target_deps = self.description.get('testTargetDependencies', {})
                for conf_key, target_conf_deps in test_target_deps.items():
                    if _truthyConfValue(target.getConfigValue(conf_key)) or conf_key in target.getSimilarTo_Deprecated():
                        if name in target_conf_deps:
                            return True
        return False

    def hasDependencyRecursively(self, name, target=None, test_dependencies=False):
        ''' Check if this module, or any of its dependencies, have a
            dependencies with the specified name in their dependencies, or in
            their targetDependencies corresponding to the specified target.

            Note that if recursive dependencies are not installed, this test
            may return a false-negative.
        '''
        # checking dependencies recursively isn't entirely straightforward, so
        # use the existing method to resolve them all before checking:
        dependencies = self.getDependenciesRecursive(
            target = target,
            test = test_dependencies
        )
        return (name in dependencies)

    def getDependencies(self,
            available_components = None,
            search_dirs = None,
            target = None,
            available_only = False,
            test = False,
            warnings = True
        ):
        ''' Returns {component_name:component}
        '''
        if search_dirs is None:
            search_dirs = [self.modulesPath()]
        available_components = self.ensureOrderedDict(available_components)
        components, errors = self.__getDependenciesWithProvider(
            available_components = available_components,
            search_dirs = search_dirs,
            target = target,
            update_installed = False,
            provider = self.provideInstalled,
            test = test
        )
        if warnings:
            for error in errors:
                logger.warning(error)
        if available_only:
            components = OrderedDict((k, v) for k, v in components.items() if v)
        return components

    def __getDependenciesWithProvider(self,
            available_components = None,
            search_dirs = None,
            target = None,
            update_installed = False,
            provider = None,
            test = False
        ):
        ''' Get installed components using "provider" to find (and possibly
            install) components.

            See documentation for __getDependenciesRecursiveWithProvider

            returns (components, errors)
        '''
        # sourceparse, , parse version source urls, internal
        from yotta.lib import sourceparse
        errors = []
        modules_path = self.modulesPath()
        def satisfyDep(dspec):
            try:
                r = provider(
                    dspec,
                    available_components,
                    search_dirs,
                    modules_path,
                    update_installed,
                    self
                )
                if r and not sourceparse.parseSourceURL(dspec.versionReq()).semanticSpecMatches(r.getVersion()):
                    shrinkwrap_msg = ''
                    if dspec.isShrinkwrapped():
                        shrinkwrap_msg = 'shrinkwrap on '
                    msg = 'does not meet specification %s required by %s%s' % (
                        dspec.versionReq(), shrinkwrap_msg, self.getName()
                    )
                    logger.debug('%s %s', r.getName(), msg)
                    r.setError(msg)
                return r
            except access_common.Unavailable as e:
                errors.append(e)
                self.dependencies_failed = True
            except vcs.VCSError as e:
                errors.append(e)
                self.dependencies_failed = True
        specs = self.getDependencySpecs(target=target)
        if not test:
            # filter out things that aren't test dependencies if necessary:
            specs = [x for x in specs if not x.is_test_dependency]
        #dependencies = pool.map(
        dependencies = map(
            satisfyDep, specs
        )
        self.installed_dependencies = True
        # stable order is important!
        return (OrderedDict([((d and d.getName()) or specs[i].name, d) for i, d in enumerate(dependencies)]), errors)

    def __getDependenciesRecursiveWithProvider(self,
            available_components = None,
            search_dirs = None,
            target = None,
            traverse_links = False,
            update_installed = False,
            provider = None,
            test = False,
            _processed = None
        ):
        ''' Get installed components using "provider" to find (and possibly
            install) components. This function is called with different
            provider functions in order to retrieve a list of all of the
            dependencies, or install all dependencies.

            Returns
            =======
                (components, errors)

                components: dictionary of name:Component
                errors: sequence of errors

            Parameters
            ==========
                available_components:
                    None (default) or a dictionary of name:component. This is
                    searched before searching directories or fetching remote
                    components

                search_dirs:
                    None (default), or sequence of directories to search for
                    already installed, (but not yet loaded) components. Used so
                    that manually installed or linked components higher up the
                    dependency tree are found by their users lower down.

                    These directories are searched in order, and finally the
                    current directory is checked.

                target:
                    None (default), or a Target object. If specified the target
                    name and it's similarTo list will be used in resolving
                    dependencies. If None, then only target-independent
                    dependencies will be installed

                traverse_links:
                    False (default) or True: whether to recurse into linked
                    dependencies. You normally want to set this to "True" when
                    getting a list of dependencies, and False when installing
                    them (unless the user has explicitly asked dependencies to
                    be installed in linked components).

                provider: None (default) or function:
                    provider(
                        dependency_spec,
                        available_components,
                        search_dirs,
                        working_directory,
                        update_if_installed
                    )

                test:
                    True, False, 'toplevel': should test-only dependencies be
                    included (yes, no, or only at this level, not recursively)
        '''
        def recursionFilter(c):
            if not c:
                logger.debug('do not recurse into failed component')
                # don't recurse into failed components
                return False
            if c.getName() in _processed:
                logger.debug('do not recurse into already processed component: %s' % c)
                return False
            if c.installedLinked() and not traverse_links:
                return False
            return True
        available_components = self.ensureOrderedDict(available_components)
        if search_dirs is None:
            search_dirs = []
        if _processed is None:
            _processed = set()
        assert(test in [True, False, 'toplevel'])
        search_dirs.append(self.modulesPath())
        logger.debug('process %s\nsearch dirs:%s' % (self.getName(), search_dirs))
        if self.isTestDependency():
            logger.debug("won't provide test dependencies recursively for test dependency %s", self.getName())
            test = False
        components, errors = self.__getDependenciesWithProvider(
            available_components = available_components,
            search_dirs = search_dirs,
            update_installed = update_installed,
            target = target,
            provider = provider,
            test = test
        )
        _processed.add(self.getName())
        if errors:
            errors = ['Failed to satisfy dependencies of %s:' % self.path] + errors
        need_recursion = [x for x in filter(recursionFilter, components.values())]
        available_components.update(components)
        logger.debug('processed %s\nneed recursion: %s\navailable:%s\nsearch dirs:%s' % (self.getName(), need_recursion, available_components, search_dirs))
        if test == 'toplevel':
            test = False
        # NB: can't perform this step in parallel, since the available
        # components list must be updated in order
        for c in need_recursion:
            dep_components, dep_errors = c.__getDependenciesRecursiveWithProvider(
                available_components = available_components,
                search_dirs = search_dirs,
                target = target,
                traverse_links = traverse_links,
                update_installed = update_installed,
                provider = provider,
                test = test,
                _processed = _processed
            )
            available_components.update(dep_components)
            components.update(dep_components)
            errors += dep_errors
        return (components, errors)

    def provideInstalled(self,
            dspec,
            available_components,
            search_dirs,
            working_directory,
            update_installed,
            dep_of
        ):
        #logger.info('%s provideInstalled: %s', dep_of.getName(), dspec.name)
        r = access.satisfyFromAvailable(dspec.name, available_components)
        if r:
            if r.isTestDependency() and not dspec.is_test_dependency:
                logger.debug('test dependency subsequently occurred as real dependency: %s', r.getName())
                r.setTestDependency(False)
            return r
        update_if_installed = False
        if update_installed is True:
            update_if_installed = True
        elif update_installed:
            update_if_installed = dspec.name in update_installed
        r = access.satisfyVersionFromSearchPaths(
            dspec.name,
            dspec.versionReq(),
            search_dirs,
            update_if_installed,
            inherit_shrinkwrap = dep_of.getShrinkwrap()
        )
        if r:
            r.setTestDependency(dspec.is_test_dependency)
            return r
        # return a module initialised to the path where we would have
        # installed this module, so that it's possible to use
        # getDependenciesRecursive to find a list of failed dependencies,
        # as well as just available ones
        # note that this Component object may still be valid (usable to
        # attempt a build), if a different version was previously installed
        # on disk at this location (which means we need to check if the
        # existing version is linked)
        default_path = os.path.join(self.modulesPath(), dspec.name)
        r = Component(
            default_path,
            test_dependency = dspec.is_test_dependency,
            installed_linked = fsutils.isLink(default_path),
            inherit_shrinkwrap = dep_of.getShrinkwrap()
        )
        return r

    def getDependenciesRecursive(self,
            available_components = None,
            processed = None,
            search_dirs = None,
            target = None,
            available_only = False,
            test = False
        ):
        ''' Get available and already installed components, don't check for
            remotely available components. See also
            satisfyDependenciesRecursive()

            Returns {component_name:component}
        '''
        components, errors = self.__getDependenciesRecursiveWithProvider(
            available_components = available_components,
            search_dirs = search_dirs,
            target = target,
            traverse_links = True,
            update_installed = False,
            provider = self.provideInstalled,
            test = test
        )
        for error in errors:
            logger.error(error)
        if available_only:
            components = OrderedDict((k, v) for k, v in components.items() if v)
        return components

    def modulesPath(self):
        return os.path.join(self.path, Modules_Folder)

    def targetsPath(self):
        return os.path.join(self.path, Targets_Folder)

    def satisfyDependenciesRecursive(
            self,
            available_components = None,
            search_dirs = None,
            update_installed = False,
            traverse_links = False,
            target = None,
            test = False
        ):
        ''' Retrieve and install all the dependencies of this component and its
            dependencies, recursively, or satisfy them from a collection of
            available_components or from disk.

            Returns
            =======
                (components, errors)

                components: dictionary of name:Component
                errors: sequence of errors

            Parameters
            ==========
                available_components:
                    None (default) or a dictionary of name:component. This is
                    searched before searching directories or fetching remote
                    components

                search_dirs:
                    None (default), or sequence of directories to search for
                    already installed, (but not yet loaded) components. Used so
                    that manually installed or linked components higher up the
                    dependency tree are found by their users lower down.

                    These directories are searched in order, and finally the
                    current directory is checked.

                update_installed:
                    False (default), True, or set(): whether to check the
                    available versions of installed components, and update if a
                    newer version is available. If this is a set(), only update
                    things in the specified set.

                traverse_links:
                    False (default) or True: whether to recurse into linked
                    dependencies when updating/installing.

                target:
                    None (default), or a Target object. If specified the target
                    name and it's similarTo list will be used in resolving
                    dependencies. If None, then only target-independent
                    dependencies will be installed

                test:
                    True, False, or 'toplevel: should test-only dependencies be
                    installed? (yes, no, or only for this module, not its
                    dependencies).
        '''
        def provider(
                dspec,
                available_components,
                search_dirs,
                working_directory,
                update_installed,
                dep_of=None
            ):
            r = access.satisfyFromAvailable(dspec.name, available_components)
            if r:
                if r.isTestDependency() and not dspec.is_test_dependency:
                    logger.debug('test dependency subsequently occurred as real dependency: %s', r.getName())
                    r.setTestDependency(False)
                return r
            update_if_installed = False
            if update_installed is True:
                update_if_installed = True
            elif update_installed:
                update_if_installed = dspec.name in update_installed
            r = access.satisfyVersionFromSearchPaths(
                dspec.name,
                dspec.versionReq(),
                search_dirs,
                update_if_installed,
                inherit_shrinkwrap = dep_of.getShrinkwrap()
            )
            if r:
                r.setTestDependency(dspec.is_test_dependency)
                return r
            # before resorting to install this module, check if we have an
            # existing linked module (which wasn't picked up because it didn't
            # match the version specification) - if we do, then we shouldn't
            # try to install, but should return that anyway:
            default_path = os.path.join(self.modulesPath(), dspec.name)
            if fsutils.isLink(default_path):
                r = Component(
                    default_path,
                    test_dependency = dspec.is_test_dependency,
                    installed_linked = fsutils.isLink(default_path),
                    inherit_shrinkwrap = dep_of.getShrinkwrap()
                )
                if r:
                    assert(r.installedLinked())
                    return r
                else:
                    logger.error('linked module %s is invalid: %s', dspec.name, r.getError())
                    return r
            r = access.satisfyVersionByInstalling(
                dspec.name,
                dspec.versionReq(),
                self.modulesPath(),
                inherit_shrinkwrap = dep_of.getShrinkwrap()
            )
            if not r:
                logger.error('could not install %s' % dspec.name)
            if r is not None:
                r.setTestDependency(dspec.is_test_dependency)
            return r
        return self.__getDependenciesRecursiveWithProvider(
            available_components = available_components,
            search_dirs = search_dirs,
            target = target,
            traverse_links = traverse_links,
            update_installed = update_installed,
            provider = provider,
            test = test
        )

    def satisfyTarget(self, target_name_and_version, update_installed=False, additional_config=None, install_missing=True):
        ''' Ensure that the specified target name (and optionally version,
            github ref or URL) is installed in the targets directory of the
            current component

            returns (derived_target, errors)
        '''
        # Target, , represent an installed target, internal
        from yotta.lib import target
        application_dir = None
        if self.isApplication():
            application_dir = self.path
        return target.getDerivedTarget(
            target_name_and_version,
            self.targetsPath(),
            install_missing = install_missing,
            application_dir = application_dir,
            update_installed = update_installed,
            additional_config = additional_config,
            shrinkwrap = self.getShrinkwrap()
        )

    def getTarget(self, target_name_and_version, additional_config=None):
        ''' Return a derived target object representing the selected target: if
            the target is not installed, or is invalid then the returned object
            will test false in a boolean context.

            Returns derived_target

            Errors are not displayed.
        '''
        derived_target, errors = self.satisfyTarget(
            target_name_and_version,
            additional_config = additional_config,
            install_missing = False
        )
        if len(errors):
            return None
        else:
            return derived_target

    def installedDependencies(self):
        ''' Return true if satisfyDependencies has been called.

            Note that this is slightly different to when all of the
            dependencies are actually satisfied, but can be used as if it means
            that.
        '''
        return self.installed_dependencies

    def isApplication(self):
        ''' Return true if this module is an application instead of a reusable
            library '''
        return bool(len(self.getBinaries()))

    def getBinaries(self):
        ''' Return a dictionary of binaries to compile: {"dirname":"exename"},
            this is used when automatically generating CMakeLists

            Note that currently modules may define only a single executable
            binary or library to be built by the automatic build system, by
            specifying `"bin": "dir-to-be-built-into-binary"`, or `"lib":
            "dir-to-be-built-into-library"`, and the bin/lib will always have
            the same name as the module.

            The default behaviour if nothing is specified is for the 'source'
            directory to be built into a library.

            The module.json syntax may allow for other combinations in the
            future (and callers of this function should not rely on it
            returning only a single item). For example, a "bin": {"dirname":
            "exename"} syntax might be supported, however currently more
            complex builds must be controlled by custom CMakeLists.
        '''
        # the module.json syntax is a subset of the package.json syntax: a
        # single string that defines the source directory to use to build an
        # executable with the same name as the component. This may be extended
        # to include the rest of the npm syntax in future (map of source-dir to
        # exe name).
        if 'bin' in self.description:
            return {os.path.normpath(self.description['bin']): self.getName()}
        else:
            return {}

    def getLibs(self, explicit_only=False):
        ''' Return a dictionary of libraries to compile: {"dirname":"libname"},
            this is used when automatically generating CMakeLists.

            If explicit_only is not set, then in the absence of both 'lib' and
            'bin' sections in the module.json file, the "source" directory
            will be returned.

            Note that currently modules may define only a single executable
            binary or library to be built by the automatic build system, by
            specifying `"bin": "dir-to-be-built-into-binary"`, or `"lib":
            "dir-to-be-built-into-library"`, and the bin/lib will always have
            the same name as the module.

            The default behaviour if nothing is specified is for the 'source'
            directory to be built into a library.

            The module.json syntax may allow for other combinations in the
            future (and callers of this function should not rely on it
            returning only a single item). For example, a "bin": {"dirname":
            "exename"} syntax might be supported, however currently more
            complex builds must be controlled by custom CMakeLists.
        '''
        if 'lib' in self.description:
            return {os.path.normpath(self.description['lib']): self.getName()}
        elif 'bin' not in self.description and not explicit_only:
            return {'source': self.getName()}
        else:
            return {}

    def licenses(self):
        ''' Return a list of licenses that apply to this module. (Strings,
            which may be SPDX identifiers)
        '''
        if 'license' in self.description:
            return [self.description['license']]
        else:
            return [x['type'] for x in self.description['licenses']]

    def getExtraIncludes(self):
        ''' Some components must export whole directories full of headers into
            the search path. This is really really bad, and they shouldn't do
            it, but support is provided as a concession to compatibility.
        '''
        if 'extraIncludes' in self.description:
            return [os.path.normpath(x) for x in self.description['extraIncludes']]
        else:
            return []

    def getExtraSysIncludes(self):
        ''' Some components (e.g. libc) must export directories of header files
            into the system include search path. They do this by adding a
            'extraSysIncludes' : [ array of directories ] field in their
            package description.

            This function returns the list of directories (or an empty list),
            if it doesn't exist.
        '''
        if 'extraSysIncludes' in self.description:
            return [os.path.normpath(x) for x in self.description['extraSysIncludes']]
        else:
            return []

    def getRegistryNamespace(self):
        return Registry_Namespace

    def setTestDependency(self, status):
        self.is_test_dependency = status

    def isTestDependency(self):
        return self.is_test_dependency

    def __saveSpecForComponent(self, component):
        version = component.getVersion()
        if version.isTip():
            spec = '*'
        elif version.major() == 0:
            # for 0.x.x versions, when we save a dependency we don't use ^0.x.x
            # a that would peg to the exact version - instead we use ~ to peg
            # to the same minor version
            spec = '~' + str(version)
        else:
            spec = '^' + str(version)
        return spec

    def saveDependency(self, component, spec=None):
        if not 'dependencies' in self.description:
            self.description['dependencies'] = OrderedDict()
        if spec is None:
            spec = self.__saveSpecForComponent(component)
        self.description['dependencies'][component.getName()] = spec
        return spec

    def removeDependency(self, component):
        if not component in self.description.get('dependencies', {}):
            logger.error('%s is not listed as a dependency', component)
            return False
        del self.description['dependencies'][component]
        return True

    def getDefines(self):
        return self.defines
class Component(pack.Pack): def __init__( self, path, installed_linked = False, latest_suitable_version = None, test_dependency = False, inherit_shrinkwrap = None ): ''' How to use a Component: Initialise it with the directory into which the component has been downloaded, (or with a symlink that points to a directory containing the component) Check that 'if component:' is true, which indicates that the download is indeed a valid component. Check that component.getVersion() returns the version you think you've downloaded. Use component.getDependencySpecs() to get the names of the dependencies of the component, or component.getDependencies() to get Component objects (which may not be valid unless the dependencies have been installed) for each of the dependencies. ''' pass def getDependencySpecs(self, target=None): ''' Returns [DependencySpec] These are returned in the order that they are listed in the component description file: this is so that dependency resolution proceeds in a predictable way. ''' pass def specForDependency(name, version_spec, istest): pass def hasDependency(self, name, target=None, test_dependencies=False): ''' Check if this module has any dependencies with the specified name in its dependencies list, or in target dependencies for the specified target ''' pass def hasDependencyRecursively(self, name, target=None, test_dependencies=False): ''' Check if this module, or any of its dependencies, have a dependencies with the specified name in their dependencies, or in their targetDependencies corresponding to the specified target. Note that if recursive dependencies are not installed, this test may return a false-negative. ''' pass def getDependencies(self, available_components = None, search_dirs = None, target = None, available_only = False, test = False, warnings = True ): ''' Returns {component_name:component} ''' pass def __getDependenciesWithProvider(self, available_components = None, search_dirs = None, target = None, update_installed = False, provider = None, test = False ): ''' Get installed components using "provider" to find (and possibly install) components. See documentation for __getDependenciesRecursiveWithProvider returns (components, errors) ''' pass def satisfyDep(dspec): pass def __getDependenciesRecursiveWithProvider(self, available_components = None, search_dirs = None, target = None, traverse_links = False, update_installed = False, provider = None, test = False, _processed = None ): ''' Get installed components using "provider" to find (and possibly install) components. This function is called with different provider functions in order to retrieve a list of all of the dependencies, or install all dependencies. Returns ======= (components, errors) components: dictionary of name:Component errors: sequence of errors Parameters ========== available_components: None (default) or a dictionary of name:component. This is searched before searching directories or fetching remote components search_dirs: None (default), or sequence of directories to search for already installed, (but not yet loaded) components. Used so that manually installed or linked components higher up the dependency tree are found by their users lower down. These directories are searched in order, and finally the current directory is checked. target: None (default), or a Target object. If specified the target name and it's similarTo list will be used in resolving dependencies. 
If None, then only target-independent dependencies will be installed traverse_links: False (default) or True: whether to recurse into linked dependencies. You normally want to set this to "True" when getting a list of dependencies, and False when installing them (unless the user has explicitly asked dependencies to be installed in linked components). provider: None (default) or function: provider( dependency_spec, available_components, search_dirs, working_directory, update_if_installed ) test: True, False, 'toplevel': should test-only dependencies be included (yes, no, or only at this level, not recursively) ''' pass def recursionFilter(c): pass def provideInstalled(self, dspec, available_components, search_dirs, working_directory, update_installed, dep_of ): pass def getDependenciesRecursive(self, available_components = None, processed = None, search_dirs = None, target = None, available_only = False, test = False ): ''' Get available and already installed components, don't check for remotely available components. See also satisfyDependenciesRecursive() Returns {component_name:component} ''' pass def modulesPath(self): pass def targetsPath(self): pass def satisfyDependenciesRecursive( self, available_components = None, search_dirs = None, update_installed = False, traverse_links = False, target = None, test = False ): ''' Retrieve and install all the dependencies of this component and its dependencies, recursively, or satisfy them from a collection of available_components or from disk. Returns ======= (components, errors) components: dictionary of name:Component errors: sequence of errors Parameters ========== available_components: None (default) or a dictionary of name:component. This is searched before searching directories or fetching remote components search_dirs: None (default), or sequence of directories to search for already installed, (but not yet loaded) components. Used so that manually installed or linked components higher up the dependency tree are found by their users lower down. These directories are searched in order, and finally the current directory is checked. update_installed: False (default), True, or set(): whether to check the available versions of installed components, and update if a newer version is available. If this is a set(), only update things in the specified set. traverse_links: False (default) or True: whether to recurse into linked dependencies when updating/installing. target: None (default), or a Target object. If specified, the target name and its similarTo list will be used in resolving dependencies. If None, then only target-independent dependencies will be installed test: True, False, or 'toplevel': should test-only dependencies be installed? (yes, no, or only for this module, not its dependencies). ''' pass def provider( dspec, available_components, search_dirs, working_directory, update_installed, dep_of=None ): pass def satisfyTarget(self, target_name_and_version, update_installed=False, additional_config=None, install_missing=True): ''' Ensure that the specified target name (and optionally version, github ref or URL) is installed in the targets directory of the current component returns (derived_target, errors) ''' pass def getTarget(self, target_name_and_version, additional_config=None): ''' Return a derived target object representing the selected target: if the target is not installed, or is invalid then the returned object will test false in a boolean context. Returns derived_target Errors are not displayed.
''' pass def installedDependencies(self): ''' Return true if satisfyDependencies has been called. Note that this is slightly different to when all of the dependencies are actually satisfied, but can be used as if it means that. ''' pass def isApplication(self): ''' Return true if this module is an application instead of a reusable library ''' pass def getBinaries(self): ''' Return a dictionary of binaries to compile: {"dirname":"exename"}, this is used when automatically generating CMakeLists Note that currently modules may define only a single executable binary or library to be built by the automatic build system, by specifying `"bin": "dir-to-be-built-into-binary"`, or `"lib": "dir-to-be-built-into-library"`, and the bin/lib will always have the same name as the module. The default behaviour if nothing is specified is for the 'source' directory to be built into a library. The module.json syntax may allow for other combinations in the future (and callers of this function should not rely on it returning only a single item). For example, a "bin": {"dirname": "exename"} syntax might be supported, however currently more complex builds must be controlled by custom CMakeLists. ''' pass def getLibs(self, explicit_only=False): ''' Return a dictionary of libraries to compile: {"dirname":"libname"}, this is used when automatically generating CMakeLists. If explicit_only is not set, then in the absence of both 'lib' and 'bin' sections in the module.json file, the "source" directory will be returned. Note that currently modules may define only a single executable binary or library to be built by the automatic build system, by specifying `"bin": "dir-to-be-built-into-binary"`, or `"lib": "dir-to-be-built-into-library"`, and the bin/lib will always have the same name as the module. The default behaviour if nothing is specified is for the 'source' directory to be built into a library. The module.json syntax may allow for other combinations in the future (and callers of this function should not rely on it returning only a single item). For example, a "bin": {"dirname": "exename"} syntax might be supported, however currently more complex builds must be controlled by custom CMakeLists. ''' pass def licenses(self): ''' Return a list of licenses that apply to this module. (Strings, which may be SPDX identifiers) ''' pass def getExtraIncludes(self): ''' Some components must export whole directories full of headers into the search path. This is really really bad, and they shouldn't do it, but support is provided as a concession to compatibility. ''' pass def getExtraSysIncludes(self): ''' Some components (e.g. libc) must export directories of header files into the system include search path. They do this by adding a 'extraSysIncludes' : [ array of directories ] field in their package description. This function returns the list of directories (or an empty list), if it doesn't exist. ''' pass def getRegistryNamespace(self): pass def setTestDependency(self, status): pass def isTestDependency(self): pass def __saveSpecForComponent(self, component): pass def saveDependency(self, component, spec=None): pass def removeDependency(self, component): pass def getDefines(self): pass
33
18
27
2
18
7
3
0.48
1
11
3
0
28
6
28
60
781
80
475
141
381
226
278
81
243
12
2
5
106
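Note: the __saveSpecForComponent logic above derives a dependency spec from an installed version: tip versions map to '*', 0.x.x versions are pegged with '~' to the same minor version, and everything else gets a '^' compatible-upgrade spec. Below is a minimal standalone sketch of that rule; FakeVersion is a hypothetical stand-in for yotta's Version class, exposing is_tip and major as plain members for brevity.

    def spec_for_version(version):
        # tip (unversioned) dependencies accept any version
        if version.is_tip():
            return '*'
        # 0.x.x: '~' pegs to the same minor version, since a caret
        # spec would effectively peg to the exact version
        if version.major == 0:
            return '~' + str(version)
        # >= 1.0.0: '^' allows any semver-compatible upgrade
        return '^' + str(version)

    class FakeVersion(object):
        def __init__(self, major, minor, patch):
            self.major, self.minor, self.patch = major, minor, patch
        def is_tip(self):
            return False
        def __str__(self):
            return '%d.%d.%d' % (self.major, self.minor, self.patch)

    assert spec_for_version(FakeVersion(0, 2, 3)) == '~0.2.3'
    assert spec_for_version(FakeVersion(1, 2, 3)) == '^1.2.3'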
2,666
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/cmakegen.py
yotta.lib.cmakegen.SourceFile
class SourceFile(object): def __init__(self, fullpath, relpath, lang): super(SourceFile, self).__init__() self.fullpath = fullpath self.relpath = relpath self.lang = lang def __repr__(self): return self.fullpath
class SourceFile(object): def __init__(self, fullpath, relpath, lang): pass def __repr__(self): pass
3
0
4
0
4
0
1
0
1
1
0
0
2
3
2
2
8
0
8
6
5
0
8
6
5
1
1
0
2
2,667
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/cmakegen.py
yotta.lib.cmakegen.CMakeGen
class CMakeGen(object): def __init__(self, directory, target): super(CMakeGen, self).__init__() self.buildroot = directory logger.info("generate for target: %s" % target) self.target = target self.configured = False self.config_include_file = None self.config_json_file = None self.build_info_include_file = None self.build_uuid = None def _writeFile(self, path, contents): dirname = os.path.dirname(path) fsutils.mkDirP(dirname) self.writeIfDifferent(path, contents) def configure(self, component, all_dependencies): ''' Ensure all config-time files have been generated. Return a dictionary of generated items. ''' r = {} builddir = self.buildroot # only dependencies which are actually valid can contribute to the # config data (which includes the versions of all dependencies in its # build info) if the dependencies aren't available we can't tell what # version they are. Anything missing here should always be a test # dependency that isn't going to be used, otherwise the yotta build # command will fail before we get here available_dependencies = OrderedDict((k, v) for k, v in all_dependencies.items() if v) self.set_toplevel_definitions = '' if self.build_info_include_file is None: self.build_info_include_file, build_info_definitions = self.getBuildInfo(component.path, builddir) self.set_toplevel_definitions += build_info_definitions if self.config_include_file is None: self.config_include_file, config_definitions, self.config_json_file = self._getConfigData(available_dependencies, component, builddir, self.build_info_include_file) self.set_toplevel_definitions += config_definitions self.configured = True return { 'merged_config_include': self.config_include_file, 'merged_config_json': self.config_json_file, 'build_info_include': self.build_info_include_file } def generateRecursive(self, component, all_components, builddir=None, modbuilddir=None, processed_components=None, application=None): ''' generate top-level CMakeLists for this component and its dependencies: the CMakeLists are all generated in self.buildroot, which MUST be out-of-source !!! NOTE: experimenting with a slightly different way of doing things here, this function is a generator that yields any errors produced, so the correct use is: for error in gen.generateRecursive(...): print(error) ''' assert(self.configured) if builddir is None: builddir = self.buildroot if modbuilddir is None: modbuilddir = os.path.join(builddir, 'ym') if processed_components is None: processed_components = dict() if not self.target: yield 'Target "%s" is not a valid build target' % self.target toplevel = not len(processed_components) logger.debug('generate build files: %s (target=%s)' % (component, self.target)) # because of the way c-family language includes work we need to put the # public header directories of all components that this component # depends on (directly OR indirectly) into the search path, which means # we need to first enumerate all the direct and indirect dependencies recursive_deps = component.getDependenciesRecursive( available_components = all_components, target = self.target, available_only = True, test = True ) dependencies = component.getDependencies( all_components, target = self.target, available_only = True, test = True ) for name, dep in dependencies.items(): # if dep is a test dependency, then it might not be required (if # we're not building tests). We don't actually know at this point if not dep: if dep.isTestDependency(): logger.debug('Test dependency "%s" of "%s" is not installed.' 
% (name, component)) else: yield 'Required dependency "%s" of "%s" is not installed.' % (name, component) # ensure this component is assumed to have been installed before we # check for its dependencies, in case it has a circular dependency on # itself processed_components[component.getName()] = component new_dependencies = OrderedDict([(name,c) for name,c in dependencies.items() if c and not name in processed_components]) self.generate(builddir, modbuilddir, component, new_dependencies, dependencies, recursive_deps, application, toplevel) logger.debug('recursive deps of %s:' % component) for d in recursive_deps.values(): logger.debug(' %s' % d) processed_components.update(new_dependencies) for name, c in new_dependencies.items(): for error in self.generateRecursive( c, all_components, os.path.join(modbuilddir, name), modbuilddir, processed_components, application=application ): yield error def checkStandardSourceDir(self, dirname, component): # validate, , validate various things, internal from yotta.lib import validate err = validate.sourceDirValidationError(dirname, component.getName()) if err: logger.warning(err) def _validateListedSubdirsExist(self, component): ''' Return true if all the subdirectories which this component lists in its module.json file exist (although their validity is otherwise not checked). If they don't, warning messages are printed. ''' lib_subdirs = component.getLibs(explicit_only=True) bin_subdirs = component.getBinaries() ok = True for d in lib_subdirs: if not os.path.exists(os.path.join(component.path, d)): logger.warning( "lib directory \"%s\" doesn't exist but is listed in the module.json file of %s", d, component ) ok = False for d in bin_subdirs: if not os.path.exists(os.path.join(component.path, d)): logger.warning( "bin directory \"%s\" doesn't exist but is listed in the module.json file of %s", d, component ) ok = False return ok def _listSubDirectories(self, component, toplevel): ''' return: { manual: [list of subdirectories with manual CMakeLists], auto: [list of pairs: (subdirectory name to autogenerate, a list of source files in that dir)], bin: {dictionary of subdirectory name to binary name}, lib: {dictionary of subdirectory name to binary name}, test: [list of directories that build tests], resource: [list of directories that contain resources] } ''' manual_subdirs = [] auto_subdirs = [] header_subdirs = [] lib_subdirs = component.getLibs() bin_subdirs = component.getBinaries() test_subdirs = [] resource_subdirs = [] # if the application or library is set to get the sources from top level ("."), # they'll be accumulated into a single array (top_sources below). top_sources = [] start_on_top = "."
in [os.path.normpath(x) for x in list(lib_subdirs.keys()) + list(bin_subdirs.keys())] for f in sorted(os.listdir(component.path)): if f in Ignore_Subdirs or f.startswith('.') or f.startswith('_'): continue check_cmakefile_path = os.path.join(f, 'CMakeLists.txt') if os.path.isfile(os.path.join(component.path, check_cmakefile_path)) and not \ component.ignores(check_cmakefile_path): self.checkStandardSourceDir(f, component) # if the subdirectory has a CMakeLists.txt in it (and it isn't # ignored), then delegate to that: manual_subdirs.append(f) # tests only supported in the `test` directory for now if f in ('test',): test_subdirs.append(f) else: if os.path.isfile(os.path.join(component.path, f)): # top level source: check if it should be included if not component.ignores(f) and start_on_top: sf = self.createSourceFile(f, os.path.join(component.path, f), ".") if sf is not None: top_sources.append(sf) else: # otherwise, if the directory has source files, and is listed # as a source/test directory, generate a CMakeLists in the # corresponding temporary directory, and add that. sources = self.containsSourceFiles(os.path.join(component.path, f), component) if sources: if f in ('test',): auto_subdirs.append((f, sources)) test_subdirs.append(f) elif start_on_top: # include the sources in this directory only if it's not # a potential test directory from yotta.lib import validate if not validate.isPotentialTestDir(f): top_sources.extend(sources) if f == component.getName(): header_subdirs.append((f, sources)) elif os.path.normpath(f) in [fsutils.fullySplitPath(x)[0] for x in lib_subdirs] or \ os.path.normpath(f) in [fsutils.fullySplitPath(x)[0] for x in bin_subdirs]: for full_subpath in list(lib_subdirs.keys()) + list(bin_subdirs.keys()): if fsutils.fullySplitPath(full_subpath)[0] == os.path.normpath(f): # this might be a sub-sub directory, in which # case we need to re-calculate the sources just # for the part we care about: sources = self.containsSourceFiles(os.path.join(component.path, full_subpath), component) auto_subdirs.append((full_subpath, sources)) elif f == component.getName(): header_subdirs.append((f, sources)) elif toplevel and \ ((f in ('test',)) or \ (os.path.normpath(f) in lib_subdirs or start_on_top) or \ (os.path.normpath(f) in bin_subdirs or start_on_top) and not \ component.ignores(f)): # (if there aren't any source files then do nothing) # !!! 
FIXME: ensure this warning is covered in tests logger.warning("subdirectory \"%s\" of %s was ignored because it doesn't appear to contain any source files", f, component) # 'resource' directory also has special meaning, but there's no # pattern for the files which might be in here: if f in ('resource',): resource_subdirs.append(os.path.join(component.path, f)) # issue a warning if a differently cased or common misspelling of a # standard directory name was encountered: check_directory_name_cases = list(lib_subdirs.keys()) + list(bin_subdirs.keys()) + ['test', 'resource'] if f.lower() in check_directory_name_cases + ['src'] and not \ f in check_directory_name_cases and not \ component.ignores(f): self.checkStandardSourceDir(f, component) if top_sources: # all the top level sources are grouped into a single cmake-generated directory # which is given the same name as the component auto_subdirs.append((component.getName(), top_sources)) return { "manual": manual_subdirs, "auto": auto_subdirs, "headers": header_subdirs, "bin": {component.getName(): component.getName()} if (start_on_top and component.isApplication()) else bin_subdirs, "lib": {component.getName(): component.getName()} if (start_on_top and not component.isApplication()) else lib_subdirs, "test": test_subdirs, "resource": resource_subdirs } def _definitionsForConfig(self, config, key_path=None): if key_path is None: key_path = list() key_prefix = '_'.join([sanitizePreprocessorSymbol(x) for x in key_path]) r = [] if len(key_prefix): r.append((key_prefix,None)) for (k, v) in config.items(): if isinstance(v, dict): r += self._definitionsForConfig(v, key_path + [k]) else: # Don't validate the value here (we wouldn't know where an # invalid value came from, so the error message would be # unhelpful) - the target schema should validate values, or if # that isn't possible then the target should check when loading if isinstance(v, bool): # convert bool to 1/0, since we can't know the availability # of a C bool type v = 1 if v else 0 r.append(('%s_%s' % (key_prefix, sanitizePreprocessorSymbol(k)), v)) return r def _getConfigData(self, all_dependencies, component, builddir, build_info_header_path): ''' returns (path_to_config_header, cmake_set_definitions) ''' # ordered_json, , read/write ordered json, internal from yotta.lib import ordered_json add_defs_header = '' set_definitions = '' # !!! backwards-compatible "TARGET_LIKE" definitions for the top-level # of the config. 
NB: THESE WILL GO AWAY definitions = [] definitions.append(('TARGET', sanitizePreprocessorSymbol(self.target.getName()))) definitions.append(('TARGET_LIKE_%s' % sanitizePreprocessorSymbol(self.target.getName()),None)) # make the path to the build-info header available both to CMake and # in the preprocessor: full_build_info_header_path = replaceBackslashes(os.path.abspath(build_info_header_path)) logger.debug('build info header include path: "%s"', full_build_info_header_path) definitions.append(('YOTTA_BUILD_INFO_HEADER', '"'+full_build_info_header_path+'"')) for target in self.target.getSimilarTo_Deprecated(): if '*' not in target: definitions.append(('TARGET_LIKE_%s' % sanitizePreprocessorSymbol(target),None)) merged_config = self.target.getMergedConfig() logger.debug('target configuration data: %s', merged_config) definitions += self._definitionsForConfig(merged_config, ['YOTTA', 'CFG']) add_defs_header += '// yotta config data (including backwards-compatible definitions)\n' for k, v in definitions: if v is not None: add_defs_header += '#define %s %s\n' % (k, v) set_definitions += 'set(%s %s)\n' % (k, v) else: add_defs_header += '#define %s\n' % k set_definitions += 'set(%s TRUE)\n' % k add_defs_header += '\n// version definitions\n' for dep in list(all_dependencies.values()) + [component]: add_defs_header += "#define YOTTA_%s_VERSION_STRING \"%s\"\n" % (sanitizePreprocessorSymbol(dep.getName()), str(dep.getVersion())) add_defs_header += "#define YOTTA_%s_VERSION_MAJOR %d\n" % (sanitizePreprocessorSymbol(dep.getName()), dep.getVersion().major()) add_defs_header += "#define YOTTA_%s_VERSION_MINOR %d\n" % (sanitizePreprocessorSymbol(dep.getName()), dep.getVersion().minor()) add_defs_header += "#define YOTTA_%s_VERSION_PATCH %d\n" % (sanitizePreprocessorSymbol(dep.getName()), dep.getVersion().patch()) # add the component's definitions defines = component.getDefines() if defines: add_defs_header += "\n// direct definitions (defines.json)\n" for name, value in defines.items(): add_defs_header += "#define %s %s\n" % (name, value) add_defs_header += '\n' # use -include <definitions header> instead of lots of separate # defines... 
this is compiler specific, but currently testing it # out for gcc-compatible compilers only: config_include_file = os.path.join(builddir, 'yotta_config.h') config_json_file = os.path.join(builddir, 'yotta_config.json') set_definitions += 'set(YOTTA_CONFIG_MERGED_JSON_FILE \"%s\")\n' % replaceBackslashes(os.path.abspath(config_json_file)) self._writeFile( config_include_file, '#ifndef __YOTTA_CONFIG_H__\n'+ '#define __YOTTA_CONFIG_H__\n'+ add_defs_header+ '#endif // ndef __YOTTA_CONFIG_H__\n' ) self._writeFile( config_json_file, ordered_json.dumps(merged_config) ) return (config_include_file, set_definitions, config_json_file) def getBuildInfo(self, sourcedir, builddir): ''' Write the build info header file, and return (path_to_written_header, set_cmake_definitions) ''' cmake_defs = '' preproc_defs = '// yotta build info, #include YOTTA_BUILD_INFO_HEADER to access\n' # standard library modules import datetime # vcs, , represent version controlled directories, internal from yotta.lib import vcs now = datetime.datetime.utcnow() vcs_instance = vcs.getVCS(sourcedir) if self.build_uuid is None: import uuid self.build_uuid = uuid.uuid4() definitions = [ ('YOTTA_BUILD_YEAR', now.year, 'UTC year'), ('YOTTA_BUILD_MONTH', now.month, 'UTC month 1-12'), ('YOTTA_BUILD_DAY', now.day, 'UTC day 1-31'), ('YOTTA_BUILD_HOUR', now.hour, 'UTC hour 0-24'), ('YOTTA_BUILD_MINUTE', now.minute, 'UTC minute 0-59'), ('YOTTA_BUILD_SECOND', now.second, 'UTC second 0-61'), ('YOTTA_BUILD_UUID', self.build_uuid, 'unique random UUID for each build'), ] if vcs_instance is not None: commit_id = None repotype = vcs_instance.__class__.__name__ try: commit_id = vcs_instance.getCommitId() except vcs.VCSNotInstalled as e: logger.warning('%s is not installed, VCS status build info is not available', repotype) commit_id = None except vcs.VCSError as e: logger.debug('%s', e) logger.warning( 'error detecting build info: "%s", build info is not available to the build. Please check that this is a valid %s repository!', str(e).split('\n')[0], repotype ) if commit_id is not None: clean_state = int(vcs_instance.isClean()) description = vcs_instance.getDescription() definitions += [ ('YOTTA_BUILD_VCS_ID', commit_id, 'git or mercurial hash'), ('YOTTA_BUILD_VCS_CLEAN', clean_state, 'evaluates true if the version control system was clean, otherwise false'), ('YOTTA_BUILD_VCS_DESCRIPTION', description, 'git describe or mercurial equivalent') ] for d in definitions: preproc_defs += '#define %s %s // %s\n' % d cmake_defs += 'set(%s "%s") # %s\n' % d buildinfo_include_file = os.path.join(builddir, 'yotta_build_info.h') self._writeFile( buildinfo_include_file, '#ifndef __YOTTA_BUILD_INFO_H__\n'+ '#define __YOTTA_BUILD_INFO_H__\n'+ preproc_defs+ '#endif // ndef __YOTTA_BUILD_INFO_H__\n' ) return (buildinfo_include_file, cmake_defs) def generate( self, builddir, modbuilddir, component, active_dependencies, immediate_dependencies, all_dependencies, application, toplevel ): ''' active_dependencies is the dictionary of components that need to be built for this component, but will not already have been built for another component. 
''' include_root_dirs = '' if application is not None and component is not application: include_root_dirs += 'include_directories("%s")\n' % replaceBackslashes(application.path) include_sys_dirs = '' include_other_dirs = '' for name, c in itertools.chain(((component.getName(), component),), all_dependencies.items()): if c is not component and c.isTestDependency(): continue include_root_dirs += 'include_directories("%s")\n' % replaceBackslashes(c.path) dep_sys_include_dirs = c.getExtraSysIncludes() for d in dep_sys_include_dirs: include_sys_dirs += 'include_directories(SYSTEM "%s")\n' % replaceBackslashes(os.path.join(c.path, d)) dep_extra_include_dirs = c.getExtraIncludes() for d in dep_extra_include_dirs: include_other_dirs += 'include_directories("%s")\n' % replaceBackslashes(os.path.join(c.path, d)) add_depend_subdirs = '' for name, c in active_dependencies.items(): depend_subdir = replaceBackslashes(os.path.join(modbuilddir, name)) relpath = replaceBackslashes(os.path.relpath(depend_subdir, self.buildroot)) add_depend_subdirs += \ 'add_subdirectory(\n' \ ' "%s"\n' \ ' "${CMAKE_BINARY_DIR}/%s"\n' \ ')\n' \ % (depend_subdir, relpath) delegate_to_existing = None delegate_build_dir = None module_is_empty = False if os.path.isfile(os.path.join(component.path, 'CMakeLists.txt')) and not component.ignores('CMakeLists.txt'): # adding custom CMake is a promise to generate a library: so the # module is never empty in this case. delegate_to_existing = component.path add_own_subdirs = [] logger.debug("delegate to build dir: %s", builddir) delegate_build_dir = os.path.join(builddir, 'existing') else: # !!! TODO: if they don't exist, that should possibly be a fatal # error, not just a warning self._validateListedSubdirsExist(component) subdirs = self._listSubDirectories(component, toplevel) manual_subdirs = subdirs['manual'] autogen_subdirs = subdirs['auto'] binary_subdirs = subdirs['bin'] lib_subdirs = subdirs['lib'] test_subdirs = subdirs['test'] resource_subdirs = subdirs['resource'] header_subdirs = subdirs['headers'] logger.debug("%s lib subdirs: %s, bin subdirs: %s", component, lib_subdirs, binary_subdirs) add_own_subdirs = [] for f in manual_subdirs: if os.path.isfile(os.path.join(component.path, f, 'CMakeLists.txt')): # if this module is a test dependency, then don't recurse # to building its own tests. if f in test_subdirs and component.isTestDependency(): continue add_own_subdirs.append( (os.path.join(component.path, f), f) ) # names of all directories at this level with stuff in: used to figure # out what to link automatically all_subdirs = manual_subdirs + [x[0] for x in autogen_subdirs] # first check if this module is empty: if component.isTestDependency(): if len(autogen_subdirs) + len(add_own_subdirs) == 0: module_is_empty = True else: if len(autogen_subdirs) + len(add_own_subdirs) <= len(test_subdirs): module_is_empty = True # autogenerate CMakeLists for subdirectories as appropriate: for f, source_files in autogen_subdirs: if f in test_subdirs: # if this module is a test dependency, then don't recurse # to building its own tests. 
if component.isTestDependency(): continue self.generateTestDirList( builddir, f, source_files, component, immediate_dependencies, toplevel=toplevel, module_is_empty=module_is_empty ) else: if f in binary_subdirs: is_executable = True object_name = binary_subdirs[f] else: # not a test subdir or binary subdir: it must be a lib # subdir assert(f in lib_subdirs) object_name = lib_subdirs[f] for header_dir, header_files in header_subdirs: source_files.extend(header_files) self.generateSubDirList( builddir = builddir, dirname = f, source_files = source_files, component = component, all_subdirs = all_subdirs, immediate_dependencies = immediate_dependencies, object_name = object_name, resource_subdirs = resource_subdirs, is_executable = (f in binary_subdirs) ) add_own_subdirs.append( (os.path.join(builddir, f), f) ) # from now on, completely forget that this component had any tests # if it is itself a test dependency: if component.isTestDependency(): test_subdirs = [] # if we're not building anything other than tests, and this is a # library module (not a binary) then we need to generate a dummy # library so that this component can still be linked against if module_is_empty: if len(binary_subdirs): logger.warning('nothing to build!') else: add_own_subdirs.append(self.createDummyLib( component, builddir, [x[0] for x in immediate_dependencies.items() if not x[1].isTestDependency()] )) toolchain_file_path = os.path.join(builddir, 'toolchain.cmake') if toplevel: # generate the top-level toolchain file: template = jinja_environment.get_template('toolchain.cmake') file_contents = template.render({ #pylint: disable=no-member # toolchain files are provided in hierarchy # order, but the template needs them in reverse # order (base-first): "toolchain_files": self.target.getToolchainFiles() }) self._writeFile(toolchain_file_path, file_contents) # generate the top-level CMakeLists.txt template = jinja_environment.get_template('base_CMakeLists.txt') relpath = os.path.relpath(builddir, self.buildroot) file_contents = template.render({ #pylint: disable=no-member "toplevel": toplevel, "target_name": self.target.getName(), "set_definitions": self.set_toplevel_definitions, "toolchain_file": toolchain_file_path, "component": component, "relpath": relpath, "include_root_dirs": include_root_dirs, "include_sys_dirs": include_sys_dirs, "include_other_dirs": include_other_dirs, "add_depend_subdirs": add_depend_subdirs, "add_own_subdirs": add_own_subdirs, "config_include_file": self.config_include_file, "delegate_to": delegate_to_existing, "delegate_build_dir": delegate_build_dir, "active_dependencies": active_dependencies, "module_is_empty": module_is_empty, "cmake_includes": self.target.getAdditionalIncludes() }) self._writeFile(os.path.join(builddir, 'CMakeLists.txt'), file_contents) def createDummyLib(self, component, builddir, link_dependencies): safe_name = sanitizeSymbol(component.getName()) dummy_dirname = 'yotta_dummy_lib_%s' % safe_name dummy_cfile_name = 'dummy.c' logger.debug("create dummy lib: %s, %s, %s" % (safe_name, dummy_dirname, dummy_cfile_name)) cmake_files = [] source_dir = os.path.join(component.path, 'source') if os.path.exists(source_dir): for root, dires, files in os.walk(os.path.join(component.path, 'source')): for f in files: name, ext = os.path.splitext(f) if ext.lower() == '.cmake' and not component.ignores(os.path.relpath(os.path.join(root, f), component.path)): cmake_files.append(os.path.join(root, f)) dummy_template = jinja_environment.get_template('dummy_CMakeLists.txt') dummy_cmakelists = 
dummy_template.render({ #pylint: disable=no-member "cfile_name": dummy_cfile_name, "libname": component.getName(), "link_dependencies": link_dependencies, "cmake_files": cmake_files }) self._writeFile(os.path.join(builddir, dummy_dirname, "CMakeLists.txt"), dummy_cmakelists) dummy_cfile = "void __yotta_dummy_lib_symbol_%s(){}\n" % safe_name self._writeFile(os.path.join(builddir, dummy_dirname, dummy_cfile_name), dummy_cfile) return (os.path.join(builddir, dummy_dirname), dummy_dirname) def writeIfDifferent(self, fname, contents): try: with open(fname, "r+") as f: current_contents = f.read() if current_contents != contents: f.seek(0) f.write(contents) f.truncate() except IOError: with open(fname, "w") as f: f.write(contents) def generateTestDirList(self, builddir, dirname, source_files, component, immediate_dependencies, toplevel=False, module_is_empty=False): logger.debug('generate CMakeLists.txt for directory: %s' % os.path.join(component.path, dirname)) link_dependencies = [x for x in immediate_dependencies] fname = os.path.join(builddir, dirname, 'CMakeLists.txt') # group the list of source files by subdirectory: generate one test for # each subdirectory, and one test for each file at the top level subdirs = defaultdict(list) toplevel_srcs = [] for f in source_files: if f.lang in ('c', 'cpp', 'objc', 's'): subrelpath = os.path.relpath(f.relpath, dirname) subdir = fsutils.fullySplitPath(subrelpath)[0] if subdir and subdir != subrelpath: subdirs[subdir].append(f) else: toplevel_srcs.append(f) tests = [] for f in toplevel_srcs: object_name = '%s-test-%s' % ( component.getName(), os.path.basename(os.path.splitext(str(f))[0]).lower() ) tests.append([[str(f)], object_name, [f.lang]]) for subdirname, sources in sorted(subdirs.items(), key=lambda x: x[0]): object_name = '%s-test-%s' % ( component.getName(), fsutils.fullySplitPath(subdirname)[0].lower() ) tests.append([[str(f) for f in sources], object_name, [f.lang for f in sources]]) # link tests against the main executable if not module_is_empty: link_dependencies.append(component.getName()) # Find cmake files cmake_files = [] for root, dires, files in os.walk(os.path.join(component.path, dirname)): for f in files: name, ext = os.path.splitext(f) if ext.lower() == '.cmake' and not component.ignores(os.path.relpath(os.path.join(root, f), component.path)): cmake_files.append(os.path.join(root, f)) test_template = jinja_environment.get_template('test_CMakeLists.txt') file_contents = test_template.render({ #pylint: disable=no-member 'source_directory':os.path.join(component.path, dirname), 'tests':tests, 'link_dependencies':link_dependencies, 'cmake_files': cmake_files, 'exclude_from_all': (not toplevel), 'test_dependencies': [x[1] for x in immediate_dependencies.items() if x[1].isTestDependency()] }) self._writeFile(fname, file_contents) def generateSubDirList(self, builddir, dirname, source_files, component, all_subdirs, immediate_dependencies, object_name, resource_subdirs, is_executable): logger.debug('generate CMakeLists.txt for directory: %s' % os.path.join(component.path, dirname)) link_dependencies = [x[0] for x in immediate_dependencies.items() if not x[1].isTestDependency()] fname = os.path.join(builddir, dirname, 'CMakeLists.txt') assert(object_name) object_name = object_name # if we're building the main library, or an executable for this # component, then we should link against all the other directories # containing cmakelists: link_dependencies += [x for x in all_subdirs if x not in ('source', 'test', dirname)] # Find resource files 
resource_files = [] for f in resource_subdirs: for root, dires, files in os.walk(f): if root.endswith(".xcassets") or root.endswith(".bundle"): resource_files.append(root) del dires[:] else: for f in files: resource_files.append(os.path.join(root, f)) # Find cmake files cmake_files = [] for root, dires, files in os.walk(os.path.join(component.path, dirname)): for f in files: name, ext = os.path.splitext(f) if ext.lower() == '.cmake' and not component.ignores(os.path.relpath(os.path.join(root, f), component.path)): cmake_files.append(os.path.join(root, f)) subdir_template = jinja_environment.get_template('subdir_CMakeLists.txt') file_contents = subdir_template.render({ #pylint: disable=no-member 'source_directory': os.path.join(component.path, dirname), "config_include_file": self.config_include_file, 'executable': is_executable, 'file_names': [str(f) for f in source_files], 'object_name': object_name, 'link_dependencies': link_dependencies, 'languages': set(f.lang for f in source_files), 'source_files': set((f.fullpath, f.lang) for f in source_files), 'resource_files': resource_files, 'cmake_files': cmake_files }) self._writeFile(fname, file_contents) def createSourceFile(self, f, fullpath, relpath): c_exts = set(('.c',)) cpp_exts = set(('.cpp','.cc','.cxx')) asm_exts = set(('.s',)) objc_exts = set(('.m', '.mm')) header_exts = set(('.h',)) name, ext = os.path.splitext(f) ext = ext.lower() if ext in c_exts: return SourceFile(fullpath, relpath, 'c') elif ext in cpp_exts: return SourceFile(fullpath, relpath, 'cpp') elif ext in asm_exts: return SourceFile(fullpath, relpath, 's') elif ext in objc_exts: return SourceFile(fullpath, relpath, 'objc') elif ext in header_exts: return SourceFile(fullpath, relpath, 'header') else: return None def containsSourceFiles(self, directory, component): sources = [] for root, dires, files in os.walk(directory): for f in sorted(files): fullpath = os.path.join(root, f) relpath = os.path.relpath(fullpath, component.path) if component.ignores(relpath): continue sf = self.createSourceFile(f, fullpath, relpath) if sf is not None: sources.append(sf) return sources
class CMakeGen(object): def __init__(self, directory, target): pass def _writeFile(self, path, contents): pass def configure(self, component, all_dependencies): ''' Ensure all config-time files have been generated. Return a dictionary of generated items. ''' pass def generateRecursive(self, component, all_components, builddir=None, modbuilddir=None, processed_components=None, application=None): ''' generate top-level CMakeLists for this component and its dependencies: the CMakeLists are all generated in self.buildroot, which MUST be out-of-source !!! NOTE: experimenting with a slightly different way of doing things here, this function is a generator that yields any errors produced, so the correct use is: for error in gen.generateRecursive(...): print(error) ''' pass def checkStandardSourceDir(self, dirname, component): pass def _validateListedSubdirsExist(self, component): ''' Return true if all the subdirectories which this component lists in its module.json file exist (although their validity is otherwise not checked). If they don't, warning messages are printed. ''' pass def _listSubDirectories(self, component, toplevel): ''' return: { manual: [list of subdirectories with manual CMakeLists], auto: [list of pairs: (subdirectory name to autogenerate, a list of source files in that dir)], bin: {dictionary of subdirectory name to binary name}, lib: {dictionary of subdirectory name to binary name}, test: [list of directories that build tests], resource: [list of directories that contain resources] } ''' pass def _definitionsForConfig(self, config, key_path=None): pass def _getConfigData(self, all_dependencies, component, builddir, build_info_header_path): ''' returns (path_to_config_header, cmake_set_definitions) ''' pass def getBuildInfo(self, sourcedir, builddir): ''' Write the build info header file, and return (path_to_written_header, set_cmake_definitions) ''' pass def generate( self, builddir, modbuilddir, component, active_dependencies, immediate_dependencies, all_dependencies, application, toplevel ): ''' active_dependencies is the dictionary of components that need to be built for this component, but will not already have been built for another component. ''' pass def createDummyLib(self, component, builddir, link_dependencies): pass def writeIfDifferent(self, fname, contents): pass def generateTestDirList(self, builddir, dirname, source_files, component, immediate_dependencies, toplevel=False, module_is_empty=False): pass def generateSubDirList(self, builddir, dirname, source_files, component, all_subdirs, immediate_dependencies, object_name, resource_subdirs, is_executable): pass def createSourceFile(self, f, fullpath, relpath): pass def containsSourceFiles(self, directory, component): pass
18
7
44
4
33
8
8
0.25
1
13
3
0
17
8
17
17
769
87
562
174
536
141
402
170
378
23
1
7
128
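Note: _definitionsForConfig above recursively flattens the merged target/application config into preprocessor symbols: nested keys are joined with underscores, each non-empty prefix also becomes a bare define, and booleans are converted to 1/0 because a C bool type can't be assumed. A self-contained sketch of the same flattening follows; the sanitize helper is a simplified stand-in for yotta's sanitizePreprocessorSymbol.

    import re

    def sanitize(symbol):
        # simplified stand-in: uppercase, with non-alphanumeric
        # characters replaced by underscores
        return re.sub(r'[^A-Za-z0-9]', '_', str(symbol)).upper()

    def definitions_for_config(config, key_path=()):
        prefix = '_'.join(sanitize(k) for k in key_path)
        # every non-empty prefix also becomes a bare define
        defs = [(prefix, None)] if prefix else []
        for k, v in config.items():
            if isinstance(v, dict):
                defs += definitions_for_config(v, key_path + (k,))
            else:
                if isinstance(v, bool):
                    # no portable C bool literal: use 1/0
                    v = 1 if v else 0
                defs.append(('%s_%s' % (prefix, sanitize(k)), v))
        return defs

    cfg = {'mbed': {'stdio': {'baud': 115200, 'convert-newlines': True}}}
    for name, value in definitions_for_config(cfg, ('YOTTA', 'CFG')):
        print('#define %s%s' % (name, '' if value is None else ' %s' % value))

Running this prints YOTTA_CFG, YOTTA_CFG_MBED, YOTTA_CFG_MBED_STDIO, then YOTTA_CFG_MBED_STDIO_BAUD 115200 and YOTTA_CFG_MBED_STDIO_CONVERT_NEWLINES 1, mirroring the symbol shape written into yotta_config.h.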
2,668
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/auth.py
yotta.lib.auth.AuthTimedOut
class AuthTimedOut(AuthException): pass
class AuthTimedOut(AuthException): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
10
2
0
2
1
1
0
2
1
1
0
4
0
0
2,669
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/auth.py
yotta.lib.auth.AuthException
class AuthException(Exception): pass
class AuthException(Exception): pass
1
0
0
0
0
0
0
0
1
0
0
1
0
0
0
10
2
0
2
1
1
0
2
1
1
0
3
0
0
2,670
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/access_common.py
yotta.lib.access_common.Unavailable
class Unavailable(AccessException): pass
class Unavailable(AccessException): pass
1
0
0
0
0
0
0
0
1
0
0
1
0
0
0
10
2
0
2
1
1
0
2
1
1
0
4
0
0
2,671
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/git_access.py
yotta.lib.git_access.GitWorkingCopy
class GitWorkingCopy(object): def __init__(self, vcs): self.vcs = vcs self.directory = vcs.workingDirectory() def remove(self): self.vcs.remove() self.directory = None def availableVersions(self): ''' return a list of GitCloneVersion objects for tags which are valid semantic version identifiers. ''' r = [] for t in self.vcs.tags(): logger.debug("available version tag: %s", t) # ignore empty tags: if not len(t.strip()): continue try: r.append(GitCloneVersion(t, t, self)) except ValueError: logger.debug('invalid version tag: %s', t) return r def availableTags(self): ''' return a list of GitCloneVersion objects for all tags ''' return [GitCloneVersion('', t, self) for t in self.vcs.tags()] def availableBranches(self): ''' return a list of GitCloneVersion objects for the tip of each branch ''' return [GitCloneVersion('', b, self) for b in self.vcs.branches()] def tipVersion(self): return GitCloneVersion('', '', self) def commitVersion(self, spec): ''' return a GitCloneVersion object for a specific commit if valid ''' import re commit_match = re.match('^[a-f0-9]{7,40}$', spec, re.I) if commit_match: return GitCloneVersion('', spec, self) return None
class GitWorkingCopy(object): def __init__(self, vcs): pass def remove(self): pass def availableVersions(self): ''' return a list of GitCloneVersion objects for tags which are valid semantic version identifiers. ''' pass def availableTags(self): ''' return a list of GitCloneVersion objects for all tags ''' pass def availableBranches(self): ''' return a list of GitCloneVersion objects for the tip of each branch ''' pass def tipVersion(self): pass def commitVersion(self, spec): ''' return a GitCloneVersion object for a specific commit if valid ''' pass
8
4
6
0
4
1
2
0.33
1
2
1
0
7
2
7
7
49
9
30
14
21
10
30
14
21
4
1
2
11
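Note: commitVersion above treats any 7-to-40 character hex string as a potential git commit, matching case-insensitively. The acceptance test in isolation, as a small runnable sketch:

    import re

    def looks_like_commit(spec):
        # same pattern as GitWorkingCopy.commitVersion: 7 to 40 hex
        # characters, matched case-insensitively
        return re.match(r'^[a-f0-9]{7,40}$', spec, re.I) is not None

    assert looks_like_commit('deadbeef')
    assert looks_like_commit('A1B2C3D')          # case-insensitive
    assert not looks_like_commit('v1.2.3')       # not hex
    assert not looks_like_commit('abc123')       # too short: 6 characters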
2,672
ARMmbed/yotta
ARMmbed_yotta/yotta/test/cli/test_start.py
yotta.test.cli.test_start.TestCLIStart
class TestCLIStart(unittest.TestCase): @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_noop_start(self): test_dir = util.writeTestFiles(util.Test_Trivial_Exe, True) util.writeTestFiles(_nopStartTargetDescription('start-test-target'), test_dir=os.path.join(test_dir, 'yotta_targets', 'start-test-target')) output = util.runCheckCommand( ['--target', 'start-test-target', 'build'], test_dir) output = util.runCheckCommand( ['--target', 'start-test-target', 'start'], test_dir) self.assertIn('would start source/test-trivial-exe', output) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_native_start(self): test_dir = util.writeTestFiles(util.Test_Trivial_Exe, True) output = util.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) output = util.runCheckCommand( ['--target', util.nativeTarget(), 'start'], test_dir) self.assertIn('[trivial-exe-running]', output) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_notfound_start(self): test_dir = util.writeTestFiles(util.Test_Trivial_Exe, True) target_descr = _nopStartTargetDescription('start-test-target') del target_descr['./scripts/nop.py'] util.writeTestFiles(target_descr, test_dir=os.path.join( test_dir, 'yotta_targets', 'start-test-target')) # in this case, without the script present we expect a failure output = util.runCheckCommand( ['--target', 'start-test-target', 'build'], test_dir) stdout, stderr, statuscode = cli.run( ['--target', 'start-test-target', 'start'], cwd=test_dir) self.assertNotEqual(statuscode, 0) util.rmRf(test_dir)
class TestCLIStart(unittest.TestCase): @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_noop_start(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_native_start(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_notfound_start(self): pass
7
0
8
0
7
0
1
0.04
1
0
0
0
3
0
3
75
30
3
26
15
19
1
23
12
19
1
2
0
3
2,673
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/git_access.py
yotta.lib.git_access.GitCloneVersion
class GitCloneVersion(version.Version): def __init__(self, semver, tag, working_copy): self.working_copy = working_copy self.tag = tag super(GitCloneVersion, self).__init__(semver) def unpackInto(self, directory): # vcs, , represent version controlled directories, internal from yotta.lib import vcs # fsutils, , misc filesystem utils, internal from yotta.lib import fsutils logger.debug('unpack version %s from git repo %s to %s' % (self.version, self.working_copy.directory, directory)) tag = self.tag fsutils.rmRf(directory) vcs.Git.cloneToDirectory(self.working_copy.directory, directory, tag) # remove temporary files created by the GitWorkingCopy clone self.working_copy.remove()
class GitCloneVersion(version.Version): def __init__(self, semver, tag, working_copy): pass def unpackInto(self, directory): pass
3
0
8
1
6
2
1
0.23
1
2
1
0
2
2
2
19
18
2
13
8
8
3
13
8
8
1
2
0
2
2,674
ARMmbed/yotta
ARMmbed_yotta/yotta/test/cli/test_link.py
yotta.test.cli.test_link.TestCLILink
class TestCLILink(unittest.TestCase): @classmethod def setUpClass(cls): cls.prefix_dir = tempfile.mkdtemp() os.environ['YOTTA_PREFIX'] = cls.prefix_dir @classmethod def tearDownClass(cls): util.rmRf(cls.prefix_dir) cls.prefix_dir = None def testLink(self): linked_in_module = util.writeTestFiles(util.Test_Trivial_Lib, True) stdout, stderr, statuscode = cli.run( ['-t', Test_Target, '--plain', 'link'], cwd=linked_in_module) self.assertEqual(statuscode, 0) self.assertTrue(os.path.exists(os.path.join( globalInstallDirectory(), 'test-trivial-lib'))) test_module = util.writeTestFiles( util.Test_Testing_Trivial_Lib_Dep, True) stdout, stderr, statuscode = cli.run( ['-t', Test_Target, '--plain', 'list'], cwd=test_module) self.assertIn('missing', stdout+stderr) stdout, stderr, statuscode = cli.run( ['-t', Test_Target, '--plain', 'link', 'test-trivial-lib'], cwd=test_module) self.assertEqual(statuscode, 0) self.assertNotIn('broken', stdout+stderr) stdout, stderr, statuscode = cli.run( ['-t', Test_Target, '--plain', 'list'], cwd=test_module) self.assertNotIn('missing', stdout+stderr) util.rmRf(test_module) util.rmRf(linked_in_module) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on this platform yet") def testLinkedBuild(self): linked_in_module = util.writeTestFiles(util.Test_Trivial_Lib, True) test_module = util.writeTestFiles( util.Test_Testing_Trivial_Lib_Dep, True) stdout, stderr, statuscode = cli.run( ['-t', util.nativeTarget(), '--plain', 'link'], cwd=linked_in_module) self.assertEqual(statuscode, 0) stdout, stderr, statuscode = cli.run( ['-t', util.nativeTarget(), '--plain', 'link', 'test-trivial-lib'], cwd=test_module) self.assertEqual(statuscode, 0) stdout, stderr, statuscode = cli.run( ['-t', util.nativeTarget(), '--plain', 'build'], cwd=test_module) self.assertEqual(statuscode, 0) util.rmRf(test_module) util.rmRf(linked_in_module) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on this platform yet") def testLinkedReBuild(self): # test that changing which module is linked triggers a re-build linked_in_module_1 = util.writeTestFiles(util.Test_Trivial_Lib, True) linked_in_module_2 = util.writeTestFiles(util.Test_Trivial_Lib, True) test_module = util.writeTestFiles( util.Test_Testing_Trivial_Lib_Dep, True) stdout, stderr, statuscode = cli.run( ['-t', util.nativeTarget(), '--plain', 'link'], cwd=linked_in_module_1) self.assertEqual(statuscode, 0) stdout, stderr, statuscode = cli.run( ['-t', util.nativeTarget(), '--plain', 'link', 'test-trivial-lib'], cwd=test_module) self.assertEqual(statuscode, 0) stdout, stderr, statuscode = cli.run( ['-t', util.nativeTarget(), '--plain', 'build'], cwd=test_module) self.assertEqual(statuscode, 0) # check that rebuild is no-op stdout, stderr, statuscode = cli.run( ['-t', util.nativeTarget(), '--plain', 'build'], cwd=test_module) self.assertIn('no work to do', stdout+stderr) self.assertEqual(statuscode, 0) stdout, stderr, statuscode = cli.run( ['-t', util.nativeTarget(), '--plain', 'link'], cwd=linked_in_module_2) self.assertEqual(statuscode, 0) stdout, stderr, statuscode = cli.run( ['-t', util.nativeTarget(), '--plain', 'build'], cwd=test_module) self.assertNotIn('no work to do', stdout+stderr) self.assertEqual(statuscode, 0) util.rmRf(test_module) util.rmRf(linked_in_module_1) util.rmRf(linked_in_module_2) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on this platform yet") def testTargetLinkedBuild(self): linked_in_target = util.writeTestFiles( util.getNativeTargetDescription(), True) test_module 
= util.writeTestFiles( util.Test_Testing_Trivial_Lib_Dep_Preinstalled, True) stdout, stderr, statuscode = cli.run( ['-t', 'test-native-target', '--plain', 'link-target'], cwd=linked_in_target) self.assertEqual(statuscode, 0) stdout, stderr, statuscode = cli.run( ['-t', 'test-native-target', '--plain', 'link-target', 'test-native-target'], cwd=test_module) self.assertEqual(statuscode, 0) stdout, stderr, statuscode = cli.run( ['-t', 'test-native-target', '--plain', 'build'], cwd=test_module) self.assertEqual(statuscode, 0) util.rmRf(test_module) util.rmRf(linked_in_target)
class TestCLILink(unittest.TestCase): @classmethod def setUpClass(cls): pass @classmethod def tearDownClass(cls): pass def testLink(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on this platform yet") def testLinkedBuild(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on this platform yet") def testLinkedReBuild(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on this platform yet") def testTargetLinkedBuild(self): pass
12
0
13
2
11
0
1
0.03
1
0
0
0
4
0
6
78
91
19
70
25
58
2
65
20
58
1
2
0
6
2,675
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/vcs.py
yotta.lib.vcs.VCSNotInstalled
class VCSNotInstalled(VCSError): pass
class VCSNotInstalled(VCSError): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
11
2
0
2
1
1
0
2
1
1
0
4
0
0
2,676
ARMmbed/yotta
ARMmbed_yotta/yotta/test/cli/test_minversion.py
yotta.test.cli.test_minversion.TestCLIYottaVersionSpecs
class TestCLIYottaVersionSpecs(unittest.TestCase): @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_yottaVersionCheckTooLowBuilding(self): test_dir = util.writeTestFiles(Test_Min_Version_Insufficient) stdout, stderr, statuscode = cli.run( ['--target', util.nativeTarget(), 'build'], cwd=test_dir) self.assertNotEqual(statuscode, 0) self.assertIn('requires yotta version >', stdout+stderr) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_yottaVersionCheckOKBuilding(self): test_dir = util.writeTestFiles(Test_Min_Version_OK) stdout, stderr, statuscode = cli.run( ['--target', util.nativeTarget(), 'build'], cwd=test_dir) self.assertEqual(statuscode, 0) self.assertNotIn('requires yotta version >', stdout+stderr) util.rmRf(test_dir) def test_yottaVersionCheckTooLowInstalling(self): test_dir = util.writeTestFiles(Test_Min_Version_Insufficient) stdout, stderr, statuscode = cli.run( ['--target', 'x86-linux-native', 'install'], cwd=test_dir) self.assertNotEqual(statuscode, 0) self.assertIn('requires yotta version >', stdout+stderr) util.rmRf(test_dir) def test_yottaVersionCheckOKInstalling(self): test_dir = util.writeTestFiles(Test_Min_Version_OK) stdout, stderr, statuscode = cli.run( ['--target', 'x86-linux-native', 'install'], cwd=test_dir) self.assertEqual(statuscode, 0) self.assertNotIn('requires yotta version >', stdout+stderr) util.rmRf(test_dir) def test_yottaVersionTargetCheck(self): test_dir = util.writeTestFiles(Test_Min_Version_Target_Insufficient) stdout, stderr, statuscode = cli.run( ['--target', Test_Min_Version_Target_Name, 'install'], cwd=test_dir) self.assertNotEqual(statuscode, 0) self.assertIn('requires yotta version >', stdout+stderr) util.rmRf(test_dir) def test_unparseableSpec(self): test_dir = util.writeTestFiles(Test_Unparsable_Spec) stdout, stderr, statuscode = cli.run( ['--target', Test_Min_Version_Target_Name, 'install'], cwd=test_dir) self.assertNotEqual(statuscode, 0) self.assertIn('could not parse yotta version spec', stdout+stderr) util.rmRf(test_dir)
class TestCLIYottaVersionSpecs(unittest.TestCase): @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_yottaVersionCheckTooLowBuilding(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_yottaVersionCheckOKBuilding(self): pass def test_yottaVersionCheckTooLowInstalling(self): pass def test_yottaVersionCheckOKInstalling(self): pass def test_yottaVersionTargetCheck(self): pass def test_unparseableSpec(self): pass
9
0
6
0
6
0
1
0
1
0
0
0
6
0
6
78
44
5
39
21
30
0
37
19
30
1
2
0
6
2,677
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/vcs.py
yotta.lib.vcs.VCSError
class VCSError(Exception): def __init__(self, message, returncode=None, command=None): super(VCSError, self).__init__(message) self.returncode = returncode self.command = command
class VCSError(Exception): def __init__(self, message, returncode=None, command=None): pass
2
0
4
0
4
0
1
0
1
1
0
1
1
2
1
11
5
0
5
4
3
0
5
4
3
1
3
0
1
2,678
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/vcs.py
yotta.lib.vcs.Git
class Git(VCS): def __init__(self, path): self.worktree = path self.gitdir = os.path.join(path, '.git') @classmethod def cloneToTemporaryDir(cls, remote): return cls.cloneToDirectory(remote, tempfile.mkdtemp()) @classmethod def cloneToDirectory(cls, remote, directory, tag=None): commands = [ ['git', 'clone', remote, directory] ] cls._execCommands(commands) r = Git(directory) if tag is not None: r.updateToTag(tag) return r def fetchAllBranches(self): remote_branches = [] local_branches = [] # list remote branches out, err = self._execCommands([self._gitCmd('branch', '-r')]) for line in out.split(b'\n'): branch_info = line.split(b' -> ') # skip HEAD: if len(branch_info) > 1: continue remote_branch = branch_info[0].strip() branch = b'/'.join(remote_branch.split(b'/')[1:]) remote_branches.append((remote_branch, branch)) # list already-existing local branches out, err = self._execCommands([self._gitCmd('branch')]) for line in out.split(b'\n'): local_branches.append(line.strip(b' *')) for remote, branchname in remote_branches: # don't try to replace existing local branches if branchname in local_branches: continue try: out, err = self._execCommands([ self._gitCmd('checkout', '-b', branchname, remote) ]) except VCSError as e: git_logger.error('failed to fetch remote branch %s %s' % (remote, branchname)) raise def remove(self): # fsutils, , misc filesystem utils, internal from yotta.lib import fsutils fsutils.rmRf(self.worktree) def getCommitId(self): out, err = self._execCommands([self._gitCmd('rev-parse', 'HEAD')]) return out.strip() def getDescription(self): out, err = self._execCommands([self._gitCmd('describe', '--always', '--tags')]) return out.strip() def workingDirectory(self): return self.worktree def _gitCmd(self, *args): return ['git','--work-tree=%s' % self.worktree,'--git-dir=%s'%self.gitdir.replace('\\', '/')] + list(args); @classmethod def _execCommands(cls, commands): out, err = None, None for cmd in commands: try: child = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=os.environ) except OSError as e: if e.errno == errno.ENOENT: if cmd[0] == 'git': raise VCSNotInstalled( 'git is not installed, or not in your path. Please follow the installation instructions at http://docs.yottabuild.org/#installing' ) else: raise VCSNotInstalled('%s is not installed' % (cmd[0])) else: raise VCSError('command failed', command=cmd) out, err = child.communicate() returncode = child.returncode if returncode: raise VCSError("command failed: %s" % (err or out), returncode=returncode, command=cmd) return out, err def isClean(self): commands = [ self._gitCmd('diff', '--quiet', '--exit-code'), self._gitCmd('diff', '--cached', '--quiet', '--exit-code'), ] try: out, err = self._execCommands(commands) except VCSError as e: if e.returncode: return False else: raise return True def markForCommit(self, relative_path): commands = [ self._gitCmd('add', os.path.join(self.worktree, relative_path)), ] self._execCommands(commands) def updateToTag(self, tag): commands = [ self._gitCmd('checkout', tag), ] self._execCommands(commands) def tags(self): commands = [ self._gitCmd('tag', '-l') ] out, err = self._execCommands(commands) # I think utf-8 is the right encoding? commit messages are utf-8 # encoded, couldn't find any documentation on tag names. 
return out.decode('utf-8').split(u'\n') def branches(self): commands = [ self._gitCmd('branch', '--list') ] out, err = self._execCommands(commands) return [x.lstrip(' *') for x in out.decode('utf-8').split('\n')] def commit(self, message, tag=None): commands = [ self._gitCmd('commit', '-m', message), ] if tag: commands.append( self._gitCmd('tag', tag, '-a', '-m', tag), ) self._execCommands(commands) def __nonzero__(self): return True
class Git(VCS): def __init__(self, path): pass @classmethod def cloneToTemporaryDir(cls, remote): pass @classmethod def cloneToDirectory(cls, remote, directory, tag=None): pass def fetchAllBranches(self): pass def remove(self): pass def getCommitId(self): pass def getDescription(self): pass def workingDirectory(self): pass def _gitCmd(self, *args): pass @classmethod def _execCommands(cls, commands): pass def isClean(self): pass def markForCommit(self, relative_path): pass def updateToTag(self, tag): pass def tags(self): pass def branches(self): pass def commit(self, message, tag=None): pass def __nonzero__(self): pass
21
0
8
0
7
0
2
0.07
1
5
2
0
14
2
17
28
149
21
121
52
99
8
94
46
75
7
2
4
32
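Annotation: the Git record above funnels every git invocation through one execute-and-check helper built on subprocess. Below is a minimal, self-contained sketch of that pattern, not the record's code itself; run_command is a hypothetical name, and the demo call assumes git is on PATH.

import subprocess

class VCSError(Exception):
    # mirrors the VCSError shape referenced in the record above; the
    # returncode and command are kept for diagnostics
    def __init__(self, message, returncode=None, command=None):
        super(VCSError, self).__init__(message)
        self.returncode = returncode
        self.command = command

def run_command(cmd):
    # run one command, capture its output, and fail loudly on a non-zero
    # exit status
    child = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = child.communicate()
    if child.returncode:
        raise VCSError('command failed: %s' % (err or out),
                       returncode=child.returncode, command=cmd)
    return out

if __name__ == '__main__':
    print(run_command(['git', '--version']).decode('utf-8').strip())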
2,679
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/target.py
yotta.lib.target.Target
class Target(pack.Pack): def __init__( self, path, installed_linked = False, latest_suitable_version = None, inherit_shrinkwrap = None ): ''' Initialise a Target based on a directory. If the directory does not contain a valid target.json file the initialised object will test false, and will contain an error property containing the failure. ''' # re-initialise with the information from the most-derived target super(Target, self).__init__( path, description_filename = Target_Description_File, installed_linked = installed_linked, schema_filename = Schema_File, latest_suitable_version = latest_suitable_version, inherit_shrinkwrap = inherit_shrinkwrap ) if self.description and inherit_shrinkwrap is not None: # when inheriting a shrinkwrap, check that this module is # listed in the shrinkwrap, otherwise emit a warning: if next((x for x in inherit_shrinkwrap.get('targets', []) if x['name'] == self.getName()), None) is None: logger.warning("%s missing from shrinkwrap", self.getName()) def baseTargetSpec(self): ''' returns pack.DependencySpec for the base target of this target (or None if this target does not inherit from another target. ''' inherits = self.description.get('inherits', {}) if len(inherits) == 1: name, version_req = list(inherits.items())[0] shrinkwrap_version_req = self.getShrinkwrapMapping('targets').get(name, None) if shrinkwrap_version_req is not None: logger.debug( 'respecting shrinkwrap version %s for %s', shrinkwrap_version_req, name ) return pack.DependencySpec( name, version_req, shrinkwrap_version_req = shrinkwrap_version_req ) elif len(inherits) > 1: logger.error('target %s specifies multiple base targets, but only one is allowed', self.getName()) return None def getRegistryNamespace(self): return Registry_Namespace def getConfig(self): return self.description.get('config', OrderedDict())
class Target(pack.Pack): def __init__( self, path, installed_linked = False, latest_suitable_version = None, inherit_shrinkwrap = None ): ''' Initialise a Target based on a directory. If the directory does not contain a valid target.json file the initialised object will test false, and will contain an error property containing the failure. ''' pass def baseTargetSpec(self): ''' returns pack.DependencySpec for the base target of this target (or None if this target does not inherit from another target. ''' pass def getRegistryNamespace(self): pass def getConfig(self): pass
5
2
12
0
10
3
2
0.25
1
4
1
1
4
0
4
36
53
3
40
14
29
10
20
8
15
4
2
2
9
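Annotation: Target.baseTargetSpec in the record above enforces that a target description names at most one base target under 'inherits'. The sketch below reproduces that check in isolation; it raises instead of logging for brevity, and the target name and version spec are made up.

from collections import OrderedDict

def base_target_spec(description):
    # a target may inherit from at most one base target
    inherits = description.get('inherits', OrderedDict())
    if len(inherits) > 1:
        raise ValueError('multiple base targets are not allowed')
    if len(inherits) == 1:
        name, version_req = list(inherits.items())[0]
        return (name, version_req)
    return None

desc = OrderedDict(inherits=OrderedDict([('base-target', '^1.0.0')]))
print(base_target_spec(desc))   # ('base-target', '^1.0.0')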
2,680
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/target.py
yotta.lib.target.DerivedTarget
class DerivedTarget(Target): def __init__(self, leaf_target, base_targets, app_config, additional_config): ''' Initialise a DerivedTarget (representing an inheritance hierarchy of Targets.), given the most-derived Target description, and a set of available Targets to compose the rest of the lineage from. DerivedTarget provides build & debug commands, and access to the derived target config info (merged with the application config info from config.json, if any). It's possible to update the application config for an existing DerivedTarget instance. DerivedTarget can also be used as a stand-in for the most-derived (leaf) target in the inheritance hierarchy. ''' # initialise the base class as a copy of leaf_target super(DerivedTarget, self).__init__( path = leaf_target.path, installed_linked = leaf_target.installed_linked, latest_suitable_version = leaf_target.latest_suitable_version ) self.hierarchy = [leaf_target] + base_targets[:] self.config = None self.config_blame = None self.app_config = app_config self.additional_config = additional_config or {} # override truthiness to test validity of the entire hierarchy: def __nonzero__(self): for t in self.hierarchy: if not t: return False return bool(len(self.hierarchy)) def __bool__(self): return self.__nonzero__() def getScript(self, scriptname): ''' return the specified script if one exists (possibly inherited from a base target) ''' for t in self.hierarchy: s = t.getScript(scriptname) if s: return s return None def _loadConfig(self): ''' load the configuration information from the target hierarchy ''' config_dicts = [self.additional_config, self.app_config] + [t.getConfig() for t in self.hierarchy] # create an identical set of dictionaries, but with the names of the # sources in place of the values. When these are merged they will show # where each merged property came from: config_blame = [ _mirrorStructure(self.additional_config, 'command-line config'), _mirrorStructure(self.app_config, 'application\'s config.json'), ] + [ _mirrorStructure(t.getConfig(), t.getName()) for t in self.hierarchy ] self.config = _mergeDictionaries(*config_dicts) self.config_blame = _mergeDictionaries(*config_blame) # note that backwards compatibility with the "similarTo" data that used # to be used for target-dependencies is ensured at the point of use. We # don't merge similarTo into the config because it might break things # in the config (clobber objects with scalar values, for example) def _ensureConfig(self): if self.config is None: self._loadConfig() def getConfigValue(self, conf_key): self._ensureConfig() # jsonpointer, pip install jsonpointer, BSD 3 Clause import jsonpointer try: return jsonpointer.resolve_pointer(self.config, conf_key) except jsonpointer.JsonPointerException as e: # fall back to legacy dot-separated pointers key_path = conf_key.split('.'); c = self.config for part in key_path: if part in c: c = c[part] else: return None return c def getSimilarTo_Deprecated(self): r = [] for t in self.hierarchy: r.append(t.getName()) r += t.description.get('similarTo', []) return r def getMergedConfig(self): self._ensureConfig() return self.config def getConfigBlame(self): self._ensureConfig() return self.config_blame def getToolchainFiles(self): ''' return a list of toolchain file paths in override order (starting at the bottom/leaf of the hierarchy and ending at the base). The list is returned in the order they should be included (most-derived last). 
''' return reversed([ os.path.join(x.path, x.description['toolchain']) for x in self.hierarchy if 'toolchain' in x.description ]) def getAdditionalIncludes(self): ''' Return the list of cmake files which are to be included by yotta in every module built. The list is returned in the order they should be included (most-derived last). ''' return reversed([ os.path.join(t.path, include_file) for t in self.hierarchy for include_file in t.description.get('cmakeIncludes', []) ]) def inheritsFrom(self, target_name): ''' Return true if this target inherits from the named target (directly or indirectly. Also returns true if this target is the named target. Otherwise return false. ''' for t in self.hierarchy: if t and t.getName() == target_name or target_name in t.description.get('inherits', {}): return True return False @classmethod def addBuildOptions(cls, parser): parser.add_argument('-G', '--cmake-generator', dest='cmake_generator', default='Ninja', help='CMake generator to use (defaults to Ninja). You can use this '+ 'to generate IDE project files instead, see cmake --help for '+ 'possible generator names. Note that only Ninja or Unix Makefile '+ 'based generators will work correctly with yotta.', metavar='CMAKE_GENERATOR', type=str ) @classmethod def _findNinja(cls): # sometimes ninja is called ninja-build for name in ('ninja', 'ninja-build'): if fsutils.which(name) is not None: return name # default to ninja: return 'ninja' @classmethod def overrideBuildCommand(cls, generator_name, targets=None): if targets is None: targets = [] # when we build using cmake --build, the nice colourised output is lost # - so override with the actual build command for command-line # generators where people will care: try: r = { 'Unix Makefiles': ['make'], 'Ninja': [cls._findNinja()] }[generator_name] # all of the above build programs take the build targets (e.g. 
# "all") as the last arguments if targets is not None: r += targets return r except KeyError: return None def hintForCMakeGenerator(self, generator_name, component): if generator_name in ('Ninja', 'Unix Makefiles'): return None try: name = self.getName() component_name = component.getName() return { 'Xcode': 'a project file has been generated at ./build/%s/%s.xcodeproj' % (name, component_name), 'Sublime Text 2 - Ninja': 'a project file has been generated at ./build/%s/%s.sublime-project' % (name, component_name), 'Sublime Text 2 - Unix Makefiles': 'a project file has been generated at ./build/%s/%s.sublime-project' % (name, component_name), 'Eclipse CDT4 - Ninja': 'a project file has been generated at ./build/%s/.project' % name, 'Eclipse CDT4 - Unix Makefiles': 'a project file has been generated at ./build/%s/.project' % name }[generator_name] except KeyError: return 'project files for %s have been generated in ./build/%s' % (component_name, name) def exec_helper(self, cmd, builddir): ''' Execute the given command, returning an error message if an error occured or None if the command was succesful.''' try: child = subprocess.Popen(cmd, cwd=builddir) child.wait() except OSError as e: if e.errno == errno.ENOENT: if cmd[0] == 'cmake': return 'CMake is not installed, please follow the installation instructions at http://docs.yottabuild.org/#installing' else: return '%s is not installed' % (cmd[0]) else: return 'command %s failed' % (cmd) if child.returncode: return 'command %s failed' % (cmd) @fsutils.dropRootPrivs def build(self, builddir, component, args, release_build=False, build_args=None, targets=None, release_no_debug_info_build=False): ''' Execute the commands necessary to build this component, and all of its dependencies. ''' if build_args is None: build_args = [] if targets is None: targets = [] # in the future this may be specified in the target description, but # for now we only support cmake, so everything is simple: if release_no_debug_info_build: build_type = 'Release' elif release_build: build_type = 'RelWithDebInfo' else: build_type = 'Debug' cmd = ['cmake', '-D', 'CMAKE_BUILD_TYPE=%s' % build_type, '-G', args.cmake_generator, '.'] res = self.exec_helper(cmd, builddir) if res is not None: return res # work-around various yotta-specific issues with the generated # Ninja/project files: from yotta.lib import cmake_fixups cmake_fixups.applyFixupsForFenerator(args.cmake_generator, builddir, component) build_command = self.overrideBuildCommand(args.cmake_generator, targets=targets) if build_command: cmd = build_command + build_args else: cmd = ['cmake', '--build', builddir] if len(targets): # !!! FIXME: support multiple targets with the default CMake # build command cmd += ['--target', targets[0]] cmd += build_args res = self.exec_helper(cmd, builddir) if res is not None: return res hint = self.hintForCMakeGenerator(args.cmake_generator, component) if hint: logger.info(hint) def findProgram(self, builddir, program): ''' Return the builddir-relative path of program, if only a partial path is specified. 
Returns None and logs an error message if the program is ambiguous or not found ''' # if this is an exact match, do no further checking: if os.path.isfile(os.path.join(builddir, program)): logging.info('found %s' % program) return program exact_matches = [] insensitive_matches = [] approx_matches = [] for path, dirs, files in os.walk(builddir): if program in files: exact_matches.append(os.path.relpath(os.path.join(path, program), builddir)) continue files_lower = [f.lower() for f in files] if program.lower() in files_lower: insensitive_matches.append( os.path.relpath( os.path.join(path, files[files_lower.index(program.lower())]), builddir ) ) continue # !!! TODO: in the future add approximate string matching (typos, # etc.), for now we just test stripping any paths off program, and # looking for substring matches: pg_basen_lower_noext = os.path.splitext(os.path.basename(program).lower())[0] for f in files_lower: if pg_basen_lower_noext in f: approx_matches.append( os.path.relpath( os.path.join(path, files[files_lower.index(f)]), builddir ) ) if len(exact_matches) == 1: logging.info('found %s at %s', program, exact_matches[0]) return exact_matches[0] elif len(exact_matches) > 1: logging.error( '%s matches multiple executables, please use a full path (one of %s)' % ( program, ', or '.join(['"'+os.path.join(m, program)+'"' for m in exact_matches]) ) ) return None # if we have matches with and without a file extension, prefer the # no-file extension version, and discard the others (so we avoid # picking up post-processed files): reduced_approx_matches = [] for m in approx_matches: root = os.path.splitext(m)[0] if (m == root) or (root not in approx_matches): reduced_approx_matches.append(m) approx_matches = reduced_approx_matches for matches in (insensitive_matches, approx_matches): if len(matches) == 1: logging.info('found %s at %s' % ( program, matches[0] )) return matches[0] elif len(matches) > 1: logging.error( '%s is similar to several executables found. Please use an exact name:\n%s' % ( program, '\n'.join(matches) ) ) return None logging.error('could not find program "%s" to debug' % program) return None def buildProgEnvAndVars(self, program, build_dir): prog_env = os.environ.copy() prog_env['YOTTA_PROGRAM'] = _encodePathForEnv(program) prog_env['YOTTA_BUILD_DIR'] = _encodePathForEnv(build_dir) prog_env['YOTTA_TARGET_DIR'] = _encodePathForEnv(self.path) prog_vars = dict(program=program, build_dir=build_dir, target_dir=self.path) return (prog_env, prog_vars) @fsutils.dropRootPrivs def start(self, builddir, program, forward_args): ''' Launch the specified program. Uses the `start` script if specified by the target, attempts to run it natively if that script is not defined. 
''' child = None try: prog_path = self.findProgram(builddir, program) if prog_path is None: return start_env, start_vars = self.buildProgEnvAndVars(prog_path, builddir) if self.getScript('start'): cmd = [ os.path.expandvars(string.Template(x).safe_substitute(**start_vars)) for x in self.getScript('start') ] + forward_args else: cmd = shlex.split('./' + prog_path) + forward_args logger.debug('starting program: %s', cmd) child = subprocess.Popen( cmd, cwd = builddir, env = start_env ) child.wait() if child.returncode: return "process exited with status %s" % child.returncode child = None except OSError as e: import errno if e.errno == errno.ENOEXEC: return ("the program %s cannot be run (perhaps your target "+ "needs to define a 'start' script to start it on its " "intended execution target?)") % prog_path finally: if child is not None: _tryTerminate(child) def debug(self, builddir, program): ''' Launch a debugger for the specified program. Uses the `debug` script if specified by the target, falls back to the `debug` and `debugServer` commands if not. `program` is inserted into the $program variable in commands. ''' try: signal.signal(signal.SIGINT, _ignoreSignal); if self.getScript('debug') is not None: return self._debugWithScript(builddir, program) elif 'debug' in self.description: logger.warning( 'target %s provides deprecated debug property. It should '+ 'provide script.debug instead.', self.getName() ) return self._debugDeprecated(builddir, program) else: return "Target %s does not specify debug commands" % self finally: # clear the sigint handler signal.signal(signal.SIGINT, signal.SIG_DFL); @fsutils.dropRootPrivs def _debugWithScript(self, builddir, program): child = None try: prog_path = self.findProgram(builddir, program) if prog_path is None: return debug_env, debug_vars = self.buildProgEnvAndVars(prog_path, builddir) cmd = [ os.path.expandvars(string.Template(x).safe_substitute(**debug_vars)) for x in self.getScript('debug') ] logger.debug('starting debugger: %s', cmd) child = subprocess.Popen( cmd, cwd = builddir, env = debug_env ) child.wait() if child.returncode: return "debug process exited with status %s" % child.returncode child = None except: # reset the terminal, in case the debugger has screwed it up os.system('reset') raise finally: if child is not None: _tryTerminate(child) @fsutils.dropRootPrivs def _debugDeprecated(self, builddir, program): prog_path = self.findProgram(builddir, program) if prog_path is None: return with open(os.devnull, "w") as dev_null: daemon = None child = None try: # debug-server is the old name, debugServer is the new name debug_server_prop = 'debugServer' if not debug_server_prop in self.description: debug_server_prop = 'debug-server' if debug_server_prop in self.description: logger.debug('starting debug server...') daemon = subprocess.Popen( self.description[debug_server_prop], cwd = builddir, stdout = dev_null, stderr = dev_null, preexec_fn = _newPGroup ) else: daemon = None cmd = [ os.path.expandvars(string.Template(x).safe_substitute(program=prog_path)) for x in self.description['debug'] ] logger.debug('starting debugger: %s', cmd) child = subprocess.Popen( cmd, cwd = builddir ) child.wait() if child.returncode: return "debug process executed with status %s" % child.returncode child = None except: # reset the terminal, in case the debugger has screwed it up os.system('reset') raise finally: if child is not None: try: child.terminate() except OSError as e: pass if daemon is not None: logger.debug('shutting down debug server...') try: 
daemon.terminate() except OSError as e: pass @fsutils.dropRootPrivs def test(self, test_dir, module_dir, test_command, filter_command, forward_args): # we assume that test commands are relative to the current directory # (filter commands are relative to the module dir to make it possible # to use filter scripts shipped with the module) test_command = './' + test_command test_script = self.getScript('test') test_env, test_vars = self.buildProgEnvAndVars(os.path.abspath(os.path.join(test_dir, test_command)), test_dir) if test_script is None: cmd = shlex.split(test_command) + forward_args else: cmd = [ os.path.expandvars(string.Template(x).safe_substitute(**test_vars)) for x in test_script ] + forward_args # if the command is a python script, run it with the python interpreter # being used to run yotta: if test_command[0].lower().endswith('.py'): import sys python_interpreter = sys.executable cmd = [python_interpreter] + cmd if filter_command and filter_command[0].lower().endswith('.py'): import sys python_interpreter = sys.executable filter_command = [python_interpreter] + filter_command test_child = None test_filter = None try: logger.debug('running test: %s', cmd) if filter_command: logger.debug('using output filter command: %s', filter_command) test_child = subprocess.Popen( cmd, cwd = test_dir, stdout = subprocess.PIPE, env = test_env ) try: test_filter = subprocess.Popen( filter_command, cwd = module_dir, stdin = test_child.stdout, env = test_env ) except OSError as e: logger.error('error starting test output filter "%s": %s', filter_command, e) _tryTerminate(test_child) return 1 logger.debug('waiting for filter process') test_filter.communicate() if test_child.poll() is None: logger.warning('test child has not exited and will be terminated') _tryTerminate(test_child) test_child.stdout.close() returncode = test_filter.returncode test_child = None test_filter = None if returncode: logger.debug("test filter exited with status %s (=fail)", returncode) return 1 else: try: test_child = subprocess.Popen( cmd, cwd = test_dir, env = test_env ) logger.debug('waiting for test child') except OSError as e: if e.errno == errno.ENOENT: logger.error('Error: no such file or directory: "%s"', cmd[0]) return 1 raise test_child.wait() returncode = test_child.returncode test_child = None if returncode: logger.debug("test process exited with status %s (=fail)", returncode) return 1 finally: if test_child is not None: _tryTerminate(test_child) if test_filter is not None: _tryTerminate(test_filter) logger.debug("test %s passed", test_command) return 0
class DerivedTarget(Target): def __init__(self, leaf_target, base_targets, app_config, additional_config): ''' Initialise a DerivedTarget (representing an inheritance hierarchy of Targets.), given the most-derived Target description, and a set of available Targets to compose the rest of the lineage from. DerivedTarget provides build & debug commands, and access to the derived target config info (merged with the application config info from config.json, if any). It's possible to update the application config for an existing DerivedTarget instance. DerivedTarget can also be used as a stand-in for the most-derived (leaf) target in the inheritance hierarchy. ''' pass def __nonzero__(self): pass def __bool__(self): pass def getScript(self, scriptname): ''' return the specified script if one exists (possibly inherited from a base target) ''' pass def _loadConfig(self): ''' load the configuration information from the target hierarchy ''' pass def _ensureConfig(self): pass def getConfigValue(self, conf_key): pass def getSimilarTo_Deprecated(self): pass def getMergedConfig(self): pass def getConfigBlame(self): pass def getToolchainFiles(self): ''' return a list of toolchain file paths in override order (starting at the bottom/leaf of the hierarchy and ending at the base). The list is returned in the order they should be included (most-derived last). ''' pass def getAdditionalIncludes(self): ''' Return the list of cmake files which are to be included by yotta in every module built. The list is returned in the order they should be included (most-derived last). ''' pass def inheritsFrom(self, target_name): ''' Return true if this target inherits from the named target (directly or indirectly. Also returns true if this target is the named target. Otherwise return false. ''' pass @classmethod def addBuildOptions(cls, parser): pass @classmethod def _findNinja(cls): pass @classmethod def overrideBuildCommand(cls, generator_name, targets=None): pass def hintForCMakeGenerator(self, generator_name, component): pass def exec_helper(self, cmd, builddir): ''' Execute the given command, returning an error message if an error occured or None if the command was succesful.''' pass @fsutils.dropRootPrivs def build(self, builddir, component, args, release_build=False, build_args=None, targets=None, release_no_debug_info_build=False): ''' Execute the commands necessary to build this component, and all of its dependencies. ''' pass def findProgram(self, builddir, program): ''' Return the builddir-relative path of program, if only a partial path is specified. Returns None and logs an error message if the program is ambiguous or not found ''' pass def buildProgEnvAndVars(self, program, build_dir): pass @fsutils.dropRootPrivs def start(self, builddir, program, forward_args): ''' Launch the specified program. Uses the `start` script if specified by the target, attempts to run it natively if that script is not defined. ''' pass def debug(self, builddir, program): ''' Launch a debugger for the specified program. Uses the `debug` script if specified by the target, falls back to the `debug` and `debugServer` commands if not. `program` is inserted into the $program variable in commands. ''' pass @fsutils.dropRootPrivs def _debugWithScript(self, builddir, program): pass @fsutils.dropRootPrivs def _debugDeprecated(self, builddir, program): pass @fsutils.dropRootPrivs def test(self, test_dir, module_dir, test_command, filter_command, forward_args): pass
35
11
21
1
17
3
4
0.19
1
10
0
0
23
5
26
62
576
48
443
104
402
86
320
89
288
14
3
4
103
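Annotation: DerivedTarget.getConfigValue in the record above falls back to legacy dot-separated keys when a JSON-pointer lookup fails. A self-contained sketch of that fallback walk follows; the sample config values are hypothetical.

def get_config_value(config, conf_key):
    # walk 'a.b.c' style keys through nested dictionaries, returning None
    # as soon as any path element is missing
    c = config
    for part in conf_key.split('.'):
        if isinstance(c, dict) and part in c:
            c = c[part]
        else:
            return None
    return c

config = {'mbed': {'sram': {'size': 16384}}}
assert get_config_value(config, 'mbed.sram.size') == 16384
assert get_config_value(config, 'mbed.flash.size') is None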
2,681
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/sourceparse.py
yotta.lib.sourceparse.VersionSource
class VersionSource(object): def __init__(self, source_type, location, spec): assert(source_type in ('registry', 'github', 'git', 'hg')) self.source_type = source_type self.location = location self.spec = spec try: self.semantic_spec = version.Spec(spec) except ValueError: # for git/github source URLs the spec is allowed to be a branch # name or tag name, as well as a valid semantic version # specification # !!! TODO: also allow hg here if source_type in ('git', 'github'): self.semantic_spec = None else: raise ValueError( "Invalid semantic version spec: \"%s\"" % spec ) def semanticSpec(self): return self.semantic_spec or version.Spec('*') def semanticSpecMatches(self, v): if isinstance(v, str): v = version.Version(v) if self.semantic_spec is None: return True else: return self.semantic_spec.match(v)
class VersionSource(object): def __init__(self, source_type, location, spec): pass def semanticSpec(self): pass def semanticSpecMatches(self, v): pass
4
0
9
0
8
1
2
0.17
1
4
2
0
3
4
3
3
30
2
24
8
20
4
20
8
16
3
1
2
7
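Annotation: VersionSource above accepts either a semantic version spec or, for git/github sources only, a branch or tag name. The sketch below shows that decision with a crude regex stand-in for yotta's own version.Spec parser, which is far more complete.

import re

def classify_spec(source_type, spec):
    # accept simple semver ranges; otherwise treat the spec as a
    # branch/tag name, but only for git and github sources
    if re.match(r'^[\^~>=<]{0,2}\d+(\.\d+){0,2}$', spec) or spec == '*':
        return ('semver', spec)
    if source_type in ('git', 'github'):
        return ('branch-or-tag', spec)
    raise ValueError('Invalid semantic version spec: "%s"' % spec)

print(classify_spec('registry', '^1.2.0'))   # ('semver', '^1.2.0')
print(classify_spec('github', 'master'))     # ('branch-or-tag', 'master')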
2,682
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/sourceparse.py
yotta.lib.sourceparse.InvalidVersionSpec
class InvalidVersionSpec(ValueError): pass
class InvalidVersionSpec(ValueError): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
11
2
0
2
1
1
0
2
1
1
0
4
0
0
2,683
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/settings.py
yotta.lib.settings._JSONConfigParser
class _JSONConfigParser(object): def __init__(self): self.configs = OrderedDict() def read(self, filenames): '''' Read a list of files. Their configuration values are merged, with preference to values from files earlier in the list. ''' for fn in filenames: try: self.configs[fn] = ordered_json.load(fn) except IOError: self.configs[fn] = OrderedDict() except Exception as e: self.configs[fn] = OrderedDict() logging.warning( "Failed to read settings file %s, it will be ignored. The error was: %s", fn, e ) def get(self, path): ''' return a configuration value usage: get('section.property') Note that currently array indexes are not supported. You must get the whole array. returns None if any path element or the property is missing ''' path = _splitPath(path) for config in self.configs.values(): cur = config for el in path: if el in cur: cur = cur[el] else: cur = None break if cur is not None: return cur return None def set(self, path, value=None, filename=None): ''' Set a configuration value. If no filename is specified, the property is set in the first configuration file. Note that if a filename is specified and the property path is present in an earlier filename then set property will be hidden. usage: set('section.property', value='somevalue') Note that currently array indexes are not supported. You must set the whole array. ''' if filename is None: config = self._firstConfig()[1] else: config = self.configs[filename] path = _splitPath(path) for el in path[:-1]: if el in config: config = config[el] else: config[el] = OrderedDict() config = config[el] config[path[-1]] = value def write(self, filename=None): # fsutils, , misc filesystem utils, internal from yotta.lib import fsutils if filename is None: filename, data = self._firstConfig() elif filename in self.configs: data = self.configs[filename] else: raise ValueError('No such file.') dirname = os.path.normpath(os.path.dirname(filename)) logging.debug('write settings to "%s" (will ensure directory "%s" exists)', filename, dirname) try: fsutils.mkDirP(dirname) ordered_json.dump(filename, data) except OSError as e: logging.error('Failed to save user settings to %s/%s, please check that the path exists and is writable.', dirname, filename) def _firstConfig(self): for fn, data in self.configs.items(): return fn, data raise ValueError('No configs available.')
class _JSONConfigParser(object): def __init__(self): pass def read(self, filenames): '''' Read a list of files. Their configuration values are merged, with preference to values from files earlier in the list. ''' pass def get(self, path): ''' return a configuration value usage: get('section.property') Note that currently array indexes are not supported. You must get the whole array. returns None if any path element or the property is missing ''' pass def set(self, path, value=None, filename=None): ''' Set a configuration value. If no filename is specified, the property is set in the first configuration file. Note that if a filename is specified and the property path is present in an earlier filename then set property will be hidden. usage: set('section.property', value='somevalue') Note that currently array indexes are not supported. You must set the whole array. ''' pass def write(self, filename=None): pass def _firstConfig(self): pass
7
3
14
1
10
3
3
0.33
1
4
0
0
6
1
6
6
91
11
60
20
52
20
52
18
44
5
1
3
20
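Annotation: _JSONConfigParser.get above layers several settings files, consulting them in order and letting the first file that defines the full path win. A simplified, self-contained version of that lookup (file names and values are invented, and unlike the original this treats an explicit None value as missing):

from collections import OrderedDict

configs = OrderedDict([
    ('user-settings.json',   {'github': {'authtoken': 'token-from-user-file'}}),
    ('global-settings.json', {'github': {'authtoken': 'ignored'}, 'plain': True}),
])

def get(path):
    # consult files in order; the first file defining the full path wins
    for config in configs.values():
        cur = config
        for el in path.split('.'):
            cur = cur.get(el) if isinstance(cur, dict) else None
            if cur is None:
                break
        if cur is not None:
            return cur
    return None

assert get('github.authtoken') == 'token-from-user-file'
assert get('plain') is True
assert get('github.missing') is None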
2,684
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/vcs.py
yotta.lib.vcs.HG
class HG(VCS): hgapi = None def __init__(self, path): self._loadHGApi() self.worktree = path self.repo = self.hgapi.Repo(path) @classmethod def _loadHGApi(cls): # only import hgapi on demand, since it is rarely needed if cls.hgapi is None: import hgapi cls.hgapi = hgapi @classmethod def cloneToTemporaryDir(cls, remote): return cls.cloneToDirectory(remote, tempfile.mkdtemp()) @classmethod def cloneToDirectory(cls, remote, directory, tag=None): cls._loadHGApi() # hg doesn't automatically create the directories needed by destination try: os.makedirs(directory) except: pass hg_logger.debug('will clone %s into %s', remote, directory) cls.hgapi.Repo.hg_clone(remote, directory) r = HG(directory) if tag is not None: r.updateToTag(tag) return r def remove(self): # fsutils, , misc filesystem utils, internal from yotta.lib import fsutils fsutils.rmRf(self.worktree) def getCommitId(self): return self.repo.hg_node() def getDescription(self): try: return self.repo.hg_command('log', '--rev', '.', '--template', "{latesttag}{sub('^-0-.*', '', '-{latesttagdistance}-m{node|short}')}") except self.hgapi.HgException: # old mercurial doesn't support above command, output short hash, m-prefixed return "m" + self.getCommitId()[:12] def workingDirectory(self): return self.worktree def isClean(self): return not bool(self.repo.hg_status(empty=True)) def markForCommit(self, relative_path): self.repo.hg_add(os.path.join(self.worktree, relative_path)) def updateToTag(self, tag): self.repo.hg_update(tag) def tags(self): l = list(self.repo.hg_tags().keys()) l.remove('tip') return l def commit(self, message, tag=None): self.repo.hg_commit(message) if tag: self.repo.hg_tag(tag) def __nonzero__(self): return True
class HG(VCS): def __init__(self, path): pass @classmethod def _loadHGApi(cls): pass @classmethod def cloneToTemporaryDir(cls, remote): pass @classmethod def cloneToDirectory(cls, remote, directory, tag=None): pass def remove(self): pass def getCommitId(self): pass def getDescription(self): pass def workingDirectory(self): pass def isClean(self): pass def markForCommit(self, relative_path): pass def updateToTag(self, tag): pass def tags(self): pass def commit(self, message, tag=None): pass def __nonzero__(self): pass
18
0
4
0
4
0
1
0.07
1
2
0
0
11
2
14
25
71
13
55
25
35
4
52
22
35
3
2
1
19
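Annotation: HG._loadHGApi above defers importing hgapi until first use and caches it on the class, so the dependency is only paid for when mercurial is actually needed. A generic sketch of that lazy-import pattern, using json as a stand-in for the optional dependency:

class Backend(object):
    _api = None

    @classmethod
    def _load_api(cls):
        # import the dependency only on first use, then cache it on the
        # class, as HG._loadHGApi does with hgapi
        if cls._api is None:
            import json   # stand-in for an optional/expensive dependency
            cls._api = json
        return cls._api

print(Backend._load_api().dumps({'lazy': True}))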
2,685
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/registry_access.py
yotta.lib.registry_access.AuthError
class AuthError(RuntimeError): pass
class AuthError(RuntimeError): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
11
2
0
2
1
1
0
2
1
1
0
4
0
0
2,686
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/pack.py
yotta.lib.pack.OptionalFileWrapper
class OptionalFileWrapper(object): def __init__(self, fname=None, mode=None): self.fname = fname self.mode = mode super(OptionalFileWrapper, self).__init__() def __enter__(self): if self.fname: self.file = open(self.fname, self.mode) else: self.file = open(os.devnull) return self def __exit__(self, type, value, traceback): self.file.close() def contents(self): if self.fname: return self.file.read() else: return '' def extension(self): if self.fname: return os.path.splitext(self.fname)[1] else: return '' def __nonzero__(self): return bool(self.fname) # python 3 truthiness def __bool__(self): return bool(self.fname)
class OptionalFileWrapper(object): def __init__(self, fname=None, mode=None): pass def __enter__(self): pass def __exit__(self, type, value, traceback): pass def contents(self): pass def extension(self): pass def __nonzero__(self): pass def __bool__(self): pass
8
0
4
0
4
0
1
0.04
1
2
0
0
7
3
7
7
29
1
27
11
19
1
24
11
16
2
1
1
10
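Annotation: OptionalFileWrapper above lets the same 'with' block work whether or not a file name was supplied, reading from os.devnull when it was not. An illustrative local re-implementation so the snippet is self-contained:

import os

class OptionalFile(object):
    def __init__(self, fname=None, mode='r'):
        self.fname, self.mode = fname, mode
    def __enter__(self):
        # fall back to the null device when no file name was given
        self.file = open(self.fname, self.mode) if self.fname else open(os.devnull)
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        self.file.close()
    def contents(self):
        return self.file.read() if self.fname else ''

with OptionalFile() as f:
    assert f.contents() == ''   # no file name given: behaves as empty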
2,687
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/pack.py
yotta.lib.pack.InvalidDescription
class InvalidDescription(Exception): pass
class InvalidDescription(Exception): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
10
2
0
2
1
1
0
2
1
1
0
3
0
0
2,688
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/pack.py
yotta.lib.pack.DependencySpec
class DependencySpec(object): def __init__(self, name, version_req, is_test_dependency=False, shrinkwrap_version_req=None, specifying_module=None): self.name = name self.version_req = version_req self.specifying_module = specifying_module # for diagnostic info only, may not be present self.is_test_dependency = is_test_dependency self.shrinkwrap_version_req = shrinkwrap_version_req def isShrinkwrapped(self): return self.shrinkwrap_version_req is not None def nonShrinkwrappedVersionReq(self): ''' return the dependency specification ignoring any shrinkwrap ''' return self.version_req def versionReq(self): ''' return the dependency specification, which may be from a shrinkwrap file ''' return self.shrinkwrap_version_req or self.version_req def __unicode__(self): return u'%s at %s' % (self.name, self.version_req) def __str__(self): import sys # in python 3 __str__ must return a string (i.e. unicode), in # python 2, it must not return unicode, so: if sys.version_info[0] >= 3: return self.__unicode__() else: return self.__unicode__().encode('utf8') def __repr__(self): return self.__unicode__()
class DependencySpec(object): def __init__(self, name, version_req, is_test_dependency=False, shrinkwrap_version_req=None, specifying_module=None): pass def isShrinkwrapped(self): pass def nonShrinkwrappedVersionReq(self): ''' return the dependency specification ignoring any shrinkwrap ''' pass def versionReq(self): ''' return the dependency specification, which may be from a shrinkwrap file ''' pass def __unicode__(self): pass def __str__(self): pass def __repr__(self): pass
8
2
4
0
3
1
1
0.22
1
0
0
0
7
5
7
7
31
4
23
14
14
5
22
14
13
2
1
1
8
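Annotation: DependencySpec.versionReq above encodes a precedence rule: a shrinkwrap version requirement, when present, overrides the module's own requirement. A stripped-down sketch of just that rule, with invented version strings:

class Spec(object):
    # only the two fields needed to show the precedence rule
    def __init__(self, version_req, shrinkwrap_version_req=None):
        self.version_req = version_req
        self.shrinkwrap_version_req = shrinkwrap_version_req

    def versionReq(self):
        # a shrinkwrap pin, when present, wins over the module's own spec
        return self.shrinkwrap_version_req or self.version_req

assert Spec('^1.0.0').versionReq() == '^1.0.0'
assert Spec('^1.0.0', shrinkwrap_version_req='1.2.3').versionReq() == '1.2.3'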
2,689
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/logging_setup.py
yotta.lib.logging_setup.PlainFormatter
class PlainFormatter(logging.Formatter): def __init__(self): super(PlainFormatter, self).__init__() def format(self, record): return record.levelname.lower() + ': ' + record.getMessage()
class PlainFormatter(logging.Formatter): def __init__(self): pass def format(self, record): pass
3
0
2
0
2
0
1
0
1
1
0
0
2
0
2
9
6
1
5
3
2
0
5
3
2
1
2
0
2
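Annotation: PlainFormatter above renders records as 'level: message'. A short example of wiring such a formatter into the standard logging module (logger name and message are illustrative):

import logging

class PlainFormatter(logging.Formatter):
    def format(self, record):
        return record.levelname.lower() + ': ' + record.getMessage()

handler = logging.StreamHandler()
handler.setFormatter(PlainFormatter())
log = logging.getLogger('demo')
log.addHandler(handler)
log.warning('low disk space')   # prints "warning: low disk space" to stderr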
2,690
ARMmbed/yotta
ARMmbed_yotta/yotta/test/cli/test_build.py
yotta.test.cli.test_build.TestCLIBuild
class TestCLIBuild(unittest.TestCase): @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_buildTrivialLib(self): test_dir = util.writeTestFiles(util.Test_Trivial_Lib) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_buildTrivialExe(self): test_dir = util.writeTestFiles(util.Test_Trivial_Exe) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_buildComplex(self): test_dir = util.writeTestFiles(Test_Complex) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_buildComplexSpaceInPath(self): test_dir = util.writeTestFiles(Test_Complex, True) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_buildTests(self): test_dir = util.writeTestFiles(Test_Tests, True) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'test'], test_dir) self.assertIn('test-a', stdout) self.assertIn('test-c', stdout) self.assertIn('test-d', stdout) self.assertIn('test-e', stdout) self.assertIn('test-f', stdout) self.assertIn('test-g', stdout) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_buildInfo(self): test_dir = util.writeTestFiles(Test_Build_Info, True) # commit all the test files to git so that the VCS build info gets # defined: # (set up the git user env vars so we can run git commit without barfing) util.setupGitUser() subprocess.check_call(['git', 'init', '-q'], cwd=test_dir) subprocess.check_call(['git', 'add', '.'], cwd=test_dir) subprocess.check_call( ['git', 'commit', '-m', 'test build info automated commit', '-q'], cwd=test_dir) self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) build_time = datetime.datetime.utcnow() output = subprocess.check_output(['./build/' + util.nativeTarget().split( ',')[0] + '/source/test-trivial-exe'], cwd=test_dir).decode() self.assertIn('vcs clean: 1', output) # check build timestamp self.assertIn('build timestamp: ', output) build_timestamp_s = re.search('build timestamp: (.*)\n', output) self.assertTrue(build_timestamp_s) build_timestamp_s = build_timestamp_s.group(1) build_time_parsed = datetime.datetime.strptime( build_timestamp_s, '%Y-%m-%d-%H-%M-%S') build_time_skew = build_time_parsed - build_time self.assertTrue(abs(build_time_skew.total_seconds()) < 3) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_extraCMakeBuild(self): test_dir = util.writeTestFiles(util.Test_Extra_CMake_Lib, True) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_customCMakeBuild(self): test_dir = util.writeTestFiles(util.Test_Custom_CMake_Lib, True) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), 
"can't build natively on windows yet") def test_extraCMakeBuildExe(self): test_dir = util.writeTestFiles(util.Test_Extra_CMake_Exe, True) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_customCMakeBuildExe(self): test_dir = util.writeTestFiles(util.Test_Custom_CMake_Exe, True) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_ignoreCustomCMake(self): test_dir = util.writeTestFiles(Test_Ignore_Custom_Cmake, True) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) self.assertNotIn('should be ignored', stdout) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_customLibDir(self): test_dir = util.writeTestFiles(Test_Custom_Lib_Dir, True) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) self.assertIn('this message should be printed', stdout) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_customLibSubDir(self): test_dir = util.writeTestFiles(Test_Custom_Lib_Sub_Dir, True) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) self.assertIn('this message should be printed', stdout) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_customLibSubSourceDir(self): test_dir = util.writeTestFiles(Test_Custom_Lib_Sub_Source_Dir, True) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) self.assertIn('this message should be printed', stdout) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_customBinDir(self): test_dir = util.writeTestFiles(Test_Custom_Bin_Dir, True) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) self.assertIn('this message should be printed', stdout) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_customBinSubDir(self): test_dir = util.writeTestFiles(Test_Custom_Bin_Sub_Dir, True) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) self.assertIn('this message should be printed', stdout) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_customBinSubSourceDir(self): test_dir = util.writeTestFiles(Test_Custom_Bin_Sub_Source_Dir, True) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) self.assertIn('this message should be printed', stdout) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_libAndBinSpecified(self): test_dir = util.writeTestFiles(Test_Lib_And_Bin) stdout, stderr, statuscode = cli.run( ['--target', util.nativeTarget(), 'build'], cwd=test_dir) self.assertNotEqual(statuscode, 0) self.assertIn( 'Both "lib" and "bin" are specified in module.json: only one is allowed', stdout+stderr) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_libNonExistent(self): test_dir = util.writeTestFiles(Test_Lib_Nonexistent) stdout, stderr, statuscode = cli.run( 
['--target', util.nativeTarget(), 'build'], cwd=test_dir) self.assertIn('directory "doesntexist" doesn\'t exist', stdout+stderr) # !!! FIXME: should this error be fatal? # self.assertNotEqual(statuscode, 0) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_binNonExistent(self): test_dir = util.writeTestFiles(Test_Bin_Nonexistent) stdout, stderr, statuscode = cli.run( ['--target', util.nativeTarget(), 'build'], cwd=test_dir) self.assertIn('directory "doesntexist" doesn\'t exist', stdout+stderr) # !!! FIXME: should this error be fatal? # self.assertNotEqual(statuscode, 0) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_misspeltSourceDir1(self): test_dir = util.writeTestFiles(Test_Misspelt_Source_Dir_1) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) self.assertIn("has non-standard source directory name", stdout) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_misspeltSourceDir2(self): test_dir = util.writeTestFiles(Test_Misspelt_Source_Dir_2) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) self.assertIn("has non-standard source directory name", stdout) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_misspeltSourceDirIgnored(self): test_dir = util.writeTestFiles(Test_Ignored_Misspelt_Source_Dir) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) self.assertNotIn("has non-standard source directory name", stdout) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_scriptsPreBuild(self): test_dir = util.writeTestFiles(Test_Scripts_PreBuild) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) self.assertIn("running prebuild", stdout) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_scriptsPostBuild(self): test_dir = util.writeTestFiles(Test_Scripts_PostBuild) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) self.assertIn("running postbuild", stdout) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_scriptsPreGenerate(self): test_dir = util.writeTestFiles(Test_Scripts_PreGenerate) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) self.assertIn("running pregenerate", stdout) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_Defines_Application(self): test_dir = util.writeTestFiles(Test_Defines_Application) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) output = subprocess.check_output(['./build/' + util.nativeTarget().split( ',')[0] + '/source/test-defines-app'], cwd=test_dir).decode() self.assertIn("1234 yotta", output) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_Defines_Library(self): test_dir = util.writeTestFiles(Test_Defines_Library) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) self.assertIn( "defines.json ignored in library module 'test-defines-lib'", stdout) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't 
build natively on windows yet") def test_Toplevel_Application(self): test_dir = util.writeTestFiles(Test_Toplevel_Application) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) output = subprocess.check_output(['./build/' + util.nativeTarget().split( ',')[0] + '/test-toplevel-app/test-toplevel-app'], cwd=test_dir).decode() self.assertIn("13", output) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_Toplevel_Library(self): test_dir = util.writeTestFiles(Test_Toplevel_Library) stdout = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) output = subprocess.check_output(['./build/' + util.nativeTarget().split( ',')[0] + '/test/test-toplevel-lib-test-test'], cwd=test_dir).decode() self.assertIn("42", output) util.rmRf(test_dir) def runCheckCommand(self, args, test_dir): stdout, stderr, statuscode = cli.run(args, cwd=test_dir) if statuscode != 0: print('command failed with status %s' % statuscode) print(stdout) print(stderr) self.assertEqual(statuscode, 0) return stdout + stderr
class TestCLIBuild(unittest.TestCase): @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_buildTrivialLib(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_buildTrivialExe(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_buildComplex(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_buildComplexSpaceInPath(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_buildTests(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_buildInfo(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_extraCMakeBuild(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_customCMakeBuild(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_extraCMakeBuildExe(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_customCMakeBuildExe(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_ignoreCustomCMake(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_customLibDir(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_customLibSubDir(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_customLibSubSourceDir(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_customBinDir(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_customBinSubDir(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_customBinSubSourceDir(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_libAndBinSpecified(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_libNonExistent(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_binNonExistent(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_misspeltSourceDir1(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_misspeltSourceDir2(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_misspeltSourceDirIgnored(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_scriptsPreBuild(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_scriptsPostBuild(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_scriptsPreGenerate(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_Defines_Application(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_Defines_Library(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build 
natively on windows yet") def test_Toplevel_Application(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_Toplevel_Library(self): pass def runCheckCommand(self, args, test_dir): pass
62
0
6
0
6
0
1
0.04
1
1
0
0
31
0
31
103
253
42
203
130
141
8
173
100
141
2
2
1
32
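Annotation: every test in TestCLIBuild above funnels its CLI invocation through a runCheckCommand helper that asserts a zero exit status and returns the combined output. A generic, self-contained version of that helper, shelling out via subprocess instead of the in-process cli.run used by the record:

import subprocess
import sys

def run_check_command(args, cwd=None):
    # run a command, print its output only on failure, assert success,
    # and return combined stdout+stderr
    proc = subprocess.Popen(args, cwd=cwd, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, universal_newlines=True)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        print('command failed with status %s' % proc.returncode)
        print(stdout)
        print(stderr)
    assert proc.returncode == 0
    return stdout + stderr

print(run_check_command([sys.executable, '--version']))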
2,691
ARMmbed/yotta
ARMmbed_yotta/yotta/test/cli/test_install.py
yotta.test.cli.test_install.TestCLIInstall
class TestCLIInstall(unittest.TestCase): @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_installRegistryRef(self): test_dir = tempfile.mkdtemp() stdout = self.runCheckCommand( ['--target', Test_Target, 'install', Test_Name], test_dir) rmRf(test_dir) @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_installGithubRef(self): test_dir = tempfile.mkdtemp() stdout = self.runCheckCommand( ['--target', Test_Target, 'install', Test_Github_Name], test_dir) rmRf(test_dir) @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_installDeps(self): test_dir = tempfile.mkdtemp() with open(os.path.join(test_dir, 'module.json'), 'w') as f: f.write(Test_Module_JSON) stdout = self.runCheckCommand( ['--target', Test_Target, 'install'], test_dir) # also sanity-check listing: stdout = self.runCheckCommand( ['--target', Test_Target, 'ls'], test_dir) self.assertIn('testmod', stdout) self.assertIn('other-testing-dummy', stdout) self.assertIn('test-testing-dummy', stdout) self.assertIn('test-target-dep', stdout) stdout = self.runCheckCommand( ['--target', Test_Target, 'ls', '-a'], test_dir) self.assertIn('testmod', stdout) self.assertIn('other-testing-dummy', stdout) self.assertIn('test-testing-dummy', stdout) self.assertIn('test-target-dep', stdout) # and test install <modulename> stdout = self.runCheckCommand( ['--target', Test_Target, 'install', 'hg-access-testing'], test_dir) stdout = self.runCheckCommand( ['--target', Test_Target, 'ls'], test_dir) self.assertIn('hg-access-testing', stdout) rmRf(test_dir) @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_installAllTestDeps(self): test_dir = tempfile.mkdtemp() with open(os.path.join(test_dir, 'module.json'), 'w') as f: f.write(Test_Module_JSON) stdout = self.runCheckCommand( ['--target', Test_Target, 'install', '--test-dependencies', 'all'], test_dir) # also sanity-check listing: stdout = self.runCheckCommand( ['--target', Test_Target, 'ls', '-a'], test_dir) self.assertIn('testmod', stdout) self.assertIn('other-testing-dummy', stdout) self.assertIn('test-testing-dummy', stdout) self.assertIn('test-target-dep', stdout) self.assertNotIn('missing', stdout) rmRf(test_dir) @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_installNoTestDeps(self): test_dir = tempfile.mkdtemp() with open(os.path.join(test_dir, 'module.json'), 'w') as f: f.write(Test_Module_JSON) stdout = self.runCheckCommand( ['--target', Test_Target, 'install', '--test-dependencies', 'none'], test_dir) # also sanity-check listing: stdout = self.runCheckCommand( ['--target', Test_Target, 'ls'], test_dir) self.assertIn('testmod', stdout) self.assertIn('other-testing-dummy', stdout) self.assertTrue(re.search('test-testing-dummy.*missing', stdout)) self.assertTrue(re.search('test-target-dep.*missing', stdout)) stdout = self.runCheckCommand( ['--target', Test_Target, 'ls', '-a'], test_dir) self.assertIn('testmod', stdout) self.assertIn('other-testing-dummy', stdout) self.assertTrue(re.search('test-testing-dummy.*missing', stdout)) self.assertTrue(re.search('test-target-dep.*missing', stdout)) rmRf(test_dir) 
@unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_installComplexDeps(self): test_dir = tempfile.mkdtemp() with open(os.path.join(test_dir, 'module.json'), 'w') as f: f.write(Test_Complex_Module_JSON) stdout = self.runCheckCommand( ['--target', Test_Target, 'install'], test_dir) # also sanity-check listing: stdout = self.runCheckCommand( ['--target', Test_Target, 'ls'], test_dir) self.assertIn('test-testdep-b', stdout) self.assertIn('test-testdep-c', stdout) self.assertIn('test-testdep-d', stdout) self.assertIn('test-testdep-e', stdout) self.assertIn('test-testdep-f', stdout) self.assertIn('test-testdep-h', stdout) self.assertNotIn('test-testdep-g', stdout) self.assertNotIn('test-testdep-i', stdout) self.assertNotIn('test-testdep-j', stdout) self.assertNotIn('test-testdep-k', stdout) self.assertNotIn('missing', stdout) stdout = self.runCheckCommand( ['--target', Test_Target, 'ls', '-a'], test_dir) self.assertIn('test-testdep-b', stdout) self.assertIn('test-testdep-c', stdout) self.assertIn('test-testdep-d', stdout) self.assertIn('test-testdep-e', stdout) self.assertIn('test-testdep-f', stdout) self.assertIn('test-testdep-h', stdout) self.assertTrue(re.search('test-testdep-nodeps.*missing', stdout)) self.assertTrue(re.search('test-testdep-i.*missing', stdout)) self.assertTrue(re.search('test-testdep-g.*missing', stdout)) self.assertNotIn('test-testdep-j', stdout) self.assertNotIn('test-testdep-k', stdout) # test update stdout = self.runCheckCommand( ['--target', Test_Target, 'up'], test_dir) rmRf(test_dir) @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_installAllComplexTestDeps(self): test_dir = tempfile.mkdtemp() with open(os.path.join(test_dir, 'module.json'), 'w') as f: f.write(Test_Complex_Module_JSON) stdout = self.runCheckCommand( ['--target', Test_Target, 'install', '--test-dependencies', 'all'], test_dir) # also sanity-check listing: stdout = self.runCheckCommand( ['--target', Test_Target, 'ls', '-a'], test_dir) self.assertIn('test-testdep-b', stdout) self.assertIn('test-testdep-c', stdout) self.assertIn('test-testdep-d', stdout) self.assertIn('test-testdep-e', stdout) self.assertIn('test-testdep-f', stdout) self.assertIn('test-testdep-g', stdout) self.assertIn('test-testdep-h', stdout) self.assertIn('test-testdep-i', stdout) self.assertNotIn('test-testdep-j', stdout) self.assertNotIn('test-testdep-k', stdout) self.assertNotIn('missing', stdout) rmRf(test_dir) @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_installNoComplexTestDeps(self): test_dir = tempfile.mkdtemp() with open(os.path.join(test_dir, 'module.json'), 'w') as f: f.write(Test_Complex_Module_JSON) stdout = self.runCheckCommand( ['--target', Test_Target, 'install', '--test-dependencies', 'none'], test_dir) # also sanity-check listing: stdout = self.runCheckCommand( ['--target', Test_Target, 'ls'], test_dir) self.assertIn('test-testdep-b', stdout) self.assertIn('test-testdep-c', stdout) self.assertIn('test-testdep-d', stdout) # e should be installed because it is both a test dep and non-test dep: # maybe it shouldn't show up in the listing without -a though? 
self.assertIn('test-testdep-e', stdout) self.assertIn('test-testdep-f', stdout) self.assertIn('test-testdep-h', stdout) self.assertNotIn('test-testdep-g', stdout) self.assertNotIn('test-testdep-i', stdout) self.assertNotIn('test-testdep-j', stdout) self.assertNotIn('test-testdep-k', stdout) self.assertNotIn('missing', stdout) stdout = self.runCheckCommand( ['--target', Test_Target, 'ls', '-a'], test_dir) self.assertIn('test-testdep-b', stdout) self.assertIn('test-testdep-c', stdout) self.assertIn('test-testdep-d', stdout) self.assertIn('test-testdep-e', stdout) self.assertIn('test-testdep-f', stdout) self.assertIn('test-testdep-h', stdout) self.assertTrue(re.search('test-testdep-nodeps.*missing', stdout)) self.assertTrue(re.search('test-testdep-i.*missing', stdout)) self.assertTrue(re.search('test-testdep-g.*missing', stdout)) self.assertNotIn('test-testdep-j', stdout) self.assertNotIn('test-testdep-k', stdout) rmRf(test_dir) @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_remove(self): test_dir = tempfile.mkdtemp() with open(os.path.join(test_dir, 'module.json'), 'w') as f: f.write(Test_Module_JSON) stdout = self.runCheckCommand( ['--target', Test_Target, 'install'], test_dir) self.assertTrue(os.path.exists(os.path.join( test_dir, 'yotta_modules', 'testing-dummy'))) self.runCheckCommand(['remove', 'testing-dummy'], test_dir) self.assertFalse(os.path.exists(os.path.join( test_dir, 'yotta_modules', 'testing-dummy'))) stdout = self.runCheckCommand( ['--target', Test_Target, 'ls', '-a'], test_dir) self.assertIn('testing-dummy', stdout) rmRf(test_dir) @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_uninstall(self): test_dir = tempfile.mkdtemp() with open(os.path.join(test_dir, 'module.json'), 'w') as f: f.write(Test_Module_JSON) stdout = self.runCheckCommand( ['--target', Test_Target, 'install'], test_dir) self.assertTrue(os.path.exists(os.path.join( test_dir, 'yotta_modules', 'testing-dummy'))) self.runCheckCommand(['uninstall', 'testing-dummy'], test_dir) self.assertFalse(os.path.exists(os.path.join( test_dir, 'yotta_modules', 'testing-dummy'))) stdout = self.runCheckCommand( ['--target', Test_Target, 'ls', '-a'], test_dir) self.assertNotIn(' testing-dummy', stdout) rmRf(test_dir) @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_uninstallNonExistent(self): test_dir = tempfile.mkdtemp() with open(os.path.join(test_dir, 'module.json'), 'w') as f: f.write(Test_Module_JSON) stdout = self.runCheckCommand( ['--target', Test_Target, 'install'], test_dir) self.assertTrue(os.path.exists(os.path.join( test_dir, 'yotta_modules', 'testing-dummy'))) stdout, stderr, statuscode = cli.run( ['uninstall', 'nonexistent'], cwd=test_dir) self.assertNotEqual(statuscode, 0) rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "cannot build natively") def test_postInstall_topLevel(self): test_dir = util.writeTestFiles({}) self.runCheckCommand( ['--target', util.nativeTarget(), 'install', 'test-post-install'], test_dir) output = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], os.path.join(test_dir, 'test-post-install')) self.assertIn('post-install generated file compiled', output) self.assertIn('post-install generated header file included', output) self.assertIn('generated .cmake file included', output)
@unittest.skipIf(not util.canBuildNatively(), "cannot build natively") def test_postInstall_dependency(self): test_dir = util.writeTestFiles({ 'module.json': '''{ "name": "test-postinstall", "version": "1.0.0", "license": "Apache-2.0", "dependencies": { "test-post-install": "*" }, "bin":"./source" }''', 'source/lib.c': ''' #include <stdio.h> #include "test-post-install/generated.h" int main(){ printf("generated return val=%d\\n", postInstallGenerated()); return 0; } '''}) output = self.runCheckCommand( ['--target', util.nativeTarget(), 'build'], test_dir) self.assertIn('post-install generated file compiled', output) self.assertIn('post-install generated header file included', output) self.assertIn('generated .cmake file included', output) def runCheckCommand(self, args, test_dir): stdout, stderr, statuscode = cli.run(args, cwd=test_dir) # print stdout self.assertEqual(statuscode, 0) return stdout + stderr
class TestCLIInstall(unittest.TestCase): @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_installRegistryRef(self): pass @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_installGithubRef(self): pass @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_installDeps(self): pass @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_installAllTestDeps(self): pass @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_installNoTestDeps(self): pass @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_installComplexDeps(self): pass @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_installAllComplexTestDeps(self): pass @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_installNoComplexTestDeps(self): pass @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_remove(self): pass @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_uninstall(self): pass @unittest.skipIf(not hasGithubConfig(), "a github authtoken must be specified for this test (run yotta login, or set YOTTA_GITHUB_AUTHTOKEN)") def test_uninstallNonExistent(self): pass @unittest.skipIf(not util.canBuildNatively(), "cannot build natively") def test_postInstall_topLevel(self): pass @unittest.skipIf(not util.canBuildNatively(), "cannot build natively") def test_postInstall_dependency(self): pass def runCheckCommand(self, args, test_dir): pass
28
0
17
2
14
1
1
0.06
1
0
0
0
14
0
14
86
264
43
208
65
180
13
183
43
168
1
2
1
14
2,692
ARMmbed/yotta
ARMmbed_yotta/yotta/lib/registry_access.py
yotta.lib.registry_access.RegistryThingVersion
class RegistryThingVersion(access_common.RemoteVersion): def __init__(self, data, namespace, name, registry=None): logger.debug('RegistryThingVersion %s/%s data: %s' % (namespace, name, data)) version = data['version'] self.namespace = namespace self.name = name self.version = version if 'hash' in data and 'sha256' in data['hash']: self.sha256 = data['hash']['sha256'] else: self.sha256 = None url = _tarballURL(self.namespace, self.name, version, registry) super(RegistryThingVersion, self).__init__( version, url, name=name, friendly_source=friendlyRegistryName(registry) ) def unpackInto(self, directory): assert(self.url) _getTarball(self.url, directory, self.sha256)
class RegistryThingVersion(access_common.RemoteVersion): def __init__(self, data, namespace, name, registry=None): pass def unpackInto(self, directory): pass
3
0
9
0
9
0
2
0
1
1
0
0
2
4
2
24
19
1
18
9
15
0
15
9
12
2
3
1
3
2,693
ARMmbed/yotta
ARMmbed_yotta/yotta/test/cli/test_debug.py
yotta.test.cli.test_debug.TestCLIDebug
class TestCLIDebug(unittest.TestCase): @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_noop_debug(self): test_dir = util.writeTestFiles(util.Test_Trivial_Exe, True) target_dir = os.path.realpath(os.path.join( test_dir, 'yotta_targets', 'debug-test-target')) build_dir = os.path.realpath(os.path.join( test_dir, 'build', 'debug-test-target')) util.writeTestFiles(_nopDebugTargetDescription( 'debug-test-target'), test_dir=target_dir) output = util.runCheckCommand( ['--target', 'debug-test-target', 'build'], test_dir) output = util.runCheckCommand( ['--target', 'debug-test-target', 'debug'], test_dir) json_output = output[:output.index(JSON_MARKER)] result = json.loads(json_output) self.assertTrue(result is not None) self.assertEqual(len(result['argv']), 3) self.assertEqual(result['argv'][0], 'source/test-trivial-exe') self.assertEqual(result['env']['YOTTA_PROGRAM'], 'source/test-trivial-exe') self.assertEqual(result['argv'][1], build_dir) self.assertEqual(result['env']['YOTTA_BUILD_DIR'], build_dir) self.assertEqual(result['argv'][2], target_dir) self.assertEqual(result['env']['YOTTA_TARGET_DIR'], target_dir) util.rmRf(test_dir) @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_notfound_debug(self): test_dir = util.writeTestFiles(util.Test_Trivial_Exe, True) target_descr = _nopDebugTargetDescription('debug-test-target') del target_descr['./scripts/nop.py'] util.writeTestFiles(target_descr, test_dir=os.path.join( test_dir, 'yotta_targets', 'debug-test-target')) # in this case, without the script present we expect a failure output = util.runCheckCommand( ['--target', 'debug-test-target', 'build'], test_dir) stdout, stderr, statuscode = cli.run( ['--target', 'debug-test-target', 'debug'], cwd=test_dir) self.assertNotEqual(statuscode, 0) util.rmRf(test_dir)
class TestCLIDebug(unittest.TestCase): @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_noop_debug(self): pass @unittest.skipIf(not util.canBuildNatively(), "can't build natively on windows yet") def test_notfound_debug(self): pass
5
0
16
2
14
1
1
0.03
1
0
0
0
2
0
2
74
36
5
30
15
25
1
28
13
25
1
2
0
2
2,694
ASKIDA/Selenium2LibraryExtension
ASKIDA_Selenium2LibraryExtension/src/Selenium2LibraryExtension/patches/__init__.py
Selenium2LibraryExtension.patches._patches
class _patches(): def __init__(self): pass def _wait_until_exp(self, timeout, error, function, *args): """This replaces the method from Selenium2Library to fix the major logic error in it""" error = error.replace('<TIMEOUT>', self._format_timeout(timeout)) def wait_func(): return None if function(*args) else error self._wait_until_no_error_exp(timeout, wait_func) def _wait_until_no_error_exp(self, timeout, wait_func, *args): """This replaces the method from Selenium2Library to fix the major logic error in it""" timeout = robot.utils.timestr_to_secs( timeout) if timeout is not None else self._timeout_in_secs maxtime = time.time() + timeout while True: try: timeout_error = wait_func(*args) if not timeout_error: return if time.time() > maxtime: raise AssertionError(timeout_error) time.sleep(0.2) except AssertionError: raise except: if time.time() > maxtime: raise continue # patches here Selenium2Library._wait_until = _wait_until_exp Selenium2Library._wait_until_no_error = _wait_until_no_error_exp
class _patches(): def __init__(self): pass def _wait_until_exp(self, timeout, error, function, *args): '''This replaces the method from Selenium2Library to fix the major logic error in it''' pass def wait_func(): pass def _wait_until_no_error_exp(self, timeout, wait_func, *args): '''This replaces the method from Selenium2Library to fix the major logic error in it''' pass
5
2
9
3
6
1
3
0.13
0
1
0
1
3
0
3
3
42
15
24
7
19
3
27
7
22
8
0
3
12
2,695
ASKIDA/Selenium2LibraryExtension
ASKIDA_Selenium2LibraryExtension/src/Selenium2LibraryExtension/keywords/__init__.py
Selenium2LibraryExtension.keywords._keywords
class _keywords(): def __init__(self): pass ################################################################################################## ## Wait Until Element Has Focus ## ## ## ## Author: Olivier Verville ## ## Date: May 6th 2016 ## ################################################################################################## def wait_until_element_has_focus(self, locator, timeout=None): """Waits until the element identified by `locator` has focus. You might rather want to use `Element Focus Should Be Set` | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | timeout | maximum time to wait before the function throws an element not found error (default=None) | 5s |""" self._info("Waiting for focus on '%s'" % (locator)) self._wait_until_no_error( timeout, self._check_element_focus_exp, True, locator, timeout) ################################################################################################## ## Wait Until Element Does Not Have Focus ## ## ## ## Author: Olivier Verville ## ## Date: May 6th 2016 ## ################################################################################################## def wait_until_element_does_not_have_focus(self, locator, timeout=None): """Waits until the element identified by `locator` doesn't have focus. You might rather want to use `Element Focus Should Not Be Set` | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | timeout | maximum time to wait before the function throws an element not found error (default=None) | 5s |""" self._info("Waiting until '%s' does not have focus" % (locator)) self._wait_until_no_error( timeout, self._check_element_focus_exp, False, locator, timeout) ################################################################################################## ## Wait Until Element Value Is ## ## ## ## Author: Olivier Verville ## ## Date: May 7th 2016 ## ################################################################################################## def wait_until_element_value_is(self, locator, expected, strip=False, timeout=None): """Waits until the element identified by `locator` value is exactly the expected value. You might want to use `Element Value Should Be` instead. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | expected | expected value | My Name Is Slim Shady | | strip | boolean, determines whether it should strip the value of the field before comparison | ${True} / ${False} | | timeout | maximum time to wait before the function throws an element not found error (default=None) | 5s |""" self._info("Waiting for '%s' value to be '%s'" % (locator, expected)) self._wait_until_no_error( timeout, self._check_element_value_exp, False, locator, expected, strip, timeout) ################################################################################################## ## Wait Until Element Value Contains ## ## ## ## Author: Olivier Verville ## ## Date: May 7th 2016 ## ################################################################################################## def wait_until_element_value_contains(self, locator, expected, timeout=None): """Waits until the element identified by `locator` contains the expected value. You might want to use `Element Value Should Contain` instead. 
| *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | expected | expected value | Slim Shady | | timeout | maximum time to wait before the function throws an element not found error (default=None) | 5s |""" self._info("Waiting for '%s' value to contain '%s'" % (locator, expected)) self._wait_until_no_error( timeout, self._check_element_value_exp, True, locator, expected, False, timeout) ################################################################################################## ## Set Element Focus ## ## ## ## Author: Olivier Verville ## ## Date: May 7th 2016 ## ################################################################################################## def set_element_focus(self, locator): """Sets focus on the element identified by `locator`. Should be used with elements meant to have focus only, such as text fields. This keywords also waits for the focus to be active by calling the `Wait Until Element Has Focus` keyword. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id |""" self._info("Setting focus on element '%s'" % (locator)) element = self._element_find(locator, True, True) element.send_keys(Keys.NULL) self._wait_until_no_error( None, self._check_element_focus, True, locator) ################################################################################################## ## Clear Input Field ## ## ## ## Author: Olivier Verville ## ## Date: May 7th 2016 ## ################################################################################################## def clear_input_field(self, locator, method=0): """Clears the text field identified by `locator` The element.clear() method doesn't seem to work properly on all browsers, so this keyword was created to offer alternatives. The `method` argument defines the method it should use in order to clear the target field. 0 = Uses the selenium method by doing element.clear \n 1 = Sets focus on the field and presses CTRL + A, and then DELETE \n 2 = Repeatedly presses BACKSPACE until the field is empty This keyword, when using a method other than '2' does not validate it successfully cleared the field, you should handle this verification by yourself. When using the method '2', it presses delete until the field's value is empty. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | method | the clearing method that should be used | no example provided |""" element = self._element_find(locator, True, True) if (int(method) == 0): self._info("Clearing input on element '%s'" % (locator)) element.clear() elif (int(method) == 1): self._info( "Clearing input on element '%s' by pressing 'CTRL + A + DELETE'" % (locator)) element.send_keys(Keys.CONTROL + 'a') element.send_keys(Keys.DELETE) elif (int(method) == 2): self._info( "Clearing input on element '%s' by repeatedly pressing BACKSPACE" % (locator)) while (len(element.get_attribute('value')) != 0): element.send_keys(Keys.BACKSPACE) else: element.clear() ################################################################################################## ## Element Text Color Should Be ## ## ## ## Author: Olivier Verville ## ## Date: May 8th 2016 ## ################################################################################################## def element_text_color_should_be(self, locator, expected): """Verifies the element identified by `locator` has the expected text color (it verifies the CSS attribute color). Color should be in RGBA format. 
Example of rgba format: rgba(RED, GREEN, BLUE, ALPHA) | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | expected | expected color | rgba(0, 128, 0, 1) |""" self._info("Verifying element '%s' has text color '%s'" % (locator, expected)) self._check_element_css_value(locator, 'color', expected) ################################################################################################## ## Element Background Color Should Be ## ## ## ## Author: Olivier Verville ## ## Date: May 8th 2016 ## ################################################################################################## def element_background_color_should_be(self, locator, expected): """Verifies the element identified by `locator` has the expected background color (it verifies the CSS attribute background-color). Color should be in RGBA format. Example of rgba format: rgba(RED, GREEN, BLUE, ALPHA) | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | expected | expected color | rgba(0, 128, 0, 1) |""" self._info("Verifying element '%s' has background color '%s'" % (locator, expected)) self._check_element_css_value(locator, 'background-color', expected) ################################################################################################## ## Element Width Should Be ## ## ## ## Author: Olivier Verville ## ## Date: May 8th 2016 ## ################################################################################################## def element_width_should_be(self, locator, expected): """Verifies the element identified by `locator` has the expected width. Expected width should be in pixels. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | expected | expected width | 800 |""" self._info("Verifying element '%s' width is '%s'" % (locator, expected)) self._check_element_size(locator, 'width', expected) ################################################################################################## ## Element Height Should Be ## ## ## ## Author: Olivier Verville ## ## Date: May 8th 2016 ## ################################################################################################## def element_height_should_be(self, locator, expected): """Verifies the element identified by `locator` has the expected height. Expected height should be in pixels. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | expected | expected height | 600 |""" self._info("Verifying element '%s' height is '%s'" % (locator, expected)) self._check_element_size(locator, 'height', expected) ################################################################################################## ## Element Value Should Be ## ## ## ## Author: Olivier Verville ## ## Date: May 9th 2016 ## ################################################################################################## def element_value_should_be(self, locator, expected, strip=False): """Verifies the element identified by `locator` has the expected value. 
| *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | expected | expected value | My Name Is Slim Shady | | strip | Boolean, determines whether it should strip the field's value before comparison or not | ${True} / ${False} |""" self._info("Verifying element '%s' value is '%s'" % (locator, expected)) element = self._element_find(locator, True, True) value = element.get_attribute('value') if (strip): value = value.strip() if str(value) == expected: return else: raise AssertionError( "Element '%s' value was not '%s', it was '%s'" % (locator, expected, value)) ################################################################################################## ## Element Value Should Not Be ## ## ## ## Author: Olivier Verville ## ## Date: May 9th 2016 ## ################################################################################################## def element_value_should_not_be(self, locator, value, strip=False): """Verifies the element identified by `locator` is not the specified value. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | value | value it should not be | My Name Is Slim Shady | | strip | Boolean, determines whether it should strip the field's value before comparison or not | ${True} / ${False} |""" self._info("Verifying element '%s' value is not '%s'" % (locator, value)) element = self._element_find(locator, True, True) elem_value = str(element.get_attribute('value')) if (strip): elem_value = elem_value.strip() if elem_value == value: raise AssertionError( "Value was '%s' for element '%s' while it shouldn't have" % (elem_value, locator)) ################################################################################################## ## Element Value Should Contain ## ## ## ## Author: Olivier Verville ## ## Date: May 9th 2016 ## ################################################################################################## def element_value_should_contain(self, locator, expected): """Verifies the element identified by `locator` contains the expected value. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | expected | expected value | Slim Shady |""" self._info("Verifying element '%s' value contains '%s'" % (locator, expected)) element = self._element_find(locator, True, True) value = str(element.get_attribute('value')) if expected in value: return else: raise AssertionError("Value '%s' did not appear in element '%s'. It's value was '%s'" % ( expected, locator, value)) ################################################################################################## ## Element Value Should Not Contain ## ## ## ## Author: Olivier Verville ## ## Date: May 9th 2016 ## ################################################################################################## def element_value_should_not_contain(self, locator, value): """Verifies the element identified by `locator` does not contain the specified value. 
| *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | value | value it should not contain | Slim Shady |""" self._info("Verifying element '%s' value does not contain '%s'" % (locator, value)) element = self._element_find(locator, True, True) elem_value = str(element.get_attribute('value')) if value in elem_value: raise AssertionError( "Value '%s' was found in element '%s' while it shouldn't have" % (value, locator)) ################################################################################################## ## Element Focus Should Be Set ## ## ## ## Author: Olivier Verville ## ## Date: May 24th 2016 ## ################################################################################################## def element_focus_should_be_set(self, locator): """Verifies the element identified by `locator` has focus. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id |""" self._info("Verifying element '%s' focus is set" % locator) self._check_element_focus(True, locator) ################################################################################################## ## Element Focus Should Not Be Set ## ## ## ## Author: Olivier Verville ## ## Date: May 24th 2016 ## ################################################################################################## def element_focus_should_not_be_set(self, locator): """Verifies the element identified by `locator` does not have focus. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id |""" self._info("Verifying element '%s' focus is not set" % locator) self._check_element_focus(False, locator) ################################################################################################## ## Element Css Attribute Should Be ## ## ## ## Author: Olivier Verville ## ## Date: May 24th 2016 ## ################################################################################################## def element_css_attribute_should_be(self, locator, prop, expected): """Verifies the element identified by `locator` has the expected value for the targeted `prop`. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | prop | targeted css attribute | background-color | | expected | expected value | rgba(0, 128, 0, 1) |""" self._info("Verifying element '%s' has css attribute '%s' with a value of '%s'" % ( locator, prop, expected)) self._check_element_css_value(locator, prop, expected) ################################################################################################## ## Wait Until Page Contains Elements ## ## ## ## Author: Olivier Verville ## ## Date: May 24th 2016 ## ################################################################################################## def wait_until_page_contains_elements(self, timeout, *locators): """This is a copy of `Wait Until Page Contains Element` but it allows multiple arguments in order to wait for more than one element. 
| *Argument* | *Description* | *Example* | | timeout | maximum time to wait, if set to ${None} it will use Selenium's default timeout | 5s | | *locators | Selenium 2 element locator(s) | id=MyId |""" self._wait_until_no_error(timeout, self._wait_for_elements, locators) ################################################################################################## ## Wait Until Page Contains One Of These Elements ## ## ## ## Author: Olivier Verville ## ## Date: May 24th 2016 ## ################################################################################################## def wait_until_page_contains_one_of_these_elements(self, timeout, *locators): """Waits until at least one of the specified elements is found. | *Argument* | *Description* | *Example* | | timeout | maximum time to wait, if set to ${None} it will use Selenium's default timeout | 5s | | *locators | Selenium 2 element locator(s) | id=MyId |""" self._wait_until_no_error( timeout, self._wait_for_at_least_one_element, locators) ################################################################################################## ## Wait Until Page Does Not Contain These Elements ## ## ## ## Author: Olivier Verville ## ## Date: June 29th 2016 ## ################################################################################################## def wait_until_page_does_not_contain_these_elements(self, timeout, *locators): """Waits until all of the specified elements are not found on the page. | *Argument* | *Description* | *Example* | | timeout | maximum time to wait, if set to ${None} it will use Selenium's default timeout | 5s | | *locators | Selenium 2 element locator(s) | id=MyId |""" self._wait_until_no_error( timeout, self._wait_for_elements_to_go_away, locators) ############################################################################################## ## Tap Key ## ## ## ## Author: Olivier Verville ## ## Date: June 28th 2016 ## ############################################################################################## def tap_key(self, key, complementKey=None): """Presses the specified `key`. The `complementKey` defines the key to hold when pressing the specified `key`. For example, you could use ${VK_TAB} as `key` and use ${VK_SHIFT} as `complementKey' in order to press Shift + Tab (back tab) | =Argument= | =Description= | =Example= | | key | the key to press | ${VK_F4} | | complementKey | the key to hold while pressing the key passed in previous argument | ${VK_ALT} |""" driver = self._current_browser() if (complementKey is not None): ActionChains(driver).key_down(complementKey).send_keys( key).key_up(complementKey).perform() else: ActionChains(driver).send_keys(key).perform() ############################################################################################## ## Wait Until Element Is Clickable ## ## ## ## Author: Olivier Verville ## ## Date: July 13th 2016 ## ############################################################################################## def wait_until_element_is_clickable(self, locator, timeout=None): """Clicks the element specified by `locator` until the operation succeeds. This should be used with buttons that are generated in real-time and that don't have their click handling available immediately. This keyword avoids unclickable element exceptions. 
| =Argument= | =Description= | =Example= | | locator | Selenium 2 element locator(s) | id=MyId | | timeout | maximum time to wait, if set to ${None} it will use Selenium's default timeout | 5s |""" self._wait_until_no_error( timeout, self._wait_for_click_to_succeed, locator) ################################################################################################################################################################ ## |||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||## ## HELPER METHODS ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||## ## |||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||## ################################################################################################################################################################ ############################## ## Checks an element focus ## ############################## def _check_element_focus_exp(self, set, locator, timeout=None): if set: element = self._element_find(locator, True, False) if not element: return "Element locator '%s' did not match any elements after %s" % (locator, self._format_timeout(timeout)) driver = self._current_browser() if element == driver.switch_to.active_element: return else: return "Element '%s' did not get focus after %s" % (locator, self._format_timeout(timeout)) else: element = self._element_find(locator, True, False) if not element: return "Element locator '%s' did not match any elements after %s" % (locator, self._format_timeout(timeout)) driver = self._current_browser() if element != driver.switch_to.active_element: return else: return "Element '%s' still had focus after %s while it shouldn't have" % (locator, self._format_timeout(timeout)) ################################## ## Checks an element's value ## ################################## def _check_element_value_exp(self, partial, locator, expected, strip=False, timeout=None): if partial: element = self._element_find(locator, True, False) if not element: return "Element locator '%s' did not match any elements after %s" % (locator, self._format_timeout(timeout)) value = str(element.get_attribute('value')) if (strip): value = value.strip() if expected in value: return else: return "Value '%s' did not appear in %s to element '%s'. It's value was '%s'." % (expected, self._format_timeout(timeout), locator, value) else: element = self._element_find(locator, True, False) if not element: return "Element locator '%s' did not match any elements after %s" % (locator, self._format_timeout(timeout)) value = element.get_attribute('value') if (strip): value = value.strip() if str(value) == expected: return else: return "Element '%s' value was not %s after %s" % (locator, expected, self._format_timeout(timeout))
############################## ## Checks an element focus ## ############################## def _check_element_focus(self, set, locator): if set: element = self._element_find(locator, True, True) driver = self._current_browser() if element == driver.switch_to.active_element: return else: raise AssertionError( "Element '%s' did not have focus while it should have" % locator) else: element = self._element_find(locator, True, True) driver = self._current_browser() if element != driver.switch_to.active_element: return else: raise AssertionError( "Element '%s' had focus while it shouldn't have" % locator) ########################################## ## Checks the specified css attribute ## ########################################## def _check_element_css_value(self, locator, prop, expected): element = self._element_find(locator, True, True) value = element.value_of_css_property(prop) if (value != expected): raise AssertionError("Element locator '%s' css property '%s' had a value of '%s' while it should have been '%s'" % ( locator, prop, value, expected)) ###################################### ## Checks element size attribute ## ###################################### def _check_element_size(self, locator, type, expected): element = self._element_find(locator, True, True) size = str(element.size.get(type)) if size != expected: raise AssertionError("The %s of element '%s' should have been '%s' but in fact it was '%s'" % ( type, locator, expected, size)) ################################## ## Waits for multiple elements ## ################################## def _wait_for_elements(self, locators): for locator in locators: element = self._element_find(locator, True, False) if not element: return "Element '%s' couldn't be found" % locator ################################################## ## Waits for at least one of the given elements ## ################################################## def _wait_for_at_least_one_element(self, locators): for locator in locators: element = self._element_find(locator, True, False) if element is not None: return return "Couldn't find any of the expected elements from '%s'" % str(locators) ################################################################## ## Simpler implementation of Wait Until Page Contains Element ## ################################################################## def _wait_for_element(self, locator): element = self._element_find(locator, True, False) if not element: return "Element '%s' couldn't be found" % locator ######################################################################## ## Simpler implementation of Wait Until Page Does Not Contain Element ## ######################################################################## def _wait_for_element_to_go_away(self, locator): element = self._element_find(locator, True, False) if element is not None: return "Element '%s' shouldn't have been there" % locator ################################################### ## Waits for multiple elements to not be present ## ################################################### def _wait_for_elements_to_go_away(self, locators): for locator in locators: element = self._element_find(locator, True, False) if element is not None: return "Element '%s' shouldn't have been there" % locator
################################################################################################################### ## Clicks an element until there is no error (created to avoid click errors on periodically unclickable elements ## ################################################################################################################### def _wait_for_click_to_succeed(self, locator): element = self._element_find(locator, True, False) if not element: return "Couldn't find the element '%s', click operation failed" % locator element.click()
class _keywords(): def __init__(self): pass def wait_until_element_has_focus(self, locator, timeout=None): '''Waits until the element identified by `locator` has focus. You might rather want to use `Element Focus Should Be Set` | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | timeout | maximum time to wait before the function throws an element not found error (default=None) | 5s |''' pass def wait_until_element_does_not_have_focus(self, locator, timeout=None): '''Waits until the element identified by `locator` doesn't have focus. You might rather want to use `Element Focus Should Not Be Set` | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | timeout | maximum time to wait before the function throws an element not found error (default=None) | 5s |''' pass def wait_until_element_value_is(self, locator, expected, strip=False, timeout=None): '''Waits until the element identified by `locator` value is exactly the expected value. You might want to use `Element Value Should Be` instead. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | expected | expected value | My Name Is Slim Shady | | strip | boolean, determines whether it should strip the value of the field before comparison | ${True} / ${False} | | timeout | maximum time to wait before the function throws an element not found error (default=None) | 5s |''' pass def wait_until_element_value_contains(self, locator, expected, timeout=None): '''Waits until the element identified by `locator` contains the expected value. You might want to use `Element Value Should Contain` instead. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | expected | expected value | Slim Shady | | timeout | maximum time to wait before the function throws an element not found error (default=None) | 5s |''' pass def set_element_focus(self, locator): '''Sets focus on the element identified by `locator`. Should be used with elements meant to have focus only, such as text fields. This keywords also waits for the focus to be active by calling the `Wait Until Element Has Focus` keyword. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id |''' pass def clear_input_field(self, locator, method=0): '''Clears the text field identified by `locator` The element.clear() method doesn't seem to work properly on all browsers, so this keyword was created to offer alternatives. The `method` argument defines the method it should use in order to clear the target field. 0 = Uses the selenium method by doing element.clear 1 = Sets focus on the field and presses CTRL + A, and then DELETE 2 = Repeatedly presses BACKSPACE until the field is empty This keyword, when using a method other than '2' does not validate it successfully cleared the field, you should handle this verification by yourself. When using the method '2', it presses delete until the field's value is empty. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | method | the clearing method that should be used | no example provided |''' pass def element_text_color_should_be(self, locator, expected): '''Verifies the element identified by `locator` has the expected text color (it verifies the CSS attribute color). Color should be in RGBA format. 
Example of rgba format: rgba(RED, GREEN, BLUE, ALPHA) | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | expected | expected color | rgba(0, 128, 0, 1) |''' pass def element_background_color_should_be(self, locator, expected): '''Verifies the element identified by `locator` has the expected background color (it verifies the CSS attribute background-color). Color should be in RGBA format. Example of rgba format: rgba(RED, GREEN, BLUE, ALPHA) | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | expected | expected color | rgba(0, 128, 0, 1) |''' pass def element_width_should_be(self, locator, expected): '''Verifies the element identified by `locator` has the expected width. Expected width should be in pixels. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | expected | expected width | 800 |''' pass def element_height_should_be(self, locator, expected): '''Verifies the element identified by `locator` has the expected height. Expected height should be in pixels. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | expected | expected height | 600 |''' pass def element_value_should_be(self, locator, expected, strip=False): '''Verifies the element identified by `locator` has the expected value. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | expected | expected value | My Name Is Slim Shady | | strip | Boolean, determines whether it should strip the field's value before comparison or not | ${True} / ${False} |''' pass def element_value_should_not_be(self, locator, value, strip=False): '''Verifies the element identified by `locator` is not the specified value. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | value | value it should not be | My Name Is Slim Shady | | strip | Boolean, determines whether it should strip the field's value before comparison or not | ${True} / ${False} |''' pass def element_value_should_contain(self, locator, expected): '''Verifies the element identified by `locator` contains the expected value. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | expected | expected value | Slim Shady |''' pass def element_value_should_not_contain(self, locator, value): '''Verifies the element identified by `locator` does not contain the specified value. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | value | value it should not contain | Slim Shady |''' pass def element_focus_should_be_set(self, locator): '''Verifies the element identified by `locator` has focus. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id |''' pass def element_focus_should_not_be_set(self, locator): '''Verifies the element identified by `locator` does not have focus. | *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id |''' pass def element_css_attribute_should_be(self, locator, prop, expected): '''Verifies the element identified by `locator` has the expected value for the targeted `prop`. 
| *Argument* | *Description* | *Example* | | locator | Selenium 2 element locator | id=my_id | | prop | targeted css attribute | background-color | | expected | expected value | rgba(0, 128, 0, 1) |''' pass def wait_until_page_contains_elements(self, timeout, *locators): '''This is a copy of `Wait Until Page Contains Element` but it allows multiple arguments in order to wait for more than one element. | *Argument* | *Description* | *Example* | | timeout | maximum time to wait, if set to ${None} it will use Selenium's default timeout | 5s | | *locators | Selenium 2 element locator(s) | id=MyId |''' pass def wait_until_page_contains_one_of_these_elements(self, timeout, *locators): '''Waits until at least one of the specified elements is found. | *Argument* | *Description* | *Example* | | timeout | maximum time to wait, if set to ${None} it will use Selenium's default timeout | 5s | | *locators | Selenium 2 element locator(s) | id=MyId |''' pass def wait_until_page_does_not_contain_these_elements(self, timeout, *locators): '''Waits until all of the specified elements are not found on the page. | *Argument* | *Description* | *Example* | | timeout | maximum time to wait, if set to ${None} it will use Selenium's default timeout | 5s | | *locators | Selenium 2 element locator(s) | id=MyId |''' pass def tap_key(self, key, complementKey=None): '''Presses the specified `key`. The `complementKey` defines the key to hold when pressing the specified `key`. For example, you could use ${VK_TAB} as `key` and use ${VK_SHIFT} as `complementKey' in order to press Shift + Tab (back tab) | =Argument= | =Description= | =Example= | | key | the key to press | ${VK_F4} | | complementKey | the key to hold while pressing the key passed in previous argument | ${VK_ALT} |''' pass def wait_until_element_is_clickable(self, locator, timeout=None): '''Clicks the element specified by `locator` until the operation succeeds. This should be used with buttons that are generated in real-time and that don't have their click handling available immediately. This keyword avoids unclickable element exceptions. | =Argument= | =Description= | =Example= | | locator | Selenium 2 element locator(s) | id=MyId | | timeout | maximum time to wait, if set to ${None} it will use Selenium's default timeout | 5s |''' pass def _check_element_focus_exp(self, set, locator, timeout=None): pass def _check_element_value_exp(self, partial, locator, expected, strip=False, timeout=None): pass def _check_element_focus(self, set, locator): pass def _check_element_css_value(self, locator, prop, expected): pass def _check_element_size(self, locator, type, expected): pass def _wait_for_elements(self, locators): pass def _wait_for_at_least_one_element(self, locators): pass def _wait_for_element(self, locator): pass def _wait_for_element_to_go_away(self, locator): pass def _wait_for_elements_to_go_away(self, locators): pass def _wait_for_click_to_succeed(self, locator): pass
35
22
12
3
5
4
2
1.67
0
3
0
1
34
0
34
34
606
139
175
65
140
292
183
65
148
8
0
2
71
2,696
ASKIDA/Selenium2LibraryExtension
ASKIDA_Selenium2LibraryExtension/src/Selenium2LibraryExtension/__init__.py
Selenium2LibraryExtension.Selenium2LibraryExtension
class Selenium2LibraryExtension(Selenium2Library, _patches, _keywords): """*Selenium2LibraryExtension*\n --- Created to provide functionalities that either weren't available in the original library, or required overhead in order to be achieved.\n Also provides major bugfixes. This library is meant to give a control closer to the level of control you would get from using Selenium's API directly. = New Locators = Selenium2LibraryExtension implements new element location strategies on top of the original location strategies from Selenium2Library. For more information about locators, see Selenium2Library's [http://robotframework.org/Selenium2Library/doc/Selenium2Library.html#Introduction|locator documentation]. | =locator= | =Description= | =Example of Usage= | | input | retrieves the currently active input element. the argument always is `current` | input=current | | meta_name | retrieves the meta element(s) with the specified name | meta_name=description | | last_tag | retrieves the last element with the specified tag | last_tag=div | | first_tag | retrieves the first element with the specified tag | first_tag=div |""" ROBOT_LIBRARY_DOC_FORMAT = 'ROBOT' ROBOT_LIBRARY_SCOPE = 'GLOBAL' __version__ = '1.1.0' def __init__(self, timeout=10.0, implicit_wait=0.0, run_on_failure='Capture Page Screenshot', screenshot_root_directory=None): """Selenium2LibraryExtension can be imported with the same arguments as Selenium2Library. See the [http://robotframework.org/Selenium2Library/doc/Selenium2Library.html#Importing|importing] documentation for more details.""" for base in Selenium2Library.__bases__: base.__init__(self) self.screenshot_root_directory = screenshot_root_directory self.set_selenium_timeout(timeout) self.set_selenium_implicit_wait(implicit_wait) self.register_keyword_to_run_on_failure(run_on_failure) self.ROBOT_LIBRARY_LISTENER = LibraryListener() # register new locators self.add_location_strategy( 'input', self._locator_find_active_element, persist=True) self.add_location_strategy( 'meta_name', self._locator_find_by_meta_name, persist=True) self.add_location_strategy( 'last_tag', self._locator_find_last_by_tag, persist=True) self.add_location_strategy( 'first_tag', self._locator_find_first_by_tag, persist=True) ############################################################################################################## ## ## ## LOCATORS ## ## ## ############################################################################################################## def _locator_find_active_element(self, browser, criteria, tag, constraints): return browser.switch_to.active_element def _locator_find_by_meta_name(self, browser, criteria, tag, constraints): return browser.execute_script("return $('meta[name=%s]');" % criteria) def _locator_find_last_by_tag(self, browser, criteria, tag, constraints): return browser.execute_script("return $('%s:last');" % criteria) def _locator_find_first_by_tag(self, browser, criteria, tag, constraints): return browser.execute_script("return $('%s:first');" % criteria)
class Selenium2LibraryExtension(Selenium2Library, _patches, _keywords): '''*Selenium2LibraryExtension* --- Created to provide functionalities that either weren't available in the original library, or required overhead in order to be achieved. Also provides major bugfixes. This library is meant to give a control closer to the level of control you would get from using Selenium's API directly. = New Locators = Selenium2LibraryExtension implements new element location strategies on top of the original location strategies from Selenium2Library. For more information about locators, see Selenium2Library's [http://robotframework.org/Selenium2Library/doc/Selenium2Library.html#Introduction|locator documentation]. | =locator= | =Description= | =Example of Usage= | | input | retrieves the currently active input element. the argument always is `current` | input=current | | meta_name | retrieves the meta element(s) with the specified name | meta_name=description | | last_tag | retrieves the last element with the specified tag | last_tag=div | | first_tag | retrieves the first element with the specified tag | first_tag=div |''' def __init__(self, timeout=10.0, implicit_wait=0.0, run_on_failure='Capture Page Screenshot', screenshot_root_directory=None): '''Selenium2LibraryExtension can be imported with the same arguments as Selenium2Library. See the [http://robotframework.org/Selenium2Library/doc/Selenium2Library.html#Importing|importing] documentation for more details.''' pass def _locator_find_active_element(self, browser, criteria, tag, constraints): pass def _locator_find_by_meta_name(self, browser, criteria, tag, constraints): pass def _locator_find_last_by_tag(self, browser, criteria, tag, constraints): pass def _locator_find_first_by_tag(self, browser, criteria, tag, constraints): pass
6
2
5
1
4
1
1
0.88
3
0
0
0
5
2
5
42
58
13
24
12
18
21
24
12
18
2
1
1
6
2,697
ASMfreaK/habitipy
ASMfreaK_habitipy/habitipy/cli.py
habitipy.cli.FeedPet
class FeedPet(Pets): """Feeds a pet or pets with specified food.""" sleep_time = cli.SwitchAttr( ['-S', '--sleep-time'], argtype=int, default=1, help=_('Time to wait between feeding each pet to avoid overloading the server')) # pylint: disable=line-too-long maximum_food = cli.SwitchAttr( ['-M', '--maxmimum-food'], argtype=int, default=10, help=_('Maximum amount of food to feed a pet') ) def main(self, *food): super().main() if len(food) != 1: self.log.error(_('error: must specify one food to feed.')) # noqa: Q000 return food = food[0] user = self.api.user.get() pets = user['items']['pets'] mounts = user['items']['mounts'] color_specifier = self.color_specifier if color_specifier: color_specifier = color_specifier[0].capitalize() + color_specifier[1:] pet_specifier = self.pet_specifier if pet_specifier: pet_specifier = pet_specifier[0].capitalize() + pet_specifier[1:] for pet in pets: (pettype, color) = pet.split('-') if pet_specifier and pettype != pet_specifier: continue if color_specifier and color != color_specifier: continue food_needed = self.get_food_needed(pets[pet]) if food_needed > 0 and pet not in mounts: food_amount = min(food_needed, self.maximum_food) print(_(f'feeding {food_amount} {food} to {color} {pettype}')) response = self.api.user.feed[pet][food].post(uri_params={ 'amount': food_amount, }) print(_(f' new fullness: {self.get_full_percent(response)}%')) time.sleep(self.sleep_time) else: print(_(f'NOT feeding {color} {pettype}'))
class FeedPet(Pets): '''Feeds a pet or pets with specified food.''' def main(self, *food): pass
2
1
36
4
32
1
8
0.07
1
1
0
0
1
0
1
6
46
5
40
14
38
3
32
14
30
8
4
2
8
2,698
ASMfreaK/habitipy
ASMfreaK_habitipy/habitipy/cli.py
habitipy.cli.Food
class Food(ApplicationWithApi): """Lists food from the inventory.""" DESCRIPTION = _('List inventory food and their quantities available') def main(self): super().main() user = self.api.user.get() food_list = user['items']['food'] food_list_keys = sorted(food_list, key=lambda x: food_list[x]) for food in food_list_keys: print(f'{food:<30}: {food_list[food]}')
class Food(ApplicationWithApi): '''Lists food from the inventory.''' def main(self): pass
2
1
7
0
7
0
2
0.11
1
1
0
0
1
0
1
3
11
1
9
7
7
1
9
7
7
2
3
1
2
2,699
ASMfreaK/habitipy
ASMfreaK_habitipy/habitipy/cli.py
habitipy.cli.HabiticaCli
class HabiticaCli(ConfiguredApplication): # pylint: disable=missing-class-docstring DESCRIPTION = _("tools and library for Habitica restful API") # noqa: Q000 VERSION = pkg_resources.get_distribution('habitipy').version def main(self): if self.nested_command: return super().main() self.log.error(_("No subcommand given, exiting"))
class HabiticaCli(ConfiguredApplication): def main(self): pass
2
0
5
0
5
1
2
0.38
1
1
0
0
1
0
1
2
8
0
8
4
6
3
8
4
6
2
2
1
2