Dataset schema. Each record carries the columns below; min/max are the observed ranges across the full dataset (for string columns the range is over string length):

| column | dtype | min | max |
|---|---|---|---|
| id | int64 | 0 | 843k |
| repository_name | string | 7 | 55 |
| file_path | string | 9 | 332 |
| class_name | string | 3 | 290 |
| human_written_code | string | 12 | 4.36M |
| class_skeleton | string | 19 | 2.2M |
| total_program_units | int64 | 1 | 9.57k |
| total_doc_str | int64 | 0 | 4.2k |
| AvgCountLine | float64 | 0 | 7.89k |
| AvgCountLineBlank | float64 | 0 | 300 |
| AvgCountLineCode | float64 | 0 | 7.89k |
| AvgCountLineComment | float64 | 0 | 7.89k |
| AvgCyclomatic | float64 | 0 | 130 |
| CommentToCodeRatio | float64 | 0 | 176 |
| CountClassBase | float64 | 0 | 48 |
| CountClassCoupled | float64 | 0 | 589 |
| CountClassCoupledModified | float64 | 0 | 581 |
| CountClassDerived | float64 | 0 | 5.37k |
| CountDeclInstanceMethod | float64 | 0 | 4.2k |
| CountDeclInstanceVariable | float64 | 0 | 299 |
| CountDeclMethod | float64 | 0 | 4.2k |
| CountDeclMethodAll | float64 | 0 | 4.2k |
| CountLine | float64 | 1 | 115k |
| CountLineBlank | float64 | 0 | 9.01k |
| CountLineCode | float64 | 0 | 94.4k |
| CountLineCodeDecl | float64 | 0 | 46.1k |
| CountLineCodeExe | float64 | 0 | 91.3k |
| CountLineComment | float64 | 0 | 27k |
| CountStmt | float64 | 1 | 93.2k |
| CountStmtDecl | float64 | 0 | 46.1k |
| CountStmtExe | float64 | 0 | 90.2k |
| MaxCyclomatic | float64 | 0 | 759 |
| MaxInheritanceTree | float64 | 0 | 16 |
| MaxNesting | float64 | 0 | 34 |
| SumCyclomatic | float64 | 0 | 6k |

Each record below is laid out as `id | repository_name | file_path | class_name |`, followed by the human_written_code cell, a lone `|`, the class_skeleton cell, and finally the 29 numeric metric cells in the column order above (total_program_units through SumCyclomatic).
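The metric names appear to follow the SciTools Understand naming convention (CountLineCode, MaxCyclomatic, CountDeclMethodAll, and so on), and spot-checking the records suggests CommentToCodeRatio is simply CountLineComment / CountLineCode (record 4,306: 144 / 392 ≈ 0.37). Below is a minimal sketch of loading and filtering such a dump with pandas; the parquet file name is hypothetical, standing in for whatever artifact actually backs this table.

```python
# Minimal loading sketch; "class_dataset.parquet" is a hypothetical
# stand-in for the real export behind this table.
import pandas as pd

df = pd.read_parquet("class_dataset.parquet")

# Sanity-check the derived ratio column (CountLineCode can be 0, so
# guard against division by zero).
code = df["CountLineCode"].replace(0, float("nan"))
ratio = df["CountLineComment"] / code

# Pull documented, low-complexity classes for manual inspection.
sample = df[(df["total_doc_str"] > 0) & (df["MaxCyclomatic"] <= 5)]
print(sample[["repository_name", "class_name", "CountLineCode"]].head())
```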
4,300 | AltSchool/dynamic-rest | AltSchool_dynamic-rest/tests/models.py | tests.models.Part |
class Part(models.Model):
car = models.ForeignKey(Car, on_delete=models.CASCADE)
name = models.CharField(max_length=60)
country = models.ForeignKey(Country, on_delete=models.CASCADE)
|
class Part(models.Model):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 1 | 0 | 0 |
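Each record pairs the full class source with a signature-only skeleton, as in record 4,300 above: function bodies are reduced to a docstring plus `pass`, and class-level attributes are dropped. The curation pipeline is not documented here, so the following is only a hypothetical sketch of how such skeletons could be derived with Python's `ast` module.

```python
# Hypothetical skeleton extractor (Python 3.9+ for ast.unparse); the
# real pipeline behind class_skeleton is not documented in this dump.
import ast


def is_docstring(stmt: ast.stmt) -> bool:
    """True for a bare string-literal expression statement."""
    return (isinstance(stmt, ast.Expr)
            and isinstance(stmt.value, ast.Constant)
            and isinstance(stmt.value.value, str))


def skeletonize(source: str) -> str:
    """Keep class/function signatures and docstrings; drop all logic."""
    tree = ast.parse(source)
    for node in ast.walk(tree):
        if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
            # A function keeps its docstring (if any) and becomes `pass`.
            doc = [s for s in node.body[:1] if is_docstring(s)]
            node.body = doc + [ast.Pass()]
        elif isinstance(node, ast.ClassDef):
            # A class keeps docstrings and nested definitions only;
            # attribute assignments are dropped, matching the records.
            kept = [s for s in node.body if is_docstring(s) or isinstance(
                s, (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef))]
            node.body = kept or [ast.Pass()]
    return ast.unparse(tree)


print(skeletonize("class Part:\n    car = 1\n"))  # -> class Part: pass
```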
4,301 | AltSchool/dynamic-rest | AltSchool_dynamic-rest/tests/models.py | tests.models.Location |
class Location(models.Model):
name = models.TextField()
blob = models.TextField()
|
class Location(models.Model):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
4,302 | AltSchool/dynamic-rest | AltSchool_dynamic-rest/benchmarks/drest.py | benchmarks.drest.UserViewSet |
class UserViewSet(viewsets.DynamicModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
|
class UserViewSet(viewsets.DynamicModelViewSet):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 3 | 0 | 0 |
4,303 | AltSchool/dynamic-rest | AltSchool_dynamic-rest/benchmarks/drf.py | benchmarks.drf.UserViewSet |
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
|
class UserViewSet(viewsets.ModelViewSet):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
4,304 | AltSchool/dynamic-rest | AltSchool_dynamic-rest/benchmarks/drf.py | benchmarks.drf.UserWithAllViewSet |
class UserWithAllViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserWithAllSerializer
|
class UserWithAllViewSet(viewsets.ModelViewSet):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
4,305 | AltSchool/dynamic-rest | AltSchool_dynamic-rest/dynamic_rest/renderers.py | dynamic_rest.renderers.DynamicBrowsableAPIRenderer |
class DynamicBrowsableAPIRenderer(BrowsableAPIRenderer):
"""Renderer class that adds directory support to the Browsable API."""
template = 'dynamic_rest/api.html'
def get_context(self, data, media_type, context):
from dynamic_rest.routers import get_directory
context = super(DynamicBrowsableAPIRenderer, self).get_context(
data,
media_type,
context
)
request = context['request']
context['directory'] = get_directory(request)
return context
|
class DynamicBrowsableAPIRenderer(BrowsableAPIRenderer):
'''Renderer class that adds directory support to the Browsable API.'''
def get_context(self, data, media_type, context):
pass
| 2 | 1 | 11 | 1 | 10 | 0 | 1 | 0.08 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 16 | 3 | 12 | 5 | 9 | 1 | 8 | 5 | 5 | 1 | 1 | 0 | 1 |
4,306 | AltSchool/dynamic-rest | AltSchool_dynamic-rest/dynamic_rest/serializers.py | dynamic_rest.serializers.WithDynamicSerializerMixin |
class WithDynamicSerializerMixin(
CacheableFieldMixin,
WithResourceKeyMixin,
DynamicSerializerBase
):
"""Base class for DREST serializers.
This class provides support for dynamic field inclusions/exclusions.
Like DRF, DREST serializers support a few Meta class options:
- model - class
- name - string
- plural_name - string
- defer_many_relations - bool
- hash_ids - bool
- fields - list of strings
- deferred_fields - list of strings
- immutable_fields - list of strings
- read_only_fields - list of strings
- untrimmed_fields - list of strings
"""
ENABLE_FIELDS_CACHE = False
def __new__(cls, *args, **kwargs):
"""
Custom constructor that sets the ListSerializer to
DynamicListSerializer to avoid re-evaluating querysets.
Addresses DRF 3.1.0 bug:
https://github.com/tomchristie/django-rest-framework/issues/2704
"""
meta = getattr(cls, 'Meta', None)
if not meta:
meta = type('Meta', (), {})
cls.Meta = meta
list_serializer_class = getattr(
meta,
'list_serializer_class',
settings.LIST_SERIALIZER_CLASS or DynamicListSerializer,
)
if not issubclass(list_serializer_class, DynamicListSerializer):
list_serializer_class = DynamicListSerializer
meta.list_serializer_class = list_serializer_class
return super(
WithDynamicSerializerMixin, cls
).__new__(
cls, *args, **kwargs
)
def __init__(
self,
instance=None,
data=fields.empty,
only_fields=None,
include_fields=None,
exclude_fields=None,
request_fields=None,
sideloading=None,
debug=False,
dynamic=True,
embed=False,
envelope=False,
**kwargs
):
"""
Custom initializer that builds `request_fields`.
Arguments:
instance: Initial instance, used by updates.
data: Initial data, used by updates / creates.
only_fields: List of field names to render.
include_fields: List of field names to include.
exclude_fields: List of field names to exclude.
request_fields: Map of field names that supports
nested inclusions / exclusions.
embed: If True, embed the current representation.
If False, sideload the current representation.
sideloading: If True, force sideloading for all descendents.
If False, force embedding for all descendents.
If None (default), respect descendents' embed parameters.
dynamic: If False, disable inclusion / exclusion features.
envelope: If True, wrap `.data` in an envelope.
If False, do not use an envelope.
"""
name = self.get_name()
if data is not fields.empty and name in data and len(data) == 1:
# support POST/PUT key'd by resource name
data = data[name]
if data is not fields.empty:
# if a field is nullable but not required and the implementation
# passes null as a value, remove the field from the data
# this addresses the frontends that send
# undefined resource fields as null on POST/PUT
for field_name, field in six.iteritems(self.get_all_fields()):
if (
field.allow_null is False and
field.required is False and
field_name in data and
data[field_name] is None
):
data.pop(field_name)
kwargs['instance'] = instance
kwargs['data'] = data
# "sideload" argument is pending deprecation as of 1.6
if kwargs.pop('sideload', False):
# if "sideload=True" is passed, turn on the envelope
envelope = True
super(WithDynamicSerializerMixin, self).__init__(**kwargs)
self.envelope = envelope
self.sideloading = sideloading
self.debug = debug
self.dynamic = dynamic
self.request_fields = request_fields or {}
# `embed` is overriden by `sideloading`
embed = embed if sideloading is None else not sideloading
self.embed = embed
self._dynamic_init(only_fields, include_fields, exclude_fields)
self.enable_optimization = settings.ENABLE_SERIALIZER_OPTIMIZATIONS
def _dynamic_init(self, only_fields, include_fields, exclude_fields):
"""
Modifies `request_fields` via higher-level dynamic field interfaces.
Arguments:
only_fields: List of field names to render.
All other fields will be deferred (respects sideloads).
include_fields: List of field names to include.
Adds to default field set, (respects sideloads).
`*` means include all fields.
exclude_fields: List of field names to exclude.
Removes from default field set. If set to '*', all fields are
removed, except for ones that are explicitly included.
"""
if not self.dynamic:
return
if (
isinstance(self.request_fields, dict)
and self.request_fields.pop('*', None) is False
):
exclude_fields = '*'
only_fields = set(only_fields or [])
include_fields = include_fields or []
exclude_fields = exclude_fields or []
if only_fields:
exclude_fields = '*'
include_fields = only_fields
if exclude_fields == '*':
# First exclude all, then add back in explicitly included fields.
include_fields = set(
list(include_fields) + [
field for field, val in six.iteritems(self.request_fields)
if val or val == {}
]
)
all_fields = set(self.get_all_fields().keys()) # this is slow
exclude_fields = all_fields - include_fields
elif include_fields == '*':
all_fields = set(self.get_all_fields().keys()) # this is slow
include_fields = all_fields
for name in exclude_fields:
self.request_fields[name] = False
for name in include_fields:
if not isinstance(self.request_fields.get(name), dict):
# not sideloading this field
self.request_fields[name] = True
@classmethod
def get_model(cls):
"""Get the model, if the serializer has one.
Model serializers should implement this method.
"""
return None
@classmethod
def get_name(cls):
"""Get the serializer name.
The name can be defined on the Meta class or will be generated
automatically from the model name.
"""
if not hasattr(cls.Meta, 'name'):
class_name = getattr(cls.get_model(), '__name__', None)
setattr(
cls.Meta,
'name',
inflection.underscore(class_name) if class_name else None,
)
return cls.Meta.name
@classmethod
def get_plural_name(cls):
"""Get the serializer's plural name.
The plural name may be defined on the Meta class.
If the plural name is not defined,
the pluralized form of the name will be returned.
"""
if not hasattr(cls.Meta, 'plural_name'):
setattr(
cls.Meta,
'plural_name',
inflection.pluralize(cls.get_name())
)
return cls.Meta.plural_name
def get_request_attribute(self, attribute, default=None):
return getattr(
self.context.get('request'),
attribute,
default
)
def get_request_method(self):
return self.get_request_attribute('method', '').upper()
@resettable_cached_property
def _all_fields(self):
"""Returns the entire serializer field set.
Does not respect dynamic field inclusions/exclusions.
"""
if (
not settings.ENABLE_FIELDS_CACHE or
not self.ENABLE_FIELDS_CACHE or
self.__class__ not in FIELDS_CACHE
):
all_fields = super(
WithDynamicSerializerMixin,
self
).get_fields()
if settings.ENABLE_FIELDS_CACHE and self.ENABLE_FIELDS_CACHE:
FIELDS_CACHE[self.__class__] = all_fields
else:
all_fields = copy.copy(FIELDS_CACHE[self.__class__])
for k, field in six.iteritems(all_fields):
if hasattr(field, 'reset'):
field.reset()
for k, field in six.iteritems(all_fields):
field.field_name = k
field.parent = self
return all_fields
def get_all_fields(self):
return self._all_fields
def _get_flagged_field_names(self, fields, attr, meta_attr=None):
if meta_attr is None:
meta_attr = '%s_fields' % attr
meta_list = set(getattr(self.Meta, meta_attr, []))
return {
name for name, field in six.iteritems(fields)
if getattr(field, attr, None) is True or name in
meta_list
}
def _get_deferred_field_names(self, fields):
deferred_fields = self._get_flagged_field_names(fields, 'deferred')
defer_many_relations = (
settings.DEFER_MANY_RELATIONS
if not hasattr(self.Meta, 'defer_many_relations')
else self.Meta.defer_many_relations
)
if defer_many_relations:
# Auto-defer all fields, unless the 'deferred' attribute
# on the field is specifically set to False.
many_fields = self._get_flagged_field_names(fields, 'many')
deferred_fields.update({
name for name in many_fields
if getattr(fields[name], 'deferred', None) is not False
})
return deferred_fields
def flag_fields(self, all_fields, fields_to_flag, attr, value):
for name in fields_to_flag:
field = all_fields.get(name)
if not field:
continue
setattr(field, attr, value)
def get_fields(self):
"""Returns the serializer's field set.
If `dynamic` is True, respects field inclusions/exlcusions.
Otherwise, reverts back to standard DRF behavior.
"""
all_fields = self.get_all_fields()
if self.dynamic is False:
return all_fields
if self.id_only():
return {}
serializer_fields = copy.deepcopy(all_fields)
request_fields = self.request_fields
deferred = self._get_deferred_field_names(serializer_fields)
# apply request overrides
if request_fields:
for name, include in six.iteritems(request_fields):
if name not in serializer_fields:
raise exceptions.ParseError(
'"%s" is not a valid field name for "%s".' %
(name, self.get_name())
)
if include is not False and name in deferred:
deferred.remove(name)
elif include is False:
deferred.add(name)
for name in deferred:
serializer_fields.pop(name)
# Set read_only flags based on read_only_fields meta list.
# Here to cover DynamicFields not covered by DRF.
ro_fields = getattr(self.Meta, 'read_only_fields', [])
self.flag_fields(serializer_fields, ro_fields, 'read_only', True)
pw_fields = getattr(self.Meta, 'untrimmed_fields', [])
self.flag_fields(
serializer_fields,
pw_fields,
'trim_whitespace',
False,
)
# Toggle read_only flags for immutable fields.
# Note: This overrides `read_only` if both are set, to allow
# inferred DRF fields to be made immutable.
immutable_field_names = self._get_flagged_field_names(
serializer_fields,
'immutable'
)
self.flag_fields(
serializer_fields,
immutable_field_names,
'read_only',
value=False if self.get_request_method() == 'POST' else True,
)
return serializer_fields
def is_field_sideloaded(self, field_name):
if not isinstance(self.request_fields, dict):
return False
return isinstance(self.request_fields.get(field_name), dict)
def get_link_fields(self):
return self._link_fields
@resettable_cached_property
def _link_fields(self):
"""Construct dict of name:field for linkable fields."""
query_params = self.get_request_attribute('query_params', {})
if 'exclude_links' in query_params:
return {}
else:
all_fields = self.get_all_fields()
return {
name: field for name, field in six.iteritems(all_fields)
if isinstance(field, DynamicRelationField) and
getattr(field, 'link', True) and
not (
# Skip sideloaded fields
name in self.fields and
self.is_field_sideloaded(name)
) and not (
# Skip included single relations
# TODO: Use links, when we can generate canonical URLs
name in self.fields and
not getattr(field, 'many', False)
)
}
@cached_property
def _readable_fields(self):
# NOTE: Copied from DRF, exists in 3.2.x but not 3.1
return [
field for field in self.fields.values()
if not field.write_only
]
@cached_property
def _readable_id_fields(self):
fields = self._readable_fields
return {
field for field in fields
if (
isinstance(
field,
(DynamicRelationField, RelatedField)
)
and not isinstance(
self.request_fields.get(field.field_name), dict
)
)
}
def _get_hash_ids(self):
"""
Check whether ids should be hashed or not.
Determined by the hash_ids boolean Meta field.
Defaults to False.
Returns:
Boolean.
"""
if hasattr(self.Meta, 'hash_ids'):
return self.Meta.hash_ids
else:
return False
def _faster_to_representation(self, instance):
"""Modified to_representation with optimizations.
1) Returns a plain old dict as opposed to OrderedDict.
(Constructing ordered dict is ~100x slower than `{}`.)
2) Ensure we use a cached list of fields
(this optimization exists in DRF 3.2 but not 3.1)
Arguments:
instance: a model instance or data object
Returns:
Dict of primitive datatypes.
"""
ret = {}
fields = self._readable_fields
is_fast = isinstance(instance, prefetch.FastObject)
id_fields = self._readable_id_fields
for field in fields:
attribute = None
# we exclude dynamic fields here because the proper fastquery
# dereferencing happens in the `get_attribute` method now
if is_fast and not isinstance(
field,
(DynamicGenericRelationField, DynamicRelationField)
):
if field in id_fields and field.source not in instance:
# TODO - make better.
attribute = instance.get(field.source + '_id')
ret[field.field_name] = attribute
continue
else:
try:
attribute = instance[field.source]
except KeyError:
# slower, but does more stuff
# Also, some temp debugging
if hasattr(instance, field.source):
attribute = getattr(instance, field.source)
else:
# Fall back on DRF behavior
attribute = field.get_attribute(instance)
print(
'Missing %s from %s' % (
field.field_name,
self.__class__.__name__
)
)
else:
try:
attribute = field.get_attribute(instance)
except SkipField:
continue
if attribute is None:
# We skip `to_representation` for `None` values so that
# fields do not have to explicitly deal with that case.
ret[field.field_name] = None
else:
ret[field.field_name] = field.to_representation(attribute)
return ret
@resettable_cached_property
def obj_cache(self):
# Note: This gets cached by resettable_cached_property so this
# function only needs to return the initial value.
return {}
def _to_representation(self, instance):
"""Uncached `to_representation`."""
if self.enable_optimization:
representation = self._faster_to_representation(instance)
else:
representation = super(
WithDynamicSerializerMixin,
self
).to_representation(instance)
if settings.ENABLE_LINKS:
# TODO: Make this function configurable to support other
# formats like JSON API link objects.
representation = merge_link_object(
self, representation, instance
)
if self.debug:
representation['_meta'] = {
'id': instance.pk,
'type': self.get_plural_name(),
}
# tag the representation with the serializer and instance
return tag_dict(
representation,
serializer=self,
instance=instance,
embed=self.embed
)
def to_representation(self, instance):
"""Modified to_representation method. Optionally may cache objects.
Arguments:
instance: A model instance or data object.
Returns:
Instance ID if the serializer is meant to represent its ID.
Otherwise, a tagged data dict representation.
"""
if self.id_only():
if self._get_hash_ids():
return external_id_from_model_and_internal_id(
self.get_model(), instance.pk
)
return instance.pk
pk = getattr(instance, 'pk', None)
if not settings.ENABLE_SERIALIZER_OBJECT_CACHE or pk is None:
return self._to_representation(instance)
else:
if pk not in self.obj_cache:
self.obj_cache[pk] = self._to_representation(instance)
return self.obj_cache[pk]
def to_internal_value(self, data):
value = super(WithDynamicSerializerMixin, self).to_internal_value(data)
id_attr = getattr(self.Meta, 'update_lookup_field', 'id')
request_method = self.get_request_method()
# Add update_lookup_field field back to validated data
# since super by default strips out read-only fields
# hence id will no longer be present in validated_data.
if all(
(
isinstance(self.root, DynamicListSerializer),
id_attr,
request_method in ('PUT', 'PATCH'),
)
):
id_field = self.fields[id_attr]
id_value = id_field.get_value(data)
value[id_attr] = id_value
return value
def save(self, *args, **kwargs):
"""Serializer save that address prefetch issues."""
update = getattr(self, 'instance', None) is not None
instance = super(
WithDynamicSerializerMixin,
self
).save(
*args,
**kwargs
)
view = self._context.get('view')
if view and update:
if int(DRF_VERSION[0]) <= 3 and int(DRF_VERSION[1]) < 5:
# Reload the object on update
# to get around prefetch cache issues
# Fixed in DRF in 3.5.0
instance = self.instance = view.get_object()
return instance
def id_only(self):
"""Whether the serializer should return an ID instead of an object.
Returns:
True if and only if `request_fields` is True.
"""
return self.dynamic and self.request_fields is True
@resettable_cached_property
def data(self):
if not hasattr(self, '_processed_data'):
data = super(WithDynamicSerializerMixin, self).data
data = SideloadingProcessor(
self, data
).data if self.envelope else data
processed_data = ReturnDict(data, serializer=self)
self._processed_data = post_process(processed_data)
return self._processed_data
|
class WithDynamicSerializerMixin(
CacheableFieldMixin,
WithResourceKeyMixin,
DynamicSerializerBase
):
'''Base class for DREST serializers.
This class provides support for dynamic field inclusions/exclusions.
Like DRF, DREST serializers support a few Meta class options:
- model - class
- name - string
- plural_name - string
- defer_many_relations - bool
- hash_ids - bool
- fields - list of strings
- deferred_fields - list of strings
- immutable_fields - list of strings
- read_only_fields - list of strings
- untrimmed_fields - list of strings
'''
def __new__(cls, *args, **kwargs):
'''
Custom constructor that sets the ListSerializer to
DynamicListSerializer to avoid re-evaluating querysets.
Addresses DRF 3.1.0 bug:
https://github.com/tomchristie/django-rest-framework/issues/2704
'''
pass
def __init__(
self,
instance=None,
data=fields.empty,
only_fields=None,
include_fields=None,
exclude_fields=None,
request_fields=None,
sideloading=None,
debug=False,
dynamic=True,
embed=False,
envelope=False,
**kwargs
):
'''
Custom initializer that builds `request_fields`.
Arguments:
instance: Initial instance, used by updates.
data: Initial data, used by updates / creates.
only_fields: List of field names to render.
include_fields: List of field names to include.
exclude_fields: List of field names to exclude.
request_fields: Map of field names that supports
nested inclusions / exclusions.
embed: If True, embed the current representation.
If False, sideload the current representation.
sideloading: If True, force sideloading for all descendents.
If False, force embedding for all descendents.
If None (default), respect descendents' embed parameters.
dynamic: If False, disable inclusion / exclusion features.
envelope: If True, wrap `.data` in an envelope.
If False, do not use an envelope.
'''
pass
def _dynamic_init(self, only_fields, include_fields, exclude_fields):
'''
Modifies `request_fields` via higher-level dynamic field interfaces.
Arguments:
only_fields: List of field names to render.
All other fields will be deferred (respects sideloads).
include_fields: List of field names to include.
Adds to default field set, (respects sideloads).
`*` means include all fields.
exclude_fields: List of field names to exclude.
Removes from default field set. If set to '*', all fields are
removed, except for ones that are explicitly included.
'''
pass
@classmethod
def get_model(cls):
'''Get the model, if the serializer has one.
Model serializers should implement this method.
'''
pass
@classmethod
def get_name(cls):
'''Get the serializer name.
The name can be defined on the Meta class or will be generated
automatically from the model name.
'''
pass
@classmethod
def get_plural_name(cls):
'''Get the serializer's plural name.
The plural name may be defined on the Meta class.
If the plural name is not defined,
the pluralized form of the name will be returned.
'''
pass
def get_request_attribute(self, attribute, default=None):
pass
def get_request_method(self):
pass
@resettable_cached_property
def _all_fields(self):
'''Returns the entire serializer field set.
Does not respect dynamic field inclusions/exclusions.
'''
pass
def get_all_fields(self):
pass
def _get_flagged_field_names(self, fields, attr, meta_attr=None):
pass
def _get_deferred_field_names(self, fields):
pass
def flag_fields(self, all_fields, fields_to_flag, attr, value):
pass
def get_fields(self):
'''Returns the serializer's field set.
If `dynamic` is True, respects field inclusions/exlcusions.
Otherwise, reverts back to standard DRF behavior.
'''
pass
def is_field_sideloaded(self, field_name):
pass
def get_link_fields(self):
pass
@resettable_cached_property
def _link_fields(self):
'''Construct dict of name:field for linkable fields.'''
pass
@cached_property
def _readable_fields(self):
pass
@cached_property
def _readable_id_fields(self):
pass
def _get_hash_ids(self):
'''
Check whether ids should be hashed or not.
Determined by the hash_ids boolean Meta field.
Defaults to False.
Returns:
Boolean.
'''
pass
def _faster_to_representation(self, instance):
'''Modified to_representation with optimizations.
1) Returns a plain old dict as opposed to OrderedDict.
(Constructing ordered dict is ~100x slower than `{}`.)
2) Ensure we use a cached list of fields
(this optimization exists in DRF 3.2 but not 3.1)
Arguments:
instance: a model instance or data object
Returns:
Dict of primitive datatypes.
'''
pass
@resettable_cached_property
def obj_cache(self):
pass
def _to_representation(self, instance):
'''Uncached `to_representation`.'''
pass
def to_representation(self, instance):
'''Modified to_representation method. Optionally may cache objects.
Arguments:
instance: A model instance or data object.
Returns:
Instance ID if the serializer is meant to represent its ID.
Otherwise, a tagged data dict representation.
'''
pass
def to_internal_value(self, data):
pass
def save(self, *args, **kwargs):
'''Serializer save that address prefetch issues.'''
pass
def id_only(self):
'''Whether the serializer should return an ID instead of an object.
Returns:
True if and only if `request_fields` is True.
'''
pass
@resettable_cached_property
def data(self):
pass
| 38 | 16 | 20 | 2 | 13 | 5 | 3 | 0.37 | 3 | 12 | 5 | 2 | 25 | 9 | 28 | 31 | 622 | 88 | 392 | 112 | 336 | 144 | 225 | 82 | 196 | 10 | 2 | 5 | 88 |
4,307 | AltSchool/dynamic-rest | AltSchool_dynamic-rest/tests/test_meta.py | tests.test_meta.TestMeta |
class TestMeta(TestCase):
def test_get_remote_model(self):
tests = [
(Location, 'user_set', User),
(User, 'location', Location),
(User, 'profile', Profile),
(User, 'groups', Group),
(Group, 'users', User),
(Profile, 'user', User),
]
for model, field_name, expected in tests:
remote_model = get_remote_model(
get_model_field(model, field_name)
)
self.assertEqual(
expected,
remote_model,
"For %s.%s expected %s got %s" % (
model,
field_name,
expected,
remote_model
)
)
def test_model_field_and_type(self):
tests = [
(Location, 'user_set', 'm2o'),
(User, 'location', 'fk'),
(User, 'profile', 'o2or'),
(User, 'groups', 'm2m'),
(Group, 'users', 'm2m'),
(Profile, 'user', 'o2o'),
(User, 'id', '')
]
for model, field_name, expected in tests:
field, typestr = get_model_field_and_type(model, field_name)
self.assertEqual(
expected,
typestr,
"%s.%s should be '%s', got '%s'" % (
model,
field_name,
expected,
typestr,
)
)
def test_reverse_m2m_field_name(self):
m2m_field = get_model_field(User, 'groups')
reverse = reverse_m2m_field_name(m2m_field)
self.assertEqual('users', reverse)
|
class TestMeta(TestCase):
def test_get_remote_model(self):
pass
def test_model_field_and_type(self):
pass
def test_reverse_m2m_field_name(self):
pass
| 4 | 0 | 17 | 1 | 16 | 0 | 2 | 0 | 1 | 4 | 4 | 0 | 3 | 0 | 3 | 3 | 56 | 6 | 50 | 12 | 46 | 0 | 15 | 12 | 11 | 2 | 1 | 1 | 5 |
4,308 | AltSchool/dynamic-rest | AltSchool_dynamic-rest/dynamic_rest/fields/fields.py | dynamic_rest.fields.fields.CountField |
class CountField(DynamicComputedField):
"""
Computed field that counts the number of elements in another field.
"""
def __init__(self, serializer_source, *args, **kwargs):
"""
Arguments:
serializer_source: A serializer field.
unique: Whether or not to perform a count of distinct elements.
"""
self.field_type = int
# Use `serializer_source`, which indicates a field at the API level,
# instead of `source`, which indicates a field at the model level.
self.serializer_source = serializer_source
# Set `source` to an empty value rather than the field name to avoid
# an attempt to look up this field.
kwargs['source'] = ''
self.unique = kwargs.pop('unique', True)
return super(CountField, self).__init__(*args, **kwargs)
def get_attribute(self, obj):
source = self.serializer_source
if source not in self.parent.fields:
return None
value = self.parent.fields[source].get_attribute(obj)
data = self.parent.fields[source].to_representation(value)
# How to count None is undefined... let the consumer decide.
if data is None:
return None
# Check data type. Technically len() works on dicts, strings, but
# since this is a "count" field, we'll limit to list, set, tuple.
if not isinstance(data, (list, set, tuple)):
raise TypeError(
"'%s' is %s. Must be list, set or tuple to be countable." % (
source, type(data)
)
)
if self.unique:
# Try to create unique set. This may fail if `data` contains
# non-hashable elements (like dicts).
try:
data = set(data)
except TypeError:
pass
return len(data)
|
class CountField(DynamicComputedField):
'''
Computed field that counts the number of elements in another field.
'''
def __init__(self, serializer_source, *args, **kwargs):
'''
Arguments:
serializer_source: A serializer field.
unique: Whether or not to perform a count of distinct elements.
'''
pass
def get_attribute(self, obj):
pass
| 3 | 2 | 22 | 2 | 13 | 7 | 4 | 0.63 | 1 | 7 | 0 | 0 | 2 | 3 | 2 | 7 | 51 | 7 | 27 | 9 | 24 | 17 | 23 | 9 | 20 | 6 | 4 | 2 | 7 |
4,309 | AltSchool/dynamic-rest | AltSchool_dynamic-rest/tests/test_fields.py | tests.test_fields.FieldsTestCase |
class FieldsTestCase(TestCase):
def test_dynamic_hash_id_field_with_model_parameter(self):
class DogModelTestSerializer(serializers.ModelSerializer):
"""
A custom model serializer simply for testing purposes.
"""
id = DynamicHashIdField(model=Dog)
class Meta:
model = Dog
fields = ["id", "name", "fur_color", "origin"]
dog = Dog.objects.create(
name="Kazan",
fur_color="brown",
origin="Abuelos")
serializer = DogModelTestSerializer(dog)
self.assertEqual(
serializer.data["id"],
external_id_from_model_and_internal_id(
Dog,
dog.id))
def test_dynamic_hash_id_field_without_model_parameter(self):
class DogModelTestSerializer(serializers.ModelSerializer):
"""
A custom model serializer simply for testing purposes.
"""
id = DynamicHashIdField()
class Meta:
model = Dog
fields = ["id", "name", "fur_color", "origin"]
dog = Dog.objects.create(
name="Kazan",
fur_color="brown",
origin="Abuelos")
serializer = DogModelTestSerializer(dog)
self.assertEqual(
serializer.data["id"],
external_id_from_model_and_internal_id(
Dog,
dog.id))
|
class FieldsTestCase(TestCase):
def test_dynamic_hash_id_field_with_model_parameter(self):
pass
class DogModelTestSerializer(serializers.ModelSerializer):
'''
A custom model serializer simply for testing purposes.
'''
class Meta:
def test_dynamic_hash_id_field_without_model_parameter(self):
pass
class DogModelTestSerializer(serializers.ModelSerializer):
'''
A custom model serializer simply for testing purposes.
'''
class Meta:
| 7 | 2 | 23 | 4 | 16 | 3 | 1 | 0.18 | 1 | 3 | 3 | 0 | 2 | 0 | 2 | 2 | 48 | 9 | 33 | 15 | 26 | 6 | 19 | 15 | 12 | 1 | 1 | 0 | 2 |
4,310 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/serializers.py | tests.serializers.CountrySerializer.Meta |
class Meta:
model = Country
fields = ('id', 'name', 'short_name')
deferred_fields = ('name', 'short_name')
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
4,311 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/serializers.py | tests.serializers.CountsSerializer.Meta |
class Meta:
name = 'counts'
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 0 | 0 | 0 |
4,312 | AltSchool/dynamic-rest | AltSchool_dynamic-rest/tests/test_generic.py | tests.test_generic.TestGenericRelationFieldAPI |
class TestGenericRelationFieldAPI(APITestCase):
def setUp(self):
self.fixture = create_fixture()
f = self.fixture
f.users[0].favorite_pet = f.cats[0]
f.users[0].save()
f.users[1].favorite_pet = f.cats[1]
f.users[1].save()
f.users[2].favorite_pet = f.dogs[1]
f.users[2].save()
def test_id_only(self):
"""
In the id_only case, the favorite_pet field looks like:
```
"favorite_animal" : {
"type": "cats",
"id": "1"
}
```
"""
url = (
'/users/?include[]=favorite_pet'
'&filter{favorite_pet_id.isnull}=false'
)
response = self.client.get(url)
self.assertEqual(200, response.status_code)
content = json.loads(response.content.decode('utf-8'))
self.assertTrue(
all(
[_['favorite_pet'] for _ in content['users']]
)
)
self.assertFalse('cats' in content)
self.assertFalse('dogs' in content)
self.assertTrue('type' in content['users'][0]['favorite_pet'])
self.assertTrue('id' in content['users'][0]['favorite_pet'])
def test_sideload(self):
url = (
'/users/?include[]=favorite_pet.'
'&filter{favorite_pet_id.isnull}=false'
)
response = self.client.get(url)
self.assertEqual(200, response.status_code)
content = json.loads(response.content.decode('utf-8'))
self.assertTrue(
all(
[_['favorite_pet'] for _ in content['users']]
)
)
self.assertTrue('cats' in content)
self.assertEqual(2, len(content['cats']))
self.assertTrue('dogs' in content)
self.assertEqual(1, len(content['dogs']))
self.assertTrue('type' in content['users'][0]['favorite_pet'])
self.assertTrue('id' in content['users'][0]['favorite_pet'])
def test_multi_sideload_include(self):
url = (
'/cars/1/?include[]=name&include[]=country.short_name'
'&include[]=parts.name&include[]=parts.country.name'
)
response = self.client.get(url)
self.assertEqual(200, response.status_code)
content = json.loads(response.content.decode('utf-8'))
self.assertTrue('countries' in content)
country = None
for _ in content['countries']:
if _['id'] == 1:
country = _
self.assertTrue(country)
self.assertTrue('short_name' in country)
self.assertTrue('name' in country)
def test_query_counts(self):
# NOTE: Django doesn't seem to prefetch ContentType objects
# themselves, and rather caches internally. That means
# this call could do 5 SQL queries if the Cat and Dog
# ContentType objects haven't been cached.
with self.assertNumQueries(3):
url = (
'/users/?include[]=favorite_pet.'
'&filter{favorite_pet_id.isnull}=false'
)
response = self.client.get(url)
self.assertEqual(200, response.status_code)
with self.assertNumQueries(3):
url = '/users/?include[]=favorite_pet.'
response = self.client.get(url)
self.assertEqual(200, response.status_code)
def test_unknown_resource(self):
"""Test case where polymorhpic relation pulls in an object for
which there is no known canonical serializer.
"""
zork = Zebra.objects.create(
name='Zork',
origin='San Francisco Zoo'
)
user = self.fixture.users[0]
user.favorite_pet = zork
user.save()
self.assertIsNone(DynamicRouter.get_canonical_serializer(Zebra))
url = '/users/%s/?include[]=favorite_pet' % user.pk
response = self.client.get(url)
self.assertEqual(200, response.status_code)
content = json.loads(response.content.decode('utf-8'))
self.assertTrue('user' in content)
self.assertFalse('zebras' in content) # Not sideloaded
user_obj = content['user']
self.assertTrue('favorite_pet' in user_obj)
self.assertEqual('Zebra', user_obj['favorite_pet']['type'])
self.assertEqual(zork.pk, user_obj['favorite_pet']['id'])
def test_dgrf_with_requires_raises(self):
with self.assertRaises(Exception):
DynamicGenericRelationField(requires=['foo', 'bar'])
def test_if_field_inclusion_then_error(self):
url = (
'/users/?include[]=favorite_pet.name'
'&filter{favorite_pet_id.isnull}=false'
)
response = self.client.get(url)
self.assertEqual(400, response.status_code)
def test_patch_resource(self):
"""
Test that patching a content-type field updates the underlying
relationship
"""
user = self.fixture.users[0]
url = '/users/%s/?include[]=favorite_pet.' % user.pk
response = self.client.patch(
url,
json.dumps({
'id': user.id,
'favorite_pet': {
'type': 'dog',
'id': 1
}
}),
content_type='application/json'
)
self.assertEqual(200, response.status_code)
content = json.loads(response.content.decode('utf-8'))
self.assertTrue('user' in content)
self.assertFalse('cats' in content)
self.assertTrue('dogs' in content)
self.assertEqual(1, content['dogs'][0]['id'])
def test_non_deferred_generic_field(self):
class FooUserSerializer(UserSerializer):
class Meta:
model = User
name = 'user'
fields = (
'id',
'favorite_pet',
)
user = User.objects.filter(
favorite_pet_id__isnull=False
).prefetch_related(
'favorite_pet'
).first()
data = FooUserSerializer(user, envelope=True).data['user']
self.assertIsNotNone(data)
self.assertTrue('favorite_pet' in data)
self.assertTrue(isinstance(data['favorite_pet'], dict))
self.assertEqual(
set(['id', 'type']),
set(data['favorite_pet'].keys())
)
|
class TestGenericRelationFieldAPI(APITestCase):
def setUp(self):
pass
def test_id_only(self):
'''
In the id_only case, the favorite_pet field looks like:
```
"favorite_animal" : {
"type": "cats",
"id": "1"
}
```
'''
pass
def test_sideload(self):
pass
def test_multi_sideload_include(self):
pass
def test_query_counts(self):
pass
def test_unknown_resource(self):
'''Test case where polymorhpic relation pulls in an object for
which there is no known canonical serializer.
'''
pass
def test_dgrf_with_requires_raises(self):
pass
def test_if_field_inclusion_then_error(self):
pass
def test_patch_resource(self):
'''
Test that patching a content-type field updates the underlying
relationship
'''
pass
def test_non_deferred_generic_field(self):
pass
class FooUserSerializer(UserSerializer):
class Meta:
| 13 | 3 | 18 | 1 | 14 | 2 | 1 | 0.14 | 1 | 8 | 5 | 0 | 10 | 1 | 10 | 10 | 189 | 24 | 145 | 45 | 132 | 21 | 99 | 45 | 86 | 3 | 1 | 2 | 12 |
4,313 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/serializers.py | tests.serializers.DogSerializer.Meta |
class Meta:
model = Dog
fields = ('id', 'name', 'origin', 'fur', 'is_red')
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 0 | 0 | 0 |
4,314 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/serializers.py | tests.serializers.GroupSerializer.Meta |
class Meta:
model = Group
name = 'group'
fields = (
'id',
'name',
'permissions',
'members',
'users',
'loc1users',
'loc1usersLambda'
)
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 | 0 | 12 | 4 | 11 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
4,315 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/serializers.py | tests.serializers.HorseSerializer.Meta |
class Meta:
model = Horse
name = 'horse'
fields = (
'id',
'name',
'origin',
)
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 0 | 8 | 4 | 7 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
4,316 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/serializers.py | tests.serializers.LocationGroupSerializer.Meta |
class Meta:
name = 'locationgroup'
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 0 | 0 | 0 |
4,317 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/serializers.py | tests.serializers.LocationSerializer.Meta |
class Meta:
defer_many_relations = False
model = Location
name = 'location'
fields = (
'id', 'name', 'users', 'user_count', 'address',
'cats', 'friendly_cats', 'bad_cats'
)
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 0 | 8 | 5 | 7 | 0 | 5 | 5 | 4 | 0 | 0 | 0 | 0 |
4,318 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/serializers.py | tests.serializers.NestedEphemeralSerializer.Meta |
class Meta:
name = 'nested'
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 0 | 0 | 0 |
4,319 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/serializers.py | tests.serializers.PartSerializer.Meta |
class Meta:
model = Part
fields = ('id', 'name', 'country')
deferred_fields = ('name', 'country')
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
4,320 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/serializers.py | tests.serializers.PermissionSerializer.Meta |
class Meta:
defer_many_relations = True
model = Permission
name = 'permission'
fields = ('id', 'name', 'code', 'users', 'groups')
deferred_fields = ('code',)
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 | 0 | 6 | 6 | 5 | 0 | 6 | 6 | 5 | 0 | 0 | 0 | 0 |
4,321 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/serializers.py | tests.serializers.CatSerializer.Meta |
class Meta:
model = Cat
name = 'cat'
fields = ('id', 'name', 'home', 'backup_home', 'foobar', 'parent')
deferred_fields = ('home', 'backup_home', 'foobar', 'parent')
immutable_fields = ('name',)
untrimmed_fields = ('name',)
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | 0 | 7 | 7 | 6 | 0 | 7 | 7 | 6 | 0 | 0 | 0 | 0 |
4,322 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/serializers.py | tests.serializers.ProfileSerializer.Meta |
class Meta:
model = Profile
name = 'profile'
fields = (
'user',
'display_name',
'thumbnail_url',
'user_location_name',
)
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 0 | 9 | 4 | 8 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
4,323 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/serializers.py | tests.serializers.UserSerializer.Meta |
class Meta:
model = User
name = 'user'
fields = (
'id',
'name',
'permissions',
'groups',
'location',
'last_name',
'display_name',
'thumbnail_url',
'number_of_cats',
'profile',
'date_of_birth',
'favorite_pet_id',
'favorite_pet',
'is_dead',
)
deferred_fields = (
'last_name',
'date_of_birth',
'display_name',
'profile',
'thumbnail_url',
'favorite_pet_id',
'favorite_pet',
'is_dead',
)
read_only_fields = ('profile',)
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 30 | 0 | 30 | 6 | 29 | 0 | 6 | 6 | 5 | 0 | 0 | 0 | 0 |
4,324 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/serializers.py | tests.serializers.ZebraSerializer.Meta |
class Meta:
model = Zebra
name = 'zebra'
fields = (
'id',
'name',
'origin',
)
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 0 | 8 | 4 | 7 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
4,325 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/test_api.py | tests.test_api.TestLinks |
class TestLinks(APITestCase):
def setUp(self):
self.fixture = create_fixture()
home = Location.objects.create()
hunting_ground = Location.objects.create()
self.cat = Cat.objects.create(
name='foo',
home=home,
backup_home=hunting_ground
)
self.cat.hunting_grounds.add(hunting_ground)
def test_deferred_relations_have_links(self):
r = self.client.get('/v2/cats/1/')
self.assertEqual(200, r.status_code)
content = json.loads(r.content.decode('utf-8'))
cat = content['cat']
self.assertTrue('links' in cat)
# 'home' has link=None set so should not have a link object
self.assertTrue('home' not in cat['links'])
# test for default link (auto-generated relation endpoint)
# Note that the pluralized name is used rather than the full prefix.
self.assertEqual(cat['links']['foobar'], '/v2/cats/1/foobar/')
# test for dynamically generated link URL
cat1 = Cat.objects.get(pk=1)
self.assertEqual(
cat['links']['backup_home'],
'/locations/%s/?include[]=address' % cat1.backup_home.pk
)
@override_settings(
DYNAMIC_REST={
'ENABLE_HOST_RELATIVE_LINKS': False
}
)
def test_relative_links(self):
r = self.client.get('/v2/cats/1/')
self.assertEqual(200, r.status_code)
content = json.loads(r.content.decode('utf-8'))
cat = content['cat']
self.assertTrue('links' in cat)
# test that links urls become resource-relative urls when
# host-relative urls are turned off.
self.assertEqual(cat['links']['foobar'], 'foobar/')
def test_including_empty_relation_hides_link(self):
r = self.client.get('/v2/cats/1/?include[]=foobar')
self.assertEqual(200, r.status_code)
content = json.loads(r.content.decode('utf-8'))
# 'foobar' is included but empty, so don't return a link
cat = content['cat']
self.assertFalse(cat['foobar'])
self.assertFalse('foobar' in cat['links'])
def test_including_non_empty_many_relation_has_link(self):
r = self.client.get('/v2/cats/%s/?include[]=foobar' % self.cat.pk)
self.assertEqual(200, r.status_code)
content = json.loads(r.content.decode('utf-8'))
cat = content['cat']
self.assertTrue('foobar' in cat)
self.assertTrue('foobar' in cat['links'])
def test_no_links_for_included_single_relations(self):
url = '/v2/cats/%s/?include[]=home' % self.cat.pk
r = self.client.get(url)
self.assertEqual(200, r.status_code)
content = json.loads(r.content.decode('utf-8'))
cat = content['cat']
self.assertTrue('home' in cat)
self.assertFalse('home' in cat['links'])
def test_sideloading_relation_hides_link(self):
url = '/v2/cats/%s/?include[]=foobar.' % self.cat.pk
r = self.client.get(url)
self.assertEqual(200, r.status_code)
content = json.loads(r.content.decode('utf-8'))
cat = content['cat']
self.assertTrue('foobar' in cat)
self.assertTrue('locations' in content) # check for sideload
self.assertFalse('foobar' in cat['links']) # no link
def test_one_to_one_dne(self):
user = User.objects.create(name='foo', last_name='bar')
url = '/users/%s/profile/' % user.pk
r = self.client.get(url)
self.assertEqual(200, r.status_code)
# Check error message to differentiate from a routing error 404
content = json.loads(r.content.decode('utf-8'))
self.assertEqual({}, content)
def test_ephemeral_object_link(self):
class FakeCountObject(object):
pk = 1
values = []
class FakeNested(object):
value_count = FakeCountObject()
szr = NestedEphemeralSerializer()
data = szr.to_representation(FakeNested())
self.assertEqual(data, {'value_count': 1}, data)
def test_meta_read_only_relation_field(self):
"""Test for making a DynamicRelationField read-only by adding
it to Meta.read_only_fields.
"""
data = {
'name': 'test ro',
'last_name': 'last',
'location': 1,
'profile': 'bogus value', # Read only relation field
}
response = self.client.post(
'/users/', json.dumps(data),
content_type='application/json'
)
# Note: if 'profile' isn't getting ignored, this will return
# a 404 since a matching Profile object isn't found.
self.assertEqual(201, response.status_code)
def test_no_links_when_excluded(self):
r = self.client.get('/v2/cats/1/?exclude_links')
self.assertEqual(200, r.status_code)
content = json.loads(r.content.decode('utf-8'))
cat = content['cat']
self.assertFalse('links' in cat)
@override_settings(
DYNAMIC_REST={
'ENABLE_LINKS': True,
'DEFER_MANY_RELATIONS': True,
}
)
def test_auto_deferral(self):
perm = Permission.objects.create(
name='test',
code=1
)
perm.groups.add(self.fixture.groups[0])
# Check serializers
fields = PermissionSerializer().get_all_fields()
self.assertIs(fields['users'].deferred, False)
self.assertIs(fields['groups'].deferred, None)
url = '/permissions/%s/' % perm.pk
r = self.client.get(url)
data = json.loads(r.content.decode('utf-8'))
self.assertFalse('groups' in data['permission'])
# users shouldn't be deferred because `deferred=False` is
# explicitly set on the field.
self.assertTrue('users' in data['permission'])
|
class TestLinks(APITestCase):
def setUp(self):
pass
def test_deferred_relations_have_links(self):
pass
@override_settings(
DYNAMIC_REST={
'ENABLE_HOST_RELATIVE_LINKS': False
}
)
def test_relative_links(self):
pass
def test_including_empty_relation_hides_link(self):
pass
def test_including_non_empty_many_relation_has_link(self):
pass
def test_no_links_for_included_single_relations(self):
pass
def test_sideloading_relation_hides_link(self):
pass
def test_one_to_one_dne(self):
pass
def test_ephemeral_object_link(self):
pass
class FakeCountObject(object):
class FakeNested(object):
def test_meta_read_only_relation_field(self):
'''Test for making a DynamicRelationField read-only by adding
it to Meta.read_only_fields.
'''
pass
def test_no_links_when_excluded(self):
pass
@override_settings(
DYNAMIC_REST={
'ENABLE_LINKS': True,
'DEFER_MANY_RELATIONS': True,
}
)
def test_auto_deferral(self):
pass
| 17 | 1 | 12 | 2 | 9 | 2 | 1 | 0.16 | 1 | 7 | 7 | 0 | 12 | 2 | 12 | 12 | 167 | 30 | 121 | 70 | 95 | 19 | 92 | 59 | 77 | 1 | 1 | 0 | 12 |
4,326 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/test_api.py | tests.test_api.TestLinks.test_ephemeral_object_link.FakeCountObject |
class FakeCountObject(object):
pk = 1
values = []
|
class FakeCountObject(object):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
4,327 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/test_api.py | tests.test_api.TestLinks.test_ephemeral_object_link.FakeNested |
class FakeNested(object):
value_count = FakeCountObject()
|
class FakeNested(object):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 1 | 0 | 0 |
4,328 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/test_fields.py | tests.test_fields.FieldsTestCase.test_dynamic_hash_id_field_without_model_parameter.DogModelTestSerializer.Meta |
class Meta:
model = Dog
fields = ["id", "name", "fur_color", "origin"]
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 0 | 0 | 0 |
4,329 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/test_generic.py | tests.test_generic.TestGenericRelationFieldAPI.test_non_deferred_generic_field.FooUserSerializer.Meta |
class Meta:
model = User
name = 'user'
fields = (
'id',
'favorite_pet',
)
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | 0 | 7 | 4 | 6 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
4,330 | AltSchool/dynamic-rest | /Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/test_serializers.py | tests.test_serializers.TestDynamicSerializer |
class TestDynamicSerializer(TestCase):
def setUp(self):
self.fixture = create_fixture()
self.maxDiff = None
def test_data_without_envelope(self):
serializer = UserSerializer(
self.fixture.users,
many=True,
)
self.assertEqual(serializer.data, [
OrderedDict(
[('id', 1), ('name', '0'), ('location', 1)]),
OrderedDict(
[('id', 2), ('name', '1'), ('location', 1)]),
OrderedDict(
[('id', 3), ('name', '2'), ('location', 2)]),
OrderedDict(
[('id', 4), ('name', '3'), ('location', 3)])
])
def test_data_with_envelope(self):
serializer = UserSerializer(
self.fixture.users,
many=True,
envelope=True
)
self.assertEqual(serializer.data, {
'users': [
OrderedDict(
[('id', 1), ('name', '0'), ('location', 1)]),
OrderedDict(
[('id', 2), ('name', '1'), ('location', 1)]),
OrderedDict(
[('id', 3), ('name', '2'), ('location', 2)]),
OrderedDict(
[('id', 4), ('name', '3'), ('location', 3)])
]
})
def test_data_with_included_field(self):
request_fields = {
'last_name': True
}
serializer = UserSerializer(
self.fixture.users,
many=True,
sideload=True, # pending deprecation 1.6
request_fields=request_fields,
)
self.assertEqual(serializer.data, {
'users': [
OrderedDict(
[('id', 1), ('name', '0'),
('location', 1), ('last_name', '0')]),
OrderedDict(
[('id', 2), ('name', '1'),
('location', 1), ('last_name', '1')]),
OrderedDict(
[('id', 3), ('name', '2'),
('location', 2), ('last_name', '2')]),
OrderedDict(
[('id', 4), ('name', '3'),
('location', 3), ('last_name', '3')])
]
})
def test_data_with_excluded_field(self):
request_fields = {
'location': False
}
serializer = UserSerializer(
self.fixture.users,
many=True,
envelope=True,
request_fields=request_fields,
)
self.assertEqual(serializer.data, {
'users': [
OrderedDict(
[('id', 1), ('name', '0')]),
OrderedDict(
[('id', 2), ('name', '1')]),
OrderedDict(
[('id', 3), ('name', '2')]),
OrderedDict(
[('id', 4), ('name', '3')])
]
})
def test_data_with_included_has_one(self):
request_fields = {
'location': {}
}
serializer = UserSerializer(
self.fixture.users,
many=True,
envelope=True,
request_fields=request_fields,
)
self.assertEqual(serializer.data, {
'locations': [{
'id': 1,
'name': '0'
}, {
'id': 2,
'name': '1'
}, {
'id': 3,
'name': '2'
}],
'users': [{
'location': 1,
'id': 1,
'name': '0'
}, {
'location': 1,
'id': 2,
'name': '1'
}, {
'location': 2,
'id': 3,
'name': '2'
}, {
'location': 3,
'id': 4,
'name': '3'
}]
})
serializer = UserSerializer(
self.fixture.users[0],
envelope=True,
request_fields=request_fields,
)
self.assertEqual(serializer.data, {
'locations': [{
'id': 1,
'name': '0'
}],
'user': {
'location': 1,
'id': 1,
'name': '0'
}
})
def test_data_with_included_has_many(self):
request_fields = {
'groups': {}
}
expected = {
'users': [
{
'id': 1,
'name': '0',
'groups': [
1,
2
],
'location': 1
},
{
'id': 2,
'name': '1',
'groups': [
1,
2
],
'location': 1
},
{
'id': 3,
'name': '2',
'groups': [
1,
2
],
'location': 2
},
{
'id': 4,
'name': '3',
'groups': [
1,
2
],
'location': 3
}
],
'groups': [
{
'id': 1,
'name': '0'
},
{
'id': 2,
'name': '1'
}
]
}
serializer = UserSerializer(
self.fixture.users,
many=True,
envelope=True,
request_fields=request_fields,
)
self.assertEqual(serializer.data, expected)
request_fields = {
'members': {}
}
expected = {
'users': [
{
'id': 1,
'name': '0',
'location': 1
},
{
'id': 2,
'name': '1',
'location': 1
},
{
'id': 3,
'name': '2',
'location': 2
},
{
'id': 4,
'name': '3',
'location': 3
}
],
'groups': [
{
'id': 1,
'name': '0',
'members': [
1,
2,
3,
4
]
},
{
'id': 2,
'name': '1',
'members': [
1,
2,
3,
4
]
}
]
}
serializer = GroupSerializer(
self.fixture.groups,
many=True,
envelope=True,
request_fields=request_fields,
)
self.assertEqual(serializer.data, expected)
def test_data_with_nested_include(self):
request_fields = {
'groups': {
'permissions': True
}
}
serializer = UserSerializer(
self.fixture.users,
many=True,
envelope=True,
request_fields=request_fields,
)
expected = {
'users': [
{
'id': 1,
'name': '0',
'groups': [
1,
2
],
'location': 1
},
{
'id': 2,
'name': '1',
'groups': [
1,
2
],
'location': 1
},
{
'id': 3,
'name': '2',
'groups': [
1,
2
],
'location': 2
},
{
'id': 4,
'name': '3',
'groups': [
1,
2
],
'location': 3
}
],
'groups': [
{
'id': 1,
'name': '0',
'permissions': [
1
]
},
{
'id': 2,
'name': '1',
'permissions': [
2
]
}
]
}
self.assertEqual(serializer.data, expected)
def test_data_with_nested_exclude(self):
request_fields = {
'groups': {
'name': False
}
}
serializer = UserSerializer(
self.fixture.users,
many=True,
envelope=True,
request_fields=request_fields,
)
self.assertEqual(serializer.data, {
'groups': [{
'id': 1
}, {
'id': 2
}],
'users': [{
'location': 1,
'id': 1,
'groups': [1, 2],
'name': '0'
}, {
'location': 1,
'id': 2,
'groups': [1, 2],
'name': '1'
}, {
'location': 2,
'id': 3,
'groups': [1, 2],
'name': '2'
}, {
'location': 3,
'id': 4,
'groups': [1, 2],
'name': '3'
}]
})
def test_get_all_fields(self):
s = GroupSerializer()
all_keys1 = six.iterkeys(s.get_all_fields())
f2 = s.fields
all_keys2 = six.iterkeys(s.get_all_fields())
expected = ['id', 'name']
self.assertEqual(list(six.iterkeys(f2)), expected)
self.assertEqual(list(all_keys1), list(all_keys2))
def test_get_fields_with_only_fields(self):
expected = ['id', 'last_name']
serializer = UserSerializer(only_fields=expected)
self.assertEqual(list(six.iterkeys(serializer.fields)), expected)
def test_get_fields_with_only_fields_and_request_fields(self):
expected = ['id', 'permissions']
serializer = UserSerializer(
only_fields=expected,
request_fields={
'permissions': {}
}
)
self.assertEqual(list(six.iterkeys(serializer.fields)), expected)
self.assertEqual(serializer.request_fields['permissions'], {})
def test_get_fields_with_only_fields_and_include_fields(self):
expected = ['id', 'name']
serializer = UserSerializer(
only_fields=expected,
include_fields=['permissions']
)
self.assertEqual(list(six.iterkeys(serializer.fields)), expected)
def test_get_fields_with_include_all(self):
expected = six.iterkeys(UserSerializer().get_all_fields())
serializer = UserSerializer(
include_fields='*'
)
self.assertEqual(list(six.iterkeys(serializer.fields)), list(expected))
def test_get_fields_with_include_all_and_exclude(self):
expected = six.iterkeys(UserSerializer().get_all_fields())
serializer = UserSerializer(
include_fields='*',
exclude_fields=['id']
)
self.assertEqual(list(six.iterkeys(serializer.fields)), list(expected))
def test_get_fields_with_include_fields(self):
include = ['permissions']
expected = set(
six.iterkeys(UserSerializer().get_fields())
) | set(include)
serializer = UserSerializer(
include_fields=include
)
self.assertEqual(set(six.iterkeys(serializer.fields)), expected)
def test_get_fields_with_include_fields_and_request_fields(self):
include = ['permissions']
expected = set(
six.iterkeys(UserSerializer().get_fields())
) | set(include)
serializer = UserSerializer(
include_fields=include,
request_fields={
'permissions': {}
}
)
self.assertEqual(set(six.iterkeys(serializer.fields)), expected)
self.assertEqual(serializer.request_fields['permissions'], {})
def test_get_fields_with_exclude_fields(self):
exclude = ['id']
expected = set(
six.iterkeys(UserSerializer().get_fields())
) - set(exclude)
serializer = UserSerializer(
exclude_fields=exclude,
)
self.assertEqual(set(six.iterkeys(serializer.fields)), expected)
def test_serializer_propagation_consistency(self):
s = CatSerializer(
request_fields={'home': True}
)
        # In versions <= 1.3.7 these would have returned different values.
r1 = s.get_all_fields()['home'].serializer.id_only()
r2 = s.fields['home'].serializer.id_only()
r3 = s.get_all_fields()['home'].serializer.id_only()
self.assertEqual(r1, r2)
self.assertEqual(r2, r3)
@patch.dict('dynamic_rest.processors.POST_PROCESSORS', {})
def test_post_processors(self):
@register_post_processor
def test_post_processor(data):
data['post_processed'] = True
return data
serializer = UserSerializer(
self.fixture.users,
many=True,
envelope=True,
request_fields={'groups': {}},
)
data = serializer.data
self.assertTrue(data.get('post_processed'))
|
class TestDynamicSerializer(TestCase):
def setUp(self):
pass
def test_data_without_envelope(self):
pass
def test_data_with_envelope(self):
pass
def test_data_with_included_field(self):
pass
def test_data_with_excluded_field(self):
pass
def test_data_with_included_has_one(self):
pass
def test_data_with_included_has_many(self):
pass
def test_data_with_nested_include(self):
pass
def test_data_with_nested_exclude(self):
pass
def test_get_all_fields(self):
pass
def test_get_fields_with_only_fields(self):
pass
def test_get_fields_with_only_fields_and_request_fields(self):
pass
def test_get_fields_with_only_fields_and_include_fields(self):
pass
def test_get_fields_with_include_all(self):
pass
def test_get_fields_with_include_all_and_exclude(self):
pass
def test_get_fields_with_include_fields(self):
pass
def test_get_fields_with_include_fields_and_request_fields(self):
pass
def test_get_fields_with_exclude_fields(self):
pass
def test_serializer_propagation_consistency(self):
pass
@patch.dict('dynamic_rest.processors.POST_PROCESSORS', {})
def test_post_processors(self):
pass
    @register_post_processor
    def test_post_processor(data):
        pass
| 24 | 0 | 22 | 0 | 22 | 0 | 1 | 0 | 1 | 6 | 3 | 0 | 20 | 2 | 20 | 20 | 490 | 27 | 462 | 72 | 438 | 2 | 101 | 70 | 79 | 1 | 1 | 0 | 21 |
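The only_fields/include_fields precedence exercised by the tests above condenses into a short usage sketch; the construction below mirrors test_get_fields_with_only_fields_and_include_fields and is illustrative, not part of the dataset record.

from tests.serializers import UserSerializer

# only_fields is a hard whitelist: include_fields entries outside it are
# ignored, exactly as the test above asserts.
s = UserSerializer(
    only_fields=['id', 'name'],
    include_fields=['permissions'],  # dropped because of only_fields
)
assert list(s.fields) == ['id', 'name']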
4,331 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/test_serializers.py
|
tests.test_serializers.TestSerializerCaching
|
class TestSerializerCaching(TestCase):
def setUp(self):
self.serializer = CatSerializer(
request_fields={'home': {}, 'backup_home': True}
)
def test_get_all_fields(self):
all_fields = self.serializer.get_all_fields()
# These are two different instances of the field object
# because get_all_fields() does a copy().
home_field_1 = self.serializer.fields['home']
home_field_2 = all_fields['home']
'''
# Expected with fields cache
self.assertNotEqual(
home_field_1,
home_field_2,
'Expected different field instances, got same.'
)
'''
self.assertEqual(
home_field_1.serializer,
home_field_2.serializer,
'Expected same serializer instance, got different.'
)
def test_serializer_args_busts_cache(self):
home_field = self.serializer.fields['home']
self.assertIsNot(
home_field.get_serializer(),
home_field.get_serializer('foo'),
(
                'Passing an arg to get_serializer should construct a new'
                ' serializer. Instead got the same one.'
)
)
def test_same_serializer_class_different_fields(self):
# These two use the same serializer class, but are different
# fields, so they should use different serializer instances.
home_field = self.serializer.fields['home']
backup_home_field = self.serializer.fields['backup_home']
self.assertIsNot(
home_field.serializer,
backup_home_field.serializer,
(
                'Different fields that use the same serializer should get'
                ' separate serializer instances.'
)
)
def test_different_roots(self):
serializer2 = CatSerializer(
request_fields={'home': {}, 'backup_home': {}}
)
home1 = self.serializer.fields['home']
home2 = serializer2.fields['home']
self.assertIsNot(
home1.serializer,
home2.serializer,
'Different root serializers should yield different instances.'
)
@unittest.skip(
"skipping because DRF's Field.root doesn't have cycle-detection."
)
def test_root_serializer_cycle_busting(self):
s = CatSerializer(
request_fields={'home': {}, 'backup_home': {}}
)
s.parent = s # Create cycle.
self.assertIsNone(s.fields['home'].root_serializer)
def test_root_serializer_trickledown_request_fields(self):
s = CatSerializer(
request_fields=True
)
self.assertIsNotNone(s.get_all_fields()['home'].serializer)
def test_recursive_serializer(self):
s = LocationSerializer(
request_fields={
'cats': {
'parent': {
'parent': True
}
}
}
)
cats_field = s.get_all_fields()['cats']
l1 = cats_field.serializer.child # .child because list
l2 = l1.get_all_fields()['parent'].serializer
l3 = l2.get_all_fields()['parent'].serializer
l4 = l3.get_all_fields()['parent'].serializer
self.assertIsNot(l2, l3)
        # l3 and l4 should be the same cached instance because both have
        # request_fields=True (l3 by inheritance, l4 by default)
self.assertIs(l3, l4)
|
class TestSerializerCaching(TestCase):
def setUp(self):
pass
def test_get_all_fields(self):
pass
def test_serializer_args_busts_cache(self):
pass
def test_same_serializer_class_different_fields(self):
pass
def test_different_roots(self):
pass
@unittest.skip(
"skipping because DRF's Field.root doesn't have cycle-detection."
)
def test_root_serializer_cycle_busting(self):
pass
def test_root_serializer_trickledown_request_fields(self):
pass
def test_recursive_serializer(self):
pass
| 10 | 0 | 13 | 2 | 9 | 2 | 1 | 0.21 | 1 | 2 | 2 | 0 | 8 | 1 | 8 | 8 | 112 | 21 | 77 | 30 | 65 | 16 | 36 | 27 | 27 | 1 | 1 | 0 | 8 |
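A framework-free sketch of the per-field memoization pattern these tests pin down: each field caches its own child serializer, repeated access returns the same instance, and passing arguments bypasses the cache. SketchField and its factory are hypothetical stand-ins, not dynamic-rest API.

class SketchField(object):
    def __init__(self, serializer_factory):
        self._factory = serializer_factory
        self._cached = None

    def get_serializer(self, *args):
        if args:
            # Explicit arguments always construct a fresh serializer.
            return self._factory(*args)
        if self._cached is None:
            self._cached = self._factory()
        return self._cached


field = SketchField(lambda *a: object())
assert field.get_serializer() is field.get_serializer()          # cached
assert field.get_serializer('x') is not field.get_serializer('x')  # bypassed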
4,332 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/test_serializers.py
|
tests.test_serializers.TestUserLocationSerializer
|
class TestUserLocationSerializer(TestCase):
def setUp(self):
self.fixture = create_fixture()
def test_data_with_embed(self):
data = UserLocationSerializer(
self.fixture.users[0],
envelope=True
).data
self.assertEqual(data['user_location']['location']['name'], '0')
self.assertEqual(
['0', '1'],
sorted([g['name'] for g in data['user_location']['groups']])
)
def test_data_with_embed_deferred(self):
# Make sure 'embed' fields can be deferred
class UserDeferredLocationSerializer(UserLocationSerializer):
class Meta:
model = User
name = 'user_deferred_location'
fields = (
'id',
'name',
'location',
)
location = DynamicRelationField(
LocationSerializer, embed=True, deferred=True
)
data = UserDeferredLocationSerializer(
self.fixture.users[0],
envelope=True
).data
self.assertFalse('location' in data)
# Now include deferred embedded field
data = UserDeferredLocationSerializer(
self.fixture.users[0],
request_fields={
'id': True,
'name': True,
'location': True
},
envelope=True
).data['user_deferred_location']
self.assertTrue('location' in data)
self.assertEqual(data['location']['name'], '0')
@override_settings(
DYNAMIC_REST={
'DEFER_MANY_RELATIONS': False,
}
)
def test_data_with_many_deferred(self):
class UserDeferredLocationSerializer(UserLocationSerializer):
class Meta:
defer_many_relations = True
model = User
name = 'user_deferred_location'
fields = (
'id',
'name',
'groups',
)
groups = DynamicRelationField('GroupSerializer', many=True)
data = UserDeferredLocationSerializer(
self.fixture.users[0]).data
self.assertFalse('groups' in data)
# Now include deferred embedded field
data = UserDeferredLocationSerializer(
self.fixture.users[0],
request_fields={
'id': True,
'name': True,
'groups': True
},
envelope=True
).data['user_deferred_location']
self.assertTrue('groups' in data)
@override_settings(
DYNAMIC_REST={
'DEFER_MANY_RELATIONS': True,
}
)
def test_data_with_many_not_deferred(self):
class UserDeferredLocationSerializer(UserLocationSerializer):
class Meta:
defer_many_relations = False
model = User
name = 'user_deferred_location'
fields = (
'groups',
)
groups = DynamicRelationField('GroupSerializer', many=True)
data = UserDeferredLocationSerializer(
self.fixture.users[0],
envelope=True
).data['user_deferred_location']
self.assertTrue('groups' in data)
|
class TestUserLocationSerializer(TestCase):
def setUp(self):
pass
def test_data_with_embed(self):
pass
def test_data_with_embed_deferred(self):
pass
class UserDeferredLocationSerializer(UserLocationSerializer):
class Meta:
@override_settings(
DYNAMIC_REST={
'DEFER_MANY_RELATIONS': False,
}
)
def test_data_with_many_deferred(self):
pass
class UserDeferredLocationSerializer(UserLocationSerializer):
class Meta:
@override_settings(
DYNAMIC_REST={
'DEFER_MANY_RELATIONS': True,
}
)
def test_data_with_many_not_deferred(self):
pass
class UserDeferredLocationSerializer(UserLocationSerializer):
class Meta:
| 14 | 0 | 18 | 2 | 16 | 1 | 1 | 0.03 | 1 | 4 | 4 | 0 | 5 | 1 | 5 | 5 | 108 | 13 | 92 | 41 | 70 | 3 | 41 | 31 | 29 | 1 | 1 | 0 | 5 |
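The deferral rule the three tests above exercise reduces to: render a field only when it is not deferred, or when request_fields explicitly asks for it. A minimal sketch, assuming a hypothetical visible_fields helper:

def visible_fields(all_fields, deferred_fields, request_fields):
    # A field is rendered unless it is deferred and not explicitly requested.
    return [
        name for name in all_fields
        if name not in deferred_fields or request_fields.get(name)
    ]


fields = ['id', 'name', 'location']
assert visible_fields(fields, {'location'}, {}) == ['id', 'name']
assert visible_fields(fields, {'location'}, {'location': True}) == fields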
4,333 |
AltSchool/dynamic-rest
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AltSchool_dynamic-rest/tests/test_serializers.py
|
tests.test_serializers.TestUserLocationSerializer.test_data_with_many_not_deferred.UserDeferredLocationSerializer.Meta
|
class Meta:
defer_many_relations = False
model = User
name = 'user_deferred_location'
fields = (
'groups',
)
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | 0 | 7 | 5 | 6 | 0 | 5 | 5 | 4 | 0 | 0 | 0 | 0 |
4,334 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/serializers.py
|
tests.serializers.UserLocationSerializer.Meta
|
class Meta:
model = User
name = 'user_location'
fields = ('groups', 'location', 'id')
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
4,335 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/serializers.py
|
tests.serializers.CarSerializer.Meta
|
class Meta:
model = Car
fields = ('id', 'name', 'country', 'parts')
deferred_fields = ('name', 'country', 'parts')
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
4,336 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/integration/test_blueprints.py
|
tests.integration.test_blueprints.DJBlueprintsTestCase
|
class DJBlueprintsTestCase(TestCase):
@skipIf(
not settings.ENABLE_INTEGRATION_TESTS,
'Integration tests disabled'
)
def test_blueprints(self):
params = {
"app": "dummy",
"description": "dummy",
"author": "dummy",
"email": "dummy@foo.com",
"version": "0.0.1",
"django_version": "2.2",
}
# generate a test application
application = TemporaryApplication(params=params)
# add a model
application.execute('generate model foo --not-interactive')
# create and apply migrations
application.execute('migrate')
# add this project as a dependency
# this file is ROOT/tests/integration/test_blueprints.py
root = os.path.abspath(os.path.join(__file__, '../../..'))
application.execute('add %s --dev --not-interactive' % root)
# generate an API endpoint for the generated model
application.execute('generate api v0 foo --not-interactive')
# start the server
server = application.execute('serve 9123', run_async=True)
time.sleep(2)
# verify a simple POST flow for the "foo" resource
response = requests.post('http://localhost:9123/api/v0/foos/')
        self.assertEqual(response.status_code, 201)
        content = json.loads(response.content)
        self.assertEqual(content, {'foo': {'id': 1}})
# stop the server
server.terminate()
|
class DJBlueprintsTestCase(TestCase):
@skipIf(
not settings.ENABLE_INTEGRATION_TESTS,
'Integration tests disabled'
)
def test_blueprints(self):
pass
| 3 | 0 | 33 | 2 | 22 | 9 | 1 | 0.33 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 73 | 39 | 3 | 27 | 12 | 21 | 9 | 16 | 8 | 14 | 1 | 2 | 0 | 1 |
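The fixed time.sleep(2) in the test above can flake on slow machines; a hedged alternative is to poll the port until the server accepts connections. The helper below is illustrative only and not part of the test.

import socket
import time


def wait_for_port(port, host='localhost', timeout=10.0):
    # Poll until the server accepts TCP connections or the timeout elapses.
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            with socket.create_connection((host, port), timeout=1):
                return True
        except OSError:
            time.sleep(0.1)
    return False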
4,337 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/dynamic_rest/routers.py
|
dynamic_rest.routers.DynamicRouter.get_api_root_view.API
|
class API(views.APIView):
_ignore_model_permissions = True
def get(self, request, *args, **kwargs):
directory_list = get_directory(request)
result = OrderedDict()
for group_name, url, endpoints, _ in directory_list:
if url:
result[group_name] = url
else:
group = OrderedDict()
for endpoint_name, url, _, _ in endpoints:
group[endpoint_name] = url
result[group_name] = group
return Response(result)
|
class API(views.APIView):
def get(self, request, *args, **kwargs):
pass
| 2 | 0 | 12 | 0 | 12 | 0 | 4 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 15 | 1 | 14 | 8 | 12 | 0 | 13 | 8 | 11 | 4 | 1 | 3 | 4 |
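The get() method above flattens a mixed directory into one- or two-level mappings. A self-contained sketch, with made-up directory_list values, shows the resulting shape:

from collections import OrderedDict

# Hypothetical directory entries: (name, url, endpoints, extra).
directory_list = [
    ('users', '/users/', [], None),
    ('animals', None,
     [('cats', '/cats/', None, None), ('dogs', '/dogs/', None, None)],
     None),
]

result = OrderedDict()
for group_name, url, endpoints, _ in directory_list:
    if url:
        result[group_name] = url
    else:
        group = OrderedDict()
        for endpoint_name, url, _, _ in endpoints:
            group[endpoint_name] = url
        result[group_name] = group

assert result['users'] == '/users/'
assert result['animals'] == OrderedDict(
    [('cats', '/cats/'), ('dogs', '/dogs/')])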
4,338 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/models.py
|
tests.models.Permission
|
class Permission(models.Model):
name = models.TextField()
code = models.IntegerField()
|
class Permission(models.Model):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
4,339 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/models.py
|
tests.models.Profile
|
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
display_name = models.TextField()
thumbnail_url = models.TextField(null=True, blank=True)
|
class Profile(models.Model):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 1 | 0 | 0 |
4,340 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/models.py
|
tests.models.User
|
class User(models.Model):
name = models.TextField()
last_name = models.TextField()
groups = models.ManyToManyField('Group', related_name='users')
permissions = models.ManyToManyField('Permission', related_name='users')
date_of_birth = models.DateField(null=True, blank=True)
# 'related_name' intentionally left unset in location field below:
location = models.ForeignKey(
'Location',
null=True,
blank=True,
on_delete=models.CASCADE
)
favorite_pet_type = models.ForeignKey(
ContentType,
null=True,
blank=True,
on_delete=models.CASCADE
)
favorite_pet_id = models.TextField(null=True, blank=True)
favorite_pet = GenericForeignKey(
'favorite_pet_type',
'favorite_pet_id',
)
is_dead = models.BooleanField(null=True, default=False)
|
class User(models.Model):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0.04 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 25 | 0 | 24 | 11 | 23 | 1 | 11 | 11 | 10 | 0 | 1 | 0 | 0 |
4,341 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/models.py
|
tests.models.Zebra
|
class Zebra(models.Model):
name = models.TextField()
origin = models.TextField()
|
class Zebra(models.Model):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
4,342 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/serializers.py
|
tests.serializers.DogSerializer
|
class DogSerializer(DynamicModelSerializer):
class Meta:
model = Dog
fields = ('id', 'name', 'origin', 'fur', 'is_red')
fur = CharField(source='fur_color')
is_red = DynamicMethodField(deferred=True, requires=['fur_color'])
def get_is_red(self, instance):
return instance.fur_color == 'red'
|
class DogSerializer(DynamicModelSerializer):
class Meta:
def get_is_red(self, instance):
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 34 | 11 | 3 | 8 | 7 | 5 | 0 | 8 | 7 | 5 | 1 | 5 | 0 | 1 |
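A hedged usage note on the serializer above: is_red is deferred, so clients opt in via include[], and requires=['fur_color'] keeps the source column in the query even though the output renders it under fur. The request below assumes this test project's /dogs/ route and fixture ids.

import json
from rest_framework.test import APIClient

client = APIClient()
# Dog 1 is Clifford with red fur in the fixtures used elsewhere in this file.
response = client.get('/dogs/1/?include[]=is_red')
data = json.loads(response.content.decode('utf-8'))
# Expected shape: data['dog'] includes 'fur' plus the computed 'is_red': True.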
4,343 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/serializers.py
|
tests.serializers.LocationSerializer
|
class LocationSerializer(DynamicModelSerializer):
class Meta:
defer_many_relations = False
model = Location
name = 'location'
fields = (
'id', 'name', 'users', 'user_count', 'address',
'cats', 'friendly_cats', 'bad_cats'
)
users = DynamicRelationField(
'UserSerializer',
source='user_set',
many=True,
deferred=True
)
user_count = CountField('users', required=False, deferred=True)
address = DynamicField(source='blob', required=False, deferred=True)
cats = DynamicRelationField(
'CatSerializer', source='cat_set', many=True, deferred=True)
friendly_cats = DynamicRelationField(
'CatSerializer', many=True, deferred=True)
bad_cats = DynamicRelationField(
'CatSerializer', source='annoying_cats', many=True, deferred=True)
def filter_queryset(self, query):
return query.exclude(name='Atlantis')
|
class LocationSerializer(DynamicModelSerializer):
class Meta:
def filter_queryset(self, query):
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 34 | 28 | 3 | 25 | 13 | 22 | 0 | 14 | 13 | 11 | 1 | 5 | 0 | 1 |
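A hedged usage sketch mirroring test_get_with_count_field later in this dataset: CountField('users') counts the sideloaded relation rather than issuing its own COUNT query, and the filter_queryset hook above silently hides any location named 'Atlantis'.

import json
from rest_framework.test import APIClient

client = APIClient()
response = client.get(
    '/locations/?filter{id}=1&include[]=users&include[]=user_count'
)
data = json.loads(response.content.decode('utf-8'))
# user_count is derived from the included relation, not a second query:
# data['locations'][0]['user_count'] == len(data['locations'][0]['users'])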
4,344 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/serializers.py
|
tests.serializers.UserSerializer
|
class UserSerializer(DynamicModelSerializer):
class Meta:
model = User
name = 'user'
fields = (
'id',
'name',
'permissions',
'groups',
'location',
'last_name',
'display_name',
'thumbnail_url',
'number_of_cats',
'profile',
'date_of_birth',
'favorite_pet_id',
'favorite_pet',
'is_dead',
)
deferred_fields = (
'last_name',
'date_of_birth',
'display_name',
'profile',
'thumbnail_url',
'favorite_pet_id',
'favorite_pet',
'is_dead',
)
read_only_fields = ('profile',)
location = DynamicRelationField('LocationSerializer')
permissions = DynamicRelationField(
'PermissionSerializer',
many=True,
deferred=True
)
groups = DynamicRelationField('GroupSerializer', many=True, deferred=True)
display_name = DynamicField(source='profile.display_name', read_only=True)
thumbnail_url = DynamicField(
source='profile.thumbnail_url',
read_only=True
)
number_of_cats = DynamicMethodField(
requires=['location.cat_set.*'],
deferred=True
)
# Don't set read_only on this field directly. Used in test for
# Meta.read_only_fields.
profile = DynamicRelationField(
'ProfileSerializer',
deferred=True
)
favorite_pet = DynamicGenericRelationField(required=False)
def get_number_of_cats(self, user):
if not self.context.get('request'):
# Used in test_api.py::test_relation_includes_context
raise Exception("No request object in context")
location = user.location
return len(location.cat_set.all()) if location else 0
|
class UserSerializer(DynamicModelSerializer):
class Meta:
def get_number_of_cats(self, user):
pass
| 3 | 0 | 6 | 0 | 5 | 1 | 3 | 0.05 | 1 | 1 | 0 | 2 | 1 | 0 | 1 | 34 | 64 | 4 | 57 | 17 | 54 | 3 | 20 | 17 | 17 | 3 | 5 | 1 | 3 |
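The dotted sources above (profile.display_name, profile.thumbnail_url) flatten one-to-one data onto the user payload; both are deferred, so a request must name them. The snippet is illustrative only and assumes the user has a related Profile row.

import json
from rest_framework.test import APIClient

client = APIClient()
response = client.get('/users/1/?include[]=display_name')
data = json.loads(response.content.decode('utf-8'))
# data['user']['display_name'] is read through user.profile.display_name.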
4,345 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/test_api.py
|
tests.test_api.TestAlternateLocationsAPI
|
class TestAlternateLocationsAPI(APITestCase):
"""Test extra_drest_filters view attribute"""
def setUp(self):
self.fixture = create_fixture()
def test_extra_drest_filter_combines_with_drest_filters(self):
# sanity check: standard filter returns 1 result
r = self.client.get('/alternate_locations/?filter{users.last_name}=1')
self.assertEqual(r.status_code, 200)
self.assertEqual(len(r.data.get('locations', [])), 1, r.data)
location = r.data['locations'][0]
self.assertEqual(location['name'], '0')
# using the custom filter gives same result
r = self.client.get('/alternate_locations/?user_name=0')
self.assertEqual(r.status_code, 200)
self.assertEqual(len(r.data['locations']), 1)
location = r.data['locations'][0]
self.assertEqual(location['name'], '0')
# now combine filters, such that no user could satisfy both
# verify that we get no locations back
r = self.client.get(
'/alternate_locations/?user_name=0&filter{users.last_name}=1'
)
self.assertEqual(r.status_code, 200)
self.assertEqual(len(r.data['locations']), 0)
def test_separate_filter_doesnt_combine_with_drest_filters(self):
# This establishes that doing a naive `.filter` results
# in multiple joins, giving an unexpected result:
# the Location has 2 users, each satisfying one of the
# two filters.
r = self.client.get(
'/alternate_locations/?user_name_separate=0'
'&filter{users.last_name}=1'
)
self.assertEqual(r.status_code, 200)
self.assertEqual(len(r.data['locations']), 1)
location = r.data['locations'][0]
self.assertEqual(location['name'], '0')
|
class TestAlternateLocationsAPI(APITestCase):
'''Test extra_drest_filters view attribute'''
def setUp(self):
pass
def test_extra_drest_filter_combines_with_drest_filters(self):
pass
def test_separate_filter_doesnt_combine_with_drest_filters(self):
pass
| 4 | 1 | 12 | 1 | 9 | 3 | 1 | 0.32 | 1 | 0 | 0 | 0 | 3 | 1 | 3 | 3 | 43 | 6 | 28 | 9 | 24 | 9 | 23 | 9 | 19 | 1 | 1 | 0 | 3 |
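The second test relies on standard Django ORM join semantics rather than anything drest-specific; the contrast below is a sketch using the project's own Location model (querysets are lazy, so nothing executes at definition time).

from tests.models import Location

# One .filter() call produces one JOIN: a single user row must satisfy
# both conditions, so this matches nothing in the fixture.
combined = Location.objects.filter(users__name='0', users__last_name='1')

# Chained .filter() calls each add their own JOIN: different user rows may
# satisfy each condition, which is why the naive approach still matches.
chained = Location.objects.filter(users__name='0').filter(users__last_name='1')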
4,346 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/test_api.py
|
tests.test_api.TestBrowsableAPI
|
class TestBrowsableAPI(APITestCase):
"""
Tests for Browsable API directory
"""
def test_get_root(self):
response = self.client.get('/?format=api')
content = response.content.decode('utf-8')
self.assertIn('directory', content)
self.assertIn('/horses', content)
self.assertIn('/zebras', content)
self.assertIn('/users', content)
def test_get_list(self):
response = self.client.get('/users/?format=api')
content = response.content.decode('utf-8')
self.assertIn('directory', content)
self.assertIn('/horses', content)
self.assertIn('/zebras', content)
self.assertIn('/users', content)
|
class TestBrowsableAPI(APITestCase):
'''
Tests for Browsable API directory
'''
def test_get_root(self):
pass
def test_get_list(self):
pass
| 3 | 1 | 7 | 0 | 7 | 0 | 1 | 0.2 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 21 | 3 | 15 | 7 | 12 | 3 | 15 | 7 | 12 | 1 | 1 | 0 | 2 |
4,347 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/models.py
|
tests.models.Group
|
class Group(models.Model):
name = models.TextField(unique=True)
permissions = models.ManyToManyField('Permission', related_name='groups')
|
class Group(models.Model):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
4,348 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/models.py
|
tests.models.Event
|
class Event(models.Model):
"""
Event model -- Intentionally missing serializer and viewset, so they
can be added as part of a codelab.
"""
name = models.TextField()
status = models.TextField(default='current')
location = models.ForeignKey(
'Location',
null=True,
blank=True,
on_delete=models.CASCADE
)
users = models.ManyToManyField('User')
|
class Event(models.Model):
'''
Event model -- Intentionally missing serializer and viewset, so they
can be added as part of a codelab.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1 | 10 | 5 | 9 | 4 | 5 | 5 | 4 | 0 | 1 | 0 | 0 |
4,349 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/models.py
|
tests.models.Dog
|
class Dog(models.Model):
name = models.TextField()
fur_color = models.TextField()
origin = models.TextField()
|
class Dog(models.Model):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 1 | 0 | 0 |
4,350 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/models.py
|
tests.models.D
|
class D(models.Model):
name = models.TextField(blank=True)
|
class D(models.Model):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 1 | 0 | 0 |
4,351 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/models.py
|
tests.models.Country
|
class Country(models.Model):
name = models.CharField(max_length=60)
short_name = models.CharField(max_length=30)
|
class Country(models.Model):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
4,352 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/test_api.py
|
tests.test_api.TestCatsAPI
|
class TestCatsAPI(APITestCase):
"""
Tests for nested resources
"""
def setUp(self):
self.fixture = create_fixture()
home_id = self.fixture.locations[0].id
backup_home_id = self.fixture.locations[1].id
parent = Cat.objects.create(
name='Parent',
home_id=home_id,
backup_home_id=backup_home_id
)
self.kitten = Cat.objects.create(
name='Kitten',
home_id=home_id,
backup_home_id=backup_home_id,
parent=parent
)
def test_additional_sideloads(self):
response = self.client.get(
'/cats/%i?include[]=parent.' % self.kitten.id
)
content = json.loads(response.content.decode('utf-8'))
self.assertTrue('cat' in content)
self.assertTrue('+cats' in content)
self.assertEqual(content['cat']['name'], 'Kitten')
self.assertEqual(content['+cats'][0]['name'], 'Parent')
def test_allows_whitespace(self):
data = {
'name': ' Zahaklu ',
'home': self.kitten.home_id,
'backup_home': self.kitten.backup_home_id,
'parent': self.kitten.parent_id,
}
response = self.client.post(
'/cats/',
json.dumps(data),
content_type='application/json',
)
self.assertEqual(201, response.status_code)
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(data['cat']['name'], ' Zahaklu ')
def test_immutable_field(self):
""" Make sure immutable 'parent' field can be set on POST """
parent_id = self.kitten.parent_id
kitten_name = 'New Kitten'
data = {
'name': kitten_name,
'home': self.kitten.home_id,
'backup_home': self.kitten.backup_home_id,
'parent': parent_id
}
response = self.client.post(
'/cats/',
json.dumps(data),
content_type='application/json'
)
self.assertEqual(201, response.status_code)
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(data['cat']['parent'], parent_id)
self.assertEqual(data['cat']['name'], kitten_name)
# Try to change immutable data in a PATCH request...
patch_data = {
'parent': self.kitten.pk,
'name': 'Renamed Kitten',
}
response = self.client.patch(
'/cats/%s/' % data['cat']['id'],
json.dumps(patch_data),
content_type='application/json'
)
self.assertEqual(200, response.status_code)
data = json.loads(response.content.decode('utf-8'))
# ... and it should not have changed:
self.assertEqual(data['cat']['parent'], parent_id)
self.assertEqual(data['cat']['name'], kitten_name)
|
class TestCatsAPI(APITestCase):
'''
Tests for nested resources
'''
def setUp(self):
pass
def test_additional_sideloads(self):
pass
def test_allows_whitespace(self):
pass
def test_immutable_field(self):
''' Make sure immutable 'parent' field can be set on POST '''
pass
| 5 | 2 | 19 | 1 | 18 | 1 | 1 | 0.08 | 1 | 1 | 1 | 0 | 4 | 2 | 4 | 4 | 84 | 7 | 71 | 19 | 66 | 6 | 35 | 19 | 30 | 1 | 1 | 0 | 4 |
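A hedged note on the '+cats' key asserted in test_additional_sideloads above: when a nested include pulls in records of the same resource type as the primary payload, drest namespaces them under a '+'-prefixed collection instead of merging them.

import json
from rest_framework.test import APIClient

client = APIClient()
# The kitten id comes from setUp above; the value 2 is illustrative.
response = client.get('/cats/2?include[]=parent.')
content = json.loads(response.content.decode('utf-8'))
# content['cat'] is the kitten; content['+cats'] holds the sideloaded parent.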
4,353 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/test_api.py
|
tests.test_api.TestDogsAPI
|
class TestDogsAPI(APITestCase):
"""
Tests for sorting and pagination
"""
def setUp(self):
self.fixture = create_fixture()
def test_sort_exclude_count(self):
# page 1
url = '/dogs/?sort[]=name&exclude_count=1&per_page=4'
# 1 query - one for getting dogs, 0 for count
with self.assertNumQueries(1):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
expected_data = [{
'id': 2,
'name': 'Air-Bud',
'origin': 'Air Bud 4: Seventh Inning Fetch',
'fur': 'gold'
}, {
'id': 1,
'name': 'Clifford',
'origin': 'Clifford the big red dog',
'fur': 'red'
}, {
'id': 4,
'name': 'Pluto',
'origin': 'Mickey Mouse',
'fur': 'brown and white'
}, {
'id': 3,
'name': 'Spike',
'origin': 'Rugrats',
'fur': 'brown'
}]
expected_meta = {
'page': 1,
'per_page': 4,
'more_pages': True
}
actual_response = json.loads(
response.content.decode('utf-8'))
actual_data = actual_response.get('dogs')
actual_meta = actual_response.get('meta')
self.assertEqual(expected_data, actual_data)
self.assertEqual(expected_meta, actual_meta)
# page 2
url = f'{url}&page=2'
with self.assertNumQueries(1):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
expected_data = [{
'id': 5,
'name': 'Spike',
'origin': 'Tom and Jerry',
'fur': 'light-brown'
}]
expected_meta = {
'page': 2,
'per_page': 4,
'more_pages': False
}
actual_response = json.loads(
response.content.decode('utf-8'))
actual_data = actual_response.get('dogs')
actual_meta = actual_response.get('meta')
self.assertEqual(expected_data, actual_data)
self.assertEqual(expected_meta, actual_meta)
def test_sort_implied_all(self):
url = '/dogs/?sort[]=name'
# 2 queries - one for getting dogs, one for the meta (count)
with self.assertNumQueries(2):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
expected_response = [{
'id': 2,
'name': 'Air-Bud',
'origin': 'Air Bud 4: Seventh Inning Fetch',
'fur': 'gold'
}, {
'id': 1,
'name': 'Clifford',
'origin': 'Clifford the big red dog',
'fur': 'red'
}, {
'id': 4,
'name': 'Pluto',
'origin': 'Mickey Mouse',
'fur': 'brown and white'
}, {
'id': 3,
'name': 'Spike',
'origin': 'Rugrats',
'fur': 'brown'
}, {
'id': 5,
'name': 'Spike',
'origin': 'Tom and Jerry',
'fur': 'light-brown'
}]
actual_response = json.loads(
response.content.decode('utf-8')).get('dogs')
self.assertEqual(expected_response, actual_response)
def test_sort_reverse(self):
url = '/dogs/?sort[]=-name'
# 2 queries - one for getting dogs, one for the meta (count)
with self.assertNumQueries(2):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
expected_response = [{
'id': 3,
'name': 'Spike',
'origin': 'Rugrats',
'fur': 'brown'
}, {
'id': 5,
'name': 'Spike',
'origin': 'Tom and Jerry',
'fur': 'light-brown'
}, {
'id': 4,
'name': 'Pluto',
'origin': 'Mickey Mouse',
'fur': 'brown and white'
}, {
'id': 1,
'name': 'Clifford',
'origin': 'Clifford the big red dog',
'fur': 'red'
}, {
'id': 2,
'name': 'Air-Bud',
'origin': 'Air Bud 4: Seventh Inning Fetch',
'fur': 'gold'
}]
actual_response = json.loads(
response.content.decode('utf-8')).get('dogs')
self.assertEqual(expected_response, actual_response)
def test_sort_multiple(self):
url = '/dogs/?sort[]=-name&sort[]=-origin'
# 2 queries - one for getting dogs, one for the meta (count)
with self.assertNumQueries(2):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
expected_response = [{
'id': 5,
'name': 'Spike',
'origin': 'Tom and Jerry',
'fur': 'light-brown'
}, {
'id': 3,
'name': 'Spike',
'origin': 'Rugrats',
'fur': 'brown'
}, {
'id': 4,
'name': 'Pluto',
'origin': 'Mickey Mouse',
'fur': 'brown and white'
}, {
'id': 1,
'name': 'Clifford',
'origin': 'Clifford the big red dog',
'fur': 'red'
}, {
'id': 2,
'name': 'Air-Bud',
'origin': 'Air Bud 4: Seventh Inning Fetch',
'fur': 'gold'
}]
actual_response = json.loads(
response.content.decode('utf-8')).get('dogs')
self.assertEqual(expected_response, actual_response)
def test_sort_rewrite(self):
url = '/dogs/?sort[]=fur'
# 2 queries - one for getting dogs, one for the meta (count)
with self.assertNumQueries(2):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
expected_response = [{
'id': 3,
'name': 'Spike',
'origin': 'Rugrats',
'fur': 'brown'
}, {
'id': 4,
'name': 'Pluto',
'origin': 'Mickey Mouse',
'fur': 'brown and white'
}, {
'id': 2,
'name': 'Air-Bud',
'origin': 'Air Bud 4: Seventh Inning Fetch',
'fur': 'gold'
}, {
'id': 5,
'name': 'Spike',
'origin': 'Tom and Jerry',
'fur': 'light-brown'
}, {
'id': 1,
'name': 'Clifford',
'origin': 'Clifford the big red dog',
'fur': 'red'
}]
actual_response = json.loads(
response.content.decode('utf-8')).get('dogs')
self.assertEqual(expected_response, actual_response)
def test_sort_invalid(self):
url = '/horses?sort[]=borigin'
response = self.client.get(url)
        # expect the server to return a 400 if an invalid
        # sort field is specified
self.assertEqual(400, response.status_code)
|
class TestDogsAPI(APITestCase):
'''
Tests for sorting and pagination
'''
def setUp(self):
pass
def test_sort_exclude_count(self):
pass
def test_sort_implied_all(self):
pass
def test_sort_reverse(self):
pass
def test_sort_multiple(self):
pass
def test_sort_rewrite(self):
pass
def test_sort_invalid(self):
pass
| 8 | 1 | 30 | 0 | 29 | 1 | 1 | 0.06 | 1 | 0 | 0 | 0 | 7 | 1 | 7 | 7 | 223 | 10 | 201 | 34 | 193 | 12 | 62 | 34 | 54 | 1 | 1 | 1 | 7 |
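The sort/pagination query strings exercised above can also be built programmatically; build_url below is a hypothetical helper, not a drest API.

from urllib.parse import urlencode


def build_url(path, sorts=(), **params):
    # sort[] may repeat; remaining params are emitted in sorted key order.
    query = [('sort[]', s) for s in sorts] + sorted(params.items())
    return path + '?' + urlencode(query)


assert build_url('/dogs/', sorts=['-name', '-origin']) == \
    '/dogs/?sort%5B%5D=-name&sort%5B%5D=-origin'
assert build_url('/dogs/', sorts=['name'], exclude_count=1, per_page=4) == \
    '/dogs/?sort%5B%5D=name&exclude_count=1&per_page=4'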
4,354 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/test_api.py
|
tests.test_api.TestFilters
|
class TestFilters(APITestCase):
"""
Tests for filters.
"""
def testUnparseableInt(self):
url = '/users/?filter{pk}=123x'
response = self.client.get(url)
self.assertEqual(400, response.status_code)
|
class TestFilters(APITestCase):
'''
Tests for filters.
'''
def testUnparseableInt(self):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 1 | 0.6 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 10 | 2 | 5 | 4 | 3 | 3 | 5 | 4 | 3 | 1 | 1 | 0 | 1 |
4,355 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/benchmarks/drest.py
|
benchmarks.drest.GroupSerializer.Meta
|
class Meta:
model = Group
name = 'group'
fields = ('id', 'name', 'permissions')
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
4,356 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/benchmarks/drest.py
|
benchmarks.drest.PermissionSerializer.Meta
|
class Meta:
model = Permission
name = 'permission'
fields = ('id', 'name')
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
4,357 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/benchmarks/drest.py
|
benchmarks.drest.UserSerializer.Meta
|
class Meta:
model = User
name = 'user'
fields = ('id', 'name', 'groups')
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
4,358 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/benchmarks/drf.py
|
benchmarks.drf.GroupSerializer.Meta
|
class Meta:
model = Group
fields = ('id', 'name')
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 0 | 0 | 0 |
4,359 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/benchmarks/drf.py
|
benchmarks.drf.GroupWithPermissionsSerializer.Meta
|
class Meta:
model = Group
fields = ('id', 'name', 'permissions')
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 0 | 0 | 0 |
4,360 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/benchmarks/drf.py
|
benchmarks.drf.PermissionSerializer.Meta
|
class Meta:
model = Permission
fields = ('id', 'name')
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 0 | 0 | 0 |
4,361 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/benchmarks/drf.py
|
benchmarks.drf.UserSerializer.Meta
|
class Meta:
model = User
fields = ('id', 'name')
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 0 | 0 | 0 |
4,362 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/benchmarks/drf.py
|
benchmarks.drf.UserWithGroupsSerializer.Meta
|
class Meta:
model = User
fields = ('id', 'name', 'groups')
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 0 | 0 | 0 |
4,363 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/test_utils.py
|
tests.test_utils.UtilsTestCase
|
class UtilsTestCase(TestCase):
def setUp(self):
User.objects.create(name="Marie")
User.objects.create(name="Rosalind")
def test_is_truthy(self):
self.assertTrue(is_truthy("faux"))
self.assertTrue(is_truthy(1))
self.assertFalse(is_truthy("0"))
self.assertFalse(is_truthy("False"))
self.assertFalse(is_truthy("false"))
self.assertFalse(is_truthy(""))
def test_unpack_empty_value(self):
self.assertIsNone(unpack(None))
def test_unpack_non_empty_value(self):
content = {"hello": "world", "meta": "worldpeace", "missed": "a 't'"}
self.assertIsNotNone(unpack(content))
def test_unpack_meta_first_key(self):
content = {"meta": "worldpeace", "missed": "a 't'"}
self.assertEqual(unpack(content), "a 't'")
def test_unpack_meta_not_first_key(self):
content = {"hello": "world", "meta": "worldpeace", "missed": "a 't'"}
self.assertEqual(unpack(content), "world")
@override_settings(
ENABLE_HASHID_FIELDS=True,
HASHIDS_SALT="If my calculations are correct, "
"when this vaby hits 88 miles per hour, "
"you're gonna see some serious s***.",
)
    def test_int_id_from_model_ext_id_obj_does_not_exist(self):
self.assertRaises(
User.DoesNotExist,
internal_id_from_model_and_external_id,
model=User,
external_id="skdkahh",
)
def test_model_from_definition(self):
self.assertEqual(model_from_definition('tests.models.User'), User)
self.assertEqual(model_from_definition(User), User)
self.assertRaises(
AssertionError,
model_from_definition,
model_definition='django.test.override_settings'
)
self.assertRaises(
AssertionError,
model_from_definition,
model_definition=User()
)
|
class UtilsTestCase(TestCase):
def setUp(self):
pass
def test_is_truthy(self):
pass
def test_unpack_empty_value(self):
pass
def test_unpack_non_empty_value(self):
pass
def test_unpack_meta_first_key(self):
pass
def test_unpack_meta_not_first_key(self):
pass
@override_settings(
ENABLE_HASHID_FIELDS=True,
HASHIDS_SALT="If my calculations are correct, "
"when this vaby hits 88 miles per hour, "
"you're gonna see some serious s***.",
)
    def test_int_id_from_model_ext_id_obj_does_not_exist(self):
pass
def test_model_from_definition(self):
pass
| 10 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 1 | 2 | 1 | 0 | 8 | 0 | 8 | 8 | 56 | 7 | 49 | 19 | 33 | 0 | 29 | 12 | 20 | 1 | 1 | 0 | 8 |
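The truthiness rule the first test pins down can be restated as a tiny sketch; is_truthy_sketch is illustrative, and dynamic_rest.utils.is_truthy is the real implementation.

def is_truthy_sketch(value):
    # Non-strings fall back to bool(); strings are false only for '',
    # '0', and case-insensitive 'false'.
    if not isinstance(value, str):
        return bool(value)
    return value.lower() not in ('', '0', 'false')


assert is_truthy_sketch('faux') and is_truthy_sketch(1)
assert not any(map(is_truthy_sketch, ('0', 'False', 'false', '')))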
4,364 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/viewsets.py
|
tests.viewsets.ZebraViewSet
|
class ZebraViewSet(DynamicModelViewSet):
features = (DynamicModelViewSet.SORT,)
model = Zebra
serializer_class = ZebraSerializer
queryset = Zebra.objects.all()
ordering_fields = '__all__'
|
class ZebraViewSet(DynamicModelViewSet):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 6 | 0 | 6 | 6 | 5 | 0 | 6 | 6 | 5 | 0 | 3 | 0 | 0 |
4,365 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/benchmarks/models.py
|
benchmarks.models.Permission
|
class Permission(models.Model):
name = models.TextField()
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
|
class Permission(models.Model):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 1 | 0 | 0 |
4,366 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/test_api.py
|
tests.test_api.TestLocationsAPI
|
class TestLocationsAPI(APITestCase):
def setUp(self):
self.fixture = create_fixture()
self.maxDiff = None
def test_options(self):
response = self.client.options('/locations/')
self.assertEqual(200, response.status_code)
actual = json.loads(response.content.decode('utf-8'))
expected = {
'description': '',
'name': 'Location List',
'parses': [
'application/json',
'application/x-www-form-urlencoded',
'multipart/form-data'
],
'properties': {
'name': {
'default': None,
'label': 'Name',
'nullable': False,
'read_only': False,
'required': True,
'type': 'string'
},
'address': {
'default': None,
'immutable': False,
'label': 'Address',
'nullable': False,
'read_only': False,
'required': False,
'type': 'field'
},
'id': {
'default': None,
'label': 'ID',
'nullable': False,
'read_only': True,
'required': False,
'type': 'integer'
},
'user_count': {
'default': None,
'immutable': False,
'label': 'User count',
'nullable': False,
'read_only': False,
'required': False,
'type': 'field'
},
'users': {
'default': None,
'immutable': False,
'label': 'Users',
'nullable': True,
'read_only': False,
'related_to': 'users',
'required': False,
'type': 'many'
},
'cats': {
'default': None,
'immutable': False,
'label': 'Cats',
'nullable': True,
'read_only': False,
'related_to': 'cats',
'required': False,
'type': 'many'
},
'bad_cats': {
'default': None,
'immutable': False,
'label': 'Bad cats',
'nullable': True,
'read_only': False,
'related_to': 'cats',
'required': False,
'type': 'many'
},
'friendly_cats': {
'default': None,
'immutable': False,
'label': 'Friendly cats',
'nullable': True,
'read_only': False,
'related_to': 'cats',
'required': False,
'type': 'many'
}
},
'renders': ['application/json', 'text/html'],
'resource_name': 'location',
'resource_name_plural': 'locations'
}
# Django 1.7 and 1.9 differ in their interpretation of
# "nullable" when it comes to inverse relationship fields.
# Ignore the values for the purposes of this comparison.
for field in ['cats', 'friendly_cats', 'bad_cats', 'users']:
del actual['properties'][field]['nullable']
del expected['properties'][field]['nullable']
actual.pop('features')
self.assertEqual(
json.loads(json.dumps(expected)),
json.loads(json.dumps(actual))
)
def test_get_with_filter_by_user(self):
url = '/locations/?filter{users}=1'
response = self.client.get(url)
self.assertEqual(200, response.status_code)
content = json.loads(response.content.decode('utf-8'))
self.assertEqual(1, len(content['locations']))
def test_get_with_filter_rewrites(self):
"""Tests various filter rewrite scenarios"""
urls = [
'/locations/?filter{cats}=1',
'/locations/?filter{friendly_cats}=1',
'/locations/?filter{bad_cats}=1'
]
for url in urls:
response = self.client.get(url)
self.assertEqual(200, response.status_code)
|
class TestLocationsAPI(APITestCase):
def setUp(self):
pass
def test_options(self):
pass
def test_get_with_filter_by_user(self):
pass
def test_get_with_filter_rewrites(self):
'''Tests various filter rewrite scenarios'''
pass
| 5 | 1 | 31 | 0 | 30 | 1 | 2 | 0.03 | 1 | 0 | 0 | 0 | 4 | 2 | 4 | 4 | 127 | 4 | 119 | 17 | 114 | 4 | 25 | 17 | 20 | 2 | 1 | 1 | 6 |
4,367 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/viewsets.py
|
tests.viewsets.UserViewSet
|
class UserViewSet(DynamicModelViewSet):
features = (
DynamicModelViewSet.INCLUDE, DynamicModelViewSet.EXCLUDE,
DynamicModelViewSet.FILTER, DynamicModelViewSet.SORT,
DynamicModelViewSet.SIDELOADING, DynamicModelViewSet.DEBUG,
)
model = User
serializer_class = UserSerializer
queryset = User.objects.all()
def get_queryset(self):
location = self.request.query_params.get('location')
qs = self.queryset
if location:
qs = qs.filter(location=location)
return qs
def list(self, request, *args, **kwargs):
query_params = self.request.query_params
# for testing query param injection
if query_params.get('name'):
query_params.add('filter{name}', query_params.get('name'))
return super(UserViewSet, self).list(request, *args, **kwargs)
|
class UserViewSet(DynamicModelViewSet):
def get_queryset(self):
pass
def list(self, request, *args, **kwargs):
pass
| 3 | 0 | 6 | 0 | 6 | 1 | 2 | 0.05 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 29 | 23 | 2 | 20 | 10 | 17 | 1 | 16 | 10 | 13 | 2 | 3 | 1 | 4 |
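A hedged usage sketch of the list() override above: a plain ?name= parameter is injected as a drest filter, so within this test project the two requests below should return the same single user.

import json
from rest_framework.test import APIClient

client = APIClient()
injected = client.get('/users/?name=1')
explicit = client.get('/users/?filter{name}=1')
# Both responses should contain only the user named '1'.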
4,368 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/test_api.py
|
tests.test_api.TestHorsesAPI
|
class TestHorsesAPI(APITestCase):
"""
    Tests for sorting on default fields and limiting the sortable fields
"""
def setUp(self):
self.fixture = create_fixture()
def test_sort_custom_default(self):
url = '/horses'
# 1 query - one for getting horses
        # (the viewset has features specified, so no meta is returned)
with self.assertNumQueries(1):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
        # expect horses to be sorted by -name by default
expected_response = {
'horses': [{
'id': 2,
'name': 'Secretariat',
'origin': 'Kentucky'
}, {
'id': 1,
'name': 'Seabiscuit',
'origin': 'LA'
}]
}
actual_response = json.loads(response.content.decode('utf-8'))
self.assertEqual(expected_response, actual_response)
def test_sort_with_field_not_allowed(self):
url = '/horses?sort[]=origin'
response = self.client.get(url)
        # if `ordering_fields` are specified in the viewset, only allow
        # sorting on those fields; if the URL lists a field that is not
        # among them, return a 400
self.assertEqual(400, response.status_code)
|
class TestHorsesAPI(APITestCase):
'''
    Tests for sorting on default fields and limiting the sortable fields
'''
def setUp(self):
pass
def test_sort_custom_default(self):
pass
def test_sort_with_field_not_allowed(self):
pass
| 4 | 1 | 11 | 1 | 8 | 2 | 1 | 0.36 | 1 | 0 | 0 | 0 | 3 | 1 | 3 | 3 | 40 | 6 | 25 | 11 | 21 | 9 | 15 | 11 | 11 | 1 | 1 | 1 | 3 |
4,369 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/viewsets.py
|
tests.viewsets.UserLocationViewSet
|
class UserLocationViewSet(DynamicModelViewSet):
model = User
serializer_class = UserLocationSerializer
queryset = User.objects.all()
|
class UserLocationViewSet(DynamicModelViewSet):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 3 | 0 | 0 |
4,370 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/test_api.py
|
tests.test_api.TestZebrasAPI
|
class TestZebrasAPI(APITestCase):
"""
    Tests for sorting when ordering_fields is __all__
"""
def setUp(self):
self.fixture = create_fixture()
def test_sort_explicit_all(self):
url = '/zebras?sort[]=-name'
# 1 query - one for getting zebras
        # (the viewset has features specified, so no meta is returned)
with self.assertNumQueries(1):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
        # expect any field to be sortable on zebras because __all__ is specified
expected_response = {
'zebras': [{
'id': 2,
'name': 'Ted',
'origin': 'africa'
}, {
'id': 1,
'name': 'Ralph',
'origin': 'new york'
}]
}
actual_response = json.loads(response.content.decode('utf-8'))
self.assertEqual(expected_response, actual_response)
|
class TestZebrasAPI(APITestCase):
'''
    Tests for sorting when ordering_fields is __all__
'''
def setUp(self):
pass
def test_sort_explicit_all(self):
pass
| 3 | 1 | 12 | 1 | 10 | 2 | 1 | 0.29 | 1 | 0 | 0 | 0 | 2 | 1 | 2 | 2 | 31 | 4 | 21 | 8 | 18 | 6 | 11 | 8 | 8 | 1 | 1 | 1 | 2 |
4,371 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/test_api.py
|
tests.test_api.TestUsersAPI
|
class TestUsersAPI(APITestCase):
def setUp(self):
self.fixture = create_fixture()
self.maxDiff = None
def _get_json(self, url, expected_status=200):
response = self.client.get(url)
self.assertEqual(expected_status, response.status_code)
return json.loads(response.content.decode('utf-8'))
def test_get(self):
with self.assertNumQueries(1):
# 1 for User, 0 for Location
response = self.client.get('/users/')
self.assertEqual(200, response.status_code)
self.assertEqual({
'users': [{
'id': 1,
'location': 1,
'name': '0'
}, {
'id': 2,
'location': 1,
'name': '1'
}, {
'id': 3,
'location': 2,
'name': '2'
}, {
'id': 4,
'location': 3,
'name': '3'
}]
}, json.loads(response.content.decode('utf-8')))
def test_get_with_trailing_slash_does_not_redirect(self):
response = self.client.get('/users/1')
self.assertEqual(200, response.status_code)
def test_get_with_include(self):
with self.assertNumQueries(2):
# 2 queries: 1 for User, 1 for Group, 0 for Location
response = self.client.get('/users/?include[]=groups')
self.assertEqual(200, response.status_code)
self.assertEqual({
'users': [{
'id': 1,
'groups': [1, 2],
'location': 1,
'name': '0'
}, {
'id': 2,
'groups': [1, 2],
'location': 1,
'name': '1'
}, {
'id': 3,
'groups': [1, 2],
'location': 2,
'name': '2'
}, {
'id': 4,
'groups': [1, 2],
'location': 3,
'name': '3'
}]
}, json.loads(response.content.decode('utf-8')))
with self.assertNumQueries(2):
# 2 queries: 1 for User, 1 for Group
response = self.client.get('/groups/?include[]=members')
self.assertEqual(200, response.status_code)
self.assertEqual({
'groups': [{
'id': 1,
'members': [1, 2, 3, 4],
'name': '0'
}, {
'id': 2,
'members': [1, 2, 3, 4],
                'name': '1'
}]
}, json.loads(response.content.decode('utf-8')))
def test_get_with_exclude(self):
with self.assertNumQueries(1):
response = self.client.get('/users/?exclude[]=name')
query = connection.queries[-1]['sql']
self.assertFalse('name' in query, query)
self.assertFalse('*' in query, query)
self.assertEqual(200, response.status_code)
self.assertEqual({
'users': [{
'id': 1,
'location': 1
}, {
'id': 2,
'location': 1
}, {
'id': 3,
'location': 2
}, {
'id': 4,
'location': 3
}]
}, json.loads(response.content.decode('utf-8')))
def test_get_with_nested_has_one_sideloading_disabled(self):
with self.assertNumQueries(2):
response = self.client.get(
'/users/?include[]=location.&sideloading=false'
)
self.assertEqual(200, response.status_code)
self.assertEqual({
'users': [{
'id': 1,
'location': {
'id': 1,
'name': '0'
},
'name': '0'
}, {
'id': 2,
'location': {
'id': 1,
'name': '0'
},
'name': '1'
}, {
'id': 3,
'location': {
'id': 2,
'name': '1'
},
'name': '2'
}, {
'id': 4,
'location': {
'id': 3,
'name': '2'
},
'name': '3'
}]
}, json.loads(response.content.decode('utf-8')))
def test_get_with_nested_has_one(self):
with self.assertNumQueries(2):
response = self.client.get('/users/?include[]=location.')
self.assertEqual(200, response.status_code)
self.assertEqual({
'locations': [{
'id': 1,
'name': '0'
}, {
'id': 2,
'name': '1'
}, {
'id': 3,
'name': '2'
}],
'users': [{
'id': 1,
'location': 1,
'name': '0'
}, {
'id': 2,
'location': 1,
'name': '1'
}, {
'id': 3,
'location': 2,
'name': '2'
}, {
'id': 4,
'location': 3,
'name': '3'
}]
}, json.loads(response.content.decode('utf-8')))
def test_get_with_nested_has_many(self):
with self.assertNumQueries(2):
# 2 queries: 1 for User, 1 for Group
response = self.client.get('/users/?include[]=groups.')
self.assertEqual(200, response.status_code)
self.assertEqual(
{'groups': [{'id': 1, 'name': '0'}, {'id': 2, 'name': '1'}],
'users': [{
'groups': [1, 2], 'id': 1, 'location': 1, 'name': '0'
}, {
'groups': [1, 2], 'id': 2, 'location': 1, 'name': '1'
}, {
'groups': [1, 2], 'id': 3, 'location': 2, 'name': '2'
}, {
'groups': [1, 2], 'id': 4, 'location': 3, 'name': '3'
}]},
json.loads(response.content.decode('utf-8')))
def test_get_with_nested_include(self):
with self.assertNumQueries(3):
# 3 queries: 1 for User, 1 for Group, 1 for Permissions
response = self.client.get('/users/?include[]=groups.permissions')
self.assertEqual(200, response.status_code)
self.assertEqual(
{'groups': [{'id': 1, 'name': '0', 'permissions': [1]},
{'id': 2, 'name': '1', 'permissions': [2]}],
'users': [{
'groups': [1, 2], 'id': 1, 'location': 1, 'name': '0'
}, {
'groups': [1, 2], 'id': 2, 'location': 1, 'name': '1'
}, {
'groups': [1, 2], 'id': 3, 'location': 2, 'name': '2'
}, {
'groups': [1, 2], 'id': 4, 'location': 3, 'name': '3'
}
]},
json.loads(response.content.decode('utf-8')))
def test_get_with_nested_exclude(self):
with self.assertNumQueries(2):
# 2 queries: 1 for User, 1 for Group
response = self.client.get('/users/?exclude[]=groups.name')
self.assertEqual(200, response.status_code)
self.assertEqual(
{'groups': [{'id': 1}, {'id': 2}],
'users': [{
'groups': [1, 2], 'id': 1, 'location': 1, 'name': '0'
}, {
'groups': [1, 2], 'id': 2, 'location': 1, 'name': '1'
}, {
'groups': [1, 2], 'id': 3, 'location': 2, 'name': '2'
}, {
'groups': [1, 2], 'id': 4, 'location': 3, 'name': '3'
}]},
json.loads(response.content.decode('utf-8')))
def test_get_with_nested_exclude_all(self):
with self.assertNumQueries(2):
# 2 queries: 1 for User, 1 for Group
url = '/users/?exclude[]=groups.*&include[]=groups.name'
response = self.client.get(url)
self.assertEqual(
200,
response.status_code,
response.content.decode('utf-8'))
self.assertEqual(
{
'groups': [{'name': '0'}, {'name': '1'}],
'users': [{
'groups': [1, 2], 'id': 1, 'location': 1, 'name': '0'
}, {
'groups': [1, 2], 'id': 2, 'location': 1, 'name': '1'
}, {
'groups': [1, 2], 'id': 3, 'location': 2, 'name': '2'
}, {
'groups': [1, 2], 'id': 4, 'location': 3, 'name': '3'
}]
},
json.loads(response.content.decode('utf-8')))
def test_get_with_exclude_all_and_include_field(self):
with self.assertNumQueries(1):
url = '/users/?exclude[]=*&include[]=id'
response = self.client.get(url)
self.assertEqual(
200,
response.status_code,
response.content.decode('utf-8'))
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(
set(['id']),
set(data['users'][0].keys())
)
def test_get_with_exclude_all_and_include_relationship(self):
with self.assertNumQueries(2):
url = '/users/?exclude[]=*&include[]=groups.'
response = self.client.get(url)
self.assertEqual(
200,
response.status_code,
response.content.decode('utf-8'))
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(
set(['groups']),
set(data['users'][0].keys())
)
self.assertTrue('groups' in data)
def test_get_one_with_include(self):
with self.assertNumQueries(2):
# 2 queries: 1 for User, 1 for Group
response = self.client.get('/users/1/?include[]=groups.')
self.assertEqual(200, response.status_code)
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(len(data.get('groups', [])), 2)
def test_get_with_filter(self):
with self.assertNumQueries(1):
# verify that extra [] are stripped out of the key
response = self.client.get('/users/?filter{name}[]=1')
self.assertEqual(200, response.status_code)
self.assertEqual(
{
'users': [
{'id': 2, 'location': 1, 'name': '1'},
]
},
json.loads(response.content.decode('utf-8')))
def test_get_with_filter_no_match(self):
with self.assertNumQueries(1):
response = self.client.get('/users/?filter{name}[]=foo')
self.assertEqual(200, response.status_code)
self.assertEqual(
{'users': []},
json.loads(response.content.decode('utf-8')))
def test_get_with_filter_unicode_no_match(self):
with self.assertNumQueries(1):
response = self.client.get(
'/users/?filter{name}[]=%s' % UNICODE_URL_STRING
)
self.assertEqual(200, response.status_code)
self.assertEqual(
{'users': []},
json.loads(response.content.decode('utf-8')))
with self.assertNumQueries(1):
response = self.client.get(
six.u('/users/?filter{name}[]=%s') % UNICODE_STRING
)
self.assertEqual(200, response.status_code)
self.assertEqual(
{'users': []},
json.loads(response.content.decode('utf-8')))
def test_get_with_filter_unicode(self):
User.objects.create(
name=UNICODE_STRING,
last_name='Unicode'
)
with self.assertNumQueries(1):
response = self.client.get(
'/users/?filter{name}[]=%s' % UNICODE_URL_STRING
)
self.assertEqual(200, response.status_code)
self.assertEqual(
1,
len(
json.loads(
response.content.decode('utf-8')
)['users']
)
)
with self.assertNumQueries(1):
response = self.client.get(
six.u('/users/?filter{name}[]=%s') % UNICODE_STRING
)
self.assertEqual(200, response.status_code)
self.assertEqual(
1,
len(
json.loads(
response.content.decode('utf-8')
)['users']
)
)
def test_get_with_filter_in(self):
url = '/users/?filter{name.in}=1&filter{name.in}=2'
with self.assertNumQueries(1):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
self.assertEqual(
{
'users': [
{'id': 2, 'location': 1, 'name': '1'},
{'id': 3, 'location': 2, 'name': '2'},
]
},
json.loads(response.content.decode('utf-8')))
def test_get_with_complex_filter(self):
# same filter as the above case
f = {
".or": [{
"name": "1"
}, {
".and": [{
"name": "2"
}, {
"location": 2
}]
}]
}
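# An assumption about how dynamic-rest maps the `.or`/`.and` tree onto
# the ORM: the JSON above is roughly
#   Q(name='1') | (Q(name='2') & Q(location=2))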
f = quote(json.dumps(f))
url = f'/users/?filter{{}}={f}'
with self.assertNumQueries(1):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
self.assertEqual(
{
'users': [
{'id': 2, 'location': 1, 'name': '1'},
{'id': 3, 'location': 2, 'name': '2'},
]
},
json.loads(response.content.decode('utf-8')))
def test_get_with_filter_exclude(self):
url = '/users/?filter{-name}=1'
with self.assertNumQueries(1):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
self.assertEqual(
{
'users': [
{'id': 1, 'location': 1, 'name': '0'},
{'id': 3, 'location': 2, 'name': '2'},
{'id': 4, 'location': 3, 'name': '3'},
]
},
json.loads(response.content.decode('utf-8')))
def test_get_with_filter_relation_field(self):
url = '/users/?filter{location.name}=1'
with self.assertNumQueries(1):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
self.assertEqual(
{
'users': [
{'id': 3, 'location': 2, 'name': '2'},
]
},
json.loads(response.content.decode('utf-8')))
def test_get_with_filter_and_include_relationship(self):
url = '/users/?include[]=groups.&filter{groups|name}=1'
with self.assertNumQueries(2):
# 2 queries: 1 for User, 1 for Group
response = self.client.get(url)
self.assertEqual(200, response.status_code)
self.assertEqual(
{
'groups': [{'id': 2, 'name': '1'}],
'users': [
{'groups': [2], 'id': 1, 'location': 1, 'name': '0'},
{'groups': [2], 'id': 2, 'location': 1, 'name': '1'},
{'groups': [2], 'id': 3, 'location': 2, 'name': '2'},
{'groups': [2], 'id': 4, 'location': 3, 'name': '3'}
]
},
json.loads(response.content.decode('utf-8')))
def test_get_with_filter_and_source_rewrite(self):
""" Test filtering on fields where source is different """
url = '/locations/?filter{address}=here&include[]=address'
with self.assertNumQueries(1):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(len(data['locations']), 1)
def test_get_with_filter_and_query_injection(self):
""" Test viewset with query injection """
url = '/users/?name=1'
with self.assertNumQueries(1):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(len(data['users']), 1)
self.assertEqual(data['users'][0]['name'], '1')
def test_get_with_include_one_to_many(self):
""" Test o2m without related_name set. """
url = '/locations/?filter{id}=1&include[]=users'
with self.assertNumQueries(2):
# 2 queries: 1 for locations, 1 for location-users
response = self.client.get(url)
self.assertEqual(200, response.status_code)
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(len(data['locations']), 1)
self.assertEqual(len(data['locations'][0]['users']), 2)
def test_get_with_count_field(self):
url = '/locations/?filter{id}=1&include[]=users&include[]=user_count'
with self.assertNumQueries(2):
# 2 queries: 1 for locations, 1 for location-users
response = self.client.get(url)
self.assertEqual(200, response.status_code)
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(len(data['locations']), 1)
self.assertEqual(len(data['locations'][0]['users']), 2)
self.assertEqual(data['locations'][0]['user_count'], 2)
def test_get_with_queryset_injection(self):
url = '/users/?location=1'
with self.assertNumQueries(1):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(len(data['users']), 2)
def test_get_with_include_invalid(self):
for bad_data in ('name..', 'groups..name', 'foo', 'groups.foo'):
response = self.client.get('/users/?include[]=%s' % bad_data)
self.assertEqual(400, response.status_code)
def test_post(self):
data = {
'name': 'test',
'last_name': 'last',
'location': 1,
'display_name': 'test last' # Read only, should be ignored.
}
response = self.client.post(
'/users/', json.dumps(data), content_type='application/json')
self.assertEqual(201, response.status_code)
self.assertEqual(
json.loads(response.content.decode('utf-8')),
{
"user": {
"id": 5,
"name": "test",
"permissions": [],
"favorite_pet": None,
"favorite_pet_id": None,
"groups": [],
"location": 1,
"last_name": "last",
"display_name": None,
"thumbnail_url": None,
"number_of_cats": 1,
"profile": None,
"date_of_birth": None,
"is_dead": False,
}
})
def test_put(self):
group = Group.objects.create(name='test group')
data = {
'name': 'updated'
}
response = self.client.put(
'/groups/%s/' % group.pk,
json.dumps(data),
content_type='application/json')
self.assertEqual(200, response.status_code)
updated_group = Group.objects.get(pk=group.pk)
self.assertEqual(updated_group.name, data['name'])
def test_get_with_default_queryset(self):
url = '/groups/?filter{id}=1&include[]=loc1users'
response = self.client.get(url)
content = json.loads(response.content.decode('utf-8'))
self.assertEqual(200, response.status_code)
self.assertEqual(sorted([1, 2]), content['groups'][0]['loc1users'])
def test_get_with_default_lambda_queryset(self):
url = '/groups/?filter{id}=1&include[]=loc1usersLambda'
response = self.client.get(url)
content = json.loads(response.content.decode('utf-8'))
self.assertEqual(200, response.status_code)
self.assertEqual(
sorted([1, 2]),
content['groups'][0]['loc1usersLambda']
)
def test_get_with_default_queryset_filtered(self):
"""
Make sure filter can be added to relational fields with default
filters.
"""
url = (
'/groups/?filter{id}=1&include[]=loc1users'
'&filter{loc1users|id.in}=3'
'&filter{loc1users|id.in}=1'
)
response = self.client.get(url)
content = json.loads(response.content.decode('utf-8'))
self.assertEqual(200, response.status_code)
self.assertEqual([1], content['groups'][0]['loc1users'])
def test_get_with_filter_nested_rewrites(self):
"""
Test filter for members.id which needs to be rewritten as users.id
"""
user = User.objects.create(name='test user')
group = Group.objects.create(name='test group')
user.groups.add(group)
url = '/groups/?filter{members.id}=%s&include[]=members' % user.pk
response = self.client.get(url)
content = json.loads(response.content.decode('utf-8'))
self.assertEqual(200, response.status_code)
self.assertEqual(1, len(content['groups']))
self.assertEqual(group.pk, content['groups'][0]['id'])
url = (
'/users/?filter{groups.members.id}=%s'
'&include[]=groups.members' % user.pk
)
response = self.client.get(url)
self.assertEqual(200, response.status_code)
content = json.loads(response.content.decode('utf-8'))
self.assertEqual(1, len(content['users']))
def test_get_with_filter_nonexistent_field(self):
# Filtering on non-existent field should return 400
url = '/users/?filter{foobar}=1'
response = self.client.get(url)
self.assertEqual(400, response.status_code)
def test_get_with_filter_invalid_data(self):
User.objects.create(
name='test',
date_of_birth=datetime.datetime.utcnow()
)
url = '/users/?filter{date_of_birth.gt}=0&filter{date_of_birth.lt}=0'
response = self.client.get(url)
self.assertEqual(400, response.status_code)
if django.VERSION[0] > 2:
from rest_framework.exceptions import ErrorDetail
self.assertEqual(
[
ErrorDetail(
string='“0” value has an invalid date format. '
'It must be in YYYY-MM-DD format.',
code='invalid'
)
],
response.data
)
else:
self.assertEqual(
["'0' value has an invalid date format. "
"It must be in YYYY-MM-DD format."],
response.data
)
def test_get_with_filter_deferred(self):
# Filtering deferred field should work
grp = Group.objects.create(name='test group')
user = self.fixture.users[0]
user.groups.add(grp)
url = '/users/?filter{groups.id}=%s' % grp.pk
response = self.client.get(url)
self.assertEqual(200, response.status_code)
content = json.loads(response.content.decode('utf-8'))
self.assertEqual(1, len(content['users']))
self.assertEqual(user.pk, content['users'][0]['id'])
def test_get_with_filter_outer_joins(self):
"""
Test that the API does not return duplicate results
when the underlying SQL query would return dupes.
"""
user = User.objects.create(name='test')
group_a = Group.objects.create(name='A')
group_b = Group.objects.create(name='B')
user.groups.add(group_a, group_b)
response = self.client.get(
'/users/?filter{groups.name.in}=A&filter{groups.name.in}=B'
)
content = json.loads(response.content.decode('utf-8'))
self.assertEqual(1, len(content['users']), content)
def test_get_with_filter_isnull(self):
"""
Test for .isnull filters
"""
# User with location=None
User.objects.create(name='name', last_name='lname', location=None)
# Count Users where location is not null
expected = User.objects.filter(location__isnull=False).count()
url = '/users/?filter{location.isnull}=0'
response = self.client.get(url)
content = json.loads(response.content.decode('utf-8'))
self.assertEqual(200, response.status_code)
self.assertEqual(expected, len(content['users']))
url = '/users/?filter{location.isnull}=False'
response = self.client.get(url)
content = json.loads(response.content.decode('utf-8'))
self.assertEqual(200, response.status_code)
self.assertEqual(expected, len(content['users']))
url = '/users/?filter{location.isnull}=1'
response = self.client.get(url)
content = json.loads(response.content.decode('utf-8'))
self.assertEqual(200, response.status_code)
self.assertEqual(1, len(content['users']))
url = '/users/?filter{-location.isnull}=True'
response = self.client.get(url)
content = json.loads(response.content.decode('utf-8'))
self.assertEqual(200, response.status_code)
self.assertEqual(expected, len(content['users']))
def test_get_with_nested_source_fields(self):
u1 = User.objects.create(name='test1', last_name='user')
Profile.objects.create(
user=u1,
display_name='foo',
thumbnail_url='http://thumbnail.url')
url = (
'/users/?filter{id}=%s&include[]=display_name'
'&include[]=thumbnail_url' % u1.pk
)
response = self.client.get(url)
content = json.loads(response.content.decode('utf-8'))
self.assertEqual(200, response.status_code)
self.assertIsNotNone(content['users'][0]['display_name'])
self.assertIsNotNone(content['users'][0]['thumbnail_url'])
def test_get_with_nested_source_fields_count(self):
loc = Location.objects.create(name='test location')
u1 = User.objects.create(name='test1', last_name='user', location=loc)
Profile.objects.create(user=u1, display_name='foo')
u2 = User.objects.create(name='test2', last_name='user', location=loc)
Profile.objects.create(user=u2, display_name='moo')
# Test prefetching to pull profile.display_name into UserSerializer
url = (
'/users/?include[]=display_name'
'&include[]=thumbnail_url'
)
with self.assertNumQueries(2):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
# Test prefetching of user.location.name into ProfileSerializer
url = '/profiles/?include[]=user_location_name'
with self.assertNumQueries(3):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
content = json.loads(response.content.decode('utf-8'))
self.assertIsNotNone(content['profiles'][0]['user_location_name'])
def test_get_with_dynamic_method_field(self):
url = '/users/?include[]=number_of_cats'
with self.assertNumQueries(3):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
self.assertEqual({
'users': [{
'id': 1,
'location': 1,
'name': '0',
'number_of_cats': 1,
}, {
'id': 2,
'location': 1,
'name': '1',
'number_of_cats': 1,
}, {
'id': 3,
'location': 2,
'name': '2',
'number_of_cats': 1,
}, {
'id': 4,
'location': 3,
'name': '3',
'number_of_cats': 0,
}]
}, json.loads(response.content.decode('utf-8')))
def test_get_with_request_filters_and_requires(self):
"""
This tests conflicting external and internal prefetch requirements.
`location.cats` is an external requirement that points
to the `Location.cat_set` model relationship.
`user.number_of_cats` is an internal requirement that points
to the same relationship.
The prefetch tree produced by this call merges the two together
into a single prefetch:
{
'location': {
'cat_set': {}
}
}
"""
url = (
'/users/?'
'include[]=number_of_cats&'
'include[]=location.cats.&'
'filter{location.cats|name.icontains}=1'
)
with self.assertNumQueries(3):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
self.assertEqual({
'cats': [{
'id': 2,
'name': '1'
}],
'locations': [{
'name': '0',
'id': 1,
'cats': []
}, {
'name': '1',
'id': 2,
'cats': [2]
}, {
'name': '2',
'id': 3,
'cats': []
}],
'users': [{
'id': 1,
'location': 1,
'name': '0',
'number_of_cats': 0,
}, {
'id': 2,
'location': 1,
'name': '1',
'number_of_cats': 0,
}, {
'id': 3,
'location': 2,
'name': '2',
'number_of_cats': 1,
}, {
'id': 4,
'location': 3,
'name': '3',
'number_of_cats': 0,
}]
}, json.loads(response.content.decode('utf-8')))
def test_implicit_vs_explicit_prefetch(self):
"""
LocationSerializer has a built-in filter to hide Atlantis.
UserSerializer can explicitly include Location, and it can also
implicitly require Location through the `number_of_cats` field.
This test ensures that LocationSerializer.filter_queryset() is
being respected regardless of whether `User.location` is being
included implicitly or explicitly.
"""
atlantis = Location.objects.create(name='Atlantis')
atlantian = User.objects.create(
name='Atlantian',
last_name='Human',
location=atlantis
)
Cat.objects.create(
name='Gato',
home=atlantis,
backup_home=self.fixture.locations[0],
)
url = (
'/users/%s/?'
'include[]=number_of_cats&'
'include[]=location.'
) % atlantian.pk
response1 = self._get_json(url)
url = (
'/users/%s/?'
'include[]=number_of_cats&'
'exclude[]=location'
) % atlantian.pk
response2 = self._get_json(url)
# Atlantis is hidden, therefore its cats are also hidden
self.assertEqual(
response1['user']['number_of_cats'],
0
)
self.assertEqual(
response1['user']['number_of_cats'],
response2['user']['number_of_cats']
)
def test_boolean_filters_on_boolean_field(self):
# create one dead user
User.objects.create(name='Dead', last_name='Mort', is_dead=True)
# set up test specs
tests = {
True: ['true', 'True', '1', 'okies'],
False: ['false', 'False', '0', '']
}
# run through test scenarios
for expected_value, test_values in tests.items():
for test_value in test_values:
url = (
'/users/?include[]=is_dead&filter{is_dead}=%s' % test_value
)
data = self._get_json(url)
expected = set([expected_value])
actual = set([o['is_dead'] for o in data['users']])
self.assertEqual(
expected,
actual,
"Boolean filter '%s' failed. Expected=%s Actual=%s" % (
test_value,
expected,
actual,
)
)
def test_sort_relation_field(self):
url = '/users/?sort[]=location.name'
with self.assertNumQueries(1):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(
[1, 1, 2, 3],
[row['location'] for row in data['users']]
)
def test_sort_relation_field_reverse(self):
url = '/users/?sort[]=-location.name'
with self.assertNumQueries(1):
response = self.client.get(url)
self.assertEqual(200, response.status_code)
data = json.loads(response.content.decode('utf-8'))
self.assertEqual(
[3, 2, 1, 1],
[row['location'] for row in data['users']]
)
def test_sort_relation_field_many(self):
url = '/locations/?sort[]=friendly_cats.name'
response = self.client.get(url)
self.assertEqual(200, response.status_code)
data = json.loads(response.content.decode('utf-8'))
ids = [row['id'] for row in data['locations']]
# no duplicates
self.assertEqual(len(ids), len(set(ids)))
|
class TestUsersAPI(APITestCase):
def setUp(self):
pass
def _get_json(self, url, expected_status=200):
pass
def test_get(self):
pass
def test_get_with_trailing_slash_does_not_redirect(self):
pass
def test_get_with_include(self):
pass
def test_get_with_exclude(self):
pass
def test_get_with_nested_has_one_sideloading_disabled(self):
pass
def test_get_with_nested_has_one(self):
pass
def test_get_with_nested_has_many(self):
pass
def test_get_with_nested_include(self):
pass
def test_get_with_nested_exclude(self):
pass
def test_get_with_nested_exclude_all(self):
pass
def test_get_with_exclude_all_and_include_field(self):
pass
def test_get_with_exclude_all_and_include_relationship(self):
pass
def test_get_one_with_include(self):
pass
def test_get_with_filter(self):
pass
def test_get_with_filter_no_match(self):
pass
def test_get_with_filter_unicode_no_match(self):
pass
def test_get_with_filter_unicode(self):
pass
def test_get_with_filter_in(self):
pass
def test_get_with_complex_filter(self):
pass
def test_get_with_filter_exclude(self):
pass
def test_get_with_filter_relation_field(self):
pass
def test_get_with_filter_and_include_relationship(self):
pass
def test_get_with_filter_and_source_rewrite(self):
''' Test filtering on fields where source is different '''
pass
def test_get_with_filter_and_query_injection(self):
''' Test viewset with query injection '''
pass
def test_get_with_include_one_to_many(self):
''' Test o2m without related_name set. '''
pass
def test_get_with_count_field(self):
pass
def test_get_with_queryset_injection(self):
pass
def test_get_with_include_invalid(self):
pass
def test_post(self):
pass
def test_put(self):
pass
def test_get_with_default_queryset(self):
pass
def test_get_with_default_lambda_queryset(self):
pass
def test_get_with_default_queryset_filtered(self):
'''
Make sure filter can be added to relational fields with default
filters.
'''
pass
def test_get_with_filter_nested_rewrites(self):
'''
Test filter for members.id which needs to be rewritten as users.id
'''
pass
def test_get_with_filter_nonexistent_field(self):
pass
def test_get_with_filter_invalid_data(self):
pass
def test_get_with_filter_deferred(self):
pass
def test_get_with_filter_outer_joins(self):
'''
Test that the API does not return duplicate results
when the underlying SQL query would return dupes.
'''
pass
def test_get_with_filter_isnull(self):
'''
Test for .isnull filters
'''
pass
def test_get_with_nested_source_fields(self):
pass
def test_get_with_nested_source_fields_count(self):
pass
def test_get_with_dynamic_method_field(self):
pass
def test_get_with_request_filters_and_requires(self):
'''
This tests conflicting external and internal prefetch requirements.
`location.cats` is an external requirement that points
to the `Location.cat_set` model relationship.
`user.number_of_cats` is an internal requirement that points
to the same relationship.
The prefetch tree produced by this call merges the two together
into a single prefetch:
{
'location': {
'cat_set': {}
}
}
'''
pass
def test_implicit_vs_explicit_prefetch(self):
'''
LocationSerializer has a built-in filter to hide Atlantis.
UserSerializer can explicitly include Location, and it can also
implicitly require Location through the `number_of_cats` field.
This test ensures that LocationSerializer.filter_queryset() is
being respected regardless of whether `User.location` is being
included implicitly or explicitly.
'''
pass
def test_boolean_filters_on_boolean_field(self):
pass
def test_sort_relation_field(self):
pass
def test_sort_relation_field_reverse(self):
pass
def test_sort_relation_field_many(self):
pass
| 51 | 9 | 18 | 1 | 16 | 1 | 1 | 0.08 | 1 | 7 | 5 | 0 | 50 | 2 | 50 | 50 | 953 | 76 | 814 | 181 | 762 | 64 | 366 | 181 | 314 | 3 | 1 | 2 | 54 |
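A minimal sketch of the query-parameter pattern these tests exercise, assuming a project wired up with the same /users/ route and fixtures as above; the parameter names (include[], exclude[], filter{...}, sort[]) are taken from the tests, everything else is illustrative:

import json
from rest_framework.test import APIClient

client = APIClient()
# Sideload groups while trimming user fields, as in the include/exclude tests.
response = client.get('/users/?include[]=groups.&exclude[]=last_name')
payload = json.loads(response.content.decode('utf-8'))
# Relational filters and sorting compose the same way.
response = client.get('/users/?filter{location.name}=1&sort[]=-name')
assert response.status_code == 200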
4,372 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/test_api.py
|
tests.test_api.TestUserLocationsAPI
|
class TestUserLocationsAPI(APITestCase):
"""
Test API on serializer with embedded fields.
"""
def setUp(self):
self.fixture = create_fixture()
def test_get_embedded(self):
with self.assertNumQueries(3):
url = '/v1/user_locations/1/'
response = self.client.get(url)
self.assertEqual(200, response.status_code)
content = json.loads(response.content.decode('utf-8'))
groups = content['user_location']['groups']
location = content['user_location']['location']
self.assertEqual(content['user_location']['location']['name'], '0')
self.assertTrue(isinstance(groups[0], dict))
self.assertTrue(isinstance(location, dict))
def test_get_embedded_force_sideloading(self):
with self.assertNumQueries(3):
url = '/v1/user_locations/1/?sideloading=true'
response = self.client.get(url)
self.assertEqual(200, response.status_code)
content = json.loads(response.content.decode('utf-8'))
groups = content['user_location']['groups']
location = content['user_location']['location']
self.assertEqual(content['locations'][0]['name'], '0')
self.assertFalse(isinstance(groups[0], dict))
self.assertFalse(isinstance(location, dict))
|
class TestUserLocationsAPI(APITestCase):
'''
Test API on serializer with embedded fields.
'''
def setUp(self):
pass
def test_get_embedded(self):
pass
def test_get_embedded_force_sideloading(self):
pass
| 4 | 1 | 9 | 1 | 8 | 0 | 1 | 0.12 | 1 | 1 | 0 | 0 | 3 | 1 | 3 | 3 | 34 | 6 | 25 | 15 | 21 | 3 | 25 | 15 | 21 | 1 | 1 | 1 | 3 |
4,373 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/test_api.py
|
tests.test_api.TestRelationsAPI
|
class TestRelationsAPI(APITestCase):
"""Test auto-generated relation endpoints."""
def setUp(self):
self.fixture = create_fixture()
def test_generated_relation_fields(self):
# Links for single-relation fields are currently disabled.
# See WithDynamicSerializerMixin.get_link_fields()
r = self.client.get('/users/1/location/')
self.assertEqual(404, r.status_code)
r = self.client.get('/users/1/permissions/')
self.assertFalse('groups' in r.data['permissions'][0])
self.assertEqual(200, r.status_code)
r = self.client.get('/users/1/groups/')
self.assertEqual(200, r.status_code)
# Not a relation field
r = self.client.get('/users/1/name/')
self.assertEqual(404, r.status_code)
def test_location_users_relations_identical_to_sideload(self):
r1 = self.client.get('/locations/1/?include[]=users.')
self.assertEqual(200, r1.status_code)
r1_data = json.loads(r1.content.decode('utf-8'))
r2 = self.client.get('/locations/1/users/')
self.assertEqual(200, r2.status_code)
r2_data = json.loads(r2.content.decode('utf-8'))
self.assertEqual(r2_data['users'], r1_data['users'])
def test_relation_includes(self):
r = self.client.get('/locations/1/users/?include[]=location.')
self.assertEqual(200, r.status_code)
content = json.loads(r.content.decode('utf-8'))
self.assertTrue('locations' in content)
def test_relation_includes_context(self):
r = self.client.get('/locations/1/users/?include[]=number_of_cats')
self.assertEqual(200, r.status_code)
# Note: the DynamicMethodField for `number_of_cats` checks to
# ensure context is set, and raises if not. If the request
# succeeded and `number_of_cats` is returned, it means that check
# passed.
self.assertTrue('number_of_cats' in r.data['users'][0])
def test_relation_excludes(self):
r = self.client.get('/locations/1/users/?exclude[]=location')
self.assertEqual(200, r.status_code)
content = json.loads(r.content.decode('utf-8'))
self.assertFalse('location' in content['users'][0])
def test_relation_filter_returns_error(self):
r = self.client.get('/locations/1/users/?filter{name}=foo')
self.assertEqual(400, r.status_code)
|
class TestRelationsAPI(APITestCase):
'''Test auto-generated relation endpoints.'''
def setUp(self):
pass
def test_generated_relation_fields(self):
pass
def test_location_users_relations_identical_to_sideload(self):
pass
def test_relation_includes(self):
pass
def test_relation_includes_context(self):
pass
def test_relation_excludes(self):
pass
def test_relation_filter_returns_error(self):
pass
| 8 | 1 | 7 | 1 | 5 | 1 | 1 | 0.21 | 1 | 0 | 0 | 0 | 7 | 1 | 7 | 7 | 62 | 16 | 38 | 20 | 30 | 8 | 38 | 20 | 30 | 1 | 1 | 0 | 7 |
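A sketch of the equivalence asserted in test_location_users_relations_identical_to_sideload above, assuming the same fixtures; both requests should yield the same users payload:

from rest_framework.test import APIClient

client = APIClient()
r1 = client.get('/locations/1/?include[]=users.')  # sideloaded form
r2 = client.get('/locations/1/users/')             # generated relation endpoint
assert r1.json()['users'] == r2.json()['users']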
4,374 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/viewsets.py
|
tests.viewsets.LocationViewSet
|
class LocationViewSet(DynamicModelViewSet):
features = (
DynamicModelViewSet.INCLUDE, DynamicModelViewSet.EXCLUDE,
DynamicModelViewSet.FILTER, DynamicModelViewSet.SORT,
DynamicModelViewSet.DEBUG, DynamicModelViewSet.SIDELOADING,
)
model = Location
serializer_class = LocationSerializer
queryset = Location.objects.all()
|
class LocationViewSet(DynamicModelViewSet):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 9 | 0 | 9 | 5 | 8 | 0 | 5 | 5 | 4 | 0 | 3 | 0 | 0 |
4,375 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/viewsets.py
|
tests.viewsets.PermissionViewSet
|
class PermissionViewSet(DynamicModelViewSet):
serializer_class = PermissionSerializer
queryset = Permission.objects.all()
|
class PermissionViewSet(DynamicModelViewSet):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 3 | 0 | 0 |
4,376 |
AltSchool/dynamic-rest
|
AltSchool_dynamic-rest/tests/viewsets.py
|
tests.viewsets.ProfileViewSet
|
class ProfileViewSet(DynamicModelViewSet):
features = (
DynamicModelViewSet.EXCLUDE,
DynamicModelViewSet.FILTER,
DynamicModelViewSet.INCLUDE,
DynamicModelViewSet.SORT,
)
model = Profile
serializer_class = ProfileSerializer
queryset = Profile.objects.all()
|
class ProfileViewSet(DynamicModelViewSet):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 10 | 0 | 10 | 5 | 9 | 0 | 5 | 5 | 4 | 0 | 3 | 0 | 0 |
4,377 |
Aluriak/ACCC
|
Aluriak_ACCC/accc/pycompiler/pycompiler.py
|
accc.pycompiler.pycompiler.PyCompiler
|
class PyCompiler(Compiler):
"""
Compiler specialized in Python code.
Provides a simpler API (the Python language specification is sent
automatically) and new tools (for calling the compiled code)
"""
# CONSTRUCTOR #################################################################
def __init__(self, alphabet, comparables, predicats, actions, operators):
""""""
super().__init__(alphabet, python_spec, comparables, predicats, actions, operators)
self.last_python_code = None
# PUBLIC METHODS ##############################################################
def compile(self, source_code, post_treatment=''.join, source='<string>', target='exec'):
"""Return ready-to-exec object code of compilation.
Uses Python's built-in compile() function.
Call exec() on the returned object to execute it."""
self.last_python_code = super().compile(source_code, post_treatment)
return PyCompiler.executable(self.last_python_code, source, target)
# PRIVATE METHODS #############################################################
# CLASS METHODS ###############################################################
@staticmethod
def executable(python_code, source='<string>', target='exec'):
return compile(python_code, source, target)
|
class PyCompiler(Compiler):
'''
Compiler specialized in Python code.
Provides a simpler API (the Python language specification is sent
automatically) and new tools (for calling the compiled code)
'''
def __init__(self, alphabet, comparables, predicats, actions, operators):
''''''
pass
def compile(self, source_code, post_treatment=''.join, source='<string>', target='exec'):
'''Return ready-to-exec object code of compilation.
Uses Python's built-in compile() function.
Call exec() on the returned object to execute it.'''
pass
@staticmethod
def executable(python_code, source='<string>', target='exec'):
pass
| 5 | 3 | 4 | 0 | 3 | 1 | 1 | 1.3 | 1 | 1 | 0 | 0 | 2 | 1 | 3 | 3 | 25 | 2 | 10 | 6 | 5 | 13 | 9 | 5 | 5 | 1 | 1 | 0 | 3 |
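A minimal usage sketch, assuming the accc package is importable as laid out in the file path above; the vocabulary tuples are hypothetical placeholders (in accc these are snippets of target-language code):

from accc.pycompiler.pycompiler import PyCompiler

compiler = PyCompiler(
    alphabet='01',
    comparables=('1', '2'),     # hypothetical vocabulary
    predicats=('True',),        # hypothetical vocabulary
    actions=('print("hit")',),  # hypothetical vocabulary
    operators=('==', '!='),
)
code = compiler.compile('0110100101001101011010')  # any string over the alphabet
exec(code)  # runs the generated (possibly empty) Python snippet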
4,378 |
Aluriak/ACCC
|
Aluriak_ACCC/accc/dnacompiler/dnacompiler.py
|
accc.dnacompiler.dnacompiler.DNACompiler
|
class DNACompiler(Compiler):
"""
Compiler specialized in DNA: vocabulary is 'ATGC'.
"""
def __init__(self, target_language_spec, comparables, predicats, actions, operators):
""""""
super().__init__('ATGC', target_language_spec, comparables, predicats, actions, operators)
|
class DNACompiler(Compiler):
'''
Compiler specialized in DNA: vocabulary is 'ATGC'.
'''
def __init__(self, target_language_spec, comparables, predicats, actions, operators):
''''''
pass
| 2 | 2 | 3 | 0 | 2 | 1 | 1 | 1.33 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 7 | 0 | 3 | 2 | 1 | 4 | 3 | 2 | 1 | 1 | 1 | 0 | 1 |
4,379 |
Aluriak/ACCC
|
Aluriak_ACCC/tests/dnacompilation.py
|
dnacompilation.Unit
|
class Unit():
def __init__(self, dc, *, dna=None, len_dna=DEFAULT_DNA_LEN):
if dna is None:
self.dna = ''.join((random.choice(NUCLEOTIDS) for _ in range(len_dna)))
else:
self.dna = dna
self.python_code = dc.compile(self.dna)
self.code = compile(self.python_code, '<string>', 'exec')
self.fitness = 0
# CONSTRUCTOR #################################################################
# PUBLIC METHODS ##############################################################
def exec_fun(self):
#self.fitness = 0
exec(self.code)
# PRIVATE METHODS #############################################################
# CLASS METHODS ###############################################################
@staticmethod
def from_pop(pop, parent_count, compiler, count=1):
"""Create a list of count Unit, by choosing parent_count parents in pop.
High fitness improves probability of reproduction."""
#from collections import defaultdict
#stats = defaultdict(int)
probs = tuple((u,u.abs_fitness) for u in pop)
max_prob = sum(u.abs_fitness for u in pop)
# get one parent
def oneParent():
"""Return a unit, taked in pop. Higher is the fit,
higher is the chance to be selectionned."""
if max_prob == 0: return pop[0]
parent = None
cur_prob = random.randrange(0, max_prob)
for unit, fit in probs:
cur_prob -= fit
if cur_prob <= 0:
parent = unit
break
#stats[parent] += 1
assert(parent is not None)
return parent
# get count new unit
new_units = []
for _ in range(count):
parents = (oneParent(), oneParent())
new_units.append(
Unit.from_parents(parents, compiler)
)
#print('\t'.join(str(u.abs_fitness) for u in pop))
#print('\t'.join(str(stats[u]) for u in pop))
#print('')
return new_units
@staticmethod
def from_parents(parents, compiler):
"""Create a new Unit, result of given parent crossing."""
#new_dna = sorted(parents, key=lambda x: x.fitness, reverse=True)[0].dna
parent = random.choice(parents)
new_dna = ''
for index in range(min(len(p.dna) for p in parents)):
if random.randint(0, 100) == 0:
parent = random.choice(parents)
new_dna += parent.dna[index]
return Unit.mutated(compiler, new_dna)
@staticmethod
def mutated(compiler, dna):
"""Return a mutated version of given string"""
# Create DNA
src = dna
if random.random() < MUTATION_CHANCE:
src = ''
for nuc in dna:
if random.random() < MUTATION_RATE:
src += random.choice(NUCLEOTIDS)
else:
src += nuc
# Create unit
return Unit(compiler, dna=src)
# PREDICATS ###################################################################
# ACCESSORS ###################################################################
@property
def dna_len(self):
return len(self.dna)
@property
def abs_fitness(self):
return abs(self.fitness)
# CONVERSION ##################################################################
def __str__(self):
return str(self.fitness)
def __repr__(self):
return str(self)
|
class Unit():
def __init__(self, dc, *, dna=None, len_dna=DEFAULT_DNA_LEN):
pass
def exec_fun(self):
pass
@staticmethod
def from_pop(pop, parent_count, compiler, count=1):
'''Create a list of count Unit, by choosing parent_count parents in pop.
High fitness improves probability of reproduction.'''
pass
def oneParent():
'''Return a unit taken from pop. The higher the fitness,
the higher the chance of being selected.'''
pass
@staticmethod
def from_parents(parents, compiler):
'''Create a new Unit, result of given parent crossing.'''
pass
@staticmethod
def mutated(compiler, dna):
'''Return a mutated version of given string'''
pass
@property
def dna_len(self):
pass
@property
def abs_fitness(self):
pass
def __str__(self):
pass
def __repr__(self):
pass
| 16 | 4 | 9 | 1 | 7 | 2 | 2 | 0.4 | 0 | 3 | 0 | 0 | 6 | 4 | 9 | 9 | 110 | 22 | 63 | 33 | 47 | 25 | 55 | 28 | 44 | 4 | 0 | 3 | 20 |
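A sketch of the evolutionary loop these helpers support; the compiler setup, the spec import path, and the scoring are assumptions (fitness is normally assigned by the harness after each unit runs):

import random
from accc.dnacompiler.dnacompiler import DNACompiler
from accc.langspec import python_spec  # assumed import path for the language spec

dc = DNACompiler(python_spec, ('1', '2'), ('True',), ('pass',), ('==',))
pop = [Unit(dc) for _ in range(20)]
for generation in range(100):
    for unit in pop:
        unit.exec_fun()                       # run the unit's compiled code
        unit.fitness = random.randint(-5, 5)  # placeholder scoring
    pop = Unit.from_pop(pop, parent_count=2, compiler=dc, count=len(pop))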
4,380 |
Aluriak/ACCC
|
Aluriak_ACCC/accc/compiler/compiler.py
|
accc.compiler.compiler.Compiler
|
class Compiler():
"""
Compiler of code written with any vocabulary ('01', 'ATGC', 'whatevr',…).
A source code is an ordered list of vocabulary elements
('10011010000101', 'AGGATGATCAGATA', 'wtrvwhttera'…).
Whatever the given source_code, it's always compilable (but can return empty object code).
Also, it can be totally illogical (running the same test many times, doing nothing,…).
The source code is read entirely to determine the STRUCTURE,
and then re-read to determine the effective VALUES.
The STRUCTURE defines:
- logic of the code
- lexem types that will be used
The VALUES defines:
- what are the exact value of each lexem
- values of integers used as function parameters
Example of prettified STRUCTURE:
if C:
A
if C:
A
A
if P and P:
A
A
A
if P:
A
VALUES describe which lexem is effectively used for each
word: C, A or P (condition, action, predicate).
NB: D is the char that indicates an indent-level decrease.
The values vocabulary dictionary, given at compiler creation, defines the lexems:
vocabulary_values = {
LEXEM_TYPE_COMPARISON: ('parameter1', 'parameter2', 'parameter3', 'parameter4'),
LEXEM_TYPE_PREDICAT : ('have_that', 'is_this', 'have_many_things', 'know_that'),
LEXEM_TYPE_ACTION : ('do_that', 'say_this'),
LEXEM_TYPE_OPERATOR : ('>', '==', '<', 'is', '!='),
}
Then, compiled code can be something like:
if parameter1 == parameter2 and have_that:
do_that
if have_that:
say_this
do_that
if know_that and have_many_things:
do_that
say_this
do_that
if have_many_things:
say_this
Modification of provided lexems types is not supported at this time.
"""
# CONSTRUCTOR #################################################################
def __init__(self, alphabet, target_language_spec, comparables, predicats, actions, operators,
neutral_value_condition='True', neutral_value_action='pass'):
"""
Wait for alphabet ('01', 'ATGC',…), language specification and vocabularies of
structure and values parts.
The neutral value is used when no value is found.
Set it to something that passes in all cases.
NB: a short source code leads to lots of neutral values.
"""
self.alphabet = alphabet
self.voc_structure = ALL_LEXEMS
self.target_lang_spec = target_language_spec()
self.voc_values = {
LEXEM_TYPE_COMPARISON: comparables,
LEXEM_TYPE_PREDICAT: predicats,
LEXEM_TYPE_ACTION: actions,
LEXEM_TYPE_OPERATOR: operators,
}
self.neutral_value_action = neutral_value_action
self.neutral_value_condition = neutral_value_condition
# verifications
assert (issubclass(neutral_value_action.__class__, str)
and issubclass(neutral_value_condition.__class__, str)
)
# prepare tables of words->lexems
self._initialize_tables()
# PUBLIC METHODS ###############################################################
def compile(self, source_code, post_treatment=''.join):
"""Compile given source code.
Return object code, modified by given post treatment.
"""
# read structure
structure = self._structure(source_code)
values = self._struct_to_values(structure, source_code)
# create object code, translated in targeted language
obj_code = langspec.translated(
structure, values,
self.target_lang_spec
)
# apply post treatment and return
return obj_code if post_treatment is None else post_treatment(obj_code)
# PRIVATE METHODS ##############################################################
def _initialize_tables(self):
"""Create tables for structure and values, word->vocabulary"""
# structure table
self.table_struct, self.idnt_struct_size = self._create_struct_table()
# values table
self.table_values, self.idnt_values_size = self._create_values_table()
# debug print
# print(self.table_struct)
# print(self.idnt_struct_size)
# print(self.table_values)
# print(self.idnt_values_size)
def _structure(self, source_code):
"""return structure in ACDP format."""
# define cutter as a per block reader
def cutter(seq, block_size):
for index in range(0, len(seq), block_size):
lexem = seq[index:index+block_size]
if len(lexem) == block_size:
yield self.table_struct[seq[index:index+block_size]]
return tuple(cutter(source_code, self.idnt_struct_size))
def _next_lexem(self, lexem_type, source_code, source_code_size):
"""Return next readable lexem of given type in source_code.
If no value can be found, the neutral_value will be used"""
# define reader as a lexem extractor
def reader(seq, block_size):
identificator = ''
for char in source_code:
if len(identificator) == self.idnt_values_size[lexem_type]:
yield self.table_values[lexem_type][identificator]
identificator = ''
identificator += char
lexem_reader = reader(source_code, self.idnt_values_size)
lexem = None
time_out = 0
while lexem is None and time_out < 2*source_code_size:
lexem = next(lexem_reader)
time_out += 1
# here we have found a lexem
return lexem
def _next_condition_lexems(self, source_code, source_code_size):
"""Return condition lexem readed in source_code"""
# find three lexems
lexems = tuple((
self._next_lexem(LEXEM_TYPE_COMPARISON,
source_code, source_code_size),
self._next_lexem(LEXEM_TYPE_OPERATOR,
source_code, source_code_size),
self._next_lexem(LEXEM_TYPE_COMPARISON,
source_code, source_code_size)
))
# verify integrity
if None in lexems: # one of the condition lexem was not found in source code
return None
else: # all lexems are valid
return ' '.join(lexems)
@lru_cache(maxsize=100)
def _string_to_int(self, s):
"""Read an integer in s, in Little Indian. """
base = len(self.alphabet)
return sum((self._letter_to_int(l) * base**lsb
for lsb, l in enumerate(s)
))
@lru_cache(maxsize=None)
def _letter_to_int(self, l):
return self.alphabet.index(l)
# source code is potentially largely variable on length
@lru_cache(maxsize=127)
def _integer_size_for(self, source_code_size):
"""Find and return the optimal integer size.
A perfect integer can address all indexes of
a string of size source_code_size.
"""
return ceil(log(source_code_size, len(self.alphabet)))
def _struct_to_values(self, structure, source_code):
"""Return list of values readed in source_code,
according to given structure.
"""
# iterate on source code until all values are finded
# if a value is not foundable,
# (ie its identificator is not in source code)
# it will be replaced by associated neutral value
iter_source_code = itertools.cycle(source_code)
values = []
for lexem_type in (l for l in structure if l != 'D'):
if lexem_type == LEXEM_TYPE_CONDITION:
new_value = self._next_condition_lexems(
iter_source_code, len(source_code)
)
else:
new_value = self._next_lexem(
lexem_type, iter_source_code, len(source_code)
)
# if values is unvalid:
# association with the right neutral value
if new_value is None:
if lexem_type in (LEXEM_TYPE_PREDICAT, LEXEM_TYPE_CONDITION):
new_value = self.neutral_value_condition
else:
new_value = self.neutral_value_action
values.append(new_value)
return values
# TABLE METHODS ################################################################
def _create_struct_table(self):
"""Create table identificator->vocabulary,
and return it with size of an identificator"""
len_alph = len(self.alphabet)
len_vocb = len(self.voc_structure)
identificator_size = ceil(log(len_vocb, len_alph))
# create list of lexems
def num2alph(x, n): return self.alphabet[(x // len_alph**n) % len_alph]
identificators = [[str(num2alph(x, n))
for n in range(identificator_size)
]
for x in range(len_vocb)
]
# initialize table and iterable
identificators_table = {}
zip_id_voc = zip_longest(
identificators, self.voc_structure,
fillvalue=None
)
# create dict identificator:word
for idt, word in zip_id_voc:
identificators_table[''.join(idt)] = word
return identificators_table, identificator_size
def _create_values_table(self):
"""Create table lexem_type->{identificator->vocabulary},
and return it with sizes of an identificator as lexem_type->identificator_size"""
# number of existing character, and returned dicts
len_alph = len(self.alphabet)
identificators_table = {k: {} for k in self.voc_values.keys()}
identificators_sizes = {k: -1 for k in self.voc_values.keys()}
for lexem_type, vocabulary in self.voc_values.items():
# find number of different values that can be found,
# and size of an identificator.
len_vocb = len(vocabulary)
identificators_sizes[lexem_type] = ceil(log(len_vocb, len_alph))
# create list of possible identificators
def num2alph(x, n): return self.alphabet[(
x // len_alph**n) % len_alph]
identificators = [[str(num2alph(x, n))
for n in range(identificators_sizes[lexem_type])
] # this list is an identificator
for x in range(len_alph**identificators_sizes[lexem_type])
] # this one is a list of identificator
# initialize iterable
zip_id_voc = zip_longest(
identificators, vocabulary,
fillvalue=None
)
# create dict {identificator:word}
for idt, voc in zip_id_voc:
identificators_table[lexem_type][''.join(idt)] = voc
# return all
return identificators_table, identificators_sizes
|
class Compiler():
'''
Compiler of code written with any vocabulary ('01', 'ATGC', 'whatevr',…).
A source code is an ordered list of vocabulary elements
('10011010000101', 'AGGATGATCAGATA', 'wtrvwhttera'…).
Whatever the given source_code, it's always compilable (but can return empty object code).
Also, it can be totally illogical (running the same test many times, doing nothing,…).
The source code is read entirely to determine the STRUCTURE,
and then re-read to determine the effective VALUES.
The STRUCTURE defines:
- logic of the code
- lexem types that will be used
The VALUES defines:
- what are the exact value of each lexem
- values of integers used as function parameters
Example of prettified STRUCTURE:
if C:
A
if C:
A
A
if P and P:
A
A
A
if P:
A
VALUES describe which lexem is effectively used for each
word: C, A or P (condition, action, predicate).
NB: D is the char that indicates an indent-level decrease.
The values vocabulary dictionary, given at compiler creation, defines the lexems:
vocabulary_values = {
LEXEM_TYPE_COMPARISON: ('parameter1', 'parameter2', 'parameter3', 'parameter4'),
LEXEM_TYPE_PREDICAT : ('have_that', 'is_this', 'have_many_things', 'know_that'),
LEXEM_TYPE_ACTION : ('do_that', 'say_this'),
LEXEM_TYPE_OPERATOR : ('>', '==', '<', 'is', '!='),
}
Then, compiled code can be something like:
if parameter1 == parameter2 and have_that:
do_that
if have_that:
say_this
do_that
if know_that and have_many_things:
do_that
say_this
do_that
if have_many_things:
say_this
Modification of provided lexems types is not supported at this time.
'''
def __init__(self, alphabet, target_language_spec, comparables, predicats, actions, operators,
neutral_value_condition='True', neutral_value_action='pass'):
'''
Wait for alphabet ('01', 'ATGC',…), language specification and vocabularies of
structure and values parts.
The neutral value is used when no value is found.
Set it to something that passes in all cases.
NB: a short source code leads to lots of neutral values.
'''
pass
def compile(self, source_code, post_treatment=''.join):
'''Compile given source code.
Return object code, modified by given post treatment.
'''
pass
def _initialize_tables(self):
'''Create tables for structure and values, word->vocabulary'''
pass
def _structure(self, source_code):
'''return structure in ACDP format.'''
pass
def cutter(seq, block_size):
pass
def _next_lexem(self, lexem_type, source_code, source_code_size):
'''Return next readable lexem of given type in source_code.
If no value can be found, the neutral_value will be used'''
pass
def reader(seq, block_size):
pass
def _next_condition_lexems(self, source_code, source_code_size):
'''Return condition lexem readed in source_code'''
pass
@lru_cache(maxsize=100)
def _string_to_int(self, s):
'''Read an integer in s, in little-endian order.'''
pass
@lru_cache(maxsize=None)
def _letter_to_int(self, l):
pass
@lru_cache(maxsize=127)
def _integer_size_for(self, source_code_size):
'''Find and return the optimal integer size.
A perfect integer can address all indexes of
a string of size source_code_size.
'''
pass
def _struct_to_values(self, structure, source_code):
'''Return list of values readed in source_code,
according to given structure.
'''
pass
def _create_struct_table(self):
'''Create table identificator->vocabulary,
and return it with size of an identificator'''
pass
def num2alph(x, n):
pass
def _create_values_table(self):
'''Create table lexem_type->{identificator->vocabulary},
and return it with sizes of an identificator as lexem_type->identificator_size'''
pass
def num2alph(x, n):
pass
| 20 | 12 | 14 | 0 | 10 | 4 | 2 | 0.92 | 0 | 5 | 0 | 0 | 12 | 10 | 12 | 12 | 282 | 38 | 130 | 60 | 111 | 119 | 89 | 56 | 74 | 5 | 0 | 3 | 28 |
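A worked example of the identificator sizing used by _create_struct_table() above: with the 'ATGC' alphabet (base 4) and, say, 5 structure lexems, ceil(log(5, 4)) = 2, so each structure word gets a 2-nucleotide identificator (16 slots, 11 of them unused):

from math import ceil, log

len_alph = len('ATGC')  # 4 symbols
len_vocb = 5            # hypothetical structure vocabulary size
identificator_size = ceil(log(len_vocb, len_alph))
assert identificator_size == 2  # 4**1 = 4 < 5 <= 16 = 4**2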
4,381 |
Aluriak/bubble-tools
|
Aluriak_bubble-tools/bubbletools/bbltree.py
|
bubbletools.bbltree.BubbleTree
|
class BubbleTree:
"""Model of a power graph, that can eventually be oriented.
"""
def __init__(self, edges: dict, inclusions: dict, roots: frozenset,
oriented: bool = False, symmetric_edges: bool = False):
self._edges, self._inclusions = dict(edges), dict(inclusions)
self._roots = frozenset(roots)
self._oriented = bool(oriented)
self._edge_reduction = None # computed on time
self.symmetric_edges = bool(symmetric_edges)
def compute_edge_reduction(self) -> float:
"""Compute the edge reduction. Costly computation"""
nb_init_edge = self.init_edge_number()
nb_poweredge = self.edge_number()
return (nb_init_edge - nb_poweredge) / (nb_init_edge)
def init_edge_number(self) -> int:
"""Return the number of edges present in the non-compressed graph"""
return len(frozenset(frozenset(edge) for edge in self.initial_edges()))
def initial_edges(self) -> iter:
"""Yield edges in the initial (uncompressed) graphs. Possible doublons."""
def nodes_in(n): return ([n] if self.is_node(n) else self.nodes_in(n))
for node, succs in self.edges.items():
twos = tuple(two for succ in succs for two in nodes_in(succ))
for one in nodes_in(node):
for two in twos:
yield one, two
@property
def oriented(self) -> bool:
return self._oriented
@property
def edges(self) -> dict:
return self._edges
@property
def inclusions(self) -> dict:
return self._inclusions
@property
def roots(self) -> frozenset:
return self._roots
@property
def edge_reduction(self) -> int:
if self._edge_reduction is None:
self._edge_reduction = self.compute_edge_reduction()
return self._edge_reduction
def connected_components(self) -> (dict, dict):
"""Return for one root of each connected component all
the linked objects, and the mapping linking a connected component
root with the roots that it contains."""
inclusions = utils.completed_graph(
self.inclusions) # allow bottom-up movement
edges = utils.completed_graph(
self.edges) if self.oriented else self.edges
cc = {} # maps cc root with nodes in the cc
subroots = defaultdict(set) # maps cc root with other roots of the cc
walked_roots = set() # all roots that have been walked already
for root in self.roots:
if root in walked_roots:
continue # this cc have been done already
# walk in the graph starting at root
cc[root] = set([root])
walked = cc[root]
stack = list(edges.get(root, ())) + list(inclusions.get(root, ()))
while len(stack) > 0:
*stack, curr = stack
walked.add(curr)
if curr in self.roots: # don't do the walk for already found roots
walked_roots.add(curr)
subroots[root].add(curr)
for succ in it.chain(edges.get(curr, ()), inclusions.get(curr, ())):
if succ not in walked:
stack.append(succ)
return cc, dict(subroots)
def assert_powernode(self, name: str) -> None or ValueError:
"""Do nothing if given name refers to a powernode in given graph.
Raise a ValueError in any other case.
"""
if name not in self.inclusions:
raise ValueError("Powernode '{}' does not exists.".format(name))
if self.is_node(name):
raise ValueError("Given name '{}' is a node.".format(name))
def powernode_data(self, name: str) -> Powernode:
"""Return a Powernode object describing the given powernode"""
self.assert_powernode(name)
contained_nodes = frozenset(self.nodes_in(name))
return Powernode(
size=len(contained_nodes),
contained=frozenset(self.all_in(name)),
contained_pnodes=frozenset(self.powernodes_in(name)),
contained_nodes=contained_nodes,
)
def node_number(self, *, count_pnode=True) -> int:
"""Return the number of node"""
return (sum(1 for n in self.nodes())
+ (sum(1 for n in self.powernodes()) if count_pnode else 0))
def edge_number(self) -> int:
"""Return the number of (power) edges"""
edges = set()
for node, succs in self.edges.items():
for succ in succs:
edges.add(frozenset((node, succ)))
return len(edges)
def nodes(self) -> iter:
"""Yield all nodes in the graph (not the powernodes)"""
yield from (elem for elem, subs in self.inclusions.items() if subs == ())
def powernodes(self) -> iter:
"""Yield all powernodes in the graph (not the nodes)"""
yield from (elem for elem, subs in self.inclusions.items() if subs != ())
def is_powernode(self, identifier: str) -> bool:
"""True if given identifier is a powernode inside the power graph"""
return self.inclusions[identifier] != ()
def is_node(self, identifier: str) -> bool:
"""True if given identifier is a node inside the power graph"""
return self.inclusions[identifier] == ()
def nodes_in(self, name) -> iter:
"""Yield all nodes contained in given (power) node"""
yield from (node for node in self.all_in(name) if self.is_node(node))
def powernodes_in(self, name) -> iter:
"""Yield all power nodes contained in given (power) node"""
yield from (node for node in self.all_in(name) if self.is_powernode(node))
def all_in(self, name) -> iter:
"""Yield all (power) nodes contained in given (power) node"""
for elem in self.inclusions[name]:
yield elem
yield from self.all_in(elem)
def powernodes_containing(self, name, directly=False) -> iter:
"""Yield all power nodes containing (power) node of given *name*.
If *directly* is True, will only yield the direct parent of given name.
"""
if directly:
yield from (node for node in self.all_in(name)
if name in self.inclusions[node])
else:
# This algorithm is very bad. Inverting the inclusion dict could
# be far better.
@functools.lru_cache(maxsize=self.node_number(count_pnode=True))
def contains_target(node, target):
succs = self.inclusions[node]
if target in succs:
return True
else:
return any(contains_target(succ, target) for succ in succs)
# populate the cache
for root in self.roots:
contains_target(root, name)
# output all that contains target at some level
yield from (node for node in self.inclusions.keys()
if contains_target(node, name))
def write_bubble(self, filename: str):
"""Write in given filename the lines of bubble describing this instance"""
from bubbletools import converter
converter.tree_to_bubble(self, filename)
@staticmethod
def from_bubble_file(bblfile: str, oriented: bool = False,
symmetric_edges: bool = True) -> 'BubbleTree':
"""Extract data from given bubble file,
then call from_bubble_data method
"""
return BubbleTree.from_bubble_data(utils.data_from_bubble(bblfile),
oriented=bool(oriented),
symmetric_edges=symmetric_edges)
@staticmethod
def from_bubble_lines(bbllines: iter, oriented: bool = False,
symmetric_edges: bool = True) -> 'BubbleTree':
"""Return a BubbleTree instance.
bbllines -- iterable of raw line, bubble-formatted
oriented -- True: returned BubbleTree is oriented
"""
return BubbleTree.from_bubble_data((utils.line_data(line)
for line in bbllines),
oriented=bool(oriented),
symmetric_edges=symmetric_edges)
@staticmethod
def from_bubble_data(bbldata: iter, oriented: bool = False,
symmetric_edges: bool = True) -> 'BubbleTree':
"""Return a BubbleTree instance.
bbldata -- lines in bubble bbltree
oriented -- True: returned BubbleTree is oriented
"""
# get structure as two dicts
edges, inclusions = defaultdict(set), defaultdict(set)
used_in_edges = set()
for line in bbldata:
if not line:
continue
ltype, *payload = line
if ltype == 'EDGE':
source, target = payload
edges[source].add(target)
used_in_edges.add(source)
used_in_edges.add(target)
elif ltype == 'SET':
setname = payload[0]
inclusions[setname] # create it if not already populated
elif ltype == 'NODE':
nodename = payload[0]
inclusions[nodename] = () # a node can't contain anything
elif ltype == 'IN':
contained, container = payload
inclusions[container].add(contained)
else: # comment, empty or error
if ltype not in {'COMMENT', 'EMPTY', 'ERROR'}:
raise ValueError("The following line is not a valid "
"type ({}): '{}'".format(ltype, payload))
else: # it's a comment, an empty line or an error
pass
# all (power)nodes used in edges should be present in inclusions tree
for node in used_in_edges:
if node not in inclusions: # contains nothing, so its a node
inclusions[node] = ()
# all pure nodes needs to be a key in inclusions
for node in set(it.chain.from_iterable(inclusions.values())):
# an element that is not in inclusion is either:
# - a node not explicitly defined in a NODE line
# - a powernode that contains nothing and is not explicitly defined in a SET line
# the second case is meaningless: this is the case for any unused powernode name.
# Consequently, elements not in inclusions are nodes.
if node not in inclusions:
inclusions[node] = ()
# find the roots
not_root = set(it.chain.from_iterable(inclusions.values()))
roots = frozenset(frozenset(inclusions.keys()) - not_root)
# build the (oriented) bubble tree
symmetric_edges = symmetric_edges and not oriented
if symmetric_edges:
edges = utils.completed_graph(edges)
return BubbleTree(edges=edges, inclusions=dict(inclusions),
roots=roots, oriented=oriented, symmetric_edges=symmetric_edges)
|
class BubbleTree:
'''Model of a power graph, which can optionally be oriented.
'''
def __init__(self, edges: dict, inclusions: dict, roots: frozenset,
oriented: bool = False, symmetric_edges: bool = False):
pass
def compute_edge_reduction(self) -> float:
'''Compute the edge reduction. Costly computation'''
pass
def init_edge_number(self) -> int:
'''Return the number of edges present in the non-compressed graph'''
pass
def initial_edges(self) -> iter:
'''Yield edges in the initial (uncompressed) graph. Possible duplicates.'''
pass
def nodes_in(n):
pass
@property
def oriented(self) -> bool:
pass
@property
def edges(self) -> dict:
pass
@property
def inclusions(self) -> dict:
pass
@property
def roots(self) -> frozenset:
pass
@property
def edge_reduction(self) -> int:
pass
def connected_components(self) -> (dict, dict):
'''Return for one root of each connected component all
the linked objects, and the mapping linking a connected component
root with the roots that it contains.'''
pass
def assert_powernode(self, name: str) -> None or ValueError:
'''Do nothing if given name refers to a powernode in given graph.
Raise a ValueError in any other case.
'''
pass
def powernode_data(self, name: str) -> Powernode:
'''Return a Powernode object describing the given powernode'''
pass
def node_number(self, *, count_pnode=True) -> int:
'''Return the number of nodes'''
pass
def edge_number(self) -> int:
'''Return the number of (power) edges'''
pass
def nodes(self) -> iter:
'''Yield all nodes in the graph (not the powernodes)'''
pass
def powernodes(self) -> iter:
'''Yield all powernodes in the graph (not the nodes)'''
pass
def is_powernode(self, identifier: str) -> bool:
'''True if given identifier is a powernode inside the power graph'''
pass
def is_node(self, identifier: str) -> bool:
'''True if given identifier is a node inside the power graph'''
pass
def nodes_in(self, name) -> iter:
'''Yield all nodes contained in given (power) node'''
pass
def powernodes_in(self, name) -> iter:
'''Yield all power nodes contained in given (power) node'''
pass
def all_in(self, name) -> iter:
'''Yield all (power) nodes contained in given (power) node'''
pass
def powernodes_containing(self, name, directly=False) -> iter:
'''Yield all power nodes containing (power) node of given *name*.
If *directly* is True, will only yield the direct parent of given name.
'''
pass
@functools.lru_cache(maxsize=self.node_number(count_pnode=True))
def contains_target(node, target):
pass
def write_bubble(self, filename: str):
'''Write in given filename the lines of bubble describing this instance'''
pass
@staticmethod
def from_bubble_file(bblfile: str, oriented: bool = False,
symmetric_edges: bool = True) -> 'BubbleTree':
'''Extract data from given bubble file,
then call from_bubble_data method
'''
pass
@staticmethod
def from_bubble_lines(bbllines: iter, oriented: bool = False,
symmetric_edges: bool = True) -> 'BubbleTree':
'''Return a BubbleTree instance.
bbllines -- iterable of raw line, bubble-formatted
oriented -- True: returned BubbleTree is oriented
'''
pass
@staticmethod
def from_bubble_data(bbldata: iter, oriented: bool = False,
symmetric_edges: bool = True) -> 'BubbleTree':
'''Return a BubbleTree instance.
bbldata -- lines in bubble bbltree
oriented -- True: returned BubbleTree is oriented
'''
pass
| 38 | 21 | 8 | 0 | 6 | 2 | 2 | 0.37 | 0 | 11 | 0 | 0 | 23 | 6 | 26 | 26 | 270 | 48 | 171 | 84 | 129 | 63 | 138 | 68 | 109 | 13 | 0 | 4 | 59 |
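A minimal usage sketch, assuming the bubbletools package is installed and that bubble lines are tab-separated, as the parser above expects; the two-node example is hypothetical:

from bubbletools import BubbleTree

lines = (
    'NODE\ta',
    'NODE\tb',
    'SET\tp1',
    'IN\ta\tp1',
    'IN\tb\tp1',
    'EDGE\ta\tb',
)
tree = BubbleTree.from_bubble_lines(lines)
print(tree.roots)          # frozenset({'p1'})
print(tree.edge_number())  # 1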
4,382 |
Aluriak/tergraw
|
Aluriak_tergraw/tergraw/constant.py
|
tergraw.constant.Direction
|
class Direction(Enum):
Up, Right, Down, Left = range(4)
|
class Direction(Enum):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 4 | 0 | 0 |
4,383 |
Alveo/pyalveo
|
Alveo_pyalveo/tests/test_contributions.py
|
tests.test_contributions.ContributionsTest
|
class ContributionsTest(unittest.TestCase):
def test_create_contribution(self, m):
"""Test that we can create a new contribution"""
m.get(API_URL + "/item_lists.json",json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY)
cname = 'testcontrib'
m.post(client.oauth.api_url + "/contrib/",
json={'description': 'This is contribution description',
'documents': [{'name': 'testfile.txt',
'url': 'https://staging.alveo.edu.au/catalog/demotext/2006-05-28-19/document/testfile.txt'}],
'id': '29',
'metadata': {'abstract': '"This is contribution abstract"',
'collection': 'https://staging.alveo.edu.au/catalog/demotext',
'created': '2018-12-06T05:46:11Z',
'creator': 'Data Owner',
'title': 'HelloWorld'},
'name': 'HelloWorld',
'url': 'https://staging.alveo.edu.au/contrib/29'}
)
meta = {
"contribution_name": "HelloWorld",
"contribution_collection": "demotext",
"contribution_text": "This is contribution description",
"contribution_abstract": "This is contribution abstract"
}
result = client.create_contribution(meta)
# validate the request we made
req = m.last_request
self.assertEqual(req.method, 'POST')
# check that the right things were in the request
self.assertIn('contribution_collection', req.json())
self.assertIn('contribution_name', req.json())
self.assertDictEqual(meta, req.json())
def test_get_contribution(self, m):
"""Get details of a contribution"""
m.get(API_URL + "/item_lists.json",json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY)
cname = '29'
contrib_url = client.oauth.api_url + "/contrib/" + cname
m.get(contrib_url,
json={'description': 'This is contribution description',
'documents': [{'name': 'testfile.txt',
'url': 'https://staging.alveo.edu.au/catalog/demotext/2006-05-28-19/document/testfile.txt'}],
'metadata': {'abstract': '"This is contribution abstract"',
'collection': 'https://staging.alveo.edu.au/catalog/demotext',
'created': '2018-12-06T05:46:11Z',
'creator': 'Data Owner',
'title': 'HelloWorld'},
'name': 'HelloWorld',
'url': contrib_url}
)
result = client.get_contribution(contrib_url)
req = m.last_request
self.assertEqual(req.method, "GET")
self.assertEqual(result['id'], cname)
self.assertEqual(result['description'], 'This is contribution description')
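
The extra `m` argument on each test suggests the class is wrapped with a
requests_mock decorator that was dropped from this extract; a minimal sketch
of the assumed setup:

import unittest
import requests_mock

@requests_mock.Mocker()
class ContributionsTest(unittest.TestCase):
    ...  # test methods as above, each receiving the mocker as `m`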
|
class ContributionsTest(unittest.TestCase):
def test_create_contribution(self, m):
'''Test that we can create a new contribution'''
pass
def test_get_contribution(self, m):
'''Get details of a contribution'''
pass
| 3 | 2 | 33 | 6 | 26 | 2 | 1 | 0.08 | 1 | 1 | 1 | 0 | 2 | 0 | 2 | 74 | 69 | 13 | 52 | 13 | 49 | 4 | 24 | 13 | 21 | 1 | 2 | 0 | 2 |
4,384 |
Alveo/pyalveo
|
Alveo_pyalveo/pyalveo/pyalveo.py
|
pyalveo.pyalveo.OAuth2
|
class OAuth2(object):
""" An OAuth2 Manager class for the retrieval and storage of
all relevant URI's, tokens and client login data. """
def __init__(self, api_url, api_key=None, oauth=None, verifySSL=True):
""" Construct the OAuth requests module along with support for using
an API Key if Allowed.
:type api_url: String
:param api_url: the base URL for the API server used
:type oauth: Dictionary
:param oauth: dictionary of configuration settings for oauth containing keys
client_id, client_secret and redirect_url, default None means to use
api_key instead
:type api_key: :class:String
:param api_key: the API key to use, if set, we use this rather than trying OAuth
:type verifySSL: Boolean
:param verifySSL: True to enforce checking of SSL certificates, False
to disable checking (e.g. for staging/testing servers)
:rtype: OAuth2
:returns: the new OAuth2 client that can be used to make API requests
"""
# Application specific parameters
self.api_url = api_url
self.verifySSL = verifySSL
self.api_key = api_key
if oauth is not None:
self.client_id = oauth['client_id']
self.client_secret = oauth['client_secret']
self.redirect_url = oauth['redirect_url']
else:
self.client_id = None
self.client_secret = None
self.redirect_url = None
if self.client_id is None or self.client_secret is None or self.redirect_url is None:
# There better be an API Key and I'm allowed to use it or I'll cry
if not (self.api_key):
raise APIError(http_status_code="0", response="Local Error", msg="Client could not be created. Check your api key")
# API Urls derived from the main URL
self.auth_base_url = self.api_url+'/oauth/authorize'
self.token_url = self.api_url+'/oauth/token' #grant_type = authorization_code
self.revoke_url = self.api_url+'/oauth/revoke'
self.validate_url = self.api_url+'/oauth/token/info'
self.refresh_url = self.api_url+'/oauth/token' #grant_type = refresh_token
self.token = None
self.auto_refresh = False
self.state = None
self.auth_url = None
#This is here to prevent continuous attempts at getting
#the api key once it is fully phased out
#set to false to not bother trying to get it
self.API_KEY_DEFAULT = True
if not self.verifySSL:
os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'
# trigger authorisation if we are to use oauth
if not self.api_key:
self.get_authorisation_url()
def __eq__(self, other):
""" Return true if another OAuth2 has all identical fields
:type other: OAuth2
:param other: the other OAuth2 to compare to.
:rtype: Boolean
:returns: True if the OAuth2s are identical, otherwise False
"""
if not isinstance(other, OAuth2):
return False
d1 = dict(self.__dict__)
d1.pop('state',None)
d1.pop('auth_url',None)
d2 = dict(other.__dict__)
d2.pop('state',None)
d2.pop('auth_url',None)
return (d1 == d2)
def __ne__(self, other):
""" Return true if another Client does not have all identical fields
:type other: Client
:param other: the other Client to compare to.
:rtype: Boolean
:returns: False if the Clients are identical, otherwise True
"""
return not self.__eq__(other)
def to_dict(self):
"""
Returns a dict of all of its necessary components.
Not the same as the __dict__ method
"""
data = dict()
data['api_url'] = self.api_url
data['api_key'] = self.api_key
data['verifySSL'] = self.verifySSL
data['client_id'] = self.client_id
data['client_secret'] = self.client_secret
data['redirect_url'] = self.redirect_url
data['token'] = self.token
data['state'] = self.state
data['auth_url'] = self.auth_url
return data
def to_json(self):
"""
Returns a json string containing all relevant data to recreate this pyalveo.OAuth2.
"""
return json.dumps(self.to_dict())
@staticmethod
def from_json(json_data):
"""
Returns a pyalveo.OAuth2 given a json string built from the oauth.to_json() method.
"""
#If we have a string, then decode it, otherwise assume it's already decoded
if isinstance(json_data, str):
data = json.loads(json_data)
else:
data = json_data
oauth_dict = {
'client_id':data.get('client_id',None),
'client_secret':data.get('client_secret',None),
'redirect_url':data.get('redirect_url',None),
}
oauth = OAuth2(api_url=data.get('api_url',None), api_key=data.get('api_key',None),oauth=oauth_dict, verifySSL=data.get('verifySSL',True))
oauth.token = data.get('token',None)
oauth.state = data.get('state',None)
oauth.auth_url = data.get('auth_url',None)
return oauth
def get_authorisation_url(self, reset=False):
""" Initialises the OAuth2 Process by asking the auth server for a login URL.
Once called, the user can login by being redirected to the url returned by
this function.
If there is an error during authorisation, None is returned."""
if reset:
self.auth_url = None
if not self.auth_url:
try:
oauth = OAuth2Session(self.client_id,redirect_uri=self.redirect_url)
self.auth_url,self.state = oauth.authorization_url(self.auth_base_url)
except Exception:
#print("Unexpected error:", sys.exc_info()[0])
#print("Could not get Authorisation Url!")
return None
return self.auth_url
def on_callback(self,auth_resp):
""" Must be called once the authorisation server has responded after
redirecting to the url provided by 'get_authorisation_url' and completing
the login there.
Returns True if a token was successfully retrieved, False otherwise."""
try:
oauth = OAuth2Session(self.client_id,state=self.state,redirect_uri=self.redirect_url)
self.token = oauth.fetch_token(self.token_url,
authorization_response=auth_resp,
client_secret=self.client_secret,
verify=self.verifySSL)
if not self.api_key and self.API_KEY_DEFAULT:
self.get_api_key()
if not self.api_key:
self.API_KEY_DEFAULT = False
except Exception:
#print("Unexpected error:", sys.exc_info()[0])
#print("Could not fetch token from OAuth Callback!")
return False
return True
def validate(self):
""" Confirms the current token is still valid.
Returns True if it is valid, False otherwise. """
try:
resp = self.request().get(self.validate_url, verify=self.verifySSL).json()
except TokenExpiredError:
return False
except AttributeError:
return False
if 'error' in resp:
return False
return True
def refresh_token(self):
""" Refreshes access token using refresh token. Returns true if successful, false otherwise. """
try:
if self.token:
self.token = self.request().refresh_token(self.refresh_url, self.token['refresh_token'])
return True
except Exception as e:
# TODO: what might go wrong here - handle this error properly
#print("Unexpected error:\t\t", str(e))
#traceback.print_exc()
pass
return False
def revoke_access(self):
""" Requests that the currently used token becomes invalid. Call this should a user logout. """
if self.token is None:
return True
#Don't try to revoke if the token is already invalid; that would just cause an error response.
if self.validate():
data = {}
data['token'] = self.token['access_token']
self.request().post(self.revoke_url, data=data, json=None,verify=self.verifySSL)
return True
def get_user_data(self):
try:
response = self.get(self.api_url+"/account/get_details.json")
if response.status_code != requests.codes.ok: #@UndefinedVariable
return None
return response.json()
except Exception:
return None
def get_api_key(self):
if self.token is None:
return False
try:
oauth = OAuth2Session(self.client_id,
token=self.token,
redirect_uri=self.redirect_url,
state=self.state)
response = oauth.get(self.api_url+"/account_api_key",verify=self.verifySSL)
if response.status_code != requests.codes.ok: #@UndefinedVariable
# attempt a 2nd time in case of random errors
response = oauth.get(self.api_url+"/account_api_key",verify=self.verifySSL)
if response.status_code != requests.codes.ok: #@UndefinedVariable
# print("Failed to get API KEY!!")
return False
self.api_key = response.json()['apiKey']
return True
except Exception:
# print("Failed to get API KEY!!")
return False
def request(self):
""" Returns an OAuth2 Session to be used to make requests.
Returns None if a token hasn't yet been received."""
headers = {'Accept': 'application/json'}
# Use API Key if possible
if self.api_key:
headers['X-API-KEY'] = self.api_key
return requests,headers
else:
# Try to use OAuth
if self.token:
return OAuth2Session(self.client_id, token=self.token),headers
else:
raise APIError("No API key and no OAuth session available")
def get(self, url, **kwargs):
request,headers = self.request()
headers.update(kwargs.pop('headers',{}))  # pop so **kwargs can't pass headers twice
if not url.startswith(self.api_url):
url = self.api_url + url
return request.get(url, headers=headers, verify=self.verifySSL, **kwargs)
def post(self, url, **kwargs):
request,headers = self.request()
if not url.startswith(self.api_url):
url = self.api_url + url
afile = kwargs.pop('file',None)
if afile is not None:
# A file was given to us, so we should update headers
# with what is provided, if not default to: multipart/form-data
# headers.update(kwargs.get('headers',{'Content-Type':'multipart/form-data'}))
with open(afile, 'rb') as fd:
original_data = kwargs.pop('data',{})
if original_data is None:
original_data = {}
fname = os.path.basename(afile)
original_data.update({
"file": (fname, fd, "application/octet-stream"),
"composite":"NONE",
})
form = encoder.MultipartEncoder(original_data)
headers.update({
"Prefer": "respond-async",
"Content-Type": form.content_type,
})
response = request.post(url, headers=headers, data=form, verify=self.verifySSL, **kwargs)
else:
# If there is data but no file then set content type to json
if kwargs.get('data',None):
headers['Content-Type'] = 'application/json'
headers.update(kwargs.pop('headers',{}))
response = request.post(url, headers=headers, verify=self.verifySSL, **kwargs)
return response
def put(self, url, **kwargs):
request,headers = self.request()
headers['Content-Type'] = 'application/json'
headers.update(kwargs.pop('headers',{}))
if not url.startswith(self.api_url):
url = self.api_url + url
return request.put(url, headers=headers, verify=self.verifySSL, **kwargs)
def delete(self, url, **kwargs):
request,headers = self.request()
headers.update(kwargs.pop('headers',{}))
if not url.startswith(self.api_url):
url = self.api_url + url
return request.delete(url, headers=headers, verify=self.verifySSL, **kwargs)
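
To make the flow above concrete, a minimal sketch of the intended OAuth dance
(URLs and credentials are placeholders, and `callback_url` stands for the full
redirect URL received after the user logs in):

oauth = OAuth2('https://app.alveo.edu.au',
               oauth={'client_id': 'MY-ID',
                      'client_secret': 'MY-SECRET',
                      'redirect_url': 'https://myapp.example/callback'})
login_url = oauth.get_authorisation_url()  # send the user here to authenticate
# ... the provider redirects back with ?code=...&state=... ...
if oauth.on_callback(callback_url) and oauth.validate():
    resp = oauth.get('/item_lists.json')   # authenticated API request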
|
class OAuth2(object):
''' An OAuth2 Manager class for the retrieval and storage of
all relevant URIs, tokens and client login data. '''
def __init__(self, api_url, api_key=None, oauth=None, verifySSL=True):
''' Construct the OAuth requests module along with support for using
an API Key if Allowed.
:type api_url: String
:param api_url: the base URL for the API server used
:type oauth: Dictionary
:param oauth: dictionary of configuration settings for oauth containing keys
client_id, client_secret and redirect_url, default None means to use
api_key instead
:type api_key: :class:String
:param api_key: the API key to use, if set, we use this rather than trying OAuth
:type verifySSL: Boolean
:param verifySSL: True to enforce checking of SSL certificates, False
to disable checking (e.g. for staging/testing servers)
:rtype: OAuth2
:returns: the new OAuth2 client that can be used to make API requests
'''
pass
def __eq__(self, other):
''' Return true if another OAuth2 has all identical fields
:type other: OAuth2
:param other: the other OAuth2 to compare to.
:rtype: Boolean
:returns: True if the OAuth2s are identical, otherwise False
'''
pass
def __ne__(self, other):
''' Return true if another Client does not have all identical fields
:type other: Client
:param other: the other Client to compare to.
:rtype: Boolean
:returns: False if the Clients are identical, otherwise True
'''
pass
def to_dict(self):
'''
Returns a dict of all of its necessary components.
Not the same as the __dict__ method
'''
pass
def to_json(self):
'''
Returns a json string containing all relevant data to recreate this pyalveo.OAuth2.
'''
pass
@staticmethod
def from_json(json_data):
'''
Returns a pyalveo.OAuth2 given a json string built from the oauth.to_json() method.
'''
pass
def get_authorisation_url(self, reset=False):
''' Initialises the OAuth2 Process by asking the auth server for a login URL.
Once called, the user can login by being redirected to the url returned by
this function.
If there is an error during authorisation, None is returned.'''
pass
def on_callback(self,auth_resp):
''' Must be called once the authorisation server has responded after
redirecting to the url provided by 'get_authorisation_url' and completing
the login there.
Returns True if a token was successfully retrieved, False otherwise.'''
pass
def validate(self):
''' Confirms the current token is still valid.
Returns True if it is valid, False otherwise. '''
pass
def refresh_token(self):
''' Refreshes access token using refresh token. Returns true if successful, false otherwise. '''
pass
def revoke_access(self):
''' Requests that the currently used token becomes invalid. Call this should a user logout. '''
pass
def get_user_data(self):
pass
def get_api_key(self):
pass
def request(self):
''' Returns an OAuth2 Session to be used to make requests.
Returns None if a token hasn't yet been received.'''
pass
def get(self, url, **kwargs):
pass
def post(self, url, **kwargs):
pass
def put(self, url, **kwargs):
pass
def delete(self, url, **kwargs):
pass
| 20 | 13 | 17 | 2 | 11 | 5 | 3 | 0.41 | 1 | 5 | 1 | 0 | 17 | 16 | 18 | 18 | 333 | 51 | 203 | 61 | 183 | 84 | 181 | 58 | 162 | 6 | 1 | 3 | 53 |
4,385 |
Alveo/pyalveo
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Alveo_pyalveo/tests/create_test.py
|
tests.create_test.CreateTest
|
class CreateTest(unittest.TestCase):
def test_create_collection(self, m):
"""Test that we can create a new collection"""
m.get(API_URL + "/item_lists.json", json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY)
cname = 'testcollection1'
curl = client.oauth.api_url + "/catalog/" + cname
m.post(client.oauth.api_url + "/catalog",
json={"success": "New collection \'%s\' (%s) created" % (cname, curl)})
meta = {"@context": CONTEXT,
"@type": "dcmitype:Collection",
"dcterms:creator": "Data Owner",
"dcterms:rights": "All rights reserved to Data Owner",
"dcterms:subject": "English Language",
"dcterms:title": "Test Collection"}
result = client.create_collection('testcollection1', meta)
self.assertIn("testcollection1", result)
self.assertIn("created", result)
# validate the request we made
req = m.last_request
self.assertEqual(req.method, 'POST')
self.assertIn('name', req.json())
self.assertIn('collection_metadata', req.json())
self.assertDictEqual(meta, req.json()['collection_metadata'])
# TODO: test creating collection that already exists
# TODO: test other error conditions - no name, no metadata, bad json
def test_add_text_item(self, m):
"""Test that we can add new items that have just a text document to a collection """
doctext = "This is the text of my test document.\nTwo lines.\n"
m.get(API_URL + "/item_lists.json", json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY)
collection_name = "testcollection1"
collection_uri = API_URL + "/catalog/" + collection_name
itemname = "item1"
m.post(collection_uri, json={"success": [itemname]})
meta = {
'dcterms:title': 'Test Item',
'dcterms:creator': 'A. Programmer'
}
item_uri = client.add_text_item(
collection_uri, itemname, meta, text=doctext, title='my test document')
self.assertIn(itemname, item_uri)
req = m.last_request
self.assertEqual(req.method, 'POST')
self.assertEqual(req.headers['Content-Type'], 'application/json')
self.assertEqual(req.headers['X-API-KEY'], API_KEY)
self.assertIn('items', req.json())
self.assertEqual(1, len(req.json()['items']))
itemdict = req.json()['items'][0]
self.assertIn('documents', itemdict)
self.assertEqual(1, len(itemdict['documents']))
self.assertEqual(doctext, itemdict['documents'][0]['content'])
self.assertEqual(
itemname+'.txt', itemdict['documents'][0]['identifier'])
def test_add_item(self, m):
"""Test that we can add new items to a collection"""
m.get(API_URL + "/item_lists.json", json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY)
collection_name = "testcollection1"
collection_uri = API_URL + "/catalog/" + collection_name
itemname = "item1"
m.post(collection_uri, json={"success": [itemname]})
meta = {
'dcterms:title': 'Test Item',
'dcterms:creator': 'A. Programmer'
}
item_uri = client.add_item(collection_uri, itemname, meta)
self.assertIn(itemname, item_uri)
req = m.last_request
self.assertEqual(req.method, 'POST')
self.assertEqual(req.headers['Content-Type'], 'application/json')
self.assertEqual(req.headers['X-API-KEY'], API_KEY)
self.assertIn('items', req.json())
def test_delete_document(self, m):
"""Test deleting a document"""
m.get(API_URL + "/item_lists.json", json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY)
collection_name = "testcollection1"
itemname = "item1"
docname = "doc1.txt"
document_uri = API_URL + \
"/catalog/%s/%s/documents/%s" % (collection_name,
itemname, docname)
# delete the document
m.delete(document_uri, json={"success": "Deleted the document %s from item %s in collection %s" % (
docname, itemname, collection_name)})
client.delete_document(document_uri)
req = m.last_request
self.assertEqual(req.method, 'DELETE')
def test_delete_item(self, m):
"""Test deleting an item"""
m.get(API_URL + "/item_lists.json", json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY)
collection_name = "testcollection1"
itemname = "item1"
docname = "doc1.txt"
item_uri = API_URL + "/catalog/%s/%s" % (collection_name, itemname)
# now delete the item
m.delete(item_uri, json={"success": itemname})
client.delete_item(item_uri)
req = m.last_request
self.assertEqual(req.method, 'DELETE')
def test_add_document(self, m):
"""Test adding documents to items"""
m.get(API_URL + "/item_lists.json", json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY)
collection_name = "testcollection1"
itemname = "item1"
docname = "doc1.txt"
content = "Hello World!\n"
item_uri = API_URL + "/catalog/%s/%s" % (collection_name, itemname)
m.post(item_uri, json={"success": "Added the document %s to item %s in collection %s" % (
docname, itemname, collection_name)})
docmeta = {
"dcterms:title": "Sample Document",
"dcterms:type": "Text"
}
document_uri = client.add_document(
item_uri, docname, docmeta, content=content)
req = m.last_request
payload = req.json()
self.assertEqual(payload['document_content'], content)
self.assertIn('metadata', payload)
md = payload['metadata']
self.assertIn('dcterms:title', md)
self.assertEqual(md['dcterms:title'], docmeta['dcterms:title'])
self.assertEqual(md['@type'], "foaf:Document")
self.assertEqual(md['dcterms:identifier'], docname)
def test_modify_item(self, m):
"""Test modify item metadata"""
m.get(API_URL + "/item_lists.json", json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY)
collection_name = "testcollection1"
itemname = "item1"
item_uri = API_URL + "/catalog/%s/%s" % (collection_name, itemname)
meta = {
"http://ns.ausnc.org.au/schemas/ausnc_md_model/mode": "An updated test mode"}
m.put(item_uri, json={'success': "item metadata updated"})
client.modify_item(item_uri, meta)
req = m.last_request
self.assertIn('metadata', req.json())
self.assertEqual(meta, req.json()['metadata'])
@unittest.skip("failing because of open file but works ok")
def test_add_document_attachment(self, m):
"""Test adding a document to an item as a file attachment"""
m.get(API_URL + "/item_lists.json", json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY)
collection_name = "testcollection1"
itemname = "item1"
docname = "doc1.txt"
content = "Hello World\n"
# create a temporary file to upload
with open(docname, 'w') as out:
out.write(content)
item_uri = API_URL + "/catalog/%s/%s" % (collection_name, itemname)
m.post(item_uri, json={"success": "Added the document %s to item %s in collection %s" % (
docname, itemname, collection_name)})
docmeta = {
"dcterms:title": "Sample Document",
"dcterms:type": "Text"
}
document_uri = client.add_document(
item_uri, docname, docmeta, file=docname)
self.assertEqual(
"http://example.alveo.froob/catalog/testcollection1/item1/document/doc1.txt", document_uri)
req = m.last_request
# should be a multipart-form with a json payload and a file attachment
self.assertIn('multipart/form-data', req.headers['Content-Type'])
self.assertIn('boundary', req.headers['Content-Type'])
# grab the multipart boundary from the header
bdy = req.headers['Content-Type'].split('=')[1]
# split the body on the boundary
# req.text.to_string() fails because the file handle for the
# file we attached in the request is now closed
# the multipart encoder didn't actually do the read yet
# so this test skipped for now
# can make it pass by leaving the file open
messages = req.text.to_string().decode().split('--' + bdy)
# run some tests on the parts
for msg in messages:
msg = msg.strip()
# read and skip header lines
inheader = True
blockname = ""
body = ""
for line in msg.split('\r\n'):
if "metadata" in line:
blockname = "metadata"
elif "filename" in line:
blockname = "file"
elif line == u'':
inheader = False
if not inheader:
body += line + '\n'
if blockname == "metadata":
md = json.loads(body)
self.assertIn('dcterms:title', md)
self.assertEqual(md['dcterms:title'], docmeta['dcterms:title'])
self.assertEqual(md['@type'], "foaf:Document")
self.assertEqual(md['dcterms:identifier'], docname)
elif blockname == "file":
self.assertIn(content, body)
def test_add_annotations(self, m):
"""Test that we can add new annotations for an item"""
m.get(API_URL + "/item_lists.json", json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY)
collection_uri = API_URL + "/catalog/collection1"
# create an item
itemname = "testitem1"
m.post(collection_uri, json={"success": [itemname]})
meta = {
'dcterms:title': 'Test Item',
'dcterms:creator': 'A. Programmer'
}
item_uri = client.add_item(collection_uri, itemname, meta)
anns = [{
"@type": "dada:TextAnnotation",
"type": "pageno",
"label": "hello",
"start": 421,
"end": 425
},
{
"@type": "dada:TextAnnotation",
"type": "pageno",
"label": "world",
"start": 2524,
"end": 2529
}
]
# now add some annotations
m.post(item_uri + "/annotations", json={'success': 'yes'})
client.add_annotations(item_uri, anns)
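
Stripped of the mocking, the workflow these tests exercise looks like this
(a sketch against a placeholder server; metadata keys mirror the tests above):

client = pyalveo.Client(api_url=API_URL, api_key=API_KEY)
client.create_collection('testcollection1', meta)      # returns a message
collection_uri = API_URL + '/catalog/testcollection1'  # URI derives from the name
item_uri = client.add_item(collection_uri, 'item1', {'dcterms:title': 'Test Item'})
client.add_document(item_uri, 'doc1.txt',
                    {'dcterms:title': 'Sample Document', 'dcterms:type': 'Text'},
                    content='Hello World!\n')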
|
class CreateTest(unittest.TestCase):
def test_create_collection(self, m):
'''Test that we can create a new collection'''
pass
def test_add_text_item(self, m):
'''Test that we can add new items that have just a text document to a collection '''
pass
def test_add_item(self, m):
'''Test that we can add new items to a collection'''
pass
def test_delete_document(self, m):
'''Test deleting a document'''
pass
def test_delete_item(self, m):
'''Test deleting an item'''
pass
def test_add_document(self, m):
'''Test adding documents to items'''
pass
def test_modify_item(self, m):
'''Test modify item metadata'''
pass
@unittest.skip("failing because of open file but works ok")
def test_add_document_attachment(self, m):
'''Test adding a document to an item as a file attachment'''
pass
def test_add_annotations(self, m):
'''Test that we can add new annotations for an item'''
pass
| 11 | 9 | 30 | 6 | 21 | 3 | 2 | 0.14 | 1 | 1 | 1 | 0 | 9 | 0 | 9 | 81 | 286 | 64 | 195 | 86 | 184 | 27 | 156 | 84 | 146 | 9 | 2 | 3 | 17 |
4,386 |
Alveo/pyalveo
|
Alveo_pyalveo/tests/test_oauth.py
|
tests.test_oauth.OAuthTest
|
class OAuthTest(unittest.TestCase):
def test_to_from_json(self,m):
""" Test packing the oath object into a json form then reloading it. """
api_url = 'https://fake.com'
api_key = 'thisisrandomtext'
verifySSL = False
oauth_dict = {
'client_id':'morerandomtext',
'client_secret':'secretrandomtext',
'redirect_url':'https://anotherfake.com'
}
expected_json = '{"client_id": "morerandomtext", "state": "Yg4HRoIwCGspnYRQY65jCoPlbIHaiy", "token": null, "auth_url": "https://fake.com/oauth/authorize?response_type=code&client_id=morerandomtext&redirect_uri=https%3A%2F%2Fanotherfake.com&state=Yg4HRoIwCGspnYRQY65jCoPlbIHaiy", "redirect_url": "https://anotherfake.com", "client_secret": "secretrandomtext", "api_key": null, "verifySSL": false, "api_url": "https://fake.com"}'
oauth = OAuth2(api_url,oauth=oauth_dict,verifySSL=verifySSL)
json_string = oauth.to_json()
#Test json comes out as expected
#A state will be generated which should always be different
#So we need to load the json into a dict, remove the state key then check equality
json_dict = json.loads(json_string)
expected_dict = json.loads(expected_json)
json_dict.pop('state',None)
expected_dict.pop('state',None)
#Do the same with auth url as it's a string that contains the state
json_dict.pop('auth_url',None)
expected_dict.pop('auth_url',None)
self.assertEqual(json_dict, expected_dict)
oauth2 = OAuth2.from_json(json_string)
#Test generated json creates an identical object
#These should have identical states however
self.assertEqual(oauth, oauth2)
starting_json = '{"client_id": null, "state": null, "token": null, "auth_url": null, "redirect_url": null, "client_secret": null, "api_key": "thisisrandomtext", "verifySSL": false, "api_url": "https://fake.com"}'
oauth = OAuth2(api_url,api_key=api_key,verifySSL=verifySSL)
oauth2 = OAuth2.from_json(starting_json)
#test that manually created json creates an oauth object identical to one properly set up
self.assertEqual(oauth, oauth2)
def test_create_client_oauth(self, m):
"""Create a client using OAuth credentials"""
redirect_url = API_URL+'/oauth_redirect/'
oauth_url = API_URL+'/oauth/authorize'
m.get(redirect_url, json={})
oauth_info = {
'client_id': 'foobar',
'client_secret': 'secret client',
'redirect_url': redirect_url,
}
client = pyalveo.Client(api_url=API_URL,
oauth=oauth_info,
configfile="missing.config",
verifySSL=False)
# we can't capture the request that OAuth makes but we can
# check the settings that result from it
self.assertTrue(client.oauth.auth_url.startswith(oauth_url))
self.assertEqual(client.oauth.redirect_url, redirect_url)
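
The round-trip under test is what makes sessions persistable between runs;
a sketch reusing the test's values (the file name is illustrative):

oauth = OAuth2('https://fake.com', api_key='thisisrandomtext', verifySSL=False)
with open('session.json', 'w') as fh:
    fh.write(oauth.to_json())
# later, in another process:
with open('session.json') as fh:
    restored = OAuth2.from_json(fh.read())
assert restored == oauth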
|
class OAuthTest(unittest.TestCase):
def test_to_from_json(self,m):
''' Test packing the oauth object into a json form then reloading it. '''
pass
def test_create_client_oauth(self, m):
'''Create a client using OAuth credentials'''
pass
| 3 | 2 | 32 | 6 | 20 | 6 | 1 | 0.27 | 1 | 2 | 2 | 0 | 2 | 0 | 2 | 74 | 66 | 14 | 41 | 18 | 38 | 11 | 30 | 18 | 27 | 1 | 2 | 0 | 2 |
4,387 |
Alveo/pyalveo
|
Alveo_pyalveo/pyalveo/pyalveo.py
|
pyalveo.pyalveo.Client
|
class Client(object):
""" Client object used to manipulate Alveo objects and interface
with the API
"""
def __init__(self, api_key=None, cache=None, api_url=None,
use_cache=True, update_cache=True, cache_dir=None,
verifySSL=True,
oauth=None, configfile=None):
""" Construct a new Client with the specified parameters.
Unspecified parameters will be derived from the user's ~/alveo.config
file if present.
:type api_key: :class:String
:param api_key: the API key to use
:type cache: :class:Cache
:param cache: the Cache to use
:type api_url: String
:param api_url: the base URL for the API server used
:type use_cache: Boolean
:param use_cache: True to fetch available data from the
cache database, False to always fetch data from the server
:type update_cache: Boolean
:param update_cache: True to update the cache database with
downloaded data, False to never write to the cache
:type verifySSL: Boolean
:param verifySSL: True to enforce checking of SSL certificates, False
to disable checking (e.g. for staging/testing servers)
:type oauth: Dictionary
:param oauth: dictionary of configuration settings for oauth containing keys
client_id, client_secret and redirect_url, default None means to use
api_key instead
:type configfile: String
:param configfile: File name to read configuration from, default ~/alveo.config
:rtype: Client
:returns: the new Client
"""
config = self._read_config(configfile)
# api_key, api_url args override config settings
if api_key:
self.api_key = api_key
else:
self.api_key = config.get('apiKey', None)
if api_url:
self.api_url = api_url
else:
self.api_url = config.get('base_url', None)
#pyAlveo Cache Settings
if use_cache is not None:
self.use_cache = use_cache
else:
self.use_cache = config.get('use_cache', None)
if cache_dir:
self.cache_dir = cache_dir
else:
self.cache_dir = config.get('cache_dir', None)
self.update_cache = update_cache
# grab the default context
self.context = config.get('context', CONTEXT)
# configure a cache if we want to read or write to it
if self.use_cache or self.update_cache:
if cache is None or isinstance(cache, str):
if 'max_age' in config:
self.cache = Cache(self.cache_dir, config['max_age'])
else:
self.cache = Cache(self.cache_dir)
else:
self.cache = cache
else:
self.cache = None
self.oauth = OAuth2(api_url=self.api_url,
oauth=oauth,
api_key=self.api_key,
verifySSL=verifySSL)
def to_json(self):
"""
Returns a json string containing all relevant data to recreate this pyalveo.Client.
"""
data = dict(self.__dict__)
data.pop('context',None)
data['oauth'] = self.oauth.to_dict()
data['cache'] = self.cache.to_dict()
return json.dumps(data)
@staticmethod
def from_json(json_data):
"""
Returns a pyalveo.Client given a json string built from the client.to_json() method.
"""
# If we have a string, then decode it, otherwise assume it's already decoded
if isinstance(json_data, str):
data = json.loads(json_data)
else:
data = json_data
oauth_dict = {
'client_id':data.get('oauth',{}).get('client_id',None),
'client_secret':data.get('oauth',{}).get('client_secret',None),
'redirect_url':data.get('oauth',{}).get('redirect_url',None),
}
client = Client(api_key=data.get('api_key',None),
api_url=data.get('api_url',None),
oauth=oauth_dict,
use_cache=data.get('use_cache',None),
cache_dir=data.get('cache_dir',None),
update_cache=data.get('update_cache',None),
verifySSL=data.get('oauth',{}).get('verifySSL',None)
)
client.cache = Cache.from_json(data.get('cache',None))
client.oauth = OAuth2.from_json(data.get('oauth',None))
return client
@staticmethod
def _read_config(configfile=None):
# copy the default configuration so we don't update it
config = CONFIG_DEFAULT.copy()
if configfile is None:
alveo_config = os.path.expanduser(CONFIG_DEFAULT['alveo_config'])
else:
alveo_config = configfile
alveo_config = os.path.expandvars(alveo_config)
if os.path.exists(alveo_config):
with open(alveo_config) as h:
config.update(json.load(h))
config['cache_dir'] = os.path.expandvars(os.path.expanduser(config['cache_dir']))
return config
def __eq__(self, other):
""" Return true if another Client has all identical fields
:type other: Client
:param other: the other Client to compare to.
:rtype: Boolean
:returns: True if the Clients are identical, otherwise False
"""
if not isinstance(other, Client):
return False
d1 = dict(self.__dict__)
d1oauth = d1['oauth']
d1.pop('oauth',None)
d1cache = d1['cache']
d1.pop('cache',None)
d2 = dict(other.__dict__)
d2oauth = d2['oauth']
d2.pop('oauth',None)
d2cache = d2['cache']
d2.pop('cache',None)
return (d1 == d2 and d1oauth == d2oauth and d1cache == d2cache)
def __ne__(self, other):
""" Return true if another Client does not have all identical fields
:type other: Client
:param other: the other Client to compare to.
:rtype: Boolean
:returns: False if the Clients are identical, otherwise True
"""
return not self.__eq__(other)
def api_request(self, url, data=None, method='GET', raw=False, file=None):
""" Perform an API request to the given URL, optionally
including the specified data
:type url: String
:param url: the URL to which to make the request
:type data: String
:param data: the data to send with the request, if any
:type method: String
:param method: the HTTP request method
:type raw: Boolean
:param raw: if True, return the raw response, otherwise treat as JSON and return the parsed response
:type file: String
:param file: (Optional) full path to file to be uploaded in a POST request
:returns: the response from the server either as a raw response or a Python dictionary
generated by parsing the JSON response
:raises: APIError if the API request is not successful
"""
if method == 'GET':
response = self.oauth.get(url)
elif method == 'POST':
if file is not None:
response = self.oauth.post(url, data=data, file=file)
else:
response = self.oauth.post(url, data=data)
elif method == 'PUT':
response = self.oauth.put(url, data=data)
elif method == 'DELETE':
response = self.oauth.delete(url)
else:
raise APIError("Unknown request method: %s" % (method,))
# check for error responses
if response.status_code >= 400:
raise APIError(response.status_code,
'',
"Error accessing API (url: %s, method: %s)\nData: %s\nMessage: %s" % (url, method, data, response.text))
if raw:
return response.content
else:
return response.json()
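# Illustrative calls (not part of the original source; the paths come from
# the methods below):
#   client.api_request('/version')                               # parsed JSON
#   client.api_request('/catalog', data=payload, method='POST')  # create
#   client.api_request(doc_url, raw=True)                        # raw bytes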
def add_context(self, prefix, url):
""" Add a new entry to the context that will be used
when uploading new metadata records.
:type prefix: String
:param prefix: the namespace prefix (e.g. dc)
:type url: String
:param url: the url to associate with the prefix
"""
self.context[prefix] = url
def get_api_version(self):
""" Retrieve the API version from the server
:rtype: String
:returns: the API version string returned by the server
:raises: APIError if the API request is not successful
"""
resp = self.api_request('/version')
return resp['API version']
def get_annotation_context(self):
""" Retrieve the JSON-LD annotation context from the server
:rtype: Dict
:returns: the annotation context
:raises: APIError if the API request is not successful
"""
return self.api_request('/schema/json-ld')
def get_collections(self):
"""Retrieve a list of the collection URLs for all collections
hosted on the server.
:rtype: List
:returns: a List of tuples of (name, url) for each collection
"""
result = self.api_request('/catalog')
# get the collection name from the url
return [(os.path.split(x)[1], x) for x in result['collections']]
def get_item_lists(self):
""" Retrieve metadata about each of the Item Lists associated
with this Client's API key
Returns a List of Dicts, each containing metadata regarding
an Item List, with the following key-value pairs:
- name: the name of the Item List
- url: the URL of the Item List
- num_items: the number of items in the Item List
:rtype: List
:returns: a List of Dicts, each containing metadata regarding
an Item List
:raises: APIError if the API request is not successful
"""
return self.api_request('/item_lists.json')
def get_item(self, item_url, force_download=False):
""" Retrieve the item metadata from the server, as an Item object
:type item_url: String or Item
:param item_url: URL of the item, or an Item object
:rtype: Item
:returns: the corresponding metadata, as an Item object
:type force_download: Boolean
:param force_download: True to download from the server
regardless of the cache's contents
:raises: APIError if the API request is not successful
"""
item_url = str(item_url)
if (self.use_cache and
not force_download and
self.cache.has_item(item_url)):
item_json = self.cache.get_item(item_url)
else:
item_json = self.api_request(item_url, raw=True)
if self.update_cache:
self.cache.add_item(item_url, item_json)
return Item(json.loads(item_json.decode('utf-8')), self)
def get_document(self, doc_url, force_download=False):
""" Retrieve the data for the given document from the server
:type doc_url: String or Document
:param doc_url: the URL of the document, or a Document object
:type force_download: Boolean
:param force_download: True to download from the server
regardless of the cache's contents
:rtype: String
:returns: the document data
:raises: APIError if the API request is not successful
"""
doc_url = str(doc_url)
if (self.use_cache and
not force_download and
self.cache.has_document(doc_url)):
doc_data = self.cache.get_document(doc_url)
else:
doc_data = self.api_request(doc_url, raw=True)
if self.update_cache:
self.cache.add_document(doc_url, doc_data)
return doc_data
def get_primary_text(self, item_url, force_download=False):
""" Retrieve the primary text for an item from the server
:type item_url: String or Item
:param item_url: URL of the item, or an Item object
:type force_download: Boolean
:param force_download: True to download from the server
regardless of the cache's contents
:rtype: String
:returns: the item's primary text if it has one, otherwise None
:raises: APIError if the request was not successful
"""
item_url = str(item_url)
metadata = self.get_item(item_url).metadata()
try:
primary_text_url = metadata['alveo:primary_text_url']
except KeyError:
return None
if primary_text_url == 'No primary text found':
return None
if (self.use_cache and
not force_download and
self.cache.has_primary_text(item_url)):
primary_text = self.cache.get_primary_text(item_url)
else:
primary_text = self.api_request(primary_text_url, raw=True)
if self.update_cache:
self.cache.add_primary_text(item_url, primary_text)
return primary_text
def get_item_annotations(self, item_url, annotation_type=None, label=None):
""" Retrieve the annotations for an item from the server
:type item_url: String or Item
:param item_url: URL of the item, or an Item object
:type annotation_type: String
:param annotation_type: return only results with a matching Type field
:type label: String
:param label: return only results with a matching Label field
:rtype: String
:returns: the annotations as a dictionary, if the item has
annotations, otherwise None
The annotation dictionary has keys:
commonProperties - properties common to all annotations
@context - the url of the JSON-LD annotation context definition
alveo:annotations - a list of annotations, each is a dictionary
:raises: APIError if the request was not successful
"""
# get the annotation URL from the item metadata, if not present then there are no annotations
item_url = str(item_url)
metadata = self.get_item(item_url).metadata()
try:
annotation_url = metadata['alveo:annotations_url']
except KeyError:
return None
req_url = annotation_url
if annotation_type is not None:
req_url += '?'
req_url += urlencode((('type', annotation_type),))
if label is not None:
if annotation_type is None:
req_url += '?'
else:
req_url += '&'
req_url += urlencode((('label',label),))
try:
return self.api_request(req_url)
except KeyError:
return None
def get_annotation_types(self, item_url):
""" Retrieve the annotation types for the given item from the server
:type item_url: String or Item
:param item_url: URL of the item, or an Item object
:rtype: List
:returns: a List specifying the annotation types
:raises: APIError if the request was not successful
"""
req_url = item_url + "/annotations/types"
resp = self.api_request(req_url)
return resp['annotation_types']
def add_annotations(self, item_url, annotations):
"""Add annotations to the given item
:type item_url: String or Item
:param item_url: the URL of the item corresponding to the annotation,
or an Item object
:type annotations: list
:param annotations: the annotations as a list of dictionaries, each with keys '@type', 'label', 'start', 'end' and 'type'
:rtype: String
:returns: the server's success message, if successful
:raises: APIError if the upload was not successful
:raises: Exception if the annotations are malformed (missing a required key)
"""
adict = {'@context': "https://alveo-staging1.intersect.org.au/schema/json-ld"}
for ann in annotations:
# verify that we have the required properties
for key in ('@type', 'label', 'start', 'end', 'type'):
if key not in ann.keys():
raise Exception("required key '%s' not present in annotation" % key)
adict['@graph'] = annotations
resp = self.api_request(str(item_url) + '/annotations', method='POST', data=json.dumps(adict))
return self.__check_success(resp)
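# Example payload for add_annotations (illustrative; mirrors the keys the
# loop above requires):
#   client.add_annotations(item_url, [
#       {'@type': 'dada:TextAnnotation', 'type': 'pageno',
#        'label': 'hello', 'start': 421, 'end': 425},
#   ])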
def get_collection_info(self, collection_url):
""" Retrieve information about the specified Collection from the server
:type collection_url: String
:param collection_url: the URL of the collection
:rtype: Dict
:returns: a Dict containing information about the Collection
:raises: APIError if the request was not successful
"""
return self.api_request(collection_url)
def create_collection(self, name, metadata):
""" Create a new collection with the given name
and attach the metadata.
:param name: the collection name, suitable for use in a URL (no spaces)
:type name: String
:param metadata: a dictionary of metadata values to associate with the new collection
:type metadata: Dict
:rtype: String
:returns: a message confirming creation of the collection
:raises: APIError if the request was not successful
"""
payload = {
'collection_metadata': metadata,
'name': name
}
response = self.api_request('/catalog', method='POST', data=json.dumps(payload))
return self.__check_success(response)
def modify_collection_metadata(self, collection_uri, metadata, replace=None, name=''):
"""Modify the metadata for the given collection.
:param collection_uri: The URI that references the collection
:type collection_uri: String
:param metadata: a dictionary of metadata values to add/modify
:type metadata: Dict
:rtype: String
:returns: a message confirming that the metadata is modified
:raises: APIError if the request was not successful
"""
payload = {
'collection_metadata': metadata,
'name': name
}
if replace is not None:
payload['replace'] = replace
response = self.api_request(collection_uri, method='PUT', data=json.dumps(payload))
return self.__check_success(response)
def get_items(self, collection_uri):
"""Return all items in this collection.
:param collection_uri: The URI that references the collection
:type collection_uri: String
:rtype: List
:returns: a list of the URIs of the items in this collection
"""
cname = os.path.split(collection_uri)[1]
return self.search_metadata("collection_name:%s" % cname)
def add_text_item(self, collection_uri, name, metadata, text, title=None):
"""Add a new item to a collection containing a single
text document.
The full text of the text document is specified as the text
argument and will be stored with the same name as the
item and a .txt extension.
This is a shorthand for the more general add_item method.
:param collection_uri: The URI that references the collection
:type collection_uri: String
:param name: The item name, suitable for use in a URI (no spaces)
:type name: String
:param metadata: a dictionary of metadata values describing the item
:type metadata: Dict
:param text: the full text of the document associated with this item
:type text: String
:param title: document title, defaults to the item name
:type title: String
:rtype: String
:returns: the URI of the created item
:raises: APIError if the request was not successful
"""
docname = name + ".txt"
if title is None:
title = name
metadata['dcterms:identifier'] = name
metadata['@type'] = 'ausnc:AusNCObject'
metadata['hcsvlab:display_document'] = {'@id': docname}
metadata['hcsvlab:indexable_document'] = {'@id': docname}
metadata['ausnc:document'] = [{ '@id': 'document1.txt',
'@type': 'foaf:Document',
'dcterms:extent': len(text),
'dcterms:identifier': docname,
'dcterms:title': title,
'dcterms:type': 'Text'}]
meta = {'items': [{'metadata': { '@context': self.context,
'@graph': [metadata]
},
'documents': [{'content': text, 'identifier': docname}]
}]
}
response = self.api_request(collection_uri, method='POST', data=json.dumps(meta))
# this will raise an exception if the request fails
self.__check_success(response)
item_uri = collection_uri + "/" + response['success'][0]
return item_uri
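# Illustrative call (names are examples):
#   item_uri = client.add_text_item(collection_uri, 'item1',
#                                   {'dcterms:title': 'Test Item'},
#                                   text='Hello world\n')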
def add_item(self, collection_uri, name, metadata):
"""Add a new item to a collection
:param collection_uri: The URI that references the collection
:type collection_uri: String
:param name: The item name, suitable for use in a URI (no spaces)
:type name: String
:param metadata: a dictionary of metadata values describing the item
:type metadata: Dict
:rtype: String
:returns: the URI of the created item
:raises: APIError if the request was not successful
"""
metadata['dcterms:identifier'] = name
metadata['dc:identifier'] = name # for backward compatibility in Alveo SOLR store until bug fix
metadata['@type'] = 'ausnc:AusNCObject'
meta = {'items': [{'metadata': { '@context': self.context,
'@graph': [metadata]
}
}]
}
response = self.api_request(collection_uri, method='POST', data=json.dumps(meta))
# this will raise an exception if the request fails
self.__check_success(response)
item_uri = collection_uri + "/" + response['success'][0]
return item_uri
def modify_item(self, item_uri, metadata):
"""Modify the metadata on an item
"""
md = json.dumps({'metadata': metadata})
response = self.api_request(item_uri, method='PUT', data=md)
return self.__check_success(response)
def delete_item(self, item_uri):
"""Delete an item from a collection
:param item_uri: the URI that references the item
:type item_uri: String
:rtype: String
:returns: a message confirming that the metadata is modified
:raises: APIError if the request was not successful
"""
response = self.api_request(item_uri, method='DELETE')
return self.__check_success(response)
def add_document(self, item_uri, name, metadata,
content=None, docurl=None, file=None,
displaydoc=False, preferName=False,
contrib_id=None):
"""Add a document to an existing item
:param item_uri: the URI that references the item
:type item_uri: String
:param name: The document name
:type name: String
:param metadata: a dictionary of metadata values describing the document
:type metadata: Dict
:param content: optional content of the document
:type content: byte array
:param docurl: optional url referencing the document
:type docurl: String
:param file: optional full path to file to be uploaded
:type file: String
:param displaydoc: if True, make this the display document for the item
:type displaydoc: Boolean
:param preferName: if True, given document name will be the document id rather than
filename. Useful if you want to upload under a different filename.
:type preferName: Boolean
:param contrib_id: if present, add this document to this contribution as well as
associating it with the item
:type contrib_id: Integer
:rtype: String
:returns: The URL of the newly created document
"""
if not preferName and file is not None:
docid = os.path.basename(file)
else:
docid = name
docmeta = {"metadata": {"@context": self.context,
"@type": "foaf:Document",
"dcterms:identifier": docid,
}
}
# add in metadata we are passed
docmeta["metadata"].update(metadata)
if contrib_id:
docmeta['contribution_id'] = contrib_id
if content is not None:
docmeta['document_content'] = content
elif docurl is not None:
docmeta["metadata"]["dcterms:source"] = { "@id": docurl }
elif file is not None:
# we only pass the metadata part of the dictionary
docmeta = docmeta['metadata']
else:
raise Exception("One of content, docurl or file must be specified in add_document")
if file is not None:
result = self.api_request(item_uri, method='POST', data={'metadata': json.dumps(docmeta)}, file=file)
else:
result = self.api_request(item_uri, method='POST', data=json.dumps(docmeta))
self.__check_success(result)
if displaydoc:
itemmeta = {"http://alveo.edu.org/vocabulary/display_document": docid}
self.modify_item(item_uri, itemmeta)
doc_uri = item_uri + "/document/" + name
return doc_uri
def delete_document(self, doc_uri):
"""Delete a document from an item
:param doc_uri: the URI that references the document
:type doc_uri: String
:rtype: String
:returns: a message confirming that the document was deleted
:raises: APIError if the request was not successful
"""
result = self.api_request(doc_uri, method='DELETE')
return self.__check_success(result)
@staticmethod
def __check_success(resp):
""" Check a JSON server response to see if it was successful
:type resp: Dictionary (parsed JSON from response)
:param resp: the response string
:rtype: String
:returns: the success message, if it exists
:raises: APIError if the success message is not present
"""
if "success" not in resp.keys():
try:
raise APIError('200', 'Operation Failed', resp["error"])
except KeyError:
raise APIError('200', 'Operation Failed', str(resp))
return resp["success"]
def download_items(self, items, file_path, file_format='zip'):
""" Retrieve a file from the server containing the metadata
and documents for the specified items
:type items: List or ItemGroup
:param items: List of the the URLs of the items to download,
or an ItemGroup object
:type file_path: String
:param file_path: the path to which to save the file
:type file_format: String
:param file_format: the file format to request from the server: specify
either 'zip' or 'warc'
:rtype: String
:returns: the file path
:raises: APIError if the API request is not successful
"""
download_url = '/catalog/download_items'
download_url += '?' + urlencode((('format', file_format),))
item_data = {'items': list(items)}
data = self.api_request(download_url, method='POST', data=json.dumps(item_data), raw=True)
with open(file_path, 'wb') as f:  # raw content is bytes, so write in binary mode
f.write(data)
return file_path
def search_metadata(self, query):
""" Submit a search query to the server and retrieve the results
:type query: String
:param query: the search query
:rtype: ItemGroup
:returns: the search results
:raises: APIError if the API request is not successful
"""
query_url = ('/catalog/search?' +
urlencode((('metadata', query),)))
resp = self.api_request(query_url)
return ItemGroup(resp['items'], self)
def get_item_list(self, item_list_url):
""" Retrieve an item list from the server as an ItemList object
:type item_list_url: String or ItemList
:param item_list_url: URL of the item list to retrieve, or an
ItemList object
:rtype: ItemList
:returns: The ItemList
:raises: APIError if the request was not successful
"""
resp = self.api_request(str(item_list_url))
return ItemList(resp['items'], self, str(item_list_url), resp['name'])
def get_item_list_by_name(self, item_list_name, category='own'):
""" Retrieve an item list from the server as an ItemList object
:type item_list_name: String
:param item_list_name: name of the item list to retrieve
:type category: String
:param category: the category of lists to fetch. At the time of
writing, supported values are "own" and "shared"
:rtype: ItemList
:returns: The ItemList
:raises: APIError if the request was not successful
"""
resp = self.api_request('/item_lists')
for item_list in resp[category]:
if item_list['name'] == item_list_name:
return self.get_item_list(item_list['item_list_url'])
raise ValueError('List does not exist: ' + item_list_name)
def add_to_item_list(self, item_urls, item_list_url):
""" Instruct the server to add the given items to the specified
Item List
:type item_urls: List or ItemGroup
:param item_urls: List of URLs for the items to add,
or an ItemGroup object
:type item_list_url: String or ItemList
:param item_list_url: the URL of the list to which to add the items,
or an ItemList object
:rtype: String
:returns: the server success message, if successful
:raises: APIError if the request was not successful
"""
item_list_url = str(item_list_url)
name = self.get_item_list(item_list_url).name()
return self.add_to_item_list_by_name(item_urls, name)
def add_to_item_list_by_name(self, item_urls, item_list_name):
""" Instruct the server to add the given items to the specified
Item List (which will be created if it does not already exist)
:type item_urls: List or ItemGroup
:param item_urls: List of URLs for the items to add,
or an ItemGroup object
:type item_list_name: String
:param item_list_name: name of the item list to retrieve
:rtype: String
:returns: the server success message, if successful
:raises: APIError if the request was not successful
"""
url_name = urlencode((('name', item_list_name),))
request_url = '/item_lists?' + url_name
data = json.dumps({'items': list(item_urls)})
resp = self.api_request(request_url, method='POST', data=data)
return self.__check_success(resp)
def rename_item_list(self, item_list_url, new_name):
""" Rename an Item List on the server
:type item_list_url: String or ItemList
:param item_list_url: the URL of the list to which to add the items,
or an ItemList object
:type new_name: String
:param new_name: the new name to give the Item List
:rtype: ItemList
:returns: the item list, if successful
:raises: APIError if the request was not successful
"""
data = json.dumps({'name': new_name})
resp = self.api_request(str(item_list_url), data, method="PUT")
try:
return ItemList(resp['items'], self, item_list_url, resp['name'])
except KeyError:
try:
raise APIError('200', 'Rename operation failed', resp['error'])
except KeyError:
raise APIError('200', 'Rename operation failed', resp)
def delete_item_list(self, item_list_url):
""" Delete an Item List on the server
:type item_list_url: String or ItemList
:param item_list_url: the URL of the list to which to add the items,
or an ItemList object
:rtype: Boolean
:returns: True if the item list was deleted
:raises: APIError if the request was not successful
"""
try:
resp = self.api_request(str(item_list_url), method="DELETE")
# all good if it says success
if 'success' in resp:
return True
else:
raise APIError('200', 'Operation Failed', 'Delete operation failed')
except APIError as e:
if e.http_status_code == 302:
return True
else:
raise e
def get_speakers(self, collection_name):
"""Get a list of speaker URLs for this collection
:type collection_name: String
:param collection_name: the name of the collection to search
:rtype: List
:returns: a list of URLs for the speakers associated with
the given collection
"""
speakers_url = "/speakers/"+collection_name
resp = self.api_request(speakers_url)
if 'speakers' in resp:
return resp['speakers']
else:
return []
def get_speaker(self, speaker_url):
"""Given a speaker URL, return a dictionary containing
the speaker metadata.
:type speaker_url: String
:param speaker_url: the URL identifier of the speaker
:rtype: Dict
:returns: a dictionary containing the metadata fields describing
this speaker
"""
return self.api_request(speaker_url)
def add_speaker(self, collection_name, metadata):
"""Add a new speaker to this collection.
:type collection_name: String
:param collection_name: the name of the collection to search
:type metadata: Dict
:param metadata: dictionary of metadata properties and values
for this speaker. Must include 'dcterms:identifier' a unique
identifier for the speaker.
:rtype: String
:returns: the URL of the newly created speaker, or None if there was an
error
"""
if 'dcterms:identifier' not in metadata:
raise APIError(msg="No identifier in speaker metadata")
if '@context' not in metadata:
metadata['@context'] = CONTEXT
speakers_url = "/speakers/"+collection_name+"/"
resp = self.api_request(speakers_url, data=json.dumps(metadata), method="POST")
if 'success' in resp:
return resp['success']['URI']
else:
return None
def delete_speaker(self, speaker_uri):
"""Delete an speaker from a collection
:param speaker_uri: the URI that references the speaker
:type speaker_uri: String
:rtype: Boolean
:returns: True if the speaker was deleted
:raises: APIError if the request was not successful
"""
response = self.api_request(speaker_uri, method='DELETE')
return self.__check_success(response)
def sparql_query(self, collection_name, query):
""" Submit a sparql query to the server to search metadata
and annotations.
:type collection_name: String
:param collection_name: the name of the collection to search
:type query: String
:param query: the sparql query
:rtype: Dict
:returns: the query result from the server as a Python dictionary
following the format of the SPARQL JSON result format documented
at http://www.w3.org/TR/rdf-sparql-json-res/
:raises: APIError if the request was not successful
"""
request_url = '/sparql/' + collection_name + '?'
request_url += urlencode((('query', query),))
return self.api_request(request_url)
def get_contributions(self):
"""Return a list of contributions
:rtype: List
:returns: a dictionary with keys "own", "shared" with values
that are a list of contribution records, each one is a dictionary
with keys "id", "url", "name"
"""
result = self.api_request('/contrib')
return result
def get_contribution(self, url):
"""Get the details of a particular contribution given it's
url"""
result = self.api_request(url)
# add the contrib id into the metadata
result['id'] = os.path.split(result['url'])[1]
return result
def create_contribution(self, metadata):
"""Create a new contribution given a dictionary of metadata
{
"contribution_name": "HelloWorld",
"contribution_collection": "Cooee",
"contribution_text": "This is contribution description",
"contribution_abstract": "This is contribution abstract"
}
:rtype: dict
:returns: The metadata for the created contribution
"""
result = self.api_request('/contrib/', method='POST', data=json.dumps(metadata))
# add the contrib id into the metadata
result['id'] = os.path.split(result['url'])[1]
return result
def delete_contribution(self, url):
"""Delete the contribution with this identifier
:rtype: bool
:returns: True if the contribution was deleted, False otherwise (eg. if it didn't exist)
"""
# first validate that this is a real contrib
try:
result = self.api_request(url)
if 'url' in result and 'documents' in result:
self.api_request(result['url'], method='DELETE')
return True
except APIError:
# the lookup failed, so treat the contribution as nonexistent
pass
return False
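
A minimal usage sketch of the item-list and query methods above. It is illustrative only: the API key, server URL, collection name and item URLs are placeholder assumptions, and a reachable Alveo server is required.

from pyalveo.pyalveo import Client

# Hypothetical credentials; substitute real values.
client = Client(api_key='YOUR-API-KEY',
                api_url='https://app.alveo.edu.au')

# Add two items to a named list (the server creates the list on demand).
items = ['https://app.alveo.edu.au/catalog/cooee/1-001',
         'https://app.alveo.edu.au/catalog/cooee/1-002']
print(client.add_to_item_list_by_name(items, 'my-list'))

# Query collection metadata; the result uses the SPARQL JSON result format.
result = client.sparql_query('cooee', 'SELECT * WHERE { ?s ?p ?o } LIMIT 5')
for binding in result['results']['bindings']:
    print(binding)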
|
class Client(object):
''' Client object used to manipulate Alveo objects and interface
with the API
'''
def __init__(self, api_key=None, cache=None, api_url=None,
use_cache=True, update_cache=True, cache_dir=None,
verifySSL=True,
oauth=None, configfile=None):
''' Construct a new Client with the specified parameters.
Unspecified parameters will be derived from the users ~/alveo.config
file if present.
:type api_key: :class:String
:param api_key: the API key to use
:type cache: :class:Cache
:param cache: the Cache to use
:type api_url: String
:param api_url: the base URL for the API server used
:type use_cache: Boolean
:param use_cache: True to fetch available data from the
cache database, False to always fetch data from the server
:type update_cache: Boolean
:param update_cache: True to update the cache database with
downloaded data, False to never write to the cache
:type verifySSL: Boolean
:param verifySSL: True to enforce checking of SSL certificates, False
to disable checking (eg. for staging/testing servers)
:type oauth: Dictionary
:param oauth: dictionary of configuration settings for oauth containing keys
client_id, client_secret and redirect_url; default None means to use
api_key instead
:type configfile: String
:param configfile: File name to read configuration from, default ~/alveo.config
:rtype: Client
:returns: the new Client
'''
pass
def to_json(self):
'''
Returns a json string containing all relevant data to recreate this pyalveo.Client.
'''
pass
@staticmethod
def from_json(json_data):
'''
Returns a pyalveo.Client given a json string built from the client.to_json() method.
'''
pass
@staticmethod
def _read_config(configfile=None):
pass
def __eq__(self, other):
''' Return true if another Client has all identical fields
:type other: Client
:param other: the other Client to compare to.
:rtype: Boolean
:returns: True if the Clients are identical, otherwise False
'''
pass
def __ne__(self, other):
''' Return true if another Client does not have all identical fields
:type other: Client
:param other: the other Client to compare to.
:rtype: Boolean
:returns: False if the Clients are identical, otherwise True
'''
pass
def api_request(self, url, data=None, method='GET', raw=False, file=None):
''' Perform an API request to the given URL, optionally
including the specified data
:type url: String
:param url: the URL to which to make the request
:type data: String
:param data: the data to send with the request, if any
:type method: String
:param method: the HTTP request method
:type raw: Boolean
:param raw: if True, return the raw response, otherwise treat as JSON and return the parsed response
:type file: String
:param file: (Optional) full path to file to be uploaded in a POST request
:returns: the response from the server either as a raw response or a Python dictionary
generated by parsing the JSON response
:raises: APIError if the API request is not successful
'''
pass
def add_context(self, prefix, url):
''' Add a new entry to the context that will be used
when uploading new metadata records.
:type prefix: String
:param prefix: the namespace prefix (eg. dc)
:type url: String
:param url: the url to associate with the prefix
'''
pass
def get_api_version(self):
''' Retrieve the API version from the server
:rtype: String
:returns: the API version string returned by the server
:raises: APIError if the API request is not successful
'''
pass
def get_annotation_context(self):
''' Retrieve the JSON-LD annotation context from the server
:rtype: Dict
:returns: the annotation context
:raises: APIError if the API request is not successful
'''
pass
def get_collections(self):
'''Retrieve a list of the collection URLs for all collections
hosted on the server.
:rtype: List
:returns: a List of tuples of (name, url) for each collection
'''
pass
def get_item_lists(self):
''' Retrieve metadata about each of the Item Lists associated
with this Client's API key
Returns a List of Dicts, each containing metadata regarding
an Item List, with the following key-value pairs:
- name: the name of the Item List
- url: the URL of the Item List
- num_items: the number of items in the Item List
:rtype: List
:returns: a List of Dicts, each containing metadata regarding
an Item List
:raises: APIError if the API request is not successful
'''
pass
def get_item(self, item_url, force_download=False):
''' Retrieve the item metadata from the server, as an Item object
:type item_url: String or Item
:param item_url: URL of the item, or an Item object
:rtype: Item
:returns: the corresponding metadata, as an Item object
:type force_download: Boolean
:param force_download: True to download from the server
regardless of the cache's contents
:raises: APIError if the API request is not successful
'''
pass
def get_document(self, doc_url, force_download=False):
''' Retrieve the data for the given document from the server
:type doc_url: String or Document
:param doc_url: the URL of the document, or a Document object
:type force_download: Boolean
:param force_download: True to download from the server
regardless of the cache's contents
:rtype: String
:returns: the document data
:raises: APIError if the API request is not successful
'''
pass
def get_primary_text(self, item_url, force_download=False):
''' Retrieve the primary text for an item from the server
:type item_url: String or Item
:param item_url: URL of the item, or an Item object
:type force_download: Boolean
:param force_download: True to download from the server
regardless of the cache's contents
:rtype: String
:returns: the item's primary text if it has one, otherwise None
:raises: APIError if the request was not successful
'''
pass
def get_item_annotations(self, item_url, annotation_type=None, label=None):
''' Retrieve the annotations for an item from the server
:type item_url: String or Item
:param item_url: URL of the item, or an Item object
:type annotation_type: String
:param annotation_type: return only results with a matching Type field
:type label: String
:param label: return only results with a matching Label field
:rtype: String
:returns: the annotations as a dictionary, if the item has
annotations, otherwise None
The annotation dictionary has keys:
commonProperties - properties common to all annotations
@context - the url of the JSON-LD annotation context definition
alveo:annotations - a list of annotations, each is a dictionary
:raises: APIError if the request was not successful
'''
pass
def get_annotation_types(self, item_url):
''' Retrieve the annotation types for the given item from the server
:type item_url: String or Item
:param item_url: URL of the item, or an Item object
:rtype: List
:returns: a List specifying the annotation types
:raises: APIError if the request was not successful
'''
pass
def add_annotations(self, item_url, annotations):
'''Add annotations to the given item
:type item_url: String or Item
:param item_url: the URL of the item corresponding to the annotation,
or an Item object
:type annotations: list
:param annotations: the annotations as a list of dictionaries, each with keys '@type', 'label', 'start', 'end' and 'type'
:rtype: String
:returns: the server's success message, if successful
:raises: APIError if the upload was not successful
:raises: Exception if the annotations are malformed (missing a required key)
'''
pass
def get_collection_info(self, collection_url):
''' Retrieve information about the specified Collection from the server
:type collection_url: String
:param collection_url: the URL of the collection
:rtype: Dict
:returns: a Dict containing information about the Collection
:raises: APIError if the request was not successful
'''
pass
def create_collection(self, name, metadata):
''' Create a new collection with the given name
and attach the metadata.
:param name: the collection name, suitable for use in a URL (no spaces)
:type name: String
:param metadata: a dictionary of metadata values to associate with the new collection
:type metadata: Dict
:rtype: String
:returns: a message confirming creation of the collection
:raises: APIError if the request was not successful
'''
pass
def modify_collection_metadata(self, collection_uri, metadata, replace=None, name=''):
'''Modify the metadata for the given collection.
:param collection_uri: The URI that references the collection
:type collection_uri: String
:param metadata: a dictionary of metadata values to add/modify
:type metadata: Dict
:rtype: String
:returns: a message confirming that the metadata is modified
:raises: APIError if the request was not successful
'''
pass
def get_items(self, collection_uri):
'''Return all items in this collection.
:param collection_uri: The URI that references the collection
:type collection_uri: String
:rtype: List
:returns: a list of the URIs of the items in this collection
'''
pass
def add_text_item(self, collection_uri, name, metadata, text, title=None):
'''Add a new item to a collection containing a single
text document.
The full text of the text document is specified as the text
argument and will be stored with the same name as the
item and a .txt extension.
This is a shorthand for the more general add_item method.
:param collection_uri: The URI that references the collection
:type collection_uri: String
:param name: The item name, suitable for use in a URI (no spaces)
:type name: String
:param metadata: a dictionary of metadata values describing the item
:type metadata: Dict
:param text: the full text of the document associated with this item
:type text: String
:param title: document title, defaults to the item name
:type title: String
:rtype: String
:returns: the URI of the created item
:raises: APIError if the request was not successful
'''
pass
def add_item(self, collection_uri, name, metadata):
'''Add a new item to a collection
:param collection_uri: The URI that references the collection
:type collection_uri: String
:param name: The item name, suitable for use in a URI (no spaces)
:type name: String
:param metadata: a dictionary of metadata values describing the item
:type metadata: Dict
:rtype: String
:returns: the URI of the created item
:raises: APIError if the request was not successful
'''
pass
def modify_item(self, item_uri, metadata):
'''Modify the metadata on an item
'''
pass
def delete_item(self, item_uri):
'''Delete an item from a collection
:param item_uri: the URI that references the item
:type item_uri: String
:rtype: String
:returns: a message confirming that the metadata is modified
:raises: APIError if the request was not successful
'''
pass
def add_document(self, item_uri, name, metadata,
content=None, docurl=None, file=None,
displaydoc=False, preferName=False,
contrib_id=None):
'''Add a document to an existing item
:param item_uri: the URI that references the item
:type item_uri: String
:param name: The document name
:type name: String
:param metadata: a dictionary of metadata values describing the document
:type metadata: Dict
:param content: optional content of the document
:type content: byte array
:param docurl: optional url referencing the document
:type docurl: String
:param file: optional full path to file to be uploaded
:type file: String
:param displaydoc: if True, make this the display document for the item
:type displaydoc: Boolean
:param preferName: if True, given document name will be the document id rather than
filename. Useful if you want to upload under a different filename.
:type preferName: Boolean
:param contrib_id: if present, add this document to this contribution as well as
associating it with the item
:type contrib_id: Integer
:rtype: String
:returns: The URL of the newly created document
'''
pass
def delete_document(self, doc_uri):
'''Delete a document from an item
:param doc_uri: the URI that references the document
:type doc_uri: String
:rtype: String
:returns: a message confirming that the document was deleted
:raises: APIError if the request was not successful
'''
pass
@staticmethod
def __check_success(resp):
''' Check a JSON server response to see if it was successful
:type resp: Dictionary (parsed JSON from response)
:param resp: the response string
:rtype: String
:returns: the success message, if it exists
:raises: APIError if the success message is not present
'''
pass
def download_items(self, items, file_path, file_format='zip'):
''' Retrieve a file from the server containing the metadata
and documents for the specified items
:type items: List or ItemGroup
:param items: List of the the URLs of the items to download,
or an ItemGroup object
:type file_path: String
:param file_path: the path to which to save the file
:type file_format: String
:param file_format: the file format to request from the server: specify
either 'zip' or 'warc'
:rtype: String
:returns: the file path
:raises: APIError if the API request is not successful
'''
pass
def search_metadata(self, query):
''' Submit a search query to the server and retrieve the results
:type query: String
:param query: the search query
:rtype: ItemGroup
:returns: the search results
:raises: APIError if the API request is not successful
'''
pass
def get_item_list(self, item_list_url):
''' Retrieve an item list from the server as an ItemList object
:type item_list_url: String or ItemList
:param item_list_url: URL of the item list to retrieve, or an
ItemList object
:rtype: ItemList
:returns: The ItemList
:raises: APIError if the request was not successful
'''
pass
def get_item_list_by_name(self, item_list_name, category='own'):
''' Retrieve an item list from the server as an ItemList object
:type item_list_name: String
:param item_list_name: name of the item list to retrieve
:type category: String
:param category: the category of lists to fetch. At the time of
writing, supported values are "own" and "shared"
:rtype: ItemList
:returns: The ItemList
:raises: APIError if the request was not successful
'''
pass
def add_to_item_list(self, item_urls, item_list_url):
''' Instruct the server to add the given items to the specified
Item List
:type item_urls: List or ItemGroup
:param item_urls: List of URLs for the items to add,
or an ItemGroup object
:type item_list_url: String or ItemList
:param item_list_url: the URL of the list to which to add the items,
or an ItemList object
:rtype: String
:returns: the server success message, if successful
:raises: APIError if the request was not successful
'''
pass
def add_to_item_list_by_name(self, item_urls, item_list_name):
''' Instruct the server to add the given items to the specified
Item List (which will be created if it does not already exist)
:type item_urls: List or ItemGroup
:param item_urls: List of URLs for the items to add,
or an ItemGroup object
:type item_list_name: String
:param item_list_name: name of the item list to retrieve
:rtype: String
:returns: the server success message, if successful
:raises: APIError if the request was not successful
'''
pass
def rename_item_list(self, item_list_url, new_name):
''' Rename an Item List on the server
:type item_list_url: String or ItemList
:param item_list_url: the URL of the list to which to add the items,
or an ItemList object
:type new_name: String
:param new_name: the new name to give the Item List
:rtype: ItemList
:returns: the item list, if successful
:raises: APIError if the request was not successful
'''
pass
def delete_item_list(self, item_list_url):
''' Delete an Item List on the server
:type item_list_url: String or ItemList
:param item_list_url: the URL of the list to which to add the items,
or an ItemList object
:rtype: Boolean
:returns: True if the item list was deleted
:raises: APIError if the request was not successful
'''
pass
def get_speakers(self, collection_name):
'''Get a list of speaker URLs for this collection
:type collection_name: String
:param collection_name: the name of the collection to search
:rtype: List
:returns: a list of URLs for the speakers associated with
the given collection
'''
pass
def get_speaker(self, speaker_url):
'''Given a speaker URL, return a dictionary containing
the speaker metadata.
:type speaker_url: String
:param speaker_url: the URL identifier of the speaker
:rtype: Dict
:returns: a dictionary containing the metadata fields describing
this speaker
'''
pass
def add_speaker(self, collection_name, metadata):
'''Add a new speaker to this collection.
:type collection_name: String
:param collection_name: the name of the collection to search
:type metadata: Dict
:param metadata: dictionary of metadata properties and values
for this speaker. Must include 'dcterms:identifier' a unique
identifier for the speaker.
:rtype: String
:returns: the URL of the newly created speaker, or None if there was an
error
'''
pass
def delete_speaker(self, speaker_uri):
'''Delete a speaker from a collection
:param speaker_uri: the URI that references the speaker
:type speaker_uri: String
:rtype: Boolean
:returns: True if the speaker was deleted
:raises: APIError if the request was not successful
'''
pass
def sparql_query(self, collection_name, query):
''' Submit a sparql query to the server to search metadata
and annotations.
:type collection_name: String
:param collection_name: the name of the collection to search
:type query: String
:param query: the sparql query
:rtype: Dict
:returns: the query result from the server as a Python dictionary
following the format of the SPARQL JSON result format documented
at http://www.w3.org/TR/rdf-sparql-json-res/
:raises: APIError if the request was not successful
'''
pass
def get_contributions(self):
'''Return a list of contributions
:rtype: List
:returns: a dictionary with keys "own", "shared" with values
that are a list of contribution records, each one is a dictionary
with keys "id", "url", "name"
'''
pass
def get_contribution(self, url):
'''Get the details of a particular contribution given its URL'''
pass
def create_contribution(self, metadata):
'''Create a new contribution given a dictionary of metadata
{
"contribution_name": "HelloWorld",
"contribution_collection": "Cooee",
"contribution_text": "This is contribution description",
"contribution_abstract": "This is contribution abstract"
}
:rtype: dict
:returns: The metadata for the created contribution
'''
pass
def delete_contribution(self, url):
'''Delete the contribution with this identifier
:rtype: bool
:returns: True if the contribution was deleted, False otherwise (eg. if it didn't exist)
'''
pass
| 50 | 46 | 24 | 6 | 9 | 9 | 2 | 1.06 | 1 | 12 | 6 | 0 | 43 | 8 | 46 | 46 | 1,169 | 317 | 414 | 144 | 358 | 439 | 330 | 132 | 283 | 8 | 1 | 3 | 104 |
4,388 |
Alveo/pyalveo
|
Alveo_pyalveo/pyalveo/pyalveo.py
|
pyalveo.pyalveo.APIError
|
class APIError(Exception):
""" Raised when an API operation fails for some reason """
def __init__(self, http_status_code=None, response=None, msg=''):
self.http_status_code = http_status_code
self.response = response
self.msg = msg
Exception.__init__(self, str(self))
def __str__(self):
ret = 'Error: '
if self.http_status_code:
ret += "HTTP " + str(self.http_status_code) + "\n"
if self.response:
ret += self.response + "\n"
return ret + self.msg
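
A hedged sketch of handling APIError, reusing the placeholder client from the earlier sketch; the item-list URL below is assumed, not real.

from pyalveo.pyalveo import APIError

try:
    client.delete_item_list('https://app.alveo.edu.au/item_lists/9999')
except APIError as e:
    # __str__ combines the HTTP status code, server response and message.
    print('request failed:', e)
    if e.http_status_code == 404:
        print('no such item list on the server')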
|
class APIError(Exception):
''' Raised when an API operation fails for some reason '''
def __init__(self, http_status_code=None, response=None, msg=''):
pass
def __str__(self):
pass
| 3 | 1 | 7 | 1 | 6 | 0 | 2 | 0.08 | 1 | 1 | 0 | 0 | 2 | 3 | 2 | 12 | 16 | 2 | 13 | 7 | 10 | 1 | 13 | 7 | 10 | 3 | 3 | 1 | 4 |
4,389 |
Alveo/pyalveo
|
Alveo_pyalveo/pyalveo/objects.py
|
pyalveo.objects.ItemList
|
class ItemList(ItemGroup):
""" Represents a Alveo Item List residing on the server
Extends ItemGroup with additional Item List-specific functionality
"""
def __init__(self, item_urls, client, url, name):
""" Construct a new ItemList
:type item_urls: List or ItemGroup
:param item_urls: a List of the item URLs in this Item List,
or an ItemGroup object
:type client: Client
:param client: the API client to use for API operations
:type url: String
:param url: the URL of this Item List
:type name: String
:param name: the name of this Item List
:rtype: ItemList
:returns: the new ItemList
"""
super(ItemList, self).__init__(list(item_urls), client)  # copy the URLs into this group
self.list_url = url
self.list_name = name
def __str__(self):
""" Return the URL corresponding to this ItemList
:rtype: String
:returns: the URL
"""
return self.url()
def name(self):
""" Return the name of this Item List
:rtype: String
:returns: the name of this Item List
"""
return self.list_name
def url(self):
""" Return the URL corresponding to this ItemList
:rtype: String
:returns: the URL
"""
return self.list_url
def refresh(self):
""" Update this ItemList by re-downloading it from the server
:rtype: ItemList
:returns: this ItemList, after the refresh
:raises: APIError if the API request is not successful
"""
refreshed = self.client.get_item_list(self.url())
self.item_urls = refreshed.urls()
self.list_name = refreshed.name()
return self
def append(self, items):
""" Add some items to this ItemList and save the changes to the server
:param items: the items to add, either as a List of Item objects, an
ItemList, a List of item URLs as Strings, a single item URL as a
String, or a single Item object
:rtype: String
:returns: the server success message
:raises: APIError if the API request is not successful
"""
resp = self.client.add_to_item_list(items, self.url())
self.refresh()
return resp
def __eq__(self, other):
""" Return true if another ItemList has all identical fields
:type other: ItemList
:param other: the other ItemList to compare to.
:rtype: Boolean
:returns: True if the ItemLists are identical, otherwise False
"""
return (self.url() == other.url() and
self.name() == other.name() and
super(ItemList, self).__eq__(other))
def __ne__(self, other):
""" Return true if another ItemList does not have all identical fields
:type other: ItemList
:param other: the other ItemList to compare to.
:rtype: Boolean
:returns: False if the ItemLists are identical, otherwise True
"""
return not self.__eq__(other)
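
A short sketch of working with an ItemList, assuming the placeholder client from earlier and an item list named 'my-list' already on the server.

item_list = client.get_item_list_by_name('my-list')
print(item_list.name(), len(item_list))  # list name and item count
# append() saves the change to the server and then refreshes this object.
item_list.append('https://app.alveo.edu.au/catalog/cooee/1-003')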
|
class ItemList(ItemGroup):
''' Represents an Alveo Item List residing on the server
Extends ItemGroup with additional Item List-specific functionality
'''
def __init__(self, item_urls, client, url, name):
''' Construct a new ItemList
:type item_urls: List or ItemGroup
:param item_urls: a List of the item URLs in this Item List,
or an ItemGroup object
:type client: Client
:param client: the API client to use for API operations
:type url: String
:param url: the URL of this Item List
:type name: String
:param name: the name of this Item List
:rtype: ItemList
:returns: the new ItemList
'''
pass
def __str__(self):
''' Return the URL corresponding to this ItemList
:rtype: String
:returns: the URL
'''
pass
def name(self):
''' Return the name of this Item List
:rtype: String
:returns: the name of this Item List
'''
pass
def url(self):
''' Return the URL corresponding to this ItemList
:rtype: String
:returns: the URL
'''
pass
def refresh(self):
''' Update this ItemList by re-downloading it from the server
:rtype: ItemList
:returns: this ItemList, after the refresh
:raises: APIError if the API request is not successful
'''
pass
def append(self, items):
''' Add some items to this ItemList and save the changes to the server
:param items: the items to add, either as a List of Item objects, an
ItemList, a List of item URLs as Strings, a single item URL as a
String, or a single Item object
:rtype: String
:returns: the server success message
:raises: APIError if the API request is not successful
'''
pass
def __eq__(self, other):
''' Return true if another ItemList has all identical fields
:type other: ItemList
:param other: the other ItemList to compare to.
:rtype: Boolean
:returns: True if the ItemLists are identical, otherwise False
'''
pass
def __ne__(self, other):
''' Return true if another ItemList does not have all identical fields
:type other: ItemList
:param other: the other ItemList to compare to.
:rtype: Boolean
:returns: False if the ItemLists are identical, otherwise True
'''
pass
| 9 | 9 | 13 | 4 | 3 | 6 | 1 | 2.08 | 1 | 2 | 0 | 0 | 8 | 3 | 8 | 25 | 124 | 45 | 26 | 14 | 17 | 54 | 24 | 14 | 15 | 1 | 2 | 0 | 8 |
4,390 |
Alveo/pyalveo
|
Alveo_pyalveo/pyalveo/objects.py
|
pyalveo.objects.ItemGroup
|
class ItemGroup(object):
""" Represents an ordered group of Alveo items"""
def __init__(self, item_urls, client):
""" Construct a new ItemGroup
:type item_urls: List or ItemGroup
:param item_urls: List of URLs of items in this group,
or an ItemGroup object
:type client: Client
:param client: the API client to use for API operations
:rtype: ItemGroup
:returns: the new ItemGroup
"""
self.item_urls = list(item_urls)
self.client = client
def set_client(self, new_client):
""" Set the Client for this ItemGroup
:type new_client: Client
:param new_client: the new Client
:rtype: Client
:returns: the new Client
"""
self.client = new_client
return new_client
def __eq__(self, other):
""" Return true if another ItemGroup has all identical fields
:type other: ItemGroup
:param other: the other ItemGroup to compare to.
:rtype: Boolean
:returns: True if the ItemGroups are identical, otherwise False
"""
return (self.urls() == other.urls() and self.client == other.client)
def __ne__(self, other):
""" Return true if another ItemGroup does not have all identical fields
:type other: ItemGroup
:param other: the other ItemGroup to compare to.
:rtype: Boolean
:returns: False if the ItemGroups are identical, otherwise True
"""
return not self.__eq__(other)
def __contains__(self, item):
""" Check if the given item is in this ItemGroup
:param item: either an item URL as a String, or an Item object
:rtype: Boolean
:returns: True if the item is present, False otherwise
"""
return str(item) in self.item_urls
def __add__(self, other):
""" Returns the union of this ItemGroup and another ItemGroup
which has an identical Client
:type other: ItemGroup
:param other: the other ItemGroup
:rtype: ItemGroup
:returns: A new ItemGroup containing the union of the member items
of this and the other group
@raises ValueError: if the other ItemGroup does not have the same Client
"""
if self.client != other.client:
raise ValueError("To add ItemGroups, they must have the same Client")
# copy, so that building the union does not mutate this group's own list
combined_list = list(self.item_urls)
combined_list += [url for url in other.item_urls if url not in self.item_urls]
return ItemGroup(combined_list, self.client)
def __sub__(self, other):
""" Returns the relative complement of this ItemGroup in another
ItemGroup which has an identical Client
:type other: ItemGroup
:param other: the other ItemGroup
:rtype: ItemGroup
:returns: a new ItemGroup containing all member items of this
ItemGroup except those also appearing in the other ItemGroup
@raises ValueError: if the other ItemGroup does not have the same Client
"""
if self.client != other.client:
raise ValueError("To subtract ItemGroups, they must have the same Client")
new_list = [url for url in self.item_urls if url not in other.item_urls]
return ItemGroup(new_list, self.client)
def intersection(self, other):
""" Returns the intersection of this ItemGroup with another ItemGroup
which has an identical Client
:type other: ItemGroup
:param other: the other ItemGroup
:rtype: ItemGroup
:returns: a new ItemGroup containing all items that appear in both groups
@raises ValueError: if the other ItemGroup does not have the same Client
"""
if self.client != other.client:
raise ValueError("To intersect ItemGroups, they must have the same Client")
new_list = [url for url in self.item_urls if url in other.item_urls]
return ItemGroup(new_list, self.client)
def __iter__(self):
""" Iterate over the item URLs in this ItemGroup
:rtype: iterator
:returns: an iterator over the item URLs in this ItemGroup
"""
return iter(self.item_urls)
def __len__(self):
""" Return the number of items in this ItemGroup
:rtype: int
:returns: the number of items in this ItemGroup
"""
return len(self.item_urls)
def get_all(self, force_download=False):
""" Retrieve the metadata for all items in this list from the server,
as Item objects
:rtype: List
:returns: a List of the corresponding Item objects
:type force_download: Boolean
:param force_download: True to download from the server
regardless of the cache's contents
:raises: APIError if the API request is not successful
"""
cl = self.client
return [cl.get_item(item, force_download) for item in self.item_urls]
def item_url(self, item_index):
""" Return the URL of the specified item
:type item_index: int
:param item_index: the index of the item URL
:rtype: String
:returns: the URL of the item
"""
return self.item_urls[item_index]
def __getitem__(self, key):
""" Return the URL of the specified item
:type key: int
:param key: the index of the item URL
:rtype: String
:returns: the URL of the item
"""
try:
return self.item_urls[key]
except (IndexError, ValueError) as e:
raise KeyError(str(e))
def urls(self):
""" Return a list of all item URLs for this ItemGroup
:rtype: List
:returns: List of item URLs
"""
return self.item_urls
def get_item(self, item_index, force_download=False):
""" Retrieve the metadata for a specific item in this ItemGroup
:type item_index: int
:param item_index: the index of the item
:type force_download: Boolean
:param force_download: True to download from the server
regardless of the cache's contents
:rtype: Item
:returns: the metadata, as an Item object
:raises: APIError if the API request is not successful
"""
return self.client.get_item(self.item_urls[item_index], force_download)
def add_to_item_list_by_name(self, name):
""" Add the items in this ItemGroup to the specified Item List on
the server, creating the item list if it does not already exist
:type name: String
:param name: the name of the Item List
:rtype: String
:returns: the URL of the Item List
:raises: APIError if the API request is not successful
"""
return self.client.add_to_item_list_by_name(self.item_urls, name)
def add_to_item_list(self, item_list_url):
""" Add the items in this ItemGroup to the specified Item List on
the server, creating the item list if it does not already exist
:type item_list_url: String or ItemList
:param item_list_url: the URL of the Item List,
or an ItemList object
:rtype: String
:returns: the URL of the Item List
:raises: APIError if the API request is not successful
"""
return self.client.add_to_item_list(self.item_urls, item_list_url)
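
A sketch of the set-style operators defined above. Both groups must share the same Client, and the item URLs here are placeholders.

from pyalveo.objects import ItemGroup

g1 = ItemGroup(['http://example.org/item/1', 'http://example.org/item/2'], client)
g2 = ItemGroup(['http://example.org/item/2', 'http://example.org/item/3'], client)
print((g1 + g2).urls())             # union: items 1, 2 and 3
print((g1 - g2).urls())             # relative complement: item 1 only
print(g1.intersection(g2).urls())   # intersection: item 2 only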
|
class ItemGroup(object):
''' Represents an ordered group of Alveo items'''
def __init__(self, item_urls, client):
''' Construct a new ItemGroup
:type item_urls: List or ItemGroup
:param item_urls: List of URLs of items in this group,
or an ItemGroup object
:type client: Client
:param client: the API client to use for API operations
:rtype: ItemGroup
:returns: the new ItemGroup
'''
pass
def set_client(self, new_client):
''' Set the Client for this ItemGroup
:type new_client: Client
:param new_client: the new Client
:rtype: Client
:returns: the new Client
'''
pass
def __eq__(self, other):
''' Return true if another ItemGroup has all identical fields
:type other: ItemGroup
:param other: the other ItemGroup to compare to.
:rtype: Boolean
:returns: True if the ItemGroups are identical, otherwise False
'''
pass
def __ne__(self, other):
''' Return true if another ItemGroup does not have all identical fields
:type other: ItemGroup
:param other: the other ItemGroup to compare to.
:rtype: Boolean
:returns: False if the ItemGroups are identical, otherwise True
'''
pass
def __contains__(self, item):
''' Check if the given item is in this ItemGroup
:param item: either an item URL as a String, or an Item object
:rtype: Boolean
:returns: True if the item is present, False otherwise
'''
pass
def __add__(self, other):
''' Returns the union of this ItemGroup and another ItemGroup
which has an identical Client
:type other: ItemGroup
:param other: the other ItemGroup
:rtype: ItemGroup
:returns: A new ItemGroup containing the union of the member items
of this and the other group
@raises ValueError: if the other ItemGroup does not have the same Client
'''
pass
def __sub__(self, other):
''' Returns the relative complement of this ItemGroup in another
ItemGroup which has an identical Client
:type other: ItemGroup
:param other: the other ItemGroup
:rtype: ItemGroup
:returns: a new ItemGroup containing all member items of this
ItemGroup except those also appearing in the other ItemGroup
@raises ValueError: if the other ItemGroup does not have the same Client
'''
pass
def intersection(self, other):
''' Returns the intersection of this ItemGroup with another ItemGroup
which has an identical Client
:type other: ItemGroup
:param other: the other ItemGroup
:rtype: ItemGroup
:returns: a new ItemGroup containing all items that appear in both groups
@raises ValueError: if the other ItemGroup does not have the same Client
'''
pass
def __iter__(self):
''' Iterate over the item URLs in this ItemGroup
:rtype: iterator
:returns: an iterator over the item URLs in this ItemGroup
'''
pass
def __len__(self):
''' Return the number of items in this ItemGroup
:rtype: int
:returns: the number of items in this ItemGroup
'''
pass
def get_all(self, force_download=False):
''' Retrieve the metadata for all items in this list from the server,
as Item objects
:rtype: List
:returns: a List of the corresponding Item objects
:type force_download: Boolean
:param force_download: True to download from the server
regardless of the cache's contents
:raises: APIError if the API request is not successful
'''
pass
def item_url(self, item_index):
''' Return the URL of the specified item
:type item_index: int
:param item_index: the index of the item URL
:rtype: String
:returns: the URL of the item
'''
pass
def __getitem__(self, key):
''' Return the URL of the specified item
:type key: int
:param key: the index of the item URL
:rtype: String
:returns: the URL of the item
'''
pass
def urls(self):
''' Return a list of all item URLs for this ItemGroup
:rtype: List
:returns: List of item URLs
'''
pass
def get_item(self, item_index, force_download=False):
''' Retrieve the metadata for a specific item in this ItemGroup
:type item_index: int
:param item_index: the index of the item
:type force_download: Boolean
:param force_download: True to download from the server
regardless of the cache's contents
:rtype: Item
:returns: the metadata, as an Item object
:raises: APIError if the API request is not successful
'''
pass
def add_to_item_list_by_name(self, name):
''' Add the items in this ItemGroup to the specified Item List on
the server, creating the item list if it does not already exist
:type name: String
:param name: the name of the Item List
:rtype: String
:returns: the URL of the Item List
:raises: APIError if the API request is not successful
'''
pass
def add_to_item_list(self, item_list_url):
''' Add the items in this ItemGroup to the specified Item List on
the server, creating the item list if it does not already exist
:type item_list_url: String or ItemList
:param item_list_url: the URL of the Item List,
or an ItemList object
:rtype: String
:returns: the URL of the Item List
:raises: APIError if the API request is not successful
'''
pass
| 18 | 18 | 14 | 4 | 3 | 7 | 1 | 2.33 | 1 | 5 | 0 | 1 | 17 | 2 | 17 | 17 | 271 | 101 | 51 | 25 | 33 | 119 | 51 | 24 | 33 | 2 | 1 | 1 | 21 |
4,391 |
Alveo/pyalveo
|
Alveo_pyalveo/pyalveo/objects.py
|
pyalveo.objects.Item
|
class Item(object):
""" Represents a single Alveo item """
def __init__(self, metadata, client):
""" Create a new Item object
:type metadata: Dict
:param metadata: the metadata for this Item
:type client: Client
:param client: the API client to use for API operations
:rtype: Item
:returns: the new Item
"""
self.item_url = metadata['alveo:catalog_url']
self.item_metadata = metadata
self.client = client
def metadata(self):
""" Return the metadata for this Item
:rtype: Dict
:returns: the metadata for this Item
"""
return self.item_metadata
def url(self):
""" Return the URL for this Item
:rtype: String
:returns: the URL for this Item
"""
return self.item_url
def get_documents(self):
""" Return the metadata for each of the documents corresponding
to this Item, each as a Document object
:rtype: List
:returns: a list of Document objects corresponding to this
Item's documents
"""
return [Document(d, self.client) for d in self.metadata()['alveo:documents']]
def get_document(self, index=0):
""" Return the metadata for the specified document, as a
Document object
:type index: int
:param index: the index of the document
:rtype: Document
:returns: the metadata for the specified document
"""
try:
return Document(self.metadata()['alveo:documents'][index], self.client)
except IndexError:
raise ValueError('No document exists for this item with index: '
+ str(index))
def get_primary_text(self, force_download=False):
""" Retrieve the primary text for this item from the server
:type force_download: Boolean
:param force_download: True to download from the server
regardless of the cache's contents
:rtype: String
:returns: the primary text
:raises: APIError if the API request is not successful
"""
return self.client.get_primary_text(self.url(), force_download)
def get_annotations(self, atype=None, label=None):
""" Retrieve the annotations for this item from the server
:type atype: String
:param atype: return only results with a matching Type field
:type label: String
:param label: return only results with a matching Label field
:rtype: String
:returns: the annotations as a JSON string
:raises: APIError if the API request is not successful
"""
return self.client.get_item_annotations(self.url(), atype, label)
def get_annotation_types(self):
""" Retrieve the annotation types for this item from the server
:rtype: List
:returns: a List specifying the annotation types
:raises: APIError if the request was not successful
"""
return self.client.get_annotation_types(self.url())
def add_annotations(self, annotations):
"""Add annotations to an item
:type annotations: List
:param annotations: the annotations, a list of dictionaries
:rtype: String
:returns: the server success response
:raises: APIError if the API request is not successful
"""
return self.client.add_annotations(self.url(), annotations)
def __str__(self):
""" Return the URL of this Item
:rtype: String
:returns: the URL of this Item
"""
return self.url()
def __eq__(self, other):
""" Return true if and only if this Item is identical to another
:type other: Item
:param other: the other Item
:rtype: Boolean
:returns: True if both Items have all identical fields, otherwise False
"""
return (self.url() == other.url() and
self.metadata() == other.metadata() and
self.client == other.client)
def __ne__(self, other):
""" Return true if and only if this Item is not identical to another
:type other: Item
:param other: the other Item
:rtype: Boolean
:returns: False if both Items have all identical fields, otherwise True
"""
return not self.__eq__(other)
def add_to_item_list(self, item_list_url):
""" Add this item to the specified Item List on the server
:type item_list_url: String or ItemList
:param item_list_url: the URL of the Item list,
or an ItemList object
:rtype: String
:returns: the URL of the Item List
:raises: APIError if the API request is not successful
"""
return self.client.add_to_item_list([self.url()], item_list_url)
def add_to_item_list_by_name(self, name):
""" Add this item to the specified Item List on the server
:type name: String
:param name: the name of the Item list
:rtype: String
:returns: the URL of the Item List
:raises: APIError if the API request is not successful
"""
return self.client.add_to_item_list_by_name([self.url()], name)
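
A sketch of a typical Item workflow, assuming the placeholder client from earlier and that the item URL exists on the server.

item = client.get_item('https://app.alveo.edu.au/catalog/cooee/1-001')
print(item.url())
for doc in item.get_documents():     # one Document per attached file
    print(doc.url())
print(item.get_annotation_types())   # annotation types recorded for the item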
|
class Item(object):
''' Represents a single Alveo item '''
def __init__(self, metadata, client):
''' Create a new Item object
:type metadata: Dict
:param metadata: the metadata for this Item
:type client: Client
:param client: the API client to use for API operations
:rtype: Item
:returns: the new Item
'''
pass
def metadata(self):
''' Return the metadata for this Item
:rtype: Dict
:returns: the metadata for this Item
'''
pass
def url(self):
''' Return the URL for this Item
:rtype: String
:returns: the URL for this Item
'''
pass
def get_documents(self):
''' Return the metadata for each of the documents corresponding
to this Item, each as a Document object
:rtype: List
:returns: a list of Document objects corresponding to this
Item's documents
'''
pass
def get_document(self, index=0):
''' Return the metadata for the specified document, as a
Document object
:type index: int
:param index: the index of the document
:rtype: Document
:returns: the metadata for the specified document
'''
pass
def get_primary_text(self, force_download=False):
''' Retrieve the primary text for this item from the server
:type force_download: Boolean
:param force_download: True to download from the server
regardless of the cache's contents
:rtype: String
:returns: the primary text
:raises: APIError if the API request is not successful
'''
pass
def get_annotations(self, atype=None, label=None):
''' Retrieve the annotations for this item from the server
:type atype: String
:param atype: return only results with a matching Type field
:type label: String
:param label: return only results with a matching Label field
:rtype: String
:returns: the annotations as a JSON string
:raises: APIError if the API request is not successful
'''
pass
def get_annotation_types(self):
''' Retrieve the annotation types for this item from the server
:rtype: List
:returns: a List specifying the annotation types
:raises: APIError if the request was not successful
'''
pass
def add_annotations(self, annotations):
'''Add annotations to an item
:type annotations: List
:param annotations: the annotations, a list of dictionaries
:rtype: String
:returns: the server success response
:raises: APIError if the API request is not successful
'''
pass
def __str__(self):
''' Return the URL of this Item
:rtype: String
:returns: the URL of this Item
'''
pass
def __eq__(self, other):
''' Return true if and only if this Item is identical to another
:type other: Item
:param other: the other Item
:rtype: Boolean
:returns: True if both Items have all identical fields, otherwise False
'''
pass
def __ne__(self, other):
''' Return true if and only if this Item is not identical to another
:type other: Item
:param other: the other Item
:rtype: Boolean
:returns: False if both Items have all identical fields, otherwise True
'''
pass
def add_to_item_list(self, item_list_url):
''' Add this item to the specified Item List on the server
:type item_list_url: String or ItemList
:param item_list_url: the URL of the Item list,
or an ItemList object
:rtype: String
:returns: the URL of the Item List
:raises: APIError if the API request is not successful
'''
pass
def add_to_item_list_by_name(self, name):
''' Add this item to the specified Item List on the server
:type name: String
:param name: the name of the Item list
:rtype: String
:returns: the URL of the Item List
:raises: APIError if the API request is not successful
'''
pass
| 15 | 15 | 13 | 4 | 3 | 6 | 1 | 2.43 | 1 | 4 | 1 | 0 | 14 | 3 | 14 | 14 | 209 | 82 | 37 | 18 | 22 | 90 | 34 | 18 | 19 | 2 | 1 | 1 | 15 |
4,392 |
Alveo/pyalveo
|
Alveo_pyalveo/pyalveo/objects.py
|
pyalveo.objects.Document
|
class Document(object):
""" Represents a single Alveo document """
def __init__(self, metadata, client):
""" Create a new Document
:type metadata: Dict
:param metadata: the metadata for this Document
:type client: Client
:param client: the API client to use for API operations
:rtype: Document
:returns: the new Document
"""
self.doc_url = metadata['alveo:url']
self.doc_metadata = metadata
self.client = client
def metadata(self):
""" Return the metadata for this Document
:rtype: Dict
:returns: the metadata for this Document
"""
return self.doc_metadata
def url(self):
""" Return the URL for this Document
:rtype: String
:returns: the URL for this Document
"""
return self.doc_url
def __str__(self):
""" Return the URL of this Document
:rtype: String
:returns: the URL of this Document
"""
return self.url()
def __eq__(self, other):
""" Return true if and only if this Document is identical to another
:type other: Document
:param other: the other Document
:rtype: Boolean
:returns: True if both Documents have all identical fields, otherwise False
"""
return (self.url() == other.url() and
self.metadata() == other.metadata() and
self.client == other.client)
def __ne__(self, other):
""" Return true if and only if this Document is not identical to another
:type other: Document
:param other: the other Document
:rtype: Boolean
:returns: False if both Documents have all identical fields, otherwise True
"""
return not self.__eq__(other)
def get_content(self, force_download=False):
""" Retrieve the content for this Document from the server
:type force_download: Boolean
:param force_download: True to download from the server
regardless of the cache's contents
:rtype: String
:returns: the content data
:raises: APIError if the API request is not successful
"""
return self.client.get_document(self.url(), force_download)
def get_filename(self):
""" Get the original filename for this document
:rtype: String
:returns: the filename
"""
return unquote(self.url().rsplit('/',1)[1])
def download_content(self, dir_path='', filename=None,
force_download=False):
""" Download the content for this document to a file
:type dir_path: String
:param dir_path: the path to which to write the data
:type filename: String
:param filename: filename to write to (if None, defaults to the document's
name, as specified by its metadata)
:type force_download: Boolean
:param force_download: True to download from the server
regardless of the cache's contents
:rtype: String
:returns: the path to the downloaded file
:raises: APIError if the API request is not successful
"""
if filename is None:
filename = self.get_filename()
path = os.path.join(dir_path, filename)
data = self.client.get_document(self.url(), force_download)
with open(path, 'wb') as f:
f.write(data)
return path
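
A sketch of downloading a document's content, continuing the placeholder item from the previous sketch.

doc = item.get_document(0)                 # first document of the item
path = doc.download_content(dir_path='.')  # filename defaults to the document's own
print('saved', doc.get_filename(), 'to', path)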
|
class Document(object):
''' Represents a single Alveo document '''
def __init__(self, metadata, client):
''' Create a new Document
:type metadata: Dict
:param metadata: the metadata for this Document
:type client: Client
:param client: the API client to use for API operations
:rtype: Document
:returns: the new Document
'''
pass
def metadata(self):
''' Return the metadata for this Document
:rtype: Dict
:returns: the metadata for this Document
'''
pass
def url(self):
''' Return the URL for this Document
:rtype: String
:returns: the URL for this Document
'''
pass
def __str__(self):
''' Return the URL of this Document
:rtype: String
:returns: the URL of this Document
'''
pass
def __eq__(self, other):
''' Return true if and only if this Document is identical to another
:type other: Document
:param other: the other Document
:rtype: Boolean
:returns: True if both Documents have all identical fields, otherwise False
'''
pass
def __ne__(self, other):
''' Return true if and only if this Document is not identical to another
:type other: Document
:param other: the other Document
:rtype: Boolean
:returns: False if both Documents have all identical fields, otherwise True
'''
pass
def get_content(self, force_download=False):
''' Retrieve the content for this Document from the server
:type force_download: Boolean
:param force_download: True to download from the server
regardless of the cache's contents
:rtype: String
:returns: the content data
:raises: APIError if the API request is not successful
'''
pass
def get_filename(self):
''' Get the original filename for this document
:rtype: String
:returns: the filename
'''
pass
def download_content(self, dir_path='', filename=None,
force_download=False):
''' Download the content for this document to a file
:type dir_path: String
:param dir_path: the path to which to write the data
:type filename: String
:param filename: filename to write to (if None, defaults to the document's
name, as specified by its metadata)
:type force_download: Boolean
:param force_download: True to download from the server
regardless of the cache's contents
:rtype: String
:returns: the path to the downloaded file
:raises: APIError if the API request is not successful
'''
pass
| 10 | 10 | 13 | 4 | 3 | 6 | 1 | 1.93 | 1 | 0 | 0 | 0 | 9 | 3 | 9 | 9 | 139 | 51 | 30 | 17 | 19 | 58 | 27 | 15 | 17 | 2 | 1 | 1 | 10 |
4,393 |
Alveo/pyalveo
|
Alveo_pyalveo/pyalveo/cache.py
|
pyalveo.cache.Cache
|
class Cache(object):
""" Handles caching for Alveo API Client objects """
def __init__(self, cache_dir, max_age=0):
""" Create a new Cache object
:type cache_dir: String
:param: cache_dir: directory to store cache database and large files
:type max_age: int
:param max_age: cache entries older than this many seconds will be
ignored by the has_item, has_document and has_primary_text methods
:rtype: Cache
:returns: the new Cache
"""
self.max_age = max_age
self.cache_dir = os.path.expanduser(cache_dir)
self.database = os.path.join(self.cache_dir, 'alveo_cache.db')
self.file_dir = os.path.join(self.cache_dir, 'files')
# create file_dir using makedirs which will also make cache_dir if needed
if not os.path.exists(self.file_dir):
os.makedirs(self.file_dir)
elif not os.path.isdir(self.file_dir):
raise Exception("file_dir exists and is not a directory")
if not os.path.isfile(self.database):
self.create_cache_database()
self.conn = sqlite3.connect(self.database)
self.conn.text_factory = str
def to_dict(self):
"""
Returns a dict of all of its necessary components.
Not the same as the built-in __dict__ attribute.
"""
data = dict()
data['max_age'] = self.max_age
data['cache_dir'] = self.cache_dir
return data
def to_json(self):
"""
Returns a json string containing all relevant data to recreate this pyalveo.Cache.
"""
return json.dumps(self.to_dict())
@staticmethod
def from_json(json_data):
"""
Returns a pyalveo.Cache given a json string built from the cache.to_json() method.
"""
#If we have a string, then decode it, otherwise assume it's already decoded
if isinstance(json_data, str):
data = json.loads(json_data)
else:
data = json_data
cache = Cache(cache_dir=data.get('cache_dir', None), max_age=data.get('max_age', 0))
return cache
def create_cache_database(self):
""" Create a new SQLite3 database for use with Cache objects
:raises: IOError if there is a problem creating the database file
"""
conn = sqlite3.connect(self.database)
conn.text_factory = str
c = conn.cursor()
c.execute("""CREATE TABLE items
(url text, metadata text, datetime text)""")
c.execute("""CREATE TABLE documents
(url text, path text, datetime text)""")
c.execute("""CREATE TABLE primary_texts
(item_url text, primary_text text, datetime text)""")
conn.commit()
conn.close()
def __eq__(self, other):
""" Return True if this cache has identical fields to another
:type other: Cache
:param other: the other Cache
:rtype: Boolean
:returns: True if the caches are identical, otherwise False
"""
return(self.max_age == other.max_age and
self.database == other.database)
def __ne__(self, other):
""" Return False if this cache has all identical fields to another
:type other: Cache
:param other: the other Cache
:rtype: Boolean
:returns: False if the caches are identical, otherwise True
"""
return not self.__eq__(other)
def __del__(self):
""" Close the database connection """
self.conn.close()
def __exists_row_not_too_old(self, row):
""" Check if the given row exists and is not too old """
if row is None:
return False
record_time = dateutil.parser.parse(row[2])
now = datetime.datetime.now(dateutil.tz.gettz())
# a positive age means the record was created in the past
age = (now - record_time).total_seconds()
if self.max_age > 0 and age > self.max_age:
return False
return True
@staticmethod
def __now_iso_8601():
""" Get the current local time as an ISO 8601 string """
return datetime.datetime.now(dateutil.tz.gettz()).isoformat()
def has_item(self, item_url):
""" Check if the metadata for the given item is present in
the cache
If the max_age attribute of this Cache is set to a nonzero value,
entries older than the value of max_age in seconds will be ignored
:type item_url: String or Item
:param item_url: the URL of the item, or an Item object
:rtype: Boolean
:returns: True if the item is present, False otherwise
"""
c = self.conn.cursor()
c.execute("SELECT * FROM items WHERE url=?", (str(item_url),))
row = c.fetchone()
c.close()
return self.__exists_row_not_too_old(row)
def has_document(self, doc_url):
""" Check if the content of the given document is present
in the cache
If the max_age attribute of this Cache is set to a nonzero value,
entries older than the value of max_age in seconds will be ignored
:type doc_url: String or Document
:param doc_url: the URL of the document, or a Document object
:rtype: Boolean
:returns: True if the data is present, False otherwise
"""
c = self.conn.cursor()
c.execute("SELECT * FROM documents WHERE url=?", (str(doc_url),))
row = c.fetchone()
c.close()
return self.__exists_row_not_too_old(row)
def has_primary_text(self, item_url):
""" Check if the primary text corresponding to the
given item is present in the cache
If the max_age attribute of this Cache is set to a nonzero value,
entries older than the value of max_age in seconds will be ignored
:type item_url: String or Item
:param item_url: the URL of the item, or an Item object
:rtype: Boolean
:returns: True if the primary text is present, False otherwise
"""
c = self.conn.cursor()
c.execute("SELECT * FROM primary_texts WHERE item_url=?",
(str(item_url),))
row = c.fetchone()
c.close()
return self.__exists_row_not_too_old(row)
def get_item(self, item_url):
""" Retrieve the metadata for the given item from the cache.
:type item_url: String or Item
:param item_url: the URL of the item, or an Item object
:rtype: String
:returns: the item metadata, as a JSON string
:raises: ValueError if the item is not in the cache
"""
c = self.conn.cursor()
c.execute("SELECT * FROM items WHERE url=?", (str(item_url),))
row = c.fetchone()
c.close()
if row is None:
raise ValueError("Item not present in cache")
return row[1]
def get_document(self, doc_url):
""" Retrieve the content for the given document from the cache.
:type doc_url: String or Document
:param doc_url: the URL of the document, or a Document object
:rtype: String
:returns: the document data
:raises: ValueError if the item is not in the cache
"""
c = self.conn.cursor()
c.execute("SELECT * FROM documents WHERE url=?", (str(doc_url),))
row = c.fetchone()
c.close()
if row is None:
raise ValueError("Item not present in cache")
file_path = row[1]
try:
with open(file_path, 'rb') as f:
return f.read()
        except IOError as e:
            raise IOError("Error reading file " + file_path +
                          " to retrieve document " + str(doc_url) +
                          ": " + str(e))
def get_primary_text(self, item_url):
""" Retrieve the primary text for the given item from the cache.
:type item_url: String or Item
:param item_url: the URL of the item, or an Item object
:rtype: String
:returns: the primary text
:raises: ValueError if the primary text is not in the cache
"""
c = self.conn.cursor()
c.execute("SELECT * FROM primary_texts WHERE item_url=?",
(str(item_url),))
row = c.fetchone()
c.close()
if row is None:
raise ValueError("Item not present in cache")
return row[1]
def add_item(self, item_url, item_metadata):
""" Add the given item to the cache database, updating
the existing metadata if the item is already present
:type item_url: String or Item
:param item_url: the URL of the item, or an Item object
:type item_metadata: String
:param item_metadata: the item's metadata, as a JSON string
"""
c = self.conn.cursor()
c.execute("DELETE FROM items WHERE url=?", (str(item_url),))
self.conn.commit()
c.execute("INSERT INTO items VALUES (?, ?, ?)",
(str(item_url), item_metadata, self.__now_iso_8601()))
self.conn.commit()
c.close()
def __generate_filepath(self):
""" Generate a unique (absolute) file path within the file_dir directory
:rtype: String
:returns: a unique file path
"""
file_path = os.path.join(self.file_dir, str(uuid.uuid4()))
if os.path.exists(file_path):
            warnings.warn("Generated cache file path already exists "
                          "(UUID collision); retrying with a new UUID")
return self.__generate_filepath()
return file_path
def add_document(self, doc_url, data):
""" Add the given document to the cache, updating
the existing content data if the document is already present
:type doc_url: String or Document
:param doc_url: the URL of the document, or a Document object
:type data: String
:param data: the document's content data
"""
file_path = self.__generate_filepath()
with open(file_path, 'wb') as f:
f.write(data)
c = self.conn.cursor()
c.execute("SELECT * FROM documents WHERE url=?", (str(doc_url),))
for row in c.fetchall():
old_file_path = row[1]
if os.path.isfile(old_file_path):
os.unlink(old_file_path)
c.execute("DELETE FROM documents WHERE url=?", (str(doc_url),))
self.conn.commit()
c.execute("INSERT INTO documents VALUES (?, ?, ?)",
(str(doc_url), file_path, self.__now_iso_8601()))
self.conn.commit()
c.close()
def add_primary_text(self, item_url, primary_text):
""" Add the given primary text to the cache database, updating
the existing record if the primary text is already present
:type item_url: String or Item
:param item_url: the URL of the corresponding item, or an Item object
:type primary_text: String
:param primary_text: the item's primary text
"""
c = self.conn.cursor()
c.execute("DELETE FROM primary_texts WHERE item_url=?",
(str(item_url),))
self.conn.commit()
c.execute("INSERT INTO primary_texts VALUES (?, ?, ?)",
(str(item_url), primary_text, self.__now_iso_8601()))
self.conn.commit()
c.close()
|
class Cache(object):
''' Handles caching for Alveo API Client objects '''
def __init__(self, cache_dir, max_age=0):
''' Create a new Cache object
:type cache_dir: String
        :param cache_dir: directory to store cache database and large files
:type max_age: int
:param max_age: cache entries older than this many seconds will be
ignored by the has_item, has_document and has_primary_text methods
:rtype: Cache
:returns: the new Cache
'''
pass
def to_dict(self):
'''
        Returns a dict of all of its necessary components.
Not the same as the __dict__ method
'''
pass
def to_json(self):
'''
        Returns a json string containing all relevant data to recreate this pyalveo.Cache.
'''
pass
@staticmethod
def from_json(json_data):
'''
        Returns a pyalveo.Cache given a json string built from the cache.to_json() method.
'''
pass
def create_cache_database(self):
''' Create a new SQLite3 database for use with Cache objects
:raises: IOError if there is a problem creating the database file
'''
pass
def __eq__(self, other):
''' Return True if this cache has identical fields to another
:type other: Cache
:param other: the other Cache
:rtype: Boolean
        :returns: True if the caches are identical, otherwise False
'''
pass
def __ne__(self, other):
''' Return False if this cache has all identical fields to another
:type other: Cache
:param other: the other Cache
:rtype: Boolean
        :returns: False if the caches are identical, otherwise True
'''
pass
def __del__(self):
''' Close the database connection '''
pass
def __exists_row_not_too_old(self, row):
''' Check if the given row exists and is not too old '''
pass
@staticmethod
def __now_iso_8601():
''' Get the current local time as an ISO 8601 string '''
pass
def has_item(self, item_url):
''' Check if the metadata for the given item is present in
the cache
If the max_age attribute of this Cache is set to a nonzero value,
entries older than the value of max_age in seconds will be ignored
:type item_url: String or Item
:param item_url: the URL of the item, or an Item object
:rtype: Boolean
:returns: True if the item is present, False otherwise
'''
pass
def has_document(self, doc_url):
''' Check if the content of the given document is present
in the cache
If the max_age attribute of this Cache is set to a nonzero value,
entries older than the value of max_age in seconds will be ignored
:type doc_url: String or Document
:param doc_url: the URL of the document, or a Document object
:rtype: Boolean
:returns: True if the data is present, False otherwise
'''
pass
def has_primary_text(self, item_url):
''' Check if the primary text corresponding to the
given item is present in the cache
If the max_age attribute of this Cache is set to a nonzero value,
entries older than the value of max_age in seconds will be ignored
:type item_url: String or Item
:param item_url: the URL of the item, or an Item object
:rtype: Boolean
:returns: True if the primary text is present, False otherwise
'''
pass
def get_item(self, item_url):
''' Retrieve the metadata for the given item from the cache.
:type item_url: String or Item
:param item_url: the URL of the item, or an Item object
:rtype: String
:returns: the item metadata, as a JSON string
:raises: ValueError if the item is not in the cache
'''
pass
def get_document(self, doc_url):
''' Retrieve the content for the given document from the cache.
:type doc_url: String or Document
:param doc_url: the URL of the document, or a Document object
:rtype: String
:returns: the document data
:raises: ValueError if the item is not in the cache
'''
pass
def get_primary_text(self, item_url):
''' Retrieve the primary text for the given item from the cache.
:type item_url: String or Item
:param item_url: the URL of the item, or an Item object
:rtype: String
:returns: the primary text
:raises: ValueError if the primary text is not in the cache
'''
pass
def add_item(self, item_url, item_metadata):
''' Add the given item to the cache database, updating
the existing metadata if the item is already present
:type item_url: String or Item
:param item_url: the URL of the item, or an Item object
:type item_metadata: String
:param item_metadata: the item's metadata, as a JSON string
'''
pass
def __generate_filepath(self):
''' Generate a unique (absolute) file path within the file_dir directory
:rtype: String
:returns: a unique file path
'''
pass
def add_document(self, doc_url, data):
''' Add the given document to the cache, updating
the existing content data if the document is already present
:type doc_url: String or Document
:param doc_url: the URL of the document, or a Document object
:type data: String
:param data: the document's content data
'''
pass
def add_primary_text(self, item_url, primary_text):
''' Add the given primary text to the cache database, updating
the existing record if the primary text is already present
:type item_url: String or Item
:param item_url: the URL of the corresponding item, or an Item object
:type primary_text: String
:param primary_text: the item's primary text
'''
pass
| 23 | 21 | 16 | 3 | 7 | 6 | 2 | 0.75 | 1 | 5 | 0 | 0 | 18 | 5 | 20 | 20 | 363 | 100 | 150 | 59 | 127 | 113 | 134 | 54 | 113 | 4 | 1 | 2 | 33 |
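For readability, here is a minimal usage sketch of the Cache API above; the directory name and URLs are illustrative, not taken from the source:

import shutil

import pyalveo

cache = pyalveo.Cache('tmp_cache', max_age=3600)  # entries older than an hour count as absent for has_*

item_url = 'http://example.org/catalog/demo/item-1'  # hypothetical item URL
cache.add_item(item_url, '{"example": "metadata"}')

if cache.has_item(item_url):
    print(cache.get_item(item_url))  # -> '{"example": "metadata"}'

# Document content is written to a file under the cache directory and indexed by URL.
doc_url = item_url + '/document/sample.txt'
cache.add_document(doc_url, b'document bytes')
print(cache.get_document(doc_url))  # -> b'document bytes'

shutil.rmtree('tmp_cache', ignore_errors=True)  # clean up the illustrative directory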
4,394 |
Alveo/pyalveo
|
Alveo_pyalveo/tests/pyalveo_test.py
|
tests.pyalveo_test.ClientTest
|
class ClientTest(unittest.TestCase):
def test_create_client(self, m):
""" Test that the clients can be created with or without alveo.config file
and correct database is created """
m.get(API_URL + "/item_lists.json",
json={'failure': 'Client could not be created. Check your api key'},
status_code=401)
# Test with wrong api key
with self.assertRaises(pyalveo.APIError) as cm:
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY)
client.get_item_lists()
self.assertTrue(
"Client could not be created. Check your api key" in str(cm.exception)
)
m.get(API_URL + "/item_lists.json",json={'success': 'yes'})
alveo_config_path = os.path.expanduser('~/alveo.config')
cache_db_path = 'tmp'
if False:
# how to mock these?
# Test when alveo.config is present
if os.path.exists(alveo_config_path):
client = pyalveo.Client()
self.assertEqual(type(client), pyalveo.Client)
else:
# Test when alveo.config is absent
with self.assertRaises(IOError) as cm:
client = pyalveo.Client()
self.assertEqual(
"Could not find file ~/alveo.config. Please download your configuration file from http://pyalveo.org.au/ OR try to create a client by specifying your api key",
str(cm.exception)
)
# Test with correct api key
client = pyalveo.Client()
self.assertEqual(type(client), pyalveo.Client)
def test_to_from_json(self,m):
""" Test packing the oath object into a json form then reloading it. """
api_url = 'https://example.org'
api_key = 'thisisrandomtext'
verifySSL = False
cache_dir = 'tmp'
oauth_dict = {
'client_id':'morerandomtext',
'client_secret':'secretrandomtext',
'redirect_url':'https://anotherfake.com'
}
expected_json = '{"use_cache": false, "api_url": "https://example.org", "cache": {"max_age": 0, "cache_dir": "tmp"}, "cache_dir": "tmp", "update_cache": true, "oauth": {"client_id": "morerandomtext", "state": "cgLXfsICCMsuTeY6HWkzsqMPyxTA8K", "token": null, "auth_url": "https://example.org/oauth/authorize?response_type=code&client_id=morerandomtext&redirect_uri=https%3A%2F%2Fanotherfake.com&state=cgLXfsICCMsuTeY6HWkzsqMPyxTA8K", "redirect_url": "https://anotherfake.com", "client_secret": "secretrandomtext", "api_key": "secretkey", "verifySSL": false, "api_url": "https://example.org"}, "api_key": "secretkey"}'
client = pyalveo.Client(api_url=api_url,oauth=oauth_dict,verifySSL=verifySSL,use_cache=False,cache_dir=cache_dir, configfile="tests/alveo.config")
json_string = client.to_json()
#Test json comes out as expected
#A state will be generated which should be different always
#So we need to load the json into a dict, remove the state key then check equality
json_dict = json.loads(json_string)
expected_dict = json.loads(expected_json)
json_dict['oauth'].pop('state',None)
expected_dict['oauth'].pop('state',None)
#Do the same with auth url as it's a string that contains the state
json_dict['oauth'].pop('auth_url',None)
expected_dict['oauth'].pop('auth_url',None)
#Do the same with cache dir as that also can't be predicted
json_dict['cache'].pop('cache_dir',None)
expected_dict['cache'].pop('cache_dir',None)
json_dict.pop('cache_dir',None)
expected_dict.pop('cache_dir',None)
self.assertEqual(json_dict, expected_dict)
client2 = pyalveo.Client.from_json(json_string)
#Test generated json creates an identical object
#These should have identical states however
self.assertEqual(client, client2)
starting_json = '{"use_cache": true, "api_url": "https://example.org", "cache": {"max_age": 0, "cache_dir": "tmp"}, "cache_dir": "tmp", "update_cache": true, "oauth": {"client_id": null, "state": null, "token": null, "auth_url": null, "redirect_url": null, "client_secret": null, "api_key": "thisisrandomtext", "verifySSL": false, "api_url": "https://example.org"}, "api_key": "thisisrandomtext"}'
client = pyalveo.Client(api_url=api_url,api_key=api_key,verifySSL=verifySSL,use_cache=True,cache_dir=cache_dir)
client2 = pyalveo.Client.from_json(starting_json)
#test manually created json creates an identical cache to one properly setup
self.assertEqual(client, client2)
def test_client_context(self, m):
"""add_context extends the context that is used by the client"""
m.get(API_URL + "/item_lists.json",json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY)
client.add_context('test', 'http://test.org/')
self.assertIn('test', client.context)
self.assertEqual('http://test.org/', client.context['test'])
def test_client_cache(self, m):
"""Test that we can create a client with a cache enabled and that it caches things"""
cache_dir = "tmp"
m.get(API_URL + "/item_lists.json",json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY, use_cache=True, cache_dir=cache_dir)
item_url = client.oauth.api_url + "/catalog/cooee/1-190"
item_meta = ""
self.addCleanup(shutil.rmtree, cache_dir, True)
self.assertEqual(type(client.cache), pyalveo.Cache)
with open('tests/responses/1-190.json', 'rb') as rh:
m.get(item_url, body=rh)
item = client.get_item(item_url)
self.assertEqual(type(item), pyalveo.Item)
# look in the cache for this item metadata
self.assertTrue(client.cache.has_item(item_url))
meta = client.cache.get_item(item_url)
# check a few things about the metadata json
self.assertIn("@context", meta.decode('utf-8'))
self.assertIn(item_url, meta.decode('utf-8'))
# get a document
with open('tests/responses/1-190-plain.txt', 'rb') as rh:
m.get(item_url + "/document/1-190-plain.txt", body=rh)
doc = item.get_document(0)
self.assertEqual(type(doc), pyalveo.Document)
doc_content = doc.get_content()
self.assertEqual(doc_content[:20].decode(), "Sydney, New South Wa")
# there should be a cached file somewhere under cache_dir
ldir = os.listdir(os.path.join(cache_dir, "files"))
self.assertEqual(1, len(ldir))
# the content of the file should be the same as our doc_content
with open(os.path.join(cache_dir, "files", ldir[0]), 'rb') as h:
self.assertEqual(h.read(), doc_content)
# now trigger a cache hit
doc_content_cache = doc.get_content()
self.assertEqual(doc_content, doc_content_cache)
def test_client_no_cache(self, m):
"""Test that we can create and use a client without a cache enabled"""
m.get(API_URL + "/item_lists.json",json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY, use_cache=False)
item_url = client.oauth.api_url + "/catalog/cooee/1-190"
item_meta = ""
with open('tests/responses/1-190.json', 'rb') as rh:
m.get(item_url, body=rh)
item = client.get_item(item_url)
self.assertEqual(type(item), pyalveo.Item)
# get a document
with open('tests/responses/1-190-plain.txt', 'rb') as rh:
m.get(item_url + "/document/1-190-plain.txt", body=rh)
doc = item.get_document(0)
self.assertEqual(type(doc), pyalveo.Document)
doc_content = doc.get_content()
self.assertEqual(doc_content[:20].decode(), "Sydney, New South Wa")
def test_identical_clients(self, m):
""" Test that multiple clients can be created with default configuration or specific configuration
and check if they are identical or not """
m.get(API_URL + "/item_lists.json",json={'success': 'yes'})
first_client = pyalveo.Client(api_url=API_URL, api_key=API_KEY, use_cache=False)
second_client = pyalveo.Client(api_url=API_URL, api_key=API_KEY, use_cache=False)
self.assertTrue(first_client.__eq__(second_client))
self.assertTrue(second_client.__eq__(first_client))
first_client = pyalveo.Client(api_url=API_URL, api_key=API_KEY, cache="cache.db", use_cache=True, update_cache=True)
second_client = pyalveo.Client(api_url=API_URL, api_key=API_KEY, cache="cache.db", use_cache=True, update_cache=True)
# Two clients created with same api key and same arguments must be same
self.assertTrue(first_client.__eq__(second_client))
self.assertTrue(second_client.__eq__(first_client))
# Two clients with same api key but diffent database configuration must be different
third_client = pyalveo.Client(api_url=API_URL, api_key=API_KEY, cache="cache.db", use_cache=False, update_cache=False)
self.assertTrue(first_client.__ne__(third_client))
self.assertTrue(second_client.__ne__(third_client))
        # Two clients created separately with identical arguments should also be equal
        # (the original chained assignment made this comparison trivially true)
        first_client = pyalveo.Client(api_url=API_URL, api_key=API_KEY, use_cache=False)
        second_client = pyalveo.Client(api_url=API_URL, api_key=API_KEY, use_cache=False)
self.assertTrue(first_client.__eq__(second_client))
def test_item_download(self, m):
"""Test access to individual items"""
m.get(API_URL + "/item_lists.json",json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY, use_cache=True)
item_url = client.oauth.api_url + '/catalog/cooee/1-190'
with open('tests/responses/1-190.json', 'rb') as rh:
m.get(item_url, body=rh)
item = client.get_item(item_url)
self.assertEqual(item_url, item.url())
meta = item.metadata()
self.assertEqual(meta['alveo:primary_text_url'], client.oauth.api_url + u'/catalog/cooee/1-190/primary_text.json')
# now try it with the cache, should not make a request
item2 = client.get_item(item_url)
self.assertEqual(item_url, item2.url())
self.assertEqual(item.metadata(), item2.metadata())
def test_download_document(self, m):
"""Download a document"""
m.get(API_URL + "/item_lists.json",json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY, use_cache=False)
# temp directory for output
output_dir = tempfile.mkdtemp()
outname = "downloaded_sample.wav"
document_url = client.oauth.api_url + '/catalog/cooee/1-190/document/sample.wav'
meta = {'alveo:url': document_url}
document = pyalveo.Document(meta, client)
with open('tests/responses/sample.wav', 'rb') as rh:
m.get(document_url, body=rh)
document.download_content(output_dir, outname, force_download=True)
self.assertTrue(os.path.exists(os.path.join(output_dir, outname)))
def test_item_lists(self, m):
""" Test that the item list can be created, item can be added to the item list,
item list can be renamed and deleted """
m.get(API_URL + "/item_lists.json",json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY, use_cache=False)
base_url = client.oauth.api_url
item_list_name = 'pyalveo_test_item_list'
msg = '1 items added to new item list ' + item_list_name
m.post(API_URL + '/item_lists',json={'success': msg})
new_item_url_1 = [base_url + '/catalog/ace/A01a']
self.assertEqual(client.add_to_item_list_by_name(new_item_url_1, item_list_name), msg)
with open('tests/responses/item-lists.json', 'rb') as fd:
m.get(API_URL + '/item_lists', content=fd.read())
with open('tests/responses/item-list-831.json') as fd:
ilist_831 = json.loads(fd.read())
m.get(API_URL + '/item_lists/831', json=ilist_831)
my_list = client.get_item_list_by_name(item_list_name)
self.assertEqual(my_list.name(), item_list_name)
msg = '1 items added to existing item list ' + item_list_name
m.post(API_URL + '/item_lists',json={'success': msg})
        new_item_url_2 = [base_url + '/catalog/ace/A01b']
self.assertEqual(client.add_to_item_list(new_item_url_2, my_list.url()), '1 items added to existing item list ' + my_list.name())
# Test Rename List
ilist_831['name'] = 'brand new list'
m.put(API_URL + '/item_lists/831', json=ilist_831)
client.rename_item_list(my_list, 'brand new list')
# Deleting an Item List
m.delete(API_URL + '/item_lists/831', json={'success': 'item list deleted'})
self.assertEqual(client.delete_item_list(my_list), True)
# deleting an Item List that isn't there raises an exception
m.delete(API_URL + '/item_lists/831', status_code=404)
self.assertRaises(pyalveo.APIError, client.delete_item_list, my_list)
def test_get_annotations(self, m):
m.get(API_URL + "/item_lists.json",json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY, use_cache=False)
item_url = client.oauth.api_url + "/catalog/ace/A01b"
with open('tests/responses/A01b.json', 'rb') as fd:
m.get(item_url, content=fd.read())
item = client.get_item(item_url)
# get annotations for this item of type 'speaker'
ann_url = item_url + '/annotations.json'
with open('tests/responses/A01b-annotations.json', 'rb') as fd:
m.get(ann_url, content=fd.read())
anns = item.get_annotations(atype=u'http://ns.ausnc.org.au/schemas/annotation/ice/speaker')
self.assertListEqual(sorted(anns.keys()), [u'@context', u'alveo:annotations', u'commonProperties'])
ann = anns['alveo:annotations'][0]
self.assertEqual(sorted(ann.keys()), [u'@id', u'@type', u'end', u'start', u'type'])
def test_sparql_query(self, m):
"""Can we run a simple SPARQL query"""
m.get(API_URL + "/item_lists.json",json={'success': 'yes'})
client = pyalveo.Client(api_url=API_URL, api_key=API_KEY, use_cache=False)
query = """select * where { ?a ?b ?c } LIMIT 10"""
m.get(API_URL + "/sparql/mitcheldelbridge", json={'results': {'bindings': [1,2,3,4,5,6,7,8,9,0]}})
result = client.sparql_query('mitcheldelbridge', query)
self.assertIn('results', result)
self.assertIn('bindings', result['results'])
self.assertEqual(len(result['results']['bindings']), 10)
|
class ClientTest(unittest.TestCase):
def test_create_client(self, m):
''' Test that the clients can be created with or without alveo.config file
and correct database is created '''
pass
def test_to_from_json(self,m):
''' Test packing the oath object into a json form then reloading it. '''
pass
def test_client_context(self, m):
'''add_context extends the context that is used by the client'''
pass
def test_client_cache(self, m):
'''Test that we can create a client with a cache enabled and that it caches things'''
pass
def test_client_no_cache(self, m):
'''Test that we can create and use a client without a cache enabled'''
pass
def test_identical_clients(self, m):
''' Test that multiple clients can be created with default configuration or specific configuration
and check if they are identical or not '''
pass
def test_item_download(self, m):
'''Test access to individual items'''
pass
def test_download_document(self, m):
'''Download a document'''
pass
def test_item_lists(self, m):
''' Test that the item list can be created, item can be added to the item list,
item list can be renamed and deleted '''
pass
def test_get_annotations(self, m):
pass
def test_sparql_query(self, m):
'''Can we run a simple SPARQL query'''
pass
| 12 | 10 | 29 | 7 | 18 | 4 | 1 | 0.21 | 1 | 7 | 5 | 0 | 11 | 0 | 11 | 83 | 337 | 98 | 197 | 83 | 185 | 42 | 185 | 75 | 173 | 3 | 2 | 3 | 13 |
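Every test method above takes an extra `m` argument, which is the signature of requests_mock's Mocker applied as a class decorator; the decorator itself sits above the class definition, outside this excerpt, so treat the sketch below as an assumption about how these tests are wired (the API_URL and API_KEY values are placeholders):

import unittest

import requests_mock

import pyalveo

API_URL = 'https://app.example.org'  # assumed values; the real constants are defined elsewhere
API_KEY = 'fakekey'


@requests_mock.Mocker()
class MiniClientTest(unittest.TestCase):
    def test_client_creation(self, m):
        # Every HTTP request made by the client is intercepted by `m`.
        m.get(API_URL + '/item_lists.json', json={'success': 'yes'})
        client = pyalveo.Client(api_url=API_URL, api_key=API_KEY, use_cache=False)
        client.get_item_lists()  # would raise pyalveo.APIError on a non-2xx response


if __name__ == '__main__':
    unittest.main()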
4,395 |
Alveo/pyalveo
|
Alveo_pyalveo/tests/cache_test.py
|
tests.cache_test.CacheTest
|
class CacheTest(unittest.TestCase):
def test_create_cache(self):
""" Test that we can make a chache and all the right things happen"""
file_dir = 'tmp'
cache_db_path = os.path.join(file_dir, 'alveo_cache.db')
self.addCleanup(shutil.rmtree, file_dir, True)
cache = pyalveo.Cache(file_dir)
self.assertEqual(type(cache), pyalveo.Cache)
self.assertTrue(os.path.exists(cache_db_path))
# Test all the tables in the database
conn = sqlite3.connect(cache_db_path)
cursor = conn.cursor()
sql = "SELECT * from sqlite_master WHERE type = 'table'"
cursor.execute(sql)
result = cursor.fetchall()
        self.assertEqual(3, len(result), "there should be 3 tables in the database")
self.assertEqual(result[0][1], 'items', "first table should be items")
self.assertEqual(result[1][1], 'documents', "second table should be documents")
self.assertEqual(result[2][1], 'primary_texts', "third table should be primary_texts")
conn.close()
def test_add_item(self):
"""Test adding an item to the cache and retrieving it"""
item_url = 'http://foo.org/one/two/three.jpg'
item_meta = "{'one': 'two'}"
file_dir = 'tmp'
cache_db_path = os.path.join(file_dir, 'alveo_cache.db')
self.addCleanup(shutil.rmtree, file_dir, True)
cache = pyalveo.Cache(file_dir)
cache.add_item(item_url, item_meta)
self.assertTrue(cache.has_item(item_url))
self.assertEqual(item_meta, cache.get_item(item_url))
def test_to_from_json(self):
""" Test packing the cache into a json form then reloading it. """
file_dir = 'tmp'
expected_json = '{"max_age": 0, "cache_dir": "tmp"}'
cache = pyalveo.Cache(file_dir)
json_string = cache.to_json()
#Test json comes out as expected
json_dict = json.loads(json_string)
expected_dict = json.loads(expected_json)
self.assertEqual(json_dict, expected_dict)
cache2 = pyalveo.Cache.from_json(json_string)
#Test generated json creates an identical object
self.assertEqual(cache, cache2)
starting_json = '{"max_age": 500, "cache_dir": "tmp"}'
cache = pyalveo.Cache(file_dir,max_age=500)
cache2 = pyalveo.Cache.from_json(starting_json)
#test manually created json creates an identical cache to one properly setup
self.assertEqual(cache, cache2)
def test_add_primary_text(self):
"""Test adding a primary text to the cache and retrieving it"""
item_url = 'http://foo.org/one/two/three.jpg'
        item_text = "one two buckle my shoe"
file_dir = 'tmp'
cache_db_path = os.path.join(file_dir, 'alveo_cache.db')
self.addCleanup(shutil.rmtree, file_dir, True)
cache = pyalveo.Cache(file_dir)
cache.add_primary_text(item_url, item_text)
self.assertTrue(cache.has_primary_text(item_url))
self.assertEqual(item_text, cache.get_primary_text(item_url))
def test_add_document(self):
"""Test adding a document to the cache and retrieving it"""
item_url = 'http://foo.org/one/two/three.jpg'
item_data = "this is the text of a sample document".encode()
file_dir = 'tmp'
cache_db_path = os.path.join(file_dir, 'alveo_cache.db')
self.addCleanup(shutil.rmtree, file_dir, True)
cache = pyalveo.Cache(file_dir)
cache.add_document(item_url, item_data)
self.assertTrue(cache.has_document(item_url))
self.assertEqual(item_data, cache.get_document(item_url))
|
class CacheTest(unittest.TestCase):
def test_create_cache(self):
        ''' Test that we can make a cache and all the right things happen'''
pass
def test_add_item(self):
'''Test adding an item to the cache and retrieving it'''
pass
def test_to_from_json(self):
''' Test packing the cache into a json form then reloading it. '''
pass
def test_add_primary_text(self):
'''Test adding a primary text to the cache and retrieving it'''
pass
def test_add_document(self):
'''Test adding a document to the cache and retrieving it'''
pass
| 6 | 5 | 20 | 6 | 12 | 2 | 1 | 0.15 | 1 | 2 | 1 | 0 | 5 | 0 | 5 | 77 | 107 | 36 | 62 | 36 | 56 | 9 | 62 | 36 | 56 | 1 | 2 | 0 | 5 |
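One behaviour these tests do not exercise is expiry: `has_item` respects `max_age` while `get_item` does not. A small sketch of that distinction, assuming the age comparison in `__exists_row_not_too_old` computes the record's age as now minus record time (as corrected above); the directory name is illustrative:

import shutil
import time

import pyalveo

cache = pyalveo.Cache('tmp_expiry', max_age=1)  # entries older than 1 second count as absent
cache.add_item('http://example.org/item', '{"k": "v"}')

assert cache.has_item('http://example.org/item')
time.sleep(2)
assert not cache.has_item('http://example.org/item')  # too old for has_item now
assert cache.get_item('http://example.org/item') == '{"k": "v"}'  # get_item ignores max_age

shutil.rmtree('tmp_expiry', ignore_errors=True)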
4,396 |
AmanoTeam/amanobot
|
AmanoTeam_amanobot/test/test3a_routing.py
|
test3a_routing.RegexHandler
|
class RegexHandler:
@staticmethod
def on_CS101(msg, match):
print('Someone mentioned CS101 !!!', msg, match.groups())
@staticmethod
def on_CS202(msg, match):
print('Someone mentioned CS202 !!!', msg, match.groups())
@staticmethod
def no_cs_courses_mentioned(msg):
print('No CS courses mentioned ...', msg)
@staticmethod
def course_not_exist(msg, match):
print('%s does not exist' % match.group(1), msg)
|
class RegexHandler:
@staticmethod
def on_CS101(msg, match):
pass
@staticmethod
def on_CS202(msg, match):
pass
@staticmethod
def no_cs_courses_mentioned(msg):
pass
@staticmethod
def course_not_exist(msg, match):
pass
| 9 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 4 | 16 | 3 | 13 | 9 | 4 | 0 | 9 | 5 | 4 | 1 | 0 | 0 | 4 |
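The class above is a set of callbacks for a regex-driven router; the router itself lies outside this excerpt. As a shape-of-the-contract illustration only (the course-code pattern and dispatch table are assumptions, not amanobot's routing API), plain `re` dispatch would look like:

import re

COURSE_RE = re.compile(r'\b(CS\d{3})\b')  # assumed pattern for course codes

HANDLERS = {
    'CS101': RegexHandler.on_CS101,
    'CS202': RegexHandler.on_CS202,
}

def dispatch(msg):
    match = COURSE_RE.search(msg.get('text', ''))
    if match is None:
        RegexHandler.no_cs_courses_mentioned(msg)
    else:
        handler = HANDLERS.get(match.group(1), RegexHandler.course_not_exist)
        handler(msg, match)

dispatch({'text': 'I am taking CS101'})   # -> on_CS101
dispatch({'text': 'Is CS999 offered?'})   # -> course_not_exist
dispatch({'text': 'hello'})               # -> no_cs_courses_mentioned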
4,397 |
AmanoTeam/amanobot
|
AmanoTeam_amanobot/examples/callback/vote.py
|
vote.VoteCounter
|
class VoteCounter(amanobot.helper.ChatHandler):
def __init__(self, *args, **kwargs):
super(VoteCounter, self).__init__(*args, **kwargs)
global votes
if self.id in votes:
self._ballot_box, self._keyboard_msg_ident, self._expired_event = votes[self.id]
self._editor = amanobot.helper.Editor(self.bot, self._keyboard_msg_ident) if self._keyboard_msg_ident else None
else:
self._ballot_box = None
self._keyboard_msg_ident = None
self._editor = None
self._expired_event = None
self._member_count = self.administrator.getChatMembersCount() - 1 # exclude myself, the bot
# Catch _vote_expired event
self.router.routing_table['_vote_expired'] = self.on__vote_expired
def on_chat_message(self, msg):
content_type, chat_type, chat_id = amanobot.glance(msg)
if content_type != 'text':
print('Not a text message.')
return
if msg['text'] != '/vote':
print('Not /vote')
return
if self._ballot_box is not None:
self.sender.sendMessage('Voting still in progress')
else:
self._init_ballot()
    def _count_votes(self):
        yes = sum(1 for v in self._ballot_box.values() if v == 'yes')
        no = sum(1 for v in self._ballot_box.values() if v == 'no')
        return yes, no, self._member_count - yes - no
def _init_ballot(self):
keyboard = InlineKeyboardMarkup(inline_keyboard=[[
InlineKeyboardButton(text='Yes', callback_data='yes'),
InlineKeyboardButton(text='Nah!!!!', callback_data='no'),
]])
sent = self.sender.sendMessage("Let's Vote ...", reply_markup=keyboard)
self._ballot_box = {}
self._keyboard_msg_ident = amanobot.message_identifier(sent)
self._editor = amanobot.helper.Editor(self.bot, self._keyboard_msg_ident)
# Generate an expiry event 30 seconds later
self._expired_event = self.scheduler.event_later(30, ('_vote_expired', {'seconds': 30}))
def _close_ballot(self):
try:
self.scheduler.cancel(self._expired_event)
# The expiry event may have already occurred and cannot be found in scheduler.
except amanobot.exception.EventNotFound:
pass
self._editor.editMessageReplyMarkup(reply_markup=None)
self._ballot_box = None
self._keyboard_msg_ident = None
self._editor = None
def on_callback_query(self, msg):
query_id, from_id, query_data = amanobot.glance(msg, flavor='callback_query')
if from_id in self._ballot_box:
self.bot.answerCallbackQuery(query_id, text='You have already voted %s' % self._ballot_box[from_id])
else:
self.bot.answerCallbackQuery(query_id, text='Ok')
self._ballot_box[from_id] = query_data
# Announce results if everyone has voted.
if len(self._ballot_box) >= self._member_count:
result = self._count_votes()
self._close_ballot()
self.sender.sendMessage('Everyone has voted:\nYes: %d\nNo: %d\nSilent: %d' % result)
def on__vote_expired(self, event):
result = self._count_votes()
self._close_ballot()
self.sender.sendMessage('Time is up:\nYes: %d\nNo: %d\nSilent: %d' % result)
def on_close(self, ex):
global votes
if self._ballot_box is None:
try:
del votes[self.id]
except KeyError:
pass
else:
votes[self.id] = (self._ballot_box, self._keyboard_msg_ident, self._expired_event)
from pprint import pprint
pprint(votes)
|
class VoteCounter(amanobot.helper.ChatHandler):
def __init__(self, *args, **kwargs):
pass
def on_chat_message(self, msg):
pass
def _count_votes(self):
pass
def _init_ballot(self):
pass
def _close_ballot(self):
pass
def on_callback_query(self, msg):
pass
def on__vote_expired(self, event):
pass
def on_close(self, ex):
pass
| 9 | 0 | 11 | 2 | 9 | 1 | 2 | 0.07 | 1 | 4 | 2 | 0 | 8 | 6 | 8 | 25 | 99 | 20 | 75 | 24 | 63 | 5 | 68 | 23 | 56 | 4 | 3 | 2 | 18 |
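The tallying in `_count_votes` stands alone; a standalone sketch of the same logic, with an illustrative function name:

def count_votes(ballot_box, member_count):
    """Tally a ballot box mapping voter id -> 'yes' / 'no'."""
    yes = sum(1 for v in ballot_box.values() if v == 'yes')
    no = sum(1 for v in ballot_box.values() if v == 'no')
    return yes, no, member_count - yes - no

# Two yes, one no, two silent members:
assert count_votes({1: 'yes', 2: 'no', 3: 'yes'}, 5) == (2, 1, 2)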
4,398 |
AmanoTeam/amanobot
|
AmanoTeam_amanobot/examples/callback/votea.py
|
votea.VoteCounter
|
class VoteCounter(amanobot.aio.helper.ChatHandler):
def __init__(self, *args, **kwargs):
super(VoteCounter, self).__init__(*args, **kwargs)
global votes
if self.id in votes:
self._ballot_box, self._keyboard_msg_ident, self._expired_event, self._member_count = votes[self.id]
self._editor = amanobot.aio.helper.Editor(self.bot, self._keyboard_msg_ident) if self._keyboard_msg_ident else None
else:
self._ballot_box = None
self._keyboard_msg_ident = None
self._editor = None
self._expired_event = None
self._member_count = None
# Catch _vote_expired event
self.router.routing_table['_vote_expired'] = self.on__vote_expired
async def on_chat_message(self, msg):
content_type, chat_type, chat_id = glance(msg)
if content_type != 'text':
print('Not a text message.')
return
if msg['text'] != '/vote':
print('Not /vote')
return
if self._ballot_box is not None:
await self.sender.sendMessage('Voting still in progress')
else:
await self._init_ballot()
    def _count_votes(self):
        yes = sum(1 for v in self._ballot_box.values() if v == 'yes')
        no = sum(1 for v in self._ballot_box.values() if v == 'no')
        return yes, no, self._member_count - yes - no
async def _init_ballot(self):
keyboard = InlineKeyboardMarkup(inline_keyboard=[[
InlineKeyboardButton(text='Yes', callback_data='yes'),
InlineKeyboardButton(text='Nah!!!!', callback_data='no'),
]])
sent = await self.sender.sendMessage("Let's Vote ...", reply_markup=keyboard)
self._member_count = await self.administrator.getChatMembersCount() - 1 # exclude myself, the bot
self._ballot_box = {}
self._keyboard_msg_ident = message_identifier(sent)
self._editor = amanobot.aio.helper.Editor(self.bot, self._keyboard_msg_ident)
# Generate an expiry event 30 seconds later
self._expired_event = self.scheduler.event_later(30, ('_vote_expired', {'seconds': 30}))
async def _close_ballot(self):
self.scheduler.cancel(self._expired_event)
await self._editor.editMessageReplyMarkup(reply_markup=None)
self._ballot_box = None
self._keyboard_msg_ident = None
self._editor = None
async def on_callback_query(self, msg):
query_id, from_id, query_data = glance(msg, flavor='callback_query')
if from_id in self._ballot_box:
await self.bot.answerCallbackQuery(query_id, text='You have already voted %s' % self._ballot_box[from_id])
else:
await self.bot.answerCallbackQuery(query_id, text='Ok')
self._ballot_box[from_id] = query_data
# Announce results if everyone has voted.
if len(self._ballot_box) >= self._member_count:
result = self._count_votes()
await self._close_ballot()
await self.sender.sendMessage('Everyone has voted:\nYes: %d\nNo: %d\nSilent: %d' % result)
async def on__vote_expired(self, event):
result = self._count_votes()
await self._close_ballot()
await self.sender.sendMessage('Time is up:\nYes: %d\nNo: %d\nSilent: %d' % result)
def on_close(self, ex):
global votes
if self._ballot_box is None:
try:
del votes[self.id]
except KeyError:
pass
else:
votes[self.id] = (self._ballot_box, self._keyboard_msg_ident, self._expired_event, self._member_count)
from pprint import pprint
print('%d closing ...' % self.id)
pprint(votes)
|
class VoteCounter(amanobot.aio.helper.ChatHandler):
def __init__(self, *args, **kwargs):
pass
async def on_chat_message(self, msg):
pass
def _count_votes(self):
pass
async def _init_ballot(self):
pass
async def _close_ballot(self):
pass
async def on_callback_query(self, msg):
pass
async def on__vote_expired(self, event):
pass
def on_close(self, ex):
pass
| 9 | 0 | 11 | 2 | 9 | 1 | 2 | 0.05 | 1 | 3 | 1 | 0 | 8 | 6 | 8 | 25 | 97 | 20 | 74 | 23 | 62 | 4 | 67 | 22 | 55 | 4 | 3 | 2 | 17 |
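The 30-second expiry above uses amanobot's own scheduler (`event_later` / `cancel`). For comparison, the same open-vote / expire-or-finish shape can be modelled with plain asyncio; this sketch is not amanobot's API:

import asyncio

async def run_ballot(timeout=30):
    closed = asyncio.Event()

    async def expire():
        await asyncio.sleep(timeout)
        closed.set()  # plays the role of the '_vote_expired' event firing

    expiry = asyncio.create_task(expire())
    # ... collect votes here; call expiry.cancel() and closed.set() if everyone
    # votes early, mirroring scheduler.cancel(self._expired_event) above ...
    await closed.wait()
    print('Time is up')

asyncio.run(run_ballot(timeout=1))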
4,399 |
AmanoTeam/amanobot
|
AmanoTeam_amanobot/test/test3a_admin.py
|
test3a_admin.AdminBot
|
class AdminBot(amanobot.aio.Bot):
async def on_chat_message(self, msg):
content_type, chat_type, chat_id = amanobot.glance(msg)
if 'edit_date' not in msg:
await self.sendMessage(chat_id, 'Edit the message, please.')
else:
await self.sendMessage(chat_id, 'Add me to a group, please.')
# Make a router to route `new_chat_member` and `left_chat_member`
r = amanobot.aio.helper.Router(by_content_type(), make_content_type_routing_table(self))
# Replace current handler with that router
self._router.routing_table['chat'] = r.route
async def on_new_chat_member(self, msg, new_chat_member):
print('New chat member:', new_chat_member)
content_type, chat_type, chat_id = amanobot.glance(msg)
r = await self.getChat(chat_id)
print(r)
r = await self.getChatAdministrators(chat_id)
print(r)
print(amanobot.namedtuple.ChatMemberArray(r))
r = await self.getChatMembersCount(chat_id)
print(r)
        while True:
try:
await self.setChatTitle(chat_id, 'AdminBot Title')
print('Set title successfully.')
break
except NotEnoughRightsError:
print('No right to set title. Try again in 10 seconds ...')
await asyncio.sleep(10)
        while True:
try:
await self.setChatPhoto(chat_id, open('gandhi.png', 'rb'))
print('Set photo successfully.')
await asyncio.sleep(2) # let tester see photo briefly
break
except NotEnoughRightsError:
print('No right to set photo. Try again in 10 seconds ...')
await asyncio.sleep(10)
        while True:
try:
await self.deleteChatPhoto(chat_id)
print('Delete photo successfully.')
break
except NotEnoughRightsError:
print('No right to delete photo. Try again in 10 seconds ...')
await asyncio.sleep(10)
print('I am done. Remove me from the group.')
async def on_left_chat_member(self, msg, left_chat_member):
print('I see that I have left.')
|
class AdminBot(amanobot.aio.Bot):
async def on_chat_message(self, msg):
pass
async def on_new_chat_member(self, msg, new_chat_member):
pass
async def on_left_chat_member(self, msg, left_chat_member):
pass
| 4 | 0 | 19 | 3 | 15 | 1 | 3 | 0.06 | 1 | 2 | 2 | 0 | 3 | 0 | 3 | 90 | 61 | 12 | 47 | 8 | 43 | 3 | 46 | 8 | 42 | 7 | 2 | 2 | 10 |
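The three retry loops in `on_new_chat_member` share one pattern: attempt an admin call, back off on NotEnoughRightsError. A hedged refactoring sketch (`retry_admin_call` is a made-up helper; the exception import path is assumed from the `amanobot.exception` module seen elsewhere in these examples):

import asyncio

from amanobot.exception import NotEnoughRightsError  # assumed import path

async def retry_admin_call(make_call, description, delay=10):
    """Retry an admin API call until the bot has the required rights."""
    while True:
        try:
            result = await make_call()
            print('%s: done.' % description)
            return result
        except NotEnoughRightsError:
            print('No right to %s. Try again in %d seconds ...' % (description, delay))
            await asyncio.sleep(delay)

# Usage, mirroring the loops above (chat_id comes from the surrounding handler):
#   await retry_admin_call(lambda: self.setChatTitle(chat_id, 'AdminBot Title'), 'set title')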