| column | dtype | min | max |
|---|---|---|---|
| id | int64 | 0 | 843k |
| repository_name | string (lengths) | 7 | 55 |
| file_path | string (lengths) | 9 | 332 |
| class_name | string (lengths) | 3 | 290 |
| human_written_code | string (lengths) | 12 | 4.36M |
| class_skeleton | string (lengths) | 19 | 2.2M |
| total_program_units | int64 | 1 | 9.57k |
| total_doc_str | int64 | 0 | 4.2k |
| AvgCountLine | float64 | 0 | 7.89k |
| AvgCountLineBlank | float64 | 0 | 300 |
| AvgCountLineCode | float64 | 0 | 7.89k |
| AvgCountLineComment | float64 | 0 | 7.89k |
| AvgCyclomatic | float64 | 0 | 130 |
| CommentToCodeRatio | float64 | 0 | 176 |
| CountClassBase | float64 | 0 | 48 |
| CountClassCoupled | float64 | 0 | 589 |
| CountClassCoupledModified | float64 | 0 | 581 |
| CountClassDerived | float64 | 0 | 5.37k |
| CountDeclInstanceMethod | float64 | 0 | 4.2k |
| CountDeclInstanceVariable | float64 | 0 | 299 |
| CountDeclMethod | float64 | 0 | 4.2k |
| CountDeclMethodAll | float64 | 0 | 4.2k |
| CountLine | float64 | 1 | 115k |
| CountLineBlank | float64 | 0 | 9.01k |
| CountLineCode | float64 | 0 | 94.4k |
| CountLineCodeDecl | float64 | 0 | 46.1k |
| CountLineCodeExe | float64 | 0 | 91.3k |
| CountLineComment | float64 | 0 | 27k |
| CountStmt | float64 | 1 | 93.2k |
| CountStmtDecl | float64 | 0 | 46.1k |
| CountStmtExe | float64 | 0 | 90.2k |
| MaxCyclomatic | float64 | 0 | 759 |
| MaxInheritanceTree | float64 | 0 | 16 |
| MaxNesting | float64 | 0 | 34 |
| SumCyclomatic | float64 | 0 | 6k |

Each record below lists id, repository_name, file_path, class_name, human_written_code, and class_skeleton as labelled fields, followed by a single pipe-separated row containing the remaining metric columns (total_program_units through SumCyclomatic) in the order given above.
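The line-oriented metric columns can be approximated with a short, self-contained sketch. This is only an illustration for plain Python source (physical lines, blank lines, and `#` comments); it is not the tool that produced the dataset, and the function name `line_metrics` is hypothetical.

```python
def line_metrics(source: str) -> dict:
    """Rough counterparts of CountLine, CountLineBlank, CountLineCode,
    CountLineComment and CommentToCodeRatio for a single snippet."""
    lines = source.splitlines()
    blank = sum(1 for line in lines if not line.strip())
    comment = sum(1 for line in lines if line.strip().startswith('#'))
    code = len(lines) - blank - comment
    return {
        'CountLine': len(lines),
        'CountLineBlank': blank,
        'CountLineComment': comment,
        'CountLineCode': code,
        'CommentToCodeRatio': comment / code if code else 0.0,
    }


sample = (
    "class AliasAdmin(admin.ModelAdmin):\n"
    "    # columns shown in the changelist\n"
    "    list_display = ('page', 'url',)\n"
)
print(line_metrics(sample))  # {'CountLine': 3, 'CountLineBlank': 0, 'CountLineComment': 1, ...}
```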
id: 6400
repository_name: ArabellaTech/django-basic-cms
file_path: ArabellaTech_django-basic-cms/basic_cms/tests/test_checks.py
class_name: basic_cms.tests.test_checks.PageTemplatesLoadingCheckTestCase
human_written_code:
class PageTemplatesLoadingCheckTestCase(TestCase):
def test_check_detects_unexistant_template(self):
unexistant = ('does_not_exists.html', 'foo')
with self.settings(PAGE_TEMPLATES=[unexistant]):
errors = page_templates_loading_check([])
self.assertEqual(errors, [Warning(
'Django cannot find template does_not_exists.html',
obj=unexistant, id='basic_cms.W001')])
def test_check_doesnt_warn_on_existing_templates(self):
with self.settings(PAGE_TEMPLATES=[('pages/base.html', 'bas')]):
errors = page_templates_loading_check([])
self.assertEquals(errors, [])
def test_template_syntax_error_is_not_silenced(self):
with self.settings(PAGE_TEMPLATES=[('syntax_error.html', 'fail')]):
with self.assertRaises(TemplateSyntaxError):
page_templates_loading_check([])
class_skeleton:
class PageTemplatesLoadingCheckTestCase(TestCase):
def test_check_detects_unexistant_template(self):
pass
def test_check_doesnt_warn_on_existing_templates(self):
pass
def test_template_syntax_error_is_not_silenced(self):
pass
| 4 | 0 | 6 | 1 | 5 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 3 | 0 | 3 | 3 | 20 | 4 | 16 | 7 | 12 | 0 | 14 | 7 | 10 | 1 | 1 | 2 | 3 |
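The tests in this record exercise a `page_templates_loading_check` function that is not shown here. As a point of reference only, a minimal sketch of what such a check could look like with Django's system-check framework follows; it is a hypothetical reconstruction based on the assertions above, not the project's actual implementation.

```python
from django.conf import settings
from django.core.checks import Warning, register
from django.template import TemplateDoesNotExist
from django.template.loader import get_template


@register()
def page_templates_loading_check(app_configs, **kwargs):
    """Warn about PAGE_TEMPLATES entries that Django cannot load."""
    errors = []
    for template_name, label in getattr(settings, 'PAGE_TEMPLATES', []):
        try:
            # A TemplateSyntaxError is deliberately not caught, matching
            # test_template_syntax_error_is_not_silenced above.
            get_template(template_name)
        except TemplateDoesNotExist:
            errors.append(Warning(
                'Django cannot find template %s' % template_name,
                obj=(template_name, label),
                id='basic_cms.W001',
            ))
    return errors
```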
id: 6401
repository_name: ArabellaTech/django-basic-cms
file_path: ArabellaTech_django-basic-cms/basic_cms/admin/__init__.py
class_name: basic_cms.admin.AliasAdmin
human_written_code:
class AliasAdmin(admin.ModelAdmin):
list_display = ('page', 'url',)
list_editable = ('url',)
class_skeleton:
class AliasAdmin(admin.ModelAdmin):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
id: 6402
repository_name: ArabellaTech/django-basic-cms
file_path: ArabellaTech_django-basic-cms/basic_cms/admin/__init__.py
class_name: basic_cms.admin.PageAdmin
human_written_code:
class PageAdmin(admin.ModelAdmin):
"""Page Admin class."""
exclude = ['author', 'parent']
# these mandatory fields are not versioned
mandatory_placeholders = ('title', 'slug')
general_fields = ['title', 'slug', 'status', 'target',
'position', 'freeze_date']
if settings.PAGE_USE_SITE_ID and not settings.PAGE_HIDE_SITES:
general_fields.append('sites')
insert_point = general_fields.index('status') + 1
# Strange django behavior. If not provided, django will try to find
# 'page' foreign key in all registered models
inlines = []
general_fields.insert(insert_point, 'tags')
# Add support for future dating and expiration based on settings.
if settings.PAGE_SHOW_END_DATE:
general_fields.insert(insert_point, 'publication_end_date')
if settings.PAGE_SHOW_START_DATE:
general_fields.insert(insert_point, 'publication_date')
from basic_cms.urlconf_registry import registry
if(len(registry)):
general_fields.append('delegate_to')
insert_point = general_fields.index('status') + 1
normal_fields = ['language']
page_templates = settings.get_page_templates()
if len(page_templates) > 0:
normal_fields.append('template')
normal_fields.append('redirect_to')
normal_fields.append('redirect_to_url')
fieldsets = (
[_('General'), {
'fields': general_fields,
'classes': ('module-general',),
}],
(_('Options'), {
'fields': normal_fields,
'classes': ('module-options',),
}),
)
actions = [export_pages_as_json]
metadata_fields = [
{'name': 'meta_title',
'field': forms.fields.CharField(required=False)},
{'name': 'meta_description',
'field': forms.fields.CharField(required=False, widget=forms.widgets.Textarea()), },
{'name': 'meta_keywords',
'field': forms.fields.CharField(required=False, widget=forms.widgets.Textarea()), },
{'name': 'meta_author',
'field': forms.fields.CharField(required=False), },
{'name': 'fb_page_type',
'field': forms.fields.CharField(required=False), },
{'name': 'fb_image',
'field': forms.fields.CharField(required=False), },
]
class Media:
css = {
'all': [join(settings.PAGES_MEDIA_URL, path) for path in (
'css/rte.css',
'css/pages.css'
)]
}
js = [join(settings.PAGES_MEDIA_URL, path) for path in (
'javascript/jquery.js',
'javascript/jquery.rte.js',
'javascript/pages.js',
'javascript/pages_list.js',
'javascript/pages_form.js',
'javascript/jquery.query-2.1.7.js',
)]
def urls(self):
from django.conf.urls import patterns, url
# Admin-site-wide views.
urlpatterns = patterns('',
url(r'^$', self.list_pages, name='page-index'),
url(r'^(?P<page_id>[0-9]+)/traduction/(?P<language_id>[-\w]+)/$',
traduction, name='page-traduction'),
url(r'^(?P<page_id>[0-9]+)/get-content/(?P<content_id>[0-9]+)/$',
get_content, name='page-get-content'),
url(r'^(?P<page_id>[0-9]+)/modify-content/(?P<content_type>[-\w]+)/(?P<language_id>[-\w]+)/$',
modify_content, name='page-modify-content'),
url(r'^(?P<page_id>[0-9]+)/delete-content/(?P<language_id>[-\w]+)/$',
delete_content, name='page-delete-content'),
url(r'^(?P<page_id>[0-9]+)/sub-menu/$',
sub_menu, name='page-sub-menu'),
url(r'^(?P<page_id>[0-9]+)/move-page/$',
move_page, name='page-move-page'),
url(r'^(?P<page_id>[0-9]+)/change-status/$',
change_status, name='page-change-status'),
url(r'^import-json/$',
self.import_pages, name='import-pages-from-json'),
)
urlpatterns += super(PageAdmin, self).urls
return urlpatterns
urls = property(urls)
def i18n_javascript(self, request):
"""Displays the i18n JavaScript that the Django admin
requires.
This takes into account the ``USE_I18N`` setting. If it's set to False, the
generated JavaScript will be leaner and faster.
"""
return javascript_catalog(request, packages='pages')
def save_model(self, request, page, form, change):
"""Move the page in the tree if necessary and save every
placeholder :class:`Content <pages.models.Content>`.
"""
language = form.cleaned_data['language']
target = form.data.get('target', None)
position = form.data.get('position', None)
page.save()
# if True, we need to move the page
if target and position:
try:
target = self.model.objects.get(pk=target)
except self.model.DoesNotExist:
pass
else:
target.invalidate()
page.move_to(target, position)
for name in self.mandatory_placeholders:
data = form.cleaned_data[name]
placeholder = PlaceholderNode(name)
extra_data = placeholder.get_extra_data(form.data)
placeholder.save(page, language, data, change,
extra_data=extra_data)
for placeholder in get_placeholders(page.get_template()):
if(placeholder.name in form.cleaned_data and placeholder.name
not in self.mandatory_placeholders):
data = form.cleaned_data[placeholder.name]
extra_data = placeholder.get_extra_data(form.data)
placeholder.save(page, language, data, change,
extra_data=extra_data)
for placeholder in self.metadata_fields:
data = form.cleaned_data[placeholder['name']]
Content.objects.set_or_create_content(
page,
language,
placeholder['name'],
data
)
page.invalidate()
def get_fieldsets(self, request, obj=None):
"""
Add fieldsets of placeholders to the list of already
existing fieldsets.
"""
# some ugly business to remove freeze_date
# from the field list
general_module = {
'fields': list(self.general_fields),
'classes': ('module-general',),
}
default_fieldsets = list(self.fieldsets)
if not request.user.has_perm('pages.can_freeze'):
general_module['fields'].remove('freeze_date')
if not request.user.has_perm('pages.can_publish'):
general_module['fields'].remove('status')
default_fieldsets[0][1] = general_module
placeholder_fieldsets = []
template = get_template_from_request(request, obj)
for placeholder in get_placeholders(template):
if placeholder.name not in self.mandatory_placeholders:
placeholder_fieldsets.append(placeholder.name)
additional_fieldsets = []
# meta fields
metadata_fieldsets = [f['name'] for f in self.metadata_fields]
additional_fieldsets.append((_('Metadata'), {
'fields': metadata_fieldsets,
'classes': ('module-content', 'grp-collapse grp-closed'),
}))
additional_fieldsets.append((_('Content'), {
'fields': placeholder_fieldsets,
'classes': ('module-content',),
}))
return default_fieldsets + additional_fieldsets
def save_form(self, request, form, change):
"""Given a ModelForm return an unsaved instance. ``change`` is True if
the object is being changed, and False if it's being added."""
instance = super(PageAdmin, self).save_form(request, form, change)
instance.template = form.cleaned_data['template']
if not change:
instance.author = request.user
return instance
def get_form(self, request, obj=None, **kwargs):
"""Get a :class:`Page <pages.admin.forms.PageForm>` for the
:class:`Page <pages.models.Page>` and modify its fields depending on
the request."""
#form = super(PageAdmin, self).get_form(request, obj, **kwargs)
template = get_template_from_request(request, obj)
form = make_form(self.model, get_placeholders(template))
language = get_language_from_request(request)
form.base_fields['language'].initial = language
if obj:
initial_slug = obj.slug(language=language, fallback=False)
initial_title = obj.title(language=language, fallback=False)
form.base_fields['slug'].initial = initial_slug
form.base_fields['title'].initial = initial_title
form.base_fields['slug'].label = _('Slug')
template = get_template_from_request(request, obj)
page_templates = settings.get_page_templates()
if len(page_templates) > 0:
template_choices = list(page_templates)
template_choices.insert(0, (settings.PAGE_DEFAULT_TEMPLATE,
_('Default template')))
form.base_fields['template'].choices = template_choices
form.base_fields['template'].initial = force_text(template)
for placeholder in get_placeholders(template):
name = placeholder.name
if obj:
initial = placeholder.get_content(obj, language, name)
else:
initial = None
form.base_fields[name] = placeholder.get_field(obj,
language, initial=initial)
for placeholder in self.metadata_fields:
name = placeholder['name']
initial = None
if obj:
try:
initial = Content.objects.get(page=obj, language=language, type=name).body
except Content.DoesNotExist:
pass
form.base_fields[name] = placeholder['field']
form.base_fields[name].initial = initial
return form
def change_view(self, request, object_id, extra_context=None):
"""The ``change`` admin view for the
:class:`Page <pages.models.Page>`."""
language = get_language_from_request(request)
extra_context = {
'language': language,
# don't see where it's used
#'lang': current_lang,
'page_languages': settings.PAGE_LANGUAGES,
}
try:
int(object_id)
except ValueError:
raise Http404('The "%s" part of the location is invalid.'
% str(object_id))
try:
obj = self.model.objects.get(pk=object_id)
except self.model.DoesNotExist:
# Don't raise Http404 just yet, because we haven't checked
# permissions yet. We don't want an unauthenticated user to be able
# to determine whether a given object exists.
obj = None
else:
template = get_template_from_request(request, obj)
extra_context['placeholders'] = get_placeholders(template)
extra_context['traduction_languages'] = [l for l in
settings.PAGE_LANGUAGES if Content.objects.get_content(obj,
l[0], "title") and l[0] != language]
extra_context['page'] = obj
return super(PageAdmin, self).change_view(request, object_id,
extra_context=extra_context)
def add_view(self, request, form_url='', extra_context=None):
"""The ``add`` admin view for the :class:`Page <pages.models.Page>`."""
extra_context = {
'language': get_language_from_request(request),
'page_languages': settings.PAGE_LANGUAGES,
}
return super(PageAdmin, self).add_view(request, form_url,
extra_context)
def has_add_permission(self, request):
"""Return ``True`` if the current user has permission to add a new
page."""
return request.user.has_perm('pages.add_page')
def has_change_permission(self, request, obj=None):
"""Return ``True`` if the current user has permission
to change the page."""
return request.user.has_perm('pages.change_page')
def has_delete_permission(self, request, obj=None):
"""Return ``True`` if the current user has permission on the page."""
return request.user.has_perm('pages.delete_page')
def list_pages(self, request, template_name=None, extra_context=None):
"""List root pages"""
if not self.admin_site.has_permission(request):
return self.admin_site.login(request)
language = get_language_from_request(request)
query = request.POST.get('q', '').strip()
if query:
page_ids = list(set([c.page.pk for c in
Content.objects.filter(body__icontains=query)]))
pages = Page.objects.filter(pk__in=page_ids)
else:
pages = Page.objects.root()
if settings.PAGE_HIDE_SITES:
pages = pages.filter(sites=settings.SITE_ID)
context = {
'can_publish': request.user.has_perm('pages.can_publish'),
'language': language,
'name': _("page"),
'pages': pages,
'opts': self.model._meta,
'q': query
}
context.update(extra_context or {})
change_list = self.changelist_view(request, context)
return change_list
def import_pages(self, request):
if not self.has_add_permission(request):
return admin.site.login(request)
return import_pages_from_json(request)
class_skeleton:
class PageAdmin(admin.ModelAdmin):
'''Page Admin class.'''
class Media:
def urls(self):
pass
def i18n_javascript(self, request):
'''Displays the i18n JavaScript that the Django admin
requires.
This takes into account the ``USE_I18N`` setting. If it's set to False, the
generated JavaScript will be leaner and faster.
'''
pass
def save_model(self, request, page, form, change):
'''Move the page in the tree if necessary and save every
placeholder :class:`Content <pages.models.Content>`.
'''
pass
def get_fieldsets(self, request, obj=None):
'''
Add fieldsets of placeholders to the list of already
existing fieldsets.
'''
pass
def save_form(self, request, form, change):
'''Given a ModelForm return an unsaved instance. ``change`` is True if
the object is being changed, and False if it's being added.'''
pass
def get_form(self, request, obj=None, **kwargs):
'''Get a :class:`Page <pages.admin.forms.PageForm>` for the
:class:`Page <pages.models.Page>` and modify its fields depending on
the request.'''
pass
def change_view(self, request, object_id, extra_context=None):
'''The ``change`` admin view for the
:class:`Page <pages.models.Page>`.'''
pass
def add_view(self, request, form_url='', extra_context=None):
'''The ``add`` admin view for the :class:`Page <pages.models.Page>`.'''
pass
def has_add_permission(self, request):
'''Return ``True`` if the current user has permission to add a new
page.'''
pass
def has_change_permission(self, request, obj=None):
'''Return ``True`` if the current user has permission
to change the page.'''
pass
def has_delete_permission(self, request, obj=None):
'''Return ``True`` if the current user has permission on the page.'''
pass
def list_pages(self, request, template_name=None, extra_context=None):
'''List root pages'''
pass
def import_pages(self, request):
pass
| 15 | 12 | 20 | 2 | 15 | 3 | 3 | 0.16 | 1 | 9 | 3 | 1 | 13 | 0 | 13 | 13 | 353 | 50 | 261 | 64 | 244 | 42 | 168 | 64 | 151 | 8 | 1 | 3 | 37 |
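`PageAdmin.urls` above relies on `django.conf.urls.patterns`, which was removed in Django 1.10, and on wrapping the method with `property(urls)`. Below is a minimal sketch of how the same kind of extra admin route is commonly wired with `ModelAdmin.get_urls()` on current Django, with a stub view; it is an illustration only, not this project's code.

```python
from django.contrib import admin
from django.http import HttpResponse
from django.urls import path


class PageAdminSketch(admin.ModelAdmin):
    def get_urls(self):
        # Custom routes are prepended so they take precedence over the
        # default ModelAdmin patterns.
        extra = [
            path(
                '<int:page_id>/sub-menu/',
                self.admin_site.admin_view(self.sub_menu_view),
                name='page-sub-menu',
            ),
        ]
        return extra + super().get_urls()

    def sub_menu_view(self, request, page_id):
        # Stub: the real view renders the sub-menu for the given page.
        return HttpResponse('sub-menu for page %d' % page_id)
```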
id: 6403
repository_name: ArabellaTech/django-basic-cms
file_path: ArabellaTech_django-basic-cms/basic_cms/admin/__init__.py
class_name: basic_cms.admin.PageAdminWithDefaultContent
human_written_code:
class PageAdminWithDefaultContent(PageAdmin):
"""
Fill in values for content blocks from official language
if creating a new translation
"""
def get_form(self, request, obj=None, **kwargs):
form = super(PageAdminWithDefaultContent, self
).get_form(request, obj, **kwargs)
language = get_language_from_request(request)
if global_settings.LANGUAGE_CODE == language:
# this is the "official" language
return form
if Content.objects.filter(page=obj, language=language).count():
return form
# this is a new page, try to find some default content
template = get_template_from_request(request, obj)
for placeholder in get_placeholders(template):
name = placeholder.name
form.base_fields[name] = placeholder.get_field(obj, language,
initial=Content.objects.get_content(obj,
global_settings.LANGUAGE_CODE, name))
return form
class_skeleton:
class PageAdminWithDefaultContent(PageAdmin):
'''
Fill in values for content blocks from official language
if creating a new translation
'''
def get_form(self, request, obj=None, **kwargs):
pass
| 2 | 1 | 21 | 4 | 15 | 2 | 4 | 0.38 | 1 | 2 | 1 | 0 | 1 | 0 | 1 | 14 | 26 | 4 | 16 | 7 | 14 | 6 | 13 | 7 | 11 | 4 | 2 | 1 | 4 |
id: 6404
repository_name: ArabellaTech/django-basic-cms
file_path: ArabellaTech_django-basic-cms/basic_cms/admin/forms.py
class_name: basic_cms.admin.forms.SlugFormMixin
human_written_code:
class SlugFormMixin(forms.ModelForm):
"""To edit models with slugs"""
title = forms.CharField(
label=_('Title'),
widget=forms.TextInput(),
)
slug = forms.CharField(
label=_('Slug'),
widget=forms.TextInput(),
help_text=_('The slug will be used to create the page URL, it must be unique among the other pages of the same level.')
)
def _clean_page_automatic_slug_renaming(self, slug, is_slug_safe):
"""Helper to add numbers to slugs"""
if not callable(is_slug_safe):
raise TypeError('is_slug_safe must be callable')
if is_slug_safe(slug):
return slug
count = 2
new_slug = slug + "-" + str(count)
while not is_slug_safe(new_slug):
count = count + 1
new_slug = slug + "-" + str(count)
return new_slug
def _clean_page_unique_slug_required(self, slug):
"""See if this slug exists already"""
if hasattr(self, 'instance') and self.instance.id:
if Content.objects.exclude(page=self.instance).filter(
body=slug, type="slug").count():
raise forms.ValidationError(self.err_dict['another_page_error'])
elif Content.objects.filter(body=slug, type="slug").count():
raise forms.ValidationError(self.err_dict['another_page_error'])
return slug
class_skeleton:
class SlugFormMixin(forms.ModelForm):
'''To edit models with slugs'''
def _clean_page_automatic_slug_renaming(self, slug, is_slug_safe):
'''Helper to add numbers to slugs'''
pass
def _clean_page_unique_slug_required(self, slug):
'''See if this slug exists already'''
pass
| 3 | 3 | 13 | 2 | 10 | 1 | 4 | 0.1 | 1 | 3 | 1 | 1 | 2 | 1 | 2 | 2 | 39 | 7 | 29 | 8 | 26 | 3 | 20 | 7 | 17 | 4 | 1 | 2 | 8 |
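`SlugFormMixin._clean_page_automatic_slug_renaming` simply appends `-2`, `-3`, … until the slug is free. Below is a standalone sketch of the same behaviour against an in-memory set of taken slugs; the names `rename_slug_until_free` and `taken` are hypothetical and used for illustration only.

```python
def rename_slug_until_free(slug, is_slug_safe):
    """Append -2, -3, ... to slug until is_slug_safe(candidate) is True."""
    if not callable(is_slug_safe):
        raise TypeError('is_slug_safe must be callable')
    if is_slug_safe(slug):
        return slug
    count = 2
    while not is_slug_safe('%s-%d' % (slug, count)):
        count += 1
    return '%s-%d' % (slug, count)


taken = {'about', 'about-2'}
print(rename_slug_until_free('about', lambda s: s not in taken))    # about-3
print(rename_slug_until_free('contact', lambda s: s not in taken))  # contact
```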
id: 6405
repository_name: ArabellaTech/django-basic-cms
file_path: ArabellaTech_django-basic-cms/basic_cms/api.py
class_name: basic_cms.api.BasicCMSAPI
human_written_code:
class BasicCMSAPI(APIView):
"""
Get basic cms page by slug in given format
format -- json/html, html = default
get-children -- true/false, true by default, works only with json.
"""
permission_classes = (AllowAny,)
def get(self, request, slug, *args, **kwargs):
format = request.GET.get('format', 'html')
get_children = request.GET.get('get-children', True)
lang = get_language()
page = Page.objects.from_path(slug, lang)
if page is None:
raise Http404("Page does not exist")
if format == 'html':
page = render_to_string(page.template, {'current_page': page}, request=request)
base_url = request.build_absolute_uri('/')
return Response({"html": links_append_domain(page, base_url)})
else:
page = page.dump_json_data(get_children=get_children)
return Response(page)
class_skeleton:
class BasicCMSAPI(APIView):
'''
Get basic cms page by slug in given format
format -- json/html, html = default
get-children -- true/false, true by default, works only with json.
'''
def get(self, request, slug, *args, **kwargs):
pass
| 2 | 1 | 14 | 0 | 14 | 0 | 3 | 0.31 | 1 | 1 | 1 | 0 | 1 | 0 | 1 | 1 | 25 | 4 | 16 | 7 | 14 | 5 | 15 | 7 | 13 | 3 | 1 | 1 | 3 |
id: 6406
repository_name: ArabellaTech/django-basic-cms
file_path: ArabellaTech_django-basic-cms/basic_cms/app_config.py
class_name: basic_cms.app_config.BasicCmsConfig
human_written_code:
class BasicCmsConfig(AppConfig):
name = 'basic_cms'
def ready(self):
from . import checks
class_skeleton:
class BasicCmsConfig(AppConfig):
def ready(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 5 | 1 | 4 | 4 | 1 | 0 | 4 | 4 | 1 | 1 | 1 | 0 | 1 |
id: 6407
repository_name: ArabellaTech/django-basic-cms
file_path: ArabellaTech_django-basic-cms/basic_cms/managers.py
class_name: basic_cms.managers.PageAliasManager
human_written_code:
class PageAliasManager(models.Manager):
""":class:`PageAlias <pages.models.PageAlias>` manager."""
def from_path(self, request, path, lang):
"""
Resolve a request to an alias. returns a
:class:`PageAlias <pages.models.PageAlias>` if the url matches
no page at all. The aliasing system supports plain
aliases (``/foo/bar``) as well as aliases containing GET parameters
(like ``index.php?page=foo``).
:param request: the request object
:param path: the complete path to the page
:param lang: not used
"""
from basic_cms.models import PageAlias
url = normalize_url(path)
# §1: try with complete query string
query = request.META.get('QUERY_STRING')
if query:
url = url + '?' + query
try:
alias = PageAlias.objects.get(url=url)
return alias
except PageAlias.DoesNotExist:
pass
# §2: try with path only
url = normalize_url(path)
try:
alias = PageAlias.objects.get(url=url)
return alias
except PageAlias.DoesNotExist:
pass
# §3: not alias found, we give up
return None
class_skeleton:
class PageAliasManager(models.Manager):
''':class:`PageAlias <pages.models.PageAlias>` manager.'''
def from_path(self, request, path, lang):
'''
Resolve a request to an alias. returns a
:class:`PageAlias <pages.models.PageAlias>` if the url matches
no page at all. The aliasing system supports plain
aliases (``/foo/bar``) as well as aliases containing GET parameters
(like ``index.php?page=foo``).
:param request: the request object
:param path: the complete path to the page
:param lang: not used
'''
pass
| 2 | 2 | 33 | 2 | 18 | 13 | 4 | 0.74 | 1 | 1 | 1 | 0 | 1 | 0 | 1 | 1 | 36 | 3 | 19 | 6 | 16 | 14 | 19 | 6 | 16 | 4 | 1 | 1 | 4 |
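`PageAliasManager.from_path` tries the URL with its full query string first, then the bare path, and finally gives up. Below is a minimal sketch of that lookup order against a plain dict of aliases; it is hypothetical, not the ORM-backed implementation, and uses a simplified stand-in for `normalize_url`.

```python
def resolve_alias(aliases, path, query_string=''):
    """Return the alias target for 'path?query', then for 'path', else None."""
    url = path.strip('/')  # simplified stand-in for normalize_url()
    if query_string:
        with_query = url + '?' + query_string
        if with_query in aliases:
            return aliases[with_query]
    return aliases.get(url)


aliases = {
    'index.php': '/home-page/',
    'index.php?page=downloads': '/downloads-page/',
}
print(resolve_alias(aliases, '/index.php/', 'page=downloads'))  # /downloads-page/
print(resolve_alias(aliases, '/index.php/'))                    # /home-page/
```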
id: 6408
repository_name: ArabellaTech/django-basic-cms
file_path: ArabellaTech_django-basic-cms/basic_cms/managers.py
class_name: basic_cms.managers.PageManager
human_written_code:
class PageManager(models.Manager):
"""
Page manager provide several filters to obtain pages :class:`QuerySet`
that respect the page attributes and project settings.
"""
if settings.PAGE_HIDE_SITES:
def get_query_set(self):
"""Restrict operations to pages on the current site."""
return super(PageManager, self).get_query_set().filter(
sites=global_settings.SITE_ID)
def populate_pages(self, parent=None, child=5, depth=5):
"""Create a population of :class:`Page <pages.models.Page>`
for testing purpose."""
User = get_user_model()
from basic_cms.models import Content
author = User.objects.all()[0]
if depth == 0:
return
p = self.model(parent=parent, author=author, status=self.model.PUBLISHED)
p.save()
p = self.get(id=p.id)
Content(body='page-' + str(p.id), type='title', language=settings.PAGE_DEFAULT_LANGUAGE, page=p).save()
Content(body='page-' + str(p.id), type='slug', language=settings.PAGE_DEFAULT_LANGUAGE, page=p).save()
for child in range(1, child + 1):
self.populate_pages(parent=p, child=child, depth=(depth - 1))
def on_site(self, site_id=None):
"""Return a :class:`QuerySet` of pages that are published on the site
defined by the ``SITE_ID`` setting.
:param site_id: specify the id of the site object to filter with.
"""
if settings.PAGE_USE_SITE_ID:
if not site_id:
site_id = settings.SITE_ID
return self.filter(sites=site_id)
return self.all()
def root(self):
"""Return a :class:`QuerySet` of pages without parent."""
return self.on_site().filter(parent__isnull=True)
def navigation(self):
"""Creates a :class:`QuerySet` of the published root pages."""
return self.on_site().filter(status=self.model.PUBLISHED).filter(parent__isnull=True)
def hidden(self):
"""Creates a :class:`QuerySet` of the hidden pages."""
return self.on_site().filter(status=self.model.HIDDEN)
def filter_published(self, queryset):
"""Filter the given pages :class:`QuerySet` to obtain only published
page."""
if settings.PAGE_USE_SITE_ID:
queryset = queryset.filter(sites=settings.SITE_ID)
queryset = queryset.filter(status=self.model.PUBLISHED)
if settings.PAGE_SHOW_START_DATE:
queryset = queryset.filter(publication_date__lte=datetime.now())
if settings.PAGE_SHOW_END_DATE:
queryset = queryset.filter(
Q(publication_end_date__gt=datetime.now()) |
Q(publication_end_date__isnull=True)
)
return queryset
def published(self):
"""Creates a :class:`QuerySet` of published
:class:`Page <pages.models.Page>`."""
return self.filter_published(self)
def drafts(self):
"""Creates a :class:`QuerySet` of drafts using the page's
:attr:`Page.publication_date`."""
pub = self.on_site().filter(status=self.model.DRAFT)
if settings.PAGE_SHOW_START_DATE:
pub = pub.filter(publication_date__gte=datetime.now())
return pub
def expired(self):
"""Creates a :class:`QuerySet` of expired using the page's
:attr:`Page.publication_end_date`."""
return self.on_site().filter(
publication_end_date__lte=datetime.now())
def from_path(self, complete_path, lang, exclude_drafts=True):
"""Return a :class:`Page <pages.models.Page>` according to
the page's path."""
if complete_path.endswith("/"):
complete_path = complete_path[:-1]
# just return the root page
if complete_path == '':
root_pages = self.root()
if root_pages:
return root_pages[0]
else:
return None
slug = get_slug(complete_path)
from basic_cms.models import Content
page_ids = Content.objects.get_page_ids_by_slug(slug)
pages_list = self.on_site().filter(id__in=page_ids)
if exclude_drafts:
pages_list = pages_list.exclude(status=self.model.DRAFT)
if len(pages_list) == 1:
if(settings.PAGE_USE_STRICT_URL and
pages_list[0].get_complete_slug(lang) != complete_path):
return None
return pages_list[0]
# if more than one page is matching the slug,
# we need to use the full URL
if len(pages_list) > 1:
for page in pages_list:
if page.get_complete_slug(lang) == complete_path:
return page
return None
def create_and_update_from_json_data(self, d, user):
"""
Create or update page based on python dict d loaded from JSON data.
This applies all data except for redirect_to, which is done in a
second pass after all pages have been imported,
user is the User instance that will be used if the author can't
be found in the DB.
returns (page object, created, messages).
created is True if this was a new page or False if an existing page
was updated.
messages is a list of strings warnings/messages about this import
"""
User = get_user_model()
page = None
parent = None
parent_required = True
created = False
messages = []
page_languages = set(lang[0] for lang in settings.PAGE_LANGUAGES)
for lang, s in d['complete_slug'].items():
if lang not in page_languages:
messages.append(_("Language '%s' not imported") % (lang,))
continue
page = self.from_path(s, lang, exclude_drafts=False)
if page:
break
if parent_required and parent is None:
if '/' in s:
parent = self.from_path(s.rsplit('/', 1)[0], lang, exclude_drafts=False)
else:
parent_required = False
else:
# can't find an existing match, need to create a new Page
page = self.model(parent=parent)
created = True
def custom_get_user_by_email(email):
"""
Allow the user profile class to look up a user by email
address
"""
User = get_user_model()
# bit of an unpleasant hack that requres the logged-in
# user has a profile, but I don't want to reproduce the
# code in get_profile() here
try:
profile = user.get_profile()
except (SiteProfileNotAvailable, ObjectDoesNotExist, AttributeError):
return User.objects.get(email=email)
get_user_by_email = getattr(profile, 'get_user_by_email', None)
if get_user_by_email:
return get_user_by_email(email)
return User.objects.get(email=email)
try:
page.author = custom_get_user_by_email(d['author_email'])
except (User.DoesNotExist, User.MultipleObjectsReturned):
page.author = user
messages.append(_("Original author '%s' not found") % (d['author_email'],))
page.creation_date = datetime.strptime(d['creation_date'],
ISODATE_FORMAT)
page.publication_date = datetime.strptime(d['publication_date'],
ISODATE_FORMAT) if d['publication_date'] else None
page.publication_end_date = datetime.strptime(d['publication_end_date'],
ISODATE_FORMAT) if d['publication_end_date'] else None
page.last_modification_date = datetime.strptime(
d['last_modification_date'], ISODATE_FORMAT)
page.status = {
'published': self.model.PUBLISHED,
'hidden': self.model.HIDDEN,
'draft': self.model.DRAFT
}[d['status']]
page.template = d['template']
page.redirect_to_url = d['redirect_to_url']
page.save()
# Add tags
tags = d.get('tags', [])
page.tags.clear()
if tags:
for tag in tags:
Tag.objects.get_or_create(name=tag)
page.tags.add(tag)
page.save()
if settings.PAGE_USE_SITE_ID and not settings.PAGE_HIDE_SITES:
if d['sites']:
for site in d['sites']:
try:
page.sites.add(Site.objects.get(domain=site))
except Site.DoesNotExist:
messages.append(_("Could not add site '%s' to page") % (site,))
if not page.sites.count(): # need at least one site
page.sites.add(Site.objects.get(pk=global_settings.SITE_ID))
from basic_cms.models import Content
def create_content(lang, ctype, body):
Content.objects.create_content_if_changed(page, lang, ctype, body)
for lang in d['content_language_updated_order']:
if lang not in page_languages:
continue
create_content(lang, 'slug', d['complete_slug'][lang].rsplit('/', 1)[-1])
create_content(lang, 'title', d['title'][lang])
for ctype, langs_bodies in d['content'].items():
create_content(lang, ctype, langs_bodies[lang])
return page, created, messages
class_skeleton:
class PageManager(models.Manager):
'''
Page manager provide several filters to obtain pages :class:`QuerySet`
that respect the page attributes and project settings.
'''
def get_query_set(self):
'''Restrict operations to pages on the current site.'''
pass
def populate_pages(self, parent=None, child=5, depth=5):
'''Create a population of :class:`Page <pages.models.Page>`
for testing purpose.'''
pass
def on_site(self, site_id=None):
'''Return a :class:`QuerySet` of pages that are published on the site
defined by the ``SITE_ID`` setting.
:param site_id: specify the id of the site object to filter with.
'''
pass
def root(self):
'''Return a :class:`QuerySet` of pages without parent.'''
pass
def navigation(self):
'''Creates a :class:`QuerySet` of the published root pages.'''
pass
def hidden(self):
'''Creates a :class:`QuerySet` of the hidden pages.'''
pass
def filter_published(self, queryset):
'''Filter the given pages :class:`QuerySet` to obtain only published
page.'''
pass
def published(self):
'''Creates a :class:`QuerySet` of published
:class:`Page <pages.models.Page>`.'''
pass
def drafts(self):
'''Creates a :class:`QuerySet` of drafts using the page's
:attr:`Page.publication_date`.'''
pass
def expired(self):
'''Creates a :class:`QuerySet` of expired using the page's
:attr:`Page.publication_end_date`.'''
pass
def from_path(self, complete_path, lang, exclude_drafts=True):
'''Return a :class:`Page <pages.models.Page>` according to
the page's path.'''
pass
def create_and_update_from_json_data(self, d, user):
'''
Create or update page based on python dict d loaded from JSON data.
This applies all data except for redirect_to, which is done in a
second pass after all pages have been imported,
user is the User instance that will be used if the author can't
be found in the DB.
returns (page object, created, messages).
created is True if this was a new page or False if an existing page
was updated.
messages is a list of strings warnings/messages about this import
'''
pass
def custom_get_user_by_email(email):
'''
Allow the user profile class to look up a user by email
address
'''
pass
def create_content(lang, ctype, body):
pass
| 15 | 14 | 17 | 2 | 12 | 4 | 4 | 0.3 | 1 | 7 | 1 | 0 | 12 | 0 | 12 | 12 | 238 | 33 | 158 | 42 | 140 | 48 | 142 | 42 | 124 | 19 | 1 | 4 | 51 |
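`PageManager.from_path` matches pages by the last slug segment and only compares complete paths when several pages share that slug. Below is a small standalone sketch of the disambiguation step, assuming a plain mapping from page id to complete slug (hypothetical, illustration only).

```python
def resolve_page(complete_slugs, complete_path):
    """complete_slugs maps page id -> 'grand-parent/parent/slug'.
    Returns the matching page id, or None."""
    path = complete_path.strip('/')
    if not path:
        return None  # the real manager returns the first root page here
    slug = path.rsplit('/', 1)[-1]
    candidates = [pid for pid, s in complete_slugs.items()
                  if s.rsplit('/', 1)[-1] == slug]
    if len(candidates) == 1:
        return candidates[0]
    for pid in candidates:
        if complete_slugs[pid] == path:
            return pid
    return None


pages = {1: 'same-slug', 2: 'same-slug/same-slug'}
print(resolve_page(pages, '/same-slug/same-slug/'))  # 2
print(resolve_page(pages, '/other/'))                # None
```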
id: 6409
repository_name: ArabellaTech/django-basic-cms
file_path: ArabellaTech_django-basic-cms/basic_cms/migrations/0001_initial.py
class_name: basic_cms.migrations.0001_initial.Migration
human_written_code:
class Migration(migrations.Migration):
dependencies = [
('taggit', '0001_initial'),
('sites', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Content',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('language', models.CharField(max_length=5, verbose_name='language')),
('body', models.TextField(verbose_name='body')),
('type', models.CharField(max_length=100, verbose_name='type', db_index=True)),
('creation_date', models.DateTimeField(default=basic_cms.utils.now_utc, verbose_name='creation date', editable=False)),
],
options={
'get_latest_by': 'creation_date',
'verbose_name': 'content',
'verbose_name_plural': 'contents',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Page',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('creation_date', models.DateTimeField(default=basic_cms.utils.now_utc, verbose_name='creation date', editable=False)),
('publication_date', models.DateTimeField(help_text='When the page should go\n live. Status must be "Published" for page to go live.', null=True, verbose_name='publication date', blank=True)),
('publication_end_date', models.DateTimeField(help_text='When to expire the page.\n Leave empty to never expire.', null=True, verbose_name='publication end date', blank=True)),
('last_modification_date', models.DateTimeField(verbose_name='last modification date')),
('status', models.IntegerField(default=0, verbose_name='status', choices=[(1, 'Published'), (3, 'Hidden'), (0, 'Draft')])),
('template', models.CharField(max_length=100, null=True, verbose_name='template', blank=True)),
('delegate_to', models.CharField(max_length=100, null=True, verbose_name='delegate to', blank=True)),
('freeze_date', models.DateTimeField(help_text="Don't publish any content\n after this date.", null=True, verbose_name='freeze date', blank=True)),
('redirect_to_url', models.CharField(max_length=200, null=True, blank=True)),
('lft', models.PositiveIntegerField(editable=False, db_index=True)),
('rght', models.PositiveIntegerField(editable=False, db_index=True)),
('tree_id', models.PositiveIntegerField(editable=False, db_index=True)),
('level', models.PositiveIntegerField(editable=False, db_index=True)),
('author', models.ForeignKey(related_name=b'pages', verbose_name='author', to=settings.AUTH_USER_MODEL)),
('parent', models.ForeignKey(related_name=b'children', verbose_name='parent', blank=True, to='basic_cms.Page', null=True)),
('redirect_to', models.ForeignKey(related_name=b'redirected_pages', blank=True, to='basic_cms.Page', null=True)),
('sites', models.ManyToManyField(default=[1], help_text='The site(s) the page is accessible at.', verbose_name='sites', to='sites.Site', related_name=b'pages')),
('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
],
options={
'ordering': ['tree_id', 'lft'],
'get_latest_by': 'publication_date',
'verbose_name': 'page',
'verbose_name_plural': 'pages',
'permissions': [('can_freeze', 'Can freeze page'), ('can_publish', 'Can publish page'), ('can_manage_en_gb', 'Manage Base'), ('can_manage_eng', 'Manage English')],
},
bases=(models.Model,),
),
migrations.CreateModel(
name='PageAlias',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('url', models.CharField(unique=True, max_length=255)),
('page', models.ForeignKey(verbose_name='page', blank=True, to='basic_cms.Page', null=True)),
],
options={
'verbose_name_plural': 'Aliases',
},
bases=(models.Model,),
),
migrations.AddField(
model_name='content',
name='page',
field=models.ForeignKey(verbose_name='page', to='basic_cms.Page'),
preserve_default=True,
),
]
class_skeleton:
class Migration(migrations.Migration):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 76 | 2 | 74 | 3 | 73 | 0 | 3 | 3 | 2 | 0 | 1 | 0 | 0 |
id: 6410
repository_name: ArabellaTech/django-basic-cms
file_path: ArabellaTech_django-basic-cms/basic_cms/tests/test_functionnal.py
class_name: basic_cms.tests.test_functionnal.FunctionnalTestCase
human_written_code:
class FunctionnalTestCase(TestCase):
"""Django page CMS functionnal tests suite class."""
def test_add_page(self):
"""Test that the add admin page could be displayed via the
admin"""
c = self.get_admin_client()
response = c.get('/admin/basic_cms/page/add/')
self.assertEqual(response.status_code, 200)
def test_create_page(self):
"""Test that a page can be created via the admin."""
c = self.get_admin_client()
page_data = self.get_new_page_data()
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
slug_content = Content.objects.get_content_slug_by_slug(
page_data['slug']
)
assert(slug_content is not None)
page = slug_content.page
self.assertEqual(page.title(), page_data['title'])
self.assertEqual(page.slug(), page_data['slug'])
self.assertNotEqual(page.last_modification_date, None)
def test_delete_page(self):
"""Create a page, then delete it."""
c = self.get_admin_client()
page_data = self.get_new_page_data()
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
slug_content = Content.objects.get_content_slug_by_slug(
page_data['slug']
)
assert(slug_content is not None)
pageCount = Page.objects.count()
page = slug_content.page
page.delete()
slug_content = Content.objects.get_content_slug_by_slug(
page_data['slug']
)
assert(slug_content is None)
self.assertEqual(Page.objects.count(), pageCount - 1)
def test_slug_collision(self):
"""Test a slug collision."""
self.set_setting("PAGE_UNIQUE_SLUG_REQUIRED", True)
c = self.get_admin_client()
page_data = self.get_new_page_data()
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
self.set_setting("PAGE_UNIQUE_SLUG_REQUIRED", False)
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertEqual(response.status_code, 200)
page1 = Content.objects.get_content_slug_by_slug(page_data['slug']).page
page_data['position'] = 'first-child'
page_data['target'] = page1.id
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
page2 = Content.objects.get_content_slug_by_slug(page_data['slug']).page
self.assertNotEqual(page1.id, page2.id)
def test_details_view(self):
"""Test the details view basics."""
c = self.get_admin_client()
response = c.get(self.get_page_url())
self.assertEqual(response.status_code, 404)
page_data = self.get_new_page_data()
page_data['status'] = Page.DRAFT
response = c.post('/admin/basic_cms/page/add/', page_data)
response = c.get(self.get_page_url())
self.assertEqual(response.status_code, 200)
page_data = self.get_new_page_data()
page_data['status'] = Page.PUBLISHED
page_data['slug'] = 'test-page-2'
page_data['template'] = 'pages/examples/index.html'
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
response = c.get(self.get_page_url('test-page-2'))
self.assertEqual(response.status_code, 200)
def test_edit_page(self):
"""Test that a page can edited via the admin."""
c = self.get_admin_client()
c.login(username='batiste', password='b')
page_data = self.get_new_page_data()
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
page = Page.objects.all()[0]
response = c.get('/admin/basic_cms/page/%d/' % page.id)
self.assertEqual(response.status_code, 200)
page_data['title'] = 'changed title'
page_data['body'] = 'changed body'
response = c.post('/admin/basic_cms/page/%d/' % page.id, page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
page = Page.objects.get(id=page.id)
self.assertEqual(page.title(), 'changed title')
body = Content.objects.get_content(page, 'en-us', 'body')
self.assertEqual(body, 'changed body')
def test_site_framework(self):
"""Test the site framework, and test if it's possible to
disable it."""
from basic_cms import settings as pages_settings
# it's not possible to enforce PAGE_USE_SITE_ID in the tests
if not pages_settings.PAGE_USE_SITE_ID:
#TODO: use unittest.skip skip when 2.7
return
# this is necessary to make the test pass
setattr(pages_settings, "SITE_ID", 2)
c = self.get_admin_client()
c.login(username='batiste', password='b')
page_data = self.get_new_page_data()
page_data["sites"] = [2]
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
page = Content.objects.get_content_slug_by_slug(page_data['slug']).page
self.assertEqual(page.sites.count(), 1)
self.assertEqual(page.sites.all()[0].id, 2)
page_data = self.get_new_page_data()
page_data["sites"] = [3]
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
# we cannot get a slug that doesn't exist
content = Content.objects.get_content_slug_by_slug("this doesn't exist")
self.assertEqual(content, None)
# we cannot get the data posted on another site
content = Content.objects.get_content_slug_by_slug(page_data['slug'])
self.assertEqual(content, None)
setattr(pages_settings, "SITE_ID", 3)
page = Content.objects.get_content_slug_by_slug(page_data['slug']).page
self.assertEqual(page.sites.count(), 1)
self.assertEqual(page.sites.all()[0].id, 3)
# with param
self.assertEqual(Page.objects.on_site(2).count(), 1)
self.assertEqual(Page.objects.on_site(3).count(), 1)
# without param
self.assertEqual(Page.objects.on_site().count(), 1)
setattr(pages_settings, "SITE_ID", 2)
self.assertEqual(Page.objects.on_site().count(), 1)
page_data = self.get_new_page_data()
page_data["sites"] = [2, 3]
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
self.assertEqual(Page.objects.on_site(3).count(), 2)
self.assertEqual(Page.objects.on_site(2).count(), 2)
self.assertEqual(Page.objects.on_site().count(), 2)
setattr(pages_settings, "PAGE_USE_SITE_ID", False)
# we should get everything
self.assertEqual(Page.objects.on_site().count(), 3)
setattr(pages_settings, "SITE_ID", 1)
def test_languages(self):
"""Test post a page with different languages
and test that the admin views works correctly."""
c = self.get_admin_client()
c.login(username='batiste', password='b')
# test that the client language setting is used in add page admin
c.cookies["django_language"] = 'de'
response = c.get('/admin/basic_cms/page/add/')
self.assertContains(response, 'value="de"')
page_data = self.get_new_page_data()
page_data["title"] = 'english title'
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
page = Page.objects.all()[0]
self.assertEqual(page.get_languages(), ['en-us'])
# test the language cache
self.assertEqual(page.get_languages(), ['en-us'])
# this test only works in version superior of 1.0.2
django_version = django.get_version().rsplit()[0].split('.')
if len(django_version) > 2:
major, middle, minor = [int(v) for v in django_version]
else:
major, middle = [int(v) for v in django_version]
if major >= 1 and middle > 0:
response = c.get('/admin/basic_cms/page/%d/?language=de' % page.id)
self.assertContains(response, 'value="de"')
# add a french version of the same page
page_data["language"] = 'fr-ch'
page_data["title"] = 'french title'
response = c.post('/admin/basic_cms/page/%d/' % page.id, page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
# test that the frontend view use the good parameters
# I cannot find a way of setting the accept-language HTTP
# header so I used django_language cookie instead
c = self.get_admin_client()
c.cookies["django_language"] = 'en-us'
response = c.get(page.get_url_path())
self.assertContains(response, 'english title')
self.assertContains(response, 'lang="en-us"')
self.assertNotContains(response, 'french title')
c = self.get_admin_client()
c.cookies["django_language"] = 'fr-ch'
response = c.get(page.get_url_path())
self.assertContains(response, 'french title')
self.assertContains(response, 'lang="fr-ch"')
self.assertNotContains(response, 'english title')
# this should be mapped to the fr-ch content
c = self.get_admin_client()
c.cookies["django_language"] = 'fr-fr'
response = c.get(page.get_url_path())
self.assertContains(response, 'french title')
self.assertContains(response, 'lang="fr-ch"')
def test_revision(self):
"""Test that a page can edited several times."""
c = self.get_admin_client()
c.login(username='batiste', password='b')
page_data = self.get_new_page_data()
response = c.post('/admin/basic_cms/page/add/', page_data)
page = Page.objects.all()[0]
page_data['body'] = 'changed body'
response = c.post('/admin/basic_cms/page/%d/' % page.id, page_data)
self.assertEqual(Content.objects.get_content(page, 'en-us', 'body'),
'changed body')
page_data['body'] = 'changed body 2'
response = c.post('/admin/basic_cms/page/%d/' % page.id, page_data)
page.invalidate()
self.assertEqual(Content.objects.get_content(page, 'en-us', 'body'),
'changed body 2')
response = c.get(page.get_url_path())
self.assertContains(response, 'changed body 2', 1)
self.set_setting("PAGE_CONTENT_REVISION", False)
self.assertEqual(Content.objects.get_content(page, 'en-us', 'body'),
'changed body 2')
def test_placeholder(self):
"""
Test that the placeholder is correctly displayed in
the admin
"""
c = self.get_admin_client()
c.login(username='batiste', password='b')
page_data = self.get_new_page_data()
page_data['template'] = 'pages/examples/nice.html'
response = c.post('/admin/basic_cms/page/add/', page_data)
page = Page.objects.all()[0]
response = c.get('/admin/basic_cms/page/%d/' % page.id)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'name="right-column"', 1)
def test_directory_slug(self):
"""
Test diretory slugs
"""
self.set_setting("PAGE_UNIQUE_SLUG_REQUIRED", False)
c = self.get_admin_client()
c.login(username='batiste', password='b')
page_data = self.get_new_page_data()
page_data['title'] = 'parent title'
page_data['slug'] = 'same-slug'
response = c.post('/admin/basic_cms/page/add/', page_data)
# the redirect tell that the page has been create correctly
self.assertRedirects(response, '/admin/basic_cms/page/')
response = c.get(self.get_page_url('same-slug/'))
self.assertEqual(response.status_code, 200)
page = Page.objects.all()[0]
response = c.post('/admin/basic_cms/page/add/', page_data)
# we cannot create 2 root page with the same slug
# this assert test that the creation fails as wanted
self.assertEqual(response.status_code, 200)
page1 = Content.objects.get_content_slug_by_slug(page_data['slug']).page
self.assertEqual(page1.id, page.id)
page_data['title'] = 'children title'
page_data['target'] = page1.id
page_data['position'] = 'first-child'
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
# finaly test that we can get every page according the path
response = c.get(self.get_page_url('same-slug'))
self.assertContains(response, "parent title", 3)
response = c.get(self.get_page_url('same-slug/same-slug'))
self.assertContains(response, "children title", 3)
def test_page_admin_view(self):
"""Test page admin view"""
c = self.get_admin_client()
c.login(username='batiste', password='b')
page_data = self.get_new_page_data()
page_data['slug'] = 'page-1'
response = c.post('/admin/basic_cms/page/add/', page_data)
page = Content.objects.get_content_slug_by_slug('page-1').page
self.assertEqual(page.status, 1)
response = c.post('/admin/basic_cms/page/%d/change-status/' %
page.id, {'status': Page.DRAFT})
page = Content.objects.get_content_slug_by_slug('page-1').page
self.assertEqual(page.status, Page.DRAFT)
url = '/admin/basic_cms/page/%d/modify-content/title/en-us/' % page.id
response = c.post(url, {'content': 'test content'})
self.assertEqual(page.title(), 'test content')
# TODO: realy test these methods
url = '/admin/basic_cms/page/%d/traduction/en-us/' % page.id
response = c.get(url)
self.assertEqual(response.status_code, 200)
url = '/admin/basic_cms/page/%d/sub-menu/' % page.id
response = c.get(url)
self.assertEqual(response.status_code, 200)
url = '/admin/basic_cms/page/%d/get-content/%d/' % (page.id,
Content.objects.get_content_slug_by_slug('page-1').id)
response = c.get(url)
self.assertEqual(response.status_code, 200)
url = '/admin/basic_cms/page/%d/delete-content/en-us/' % page.id
response = c.get(url)
self.assertEqual(response.status_code, 302)
def test_page_alias(self):
"""Test page aliasing system"""
c = self.get_admin_client()
c.login(username='batiste', password='b')
# create some pages
page_data = self.get_new_page_data()
page_data['title'] = 'home-page-title'
page_data['slug'] = 'home-page'
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
page_data['title'] = 'downloads-page-title'
page_data['slug'] = 'downloads-page'
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
# create aliases for the pages
page = Page.objects.from_path('home-page', None)
self.assertTrue(page)
p = PageAlias(page=page, url='/index.php')
p.save()
page = Page.objects.from_path('downloads-page', None)
self.assertTrue(page)
p = PageAlias(page=page, url='index.php?page=downloads')
p.save()
# now check whether we can retrieve the pages.
# is the homepage available from is alias
response = c.get(self.get_page_url('index.php'))
self.assertRedirects(response, self.get_page_url('home-page'), 301)
# for the download page, the slug is canonical
response = c.get(self.get_page_url('downloads-page/'))
self.assertContains(response, "downloads-page-title", 3)
# calling via its alias must cause redirect
response = c.get(self.get_page_url('index.php') + '?page=downloads')
self.assertRedirects(response,
self.get_page_url('downloads-page'), 301)
def test_page_redirect_to(self):
"""Test page redirected to an other page."""
client = self.get_admin_client()
# create some pages
page1 = self.create_new_page(client)
page2 = self.create_new_page(client)
page1.redirect_to = page2
page1.save()
# now check whether you go to the target page.
response = client.get(page1.get_url_path())
self.assertRedirects(response, page2.get_url_path(), 301)
def test_page_valid_targets(self):
"""Test page valid_targets method"""
c = self.get_admin_client()
c.login(username='batiste', password='b')
page_data = self.get_new_page_data()
page_data['slug'] = 'root'
response = c.post('/admin/basic_cms/page/add/', page_data)
root_page = Content.objects.get_content_slug_by_slug('root').page
page_data['position'] = 'first-child'
page_data['target'] = root_page.id
page_data['slug'] = 'child-1'
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertEqual(response.status_code, 302)
c1 = Content.objects.get_content_slug_by_slug('child-1').page
root_page = Content.objects.get_content_slug_by_slug('root').page
self.assertEqual(len(root_page.valid_targets()), 0)
self.assertEqual(str(c1.valid_targets()),
"[<Page: root>]")
def test_ajax_language(self):
"""Test that language is working properly"""
c = self.get_admin_client()
c.login(username='batiste', password='b')
# Activate a language other than settings.LANGUAGE_CODE
response = c.post('/i18n/setlang/', {'language': 'fr-ch'})
try:
from django.utils.translation import LANGUAGE_SESSION_KEY
except ImportError:
LANGUAGE_SESSION_KEY = 'django_language'
self.assertEqual(c.session.get(LANGUAGE_SESSION_KEY, False), 'fr-ch')
# Make sure we're in french
response = c.get('/admin/basic_cms/page/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Ajouter')
# Create some pages (taken from test_tree_admin_interface)
page_data = self.get_new_page_data()
page_data['slug'] = 'root'
response = c.post('/admin/basic_cms/page/add/', page_data)
root_page = Content.objects.get_content_slug_by_slug('root').page
page_data['position'] = 'first-child'
page_data['target'] = root_page.id
page_data['slug'] = 'child-1'
response = c.post('/admin/basic_cms/page/add/', page_data)
child_1 = Content.objects.get_content_slug_by_slug('child-1').page
page_data['slug'] = 'child-2'
response = c.post('/admin/basic_cms/page/add/', page_data)
Content.objects.get_content_slug_by_slug('child-2').page
self.assertEqual(str(Page.objects.all()),
"[<Page: root>, <Page: child-2>, <Page: child-1>]")
"""
The relevant bit, fixed by rev 501: the response issued by a move
command returns content localized in settings.LANGUAGE_CODE (i.e. 'en´)
even though the original AJAX request passed in a the correct
session ID localizing this client as fr-ch
This is probably because the LocaleMiddleware gets instantiated
with a couple request_mocks which have no real connection to the
AJAX request *but* django.utils.translation caches the active
language on a per thread basis.
This means that the first "bogus" call to
LocaleMiddleware.process_request will "kill" the localization
data for the AJAX request.
Rev. 501 fixes this by passing in the language code from the original
request.
"""
response = c.post('/admin/basic_cms/page/%d/move-page/' % child_1.id,
{'position': 'first-child', 'target': root_page.id})
# Make sure the content response we got was in french
self.assertContains(response, 'Auteur')
def test_view_context(self):
"""
Test that the default view can only return the context
"""
c = self.get_admin_client()
c.login(username='batiste', password='b')
page_data = self.get_new_page_data()
page_data['slug'] = 'page1'
# create a page for the example otherwise you will get a Http404 error
c.post('/admin/basic_cms/page/add/', page_data)
page1 = Content.objects.get_content_slug_by_slug('page1').page
from basic_cms.views import details
from basic_cms.http import get_request_mock
request = get_request_mock()
context = details(request, path='/page1/', only_context=True)
self.assertEqual(context['current_page'], page1)
def test_request_mockup(self):
from basic_cms.http import get_request_mock
request = get_request_mock()
self.assertEqual(hasattr(request, 'session'), True)
def test_tree_admin_interface(self):
"""
Test that moving/creating page in the tree is working properly
using the admin interface
"""
c = self.get_admin_client()
c.login(username='batiste', password='b')
page_data = self.get_new_page_data()
page_data['slug'] = 'root'
response = c.post('/admin/basic_cms/page/add/', page_data)
root_page = Content.objects.get_content_slug_by_slug('root').page
self.assertTrue(root_page.is_first_root())
page_data['position'] = 'first-child'
page_data['target'] = root_page.id
page_data['slug'] = 'child-1'
response = c.post('/admin/basic_cms/page/add/', page_data)
child_1 = Content.objects.get_content_slug_by_slug('child-1').page
self.assertFalse(child_1.is_first_root())
page_data['slug'] = 'child-2'
response = c.post('/admin/basic_cms/page/add/', page_data)
child_2 = Content.objects.get_content_slug_by_slug('child-2').page
self.assertEqual(str(Page.objects.all()),
"[<Page: root>, <Page: child-2>, <Page: child-1>]")
# move page 1 in the first position
response = c.post('/admin/basic_cms/page/%d/move-page/' % child_1.id,
{'position': 'first-child', 'target': root_page.id})
self.assertEqual(str(Page.objects.all()),
"[<Page: root>, <Page: child-1>, <Page: child-2>]")
# move page 2 in the first position
response = c.post('/admin/basic_cms/page/%d/move-page/' % child_2.id,
{'position': 'left', 'target': child_1.id})
self.assertEqual(str(Page.objects.all()),
"[<Page: root>, <Page: child-2>, <Page: child-1>]")
# try to create a sibling with the same slug, via left, right
from basic_cms import settings as pages_settings
setattr(pages_settings, "PAGE_UNIQUE_SLUG_REQUIRED", False)
page_data['target'] = child_2.id
page_data['position'] = 'left'
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertEqual(response.status_code, 200)
# try to create a sibling with the same slug, via first-child
page_data['target'] = root_page.id
page_data['position'] = 'first-child'
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertEqual(response.status_code, 200)
# try to create a second page 2 in root
del page_data['target']
del page_data['position']
setattr(pages_settings, "PAGE_UNIQUE_SLUG_REQUIRED", True)
# cannot create because slug exists
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertEqual(response.status_code, 200)
# Now it should work beause the page is not a sibling
setattr(pages_settings, "PAGE_UNIQUE_SLUG_REQUIRED", False)
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Page.objects.count(), 4)
# Should not work because we already have sibling at the same level
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertEqual(response.status_code, 200)
# try to change the page 2 slug into page 1
page_data['slug'] = 'child-1'
response = c.post('/admin/basic_cms/page/%d/' % child_2.id, page_data)
self.assertEqual(response.status_code, 200)
setattr(pages_settings, "PAGE_UNIQUE_SLUG_REQUIRED", True)
response = c.post('/admin/basic_cms/page/%d/' % child_2.id, page_data)
self.assertEqual(response.status_code, 200)
def test_tree(self):
"""
Test that the navigation tree works properly with mptt.
"""
c = self.get_admin_client()
c.login(username='batiste', password='b')
page_data = self.get_new_page_data()
page_data['slug'] = 'page1'
c.post('/admin/basic_cms/page/add/', page_data)
page_data['slug'] = 'page2'
c.post('/admin/basic_cms/page/add/', page_data)
page_data['slug'] = 'page3'
c.post('/admin/basic_cms/page/add/', page_data)
self.assertEqual(str(Page.objects.navigation()),
"[<Page: page1>, <Page: page2>, <Page: page3>]")
p1 = Content.objects.get_content_slug_by_slug('page1').page
p2 = Content.objects.get_content_slug_by_slug('page2').page
p3 = Content.objects.get_content_slug_by_slug('page3').page
p2.move_to(p1, 'left')
p2.save()
self.assertEqual(str(Page.objects.navigation()),
"[<Page: page2>, <Page: page1>, <Page: page3>]")
p3.move_to(p2, 'left')
p3.save()
self.assertEqual(str(Page.objects.navigation()),
"[<Page: page3>, <Page: page2>, <Page: page1>]")
p1 = Content.objects.get_content_slug_by_slug('page1').page
p2 = Content.objects.get_content_slug_by_slug('page2').page
p3 = Content.objects.get_content_slug_by_slug('page3').page
p3.move_to(p1, 'first-child')
p2.move_to(p1, 'first-child')
self.assertEqual(str(Page.objects.navigation()),
"[<Page: page1>]")
p3 = Content.objects.get_content_slug_by_slug('page3').page
p3.move_to(p1, 'left')
self.assertEqual(str(Page.objects.navigation()),
"[<Page: page3>, <Page: page1>]")
def test_page_redirect_to_url(self):
"""Test page redirected to external url."""
client = self.get_admin_client()
page1 = self.create_new_page(client)
url = 'http://code.google.com/p/django-page-cms/'
page1.redirect_to_url = url
page1.save()
# now check whether we can retrieve the page.
response = client.get(page1.get_url_path())
self.assertTrue(response.status_code == 301)
self.assertTrue(response['Location'] == url)
def test_page_freeze_date(self):
"""Test page freezing feature."""
c = self.get_admin_client()
page_data = self.get_new_page_data()
page_data['title'] = 'before'
page_data['slug'] = 'before'
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
page = Page.objects.from_path('before', None)
self.assertEqual(page.freeze_date, None)
limit = datetime.datetime.now()
page.freeze_date = limit
page.save()
page_data['title'] = 'after'
page_data['slug'] = 'after'
# this post erase the limit
response = c.post('/admin/basic_cms/page/%d/' % page.id, page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
page = Page.objects.from_path('after', None)
page.freeze_date = limit
self.assertEqual(page.slug(), 'before')
page.freeze_date = None
page.save()
self.assertEqual(page.slug(), 'after')
page.freeze_date = limit
page.save()
self.assertEqual(page.slug(), 'before')
def test_delegate_to(self):
"""Test the view delegate feature."""
c = self.get_admin_client()
page_data = self.get_new_page_data()
page_data['title'] = 'delegate'
page_data['slug'] = 'delegate'
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
page = Page.objects.from_path('delegate', None)
from basic_cms import urlconf_registry as reg
reg.register_urlconf('test', 'basic_cms.testproj.documents.urls',
label='test')
page.delegate_to = 'test'
page.save()
response = c.get(self.get_page_url('delegate'))
self.assertEqual(response.status_code, 200)
from basic_cms.testproj.documents.models import Document
doc = Document(title='doc title 1', text='text', page=page)
doc.save()
response = c.get(self.get_page_url('delegate/doc-%d' % doc.id))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "doc title 1")
reg.registry = []
def test_untranslated(self):
"""Test the untranslated feature in the admin."""
c = self.get_admin_client()
page_data = self.get_new_page_data()
page_data['title'] = 'untranslated'
page_data['slug'] = 'untranslated'
unstranslated_string = 'the untranslated string'
page_data['untrans'] = unstranslated_string
page_data['template'] = 'pages/tests/untranslated.html'
response = c.post('/admin/basic_cms/page/add/', page_data)
self.assertRedirects(response, '/admin/basic_cms/page/')
page = Page.objects.from_path('untranslated', None)
self.assertEqual(
Content.objects.get_content(page, 'en-us', 'untrans'),
unstranslated_string
)
page_data['untrans'] = ''
response = c.get('/admin/basic_cms/page/%d/?language=fr-ch' % page.id)
self.assertContains(response, unstranslated_string)
def test_root_page(self):
"""Test that the root page doesn't trigger a 404."""
c = self.get_admin_client()
self.new_page(content={'slug': 'this-is-not-a-404'})
self.assertEqual(Page.objects.count(), 1)
page = Page.objects.on_site()[0]
self.assertTrue(page.is_first_root())
response = c.get(self.get_page_url())
self.assertEqual(response.status_code, 200)
def test_page_with_trailing_slash(self):
"""
Test that a page is also available with and without a trailing slash.
"""
c = self.get_admin_client()
self.new_page(content={'slug': 'root'})
self.new_page(content={'slug': 'other'})
response = c.get(self.get_page_url('other'))
self.assertEqual(response.status_code, 200)
response = c.get(self.get_page_url('other/'))
self.assertEqual(response.status_code, 200)
def test_page_sitemap(self):
"""
Test the sitemap class
"""
c = self.get_admin_client()
page1 = self.new_page(content={'slug': 'english-slug'})
page1.save()
Content(page=page1, language='fr-ch', type='slug',
body='french-slug').save()
response = c.get('/sitemap.xml')
self.assertContains(response, 'english-slug')
self.assertNotContains(response, 'french-slug')
response = c.get('/sitemap2.xml')
self.assertContains(response, 'english-slug')
self.assertContains(response, 'french-slug')
|
class FunctionnalTestCase(TestCase):
'''Django page CMS functionnal tests suite class.'''
def test_add_page(self):
'''Test that the add admin page could be displayed via the
admin'''
pass
def test_create_page(self):
'''Test that a page can be created via the admin.'''
pass
def test_delete_page(self):
'''Create a page, then delete it.'''
pass
def test_slug_collision(self):
'''Test a slug collision.'''
pass
def test_details_view(self):
'''Test the details view basics.'''
pass
def test_edit_page(self):
'''Test that a page can edited via the admin.'''
pass
def test_site_framework(self):
'''Test the site framework, and test if it's possible to
disable it.'''
pass
def test_languages(self):
'''Test post a page with different languages
and test that the admin views works correctly.'''
pass
def test_revision(self):
'''Test that a page can edited several times.'''
pass
def test_placeholder(self):
'''
Test that the placeholder is correctly displayed in
the admin
'''
pass
def test_directory_slug(self):
'''
Test diretory slugs
'''
pass
def test_page_admin_view(self):
'''Test page admin view'''
pass
def test_page_alias(self):
'''Test page aliasing system'''
pass
def test_page_redirect_to(self):
'''Test page redirected to an other page.'''
pass
def test_page_valid_targets(self):
'''Test page valid_targets method'''
pass
def test_ajax_language(self):
'''Test that language is working properly'''
pass
def test_view_context(self):
'''
Test that the default view can only return the context
'''
pass
def test_request_mockup(self):
pass
def test_tree_admin_interface(self):
'''
Test that moving/creating page in the tree is working properly
using the admin interface
'''
pass
    def test_tree(self):
'''
Test that the navigation tree works properly with mptt.
'''
pass
def test_page_redirect_to_url(self):
'''Test page redirected to external url.'''
pass
def test_page_freeze_date(self):
'''Test page freezing feature.'''
pass
def test_delegate_to(self):
'''Test the view delegate feature.'''
pass
def test_untranslated(self):
'''Test the untranslated feature in the admin.'''
pass
def test_root_page(self):
'''Test that the root page doesn't trigger a 404.'''
pass
def test_page_with_trailing_slash(self):
'''
Test that a page is also available with and without a trailing slash.
'''
pass
def test_page_sitemap(self):
'''
Test the sitemap class
'''
pass
| 28 | 27 | 28 | 5 | 20 | 4 | 1 | 0.2 | 1 | 8 | 4 | 0 | 27 | 0 | 27 | 37 | 795 | 151 | 538 | 154 | 502 | 106 | 507 | 154 | 471 | 3 | 1 | 1 | 31 |
6,411 |
ArabellaTech/django-basic-cms
|
ArabellaTech_django-basic-cms/basic_cms/admin/__init__.py
|
basic_cms.admin.PageAdmin.Media
|
class Media:
css = {
'all': [join(settings.PAGES_MEDIA_URL, path) for path in (
'css/rte.css',
'css/pages.css'
)]
}
js = [join(settings.PAGES_MEDIA_URL, path) for path in (
'javascript/jquery.js',
'javascript/jquery.rte.js',
'javascript/pages.js',
'javascript/pages_list.js',
'javascript/pages_form.js',
'javascript/jquery.query-2.1.7.js',
)]
|
class Media:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 0 | 15 | 3 | 14 | 0 | 3 | 3 | 2 | 0 | 0 | 0 | 0 |
6,412 |
ArabellaTech/django-basic-cms
|
ArabellaTech_django-basic-cms/basic_cms/admin/forms.py
|
basic_cms.admin.forms.make_form.PageForm
|
class PageForm(SlugFormMixin):
"""Form for page creation"""
err_dict = {
'another_page_error': _('Another page with this slug already exists'),
'sibling_position_error': _('A sibling with this slug already exists at the targeted position'),
'child_error': _('A child with this slug already exists at the targeted position'),
'sibling_error': _('A sibling with this slug already exists'),
'sibling_root_error': _('A sibling with this slug already exists at the root level'),
}
language = forms.ChoiceField(
label=_('Language'),
choices=settings.PAGE_LANGUAGES,
widget=LanguageChoiceWidget()
)
template = forms.ChoiceField(
required=False,
label=_('Template'),
choices=settings.get_page_templates(),
)
delegate_to = forms.ChoiceField(
required=False,
label=_('Delegate to application'),
choices=get_choices(),
)
freeze_date = forms.DateTimeField(
required=False,
label=_('Freeze'),
help_text=_(
"Don't publish any content after this date. Format is 'Y-m-d H:M:S'")
# those make tests fail miserably
# widget=widgets.AdminSplitDateTime()
# widget=widgets.AdminTimeWidget()
)
target = forms.IntegerField(required=False, widget=forms.HiddenInput)
position = forms.CharField(required=False, widget=forms.HiddenInput)
class Meta:
model = model_
exclude = ('author', 'last_modification_date', 'parent')
def clean_slug(self):
"""Handle move action on the pages"""
slug = slugify(self.cleaned_data['slug'])
target = self.data.get('target', None)
position = self.data.get('position', None)
# this enforce a unique slug for every page
if settings.PAGE_AUTOMATIC_SLUG_RENAMING:
def is_slug_safe(slug):
content = Content.objects.get_content_slug_by_slug(slug)
if content is None:
return True
if self.instance.id:
if content.page.id == self.instance.id:
return True
else:
return False
return self._clean_page_automatic_slug_renaming(slug, is_slug_safe)
if settings.PAGE_UNIQUE_SLUG_REQUIRED:
return self._clean_page_unique_slug_required(slug)
if settings.PAGE_USE_SITE_ID:
if settings.PAGE_HIDE_SITES:
site_ids = [global_settings.SITE_ID]
else:
site_ids = [int(x) for x in self.data.getlist('sites')]
def intersects_sites(sibling):
return sibling.sites.filter(id__in=site_ids).count() > 0
else:
def intersects_sites(sibling):
return True
if not settings.PAGE_UNIQUE_SLUG_REQUIRED:
if target and position:
target = Page.objects.get(pk=target)
if position in ['right', 'left']:
slugs = [sibling.slug() for sibling in
target.get_siblings()
if intersects_sites(sibling)]
slugs.append(target.slug())
if slug in slugs:
raise forms.ValidationError(
self.err_dict['sibling_position_error'])
if position == 'first-child':
if slug in [sibling.slug() for sibling in
target.get_children()
if intersects_sites(sibling)]:
raise forms.ValidationError(
self.err_dict['child_error'])
else:
if self.instance.id:
if (slug in [sibling.slug() for sibling in
self.instance.get_siblings().exclude(
id=self.instance.id
) if intersects_sites(sibling)]):
raise forms.ValidationError(
self.err_dict['sibling_error'])
else:
if slug in [sibling.slug() for sibling in
Page.objects.root()
if intersects_sites(sibling)]:
raise forms.ValidationError(
self.err_dict['sibling_root_error'])
return slug
|
class PageForm(SlugFormMixin):
'''Form for page creation'''
class Meta:
def clean_slug(self):
'''Handle move action on the pages'''
pass
def is_slug_safe(slug):
pass
def intersects_sites(sibling):
pass
def intersects_sites(sibling):
pass
| 6 | 2 | 19 | 2 | 17 | 1 | 5 | 0.07 | 1 | 3 | 2 | 0 | 1 | 0 | 1 | 3 | 106 | 12 | 88 | 21 | 82 | 6 | 52 | 21 | 46 | 14 | 2 | 4 | 20 |
6,413 |
ArabellaTech/django-basic-cms
|
ArabellaTech_django-basic-cms/basic_cms/admin/forms.py
|
basic_cms.admin.forms.make_form.PageForm.Meta
|
class Meta:
model = model_
exclude = ('author', 'last_modification_date', 'parent')
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 0 | 0 | 0 |
6,414 |
ArabellaTech/django-basic-cms
|
ArabellaTech_django-basic-cms/basic_cms/admin/utils.py
|
basic_cms.admin.utils.make_inline_admin.ModelOptions
|
class ModelOptions(admin_class):
model = model_class
fk_name = 'page'
form = options.get('form', ModelForm)
extra = options.get('extra', 3)
# Since Django 1.2, max_num=None sets unlimited inlines,
# see https://docs.djangoproject.com/en/1.2/topics/forms/modelforms/#model-formsets-max-num
max_num = options.get(
'max_num', 0 if django.VERSION < (1, 2) else None)
|
class ModelOptions(admin_class):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 1 | 6 | 6 | 5 | 2 | 6 | 6 | 5 | 0 | 1 | 0 | 0 |
6,415 |
ArabellaTech/django-basic-cms
|
ArabellaTech_django-basic-cms/basic_cms/models.py
|
basic_cms.models.Content.Meta
|
class Meta:
get_latest_by = 'creation_date'
verbose_name = _('content')
verbose_name_plural = _('contents')
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
6,416 |
ArabellaTech/django-basic-cms
|
ArabellaTech_django-basic-cms/basic_cms/models.py
|
basic_cms.models.Page.Meta
|
class Meta:
"""Make sure the default page ordering is correct."""
ordering = ['tree_id', 'lft']
get_latest_by = "publication_date"
verbose_name = _('page')
verbose_name_plural = _('pages')
permissions = settings.PAGE_EXTRA_PERMISSIONS
|
class Meta:
'''Make sure the default page ordering is correct.'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.17 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | 0 | 6 | 6 | 5 | 1 | 6 | 6 | 5 | 0 | 0 | 0 | 0 |
6,417 |
ArabellaTech/django-basic-cms
|
ArabellaTech_django-basic-cms/basic_cms/models.py
|
basic_cms.models.PageAlias.Meta
|
class Meta:
verbose_name_plural = _('Aliases')
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 0 | 0 | 0 |
6,418 |
ArabellaTech/django-basic-cms
|
ArabellaTech_django-basic-cms/basic_cms/testproj/documents/models.py
|
basic_cms.testproj.documents.models.DocumentForm.Meta
|
class Meta:
model = Document
exclude = []
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 0 | 0 | 0 |
6,419 |
ArabellaTech/django-basic-cms
|
ArabellaTech_django-basic-cms/basic_cms/tests/test_unit.py
|
basic_cms.tests.test_unit.UnitTestCase.test_get_language_from_request.Req
|
class Req():
LANGUAGE_CODE = 'en-us'
GET = {}
|
class Req():
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 2 | 0 | 3 | 3 | 2 | 0 | 0 | 0 | 0 |
6,420 |
ArabellaTech/django-basic-cms
|
ArabellaTech_django-basic-cms/basic_cms/widgets.py
|
basic_cms.widgets.CKEditor.Media
|
class Media:
js = [join(PAGES_MEDIA_URL, 'ckeditor/ckeditor.js'),
join(settings.MEDIA_URL, 'filebrowser/js/FB_CKEditor.js'),
]
|
class Media:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 2 | 3 | 0 | 2 | 2 | 1 | 0 | 0 | 0 | 0 |
6,421 |
ArabellaTech/django-basic-cms
|
ArabellaTech_django-basic-cms/basic_cms/widgets.py
|
basic_cms.widgets.FileBrowseInput
|
class FileBrowseInput(FileBrowseWidget):
"""FileBrowseInput widget."""
def __init__(self, attrs={}):
super(FileBrowseInput, self).__init__(attrs)
|
class FileBrowseInput(FileBrowseWidget):
'''FileBrowseInput widget.'''
def __init__(self, attrs={}):
pass
| 2 | 1 | 2 | 0 | 2 | 0 | 1 | 0.33 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 5 | 1 | 3 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 1 | 0 | 1 |
6,422 |
ArabellaTech/django-basic-cms
|
ArabellaTech_django-basic-cms/basic_cms/tests/test_unit.py
|
basic_cms.tests.test_unit.UnitTestCase
|
class UnitTestCase(TestCase):
"""Django page CMS unit test suite class."""
def test_date_ordering(self):
"""Test page date ordering feature."""
self.set_setting("PAGE_USE_SITE_ID", False)
author = User.objects.all()[0]
yesterday = now_utc() - datetime.timedelta(days=1)
now = now_utc()
p1 = Page(author=author, status=Page.PUBLISHED, publication_date=now)
p1.save()
p2 = Page(
author=author,
publication_date=now,
status=Page.PUBLISHED
)
p2.save()
p3 = Page(
author=author,
publication_date=yesterday,
status=Page.PUBLISHED
)
p3.save()
p2.move_to(p1, position='first-child')
p3.move_to(p1, position='first-child')
p1 = Page.objects.get(pk=p1.id)
p2 = Page.objects.get(pk=p2.id)
p3 = Page.objects.get(pk=p3.id)
self.assertEqual(
[p.id for p in p1.get_children_for_frontend()],
[p3.id, p2.id]
)
self.assertEqual(
[p.id for p in p1.get_date_ordered_children_for_frontend()],
[p2.id, p3.id]
)
def test_widgets_registry(self):
"""Test the widget registry module."""
from basic_cms import widgets_registry as wreg
for widget in wreg.registry:
w = widget()
w.render('name', 'value')
try:
wreg.register_widget(wreg.registry[0])
raise AssertionError("Error not raised properly.")
except wreg.WidgetAlreadyRegistered:
pass
try:
wreg.get_widget('wrong')
raise AssertionError("Error not raised properly.")
except wreg.WidgetNotFound:
pass
def test_page_caculated_status(self):
"""Test calculated status property."""
self.set_setting("PAGE_SHOW_START_DATE", True)
yesterday = now_utc() - datetime.timedelta(days=1)
tomorrow = now_utc() + datetime.timedelta(days=1)
page = self.new_page()
self.assertEqual(page.calculated_status, Page.PUBLISHED)
page.publication_date = tomorrow
self.assertEqual(page.calculated_status, Page.DRAFT)
page.publication_date = yesterday
self.assertEqual(page.calculated_status, Page.PUBLISHED)
self.set_setting("PAGE_SHOW_END_DATE", True)
page.publication_end_date = yesterday
self.assertEqual(page.calculated_status, Page.EXPIRED)
def test_placeholder_inherit_content(self):
"""Test placeholder content inheritance between pages."""
self.set_setting("PAGE_USE_SITE_ID", False)
p1 = self.new_page(content={'inher': 'parent-content'})
p2 = self.new_page()
template = django.template.loader.get_template('pages/tests/test7.html')
context = Context({'current_page': p2, 'lang': 'en-us'})
self.assertEqual(template.render(context), '')
p2.move_to(p1, position='first-child')
self.assertEqual(template.render(context), 'parent-content')
def test_get_page_template_tag(self):
"""Test get_page template tag."""
context = Context({})
pl1 = """{% load pages_tags %}{% get_page "get-page-slug" as toto %}{{ toto }}"""
template = get_template_from_string(pl1)
self.assertEqual(template.render(context), u'None')
self.new_page({'slug': 'get-page-slug'})
self.assertEqual(template.render(context), u'get-page-slug')
def test_placeholder_all_syntaxes(self):
"""Test placeholder syntaxes."""
page = self.new_page()
context = Context({'current_page': page, 'lang': 'en-us'})
pl1 = """{% load pages_tags %}{% placeholder title as hello %}"""
template = get_template_from_string(pl1)
self.assertEqual(template.render(context), '')
pl1 = """{% load pages_tags %}{% placeholder title as hello %}{{ hello }}"""
template = get_template_from_string(pl1)
self.assertEqual(template.render(context), page.title())
# to be sure to raise an errors in parse template content
setattr(settings, "DEBUG", True)
page = self.new_page({'wrong': '{% wrong %}'})
context = Context({'current_page': page, 'lang': 'en-us'})
pl2 = """{% load pages_tags %}{% placeholder wrong parsed %}"""
template = get_template_from_string(pl2)
from basic_cms.placeholders import PLACEHOLDER_ERROR
error = PLACEHOLDER_ERROR % {
'name': 'wrong',
'error': "Invalid block tag: 'wrong'",
}
self.assertEqual(template.render(context), error)
# generate errors
pl3 = """{% load pages_tags %}{% placeholder %}"""
try:
template = get_template_from_string(pl3)
except TemplateSyntaxError:
pass
pl4 = """{% load pages_tags %}{% placeholder wrong wrong %}"""
try:
template = get_template_from_string(pl4)
except TemplateSyntaxError:
pass
pl5 = """{% load pages_tags %}{% placeholder wrong as %}"""
try:
template = get_template_from_string(pl5)
except TemplateSyntaxError:
pass
def test_parsed_template(self):
"""Test the parsed template syntax."""
setattr(settings, "DEBUG", True)
page = self.new_page({'title': '<b>{{ "hello"|capfirst }}</b>'})
page.save()
context = Context({'current_page': page, 'lang': 'en-us'})
pl_parsed = """{% load pages_tags %}{% placeholder title parsed %}"""
template = get_template_from_string(pl_parsed)
self.assertEqual(template.render(context), '<b>Hello</b>')
setattr(settings, "DEBUG", False)
page = self.new_page({'title': '<b>{{ "hello"|wrong_filter }}</b>'})
context = Context({'current_page': page, 'lang': 'en-us'})
self.assertEqual(template.render(context), u'')
def test_video(self):
"""Test video placeholder."""
page = self.new_page(content={
'title': 'video-page',
'video': 'http://www.youtube.com/watch?v=oHg5SJYRHA0\\\\'
})
context = Context({'current_page': page, 'lang': 'en-us'})
pl1 = """{% load pages_tags %}{% videoplaceholder video %}"""
template = get_template_from_string(pl1)
self.assertNotEqual(template.render(context), '')
self.assertTrue(len(template.render(context)) > 10)
def test_placeholder_untranslated_content(self):
"""Test placeholder untranslated content."""
self.set_setting("PAGE_USE_SITE_ID", False)
page = self.new_page(content={})
placeholder = PlaceholderNode('untrans', page='p', untranslated=True)
placeholder.save(page, 'fr-ch', 'test-content', True)
placeholder.save(page, 'en-us', 'test-content', True)
self.assertEqual(len(Content.objects.all()), 1)
self.assertEqual(Content.objects.all()[0].language, 'en-us')
placeholder = PlaceholderNode('untrans', page='p', untranslated=False)
placeholder.save(page, 'fr-ch', 'test-content', True)
self.assertEqual(len(Content.objects.all()), 2)
# test the syntax
page = self.new_page()
template = django.template.loader.get_template(
'pages/tests/untranslated.html')
context = Context({'current_page': page, 'lang': 'en-us'})
self.assertEqual(template.render(context), '')
def test_urlconf_registry(self):
"""Test urlconf_registry basic functions."""
reg.register_urlconf('Documents', 'example.documents.urls',
label='Display documents')
reg.get_urlconf('Documents')
try:
reg.register_urlconf('Documents', 'example.documents.urls',
label='Display documents')
except reg.UrlconfAlreadyRegistered:
pass
reg.registry = []
try:
reg.get_urlconf('Documents')
except reg.UrlconfNotFound:
pass
reg.register_urlconf('Documents', 'example.documents.urls',
label='Display documents')
self.assertEqual(reg.get_choices(),
[('', 'No delegation'), ('Documents', 'Display documents')])
def test_permissions(self):
pass
def test_managers(self):
# TODO: this test seems dependant from other tests
self.set_setting("PAGE_USE_SITE_ID", False)
Page.objects.populate_pages(child=2, depth=2)
for p in Page.objects.all():
p.invalidate()
self.assertEqual(Page.objects.count(), 3)
self.assertEqual(Page.objects.published().count(), 3)
self.assertEqual(Page.objects.drafts().count(), 0)
self.assertEqual(Page.objects.expired().count(), 0)
def test_get_content_tag(self):
"""
Test the {% get_content %} template tag
"""
page_data = {'title': 'test', 'slug': 'test'}
page = self.new_page(page_data)
context = RequestContext(MockRequest, {'page': page})
template = Template('{% load pages_tags %}'
'{% get_content page "title" "en-us" as content %}'
'{{ content }}')
self.assertEqual(template.render(context), page_data['title'])
template = Template('{% load pages_tags %}'
'{% get_content page "title" as content %}'
'{{ content }}')
self.assertEqual(template.render(context), page_data['title'])
def test_get_content_tag_bug(self):
"""
Make sure that {% get_content %} use the "lang" context variable if
no language string is provided.
"""
page_data = {'title': 'test', 'slug': 'english'}
page = self.new_page(page_data)
Content(page=page, language='fr-ch', type='title', body='french').save()
Content(page=page, language='fr-ch', type='slug', body='french').save()
self.assertEqual(page.slug(language='fr-ch'), 'french')
self.assertEqual(page.slug(language='en-us'), 'english')
# default
context = RequestContext(MockRequest, {'page': page})
template = Template('{% load pages_tags %}'
'{% get_content page "slug" as content %}'
'{{ content }}')
self.assertEqual(template.render(context), 'english')
# french specified
context = RequestContext(MockRequest, {'page': page, 'lang': 'fr'})
template = Template('{% load pages_tags %}'
'{% get_content page "slug" as content %}'
'{{ content }}')
self.assertEqual(template.render(context), 'french')
# english specified
context = RequestContext(MockRequest, {'page': page, 'lang': 'en-us'})
template = Template('{% load pages_tags %}'
'{% get_content page "slug" as content %}'
'{{ content }}')
self.assertEqual(template.render(context), 'english')
def test_show_content_tag(self):
"""
Test the {% show_content %} template tag.
"""
page_data = {'title': 'test', 'slug': 'test'}
page = self.new_page(page_data)
# cleanup the cache from previous tests
page.invalidate()
context = RequestContext(MockRequest, {'page': page, 'lang': 'en-us',
'path': '/page-1/'})
template = Template('{% load pages_tags %}'
'{% show_content page "title" "en-us" %}')
self.assertEqual(template.render(context), page_data['title'])
template = Template('{% load pages_tags %}'
'{% show_content page "title" %}')
self.assertEqual(template.render(context), page_data['title'])
def test_pages_siblings_menu_tag(self):
"""
Test the {% pages_siblings_menu %} template tag.
"""
page_data = {'title': 'test', 'slug': 'test'}
page = self.new_page(page_data)
# cleanup the cache from previous tests
page.invalidate()
context = RequestContext(MockRequest, {'page': page, 'lang': 'en-us',
'path': '/page-1/'})
template = Template('{% load pages_tags %}'
'{% pages_siblings_menu page %}')
template.render(context)
def test_show_absolute_url_with_language(self):
"""
Test a {% show_absolute_url %} template tag bug.
"""
page_data = {'title': 'english', 'slug': 'english'}
page = self.new_page(page_data)
Content(page=page, language='fr-ch', type='title', body='french').save()
Content(page=page, language='fr-ch', type='slug', body='french').save()
self.assertEqual(page.get_url_path(language='fr-ch'),
self.get_page_url(u'french'))
self.assertEqual(page.get_url_path(language='en-us'),
self.get_page_url(u'english'))
context = RequestContext(MockRequest, {'page': page})
template = Template('{% load pages_tags %}'
'{% show_absolute_url page "en-us" %}')
self.assertEqual(template.render(context),
self.get_page_url(u'english'))
template = Template('{% load pages_tags %}'
'{% show_absolute_url page "fr-ch" %}')
self.assertEqual(template.render(context),
self.get_page_url('french'))
def test_get_page_ids_by_slug(self):
"""
Test that get_page_ids_by_slug work as intented.
"""
page_data = {'title': 'test1', 'slug': 'test1'}
page1 = self.new_page(page_data)
self.assertEqual(
Content.objects.get_page_ids_by_slug('test1'),
[page1.id]
)
page_data = {'title': 'test1', 'slug': 'test1'}
page2 = self.new_page(page_data)
self.assertEqual(
Content.objects.get_page_ids_by_slug('test1'),
[page1.id, page2.id]
)
Content(page=page1, language='en-us', type='slug', body='test2').save()
self.assertEqual(
Content.objects.get_page_ids_by_slug('test1'),
[page1.id, page2.id]
)
Content(page=page1, language='en-us', type='slug', body='test1').save()
self.assertEqual(
Content.objects.get_page_ids_by_slug('test1'),
[page1.id, page2.id]
)
def test_get_language_from_request(self):
"""
Test that get_language_from_request return the default language even if a
unaccepted language is used.
"""
class Req():
LANGUAGE_CODE = 'en-us'
GET = {}
request = Req()
self.assertEqual(
get_language_from_request(request), 'en-us')
request.LANGUAGE_CODE = 'dont'
self.assertEqual(
get_language_from_request(request), 'en-us')
request.LANGUAGE_CODE = 'fr-ch'
self.assertEqual(
get_language_from_request(request), 'fr-ch')
def test_default_view_with_language_prefix(self):
"""
Test that everything is working with the language prefix option
activated.
"""
self.set_setting("PAGE_USE_LANGUAGE_PREFIX", True)
req = get_request_mock()
self.assertRaises(Http404, details, req, '/pages/')
page1 = self.new_page(content={'slug': 'page1'})
page2 = self.new_page(content={'slug': 'page2'})
self.assertEqual(page1.get_url_path(),
reverse('pages-details-by-path', args=[],
kwargs={'lang': 'en-us', 'path': 'page1'})
)
self.assertEqual(details(req, page1.get_url_path(),
only_context=True)['current_page'],
page1)
self.assertEqual(details(req, path=page2.get_complete_slug(),
only_context=True)['current_page'], page2)
self.assertEqual(details(req, page2.get_url_path(),
only_context=True)['current_page'],
page2)
self.set_setting("PAGE_USE_LANGUAGE_PREFIX", False)
self.assertEqual(details(req, page2.get_url_path(),
only_context=True)['current_page'],
page2)
def test_root_page_hidden_slug(self):
"""
Check that the root works properly in every case.
"""
page1 = self.new_page(content={'slug': 'page1'})
self.set_setting("PAGE_USE_LANGUAGE_PREFIX", False)
self.set_setting("PAGE_HIDE_ROOT_SLUG", True)
self.assertEqual(page1.is_first_root(), True)
self.assertEqual(page1.get_url_path(),
reverse('pages-details-by-path', args=[], kwargs={'path': ''})
)
self.set_setting("PAGE_USE_LANGUAGE_PREFIX", True)
self.assertEqual(page1.get_url_path(),
reverse('pages-details-by-path', args=[],
kwargs={'lang': 'en-us', 'path': ''})
)
self.set_setting("PAGE_HIDE_ROOT_SLUG", False)
page1.invalidate()
self.assertEqual(page1.get_url_path(),
reverse('pages-details-by-path', args=[],
kwargs={'lang': 'en-us', 'path': 'page1'})
)
self.set_setting("PAGE_USE_LANGUAGE_PREFIX", False)
self.assertEqual(page1.get_url_path(),
reverse('pages-details-by-path', args=[],
kwargs={'path': 'page1'})
)
def test_revision_depth(self):
"""
Check that PAGE_CONTENT_REVISION_DEPTH works.
"""
page1 = self.new_page(content={'slug': 'page1'})
self.set_setting("PAGE_CONTENT_REVISION_DEPTH", 3)
Content.objects.create_content_if_changed(page1, 'en-us', 'rev-test', 'rev1')
Content.objects.create_content_if_changed(page1, 'en-us', 'rev-test', 'rev2')
Content.objects.create_content_if_changed(page1, 'en-us', 'rev-test', 'rev3')
Content.objects.create_content_if_changed(page1, 'en-us', 'rev-test', 'rev4')
self.assertEqual(Content.objects.filter(type='rev-test').count(), 3)
self.assertEqual(
Content.objects.filter(type='rev-test').latest('creation_date').body,
'rev4')
def test_content_dict(self):
"""
Check that content_dict method works.
"""
page1 = self.new_page(content={'slug': 'page1'})
page1.save()
c = Content.objects.create_content_if_changed(page1, 'en-us', 'body', 'test')
self.assertEqual(
page1.content_by_language(language='en-us'),
[c]
)
def test_strict_urls(self):
"""
Check that the strict handling of URLs work as
intended.
"""
page1 = self.new_page(content={'slug': 'page1'})
page2 = self.new_page(content={'slug': 'page2'})
page1.save()
page2.save()
page2.parent = page1
page2.save()
page1 = Page.objects.get(id=page1.id)
self.assertTrue(page1.get_children(), [page2])
self.assertEqual(
Page.objects.from_path('wrong/path/page2', 'en-us'),
page2
)
self.set_setting("PAGE_USE_STRICT_URL", True)
self.assertEqual(
Page.objects.from_path('wrong/path/page2', 'en-us'),
None
)
self.assertEqual(
Page.objects.from_path('page1/page2', 'en-us'),
page2
)
def test_remove_slug(self):
"""Test the remove slug function."""
self.assertEqual(remove_slug('hello/world/toto'), 'hello/world')
self.assertEqual(remove_slug('hello/world'), 'hello')
self.assertEqual(remove_slug('/hello/world/'), 'hello')
self.assertEqual(remove_slug('hello'), None)
def test_path_too_long(self):
"""Test that the CMS try to resolve the whole page path to find
a suitable sub path with delegation."""
page1 = self.new_page(content={'slug': 'page1'})
page2 = self.new_page(content={'slug': 'page2'})
from basic_cms import urlconf_registry as reg
reg.register_urlconf('test', 'basic_cms.testproj.documents.urls',
label='test')
page2.delegate_to = 'test'
page1.delegate_to = 'test'
page1.save()
page2.save()
page2.parent = page1
page2.save()
from basic_cms.testproj.documents.models import Document
doc = Document(title='doc title 1', text='text', page=page1)
doc.save()
req = get_request_mock()
self.set_setting("PAGE_HIDE_ROOT_SLUG", False)
page1.invalidate()
page2.invalidate()
def _get_context_page(path):
return details(req, path, 'en-us')
self.assertEqual(_get_context_page('/').status_code, 200)
self.assertEqual(_get_context_page('/page1/').status_code, 200)
self.assertEqual(_get_context_page('/page1/').status_code, 200)
self.assertEqual(_get_context_page('/page1/page2').status_code, 301)
self.assertEqual(_get_context_page('/page1/page2/').status_code, 301)
self.assertEqual(_get_context_page('/page1/page2/doc-%d' % doc.id
).status_code, 301)
self.assertRaises(Http404, _get_context_page,
'/page1/page-wrong/doc-%d' % doc.id)
reg.registry = []
def test_po_file_imoprt_export(self):
"""Test the po files export and import."""
try:
import polib
except ImportError:
return unittest.skip("Polib is not installed")
page1 = self.new_page(content={'slug': 'page1', 'title': 'english title'})
page1.save()
#Content(page=page1, language='en-us', type='title', body='toto').save()
Content(page=page1, language='fr-ch', type='title', body='french title').save()
page1.invalidate()
import StringIO
stdout = StringIO.StringIO()
# TODO: might be nice to use a temp dir for this test
export_po_files(path='potests', stdout=stdout)
self.assertTrue("Export language fr-ch" in stdout.getvalue())
f = open("potests/fr-ch.po", "r+")
old = f.read().replace('french title', 'translated')
f.seek(0)
f.write(old)
f.close()
stdout = StringIO.StringIO()
import_po_files(path='potests', stdout=stdout)
self.assertTrue("Update language fr-ch" in stdout.getvalue())
self.assertTrue(("Update page %d" % page1.id) in stdout.getvalue())
self.assertTrue(page1.title(language='fr-ch'), 'translated')
def test_page_methods(self):
"""Test that some methods run properly."""
page1 = self.new_page(content={'slug': 'page1', 'title': 'hello'})
page2 = self.new_page(content={'slug': 'page2'})
page1.save()
page2.save()
page2.parent = page1
page2.save()
self.assertEqual(
page1.expose_content(),
u"hello"
)
self.assertEqual(
page2.slug_with_level(),
u" page2"
)
p = Page(author=page1.author)
self.assertEqual(str(p), u"Page without id")
p.save()
self.assertEqual(str(p), u"Page %d" % p.id)
def test_context_processor(self):
"""Test that the page's context processor is properly activated."""
from basic_cms.views import details
req = get_request_mock()
page1 = self.new_page(content={'slug': 'page1', 'title': 'hello'})
page1.save()
self.set_setting("PAGES_MEDIA_URL", "test_request_context")
self.assertContains(details(req, path='/'), "test_request_context")
def test_get_page_from_id_context_variable(self):
"""Test get_page_from_string_or_id with an id context variable."""
page = self.new_page({'slug': 'test'})
self.assertEqual(get_page_from_string_or_id(str(page.id)), page)
content = Content(page=page, language='en-us', type='test_id',
body=page.id)
content.save()
context = Context({'current_page': page})
context = RequestContext(MockRequest, context)
template = Template('{% load pages_tags %}'
'{% placeholder test_id as str %}'
'{% get_page str as p %}'
'{{ p.slug }}')
self.assertEqual(template.render(context), 'test')
def test_get_page_from_slug_context_variable(self):
"""Test get_page_from_string_or_id with an slug context variable."""
page = self.new_page({'slug': 'test'})
context = Context({'current_page': page})
context = RequestContext(MockRequest, context)
template = Template('{% load pages_tags %}'
'{% placeholder slug as str %}'
'{% get_page str as p %}'
'{{ p.slug }}')
self.assertEqual(template.render(context), 'test')
template = Template('{% load pages_tags %}'
'{% get_page "test" as p %}'
'{{ p.slug }}')
self.assertEqual(template.render(context), 'test')
def test_get_page_template_tag_with_page_arg_as_id(self):
"""Test get_page template tag with page argument given as a page id"""
context = Context({})
pl1 = """{% load pages_tags %}{% get_page 1 as toto %}{{ toto }}"""
template = get_template_from_string(pl1)
self.new_page({'id': 1, 'slug': 'get-page-slug'})
self.assertEqual(template.render(context), u'get-page-slug')
def test_get_page_template_tag_with_variable_containing_page_id(self):
"""Test get_page template tag with page argument given as a page id"""
context = Context({})
pl1 = ('{% load pages_tags %}{% placeholder somepage as page_id %}'
'{% get_page page_id as toto %}{{ toto }}')
template = get_template_from_string(pl1)
page = self.new_page({'id': 1, 'slug': 'get-page-slug',
'somepage': '1'})
context = Context({'current_page': page})
self.assertEqual(template.render(context), u'get-page-slug')
def test_get_page_template_tag_with_variable_containing_page_slug(self):
"""Test get_page template tag with page argument given as a page id"""
context = Context({})
pl1 = ('{% load pages_tags %}{% placeholder somepage as slug %}'
'{% get_page slug as toto %}{{ toto }}')
template = get_template_from_string(pl1)
page = self.new_page({'slug': 'get-page-slug', 'somepage':
'get-page-slug'})
context = Context({'current_page': page})
self.assertEqual(template.render(context), u'get-page-slug')
def test_get_pages_with_tag(self):
"""Test get_page template tag with page argument given as a page id"""
page = self.new_page({'slug': 'footer-page', 'somepage': 'get-footer-slug'})
tag = Tag.objects.create(name="footer")
page.tags.add(tag)
context = Context({})
pl1 = ('{% load pages_tags %}{% get_pages_with_tag "footer" as pages %}{% for page in pages %}{{ page.slug }}{% endfor %}')
template = get_template_from_string(pl1)
self.assertEqual(template.render(context), u'footer-page')
def test_variable_disapear_in_block(self):
"""Try to test the disapearance of a context variable in a block."""
tpl = ("{% load pages_tags %}"
"{% placeholder slug as test_value untranslated %}"
"{% block someblock %}"
"{% get_page test_value as toto %}"
"{{ toto.slug }}"
"{% endblock %}")
template = get_template_from_string(tpl)
page = self.new_page({'slug': 'get-page-slug'})
context = Context({'current_page': page})
self.assertEqual(template.render(context), u'get-page-slug')
def test_get_filename(self):
placeholder = PlaceholderNode("placeholdername")
page = self.new_page({'slug': 'page1'})
data = "myfile.pdf"
self.assertTrue(data in get_filename(page, placeholder, data))
self.assertTrue("page_%d" % page.id in get_filename(page, placeholder, data))
self.assertTrue(placeholder.name in get_filename(page, placeholder, data))
|
class UnitTestCase(TestCase):
'''Django page CMS unit test suite class.'''
def test_date_ordering(self):
'''Test page date ordering feature.'''
pass
def test_widgets_registry(self):
'''Test the widget registry module.'''
pass
def test_page_caculated_status(self):
'''Test calculated status property.'''
pass
def test_placeholder_inherit_content(self):
'''Test placeholder content inheritance between pages.'''
pass
def test_get_page_template_tag(self):
'''Test get_page template tag.'''
pass
def test_placeholder_all_syntaxes(self):
'''Test placeholder syntaxes.'''
pass
def test_parsed_template(self):
'''Test the parsed template syntax.'''
pass
def test_video(self):
'''Test video placeholder.'''
pass
def test_placeholder_untranslated_content(self):
'''Test placeholder untranslated content.'''
pass
def test_urlconf_registry(self):
'''Test urlconf_registry basic functions.'''
pass
def test_permissions(self):
pass
def test_managers(self):
pass
def test_get_content_tag(self):
'''
Test the {% get_content %} template tag
'''
pass
def test_get_content_tag_bug(self):
'''
Make sure that {% get_content %} use the "lang" context variable if
no language string is provided.
'''
pass
def test_show_content_tag(self):
'''
Test the {% show_content %} template tag.
'''
pass
def test_pages_siblings_menu_tag(self):
'''
Test the {% pages_siblings_menu %} template tag.
'''
pass
def test_show_absolute_url_with_language(self):
'''
Test a {% show_absolute_url %} template tag bug.
'''
pass
def test_get_page_ids_by_slug(self):
'''
Test that get_page_ids_by_slug work as intented.
'''
pass
def test_get_language_from_request(self):
'''
Test that get_language_from_request return the default language even if a
unaccepted language is used.
'''
pass
class Req():
def test_default_view_with_language_prefix(self):
'''
Test that everything is working with the language prefix option
activated.
'''
pass
def test_root_page_hidden_slug(self):
'''
Check that the root works properly in every case.
'''
pass
def test_revision_depth(self):
'''
Check that PAGE_CONTENT_REVISION_DEPTH works.
'''
pass
def test_content_dict(self):
'''
Check that content_dict method works.
'''
pass
def test_strict_urls(self):
'''
Check that the strict handling of URLs work as
intended.
'''
pass
def test_remove_slug(self):
'''Test the remove slug function.'''
pass
def test_path_too_long(self):
'''Test that the CMS try to resolve the whole page path to find
a suitable sub path with delegation.'''
pass
def _get_context_page(path):
pass
def test_po_file_imoprt_export(self):
'''Test the po files export and import.'''
pass
def test_page_methods(self):
'''Test that some methods run properly.'''
pass
def test_context_processor(self):
'''Test that the page's context processor is properly activated.'''
pass
def test_get_page_from_id_context_variable(self):
'''Test get_page_from_string_or_id with an id context variable.'''
pass
def test_get_page_from_slug_context_variable(self):
'''Test get_page_from_string_or_id with an slug context variable.'''
pass
def test_get_page_template_tag_with_page_arg_as_id(self):
'''Test get_page template tag with page argument given as a page id'''
pass
def test_get_page_template_tag_with_variable_containing_page_id(self):
'''Test get_page template tag with page argument given as a page id'''
pass
def test_get_page_template_tag_with_variable_containing_page_slug(self):
'''Test get_page template tag with page argument given as a page id'''
pass
def test_get_pages_with_tag(self):
'''Test get_page template tag with page argument given as a page id'''
pass
def test_variable_disapear_in_block(self):
'''Try to test the disapearance of a context variable in a block.'''
pass
def test_get_filename(self):
pass
| 40 | 35 | 18 | 2 | 14 | 2 | 1 | 0.14 | 1 | 14 | 10 | 0 | 37 | 0 | 37 | 47 | 716 | 105 | 536 | 165 | 489 | 75 | 413 | 165 | 366 | 4 | 1 | 1 | 48 |
6,423 |
ArabellaTech/ydcommon
|
ArabellaTech_ydcommon/ydcommon/management/commands/clear_database.py
|
ydcommon.management.commands.clear_database.Command
|
class Command(BaseCommand):
help = 'Clear database'
def handle(self, **options):
cursor = connection.cursor()
cursor.execute('show tables;')
parts = ('DROP TABLE IF EXISTS %s;' % table for (table,) in cursor.fetchall())
sql = 'SET FOREIGN_KEY_CHECKS = 0;\n' + '\n'.join(parts) + 'SET FOREIGN_KEY_CHECKS = 1;\n'
connection.cursor().execute(sql)
|
class Command(BaseCommand):
def handle(self, **options):
pass
| 2 | 0 | 6 | 0 | 6 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 9 | 1 | 8 | 6 | 6 | 0 | 8 | 6 | 6 | 1 | 1 | 0 | 1 |
6,424 |
ArabellaTech/ydcommon
|
ArabellaTech_ydcommon/ydcommon/tests/test_qunit.py
|
ydcommon.tests.test_qunit.QunitTests
|
class QunitTests(TestCase):
@mock.patch(CMD)
@mock.patch(BUILTIN_MODULE + ".open")
@mock.patch('ydcommon.management.commands.run_qunit.render_to_string')
def test_command(self, mock_render, mock_open, mock_status):
mock_status.return_value = CMD_VALUE
mock_render.return_value = ''
call_command('run_qunit')
self.assertTrue('example.html' in mock_status.call_args[0][0])
def test_view(self):
User.objects.create_superuser('foo', 'foo@foo.com' 'me', 'pass')
self.client.login(username='foo', password='pass')
response = self.client.get('/js-tests/')
self.assertContains(response, '/js-tests/example')
self.client.get('/js-tests/example')
self.assertEqual(response.status_code, 200)
|
class QunitTests(TestCase):
@mock.patch(CMD)
@mock.patch(BUILTIN_MODULE + ".open")
@mock.patch('ydcommon.management.commands.run_qunit.render_to_string')
def test_command(self, mock_render, mock_open, mock_status):
pass
def test_view(self):
pass
| 6 | 0 | 7 | 1 | 6 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 20 | 4 | 16 | 5 | 10 | 0 | 13 | 4 | 10 | 1 | 1 | 0 | 2 |
6,425 |
ArabellaTech/ydcommon
|
ArabellaTech_ydcommon/ydcommon/management/commands/check_migrations.py
|
ydcommon.management.commands.check_migrations.Command
|
class Command(BaseCommand):
help = 'Render JSHint'
def handle(self, *args, **options):
cmd = "./manage.py makemigrations %s --check --dry-run"
for app in settings.PROJECT_APPS:
# Skip main app name, e.g., demo.accounts => accounts
submodule = app[app.find('.') + 1:]
code, result = False, False
if sys.version_info > (2, 7):
try:
result = subprocess.check_call(cmd % submodule,
stderr=subprocess.STDOUT,
shell=True)
except subprocess.CalledProcessError as e:
code = e.returncode
result = e.output
else:
code, result = commands.getstatusoutput(cmd % submodule)
if code != 0:
raise Exception('Missing migration')
|
class Command(BaseCommand):
def handle(self, *args, **options):
pass
| 2 | 0 | 19 | 1 | 17 | 1 | 5 | 0.05 | 1 | 2 | 0 | 0 | 1 | 0 | 1 | 1 | 22 | 2 | 19 | 8 | 17 | 1 | 16 | 7 | 14 | 5 | 1 | 3 | 5 |
6,426 |
ArabellaTech/ydcommon
|
ArabellaTech_ydcommon/ydcommon/tests/test_check_test_requirements.py
|
ydcommon.tests.test_check_test_requirements.CheckTestRequirementsTests
|
class CheckTestRequirementsTests(TestCase):
@mock.patch(CMD)
def test_command(self, mock_status):
mock_status.return_value = CMD_VALUE
call_command('check_test_requirements')
self.assertTrue('jshint -v' in mock_status.call_args_list[0][0])
self.assertTrue('phantomjs -v' in mock_status.call_args_list[1][0])
@mock.patch(CMD)
def test_wrong_phantomjs(self, mock_status):
mock_status.return_value = CMD_VALUE_WRONG
with self.assertRaises(SystemExit):
call_command('check_test_requirements')
|
class CheckTestRequirementsTests(TestCase):
@mock.patch(CMD)
def test_command(self, mock_status):
pass
@mock.patch(CMD)
def test_wrong_phantomjs(self, mock_status):
pass
| 5 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 2 | 14 | 2 | 12 | 5 | 7 | 0 | 10 | 3 | 7 | 1 | 1 | 1 | 2 |
6,427 |
ArabellaTech/ydcommon
|
ArabellaTech_ydcommon/ydcommon/views.py
|
ydcommon.views.QunitTestsView
|
class QunitTestsView(TemplateView):
template_name = 'ydcommon/js-tests/index.html'
def get_context_data(self, **kwargs):
context = super(QunitTestsView, self).get_context_data(**kwargs)
if not kwargs['path'] or kwargs['path'] == 'index':
for template_dir in get_template_dirs():
path = os.path.join(template_dir, 'js-tests')
files = [f.replace('.html', '') for f in os.listdir(path)
if os.path.isfile(os.path.join(path, f))]
for ignore_file in IGNORE_QUNIT_HTML_FILES:
if ignore_file in files:
files.remove(ignore_file)
tests = []
for f in files:
tests.append({'file': f,
'url': reverse(qunit_view, args=[f])})
context['tests'] = tests
return context
def get(self, request, *args, **kwargs):
if not kwargs['path']:
self.template_name = 'ydcommon/js-tests/index.html'
else:
self.template_name = 'js-tests/%s.html' % kwargs['path']
return super(QunitTestsView, self).get(request, *args, **kwargs)
|
class QunitTestsView(TemplateView):
def get_context_data(self, **kwargs):
pass
    def get(self, request, *args, **kwargs):
pass
| 3 | 0 | 11 | 0 | 11 | 0 | 4 | 0 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 2 | 26 | 2 | 24 | 10 | 21 | 0 | 21 | 10 | 18 | 6 | 1 | 4 | 8 |
6,428 |
ArabellaTech/ydcommon
|
ArabellaTech_ydcommon/ydcommon/tests/test_utils.py
|
ydcommon.tests.test_utils.UtilsTests
|
class UtilsTests(TestCase):
def test_settings(self):
# test old Django configuration
template_dirs = ['a', 'b']
with override_settings(TEMPLATES=[{'BACKEND': '1'}], TEMPLATE_DIRS=template_dirs):
self.assertListEqual(get_template_dirs(), template_dirs)
# test Django > 1.8
with override_settings(TEMPLATES=[{'BACKEND': '1', 'DIRS': template_dirs}]):
self.assertListEqual(get_template_dirs(), template_dirs)
|
class UtilsTests(TestCase):
def test_settings(self):
pass
| 2 | 0 | 9 | 1 | 6 | 2 | 1 | 0.29 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 10 | 1 | 7 | 3 | 5 | 2 | 7 | 3 | 5 | 1 | 1 | 1 | 1 |
6,429 |
ArabellaTech/ydcommon
|
ArabellaTech_ydcommon/ydcommon/management/commands/run_qunit.py
|
ydcommon.management.commands.run_qunit.Command
|
class Command(BaseCommand):
help = 'Render mvc tests'
def add_arguments(self, parser):
parser.add_argument('-l', '--without-local-paths', action='store_false',
default=True,
dest='local_paths',
help='Render with changing js path to local')
def handle(self, *args, **options):
print('Preparing files')
if 'django.contrib.staticfiles' in settings.INSTALLED_APPS:
call_command('collectstatic', interactive=False)
if 'compressor' in settings.INSTALLED_APPS:
call_command('compress', force=True, verbosity=0)
print('Running tests')
qunit = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'..', '..', 'scripts', 'run-qunit.js')
request = HttpRequest()
data = RequestContext(request).dicts[0]
for template_dir in get_template_dirs():
path = os.path.join(template_dir, 'js-tests')
if not os.path.exists(path):
continue
files = [f.replace('.html', '') for f in os.listdir(path)
if os.path.isfile(os.path.join(path, f))]
for ignore_file in IGNORE_QUNIT_HTML_FILES:
if ignore_file in files:
files.remove(ignore_file)
for filename in files:
file_path = 'js-tests/%s.html' % (filename)
output = render_to_string(file_path, data).encode('utf-8')
if options['local_paths']:
output = str(output)
output = output.replace('src="/static/', "src=\"{0}/"
.format(settings.STATIC_ROOT))
output = output.replace('href="/static/', "href=\"{0}/"
.format(settings.STATIC_ROOT))
with open('reports/%s.html' % filename, 'w') as f:
f.write(output)
cmd = "phantomjs %s file://`pwd`/reports/%s.html junit-xml" % \
(qunit, filename)
if sys.version_info > (2, 7):
try:
result = subprocess.check_output(cmd,
stderr=subprocess.STDOUT,
shell=True)
except subprocess.CalledProcessError as e:
result = e.output
else:
code, result = commands.getstatusoutput(cmd)
with open('reports/junit-%s.xml' % filename, 'w') as f:
f.write(result)
sys.stdout.write(filename.title() + ' - ' + RE_RESULTS.findall(result)[0].replace('\n', ' ').strip())
|
class Command(BaseCommand):
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
pass
| 3 | 0 | 26 | 1 | 25 | 0 | 6 | 0 | 1 | 2 | 0 | 0 | 2 | 0 | 2 | 2 | 56 | 4 | 52 | 18 | 49 | 0 | 41 | 17 | 38 | 11 | 1 | 4 | 12 |
6,430 |
ArabellaTech/ydcommon
|
ArabellaTech_ydcommon/ydcommon/management/commands/jshint.py
|
ydcommon.management.commands.jshint.Command
|
class Command(BaseCommand):
help = 'Render JSHint'
def add_arguments(self, parser):
parser.add_argument('-x', '--xml-output', action='store_true',
default=False,
dest='xml_output',
help='Render as XML')
parser.add_argument('-d', '--dir', action='store',
default=None,
dest='search_dir',
help='Search files in directory')
def handle(self, *args, **options):
search_dir = options['search_dir']
xml_output = options['xml_output']
files = []
if search_dir is not None:
search_dir = [search_dir]
else:
search_dir = settings.STATICFILES_DIRS
for path in search_dir:
cmd = "find %s %s" % (path, JSHINT_FILES_FIND)
if sys.version_info > (2, 7):
try:
result = subprocess.check_output(cmd,
stderr=subprocess.STDOUT,
shell=True)
except subprocess.CalledProcessError:
return
else:
code, result = commands.getstatusoutput(cmd)
for f in result.split('\n'):
if f.strip():
files.append(f.strip())
if xml_output:
cmd = 'jshint --reporter=checkstyle %s' % (' '.join(files))
else:
cmd = 'jshint --show-non-errors %s' % (' '.join(files))
if sys.version_info > (2, 7):
try:
result = subprocess.check_output(cmd,
stderr=subprocess.STDOUT,
shell=True)
code = 0
except subprocess.CalledProcessError as e:
result = e.output
code = e.returncode
else:
code, result = commands.getstatusoutput(cmd)
if result:
sys.stdout.write(result)
if not xml_output:
message = 'OK' if code == 0 else 'Error'
sys.stdout.write('%s! (Checked files: %d)' % (message, len(files)))
|
class Command(BaseCommand):
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
pass
| 3 | 0 | 27 | 1 | 26 | 0 | 7 | 0 | 1 | 1 | 0 | 0 | 2 | 0 | 2 | 2 | 57 | 4 | 53 | 14 | 50 | 0 | 39 | 13 | 36 | 13 | 1 | 3 | 14 |
6,431 |
ArabellaTech/ydcommon
|
ArabellaTech_ydcommon/ydcommon/management/commands/dump_database.py
|
ydcommon.management.commands.dump_database.Command
|
class Command(BaseCommand):
help = """\
Dumps the whole database (mysql only!). Looks at the environment
variable MYSQLDUMP_OPTIONS and uses what it finds there as additional
options."""
args = "[table1 table2 ...]"
requires_model_validation = False
def add_arguments(self, parser):
parser.add_argument(
'--database',
action='store',
dest='database',
default=DEFAULT_DB_ALIAS,
help='Nominates a database which to dump. Defaults to the "default" database.',
)
def handle(self, *args, **kwargs):
connection = connections[kwargs.get('database', DEFAULT_DB_ALIAS)]
settings_dict = connection.settings_dict
cmd_args = [DUMP_COMMAND_NAME]
db = settings_dict['OPTIONS'].get('db', settings_dict['NAME'])
user = settings_dict['OPTIONS'].get('user', settings_dict['USER'])
passwd = settings_dict['OPTIONS'].get('passwd', settings_dict['PASSWORD'])
host = settings_dict['OPTIONS'].get('host', settings_dict['HOST'])
port = settings_dict['OPTIONS'].get('port', settings_dict['PORT'])
if user:
cmd_args += ["--user=%s" % user]
if passwd:
cmd_args += ["--password=%s" % passwd]
if host:
cmd_args += ["--host=%s" % host]
if port:
cmd_args += ["--port=%s" % port]
cmd_args.extend(shlex.split(os.getenv("MYSQLDUMP_OPTIONS", '')))
if len(args):
tables = list(args)
else:
tables = connection.introspection.get_table_list(connection.cursor())
if django.VERSION >= (1, 8):
cmd_args += ["--extended-insert", db] + [str(t.name) for t in tables]
else:
cmd_args += ["--extended-insert", db] + tables
try:
if os.name == 'nt':
sys.exit(os.system(" ".join(cmd_args)))
else:
os.execvp(DUMP_COMMAND_NAME, cmd_args)
except OSError:
# Note that we're assuming OSError means that the client program
# isn't installed. There's a possibility OSError would be raised
# for some other reason, in which case this error message would be
# inaccurate. Still, this message catches the common case.
raise CommandError('You appear not to have the %r program installed or on your path.' % DUMP_COMMAND_NAME)
|
class Command(BaseCommand):
def add_arguments(self, parser):
pass
def handle(self, *args, **kwargs):
pass
| 3 | 0 | 25 | 3 | 21 | 2 | 5 | 0.08 | 1 | 3 | 0 | 0 | 2 | 0 | 2 | 2 | 60 | 8 | 48 | 15 | 45 | 4 | 36 | 15 | 33 | 9 | 1 | 2 | 10 |
6,432 |
ArabellaTech/ydcommon
|
ArabellaTech_ydcommon/ydcommon/management/commands/check_test_requirements.py
|
ydcommon.management.commands.check_test_requirements.Command
|
class Command(BaseCommand):
help = 'Check test requirements'
errors = []
def check_req(self, name, cmd):
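# Run the given shell command and return (exit code, output); record the requirement as missing if it fails.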
print('Checking ' + name)
if sys.version_info > (2, 7):
try:
result = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
code = 0
except subprocess.CalledProcessError as e:
result = e.output
code = 1
else:
code, result = commands.getstatusoutput(cmd)
if code != 0:
self.errors.append(name)
return code, result
def handle(self, *args, **options):
self.check_req('jshint', 'jshint -v')
code, result = self.check_req('phantomjs', 'phantomjs -v')
if code == 0:
versions = result.split('.')
if int(versions[0]) < 1 or int(versions[1]) < 9:
self.errors.append('phantomjs')
if self.errors:
print('Missing: ' + ', '.join(self.errors))
exit(1)
else:
print('OK')
|
class Command(BaseCommand):
def check_req(self, name, cmd):
pass
def handle(self, *args, **options):
pass
| 3 | 0 | 14 | 1 | 13 | 0 | 4 | 0 | 1 | 2 | 0 | 0 | 2 | 0 | 2 | 2 | 32 | 3 | 29 | 10 | 26 | 0 | 27 | 9 | 24 | 4 | 1 | 2 | 8 |
6,433 |
ArabellaTech/ydcommon
|
ArabellaTech_ydcommon/ydcommon/tests/test_jshint.py
|
ydcommon.tests.test_jshint.JSHintTests
|
class JSHintTests(TestCase):
@mock.patch(CMD)
@mock.patch(BUILTIN_MODULE + ".open")
def test_command(self, mock_open, mock_status):
mock_status.return_value = CMD_VALUE
call_command('jshint')
self.assertTrue('--show-non-errors' in mock_status.call_args[0][0])
call_command('jshint', xml_output=True)
self.assertTrue('--reporter=checkstyle' in mock_status.call_args[0][0])
if sys.version_info > (2, 7):
mock_status.side_effect = subprocess.CalledProcessError(
'x', 'x', 'x')
call_command('jshint', xml_output=True)
|
class JSHintTests(TestCase):
@mock.patch(CMD)
@mock.patch(BUILTIN_MODULE + ".open")
def test_command(self, mock_open, mock_status):
pass
| 4 | 0 | 11 | 2 | 9 | 0 | 2 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 15 | 3 | 12 | 3 | 8 | 0 | 10 | 2 | 8 | 2 | 1 | 1 | 2 |
6,434 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/tests/tests.py
|
pyArango.tests.tests.pyArangoTests.test_fields_on_save.Col_on_save
|
class Col_on_save(Collection):
_validation = {
"on_save": True,
"on_set": False,
"allow_foreign_fields": False
}
_fields = {
"str": Field(validators=[String_val()]),
"nestedStr": {
"str": Field(validators=[VAL.Length(1, 51)])
}
}
|
class Col_on_save(Collection):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 53 | 14 | 2 | 12 | 3 | 11 | 0 | 3 | 3 | 2 | 0 | 2 | 0 | 0 |
6,435 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/tests/tests.py
|
pyArango.tests.tests.pyArangoTests.test_validation_default_settings.Col_empty
|
class Col_empty(Collection):
pass
|
class Col_empty(Collection):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 53 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 2 | 0 | 0 |
6,436 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/query.py
|
pyArango.query.RawCursor
|
class RawCursor(object):
"a raw interface to cursors that returns json"
def __init__(self, database, cursorId):
self.database = database
self.connection = self.database.connection
self.id = cursorId
def getURL(self):
return "%s/%s" % (self.database.getCursorsURL(), self.id)
def __next__(self):
"returns the next batch"
r = self.connection.session.put(self.getURL())
data = r.json()
if r.status_code in [400, 404]:
raise CursorError(data["errorMessage"], self.id, data)
return r.json()
|
class RawCursor(object):
'''a raw interface to cursors that returns json'''
def __init__(self, database, cursorId):
pass
def getURL(self):
pass
def __next__(self):
'''returns the next batch'''
pass
| 4 | 2 | 4 | 0 | 4 | 0 | 1 | 0.15 | 1 | 1 | 1 | 0 | 3 | 3 | 3 | 3 | 17 | 2 | 13 | 9 | 9 | 2 | 13 | 9 | 9 | 2 | 1 | 1 | 4 |
6,437 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/query.py
|
pyArango.query.SimpleQuery
|
class SimpleQuery(Query):
"Simple queries are attached to and instanciated by a collection"
def __init__(self, collection, queryType, rawResults, json_encoder = None,
**queryArgs):
self.collection = collection
self.connection = self.collection.database.connection
payload = {'collection' : collection.name}
payload.update(queryArgs)
payload = json.dumps(payload, cls=json_encoder, default=str)
URL = "%s/simple/%s" % (collection.database.getURL(), queryType)
request = self.connection.session.put(URL, data = payload)
Query.__init__(self, request, collection.database, rawResults)
def _raiseInitFailed(self, request):
data = request.json()
raise SimpleQueryError(data["errorMessage"], data)
def _developDoc(self, i):
docJson = self.result[i]
if self.collection.type == CONST.COLLECTION_EDGE_TYPE:
self.result[i] = Edge(self.collection, docJson)
else:
self.result[i] = Document(self.collection, docJson)
|
class SimpleQuery(Query):
'''Simple queries are attached to and instantiated by a collection'''
def __init__(self, collection, queryType, rawResults, json_encoder = None,
**queryArgs):
pass
def _raiseInitFailed(self, request):
pass
def _developDoc(self, i):
pass
| 4 | 1 | 7 | 1 | 6 | 0 | 1 | 0.05 | 1 | 4 | 3 | 0 | 3 | 2 | 3 | 14 | 26 | 5 | 20 | 12 | 15 | 1 | 18 | 11 | 14 | 2 | 2 | 1 | 4 |
6,438 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/tasks.py
|
pyArango.tasks.Tasks
|
class Tasks:
"""Tasks for database."""
URL = '/_api/tasks'
def __init__(self, database):
"""Initialise the database."""
self.database = database
def __call__(self):
"""All the active tasks in the db."""
# response = self.database.action.get(self.URL)
# response.raise_for_status()
# return response.json()
return self.fetch()
def drop(self):
"""delete all tasks"""
for task in self.fetch():
self.delete(task["id"])
def fetch(self, task_id=None):
"""Fetch the task for given task_id. If task_id is None return all tasks """
if task_id is not None:
url = '{tasks_url}/{task_id}'.format(
tasks_url=self.URL, task_id=task_id
)
else:
url = self.URL
response = self.database.action.get(url)
response.raise_for_status()
return response.json()
def create(
self, name, command, params=None,
period=None, offset=None, task_id=None
):
"""Create a task with given command and its parameters."""
task = {'name': name, 'command': command, 'params': params}
if period is not None:
task['period'] = period
if offset is not None:
task['offset'] = offset
if task_id is not None:
task['id'] = task_id
url = '{tasks_url}/{task_id}'.format(
tasks_url=self.URL, task_id=task_id
)
else:
url = self.URL
response = self.database.action.post(url, json=task)
response.raise_for_status()
return response.json()
def delete(self, task_id):
"""Delete the task for given task_id."""
url = '{tasks_url}/{task_id}'.format(
tasks_url=self.URL, task_id=task_id
)
response = self.database.action.delete(url)
response.raise_for_status()
return response.json()
|
class Tasks:
'''Tasks for database.'''
def __init__(self, database):
'''Initialise the database.'''
pass
def __call__(self):
'''All the active tasks in the db.'''
pass
def drop(self):
'''delete all tasks'''
pass
def fetch(self, task_id=None):
'''Fetch the task with the given task_id. If task_id is None, return all tasks.'''
pass
def create(
self, name, command, params=None,
period=None, offset=None, task_id=None
):
'''Create a task with given command and its parameters.'''
pass
def delete(self, task_id):
'''Delete the task for given task_id.'''
pass
| 7 | 7 | 9 | 1 | 7 | 2 | 2 | 0.22 | 0 | 0 | 0 | 0 | 6 | 1 | 6 | 6 | 65 | 10 | 45 | 20 | 35 | 10 | 34 | 17 | 27 | 4 | 0 | 2 | 11 |
6,439 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/tests/tests.py
|
pyArango.tests.tests.pyArangoTests
|
class pyArangoTests(unittest.TestCase):
def setUp(self):
if __name__ == "__main__":
global ARANGODB_URL
global ARANGODB_ROOT_USERNAME
global ARANGODB_ROOT_PASSWORD
else:
ARANGODB_URL = os.getenv('ARANGODB_URL', 'http://127.0.0.1:8529')
ARANGODB_ROOT_USERNAME = os.getenv('ARANGODB_ROOT_USERNAME', 'root')
ARANGODB_ROOT_PASSWORD = os.getenv('ARANGODB_ROOT_PASSWORD', 'root')
self.conn = Connection(arangoURL=ARANGODB_URL, username=ARANGODB_ROOT_USERNAME, password=ARANGODB_ROOT_PASSWORD)
try:
self.conn.createDatabase(name = "test_db_2")
except CreationError:
pass
self.db = self.conn["test_db_2"]
self.admin = Admin(self.conn)
self.is_cluster = self.admin.is_cluster()
self.server_version = self.conn.getVersion()
self._reset()
def _reset(self):
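# Drop every non-system collection, graph, task and test user so each test starts from a clean database.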
self.db.reload()
self.db.tasks.drop()
for colName in self.db.collections:
if not self.db[colName].isSystem:
self.db[colName].delete()
for graph in self.db.graphs.values():
graph.delete()
for user in self.conn.users.fetchAllUsers():
if user["username"].find("pyArangoTest") > -1:
user.delete()
self.conn.disconnectSession()
def tearDown(self):
self._reset()
def createManyUsers(self, nbUsers):
collection = self.db.createCollection(name = "users")
for i in range(nbUsers):
doc = collection.createDocument()
doc["name"] = "Tesla-%d" % i
doc["number"] = i
doc["species"] = "human"
doc.save()
return collection
def createManyUsersBulk(self, nbUsers, batchSize):
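# Create nbUsers documents inside a single BulkOperation and return the collection together with the created docs.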
docs = []
collection = self.db.createCollection(name = "users")
with BulkOperation(collection, batchSize=batchSize) as col:
for i in range(nbUsers):
doc = col.createDocument()
docs.append(doc)
doc["name"] = "Tesla-%d" % i
doc["number"] = i
doc["species"] = "human"
doc.save()
return (collection, docs)
def patchManyUsersBulk(self, collection, batchSize, skip, docs):
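# Patch every skip-th document within one bulk operation and return how many documents were patched.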
count = 0
with BulkOperation(collection, batchSize=batchSize) as col:
i = 0
while i < len(docs):
docs[i]["species"] = "robot"
docs[i]["xtrue"] = False
docs[i].patch()
i += skip
count += 1
return count
def deleteManyUsersBulk(self, collection, batchSize, skip, docs):
count = 0
with BulkOperation(collection, batchSize=batchSize) as col:
i = 0
while i < len(docs):
docs[i].delete()
i += skip
count += 1
return count
# @unittest.skip("stand by")
def test_to_default(self):
class theCol(Collection):
_fields = {
'address' : {
'street' : Field(default="Paper street"),
},
"name": Field(default = "Tyler Durden")
}
col = self.db.createCollection("theCol")
doc = col.createDocument()
self.assertEqual(doc["address"]["street"], "Paper street")
self.assertEqual(doc["name"], "Tyler Durden")
doc["address"]["street"] = "North street"
doc["name"] = "Jon Snow"
self.assertEqual(doc["address"]["street"], "North street")
self.assertEqual(doc["name"], "Jon Snow")
doc.to_default()
self.assertEqual(doc["address"]["street"], "Paper street")
self.assertEqual(doc["name"], "Tyler Durden")
# @unittest.skip("stand by")
def test_fill_default(self):
class theCol(Collection):
_fields = {
"name": Field( default="Paper"),
"dct1":{
"num": Field(default=13),
"dct2":{
"str": Field(default='string'),
}
}
}
_validation = {
"on_save" : True,
"on_set" : True,
"allow_foreign_fields" : False
}
col = self.db.createCollection("theCol")
doc = col.createDocument()
doc['name'] = 'Orson'
doc['dct1']['num'] = None
doc['dct1']['dct2']['str'] = None
doc.fill_default()
self.assertEqual(doc['name'], 'Orson')
self.assertEqual(doc['dct1']['num'], 13)
self.assertEqual(doc['dct1']['dct2']['str'], 'string')
# @unittest.skip("stand by")
def test_fill_default_on_save(self):
class theCol(Collection):
_fields = {
"name": Field( default="Paper"),
"dct1":{
"num": Field(default=13),
"dct2":{
"str": Field(default='string'),
}
}
}
_validation = {
"on_save" : True,
"on_set" : True,
"allow_foreign_fields" : False
}
col = self.db.createCollection("theCol")
doc = col.createDocument()
doc['name'] = 'Orson'
doc['dct1']['num'] = None
doc['dct1']['dct2']['str'] = None
store = doc.getStore()
doc.save()
self.assertEqual(store['name'], 'Orson')
self.assertEqual(store['dct1']['num'], None)
self.assertEqual(store['dct1']['dct2']['str'], None)
self.assertEqual(doc['name'], 'Orson')
self.assertEqual(doc['dct1']['num'], 13)
self.assertEqual(doc['dct1']['dct2']['str'], 'string')
doc2 = col[doc['_key']]
self.assertEqual(doc2['name'], 'Orson')
self.assertEqual(doc2['dct1']['num'], 13)
self.assertEqual(doc2['dct1']['dct2']['str'], 'string')
# @unittest.skip("stand by")
def test_bulk_operations(self):
(collection, docs) = self.createManyUsersBulk(55, 17)
self.assertEqual(collection.count(), len(docs))
newCount = self.patchManyUsersBulk(collection, 7, 3, docs)
aql = "let length = (FOR c IN @@col FILTER c.xtrue == false RETURN 1) RETURN count(length)"
q = self.db.AQLQuery(aql, rawResults = True, bindVars = {"@col": collection.name})
self.assertEqual(len(q.result), 1)
self.assertEqual(q[0], newCount)
deleteCount = self.deleteManyUsersBulk(collection, 9, 4, docs)
self.assertEqual(len(docs) - deleteCount, collection.count())
# mixing bulk operations not supported, should throw:
with BulkOperation(collection, batchSize=99) as col:
doc = col.createDocument()
doc.save()
try:
docs[2]['something'] = 'abc'
docs[2].patch()
self.fail("should have raised while patching")
except UpdateError:
pass
try:
docs[2].delete()
self.fail("should have raised while deleting")
except UpdateError:
pass
with BulkOperation(collection, batchSize=99) as col:
docs[1].delete()
try:
docs[2]['something'] = 'abc'
docs[2].patch()
self.fail("should have raised")
except UpdateError:
pass
try:
doc = col.createDocument()
doc.save()
self.fail("should have raised")
except UpdateError:
pass
collection.delete()
# @unittest.skip("stand by")
def test_bulk_import(self):
usersCollection = self.db.createCollection(name = "users")
nbUsers = 100
users = []
for i in range(nbUsers):
user = {}
user["name"] = "Tesla-%d" % i
user["number"] = i
user["species"] = "human"
users.append(user)
usersCollection.importBulk(users)
self.assertEqual(usersCollection.count(), len(users))
# @unittest.skip("stand by")
def test_bulk_import_exception(self):
usersCollection = self.db.createCollection(name="users")
nbUsers = 2
users = []
for i in range(nbUsers):
user = {}
user["_key"] = "tesla"
user["name"] = "Tesla-%d" % i
user["number"] = i
user["species"] = "human"
users.append(user)
with self.assertRaises(CreationError):
usersCollection.importBulk(users, onDuplicate="error", complete=True)
expectCount = 0
if self.is_cluster:
# The cluster can't do a complete rollback.
expectCount = 1
self.assertEqual(usersCollection.count(), expectCount)
# @unittest.skip("stand by")
def test_bulk_import_error_return_value(self):
usersCollection = self.db.createCollection(name="users")
nbUsers = 2
users = []
for i in range(nbUsers):
user = {}
user["_key"] = "tesla"
user["name"] = "Tesla-%d" % i
user["number"] = i
user["species"] = "human"
users.append(user)
result = usersCollection.importBulk(users, onDuplicate="error")
self.assertEqual(result, {
'created': 1,
'empty': 0,
'error': False,
'errors': 1,
'ignored': 0,
'updated': 0
})
# @unittest.skip("stand by")
def test_bulkSave(self):
collection = self.db.createCollection(name = "lops")
nbUsers = 100
docs = []
for i in range(nbUsers):
doc = collection.createDocument()
doc["name"] = "Tesla-%d" % i
docs.append(doc)
res = collection.bulkSave(docs)
self.assertEqual(res, nbUsers)
# @unittest.skip("stand by")
def test_bulkSave_dict(self):
collection = self.db.createCollection(name = "lops")
nbUsers = 100
docs = []
for i in range(nbUsers):
doc = {}
doc["name"] = "Tesla-%d" % i
docs.append(doc)
res = collection.bulkSave(docs)
self.assertEqual(res, nbUsers)
# @unittest.skip("stand by")
def test_collection_create_delete(self):
col = self.db.createCollection(name = "to_be_erased")
self.assertTrue(self.db.hasCollection("to_be_erased"))
self.assertFalse(self.db.hasCollection("no_collection_by_that_name"))
d1 = col.createDocument()
d1["name"] = "tesla"
d1.save()
self.assertEqual(1, col.count())
self.db["to_be_erased"].delete()
self.assertRaises(DeletionError, self.db["to_be_erased"].delete)
# @unittest.skip("stand by")
def test_edges_create_delete(self):
ed = self.db.createCollection(className = "Edges", name = "to_be_erased")
col = self.db.createCollection(name = "to_be_erased_to")
d1 = col.createDocument()
d1["name"] = "tesla"
d1.save()
d2 = col.createDocument()
d2["name"] = "tesla2"
d2.save()
d3 = col.createDocument()
d3["name"] = "tesla3"
d3.save()
self.db.reloadCollections()
ed = self.db.collections["to_be_erased"]
e1 = ed.createEdge({"name": 'tesla-edge'})
e1.links(d1, d2)
# create an edge with one saved and one unsaved attribute:
e2 = ed.createEdge()
e2['blarg'] = 'blub'
e2.links(d1, d3)
self.assertEqual(1, len(e2))
e2['blub'] = 'blarg'
self.assertEqual(2, len(e2))
# should have two edges in total:
self.assertEqual(2, ed.count())
# deleting property:
del e2['blarg']
self.assertEqual(1, len(e2))
e2.save()
# loading edge from collection, revify deletion, addition
e2_ = ed[e2._key]
self.assertEqual(1, len(e2_))
self.assertNotIn('blarg', e2_)
self.assertIn('blub', e2_)
# modify once more:
e2['start_date'] = "2018-03-23T23:27:40.029Z"
e2['end_date'] = "2018-04-13T00:00:00.000Z"
e2.save()
# load it once more
e2_ = ed[e2._key]
# should have saved properties:
self.assertEqual(e2.start_date, e2_.start_date)
self.db["to_be_erased"].delete()
self.assertRaises(DeletionError, self.db["to_be_erased"].delete)
# @unittest.skip("stand by")
def test_collection_count_truncate(self):
collection = self.db.createCollection(name = "lala")
collection.truncate()
doc = collection.createDocument()
doc.save()
doc2 = collection.createDocument()
doc2.save()
self.assertEqual(2, collection.count())
collection.truncate()
self.assertEqual(0, collection.count())
# @unittest.skip("stand by")
def test_document_create_update_delete(self):
collection = self.db.createCollection(name = "lala")
doc = collection.createDocument()
doc["name"] = "Tesla"
self.assertTrue(doc._id is None)
doc.save()
if self.server_version["version"] >= "3.5" and self.is_cluster:
shardID = doc.getResponsibleShard()
self.assertTrue(shardID.startswith("s"))
self.assertTrue(doc._id is not None)
did = copy.copy(doc._id)
doc["name"] = "Tesla2"
doc.save()
self.assertEqual(doc._id, did)
doc.delete()
self.assertTrue(doc._id is None)
# @unittest.skip("stand by")
def test_document_fetch_by_key(self):
collection = self.db.createCollection(name = "lala")
doc = collection.createDocument()
doc["name"] = 'iop'
doc.save()
doc2 = collection.fetchDocument(doc._key)
self.assertEqual(doc._id, doc2._id)
def test_database_contains_id(self):
collection = self.db.createCollection(name="lala")
doc = collection.createDocument()
doc["name"] = 'iop'
doc.save()
result = doc["_id"] in self.db
self.assertTrue(result)
result = doc["_id"] + '1' in self.db
self.assertFalse(result)
# @unittest.skip("stand by")
def test_document_set_private_w_rest(self):
collection = self.db.createCollection(name = "lala")
data = {
"_key": "key",
"name": "iop"
}
doc = collection.createDocument(data)
self.assertEqual(doc["_key"], doc._key)
self.assertEqual(doc["_key"], data["_key"])
# @unittest.skip("stand by")
def test_document_has_field(self):
class theCol(Collection):
_fields = {
'address' : {
'street' : Field(),
}
}
col = self.db.createCollection("theCol")
self.assertTrue(self.db['theCol'].hasField('address'))
self.assertTrue(self.db['theCol'].hasField('address.street'))
self.assertFalse(self.db['theCol'].hasField('street'))
self.assertFalse(self.db['theCol'].hasField('banana'))
self.assertFalse(self.db['theCol'].hasField('address.banana'))
# @unittest.skip("stand by")
def test_document_create_patch(self):
collection = self.db.createCollection(name = "lala")
doc = collection.createDocument()
doc["name"] = "Tesla3"
self.assertRaises(ValueError, doc.patch)
doc.save()
doc.patch()
# @unittest.skip("stand by")
def test_aql_validation(self):
collection = self.db.createCollection(name = "users")
doc = collection.createDocument()
doc["name"] = "Tesla"
doc.save()
aql = "FOR c IN users FILTER c.name == @name LIMIT 2 RETURN c.name"
bindVars = {'name' : 'Tesla-3'}
# @unittest.skip("stand by")
def test_aql_query_rawResults_true(self):
self.createManyUsers(100)
aql = "FOR c IN users FILTER c.name == @name LIMIT 10 RETURN c.name"
bindVars = {'name' : 'Tesla-3'}
q = self.db.AQLQuery(aql, rawResults = True, batchSize = 10, bindVars = bindVars)
self.assertEqual(len(q.result), 1)
self.assertEqual(q[0], 'Tesla-3')
# @unittest.skip("stand by")
def test_aql_query_rawResults_false(self):
self.createManyUsers(100)
aql = "FOR c IN users FILTER c.name == @name LIMIT 10 RETURN c"
bindVars = {'name' : 'Tesla-3'}
q = self.db.AQLQuery(aql, rawResults = False, batchSize = 10, bindVars = bindVars)
self.assertEqual(len(q.result), 1)
self.assertEqual(q[0]['name'], 'Tesla-3')
self.assertTrue(isinstance(q[0], Document))
# @unittest.skip("stand by")
def test_aql_query_batch(self):
nbUsers = 100
self.createManyUsers(nbUsers)
aql = "FOR c IN users LIMIT %s RETURN c" % nbUsers
q = self.db.AQLQuery(aql, rawResults = False, batchSize = 1, count = True)
lstRes = []
for i in range(nbUsers):
lstRes.append(q[0]["number"])
try:
q.nextBatch()
except StopIteration:
self.assertEqual(i, nbUsers-1)
lstRes.sort()
self.assertEqual(lstRes, list(range(nbUsers)))
self.assertEqual(q.count, nbUsers)
# @unittest.skip("stand by")
def test_simple_query_by_example_batch(self):
nbUsers = 100
col = self.createManyUsers(nbUsers)
example = {'species' : "human"}
q = col.fetchByExample(example, batchSize = 1, count = True)
lstRes = []
for i in range(nbUsers+5):
lstRes.append(q[0]["number"])
try:
q.nextBatch()
except StopIteration as e:
self.assertEqual(i, nbUsers-1)
break
lstRes.sort()
self.assertEqual(lstRes, list(range(nbUsers)))
self.assertEqual(q.count, nbUsers)
# @unittest.skip("stand by")
def test_simple_query_all_batch(self):
nbUsers = 100
col = self.createManyUsers(nbUsers)
q = col.fetchAll(batchSize = 1, count = True)
lstRes = []
for i in range(nbUsers):
lstRes.append(q[0]["number"])
try:
q.nextBatch()
except StopIteration:
self.assertEqual(i, nbUsers-1)
lstRes.sort()
self.assertEqual(lstRes, list(range(nbUsers)))
self.assertEqual(q.count, nbUsers)
# @unittest.skip("stand by")
def test_simple_query_iterator_all_batch_rawResults_true(self):
nbUsers = 20
col = self.createManyUsers(nbUsers)
q = col.fetchAll(batchSize=5, count=True, rawResults=True)
lstRes = []
for user in q:
lstRes.append(user["number"])
self.assertEqual(sorted(lstRes), list(range(nbUsers)))
self.assertEqual(q.count, nbUsers)
# @unittest.skip("stand by")
def test_nonRaw_creation_error(self):
col = self.createManyUsers(1)
docs = self.db.AQLQuery("for x in users return { name : x.name }", batchSize = 1);
with self.assertRaises(CreationError):
doc0 = docs[0]
# @unittest.skip("stand by")
def test_empty_query(self):
col = self.createManyUsers(1)
example = {'species' : "rat"}
q = col.fetchByExample(example, batchSize = 1, count = True)
self.assertEqual(q.result, [])
# @unittest.skip("stand by")
def test_cursor(self):
nbUsers = 2
col = self.createManyUsers(nbUsers)
q = col.fetchAll(batchSize = 1, count = True)
q2 = Cursor(q.database, q.cursor.id, rawResults = True)
lstRes = [q.result[0]["number"], q2.result[0]["number"]]
lstRes.sort()
self.assertEqual(lstRes, list(range(nbUsers)))
self.assertEqual(q.count, nbUsers)
# @unittest.skip("stand by")
def test_fields_on_set(self):
import pyArango.validation as VAL
class Col_on_set(Collection):
_validation = {
"on_save" : False,
"on_set" : True,
"allow_foreign_fields" : False
}
_fields = {
"str" : Field(validators = [VAL.Length(50, 51)]),
"notNull" : Field(validators = [VAL.NotNull()]),
"nestedStr": {
"str": Field(validators = [VAL.Length(50, 51)])
}
}
myCol = self.db.createCollection('Col_on_set')
doc = myCol.createDocument()
self.assertRaises(ValidationError, doc.__setitem__, 'str', "qwer")
self.assertRaises(ValidationError, doc["nestedStr"].__setitem__, 'str', "qwer")
self.assertRaises(ValidationError, doc.__setitem__, 'notNull', None)
self.assertRaises(SchemaViolation, doc.__setitem__, 'foreigner', None)
# @unittest.skip("stand by")
def test_fields_on_save(self):
import pyArango.validation as VAL
import types
class String_val(VAL.Validator):
def validate(self, value):
if not isinstance(value, bytes) and not isinstance(value, str):
raise ValidationError("Field value must be a string")
return True
class Col_on_save(Collection):
_validation = {
"on_save" : True,
"on_set" : False,
"allow_foreign_fields" : False
}
_fields = {
"str" : Field(validators = [String_val()]),
"nestedStr": {
"str": Field(validators = [VAL.Length(1, 51)])
}
}
myCol = self.db.createCollection('Col_on_save')
doc = myCol.createDocument()
doc["str"] = 3
self.assertRaises(InvalidDocument, doc.save)
doc = myCol.createDocument()
doc["str"] = "string"
self.assertRaises(SchemaViolation, doc.__setitem__, "foreigner", "string")
doc = myCol.createDocument()
doc["nestedStr"] = {}
doc["nestedStr"]["str"] = 3
doc["str"] = "string"
self.assertRaises(InvalidDocument, doc.save)
doc = myCol.createDocument()
doc["nestedStr"] = {}
doc["nestedStr"]["str"] = "string"
doc["str"] = "string"
doc.save()
self.assertEqual(myCol[doc._key]._store.getStore(), doc._store.getStore())
doc["nestedStr"]["str"] = "string2"
self.assertTrue(len(doc._store.getPatches()) > 0)
doc.patch()
self.assertEqual(myCol[doc._key]._store.getStore(), doc._store.getStore())
# @unittest.skip("stand by")
def test_unvalidated_nested_fields(self):
import pyArango.validation as VAL
class String_val(VAL.Validator):
def validate(self, value):
if not isinstance(value, bytes) and not isinstance(value, str):
raise ValidationError("Field value must be a string")
return True
class Col_on_save(Collection):
_validation = {
"on_save": True,
"on_set": False,
"allow_foreign_fields": True
}
_fields = {
"str": Field(validators=[String_val()]),
"nestedSomething": Field()
}
myCol = self.db.createCollection('Col_on_save')
doc = myCol.createDocument()
doc["str"] = 3
doc["nestedSomething"] = {
"some_nested_data": "data"
}
self.assertRaises(InvalidDocument, doc.save)
doc = myCol.createDocument()
doc["str"] = "string"
doc["nestedSomething"] = {
"some_nested_data": "data"
}
doc.save()
self.assertEqual(myCol[doc._key]._store.getStore(), doc._store.getStore())
doc["nestedSomething"]["some_nested_data"] = "data"
self.assertTrue(len(doc._store.getPatches()) > 0)
doc.patch()
self.assertEqual(myCol[doc._key]._store.getStore(), doc._store.getStore())
# @unittest.skip("stand by")
def test_document_cache(self):
class DummyDoc(object):
def __init__(self, key):
self._key = key
self.hhh = "hhh"
self.store = {
"a" : 1
}
def __getitem__(self, k):
return self.store[k]
def __setitem__(self, k, v):
self.store[k] = v
def __repr__(self):
return repr(self._key)
docs = []
for i in range(10):
docs.append(DummyDoc(i))
cache = DocumentCache(5)
for doc in docs:
cache.cache(doc)
self.assertEqual(cache.head._key, doc._key)
self.assertEqual(list(cache.cacheStore.keys()), [5, 6, 7, 8, 9])
self.assertEqual(cache.getChain(), [9, 8, 7, 6, 5])
doc = cache[5]
self.assertEqual(doc.hhh, "hhh")
doc["a"] = 3
self.assertEqual(doc["a"], 3)
self.assertEqual(cache.head._key, doc._key)
self.assertEqual(cache.getChain(), [5, 9, 8, 7, 6])
# @unittest.skip("stand by")
def test_validation_default_settings(self):
class Col_empty(Collection):
pass
class Col_empty2(Collection):
_validation = {
"on_save" : False,
}
c = Col_empty
self.assertEqual(c._validation, Collection_metaclass._validationDefault)
c = Col_empty2
self.assertEqual(c._validation, Collection_metaclass._validationDefault)
# @unittest.skip("stand by")
def test_validation_default_invalid_key(self):
def keyTest():
class Col(Collection):
_validation = {
"on_sav" : True,
}
self.assertRaises(KeyError, keyTest)
# @unittest.skip("stand by")
def test_validation_default_invalid_value(self):
def keyTest():
class Col(Collection):
_validation = {
"on_save" : "wrong",
}
self.assertRaises(ValueError, keyTest)
# @unittest.skip("stand by")
def test_collection_type_creation(self):
class Edgy(Edges):
pass
class Coly(Collection):
pass
edgy = self.db.createCollection("Edgy")
self.assertEqual(edgy.type, COLLECTION_EDGE_TYPE)
coly = self.db.createCollection("Coly")
self.assertEqual(coly.type, COLLECTION_DOCUMENT_TYPE)
# @unittest.skip("stand by")
def test_save_edge(self):
class Human(Collection):
_fields = {
"name" : Field()
}
class Relation(Edges):
_fields = {
"ctype" : Field()
}
humans = self.db.createCollection("Human")
rels = self.db.createCollection("Relation")
tete = humans.createDocument()
tete["name"] = "tete"
tete.save()
toto = humans.createDocument()
toto["name"] = "toto"
toto.save()
link = rels.createEdge()
link["ctype"] = "brother"
link.links(tete, toto)
sameLink = rels[link._key]
self.assertEqual(sameLink["ctype"], link["ctype"])
self.assertEqual(sameLink["_from"], tete._id)
self.assertEqual(sameLink["_to"], toto._id)
# @unittest.skip("stand by")
def test_get_edges(self):
class Human(Collection):
_fields = {
"number" : Field()
}
class Relation(Edges):
_fields = {
"number" : Field()
}
humans = self.db.createCollection("Human")
rels = self.db.createCollection("Relation")
humansList = []
for i in range(10):
h = humans.createDocument()
h["number"] = i
humansList.append(h)
h.save()
for i in range(10):
e = rels.createEdge()
e["number"] = i
if i % 2 == 1:
e.links(humansList[0], humansList[i])
else:
e.links(humansList[-1], humansList[i])
outs = humansList[0].getOutEdges(rels)
self.assertEqual(len(outs), 5)
for o in outs:
self.assertEqual(o["number"] % 2, 1)
ins = humansList[-1].getOutEdges(rels)
self.assertEqual(len(ins), 5)
for i in ins:
self.assertEqual(i["number"] % 2, 0)
# @unittest.skip("stand by")
def test_graph(self):
class Humans(Collection):
_fields = {
"name" : Field()
}
class Friend(Edges):
_fields = {
"number" : Field()
}
class MyGraph(Graph):
_edgeDefinitions = (EdgeDefinition("Friend", fromCollections = ["Humans"], toCollections = ["Humans"]), )
_orphanedCollections = []
humans = self.db.createCollection("Humans")
rels = self.db.createCollection("Friend")
g = self.db.createGraph("MyGraph")
h1 = g.createVertex('Humans', {"name" : "simba"})
h2 = g.createVertex('Humans', {"name" : "simba2"})
h3 = g.createVertex('Humans', {"name" : "simba3"})
h4 = g.createVertex('Humans', {"name" : "simba4"})
g.link('Friend', h1, h3, {})
g.link('Friend', h2, h3, {})
self.assertEqual(len(h3.getEdges(rels)), 2)
self.assertEqual(len(h2.getEdges(rels)), 1)
g.deleteVertex(h3)
self.assertEqual(len(h2.getEdges(rels)), 0)
g.link('Friend', h1, h2, {})
self.assertEqual(len(h2.getEdges(rels)), 1)
g.link('Friend', h4, h1, {})
g.link('Friend', h4, h2, {})
g.unlink('Friend', h4, h2)
self.assertEqual(len(h4.getEdges(rels)), 1)
h5 = g.createVertex('Humans', {"name" : "simba5"})
h6 = g.createVertex('Humans', {"name" : "simba6"})
for i in range(200):
g.link('Friend', h5, h6, {})
self.assertEqual(len(h5.getEdges(rels)), 200)
g.unlink('Friend', h5, h6)
self.assertEqual(len(h5.getEdges(rels)), 0)
# g.deleteEdge()
# @unittest.skip("stand by")
def test_traversal(self):
class persons(Collection):
_fields = {
"name" : Field()
}
class knows(Edges):
_fields = {
"number" : Field()
}
class knows_graph(Graph):
_edgeDefinitions = (EdgeDefinition("knows", fromCollections = ["persons"], toCollections = ["persons"]), )
_orphanedCollections = []
pers = self.db.createCollection("persons")
rels = self.db.createCollection("knows")
g = self.db.createGraph("knows_graph")
alice = g.createVertex("persons", {"_key" : "alice"})
bob = g.createVertex("persons", {"_key" : "bob"})
charlie = g.createVertex("persons", {"_key" : "charlie"})
dave = g.createVertex("persons", {"_key" : "dave"})
eve = g.createVertex("persons", {"_key" : "eve"})
e = g.link("knows", alice, alice, {'me' : "aa"})
g.link("knows", alice, bob, {})
g.link("knows", bob, charlie, {})
g.link("knows", bob, dave, {})
g.link("knows", eve, alice, {})
g.link("knows", eve, bob, {})
travVerts = g.traverse(alice, direction = "outbound")["visited"]["vertices"]
_keys = set()
for v in travVerts:
_keys.add(v["_key"])
pers = [alice, bob, charlie, dave]
for p in pers:
self.assertTrue(p._key in _keys)
travVerts = g.traverse(alice, direction = "inbound")["visited"]["vertices"]
_keys = set()
for v in travVerts:
_keys.add(v["_key"])
pers = [alice, eve]
for p in pers:
self.assertTrue(p._key in _keys)
travVerts = g.traverse(alice, direction = "any")["visited"]["vertices"]
_keys = set()
for v in travVerts:
_keys.add(v["_key"])
pers = [alice, bob, charlie, dave, eve]
for p in pers:
self.assertTrue(p._key in _keys)
# @unittest.skip("stand by")
def testIndexes(self):
haveNamedIndices = self.server_version["version"] >= "3.5"
def getName(name):
if haveNamedIndices:
return name
return None
class persons(Collection):
_fields = {
"name" : Field(),
"Description": Field(),
"geo": Field(),
"skip": Field()
}
pers = self.db.createCollection("persons")
hashInd = pers.ensureHashIndex(["name"], name = getName("hi1"))
hashInd.delete()
hashInd2 = pers.ensureHashIndex(["name"], name = getName("hi2"))
if haveNamedIndices:
self.assertEqual(pers.getIndex("hi2"), hashInd2)
pers.getIndexes()
# after reloading the indices, some more attributes will be there, thus
# only compare for its actual ID:
self.assertEqual(pers.getIndex("hi2").infos['id'], hashInd2.infos['id'])
self.assertTrue(hashInd.infos["id"] != hashInd2.infos["id"])
persInd = pers.ensurePersistentIndex(["name2"], name = getName("pers"))
persInd.delete()
persInd = pers.ensurePersistentIndex(["name2"], name = getName("pers"))
self.assertTrue(persInd.infos["id"] != hashInd.infos["id"])
if self.server_version["version"] >= "3.5":
TTLInd = pers.ensureTTLIndex(["name3"], 123456, name = getName("ttl"))
TTLInd.delete()
TTLInd2 = pers.ensureTTLIndex(["name3"], 897345, name = getName("ttl"))
self.assertTrue(TTLInd.infos["id"] != hashInd.infos["id"])
ftInd = pers.ensureFulltextIndex(["Description"], name = getName("ft"))
ftInd.delete()
ftInd2 = pers.ensureFulltextIndex(["Description"], name = getName("ft2"))
self.assertTrue(ftInd.infos["id"] != ftInd2.infos["id"])
skipInd = pers.ensureFulltextIndex(["skip"], name = getName("ft3"))
skipInd.delete()
skipInd2 = pers.ensureFulltextIndex(["skip"], name = getName("skip"))
self.assertTrue(skipInd.infos["id"] != skipInd2.infos["id"])
geoInd = pers.ensureFulltextIndex(["geo"], name = getName("geo"))
geoInd.delete()
geoInd2 = pers.ensureFulltextIndex(["geo"], name = getName("geo2"))
self.assertTrue(geoInd.infos["id"] != geoInd2.infos["id"])
# @unittest.skip("stand by")
def test_transaction(self):
transaction = self.db.transaction(
collections = {},
action = "function (params) {return params['some_param'];}",
params = {"some_param": "lala param"})
self.assertEqual(transaction, {"code": 200, "result": "lala param", "error": False})
# @unittest.skip("stand by")
def test_transaction_exception(self):
self.assertRaises(TransactionError, self.db.transaction, collections = {}, action = "function () { return value; }")
# @unittest.skip("stand by")
def test_users_create_delete(self):
nbUsers = len(self.conn.users.fetchAllUsers())
u = self.conn.users.createUser("pyArangoTest_tesla", "secure")
u.save()
self.assertEqual(len(self.conn.users.fetchAllUsers()), nbUsers + 1)
u2 = self.conn.users.fetchUser(u["username"])
self.assertEqual(u2["username"], u["username"])
u.delete()
self.assertRaises( KeyError, self.conn.users.fetchUser, "tesla")
self.assertEqual(len(self.conn.users.fetchAllUsers()), nbUsers)
# @unittest.skip("stand by")
def test_users_credentials(self):
class persons(Collection):
pass
u = self.conn.users.createUser("pyArangoTest_tesla", "secure")
u.save()
u.setPermissions("test_db_2", True)
global ARANGODB_URL
conn = Connection(arangoURL=ARANGODB_URL, username="pyArangoTest_tesla", password="secure")
self.assertRaises(KeyError, conn.__getitem__, "_system")
self.assertTrue(conn.hasDatabase("test_db_2"))
# @unittest.skip("stand by")
def test_users_update(self):
u = self.conn.users.createUser("pyArangoTest_tesla", "secure")
u.save()
u.setPermissions("test_db_2", True)
global ARANGODB_URL
Connection(arangoURL=ARANGODB_URL, username="pyArangoTest_tesla", password="secure")
u["password"] = "newpass"
u.save()
Connection(arangoURL=ARANGODB_URL, username="pyArangoTest_tesla", password="newpass")
# @unittest.skip("stand by")
def test_action(self):
response = self.db.action.get("/_admin/aardvark/index.html")
self.assertEqual(response.status_code, 200, "Check if db is running")
# @unittest.skip("stand by")
def test_foxx_service(self):
response = self.db.foxx.service("/_admin/aardvark").get("/index.html")
self.assertEqual(response.status_code, 200, "Check if db is running")
# @unittest.skip("stand by")
def test_tasks(self):
db_tasks = self.db.tasks
self.assertListEqual(db_tasks(), [])
task = db_tasks.create(
'sample-task', 'console.log("sample-task", new Date());',
period=10
)
task_id = task['id']
fetched_task = db_tasks.fetch(task_id)
fetched_task['offset'] = int(fetched_task['offset'])
self.assertDictEqual(task, fetched_task)
tasks = db_tasks()
tasks[0]['offset'] = int(tasks[0]['offset'])
self.assertListEqual(tasks, [task])
db_tasks.delete(task_id)
self.assertListEqual(db_tasks(), [])
# @unittest.skip("stand by")
def test_timeout_parameter(self):
# Create a Connection object with the desired timeout
timeout = 120
connection = Connection(arangoURL=ARANGODB_URL, username=ARANGODB_ROOT_USERNAME, password=ARANGODB_ROOT_PASSWORD, timeout=timeout)
# Verify that the Connection session was created with the correct timeout
assert connection.session.timeout == timeout
|
class pyArangoTests(unittest.TestCase):
def setUp(self):
pass
def _reset(self):
pass
def tearDown(self):
pass
def createManyUsers(self, nbUsers):
pass
def createManyUsersBulk(self, nbUsers, batchSize):
pass
def patchManyUsersBulk(self, collection, batchSize, skip, docs):
pass
def deleteManyUsersBulk(self, collection, batchSize, skip, docs):
pass
def test_to_default(self):
pass
class theCol(Collection):
def test_fill_default(self):
pass
class theCol(Collection):
def test_fill_default_on_save(self):
pass
class theCol(Collection):
def test_bulk_operations(self):
pass
def test_bulk_import(self):
pass
def test_bulk_import_exception(self):
pass
def test_bulk_import_error_return_value(self):
pass
def test_bulkSave(self):
pass
def test_bulkSave_dict(self):
pass
def test_collection_create_delete(self):
pass
def test_edges_create_delete(self):
pass
def test_collection_count_truncate(self):
pass
def test_document_create_update_delete(self):
pass
def test_document_fetch_by_key(self):
pass
def test_database_contains_id(self):
pass
def test_document_set_private_w_rest(self):
pass
def test_document_has_field(self):
pass
class theCol(Collection):
def test_document_create_patch(self):
pass
def test_aql_validation(self):
pass
def test_aql_query_rawResults_true(self):
pass
def test_aql_query_rawResults_false(self):
pass
def test_aql_query_batch(self):
pass
def test_simple_query_by_example_batch(self):
pass
def test_simple_query_all_batch(self):
pass
def test_simple_query_iterator_all_batch_rawResults_true(self):
pass
def test_nonRaw_creation_error(self):
pass
def test_empty_query(self):
pass
def test_cursor(self):
pass
def test_fields_on_set(self):
pass
class Col_on_set(Collection):
def test_fields_on_save(self):
pass
class String_val(VAL.Validator):
def validate(self, value):
pass
class Col_on_save(Collection):
def test_unvalidated_nested_fields(self):
pass
class String_val(VAL.Validator):
def validate(self, value):
pass
class Col_on_save(Collection):
def test_document_cache(self):
pass
class DummyDoc(object):
def __init__(self, key):
pass
def __getitem__(self, k):
pass
def __setitem__(self, k, v):
pass
def __repr__(self):
pass
def test_validation_default_settings(self):
pass
class Col_empty(Collection):
class Col_empty2(Collection):
def test_validation_default_invalid_key(self):
pass
def keyTest():
pass
class Col_on_set(Collection):
def test_validation_default_invalid_value(self):
pass
def keyTest():
pass
class Col_on_set(Collection):
def test_collection_type_creation(self):
pass
class Edgy(Edges):
class Coly(Collection):
def test_save_edge(self):
pass
class Human(Collection):
class Relation(Edges):
def test_get_edges(self):
pass
class Human(Collection):
class Relation(Edges):
def test_graph(self):
pass
class Humans(Collection):
class Friend(Edges):
class MyGraph(Graph):
def test_traversal(self):
pass
class persons(Collection):
class knows(Edges):
class knows_graph(Graph):
def testIndexes(self):
pass
def getName(name):
pass
class persons(Collection):
def test_transaction(self):
pass
def test_transaction_exception(self):
pass
def test_users_create_delete(self):
pass
def test_users_credentials(self):
pass
class persons(Collection):
def test_users_update(self):
pass
def test_action(self):
pass
def test_foxx_service(self):
pass
def test_tasks(self):
pass
def test_timeout_parameter(self):
pass
| 95 | 0 | 16 | 2 | 14 | 0 | 2 | 0.07 | 1 | 24 | 17 | 0 | 57 | 5 | 57 | 129 | 1,138 | 194 | 881 | 353 | 777 | 63 | 771 | 348 | 667 | 7 | 2 | 2 | 114 |
6,440 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/tests/validators_tests.py
|
pyArango.tests.validators_tests.ValidatorTests
|
class ValidatorTests(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_notNull(self):
v = NotNull()
self.assertTrue(v.validate(33))
self.assertRaises(ValidationError, v.validate, None)
def test_email(self):
v = Email()
self.assertTrue(v.validate('nicholas.tesla@simba.com'))
self.assertRaises(ValidationError, v.validate, 'nicholas.tesla @simba.com')
self.assertRaises(ValidationError, v.validate, 'nicholas.tesla&@simba.com')
self.assertRaises(ValidationError, v.validate, 'nicholas.tesla @simba.com')
self.assertRaises(ValidationError, v.validate, 'nicholas.tesla')
self.assertRaises(ValidationError, v.validate, 'nicholas.tesla@.com')
self.assertRaises(ValidationError, v.validate, 'nicholas.tesla@com')
def test_length(self):
v = Length(2, 5)
self.assertTrue(v.validate("12"))
self.assertRaises(ValidationError, v.validate, '1')
self.assertRaises(ValidationError, v.validate, '123456')
|
class ValidatorTests(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_notNull(self):
pass
def test_email(self):
pass
def test_length(self):
pass
| 6 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 4 | 4 | 0 | 5 | 0 | 5 | 77 | 28 | 5 | 23 | 9 | 17 | 0 | 23 | 9 | 17 | 1 | 2 | 0 | 5 |
6,441 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.AQLFetchError
|
class AQLFetchError(Exception):
"""Raised error when fetching the data."""
def __init__(self, err_message):
"""Error when unable to fetch.
Parameters
----------
err_message : str
error message.
"""
Exception.__init__(self, err_message)
|
class AQLFetchError(Exception):
'''Raised error when fetching the data.'''
def __init__(self, err_message):
'''Error when unable to fetch.
Parameters
----------
err_message : str
error message.
'''
pass
| 2 | 2 | 10 | 2 | 2 | 6 | 1 | 2.33 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 11 | 13 | 3 | 3 | 2 | 1 | 7 | 3 | 2 | 1 | 1 | 3 | 0 | 1 |
6,442 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.AQLQueryError
|
class AQLQueryError(pyArangoException):
"""Something went wrong with an aql query"""
def __init__(self, message, query, errors = None):
if errors is None:
errors = {}
lq = []
for i, ll in enumerate(query.split("\n")):
lq.append("%s: %s" % (i+1, ll))
lq = '\n'.join(lq)
message = "Error in:\n%s.\n->%s" % (lq, message)
pyArangoException.__init__(self, message, errors)
|
class AQLQueryError(pyArangoException):
'''Something went wrong with an aql query'''
def __init__(self, message, query, errors = None):
pass
| 2 | 1 | 10 | 1 | 9 | 0 | 3 | 0.1 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 13 | 12 | 1 | 10 | 4 | 8 | 1 | 10 | 4 | 8 | 3 | 4 | 1 | 3 |
6,443 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.AbstractInstanciationError
|
class AbstractInstanciationError(Exception):
"""Raised when someone tries to instanciate an abstract class"""
def __init__(self, cls):
self.cls = cls
self.message = "%s is abstract and is not supposed to be instanciated. Collections my inherit from it" % self.cls.__name__
Exception.__init__(self, self.message)
def __str__(self):
return self.message
|
class AbstractInstanciationError(Exception):
'''Raised when someone tries to instantiate an abstract class'''
def __init__(self, cls):
pass
def __str__(self):
pass
| 3 | 1 | 3 | 0 | 3 | 0 | 1 | 0.14 | 1 | 0 | 0 | 0 | 2 | 2 | 2 | 12 | 9 | 1 | 7 | 5 | 4 | 1 | 7 | 5 | 4 | 1 | 3 | 0 | 2 |
6,444 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.ArangoError
|
class ArangoError(pyArangoException):
"""a generic arangodb error object"""
def __init__(self, errorObject):
self.errorNum = errorObject['errorNum']
pyArangoException.__init__(self, errorObject['errorMessage'], errorObject)
|
class ArangoError(pyArangoException):
'''a generic arangodb error object'''
def __init__(self, errorObject):
pass
| 2 | 1 | 3 | 0 | 3 | 0 | 1 | 0.25 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 13 | 5 | 0 | 4 | 3 | 2 | 1 | 4 | 3 | 2 | 1 | 4 | 0 | 1 |
6,445 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.BulkOperationError
|
class BulkOperationError(pyArangoException):
"""Something went wrong in one of the bulk operations. This error contains more errors"""
def __init__(self, message):
self._errors = []
self._errmsgs = []
self._documents = []
pyArangoException.__init__(self, "Batch error - + " + message)
def addBulkError(self, error, document):
self._errors.append(error)
self._errmsgs.append(str(error))
self._documents.append(document)
def __str__(self):
strErrors = []
i = 0
for errMsg in self._errmsgs:
err = "<unknown>"
docstr = "<unknown>"
try:
err = errMsg
except:
pass
try:
docstr = self._documents[i]
except:
pass
strErrors.append("\t<%s> -> %s" % (err, docstr))
i+=1
strErrors = '\n\t'.join(strErrors)
return self.message + ":\n\t" + strErrors
|
class BulkOperationError(pyArangoException):
'''Something went wrong in one of the bulk operations. This error contains more errors'''
def __init__(self, message):
pass
def addBulkError(self, error, document):
pass
def __str__(self):
pass
| 4 | 1 | 9 | 0 | 9 | 0 | 2 | 0.04 | 1 | 1 | 0 | 0 | 3 | 3 | 3 | 15 | 30 | 1 | 28 | 12 | 24 | 1 | 28 | 12 | 24 | 4 | 4 | 2 | 6 |
6,446 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.ConnectionError
|
class ConnectionError(pyArangoException):
"""Something went wrong with the connection"""
def __init__(self, message, URL, statusCode = "", errors = None):
if errors is None:
errors = {}
mes = "%s. URL: %s, status: %s" % (message, URL, statusCode)
pyArangoException.__init__(self, mes, errors)
|
class ConnectionError(pyArangoException):
'''Something went wrong with the connection'''
def __init__(self, message, URL, statusCode = "", errors = None):
pass
| 2 | 1 | 5 | 0 | 5 | 0 | 2 | 0.17 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 13 | 7 | 0 | 6 | 3 | 4 | 1 | 6 | 3 | 4 | 2 | 4 | 1 | 2 |
6,447 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/tests/tests.py
|
pyArango.tests.tests.pyArangoTests.test_validation_default_settings.Col_empty2
|
class Col_empty2(Collection):
_validation = {
"on_save": False,
}
|
class Col_empty2(Collection):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 53 | 4 | 0 | 4 | 2 | 3 | 0 | 2 | 2 | 1 | 0 | 2 | 0 | 0 |
6,448 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/query.py
|
pyArango.query.Cursor
|
class Cursor(Query):
"Cursor queries are attached to and instanciated by a database, use them to continue from where you left"
def __init__(self, database, cursorId, rawResults):
self.rawResults = rawResults
self._developed = set()
self.batchNumber = 1
self.cursor = RawCursor(database, cursorId)
self.response = next(self.cursor)
def _raiseInitFailed(self, request):
data = request.json()
raise CursorError(data["errorMessage"], self.id, data)
|
class Cursor(Query):
'''Cursor queries are attached to and instantiated by a database; use them to continue from where you left off'''
def __init__(self, database, cursorId, rawResults):
pass
def _raiseInitFailed(self, request):
pass
| 3 | 1 | 5 | 0 | 5 | 0 | 1 | 0.1 | 1 | 3 | 2 | 0 | 2 | 5 | 2 | 13 | 12 | 1 | 10 | 9 | 7 | 1 | 10 | 9 | 7 | 1 | 2 | 0 | 2 |
6,449 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.CursorError
|
class CursorError(pyArangoException):
"""Something went wrong when trying to fetch data with a cursor"""
def __init__(self, message, cursorId, errors = None):
if errors is None:
errors = {}
message = "Unable to retreive data for cursor %s: %s" % (cursorId, message)
pyArangoException.__init__(self, message, errors)
|
class CursorError(pyArangoException):
'''Something went wrong when trying to fetch data with a cursor'''
def __init__(self, message, cursorId, errors = None):
pass
| 2 | 1 | 5 | 0 | 5 | 0 | 2 | 0.17 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 13 | 7 | 0 | 6 | 2 | 4 | 1 | 6 | 2 | 4 | 2 | 4 | 1 | 2 |
6,450 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.DocumentNotFoundError
|
class DocumentNotFoundError(pyArangoException):
def __init__(self, message, errors = None):
if errors is None:
errors = {}
pyArangoException.__init__(self, message, errors)
|
class DocumentNotFoundError(pyArangoException):
def __init__(self, message, errors = None):
pass
| 2 | 0 | 4 | 0 | 4 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 13 | 5 | 0 | 5 | 2 | 3 | 0 | 5 | 2 | 3 | 2 | 4 | 1 | 2 |
6,451 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.ExportError
|
class ExportError(pyArangoException):
""" Something went wrong using the export cursor """
def __init__(self, message, errors = None ):
if errors is None:
errors = {}
pyArangoException.__init__(self, message, errors)
|
class ExportError(pyArangoException):
''' Something went wrong using the export cursor '''
def __init__(self, message, errors = None ):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 2 | 0.2 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 13 | 6 | 0 | 5 | 2 | 3 | 1 | 5 | 2 | 3 | 2 | 4 | 1 | 2 |
6,452 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.IndexError
|
class IndexError(pyArangoException):
"""wasn't able to get the index"""
def __init__(self, message, errors = None):
if errors is None:
errors = {}
pyArangoException.__init__(self, message, errors)
|
class IndexError(pyArangoException):
'''wasn't able to get the index'''
def __init__(self, message, errors = None):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 2 | 0.2 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 13 | 6 | 0 | 5 | 2 | 3 | 1 | 5 | 2 | 3 | 2 | 4 | 1 | 2 |
6,453 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.InvalidDocument
|
class InvalidDocument(pyArangoException):
"""Raised when a Document does not respect schema/validation defined in its collection"""
def __init__(self, errors):
message = "Unsuccesful validation"
self.strErrors = []
for k, v in errors.items():
self.strErrors.append("%s -> %s" % (k, v))
self.strErrors = '\n\t'.join(self.strErrors)
pyArangoException.__init__(self, message, errors)
def __str__(self):
strErrors = []
for k, v in self.errors.items():
strErrors.append("%s -> %s" % (k, v))
strErrors = '\n\t'.join(strErrors)
return self.message + ":\n\t" + strErrors
|
class InvalidDocument(pyArangoException):
'''Raised when a Document does not respect schema/validation defined in its collection'''
def __init__(self, errors):
pass
def __str__(self):
pass
| 3 | 1 | 7 | 1 | 7 | 0 | 2 | 0.07 | 1 | 0 | 0 | 0 | 2 | 1 | 2 | 14 | 17 | 2 | 14 | 8 | 11 | 1 | 14 | 8 | 11 | 2 | 4 | 1 | 4 |
6,454 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.QueryError
|
class QueryError(pyArangoException):
"""Something went wrong with an aql query"""
def __init__(self, message, errors = None):
if errors is None:
errors = {}
pyArangoException.__init__(self, message, errors)
|
class QueryError(pyArangoException):
'''Something went wrong with an aql query'''
def __init__(self, message, errors = None):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 2 | 0.2 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 13 | 6 | 0 | 5 | 2 | 3 | 1 | 5 | 2 | 3 | 2 | 4 | 1 | 2 |
6,455 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.SchemaViolation
|
class SchemaViolation(pyArangoException):
"""Raised when someone tries to add a new field to an object belonging a to a Collection with enforced schema"""
def __init__(self, collection, field, errors = None):
if errors is None:
errors = {}
message = "Collection '%s' does not have a field '%s' in it's schema" % (collection.__name__, field)
pyArangoException.__init__(self, message, errors)
|
class SchemaViolation(pyArangoException):
'''Raised when someone tries to add a new field to an object belonging to a Collection with enforced schema'''
def __init__(self, collection, field, errors = None):
pass
| 2 | 1 | 5 | 0 | 5 | 0 | 2 | 0.17 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 13 | 7 | 0 | 6 | 3 | 4 | 1 | 6 | 3 | 4 | 2 | 4 | 1 | 2 |
6,456 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.SimpleQueryError
|
class SimpleQueryError(pyArangoException):
"""Something went wrong with a simple query"""
def __init__(self, message, errors = None):
if errors is None:
errors = {}
pyArangoException.__init__(self, message, errors)
|
class SimpleQueryError(pyArangoException):
'''Something went wrong with a simple query'''
def __init__(self, message, errors = None):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 2 | 0.2 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 13 | 6 | 0 | 5 | 2 | 3 | 1 | 5 | 2 | 3 | 2 | 4 | 1 | 2 |
6,457 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.TransactionError
|
class TransactionError(pyArangoException):
"""Something went wrong with a transaction"""
def __init__(self, message, action, errors = None):
if errors is None:
errors = {}
message = "Error in: %s.\n->%s" % (action, message)
pyArangoException.__init__(self, message, errors)
|
class TransactionError(pyArangoException):
'''Something went wrong with a transaction'''
def __init__(self, message, action, errors = None):
pass
| 2 | 1 | 5 | 0 | 5 | 0 | 2 | 0.17 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 13 | 7 | 0 | 6 | 2 | 4 | 1 | 6 | 2 | 4 | 2 | 4 | 1 | 2 |
6,458 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.TraversalError
|
class TraversalError(pyArangoException):
"""Something went wrong when doing a graph traversal"""
def __init__(self, message, errors = None):
if errors is None:
errors = {}
pyArangoException.__init__(self, message, errors)
|
class TraversalError(pyArangoException):
'''Something went wrong when doing a graph traversal'''
def __init__(self, message, errors = None):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 2 | 0.2 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 13 | 6 | 0 | 5 | 2 | 3 | 1 | 5 | 2 | 3 | 2 | 4 | 1 | 2 |
6,459 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.UniqueConstrainViolation
|
class UniqueConstrainViolation(CreationError):
"""Violation of a unique key"""
def __init__(self, message, errors = None):
if errors is None:
errors = {}
CreationError.__init__(self, message, errors)
|
class UniqueConstrainViolation(CreationError):
'''Violation of a unique key'''
def __init__(self, message, errors = None):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 2 | 0.2 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 14 | 6 | 0 | 5 | 2 | 3 | 1 | 5 | 2 | 3 | 2 | 5 | 1 | 2 |
6,460 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.UpdateError
|
class UpdateError(pyArangoException):
"""Something went wrong when updating something"""
def __init__(self, message, errors = None):
if errors is None:
errors = {}
pyArangoException.__init__(self, message, errors)
|
class UpdateError(pyArangoException):
'''Something went wrong when updating something'''
def __init__(self, message, errors = None):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 2 | 0.2 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 13 | 6 | 0 | 5 | 2 | 3 | 1 | 5 | 2 | 3 | 2 | 4 | 1 | 2 |
6,461 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.ValidationError
|
class ValidationError(pyArangoException):
"""Something went wrong when validating something"""
def __init__(self, message, errors = None):
if errors is None:
errors = {}
pyArangoException.__init__(self, message, errors)
|
class ValidationError(pyArangoException):
'''Something went wrong when validating something'''
def __init__(self, message, errors = None):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 2 | 0.2 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 13 | 6 | 0 | 5 | 2 | 3 | 1 | 5 | 2 | 3 | 2 | 4 | 1 | 2 |
6,462 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.DeletionError
|
class DeletionError(pyArangoException):
"""Something went wrong when deleting something"""
def __init__(self, message, errors = None):
if errors is None:
errors = {}
pyArangoException.__init__(self, message, errors)
|
class DeletionError(pyArangoException):
'''Something went wrong when deleting something'''
def __init__(self, message, errors = None):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 2 | 0.2 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 13 | 6 | 0 | 5 | 2 | 3 | 1 | 5 | 2 | 3 | 2 | 4 | 1 | 2 |
6,463 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/query.py
|
pyArango.query.AQLQuery
|
class AQLQuery(Query):
"AQL queries are attached to and instanciated by a database"
def __init__(self, database, query, batchSize, bindVars, options, count, fullCount, rawResults = True,
json_encoder = None, **moreArgs):
# fullCount is passed in the options dict per https://docs.arangodb.com/3.1/HTTP/AqlQueryCursor/AccessingCursors.html
options["fullCount"] = fullCount
payload = {'query' : query, 'batchSize' : batchSize, 'bindVars' : bindVars, 'options' : options, 'count' : count}
payload.update(moreArgs)
self.query = query
self.database = database
self.connection = self.database.connection
self.connection.reportStart(query)
request = self.connection.session.post(database.getCursorsURL(), data = json.dumps(payload, cls=json_encoder, default=str))
self.connection.reportItem()
try:
Query.__init__(self, request, database, rawResults)
except QueryError as e:
raise AQLQueryError( message = e.message, query = self.query, errors = e.errors)
def explain(self, bindVars = None, allPlans = False):
"""Returns an explanation of the query. Setting allPlans to True will result in ArangoDB returning all possible plans. False returns only the optimal plan"""
if bindVars is None:
bindVars = {}
return self.database.explainAQLQuery(self.query, bindVars, allPlans)
def _raiseInitFailed(self, request):
data = request.json()
raise AQLQueryError(data["errorMessage"], self.query, data)
|
class AQLQuery(Query):
    '''AQL queries are attached to and instantiated by a database'''
def __init__(self, database, query, batchSize, bindVars, options, count, fullCount, rawResults = True,
json_encoder = None, **moreArgs):
pass
def explain(self, bindVars = None, allPlans = False):
'''Returns an explanation of the query. Setting allPlans to True will result in ArangoDB returning all possible plans. False returns only the optimal plan'''
pass
def _raiseInitFailed(self, request):
pass
| 4 | 2 | 9 | 1 | 7 | 1 | 2 | 0.13 | 1 | 3 | 2 | 0 | 3 | 3 | 3 | 14 | 30 | 4 | 23 | 12 | 18 | 3 | 22 | 10 | 18 | 2 | 2 | 1 | 5 |
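
Usage note for the AQLQuery listing above: you normally do not construct AQLQuery yourself; the Database object builds the cursor via db.AQLQuery. A minimal sketch, assuming a server at http://localhost:8529, credentials root/secret, and an existing 'users' collection (all illustrative):

from pyArango.connection import Connection

conn = Connection(arangoURL="http://localhost:8529", username="root", password="secret")
db = conn["_system"]

# rawResults=True yields plain dicts/values instead of Document objects
aql = "FOR u IN users FILTER u.age > @age RETURN u.name"
results = db.AQLQuery(aql, rawResults=True, batchSize=50, bindVars={"age": 18})
for name in results:
    print(name)
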
6,464 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/jwauth.py
|
pyArango.jwauth.JWTAuth
|
class JWTAuth(requests.auth.AuthBase):
# Half a day before the actual expiration.
REAUTH_TIME_INTERVEL = 43200
def __init__(
self, username, password, urls, use_lock_for_reseting_jwt=False,
max_retries=5
):
self.username = username
self.password = password
self.urls = urls
self.lock_for_reseting_jwt = Lock() if use_lock_for_reseting_jwt else None
self.__init_request_session(max_retries)
self.__set_token()
def __init_request_session(self, max_retries):
self.max_retries = max_retries
self.session = requests.Session()
http = requests.adapters.HTTPAdapter(max_retries=max_retries)
https = requests.adapters.HTTPAdapter(max_retries=max_retries)
self.session.mount('http://', http)
self.session.mount('https://', https)
def __parse_token(self):
decoded_token = b64decode(self.token.split('.')[1].encode())
return json_mod.loads(decoded_token.decode())
def __get_auth_token(self):
request_data = '{"username":"%s","password":"%s"}' % (self.username, self.password)
for connection_url in self.urls:
try:
response = self.session.post('%s/_open/auth' % connection_url, data=request_data)
if response.ok:
json_data = response.content
if json_data:
data_dict = json_mod.loads(json_data.decode("utf-8"))
return data_dict.get('jwt')
except requests_exceptions.ConnectionError:
if connection_url is not self.urls[-1]:
logging.critical("Unable to connect to %s trying another", connection_url)
else:
logging.critical("Unable to connect to any of the urls: %s", self.urls)
raise
def __set_token(self):
self.token = self.__get_auth_token()
self.parsed_token = \
self.__parse_token() if self.token is not None else {}
self.token_last_updated = time.time()
def reset_token(self):
logging.warning("Reseting the token.")
self.__set_token()
def is_token_expired(self):
return (
self.parsed_token.get("exp", 0) - time.time() <
JWTAuth.REAUTH_TIME_INTERVEL
)
def __call__(self, req):
# Implement JWT authentication
if self.is_token_expired():
if self.lock_for_reseting_jwt is not None:
self.lock_for_reseting_jwt.acquire()
if self.is_token_expired():
self.reset_token()
if self.lock_for_reseting_jwt is not None:
self.lock_for_reseting_jwt.release()
req.headers['Authorization'] = 'Bearer %s' % self.token
return req
|
class JWTAuth(requests.auth.AuthBase):
def __init__(
self, username, password, urls, use_lock_for_reseting_jwt=False,
max_retries=5
):
pass
def __init_request_session(self, max_retries):
pass
def __parse_token(self):
pass
def __get_auth_token(self):
pass
def __set_token(self):
pass
def reset_token(self):
pass
def is_token_expired(self):
pass
def __call__(self, req):
pass
| 9 | 0 | 8 | 0 | 7 | 0 | 2 | 0.03 | 1 | 2 | 0 | 0 | 8 | 9 | 8 | 8 | 72 | 9 | 61 | 30 | 49 | 2 | 53 | 27 | 44 | 6 | 1 | 4 | 19 |
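
JWTAuth above is a requests auth handler, so it can be passed to any requests call aimed at the server's HTTP API; it fetches a JWT once and refreshes it when the token nears expiry. A hedged sketch (URL and credentials are placeholders):

import requests
from pyArango.jwauth import JWTAuth

urls = ["http://localhost:8529"]
auth = JWTAuth("root", "secret", urls, use_lock_for_reseting_jwt=True)

# Every request carries 'Authorization: Bearer <token>' added by JWTAuth.__call__
response = requests.get(urls[0] + "/_api/version", auth=auth)
print(response.status_code)
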
6,465 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/index.py
|
pyArango.index.Index
|
class Index(object):
"""An index on a collection's fields. Indexes are meant to de created by ensureXXX functions of Collections.
Indexes have a .infos dictionary that stores all the infos about the index"""
def __init__(self, collection, infos = None, creationData = None):
self.collection = collection
self.connection = self.collection.database.connection
self.infos = None
self.active = False
if infos:
self.infos = infos
elif creationData:
self._create(creationData)
def getURL(self):
if self.infos:
return "%s/%s" % (self.getIndexesURL(), self.infos["id"])
return None
def getIndexesURL(self):
return "%s/index" % self.collection.database.getURL()
def _create(self, postData, force=False):
"""Creates an index of any type according to postData"""
if self.infos is None or not self.active or force:
r = self.connection.session.post(self.getIndexesURL(), params = {"collection" : self.collection.name}, data = json.dumps(postData, default=str))
data = r.json()
if (r.status_code >= 400) or data['error']:
raise CreationError(data['errorMessage'], data)
self.infos = data
self.active = True
def restore(self):
"""restore and index that has been previously deleted"""
self._create(self.infos, force=True)
def delete(self):
"""Delete the index"""
r = self.connection.session.delete(self.getURL())
data = r.json()
if (r.status_code != 200 and r.status_code != 202) or data['error']:
raise DeletionError(data['errorMessage'], data)
self.active = False
def __repr__(self):
return "<Index of type %s>" % self.infos["type"]
|
class Index(object):
    '''An index on a collection's fields. Indexes are meant to be created by ensureXXX functions of Collections.
Indexes have a .infos dictionary that stores all the infos about the index'''
def __init__(self, collection, infos = None, creationData = None):
pass
def getURL(self):
pass
def getIndexesURL(self):
pass
def _create(self, postData, force=False):
'''Creates an index of any type according to postData'''
pass
def restore(self):
        '''restore an index that has been previously deleted'''
pass
def delete(self):
'''Delete the index'''
pass
def __repr__(self):
pass
| 8 | 4 | 5 | 0 | 5 | 0 | 2 | 0.15 | 1 | 3 | 2 | 0 | 7 | 4 | 7 | 7 | 48 | 9 | 34 | 16 | 26 | 5 | 33 | 16 | 25 | 3 | 1 | 2 | 13 |
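
As the docstring above says, Index objects are meant to be created through the collection's ensureXXX helpers rather than instantiated directly. A short sketch, assuming 'db' is an open pyArango database with a 'users' collection (illustrative names):

users = db["users"]

# ensureHashIndex creates the index on the server (or reuses it) and returns an Index object
idx = users.ensureHashIndex(["email"], unique=True, sparse=False)
print(idx.infos["id"], idx.infos["type"])

idx.delete()   # remove the index from the server
idx.restore()  # re-create it from the stored infos (Index._create with force=True)
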
6,466 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/examples/createSocialGraph.py
|
createSocialGraph.Social
|
class Social(object):
class male(Collection):
_fields = {
"name" : Field()
}
class female(Collection):
_fields = {
"name" : Field()
}
class relation(Edges):
_fields = {
"number" : Field()
}
class social(Graph):
_edgeDefinitions = (EdgeDefinition ('relation',
fromCollections = ["female", "male"],
toCollections = ["female", "male"]),)
_orphanedCollections = []
def __init__(self):
self.conn = Connection(username="USERNAME", password="SECRET")
self.db = self.conn["_system"]
if self.db.hasGraph('social'):
raise Exception("The social graph was already provisioned! remove it first")
self.female = self.db.createCollection(className='Collection', name='female')
self.male = self.db.createCollection(className='Collection', name='male')
self.relation = self.db.createCollection(className='Edges', name='relation')
g = self.db.createGraph("social")
a = g.createVertex('female', {"name": 'Alice', "_key": 'alice'});
b = g.createVertex('male', {"name": 'Bob', "_key": 'bob'});
c = g.createVertex('male', {"name": 'Charly', "_key": 'charly'});
d = g.createVertex('female', {"name": 'Diana', "_key": 'diana'});
a.save()
b.save()
c.save()
d.save()
g.link('relation', a, b, {"type": 'married', "_key": 'aliceAndBob'})
g.link('relation', a, c, {"type": 'friend', "_key": 'aliceAndCharly'})
g.link('relation', c, d, {"type": 'married', "_key": 'charlyAndDiana'})
g.link('relation', b, d, {"type": 'friend', "_key": 'bobAndDiana'})
|
class Social(object):
class male(Collection):
class female(Collection):
class relation(Edges):
class social(Graph):
def __init__(self):
pass
| 6 | 0 | 27 | 6 | 21 | 0 | 2 | 0 | 1 | 5 | 4 | 0 | 1 | 2 | 1 | 1 | 51 | 12 | 39 | 18 | 33 | 0 | 31 | 18 | 25 | 2 | 1 | 1 | 2 |
6,467 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/action.py
|
pyArango.action.ConnectionAction
|
class ConnectionAction:
"""Base class for using the session to execute action."""
def __init__(self, connection):
"""Initialise connection."""
self.connection = connection
@property
def session(self):
"""Session of the connection."""
return self.connection.session
@property
def end_point_url(self):
"""End point url for connection."""
return self.connection.getEndpointURL()
def get(self, url, **kwargs):
"""HTTP GET Method."""
action_url = '%s%s' % (self.end_point_url, url)
return self.session.get(action_url, **kwargs)
def post(self, url, data=None, json=None, **kwargs):
"""HTTP POST Method."""
action_url = '%s%s' % (self.end_point_url, url)
return self.session.post(
action_url, data, json, **kwargs
)
def put(self, url, data=None, **kwargs):
"""HTTP PUT Method."""
action_url = '%s%s' % (self.end_point_url, url)
return self.session.put(action_url, data, **kwargs)
def head(self, url, **kwargs):
"""HTTP HEAD Method."""
action_url = '%s%s' % (self.end_point_url, url)
return self.session.head(action_url, **kwargs)
def options(self, url, **kwargs):
"""HTTP OPTIONS Method."""
action_url = '%s%s' % (self.end_point_url, url)
return self.session.options(action_url, **kwargs)
def patch(self, url, data=None, **kwargs):
"""HTTP PATCH Method."""
action_url = '%s%s' % (self.end_point_url, url)
return self.session.patch(action_url, data, **kwargs)
def delete(self, url, **kwargs):
"""HTTP DELETE Method."""
action_url = '%s%s' % (self.end_point_url, url)
return self.session.delete(action_url, **kwargs)
|
class ConnectionAction:
'''Base class for using the session to execute action.'''
def __init__(self, connection):
'''Initialise connection.'''
pass
@property
def session(self):
'''Session of the connection.'''
pass
@property
def end_point_url(self):
'''End point url for connection.'''
pass
def get(self, url, **kwargs):
'''HTTP GET Method.'''
pass
def post(self, url, data=None, json=None, **kwargs):
'''HTTP POST Method.'''
pass
def put(self, url, data=None, **kwargs):
'''HTTP PUT Method.'''
pass
def head(self, url, **kwargs):
'''HTTP HEAD Method.'''
pass
def options(self, url, **kwargs):
'''HTTP OPTIONS Method.'''
pass
def patch(self, url, data=None, **kwargs):
'''HTTP PATCH Method.'''
pass
def delete(self, url, **kwargs):
'''HTTP DELETE Method.'''
pass
| 13 | 11 | 4 | 0 | 3 | 1 | 1 | 0.34 | 0 | 0 | 0 | 1 | 10 | 1 | 10 | 10 | 53 | 10 | 32 | 21 | 19 | 11 | 28 | 19 | 17 | 1 | 0 | 0 | 10 |
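
ConnectionAction simply prefixes each HTTP verb with the connection's endpoint URL and delegates to the underlying session. A minimal sketch based only on the code above; '/_api/version' is just an example path:

from pyArango.action import ConnectionAction
from pyArango.connection import Connection

conn = Connection(username="root", password="secret")
action = ConnectionAction(conn)

# Equivalent to conn.session.get(conn.getEndpointURL() + "/_api/version")
r = action.get("/_api/version")
print(r.json())
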
6,468 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/action.py
|
pyArango.action.DatabaseAction
|
class DatabaseAction(ConnectionAction):
"""Base class for using the session to execute action."""
def __init__(self, database):
"""Initialise database."""
self.database = database
@property
def session(self):
"""Session of the connection."""
return self.database.connection.session
@property
def end_point_url(self):
"""End point url for database."""
return '%s/_db/%s' % (
self.database.connection.getEndpointURL(), self.database.name
)
|
class DatabaseAction(ConnectionAction):
'''Base class for using the session to execute action.'''
def __init__(self, database):
'''Initialise database.'''
pass
@property
def session(self):
'''Session of the connection.'''
pass
@property
def end_point_url(self):
'''End point url for database.'''
pass
| 6 | 4 | 4 | 0 | 3 | 1 | 1 | 0.36 | 1 | 0 | 0 | 1 | 3 | 1 | 3 | 13 | 18 | 3 | 11 | 7 | 5 | 4 | 7 | 5 | 3 | 1 | 1 | 0 | 3 |
6,469 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/admin.py
|
pyArango.admin.Admin
|
class Admin(object):
"""administrative tasks with arangodb"""
def __init__(self, connection):
self.connection = connection
def status(self):
""" fetches the server status."""
url = "%s/_admin/status" % self.connection.getEndpointURL()
result = self.connection.session.get(url)
if result.status_code < 400:
return result.json()
raise ArangoError(result.json()['errorMessage'], result.json())
def is_cluster(self):
status = self.status()
return status['serverInfo']['role'] == 'COORDINATOR'
|
class Admin(object):
'''administrative tasks with arangodb'''
def __init__(self, connection):
pass
def status(self):
''' fetches the server status.'''
pass
def is_cluster(self):
pass
| 4 | 2 | 4 | 0 | 4 | 0 | 1 | 0.17 | 1 | 1 | 1 | 0 | 3 | 1 | 3 | 3 | 17 | 3 | 12 | 8 | 8 | 2 | 12 | 8 | 8 | 2 | 1 | 1 | 4 |
6,470 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/ca_certificate.py
|
pyArango.ca_certificate.CA_Certificate
|
class CA_Certificate(object):
"""A CA certificate. If encoded is True the certificate will be automatically base64 decoded"""
def __init__(self, certificate, encoded):
super(CA_Certificate, self).__init__()
self.certificate = certificate
if encoded:
self.certificate = base64.b64decode(self.certificate)
self.tmp_file = None
def get_file_path(self):
"""saves the cetificate into a tmp file and returns the file path"""
if self.tmp_file is not None:
return self.tmp_file
_ , self.tmp_file = tempfile.mkstemp(text=True)
f = open(self.tmp_file, "wb")
f.write(self.certificate)
f.close()
return self.tmp_file
def clean(self):
"""erases the tmp_file containing the certificate"""
if self.tmp_file is not None:
os.remove(self.tmp_file)
self.tmp_file = None
|
class CA_Certificate(object):
'''A CA certificate. If encoded is True the certificate will be automatically base64 decoded'''
def __init__(self, certificate, encoded):
pass
def get_file_path(self):
        '''saves the certificate into a tmp file and returns the file path'''
pass
def clean(self):
'''erases the tmp_file containing the certificate'''
pass
| 4 | 3 | 7 | 0 | 6 | 1 | 2 | 0.16 | 1 | 1 | 0 | 0 | 3 | 2 | 3 | 3 | 24 | 2 | 19 | 8 | 15 | 3 | 19 | 8 | 15 | 2 | 1 | 1 | 6 |
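
CA_Certificate wraps a CA bundle (optionally base64 encoded) and exposes it as a temporary file path for TLS verification. A hedged sketch; the certificate value, URL and the idea of passing it as 'verify' to Connection are illustrative assumptions:

from pyArango.ca_certificate import CA_Certificate
from pyArango.connection import Connection

encoded_ca = "LS0tLS1CRUdJTi..."  # placeholder: base64-encoded PEM certificate
ca = CA_Certificate(encoded_ca, encoded=True)

conn = Connection(arangoURL="https://db.example.com:8529",
                  username="root", password="secret", verify=ca)
# ... work with the connection ...
ca.clean()  # delete the temporary certificate file
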
6,471 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/collection.py
|
pyArango.collection.BulkMode
|
class BulkMode(Enum):
NONE = 0
INSERT = 1
UPDATE = 2
DELETE = 3
|
class BulkMode(Enum):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 5 | 0 | 5 | 5 | 4 | 0 | 5 | 5 | 4 | 0 | 4 | 0 | 0 |
6,472 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/collection.py
|
pyArango.collection.BulkOperation
|
class BulkOperation(object):
def __init__(self, collection, batchSize=100):
self.coll = collection
self.batchSize = batchSize
def __enter__(self):
self.coll._isBulkInProgress = True
self.coll._bulkSize = self.batchSize
return self.coll
def __exit__(self, type, value, traceback):
self.coll._finalizeBatch();
|
class BulkOperation(object):
def __init__(self, collection, batchSize=100):
pass
def __enter__(self):
pass
def __exit__(self, type, value, traceback):
pass
| 4 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 3 | 2 | 3 | 3 | 11 | 1 | 10 | 6 | 6 | 0 | 10 | 6 | 6 | 1 | 1 | 0 | 3 |
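
BulkOperation is a context manager that switches the collection into batch mode and flushes the pending batch on exit. A minimal sketch, assuming 'users' is a pyArango collection object:

from pyArango.collection import BulkOperation

# Saves are buffered and sent in batches of 200 documents instead of one request each
with BulkOperation(users, batchSize=200) as col:
    for i in range(1000):
        doc = col.createDocument()
        doc["name"] = "user-%d" % i
        doc.save()
# __exit__ calls _finalizeBatch(), so any partial last batch is written here
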
6,473 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/collection.py
|
pyArango.collection.CachedDoc
|
class CachedDoc(object):
"""A cached document."""
def __init__(self, document, prev, nextDoc):
self.prev = prev
self.document = document
self.nextDoc = nextDoc
self._key = document._key
def __getitem__(self, k):
return self.document[k]
def __setitem__(self, k, v):
self.document[k] = v
def __getattribute__(self, k):
try:
return object.__getattribute__(self, k)
except Exception as e1:
try:
return getattr(self.document, k)
except Exception as e2:
raise e2
|
class CachedDoc(object):
'''A cached document.'''
def __init__(self, document, prev, nextDoc):
pass
def __getitem__(self, k):
pass
def __setitem__(self, k, v):
pass
def __getattribute__(self, k):
pass
| 5 | 1 | 4 | 0 | 4 | 0 | 2 | 0.06 | 1 | 1 | 0 | 0 | 4 | 4 | 4 | 4 | 22 | 3 | 18 | 11 | 13 | 1 | 18 | 9 | 13 | 3 | 1 | 2 | 6 |
6,474 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/collection.py
|
pyArango.collection.Collection
|
class Collection(with_metaclass(Collection_metaclass, object)):
"""A document collection. Collections are meant to be instantiated by databases."""
# here you specify the fields that you want for the documents in your collection
_fields = {}
_validation = {
'on_save' : False,
'on_set' : False,
'on_load' : False,
'allow_foreign_fields' : True
}
arangoPrivates = ["_id", "_key", "_rev"]
def __init__(self, database, jsonData):
self.database = database
self.connection = self.database.connection
self.name = self.__class__.__name__
for k in jsonData:
setattr(self, k, jsonData[k])
self.documentCache = None
self.documentClass = Document
self.indexes = {
"primary" : {},
"hash" : {},
"skiplist" : {},
"persistent": {},
"ttl": {},
"geo" : {},
"fulltext" : {},
}
self.indexes_by_name = {}
# self.defaultDocument = None #getDefaultDoc(self._fields, {})
self._isBulkInProgress = False
self._bulkSize = 0
self._bulkCache = []
self._bulkMode = BulkMode.NONE
def getDefaultDocument(self, fields=None, dct=None):
if dct is None:
dct = {}
if fields is None:
fields = self._fields
for k, v in fields.items():
if isinstance(v, dict):
dct[k] = self.getDefaultDocument(fields[k], None)
elif isinstance(v, list) or isinstance(v, tuple):
dct[k] = []
elif isinstance(v, Field):
if callable(v.default):
dct[k] = v.default()
else :
dct[k] = v.default
else:
raise ValueError("Field '%s' is of invalid type '%s'" % (k, type(v)) )
return dct
def getURL(self):
return "%s/collection/%s" % (self.database.getURL(), self.name)
def getDocumentsURL(self):
return "%s/document" % (self.database.getURL())
def getIndexes(self):
"""Fill 'self.indexes' with all the indexes associated with the collection and return it."""
self.indexes_by_name = {}
url = "%s/index" % self.database.getURL()
r = self.connection.session.get(url, params = {"collection": self.name})
data = r.json()
for ind in data["indexes"]:
index = Index(collection = self, infos = ind)
self.indexes[ind["type"]][ind["id"]] = index
if "name" in ind:
self.indexes_by_name[ind["name"]] = index
return self.indexes
def getIndex(self, name):
if len(self.indexes_by_name) == 0:
raise IndexError("named indices unsupported")
return self.indexes_by_name[name]
def activateCache(self, cacheSize):
"""Activate the caching system.
Cached documents are only available through the __getitem__ interface."""
self.documentCache = DocumentCache(cacheSize)
def deactivateCache(self):
"""Deactivate the caching system."""
self.documentCache = None
def delete(self):
"""Delete the collection from the database."""
r = self.connection.session.delete(self.getURL())
data = r.json()
if not r.status_code == 200 or data["error"]:
raise DeletionError(data["errorMessage"], data)
def createDocument(self, initDict = None):
"""Create and return a completely empty document unless the initial document is set via 'initDict'."""
# res = dict(self.defaultDocument)
res = self.getDefaultDocument()
if initDict is not None:
res.update(initDict)
return self.documentClass(self, res)
def _writeBatch(self):
if not self._bulkCache:
return
if self._bulkMode != BulkMode.INSERT:
raise UpdateError("Mixed bulk operations not supported - have " + str(self._bulkMode))
payload = []
for d in self._bulkCache:
if isinstance(d,dict):
payload.append(json.dumps(d, default=str))
else:
try:
payload.append(d.toJson())
except Exception as e:
payload.append(json.dumps(d.getStore(), default=str))
payload = '[' + ','.join(payload) + ']'
r = self.connection.session.post(self.getDocumentsURL(), params = self._batchParams, data = payload)
data = r.json()
if (not isinstance(data, list)):
raise UpdateError("expected reply to be a json array" + r)
i = 0
bulkError = None
for xd in data:
if not '_key' in xd and 'error' in xd and 'errorNum' in xd:
if bulkError is None:
bulkError = BulkOperationError("saving failed")
bulkError.addBulkError(ArangoError(xd), self._bulkCache[i])
else:
self._bulkCache[i].setPrivates(xd)
self._bulkCache[i]._key = \
xd['_key']
i += 1
if bulkError is not None:
self._bulkCache = []
raise bulkError
self._bulkCache = []
def _saveBatch(self, document, params):
if self._bulkMode != BulkMode.NONE and self._bulkMode != BulkMode.INSERT:
raise UpdateError("Mixed bulk operations not supported - have " + str(self._bulkMode))
self._bulkMode = BulkMode.INSERT
self._bulkCache.append(document)
self._batchParams = params
if len(self._bulkCache) == self._bulkSize:
self._writeBatch()
self._bulkMode = BulkMode.NONE
def _updateBatch(self):
if not self._bulkCache:
return
if self._bulkMode != BulkMode.UPDATE:
raise UpdateError("Mixed bulk operations not supported - have " + str(self._bulkMode))
payload = []
for d in self._bulkCache:
dPayload = d._store.getPatches()
if d.collection._validation['on_save']:
d.validate()
if isinstance(d,dict):
payload.append(json.dumps(d, default=str))
else:
try:
payload.append(d.toJson())
except Exception as e:
payload.append(json.dumps(d.getStore(), default=str))
payload = '[' + ','.join(payload) + ']'
r = self.connection.session.patch(self.getDocumentsURL(), params = self._batchParams, data = payload)
data = r.json()
if (not isinstance(data, list)):
raise UpdateError("expected reply to be a json array" + dir(r))
i = 0
bulkError = None
for xd in data:
if not '_key' in xd and 'error' in xd and 'errorNum' in xd:
if bulkError is None:
bulkError = BulkOperationError("patching failed")
bulkError.addBulkError(ArangoError(xd), str(self._bulkCache[i]))
else:
self._bulkCache[i].setPrivates(xd)
self._bulkCache[i]._key = \
xd['_key']
i += 1
self._bulkCache = []
if bulkError is not None:
raise bulkError
def _patchBatch(self, document, params):
if self._bulkMode != BulkMode.NONE and self._bulkMode != BulkMode.UPDATE:
raise UpdateError("Mixed bulk operations not supported - have " + str(self._bulkMode))
self._bulkMode = BulkMode.UPDATE
self._bulkCache.append(document)
self._batchParams = params
if len(self._bulkCache) == self._bulkSize:
self._updateBatch()
self._bulkMode = BulkMode.NONE
def _removeBatch(self):
if not self._bulkCache:
return
if self._bulkMode != BulkMode.DELETE:
raise UpdateError("Mixed bulk operations not supported - have " + self._bulkMode)
payload = []
for d in self._bulkCache:
if isinstance(d,dict):
payload.append('"%s"' % d['_key'])
else:
try:
payload.append('"%s"' % d['_key'])
except Exception as e:
payload.append('"%s"' % d['_key'])
payload = '[' + ','.join(payload) + ']'
r = self.connection.session.delete(self.getDocumentsURL() + "/" + self.name, params = self._batchParams, data = payload)
data = r.json()
if (not isinstance(data, list)):
raise UpdateError("expected reply to be a json array" + r)
i = 0
bulkError = None
for xd in data:
if not '_key' in xd and 'error' in xd and 'errorNum' in xd:
if bulkError is None:
bulkError = BulkOperationError("deleting failed")
bulkError.addBulkError(ArangoError(xd), self._bulkCache[i])
else:
self._bulkCache[i].reset(self)
i += 1
self._bulkCache = []
if bulkError is not None:
raise bulkError
def _deleteBatch(self, document, params):
if self._bulkMode != BulkMode.NONE and self._bulkMode != BulkMode.DELETE:
raise UpdateError("Mixed bulk operations not supported - have " + str(self._bulkMode))
self._bulkMode = BulkMode.DELETE
self._bulkCache.append(document)
self._batchParams = params
if len(self._bulkCache) == self._bulkSize:
self._removeBatch()
self._bulkMode = BulkMode.NONE
def _finalizeBatch(self):
if self._bulkMode == BulkMode.INSERT:
self._writeBatch()
elif self._bulkMode == BulkMode.UPDATE:
self._updateBatch()
elif self._bulkMode == BulkMode.DELETE:
self._removeBatch()
# elif self._bulkMode == BulkMode.NONE:
self._bulkSize = 0
self._isBulkInProgress = False
self._batchParams = None
self._bulkMode = BulkMode.NONE
def importBulk(self, data, **addParams):
url = "%s/import" % (self.database.getURL())
payload = json.dumps(data, default=str)
params = {"collection": self.name, "type": "auto"}
params.update(addParams)
r = self.connection.session.post(url , params = params, data = payload)
data = r.json()
if not r.status_code == 201 or data["error"]:
raise CreationError(data["errorMessage"], data)
return data
def exportDocs( self, **data):
url = "%s/export" % (self.database.getURL())
params = {"collection": self.name}
payload = json.dumps(data)
r = self.connection.session.post(url, params = params, data = payload)
data = r.json()
if not r.status_code == 201 or data["error"]:
raise ExportError( data["errorMessage"], data )
docs = data['result']
return docs
def ensureHashIndex(self, fields, unique = False, sparse = True, deduplicate = False, name = None):
"""Create a hash index if it does not already exist, then return it."""
data = {
"type" : "hash",
"fields" : fields,
"unique" : unique,
"sparse" : sparse,
"deduplicate": deduplicate
}
if name:
data["name"] = name
ind = Index(self, creationData = data)
self.indexes["hash"][ind.infos["id"]] = ind
if name:
self.indexes_by_name[name] = ind
return ind
def ensureSkiplistIndex(self, fields, unique = False, sparse = True, deduplicate = False, name = None):
"""Create a skiplist index if it does not already exist, then return it."""
data = {
"type" : "skiplist",
"fields" : fields,
"unique" : unique,
"sparse" : sparse,
"deduplicate": deduplicate
}
if name:
data["name"] = name
ind = Index(self, creationData = data)
self.indexes["skiplist"][ind.infos["id"]] = ind
if name:
self.indexes_by_name[name] = ind
return ind
def ensurePersistentIndex(self, fields, unique = False, sparse = True, deduplicate = False, name = None):
"""Create a persistent index if it does not already exist, then return it."""
data = {
"type" : "persistent",
"fields" : fields,
"unique" : unique,
"sparse" : sparse,
"deduplicate": deduplicate
}
if name:
data["name"] = name
ind = Index(self, creationData = data)
self.indexes["skiplist"][ind.infos["id"]] = ind
if name:
self.indexes_by_name[name] = ind
return ind
def ensureTTLIndex(self, fields, expireAfter, unique = False, sparse = True, name = None):
"""Create a TTL index if it does not already exist, then return it."""
data = {
"type" : "ttl",
"fields" : fields,
"unique" : unique,
"sparse" : sparse,
"expireAfter" : expireAfter
}
if name:
data["name"] = name
ind = Index(self, creationData = data)
self.indexes["skiplist"][ind.infos["id"]] = ind
if name:
self.indexes_by_name[name] = ind
return ind
def ensureGeoIndex(self, fields, name = None):
"""Create a geo index if it does not already exist, then return it."""
data = {
"type" : "geo",
"fields" : fields,
}
if name:
data["name"] = name
ind = Index(self, creationData = data)
self.indexes["geo"][ind.infos["id"]] = ind
if name:
self.indexes_by_name[name] = ind
return ind
def ensureFulltextIndex(self, fields, minLength = None, name = None):
"""Create a fulltext index if it does not already exist, then return it."""
data = {
"type" : "fulltext",
"fields" : fields,
}
if name:
data["name"] = name
if minLength is not None:
data["minLength"] = minLength
ind = Index(self, creationData = data)
self.indexes["fulltext"][ind.infos["id"]] = ind
if name:
self.indexes_by_name[name] = ind
return ind
def ensureIndex(self, index_type, fields, name=None, **index_args):
"""Create an index of any type."""
data = {
"type" : index_type,
"fields" : fields,
}
data.update(index_args)
if name:
data["name"] = name
ind = Index(self, creationData = data)
self.indexes[index_type][ind.infos["id"]] = ind
if name:
self.indexes_by_name[name] = ind
return ind
def restoreIndexes(self, indexes_dct=None):
"""Restore all previously removed indexes."""
if indexes_dct is None:
indexes_dct = self.indexes
for typ in indexes_dct.keys():
if typ != "primary":
for name, idx in indexes_dct[typ].items():
infos = dict(idx.infos)
del infos["fields"]
self.ensureIndex(typ, idx.infos["fields"], **infos)
def validatePrivate(self, field, value):
"""Validate a private field value."""
if field not in self.arangoPrivates:
raise ValueError("%s is not a private field of collection %s" % (field, self))
if field in self._fields:
self._fields[field].validate(value)
return True
@classmethod
def hasField(cls, fieldName):
"""Return 'True' or 'False' whether the collection has field 'K' in its schema.
Use the dot notation for the nested fields: address.street"""
path = fieldName.split(".")
v = cls._fields
for k in path:
try:
v = v[k]
except KeyError:
return False
return True
def fetchDocument(self, key, rawResults = False, rev = None):
"""Fetche a document from the collection given its key.
This function always goes straight to the db and bypasses the cache.
If you want to take advantage of the cache use the '__getitem__' interface: collection[key]"""
url = "%s/%s/%s" % (self.getDocumentsURL(), self.name, key)
if rev is not None:
r = self.connection.session.get(url, params = {'rev' : rev})
else:
r = self.connection.session.get(url)
if r.status_code < 400:
if rawResults:
return r.json()
return self.documentClass(self, r.json(), on_load_validation=self._validation["on_load"])
elif r.status_code == 404 :
raise DocumentNotFoundError("Unable to find document with _key: %s" % key, r.json())
raise DocumentNotFoundError("Unable to find document with _key: %s, response: %s" % (key, r.json()), r.json())
def fetchByExample(self, exampleDict, batchSize, rawResults = False, **queryArgs):
"""'exampleDict' should be something like {'age' : 28}."""
return self.simpleQuery('by-example', rawResults, example = exampleDict, batchSize = batchSize, **queryArgs)
def fetchFirstExample(self, exampleDict, rawResults = False):
"""'exampleDict' should be something like {'age' : 28}.
Return the first example found that matches the example, still in a 'SimpleQuery' object."""
return self.simpleQuery('first-example', rawResults = rawResults, example = exampleDict)
def fetchAll(self, rawResults = False, **queryArgs):
"""Returns all the documents in the collection.
        You can use the optional arguments 'skip' and 'limit'::
            fetchAll(limit = 3, skip = 10)"""
return self.simpleQuery('all', rawResults = rawResults, **queryArgs)
def simpleQuery(self, queryType, rawResults = False, **queryArgs):
"""General interface for simple queries.
'queryType' takes the arguments known to the ArangoDB, for instance: 'all' or 'by-example'.
See the ArangoDB documentation for a list of valid 'queryType's.
        If 'rawResults' is set to 'True', the query will return dictionaries instead of 'Document' objects."""
return SimpleQuery(self, queryType, rawResults, **queryArgs)
def action(self, method, action, **params):
"""A generic 'fct' for interacting everything that does not have an assigned 'fct'."""
fct = getattr(self.connection.session, method.lower())
r = fct(self.getURL() + "/" + action, params = params)
return r.json()
def bulkSave(self, docs, onDuplicate="error", **params):
"""Parameter 'docs' must be either an iterable of documents or dictionaries.
This function will return the number of documents, created and updated, and will raise an UpdateError exception if there is at least one error.
'params' are any parameters from the ArangoDB documentation."""
payload = []
for d in docs:
if isinstance(d,dict):
payload.append(json.dumps(d, default=str))
else:
try:
payload.append(d.toJson())
except Exception as e:
payload.append(json.dumps(d.getStore(), default=str))
payload = '\n'.join(payload)
params["type"] = "documents"
params["onDuplicate"] = onDuplicate
params["collection"] = self.name
url = "%s/import" % self.database.getURL()
r = self.connection.session.post(url, params = params, data = payload)
data = r.json()
if (r.status_code == 201) and "error" not in data:
return True
if "errors" in data and data["errors"] > 0:
raise UpdateError("%d documents could not be created" % data["errors"], data)
elif data["error"]:
raise UpdateError("Documents could not be created", data)
return data["updated"] + data["created"]
def bulkImport_json(self, filename, onDuplicate="error", formatType="auto", **params):
"""Bulk import from a file following the ArangoDB key-value format."""
url = "%s/import" % self.database.getURL()
params["onDuplicate"] = onDuplicate
params["collection"] = self.name
params["type"] = formatType
with open(filename) as f:
data = f.read()
r = self.connection.session.post(url, params = params, data = data)
if r.status_code != 201:
raise UpdateError('Unable to bulk import JSON', r)
def bulkImport_values(self, filename, onDuplicate="error", **params):
"""Bulk import from a file following the ArangoDB json format."""
url = "%s/import" % self.database.getURL()
params["onDuplicate"] = onDuplicate
params["collection"] = self.name
with open(filename) as f:
data = f.read()
r = self.connection.session.post(url, params = params, data = data)
if r.status_code != 201:
raise UpdateError('Unable to bulk import values', r)
def truncate(self):
"""Delete every document in the collection."""
return self.action('PUT', 'truncate')
def empty(self):
"""Alias for truncate."""
return self.truncate()
def load(self):
"""Load collection in memory."""
return self.action('PUT', 'load')
def unload(self):
"""Unload collection from memory."""
return self.action('PUT', 'unload')
def revision(self):
"""Return the current revision."""
return self.action('GET', 'revision')["revision"]
def properties(self):
"""Return the current properties."""
return self.action('GET', 'properties')
def checksum(self):
"""Return the current checksum."""
return self.action('GET', 'checksum')["checksum"]
def count(self):
"""Return the number of documents in the collection."""
return self.action('GET', 'count')["count"]
def figures(self):
"""A more elaborate version of 'count', see the ArangoDB documentation for more."""
return self.action('GET', 'figures')
def getType(self):
"""Return a word describing the type of the collection (edges or ducments) instead of a number.
If you prefer the number it is in 'self.type'."""
if self.type == CONST.COLLECTION_DOCUMENT_TYPE:
return "document"
elif self.type == CONST.COLLECTION_EDGE_TYPE:
return "edge"
raise ValueError("The collection is of Unknown type %s" % self.type)
def getStatus(self):
"""Return a word describing the status of the collection (loaded, loading, deleted, unloaded, newborn) instead of a number, if you prefer the number it is in 'self.status'."""
if self.status == CONST.COLLECTION_LOADING_STATUS:
return "loading"
elif self.status == CONST.COLLECTION_LOADED_STATUS:
return "loaded"
elif self.status == CONST.COLLECTION_DELETED_STATUS:
return "deleted"
elif self.status == CONST.COLLECTION_UNLOADED_STATUS:
return "unloaded"
elif self.status == CONST.COLLECTION_NEWBORN_STATUS:
return "newborn"
raise ValueError("The collection has an Unknown status %s" % self.status)
def __len__(self):
"""Return the number of documents in the collection."""
return self.count()
def __repr__(self):
return "ArangoDB collection name: %s, id: %s, type: %s, status: %s" % (self.name, self.id, self.getType(), self.getStatus())
def __getitem__(self, key):
"""Return a document from the cache.
If it is not there, fetch from the db and cache it first.
If the cache is not activated, this is equivalent to 'fetchDocument(rawResults=False)'."""
if self.documentCache is None:
return self.fetchDocument(key, rawResults = False)
try:
return self.documentCache[key]
except KeyError:
doc = self.fetchDocument(key, rawResults = False)
self.documentCache.cache(doc)
return doc
def __contains__(self, key):
"""Return 'True' or 'False' whether the doc is in the collection."""
try:
self.fetchDocument(key, rawResults = False)
return True
except DocumentNotFoundError as e:
return False
|
class Collection(with_metaclass(Collection_metaclass, object)):
'''A document collection. Collections are meant to be instantiated by databases.'''
def __init__(self, database, jsonData):
pass
def getDefaultDocument(self, fields=None, dct=None):
pass
def getURL(self):
pass
def getDocumentsURL(self):
pass
def getIndexes(self):
'''Fill 'self.indexes' with all the indexes associated with the collection and return it.'''
pass
    def getIndex(self, name):
pass
def activateCache(self, cacheSize):
'''Activate the caching system.
Cached documents are only available through the __getitem__ interface.'''
pass
def deactivateCache(self):
'''Deactivate the caching system.'''
pass
def delete(self):
'''Delete the collection from the database.'''
pass
def createDocument(self, initDict = None):
'''Create and return a completely empty document unless the initial document is set via 'initDict'.'''
pass
def _writeBatch(self):
pass
def _saveBatch(self, document, params):
pass
def _updateBatch(self):
pass
def _patchBatch(self, document, params):
pass
def _removeBatch(self):
pass
def _deleteBatch(self, document, params):
pass
def _finalizeBatch(self):
pass
def importBulk(self, data, **addParams):
pass
def exportDocs( self, **data):
pass
def ensureHashIndex(self, fields, unique = False, sparse = True, deduplicate = False, name = None):
'''Create a hash index if it does not already exist, then return it.'''
pass
def ensureSkiplistIndex(self, fields, unique = False, sparse = True, deduplicate = False, name = None):
'''Create a skiplist index if it does not already exist, then return it.'''
pass
def ensurePersistentIndex(self, fields, unique = False, sparse = True, deduplicate = False, name = None):
'''Create a persistent index if it does not already exist, then return it.'''
pass
def ensureTTLIndex(self, fields, expireAfter, unique = False, sparse = True, name = None):
'''Create a TTL index if it does not already exist, then return it.'''
pass
def ensureGeoIndex(self, fields, name = None):
'''Create a geo index if it does not already exist, then return it.'''
pass
def ensureFulltextIndex(self, fields, minLength = None, name = None):
'''Create a fulltext index if it does not already exist, then return it.'''
pass
def ensureIndex(self, index_type, fields, name=None, **index_args):
'''Create an index of any type.'''
pass
def restoreIndexes(self, indexes_dct=None):
'''Restore all previously removed indexes.'''
pass
def validatePrivate(self, field, value):
'''Validate a private field value.'''
pass
@classmethod
def hasField(cls, fieldName):
'''Return 'True' or 'False' whether the collection has field 'K' in its schema.
Use the dot notation for the nested fields: address.street'''
pass
def fetchDocument(self, key, rawResults = False, rev = None):
        '''Fetch a document from the collection given its key.
This function always goes straight to the db and bypasses the cache.
If you want to take advantage of the cache use the '__getitem__' interface: collection[key]'''
pass
def fetchByExample(self, exampleDict, batchSize, rawResults = False, **queryArgs):
''''exampleDict' should be something like {'age' : 28}.'''
pass
def fetchFirstExample(self, exampleDict, rawResults = False):
''''exampleDict' should be something like {'age' : 28}.
Return the first example found that matches the example, still in a 'SimpleQuery' object.'''
pass
def fetchAll(self, rawResults = False, **queryArgs):
'''Returns all the documents in the collection.
        You can use the optional arguments 'skip' and 'limit'::
            fetchAll(limit = 3, skip = 10)'''
pass
def simpleQuery(self, queryType, rawResults = False, **queryArgs):
'''General interface for simple queries.
'queryType' takes the arguments known to the ArangoDB, for instance: 'all' or 'by-example'.
See the ArangoDB documentation for a list of valid 'queryType's.
        If 'rawResults' is set to 'True', the query will return dictionaries instead of 'Document' objects.'''
pass
def action(self, method, action, **params):
        '''A generic 'fct' for interacting with everything that does not have an assigned 'fct'.'''
pass
def bulkSave(self, docs, onDuplicate="error", **params):
'''Parameter 'docs' must be either an iterable of documents or dictionaries.
This function will return the number of documents, created and updated, and will raise an UpdateError exception if there is at least one error.
'params' are any parameters from the ArangoDB documentation.'''
pass
def bulkImport_json(self, filename, onDuplicate="error", formatType="auto", **params):
'''Bulk import from a file following the ArangoDB key-value format.'''
pass
def bulkImport_values(self, filename, onDuplicate="error", **params):
'''Bulk import from a file following the ArangoDB json format.'''
pass
def truncate(self):
'''Delete every document in the collection.'''
pass
def empty(self):
'''Alias for truncate.'''
pass
def load(self):
'''Load collection in memory.'''
pass
def unload(self):
'''Unload collection from memory.'''
pass
def revision(self):
'''Return the current revision.'''
pass
def properties(self):
'''Return the current properties.'''
pass
def checksum(self):
'''Return the current checksum.'''
pass
def count(self):
'''Return the number of documents in the collection.'''
pass
def figures(self):
'''A more elaborate version of 'count', see the ArangoDB documentation for more.'''
pass
def getType(self):
        '''Return a word describing the type of the collection (edges or documents) instead of a number.
If you prefer the number it is in 'self.type'.'''
pass
def getStatus(self):
'''Return a word describing the status of the collection (loaded, loading, deleted, unloaded, newborn) instead of a number, if you prefer the number it is in 'self.status'.'''
pass
def __len__(self):
'''Return the number of documents in the collection.'''
pass
def __repr__(self):
pass
def __getitem__(self, key):
'''Return a document from the cache.
If it is not there, fetch from the db and cache it first.
If the cache is not activated, this is equivalent to 'fetchDocument(rawResults=False)'.'''
pass
def __contains__(self, key):
'''Return 'True' or 'False' whether the doc is in the collection.'''
pass
| 55 | 39 | 11 | 1 | 9 | 1 | 3 | 0.12 | 1 | 22 | 13 | 22 | 52 | 12 | 53 | 53 | 643 | 94 | 491 | 153 | 436 | 58 | 421 | 145 | 367 | 12 | 1 | 3 | 153 |
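
The usual workflow with Collection is to subclass it with a '_fields' schema and let the database instantiate it. A short sketch, assuming 'db' is an open pyArango database (class and field names are illustrative):

from pyArango.collection import Collection, Field

class Users(Collection):
    _validation = {"on_save": True, "allow_foreign_fields": True}
    _fields = {"name": Field(), "email": Field()}

users = db.createCollection("Users")  # the class was registered by the metaclass
doc = users.createDocument({"name": "Alice", "email": "alice@example.com"})
doc.save()
print(users.count(), users.fetchDocument(doc._key)["name"])
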
6,475 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/collection.py
|
pyArango.collection.Collection_metaclass
|
class Collection_metaclass(type):
"""The metaclass that takes care of keeping a register of all collection types."""
collectionClasses = {}
_validationDefault = {
'on_save' : False,
'on_set' : False,
'on_load' : False,
'allow_foreign_fields' : True
}
def __new__(cls, name, bases, attrs):
def check_set_ConfigDict(dictName):
defaultDict = getattr(cls, "%sDefault" % dictName)
if dictName not in attrs:
attrs[dictName] = defaultDict
else:
for k, v in attrs[dictName].items():
if k not in defaultDict:
raise KeyError("Unknown validation parameter '%s' for class '%s'" %(k, name))
if type(v) is not type(defaultDict[k]):
raise ValueError("'%s' parameter '%s' for class '%s' is of type '%s' instead of '%s'" %(dictName, k, name, type(v), type(defaultDict[k])))
for k, v in defaultDict.items():
if k not in attrs[dictName]:
attrs[dictName][k] = v
check_set_ConfigDict('_validation')
clsObj = type.__new__(cls, name, bases, attrs)
Collection_metaclass.collectionClasses[name] = clsObj
return clsObj
@classmethod
def getCollectionClass(cls, name):
"""Return the class object of a collection given its 'name'."""
try:
return cls.collectionClasses[name]
except KeyError:
raise KeyError( "There is no Collection Class of type: '%s'; currently supported values: [%s]" % (name, ', '.join(getCollectionClasses().keys())) )
@classmethod
def isCollection(cls, name) -> bool:
"""return 'True' or 'False' whether 'name' is the name of collection."""
return name in cls.collectionClasses
@classmethod
def isDocumentCollection(cls, name) -> bool:
"""Return 'True' or 'False' whether 'name' is the name of a document collection."""
try:
col = cls.getCollectionClass(name)
return issubclass(col, Collection)
except KeyError:
return False
@classmethod
def isEdgeCollection(cls, name) -> bool:
"""Return 'True' or 'False' whether 'name' is the name of an edge collection."""
try:
col = cls.getCollectionClass(name)
return issubclass(col, Edges)
except KeyError:
return False
|
class Collection_metaclass(type):
'''The metaclass that takes care of keeping a register of all collection types.'''
def __new__(cls, name, bases, attrs):
pass
def check_set_ConfigDict(dictName):
pass
@classmethod
def getCollectionClass(cls, name):
'''Return the class object of a collection given its 'name'.'''
pass
@classmethod
def isCollection(cls, name) -> bool:
        '''Return 'True' or 'False' whether 'name' is the name of a collection.'''
pass
@classmethod
def isDocumentCollection(cls, name) -> bool:
'''Return 'True' or 'False' whether 'name' is the name of a document collection.'''
pass
@classmethod
def isEdgeCollection(cls, name) -> bool:
'''Return 'True' or 'False' whether 'name' is the name of an edge collection.'''
pass
| 11 | 5 | 10 | 1 | 8 | 1 | 3 | 0.1 | 1 | 5 | 2 | 0 | 1 | 0 | 5 | 18 | 64 | 10 | 49 | 18 | 38 | 5 | 39 | 14 | 32 | 7 | 2 | 3 | 15 |
6,476 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/collection.py
|
pyArango.collection.DocumentCache
|
class DocumentCache(object):
"""Document cache for collection, with insert, deletes and updates in O(1)."""
def __init__(self, cacheSize):
self.cacheSize = cacheSize
self.cacheStore = {}
self.head = None
self.tail = None
def cache(self, doc):
if doc._key in self.cacheStore:
ret = self.cacheStore[doc._key]
if ret.prev is not None:
ret.prev.nextDoc = ret.nextDoc
self.head.prev = ret
ret.nextDoc = self.head
self.head = ret
return self.head
else:
if len(self.cacheStore) == 0:
ret = CachedDoc(doc, prev = None, nextDoc = None)
self.head = ret
self.tail = self.head
self.cacheStore[doc._key] = ret
else:
if len(self.cacheStore) >= self.cacheSize:
del(self.cacheStore[self.tail._key])
self.tail = self.tail.prev
self.tail.nextDoc = None
ret = CachedDoc(doc, prev = None, nextDoc = self.head)
self.head.prev = ret
self.head = self.head.prev
self.cacheStore[doc._key] = ret
def delete(self, _key):
"""Remove a document from the cache."""
try:
doc = self.cacheStore[_key]
doc.prev.nextDoc = doc.nextDoc
doc.nextDoc.prev = doc.prev
del(self.cacheStore[_key])
except KeyError:
raise KeyError("Document with _key %s is not available in cache" % _key)
def getChain(self):
"""Return a list of keys representing the chain of documents."""
l = []
h = self.head
while h:
l.append(h._key)
h = h.nextDoc
return l
def stringify(self) -> str:
"""Return a pretty string of 'getChain()'."""
l = []
h = self.head
while h:
l.append(str(h._key))
h = h.nextDoc
return "<->".join(l)
def __getitem__(self, _key):
try:
ret = self.cacheStore[_key]
self.cache(ret)
return ret
except KeyError:
raise KeyError("Document with _key %s is not available in cache" % _key)
def __repr__(self):
return "[DocumentCache, size: %d, full: %d]" %(self.cacheSize, len(self.cacheStore))
|
class DocumentCache(object):
    '''Document cache for a collection, with inserts, deletes and updates in O(1).'''
def __init__(self, cacheSize):
pass
def cache(self, doc):
pass
def delete(self, _key):
'''Remove a document from the cache.'''
pass
def getChain(self):
'''Return a list of keys representing the chain of documents.'''
pass
def stringify(self) -> str:
'''Return a pretty string of 'getChain()'.'''
pass
def __getitem__(self, _key):
pass
def __repr__(self):
pass
| 8 | 4 | 9 | 0 | 9 | 0 | 2 | 0.07 | 1 | 3 | 1 | 0 | 7 | 4 | 7 | 7 | 73 | 8 | 61 | 19 | 53 | 4 | 59 | 19 | 51 | 5 | 1 | 3 | 15 |
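
DocumentCache is an LRU-style cache keyed by '_key'; it is normally driven through Collection.activateCache and the collection's __getitem__, as the Collection code above shows. A minimal sketch (assuming 'users' is a collection and 'alice' an existing key):

users.activateCache(cacheSize=512)

doc = users["alice"]        # first access hits the server and caches the document
doc_again = users["alice"]  # second access is served from the cache

print(users.documentCache.stringify())  # keys chained from most to least recently used
users.deactivateCache()
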
6,477 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/collection.py
|
pyArango.collection.Edges
|
class Edges(Collection):
"""The default edge collection. All edge Collections must inherit from it."""
arangoPrivates = ["_id", "_key", "_rev", "_to", "_from"]
def __init__(self, database, jsonData):
"""This one is meant to be called by the database."""
Collection.__init__(self, database, jsonData)
self.documentClass = Edge
self.edgesURL = "%s/edges/%s" % (self.database.getURL(), self.name)
@classmethod
def validateField(cls, fieldName, value):
"""Check if 'value' is valid for field 'fieldName'.
If the validation fails, raise a 'SchemaViolation' or a 'ValidationError'.
For nested dicts ex: {address : { street: xxx} }, 'fieldName' can take the form 'address.street'."""
try:
valValue = Collection.validateField(fieldName, value)
except SchemaViolation as e:
if fieldName == "_from" or fieldName == "_to":
return True
raise e
return valValue
def createEdge(self, initValues = None):
"""Create an edge populated with defaults."""
return self.createDocument(initValues)
def getInEdges(self, vertex, rawResults = False):
"""An alias for 'getEdges()' that returns only the in 'Edges'."""
return self.getEdges(vertex, inEdges = True, outEdges = False, rawResults = rawResults)
def getOutEdges(self, vertex, rawResults = False):
"""An alias for 'getEdges()' that returns only the out 'Edges'."""
return self.getEdges(vertex, inEdges = False, outEdges = True, rawResults = rawResults)
def getEdges(self, vertex, inEdges = True, outEdges = True, rawResults = False):
"""Return in, out, or both edges linked to a given document.
Vertex can be either a 'Document' object or a string for an '_id'.
If 'rawResults' is set to 'True', return the results just as fetched without any processing.
Otherwise, return a list of Edge objects."""
if isinstance(vertex, Document):
vId = vertex._id
elif isinstance(vertex,str) or isinstance(vertex,bytes):
vId = vertex
else:
raise ValueError("Vertex is neither a Document nor a String")
params = {"vertex" : vId}
if inEdges and outEdges:
pass
elif inEdges:
params["direction"] = "in"
elif outEdges:
params["direction"] = "out"
else:
raise ValueError("inEdges, outEdges or both must have a boolean value")
r = self.connection.session.get(self.edgesURL, params = params)
data = r.json()
if r.status_code == 200:
if not rawResults:
ret = []
for e in data["edges"]:
ret.append(Edge(self, e))
return ret
return data["edges"]
else:
raise CreationError("Unable to return edges for vertex: %s" % vId, data)
|
class Edges(Collection):
'''The default edge collection. All edge Collections must inherit from it.'''
def __init__(self, database, jsonData):
'''This one is meant to be called by the database.'''
pass
@classmethod
def validateField(cls, fieldName, value):
'''Check if 'value' is valid for field 'fieldName'.
If the validation fails, raise a 'SchemaViolation' or a 'ValidationError'.
For nested dicts ex: {address : { street: xxx} }, 'fieldName' can take the form 'address.street'.'''
pass
def createEdge(self, initValues = None):
'''Create an edge populated with defaults.'''
pass
def getInEdges(self, vertex, rawResults = False):
'''An alias for 'getEdges()' that returns only the in 'Edges'.'''
pass
def getOutEdges(self, vertex, rawResults = False):
'''An alias for 'getEdges()' that returns only the out 'Edges'.'''
pass
def getEdges(self, vertex, inEdges = True, outEdges = True, rawResults = False):
'''Return in, out, or both edges linked to a given document.
Vertex can be either a 'Document' object or a string for an '_id'.
If 'rawResults' is set to 'True', return the results just as fetched without any processing.
Otherwise, return a list of Edge objects.'''
pass
| 8 | 7 | 10 | 1 | 8 | 2 | 3 | 0.25 | 1 | 7 | 4 | 6 | 5 | 2 | 6 | 59 | 71 | 11 | 48 | 19 | 40 | 12 | 41 | 17 | 34 | 9 | 2 | 3 | 16 |
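
Edges behaves like a Collection whose documents carry '_from' and '_to'. A hedged sketch, assuming 'knows' is an Edges collection and 'alice'/'bob' are existing documents in a 'users' collection (all names illustrative):

knows = db["knows"]
alice = db["users"]["alice"]
bob = db["users"]["bob"]

edge = knows.createEdge()
edge["_from"] = alice._id
edge["_to"] = bob._id
edge["since"] = 2020
edge.save()

for e in knows.getInEdges(bob):  # only edges pointing at bob
    print(e["_from"], "->", e["_to"])
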
6,478 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/collection.py
|
pyArango.collection.Field
|
class Field(object):
"""The class for defining pyArango fields."""
def __init__(self, validators = None, default = None):
"""Validators must be a list of validators.
'default' can also be a callable."""
if not validators:
validators = []
self.validators = validators
self.default = default
def validate(self, value):
"""Check the validity of 'value' given the list of validators."""
for v in self.validators:
v.validate(value)
return True
def __str__(self):
strv = []
for v in self.validators:
strv.append(str(v))
return "<Field, validators: '%s'>" % ', '.join(strv)
|
class Field(object):
'''The class for defining pyArango fields.'''
def __init__(self, validators = None, default = None):
'''Validators must be a list of validators.
'default' can also be a callable.'''
pass
def validate(self, value):
'''Check the validity of 'value' given the list of validators.'''
pass
def __str__(self):
pass
| 4 | 3 | 6 | 0 | 5 | 1 | 2 | 0.27 | 1 | 1 | 0 | 0 | 3 | 2 | 3 | 3 | 22 | 3 | 15 | 9 | 11 | 4 | 15 | 9 | 11 | 2 | 1 | 1 | 6 |
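
Field pairs an optional default with a list of validators; validate() just runs each validator in turn. A minimal sketch, assuming the NotNull and Length validators shipped in pyArango.validation:

from pyArango.collection import Collection, Field
from pyArango.validation import NotNull, Length

class People(Collection):
    _validation = {"on_save": True}
    _fields = {
        "name": Field(validators=[NotNull(), Length(1, 64)]),
        "nickname": Field(default=""),
    }

# Field.validate raises a ValidationError if any validator rejects the value
People._fields["name"].validate("Alice")
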
6,479 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/collection.py
|
pyArango.collection.SystemCollection
|
class SystemCollection(Collection):
"""For all collections with 'isSystem=True'."""
def __init__(self, database, jsonData):
Collection.__init__(self, database, jsonData)
|
class SystemCollection(Collection):
'''For all collections with 'isSystem=True'.'''
def __init__(self, database, jsonData):
pass
| 2 | 1 | 2 | 0 | 2 | 0 | 1 | 0.33 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 54 | 4 | 0 | 3 | 2 | 1 | 1 | 3 | 2 | 1 | 1 | 2 | 0 | 1 |
6,480 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/connection.py
|
pyArango.connection.AikidoSession
|
class AikidoSession:
"""Magical Aikido being that you probably do not need to access directly
that deflects every http request to requests in the most graceful way.
    It will also save basic stats on requests in its attribute '.log'.
"""
class Holder(object):
def __init__(self, fct, auth, max_conflict_retries=5, verify=True, timeout=30):
self.fct = fct
self.auth = auth
self.max_conflict_retries = max_conflict_retries
if not isinstance(verify, bool) and not isinstance(verify, CA_Certificate) and not not isinstance(verify, str) :
raise ValueError("'verify' argument can only be of type: bool, CA_Certificate or str ")
self.verify = verify
self.timeout = timeout
def __call__(self, *args, **kwargs):
if self.auth:
kwargs["auth"] = self.auth
if isinstance(self.verify, CA_Certificate):
kwargs["verify"] = self.verify.get_file_path()
else :
kwargs["verify"] = self.verify
kwargs["timeout"] = self.timeout
try:
do_retry = True
retry = 0
while do_retry and retry < self.max_conflict_retries:
ret = self.fct(*args, **kwargs)
do_retry = ret.status_code == 1200
try :
data = ret.json()
do_retry = do_retry or ("errorNum" in data and data["errorNum"] == 1200)
except JSONDecodeError:
pass
retry += 1
except:
print ("===\nUnable to establish connection, perhaps arango is not running.\n===")
raise
if len(ret.content) < 1:
raise ConnectionError("Empty server response", ret.url, ret.status_code, ret.content)
elif ret.status_code == 401:
raise ConnectionError("Unauthorized access, you must supply a (username, password) with the correct credentials", ret.url, ret.status_code, ret.content)
ret.json = JsonHook(ret)
return ret
def __init__(
self,
username,
password,
verify=True,
cert=None,
max_conflict_retries=5,
max_retries=5,
single_session=True,
log_requests=False,
pool_maxsize=10,
timeout=30,
):
if username:
self.auth = (username, password)
else:
self.auth = None
self.pool_maxsize = pool_maxsize
self.verify = verify
self.cert = cert
self.max_retries = max_retries
self.log_requests = log_requests
self.max_conflict_retries = max_conflict_retries
self.timeout = timeout
self.session = None
if single_session:
self.session = self._make_session()
if log_requests:
self.log = {}
self.log["nb_request"] = 0
self.log["requests"] = {}
def _make_session(self):
session = requests.Session()
kwargs = {
'max_retries': self.max_retries,
'pool_connections': self.pool_maxsize,
'pool_maxsize': self.pool_maxsize,
#'pool_block': True # We don't want to lose connections
}
http = requests.adapters.HTTPAdapter(**kwargs)
https = requests.adapters.HTTPAdapter(**kwargs)
session.mount('http://', http)
session.mount('https://', https)
if self.cert:
session.cert = self.cert
return session
def __getattr__(self, request_function_name):
if self.session is not None:
session = self.session
else:
session = self._make_session()
try:
request_function = getattr(session, request_function_name)
except AttributeError:
raise AttributeError("Attribute '%s' not found (no Aikido move available)" % request_function_name)
auth = object.__getattribute__(self, "auth")
verify = object.__getattribute__(self, "verify")
timeout = object.__getattribute__(self, "timeout")
if self.log_requests:
log = object.__getattribute__(self, "log")
log["nb_request"] += 1
log["requests"][request_function.__name__] += 1
return AikidoSession.Holder(request_function, auth, max_conflict_retries=self.max_conflict_retries, verify=verify, timeout=timeout)
def disconnect(self):
pass
|
class AikidoSession:
'''Magical Aikido being that you probably do not need to access directly
that deflects every http request to requests in the most graceful way.
It will also save basic stats on requests in it's attribute '.log'.
'''
class Holder(object):
def __init__(self, fct, auth, max_conflict_retries=5, verify=True, timeout=30):
pass
def __call__(self, *args, **kwargs):
pass
def __init__(self, fct, auth, max_conflict_retries=5, verify=True, timeout=30):
pass
def _make_session(self):
pass
def __getattr__(self, request_function_name):
pass
def disconnect(self):
pass
| 8 | 1 | 19 | 2 | 17 | 0 | 4 | 0.05 | 0 | 4 | 1 | 0 | 4 | 10 | 4 | 4 | 125 | 17 | 103 | 49 | 83 | 5 | 83 | 37 | 75 | 8 | 0 | 3 | 21 |
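Note: AikidoSession is normally built for you by pyArango's Connection rather than instantiated directly. A minimal, hedged sketch; the server URL and credentials are placeholders.

from pyArango.connection import Connection

# Connection creates an AikidoSession internally and routes every HTTP call through it
conn = Connection(arangoURL="http://127.0.0.1:8529", username="root", password="secret")
db = conn["_system"]   # databases are reached through the connection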
6,481 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/connection.py
|
pyArango.connection.JsonHook
|
class JsonHook(object):
"""This one replaces requests' original json() function. If a call to json() fails, it will print a message with the request content"""
def __init__(self, ret):
self.ret = ret
self.ret.json_originalFct = self.ret.json
def __call__(self, *args, **kwargs):
try:
return self.ret.json_originalFct(*args, **kwargs)
except Exception as e:
print( "Unable to get json for request: %s. Content: %s" % (self.ret.url, self.ret.content) )
raise e
|
class JsonHook(object):
'''This one replaces requests' original json() function. If a call to json() fails, it will print a message with the request content'''
def __init__(self, ret):
pass
def __call__(self, *args, **kwargs):
pass
| 3 | 1 | 5 | 0 | 5 | 0 | 2 | 0.1 | 1 | 1 | 0 | 0 | 2 | 1 | 2 | 2 | 12 | 1 | 10 | 5 | 7 | 1 | 10 | 4 | 7 | 2 | 1 | 1 | 3 |
6,482 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/database.py
|
pyArango.database.DBHandle
|
class DBHandle(Database):
"As the loading of a Database also triggers the loading of collections and graphs within. Only handles are loaded first. The full database are loaded on demand in a fully transparent manner."
def __init__(self, connection, name):
self.connection = connection
self.name = name
def __getattr__(self, k):
name = Database.__getattribute__(self, 'name')
connection = Database.__getattribute__(self, 'connection')
Database.__init__(self, connection, name)
return Database.__getattribute__(self, k)
|
class DBHandle(Database):
'''As the loading of a Database also triggers the loading of collections and graphs within. Only handles are loaded first. The full database are loaded on demand in a fully transparent manner.'''
def __init__(self, connection, name):
pass
def __getattr__(self, k):
pass
| 3 | 1 | 4 | 0 | 4 | 0 | 1 | 0.11 | 1 | 0 | 0 | 0 | 2 | 2 | 2 | 31 | 11 | 1 | 9 | 7 | 6 | 1 | 9 | 7 | 6 | 1 | 2 | 0 | 2 |
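Note: a short sketch of the lazy loading described above, reusing the Connection from the previous sketch; the database name is illustrative. The handle only becomes a fully loaded Database on its first real attribute access.

handle = conn["my_database"]      # still a lightweight DBHandle at this point
cols = handle.collections         # first attribute access triggers the full Database load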
6,483 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/database.py
|
pyArango.database.Database
|
class Database(object):
"""Databases are meant to be instanciated by connections"""
def __init__(self, connection, name):
self.name = name
self.connection = connection
self.action = DatabaseAction(self)
self.collections = {}
self.graphs = {}
self.foxx = Foxx(self)
self.tasks = Tasks(self)
self.reload()
def getURL(self):
return '%s/_db/%s/_api' % (self.connection.getEndpointURL(), self.name)
def getCollectionsURL(self):
return '%s/collection' % (self.getURL())
def getCursorsURL(self):
return '%s/cursor' % (self.getURL())
def getExplainURL(self):
return '%s/explain' % (self.getURL())
def getGraphsURL(self):
return "%s/gharial" % self.getURL()
def getTransactionURL(self):
return "%s/transaction" % self.getURL()
def reloadCollections(self):
"reloads the collection list."
r = self.connection.session.get(self.getCollectionsURL())
data = r.json()
if r.status_code == 200:
self.collections = {}
for colData in data["result"]:
colName = colData['name']
if colData['isSystem']:
colObj = COL.SystemCollection(self, colData)
else:
try:
colClass = COL.getCollectionClass(colName)
colObj = colClass(self, colData)
except KeyError:
if colData["type"] == CONST.COLLECTION_EDGE_TYPE:
colObj = COL.Edges(self, colData)
elif colData["type"] == CONST.COLLECTION_DOCUMENT_TYPE:
colObj = COL.Collection(self, colData)
else:
print(("Warning!! Collection of unknown type: %d, trying to load it as Collection nonetheless." % colData["type"]))
colObj = COL.Collection(self, colData)
self.collections[colName] = colObj
else:
raise UpdateError(data["errorMessage"], data)
def reloadGraphs(self):
"reloads the graph list"
r = self.connection.session.get(self.getGraphsURL())
data = r.json()
if r.status_code == 200:
self.graphs = {}
for graphData in data["graphs"]:
try:
self.graphs[graphData["_key"]] = GR.getGraphClass(graphData["_key"])(self, graphData)
except KeyError:
self.graphs[graphData["_key"]] = Graph(self, graphData)
else:
raise UpdateError(data["errorMessage"], data)
def reload(self):
"reloads collections and graphs"
self.reloadCollections()
self.reloadGraphs()
self.foxx.reload()
def createCollection(self, className = 'Collection', **colProperties):
"""Creates a collection and returns it.
        className is the name of a class inheriting from Collection or Edges; it can also be set to 'Collection' or 'Edges' in order to create untyped collections of documents or edges.
        Use colProperties to put things such as 'waitForSync = True' (see ArangoDB's doc
        for a full list of possible arguments). If a '_properties' dictionary is defined in the collection schema, arguments to this function override it"""
colClass = COL.getCollectionClass(className)
if len(colProperties) > 0:
colProperties = dict(colProperties)
else:
try:
colProperties = dict(colClass._properties)
except AttributeError:
colProperties = {}
if className != 'Collection' and className != 'Edges' and 'name' not in colProperties:
colProperties['name'] = className
else:
if 'name' not in colProperties:
raise ValueError("a 'name' argument mush be supplied if you want to create a generic collection")
if colProperties['name'] in self.collections:
raise CreationError("Database %s already has a collection named %s" % (self.name, colProperties['name']) )
if issubclass(colClass, COL.Edges) or colClass.__class__ is COL.Edges:
colProperties["type"] = CONST.COLLECTION_EDGE_TYPE
else:
colProperties["type"] = CONST.COLLECTION_DOCUMENT_TYPE
payload = json.dumps(colProperties, default=str)
req = self.connection.session.post(self.getCollectionsURL(), data = payload)
data = req.json()
if req.status_code == 200 and not data["error"]:
col = colClass(self, data)
self.collections[col.name] = col
return self.collections[col.name]
else:
raise CreationError(data["errorMessage"], data)
def fetchDocument(self, _id):
"fetchs a document using it's _id"
sid = _id.split("/")
return self[sid[0]][sid[1]]
def createGraph(self, name, createCollections = True, isSmart = False, numberOfShards = None, smartGraphAttribute = None, replicationFactor = None, writeConcern = None):
"""Creates a graph and returns it. 'name' must be the name of a class inheriting from Graph.
        Checks will be performed to make sure that every collection mentioned in the edges definition exists. Raises a ValueError in case of
a non-existing collection."""
def _checkCollectionList(lst):
for colName in lst:
if not COL.isCollection(colName):
raise ValueError("'%s' is not a defined Collection" % colName)
graphClass = GR.getGraphClass(name)
ed = []
for e in graphClass._edgeDefinitions:
if not COL.isEdgeCollection(e.edgesCollection):
raise ValueError("'%s' is not a defined Edge Collection" % e.edgesCollection)
_checkCollectionList(e.fromCollections)
_checkCollectionList(e.toCollections)
ed.append(e.toJson())
_checkCollectionList(graphClass._orphanedCollections)
options = {}
if numberOfShards:
options['numberOfShards'] = numberOfShards
if smartGraphAttribute:
options['smartGraphAttribute'] = smartGraphAttribute
if replicationFactor:
options['replicationFactor'] = replicationFactor
if writeConcern:
options['writeConcern'] = writeConcern
payload = {
"name": name,
"edgeDefinitions": ed,
"orphanCollections": graphClass._orphanedCollections
}
if isSmart:
payload['isSmart'] = isSmart
if options:
payload['options'] = options
payload = json.dumps(payload)
r = self.connection.session.post(self.getGraphsURL(), data = payload)
data = r.json()
if r.status_code == 201 or r.status_code == 202:
self.graphs[name] = graphClass(self, data["graph"])
else:
raise CreationError(data["errorMessage"], data)
return self.graphs[name]
def createSatelliteGraph(self, name, createCollections = True):
        return self.createGraph(name, createCollections, False, None, None, "satellite", None)
def hasCollection(self, name):
"""returns true if the databse has a collection by the name of 'name'"""
return name in self.collections
def hasGraph(self, name):
"""returns true if the databse has a graph by the name of 'name'"""
return name in self.graphs
def dropAllCollections(self):
"""drops all public collections (graphs included) from the database"""
for graph_name in self.graphs:
self.graphs[graph_name].delete()
for collection_name in self.collections:
# Collections whose name starts with '_' are system collections
if not collection_name.startswith('_'):
self[collection_name].delete()
return
def AQLQuery(self, query, batchSize = 100, rawResults = False, bindVars = None, options = None, count = False, fullCount = False,
json_encoder = None, **moreArgs):
"""Set rawResults = True if you want the query to return dictionnaries instead of Document objects.
You can use **moreArgs to pass more arguments supported by the api, such as ttl=60 (time to live)"""
if bindVars is None:
bindVars = {}
if options is None:
options = {}
return AQLQuery(self, query, rawResults = rawResults, batchSize = batchSize, bindVars = bindVars, options = options, count = count, fullCount = fullCount,
json_encoder = json_encoder, **moreArgs)
def __get_logger(self, logger, log_level):
if logger is None:
return None
return getattr(logger, logging.getLevelName(log_level).lower())
def fetch_element(
self, aql_query, bind_vars=None, dont_raise_error_if_empty=False,
default_output=None, logger=None, log_level=logging.DEBUG
):
"""Fetch element by running a query.
Parameters
----------
aql_query : str
aql query string.
bind_vars : dict, optional
dictonary of bind variables (the default is None)
dont_raise_error_if_empty: bool, optional
do not raise error if the returned is empty. (the default is False)
default_output: dict, optional
default output if no value is returned. (the default is None)
logger : Logger, optional
logger to log the query and result.
(the default is None means don't log)
log_level: Logger.loglevel, optional
level of the log. (the default is logging.DEBUG)
Raises
------
AQLFetchError
            When unable to fetch results or when more than one result is returned.
Returns
-------
any
an element returned by query.
"""
log = self.__get_logger(logger, log_level)
if log is not None:
log(aql_query)
if bind_vars is None:
bind_vars = {}
response = self.AQLQuery(
aql_query, bindVars=bind_vars, rawResults=True
).response
if log is not None:
log(response["result"])
num_results = len(response["result"])
if num_results == 1:
return response["result"][0]
if dont_raise_error_if_empty and num_results == 0:
return default_output
raise AQLFetchError(
"No results matched for query." if num_results == 0
else "More than one results received"
)
def fetch_list(
self, aql_query, bind_vars=None, batch_size=200,
dont_raise_error_if_empty=False, logger=None,
log_level=logging.DEBUG
):
"""Fetch list of elements by running a query and merging all the batches.
Parameters
----------
aql_query : str
aql query string.
bind_vars : dict, optional
dictonary of bind variables (the default is None)
batch_size : int, optional
fetching batch size (the default is 200)
dont_raise_error_if_empty: bool, optional
do not raise error if the returned is empty. (the default is False)
logger : Logger, optional
logger to log the query and result.
(the default is None means don't log)
log_level: Logger.loglevel, optional
level of the log. (the default is logging.DEBUG)
Raises
------
AQLFetchError
When unable to fetch results
Returns
-------
list(any)
a list returned by query.
"""
try:
log = self.__get_logger(logger, log_level)
if log is not None:
log(aql_query)
query = self.AQLQuery(
aql_query, batchSize=batch_size, rawResults=True,
bindVars=(bind_vars if bind_vars is not None else {})
)
batch_index = 0
result = []
while True:
if len(query.response['result']) == 0:
break
result.extend(query.response['result'])
batch_index += 1
query.nextBatch()
except StopIteration:
if log is not None:
log(result)
if len(result) != 0:
return result
except:
raise
if batch_index == 0 and dont_raise_error_if_empty:
return []
raise AQLFetchError(
"No results matched for query in fetching the batch index: %s." % (
batch_index
)
)
def fetch_list_as_batches(
self, aql_query, bind_vars=None, batch_size=200,
dont_raise_error_if_empty=False, logger=None,
log_level=logging.DEBUG
):
"""Fetch list of elements as batches by running the query.
        Generator which yields each batch as a result.
Parameters
----------
aql_query : str
aql query string.
bind_vars : dict, optional
dictonary of bind variables (the default is None)
batch_size : int, optional
fetching batch size (the default is 200)
dont_raise_error_if_empty: bool, optional
do not raise error if the returned is empty. (the default is False)
logger : Logger, optional
logger to log the query and result.
(the default is None means don't log)
log_level: Logger.loglevel, optional
level of the log. (the default is logging.DEBUG)
Raises
------
AQLFetchError
When unable to fetch results
Returns
-------
list(any)
a list returned by query.
"""
try:
log = self.__get_logger(logger, log_level)
if log is not None:
log(aql_query)
query = self.AQLQuery(
aql_query, batchSize=batch_size, rawResults=True,
bindVars=(bind_vars if bind_vars is not None else {})
)
batch_index = 0
while True:
if len(query.response['result']) == 0:
break
if log is not None:
log(
"batch_result for index '%s': %s",
batch_index, query.response['result']
)
yield query.response['result']
batch_index += 1
query.nextBatch()
except StopIteration:
return
except:
raise
if batch_index == 0 and dont_raise_error_if_empty:
return
raise AQLFetchError(
"No results matched for query in fetching the batch index: %s." % (
batch_index
)
)
def no_fetch_run(
self, aql_query, bind_vars=None, logger=None,
log_level=logging.DEBUG
):
"""Run query which doesn't have a return.
Parameters
----------
aql_query : str
aql query string.
bind_vars : dict, optional
dictonary of bind variables (the default is None)
logger : Logger, optional
logger to log the query and result.
(the default is None means don't log)
log_level: Logger.loglevel, optional
level of the log. (the default is logging.DEBUG)
Raises
------
AQLFetchError
When able to fetch results.
"""
log = self.__get_logger(logger, log_level)
if log is not None:
log(aql_query)
response = self.AQLQuery(
aql_query, rawResults=True,
bindVars=(bind_vars if bind_vars is not None else {})
).response
if log is not None:
log(response["result"])
if len(response["result"]) == 0:
return
raise AQLFetchError("No results should be returned for the query.")
def explainAQLQuery(self, query, bindVars = None, allPlans = False):
"""Returns an explanation of the query. Setting allPlans to True will result in ArangoDB returning all possible plans. False returns only the optimal plan"""
if bindVars is None:
bindVars = {}
payload = {'query' : query, 'bindVars' : bindVars, 'allPlans' : allPlans}
request = self.connection.session.post(self.getExplainURL(), data = json.dumps(payload, default=str))
return request.json()
def validateAQLQuery(self, query, bindVars = None, options = None):
"returns the server answer is the query is valid. Raises an AQLQueryError if not"
if bindVars is None:
bindVars = {}
if options is None:
options = {}
payload = {'query' : query, 'bindVars' : bindVars, 'options' : options}
r = self.connection.session.post(self.getCursorsURL(), data = json.dumps(payload, default=str))
data = r.json()
if r.status_code == 201 and not data["error"]:
return data
else:
raise AQLQueryError(data["errorMessage"], query, data)
def transaction(self, collections, action, waitForSync = False, lockTimeout = None, params = None):
"""Execute a server-side transaction"""
payload = {
"collections": collections,
"action": action,
"waitForSync": waitForSync}
if lockTimeout is not None:
payload["lockTimeout"] = lockTimeout
if params is not None:
payload["params"] = params
self.connection.reportStart(action)
r = self.connection.session.post(self.getTransactionURL(), data = json.dumps(payload, default=str))
self.connection.reportItem()
data = r.json()
if (r.status_code == 200 or r.status_code == 201 or r.status_code == 202) and not data.get("error"):
return data
else:
raise TransactionError(data["errorMessage"], action, data)
def __repr__(self):
return "ArangoDB database: %s" % self.name
# def __contains__(self, name):
# """if name in database"""
# return self.hasCollection(name) or self.hasGraph(name)
def __contains__(self, name_or_id):
"""allows to check if name_or_id:str is the id of an existing document"""
splid = name_or_id.split('/')
if len(splid) == 2:
col, key = splid
try:
return key in self[col]
except KeyError:
return False
else:
return self.hasCollection(name_or_id) or self.hasGraph(name_or_id)
def __getitem__(self, col_or_doc_id):
"""use database[col_or_doc_id] to get a collection from the database"""
try:
col_name, doc_key = col_or_doc_id.split('/')
return self.collections[col_name][doc_key]
except ValueError:
try:
return self.collections[col_or_doc_id]
except KeyError:
self.reload()
try:
return self.collections[col_or_doc_id]
except KeyError:
raise KeyError("Can't find any collection named : %s" % col_or_doc_id)
|
class Database(object):
    '''Databases are meant to be instantiated by connections'''
def __init__(self, connection, name):
pass
def getURL(self):
pass
def getCollectionsURL(self):
pass
def getCursorsURL(self):
pass
def getExplainURL(self):
pass
def getGraphsURL(self):
pass
def getTransactionURL(self):
pass
def reloadCollections(self):
'''reloads the collection list.'''
pass
def reloadGraphs(self):
'''reloads the graph list'''
pass
    def reload(self):
'''reloads collections and graphs'''
pass
def createCollection(self, className = 'Collection', **colProperties):
'''Creates a collection and returns it.
        className is the name of a class inheriting from Collection or Edges; it can also be set to 'Collection' or 'Edges' in order to create untyped collections of documents or edges.
        Use colProperties to put things such as 'waitForSync = True' (see ArangoDB's doc
        for a full list of possible arguments). If a '_properties' dictionary is defined in the collection schema, arguments to this function override it'''
pass
def fetchDocument(self, _id):
'''fetchs a document using it's _id'''
pass
def createGraph(self, name, createCollections = True, isSmart = False, numberOfShards = None, smartGraphAttribute = None, replicationFactor = None, writeConcern = None):
'''Creates a graph and returns it. 'name' must be the name of a class inheriting from Graph.
        Checks will be performed to make sure that every collection mentioned in the edges definition exists. Raises a ValueError in case of
a non-existing collection.'''
pass
def _checkCollectionList(lst):
pass
def createSatelliteGraph(self, name, createCollections = True):
pass
def hasCollection(self, name):
        '''returns True if the database has a collection by the name of 'name''''
pass
def hasGraph(self, name):
        '''returns True if the database has a graph by the name of 'name''''
pass
def dropAllCollections(self):
'''drops all public collections (graphs included) from the database'''
pass
def AQLQuery(self, query, batchSize = 100, rawResults = False, bindVars = None, options = None, count = False, fullCount = False,
json_encoder = None, **moreArgs):
        '''Set rawResults = True if you want the query to return dictionaries instead of Document objects.
You can use **moreArgs to pass more arguments supported by the api, such as ttl=60 (time to live)'''
pass
def __get_logger(self, logger, log_level):
pass
def fetch_element(
self, aql_query, bind_vars=None, dont_raise_error_if_empty=False,
default_output=None, logger=None, log_level=logging.DEBUG
):
'''Fetch element by running a query.
Parameters
----------
aql_query : str
aql query string.
bind_vars : dict, optional
dictonary of bind variables (the default is None)
dont_raise_error_if_empty: bool, optional
do not raise error if the returned is empty. (the default is False)
default_output: dict, optional
default output if no value is returned. (the default is None)
logger : Logger, optional
logger to log the query and result.
(the default is None means don't log)
log_level: Logger.loglevel, optional
level of the log. (the default is logging.DEBUG)
Raises
------
AQLFetchError
            When unable to fetch results or when more than one result is returned.
Returns
-------
any
an element returned by query.
'''
pass
def fetch_list(
self, aql_query, bind_vars=None, batch_size=200,
dont_raise_error_if_empty=False, logger=None,
log_level=logging.DEBUG
):
'''Fetch list of elements by running a query and merging all the batches.
Parameters
----------
aql_query : str
aql query string.
bind_vars : dict, optional
dictonary of bind variables (the default is None)
batch_size : int, optional
fetching batch size (the default is 200)
dont_raise_error_if_empty: bool, optional
do not raise error if the returned is empty. (the default is False)
logger : Logger, optional
logger to log the query and result.
(the default is None means don't log)
log_level: Logger.loglevel, optional
level of the log. (the default is logging.DEBUG)
Raises
------
AQLFetchError
When unable to fetch results
Returns
-------
list(any)
a list returned by query.
'''
pass
def fetch_list_as_batches(
self, aql_query, bind_vars=None, batch_size=200,
dont_raise_error_if_empty=False, logger=None,
log_level=logging.DEBUG
):
'''Fetch list of elements as batches by running the query.
        Generator which yields each batch as a result.
Parameters
----------
aql_query : str
aql query string.
bind_vars : dict, optional
dictonary of bind variables (the default is None)
batch_size : int, optional
fetching batch size (the default is 200)
dont_raise_error_if_empty: bool, optional
do not raise error if the returned is empty. (the default is False)
logger : Logger, optional
logger to log the query and result.
(the default is None means don't log)
log_level: Logger.loglevel, optional
level of the log. (the default is logging.DEBUG)
Raises
------
AQLFetchError
When unable to fetch results
Returns
-------
list(any)
a list returned by query.
'''
pass
def no_fetch_run(
self, aql_query, bind_vars=None, logger=None,
log_level=logging.DEBUG
):
'''Run query which doesn't have a return.
Parameters
----------
aql_query : str
aql query string.
bind_vars : dict, optional
dictonary of bind variables (the default is None)
logger : Logger, optional
logger to log the query and result.
(the default is None means don't log)
log_level: Logger.loglevel, optional
level of the log. (the default is logging.DEBUG)
Raises
------
AQLFetchError
When able to fetch results.
'''
pass
def explainAQLQuery(self, query, bindVars = None, allPlans = False):
'''Returns an explanation of the query. Setting allPlans to True will result in ArangoDB returning all possible plans. False returns only the optimal plan'''
pass
def validateAQLQuery(self, query, bindVars = None, options = None):
        '''returns the server answer if the query is valid. Raises an AQLQueryError if not'''
pass
def transaction(self, collections, action, waitForSync = False, lockTimeout = None, params = None):
'''Execute a server-side transaction'''
pass
def __repr__(self):
pass
def __contains__(self, name_or_id):
'''allows to check if name_or_id:str is the id of an existing document'''
pass
def __getitem__(self, col_or_doc_id):
'''use database[col_or_doc_id] to get a collection from the database'''
pass
| 31 | 20 | 16 | 2 | 11 | 4 | 3 | 0.36 | 1 | 18 | 12 | 1 | 29 | 7 | 29 | 29 | 524 | 76 | 329 | 101 | 283 | 119 | 268 | 86 | 237 | 10 | 1 | 5 | 102 |
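Note: a hedged usage sketch of the Database helpers recorded above; 'db' is assumed to be a Database obtained from a Connection as in the earlier sketches, and the collection name, query and bind variables are made up for illustration.

users = db.createCollection(name="users")           # untyped document collection

results = db.AQLQuery(
    "FOR u IN users FILTER u.age > @min_age RETURN u",
    rawResults=True,                                 # plain dicts instead of Document objects
    batchSize=100,
    bindVars={"min_age": 21},
)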
6,484 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/document.py
|
pyArango.document.Document
|
class Document(object):
"""The class that represents a document. Documents are meant to be instanciated by collections"""
def __init__(self, collection, jsonFieldInit = None, on_load_validation=False) :
if jsonFieldInit is None :
jsonFieldInit = {}
self.privates = ["_id", "_key", "_rev"]
self.reset(collection, jsonFieldInit, on_load_validation=on_load_validation)
self.typeName = "ArangoDoc"
# self._store = None
def reset(self, collection, jsonFieldInit = None, on_load_validation=False) :
"""replaces the current values in the document by those in jsonFieldInit"""
if not jsonFieldInit:
jsonFieldInit = {}
for k in self.privates:
setattr(self, k, None)
self.collection = collection
self.connection = self.collection.connection
self.setPrivates(jsonFieldInit)
self._store = DocumentStore(self.collection, validators=self.collection._fields, initDct=jsonFieldInit, validateInit=on_load_validation)
if self.collection._validation['on_load']:
self.validate()
self.modified = True
def to_default(self):
"""reset the document to the default values"""
self.reset(self.collection, self.collection.getDefaultDocument())
def fill_default(self):
"""reset the document to the default values"""
self._store.fill_default()
def validate(self):
"""validate the document"""
self._store.validate()
for pField in self.collection.arangoPrivates:
self.collection.validatePrivate(pField, getattr(self, pField))
def setPrivates(self, fieldDict):
"""will set self._id, self._rev and self._key field."""
for priv in self.privates:
if priv in fieldDict:
setattr(self, priv, fieldDict[priv])
# else:
# setattr(self, priv, None)
# if priv not in ["_from", "_to"]:
def getURL(self):
if self._id is None:
return AttributeError("An unsaved document cannot have an URL")
return "%s/%s" % (self.collection.getDocumentsURL(), self._id)
def set(self, fieldDict):
"""set the document with a dictionary"""
self.setPrivates(fieldDict)
self._store.set(fieldDict)
def save(self, waitForSync = False, **docArgs):
"""Saves the document to the database by either performing a POST (for a new document) or a PUT (complete document overwrite).
If you want to only update the modified fields use the .patch() function.
Use docArgs to put things such as 'waitForSync = True' (for a full list cf ArangoDB's doc).
It will only trigger a saving of the document if it has been modified since the last save. If you want to force the saving you can use forceSave()"""
self._store.fill_default()
payload = self._store.getStore()
# print(payload)
self._save(payload, waitForSync = False, **docArgs)
def _save(self, payload, waitForSync = False, **docArgs):
if self.modified:
params = dict(docArgs)
params.update({'collection': self.collection.name, "waitForSync" : waitForSync })
if self.collection._validation['on_save']:
self.validate()
if self.collection._isBulkInProgress:
if self._key is not None:
payload["_key"] = self._key
self.collection._saveBatch(self, params)
return self._store.resetPatch()
if self._id is None:
if self._key is not None:
payload["_key"] = self._key
payload = json.dumps(payload, default=str)
r = self.connection.session.post(self.collection.getDocumentsURL(), params = params, data = payload)
update = False
data = r.json()
self.setPrivates(data)
else:
payload = json.dumps(payload, default=str)
r = self.connection.session.put(self.getURL(), params = params, data = payload)
update = True
data = r.json()
if (r.status_code == 201 or r.status_code == 202) and "error" not in data:
if update:
self._rev = data['_rev']
else:
self.set(data)
else:
if update:
raise UpdateError(data['errorMessage'], data)
else:
if data["errorNum"] == 1210:
raise UniqueConstrainViolation(data['errorMessage'], data)
else:
raise CreationError(data['errorMessage'], data)
self.modified = False
self._store.resetPatch()
def forceSave(self, **docArgs):
"saves even if the document has not been modified since the last save"
self.modified = True
self.save(**docArgs)
def saveCopy(self):
"saves a copy of the object and become that copy. returns a tuple (old _key, new _key)"
old_key = self._key
self.reset(self.collection)
self.save()
return (old_key, self._key)
def patch(self, keepNull = True, **docArgs):
"""Saves the document by only updating the modified fields.
        The default behaviour concerning the keepNull parameter is the opposite of ArangoDB's default: Null values won't be ignored
Use docArgs for things such as waitForSync = True"""
if self._id is None:
raise ValueError("Cannot patch a document that was not previously saved")
params = dict(docArgs)
params.update({'collection': self.collection.name, 'keepNull' : keepNull})
if self.collection._isBulkInProgress:
self.collection._patchBatch(self, params )
return self._store.resetPatch()
payload = self._store.getPatches()
if self.collection._validation['on_save']:
self.validate()
if len(payload) > 0:
payload = json.dumps(payload, default=str)
r = self.connection.session.patch(self.getURL(), params = params, data = payload)
data = r.json()
if (r.status_code == 201 or r.status_code == 202) and "error" not in data:
self._rev = data['_rev']
else:
raise UpdateError(data['errorMessage'], data)
self.modified = False
self._store.resetPatch()
def delete(self):
"deletes the document from the database"
if self._id is None:
raise DeletionError("Can't delete a document that was not saved")
if self.collection._isBulkInProgress:
params = {'collection': self.collection.name}
self.collection._deleteBatch(self, params)
self.modified = True
return
r = self.connection.session.delete(self.getURL())
data = r.json()
if (r.status_code != 200 and r.status_code != 202) or 'error' in data:
raise DeletionError(data['errorMessage'], data)
self.reset(self.collection)
self.modified = True
def getInEdges(self, edges, rawResults = False):
"An alias for getEdges() that returns only the in Edges"
return self.getEdges(edges, inEdges = True, outEdges = False, rawResults = rawResults)
def getOutEdges(self, edges, rawResults = False):
"An alias for getEdges() that returns only the out Edges"
return self.getEdges(edges, inEdges = False, outEdges = True, rawResults = rawResults)
def getEdges(self, edges, inEdges = True, outEdges = True, rawResults = False):
"""returns in, out, or both edges linked to self belonging the collection 'edges'.
If rawResults a arango results will be return as fetched, if false, will return a liste of Edge objects"""
try:
return edges.getEdges(self, inEdges, outEdges, rawResults)
except AttributeError:
raise AttributeError("%s does not seem to be a valid Edges object" % edges)
def getResponsibleShard(self):
""" If we're working with an arangodb cluster, we can use this method to fetch where a document lives."""
result = self.connection.session.put("%s/responsibleShard" % self.collection.getURL(), data = json.dumps(self.getStore()))
if result.status_code == 200:
return result.json()["shardId"]
raise ArangoError(result.json()['errorMessage'], result.json())
def getStore(self):
"""return the store in a dict format"""
store = self._store.getStore()
for priv in self.privates:
v = getattr(self, priv)
if v:
store[priv] = v
return store
def getPatches(self):
"""return the patches in a dict format"""
return self._store.getPatches()
def __dir__(self):
if not self._store:
return []
return dir(self._store)
def __len__(self):
if not self._store:
return 0
return self._store.__len__()
def __dict__(self):
if not self._store:
return {}
return dict(self._store)
def __contains__(self, field):
if not self._store:
return False
return field in self._store
def __getitem__(self, k):
"""get an element from the document"""
if k in self.collection.arangoPrivates:
return getattr(self, k)
return self._store[k]
def __getattr__(self, k):
if not self._store:
return None
return self._store[k]
def __setitem__(self, k, v):
"""set an element in the document"""
if k in self.collection.arangoPrivates:
setattr(self, k, v)
else:
self.modified = True
self._store[k] = v
def __delitem__(self, k):
"""removes an element from the document"""
self.modified = True
del(self._store[k])
def __str__(self):
return repr(self)
def __repr__(self):
privStr = []
for p in self.collection.arangoPrivates:
privStr.append("%s: %s" % (p, getattr(self, p)) )
privStr = ', '.join(privStr)
return "%s '%s': %s" % (self.typeName, privStr, repr(self._store))
|
class Document(object):
    '''The class that represents a document. Documents are meant to be instantiated by collections'''
def __init__(self, collection, jsonFieldInit = None, on_load_validation=False) :
pass
def reset(self, collection, jsonFieldInit = None, on_load_validation=False) :
'''replaces the current values in the document by those in jsonFieldInit'''
pass
def to_default(self):
'''reset the document to the default values'''
pass
def fill_default(self):
'''reset the document to the default values'''
pass
def validate(self):
'''validate the document'''
pass
def setPrivates(self, fieldDict):
'''will set self._id, self._rev and self._key field.'''
pass
def getURL(self):
pass
    def set(self, fieldDict):
'''set the document with a dictionary'''
pass
def save(self, waitForSync = False, **docArgs):
'''Saves the document to the database by either performing a POST (for a new document) or a PUT (complete document overwrite).
If you want to only update the modified fields use the .patch() function.
Use docArgs to put things such as 'waitForSync = True' (for a full list cf ArangoDB's doc).
It will only trigger a saving of the document if it has been modified since the last save. If you want to force the saving you can use forceSave()'''
pass
def _save(self, payload, waitForSync = False, **docArgs):
pass
def forceSave(self, **docArgs):
'''saves even if the document has not been modified since the last save'''
pass
def saveCopy(self):
'''saves a copy of the object and become that copy. returns a tuple (old _key, new _key)'''
pass
def patch(self, keepNull = True, **docArgs):
'''Saves the document by only updating the modified fields.
        The default behaviour concerning the keepNull parameter is the opposite of ArangoDB's default: Null values won't be ignored
Use docArgs for things such as waitForSync = True'''
pass
def delete(self):
'''deletes the document from the database'''
pass
def getInEdges(self, edges, rawResults = False):
'''An alias for getEdges() that returns only the in Edges'''
pass
def getOutEdges(self, edges, rawResults = False):
'''An alias for getEdges() that returns only the out Edges'''
pass
def getEdges(self, edges, inEdges = True, outEdges = True, rawResults = False):
'''returns in, out, or both edges linked to self belonging the collection 'edges'.
If rawResults a arango results will be return as fetched, if false, will return a liste of Edge objects'''
pass
def getResponsibleShard(self):
''' If we're working with an arangodb cluster, we can use this method to fetch where a document lives.'''
pass
def getStore(self):
'''return the store in a dict format'''
pass
def getPatches(self):
'''return the patches in a dict format'''
pass
def __dir__(self):
pass
def __len__(self):
pass
def __dict__(self):
pass
def __contains__(self, field):
pass
def __getitem__(self, k):
'''get an element from the document'''
pass
def __getattr__(self, k):
pass
def __setitem__(self, k, v):
'''set an element in the document'''
pass
def __delitem__(self, k):
'''removes an element from the document'''
pass
def __str__(self):
pass
def __repr__(self):
pass
| 31 | 21 | 8 | 1 | 6 | 1 | 2 | 0.17 | 1 | 10 | 6 | 1 | 30 | 7 | 30 | 30 | 276 | 56 | 188 | 60 | 157 | 32 | 181 | 60 | 150 | 11 | 1 | 4 | 68 |
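Note: a minimal sketch of the save/patch cycle for Document objects; 'users' is assumed to be a Collection created as in the Database sketch above, and the field values are illustrative.

doc = users.createDocument()
doc["name"] = "Ada"
doc["age"] = 36
doc.save()       # POST on first save, fills _id/_key/_rev

doc["age"] = 37
doc.patch()      # only the modified fields are sent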
6,485 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/document.py
|
pyArango.document.DocumentStore
|
class DocumentStore(object):
"""Store all the data of a document in hierarchy of stores and handles validation.
Does not store private information, these are in the document."""
def __init__(self, collection, validators=None, initDct=None, patch=False, subStore=False, validateInit=False):
if validators is None:
validators = {}
if initDct is None:
initDct = {}
self.store = {}
self.patchStore = {}
self.collection = collection
self.validators = validators
self.validateInit = validateInit
self.isSubStore = subStore
self.subStores = {}
self.patching = patch
if not self.validateInit :
self.mustValidate = False
self.set(initDct)
for v in self.collection._validation.values():
if v:
self.mustValidate = True
break
if self.validateInit:
self.set(initDct)
self.patching = True
def resetPatch(self):
"""reset patches"""
self.patchStore = {}
def getPatches(self):
"""get patches as a dictionary"""
if not self.mustValidate:
return self.getStore()
res = {}
res.update(self.patchStore)
for k, v in self.subStores.items():
res[k] = v.getPatches()
return res
def getStore(self):
"""get the inner store as dictionary"""
res = {}
res.update(self.store)
for k, v in self.subStores.items():
res[k] = v.getStore()
return res
def validateField(self, field):
"""Validatie a field"""
if field not in self.validators and not self.collection._validation['allow_foreign_fields']:
raise SchemaViolation(self.collection.__class__, field)
if field in self.store:
if isinstance(self.store[field], DocumentStore):
return self[field].validate()
if field in self.patchStore:
try:
return self.validators[field].validate(self.patchStore[field])
except ValidationError as e:
raise ValidationError( "'%s' -> %s" % ( field, str(e)) )
else:
try:
return self.validators[field].validate(self.store[field])
except ValidationError as e:
raise ValidationError( "'%s' -> %s" % ( field, str(e)) )
except AttributeError:
if isinstance(self.validators[field], dict) and not isinstance(self.store[field], dict):
raise ValueError("Validator expected a sub document for field '%s', got '%s' instead" % (field, self.store[field]) )
else:
raise
return True
def validate(self):
"""Validate the whole document"""
if not self.mustValidate:
return True
res = {}
for field in self.validators.keys():
try:
if isinstance(self.validators[field], dict) and field not in self.store:
self.store[field] = DocumentStore(self.collection, validators = self.validators[field], initDct = {}, subStore=True, validateInit=self.validateInit)
self.validateField(field)
except InvalidDocument as e:
res.update(e.errors)
except (ValidationError, SchemaViolation) as e:
res[field] = str(e)
if len(res) > 0:
raise InvalidDocument(res)
return True
def set(self, dct):
"""Set the values to a dict. Any missing value will be filled by it's default"""
for field, value in dct.items():
if field not in self.collection.arangoPrivates:
if isinstance(value, dict):
if field in self.validators and isinstance(self.validators[field], dict):
vals = self.validators[field]
else:
vals = {}
self[field] = DocumentStore(self.collection, validators = vals, initDct = value, patch = self.patching, subStore=True, validateInit=self.validateInit)
self.subStores[field] = self.store[field]
else:
self[field] = value
def fill_default(self):
"""replace all None values with defaults"""
for field, value in self.validators.items():
if isinstance(value, dict):
self[field].fill_default()
elif self[field] is None:
self[field] = value.default
def __dir__(self):
return dir(self.getStore())
def __len__(self):
return len(self.store)
def __dict__(self):
        return {**self.store, **self.patchStore}
def __contains__(self, field):
return field in self.store
def __getitem__(self, field):
"""Get an element from the store"""
if self.mustValidate and (field in self.validators) and isinstance(self.validators[field], dict) and (field not in self.store) :
self.store[field] = DocumentStore(self.collection, validators = self.validators[field], initDct = {}, patch = self.patching, subStore=True, validateInit=self.validateInit)
self.subStores[field] = self.store[field]
self.patchStore[field] = self.store[field]
if self.collection._validation['allow_foreign_fields'] or self.collection.hasField(field):
return self.store.get(field)
if not field in self.validators:
raise SchemaViolation(self.collection.__class__, field)
try:
return self.store[field]
except KeyError:
self.store[field] = self.validators[field].default
return self.store[field]
def __setitem__(self, field, value):
"""Set an element in the store"""
if self.mustValidate and (not self.collection._validation['allow_foreign_fields']) and (field not in self.validators) and (field not in self.collection.arangoPrivates):
raise SchemaViolation(self.collection.__class__, field)
if field in self.collection.arangoPrivates:
raise ValueError("DocumentStore cannot contain private field (got %s)" % field)
if isinstance(value, dict):
if field in self.validators and isinstance(self.validators[field], dict):
vals = self.validators[field]
else:
vals = {}
self.store[field] = DocumentStore(self.collection, validators = vals, initDct = value, patch = self.patching, subStore=True, validateInit=self.validateInit)
self.subStores[field] = self.store[field]
else:
self.store[field] = value
if self.patching:
self.patchStore[field] = self.store[field]
if self.mustValidate and self.collection._validation['on_set']:
self.validateField(field)
def __delitem__(self, k):
"""removes an element from the store"""
try:
del(self.store[k])
except:
pass
try:
del(self.patchStore[k])
except:
pass
try:
del(self.subStores[k])
except:
pass
def __contains__(self, k):
"""returns true or false weither the store has a key k"""
return (k in self.store) or (k in self.validators)
def __repr__(self):
return "<store: %s>" % repr(self.store)
|
class DocumentStore(object):
'''Store all the data of a document in hierarchy of stores and handles validation.
Does not store private information, these are in the document.'''
def __init__(self, collection, validators=None, initDct=None, patch=False, subStore=False, validateInit=False):
pass
def resetPatch(self):
'''reset patches'''
pass
def getPatches(self):
'''get patches as a dictionary'''
pass
def getStore(self):
'''get the inner store as dictionary'''
pass
def validateField(self, field):
        '''Validate a field'''
pass
    def validate(self):
'''Validate the whole document'''
pass
def set(self, dct):
'''Set the values to a dict. Any missing value will be filled by it's default'''
pass
def fill_default(self):
'''replace all None values with defaults'''
pass
def __dir__(self):
pass
def __len__(self):
pass
def __dict__(self):
pass
def __contains__(self, field):
pass
def __getitem__(self, field):
'''Get an element from the store'''
pass
def __setitem__(self, field, value):
'''Set an element in the store'''
pass
def __delitem__(self, k):
'''removes an element from the store'''
pass
def __contains__(self, field):
'''returns true or false weither the store has a key k'''
pass
def __repr__(self):
pass
| 18 | 12 | 11 | 1 | 9 | 1 | 4 | 0.08 | 1 | 8 | 3 | 0 | 17 | 9 | 17 | 17 | 205 | 39 | 153 | 40 | 135 | 13 | 146 | 38 | 128 | 9 | 1 | 4 | 60 |
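Note: DocumentStore turns dict values into nested sub-stores, which is what enables sub-document validation. A small, hedged sketch reusing the Collection, Field and NotNull imports from the earlier Field sketch; schema, collection and values are illustrative.

class Profiles(Collection):
    _fields = {
        "owner": Field(validators=[NotNull()]),
        "address": {                        # a dict of Fields becomes a nested DocumentStore
            "city": Field(validators=[NotNull()]),
            "zip": Field(),
        },
    }

profiles = db.createCollection("Profiles")
p = profiles.createDocument()
p["owner"] = "Ada"
p["address"]["city"] = "London"             # handled by the nested store
p.save()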
6,486 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/document.py
|
pyArango.document.Edge
|
class Edge(Document):
"""An Edge document"""
def __init__(self, edgeCollection, jsonFieldInit = None, on_load_validation=False) :
if not jsonFieldInit:
jsonFieldInit = {}
self.typeName = "ArangoEdge"
self.privates = ["_id", "_key", "_rev", "_from", "_to"]
self.reset(edgeCollection, jsonFieldInit, on_load_validation=on_load_validation)
def reset(self, edgeCollection, jsonFieldInit = None, on_load_validation=False) :
if jsonFieldInit is None:
jsonFieldInit = {}
Document.reset(self, edgeCollection, jsonFieldInit, on_load_validation=on_load_validation)
def links(self, fromVertice, toVertice, **edgeArgs):
"""
An alias to save that updates the _from and _to attributes.
        fromVertice and toVertice can be either strings or documents. If they are unsaved documents, they will be saved automatically.
"""
if isinstance(fromVertice, Document) or isinstance(getattr(fromVertice, 'document', None), Document):
if not fromVertice._id:
fromVertice.save()
self._from = fromVertice._id
elif (type(fromVertice) is bytes) or (type(fromVertice) is str):
self._from = fromVertice
elif not self._from:
raise CreationError('fromVertice %s is invalid!' % str(fromVertice))
if isinstance(toVertice, Document) or isinstance(getattr(toVertice, 'document', None), Document):
if not toVertice._id:
toVertice.save()
self._to = toVertice._id
elif (type(toVertice) is bytes) or (type(toVertice) is str):
self._to = toVertice
elif not self._to:
raise CreationError('toVertice %s is invalid!' % str(toVertice))
self.save(**edgeArgs)
def save(self, **edgeArgs):
"""Works like Document's except that you must specify '_from' and '_to' vertices before.
There's also a links() function especially for first saves."""
if not getattr(self, "_from") or not getattr(self, "_to"):
raise AttributeError("You must specify '_from' and '_to' attributes before saving. You can also use the function 'links()'")
payload = self._store.getStore()
payload["_from"] = self._from
payload["_to"] = self._to
Document._save(self, payload, **edgeArgs)
|
class Edge(Document):
'''An Edge document'''
def __init__(self, edgeCollection, jsonFieldInit = None, on_load_validation=False) :
pass
def reset(self, edgeCollection, jsonFieldInit = None, on_load_validation=False) :
pass
def links(self, fromVertice, toVertice, **edgeArgs):
'''
An alias to save that updates the _from and _to attributes.
        fromVertice and toVertice can be either strings or documents. If they are unsaved documents, they will be saved automatically.
'''
pass
def save(self, **edgeArgs):
'''Works like Document's except that you must specify '_from' and '_to' vertices before.
There's also a links() function especially for first saves.'''
pass
| 5 | 3 | 12 | 1 | 9 | 2 | 4 | 0.19 | 1 | 5 | 1 | 0 | 4 | 4 | 4 | 34 | 51 | 8 | 36 | 10 | 31 | 7 | 32 | 10 | 27 | 9 | 2 | 2 | 15 |
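Note: a hedged sketch of linking two saved documents with links(); the Edges subclass and edge field are illustrative, and 'ada' and 'bob' are assumed to be saved documents from a vertex collection.

from pyArango.collection import Edges

class Knows(Edges):
    pass

knows = db.createCollection("Knows")   # creates an edge collection because Knows subclasses Edges
edge = knows.createEdge()
edge["since"] = 2019
edge.links(ada, bob)                   # sets _from/_to from the documents and saves the edge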
6,487 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/foxx.py
|
pyArango.foxx.Foxx
|
class Foxx:
"""A generic foxx function executor."""
def __init__(self, database):
"""Initialise database and its services."""
self.database = database
self.services = []
self.mounts = {}
def service(self, mount):
"""Return a service so that only route after the mount.
Parameters
----------
mount : str
mount point.
Returns
-------
FoxxService
A mounted service
"""
if mount not in self.mounts:
self.reload()
if mount not in self.mounts:
raise ValueError("Unable to find the mount: '%s'", mount)
return FoxxService(self.database, mount)
def get_available_services(self):
response = self.database.action.get('/_api/foxx', params={'excludeSystem': False})
response.raise_for_status()
return response.json()
def reload(self):
self.services = self.get_available_services()
self.mounts = {service['mount'] for service in self.services}
|
class Foxx:
'''A generic foxx function executor.'''
def __init__(self, database):
'''Initialise database and its services.'''
pass
def service(self, mount):
'''Return a service so that only route after the mount.
Parameters
----------
mount : str
mount point.
Returns
-------
FoxxService
A mounted service
'''
pass
def get_available_services(self):
pass
def reload(self):
pass
| 5 | 3 | 8 | 1 | 4 | 3 | 2 | 0.67 | 0 | 2 | 1 | 0 | 4 | 3 | 4 | 4 | 37 | 7 | 18 | 10 | 13 | 12 | 18 | 9 | 13 | 3 | 0 | 1 | 6 |
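Note: a sketch of calling a mounted Foxx service through the helper above; the mount point and route are placeholders, and the HTTP verb helpers are assumed to come from DatabaseAction (the same mechanism used for '/_api/foxx' in the code above), prefixed with the service's end_point_url.

service = db.foxx.service("/my-service")   # raises ValueError if the mount is unknown
response = service.get("/ping")            # GET <endpoint>/_db/<db>/my-service/ping
print(response.json())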
6,488 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/foxx.py
|
pyArango.foxx.FoxxService
|
class FoxxService(DatabaseAction):
"""A foxx mount function executor."""
def __init__(self, database, mount):
"""Initialise mount and database."""
self.database = database
self.mount = mount
@property
def end_point_url(self):
"""End point url for foxx service."""
return '%s/_db/%s%s' % (
self.database.connection.getEndpointURL(), self.database.name,
self.mount
)
|
class FoxxService(DatabaseAction):
'''A foxx mount function executor.'''
def __init__(self, database, mount):
'''Initialise mount and database.'''
pass
@property
def end_point_url(self):
'''End point url for foxx service.'''
pass
| 4 | 3 | 5 | 0 | 4 | 1 | 1 | 0.3 | 1 | 0 | 0 | 0 | 2 | 2 | 2 | 15 | 15 | 2 | 10 | 6 | 6 | 3 | 6 | 5 | 3 | 1 | 2 | 0 | 2 |
6,489 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/gevent_session.py
|
pyArango.gevent_session.AikidoSession_GRequests
|
class AikidoSession_GRequests(object):
"""A version of Aikido that uses grequests."""
def __init__(
self, username, password, urls, use_jwt_authentication=False,
use_lock_for_reseting_jwt=True, max_retries=5, verify=None
):
self.max_retries = max_retries
self.use_jwt_authentication = use_jwt_authentication
if username:
if self.use_jwt_authentication:
self.auth = JWTAuth(
username, password, urls,
use_lock_for_reseting_jwt, max_retries
)
else:
self.auth = (username, password)
if (verify is not None) and not isinstance(verify, bool) and not isinstance(verify, CA_Certificate) and not isinstance(verify, str) :
raise ValueError("'verify' argument can only be of type: bool, CA_Certificate or str or None")
self.verify = verify
else:
self.auth = None
def __reset_auth(self):
if not self.use_jwt_authentication:
return
if self.auth.lock_for_reseting_jwt is not None:
self.auth.lock_for_reseting_jwt.acquire()
self.auth.reset_token()
if self.auth.lock_for_reseting_jwt is not None:
self.auth.lock_for_reseting_jwt.release()
def _run(self, req):
"""Run the request."""
if not self.use_jwt_authentication and self.verify is not None:
if isinstance(self.verify, CA_Certificate):
req.kwargs['verify'] = self.verify.get_file_path()
else :
req.kwargs['verify'] = self.verify
for _ in range(self.max_retries):
gevent.joinall([gevent.spawn(req.send)])
if self.use_jwt_authentication:
if hasattr(req, 'exception'):
logging.critical("%s is raised, will try to reset the auth and request again.", req.exception)
self.__reset_auth()
elif req.response.status_code == 401:
logging.critical("Invalid authentication token provided, will try to reset the auth and request again.")
self.__reset_auth()
else:
return req.response
else:
if hasattr(req, 'exception'):
logging.critical("%s is raised, will try to request again", req.exception)
elif req.response.status_code == 401:
logging.critical("Unauthorized access, you must supply a (username, password) with the correct credentials")
else:
return req.response
logging.critical("Tried to send the request max number of times.")
return req.response
def post(self, url, data=None, json=None, **kwargs):
"""HTTP POST Method."""
if data is not None:
kwargs['data'] = data
if json is not None:
kwargs['json'] = json
kwargs['auth'] = self.auth
req = grequests.post(url, **kwargs)
return self._run(req)
def get(self, url, **kwargs):
"""HTTP GET Method."""
kwargs['auth'] = self.auth
req = grequests.get(url, **kwargs)
return self._run(req)
def put(self, url, data=None, **kwargs):
"""HTTP PUT Method."""
if data is not None:
kwargs['data'] = data
kwargs['auth'] = self.auth
req = grequests.put(url, **kwargs)
return self._run(req)
def head(self, url, **kwargs):
"""HTTP HEAD Method."""
kwargs['auth'] = self.auth
req = grequests.head(url, **kwargs)
return self._run(req)
def options(self, url, **kwargs):
"""HTTP OPTIONS Method."""
kwargs['auth'] = self.auth
req = grequests.options(url, **kwargs)
return self._run(req)
def patch(self, url, data=None, **kwargs):
"""HTTP PATCH Method."""
if data is not None:
kwargs['data'] = data
kwargs['auth'] = self.auth
req = grequests.patch(url, **kwargs)
return self._run(req)
def delete(self, url, **kwargs):
"""HTTP DELETE Method."""
kwargs['auth'] = self.auth
req = grequests.delete(url, **kwargs)
return self._run(req)
def disconnect(self):
pass
|
class AikidoSession_GRequests(object):
'''A version of Aikido that uses grequests.'''
def __init__(
self, username, password, urls, use_jwt_authentication=False,
use_lock_for_reseting_jwt=True, max_retries=5, verify=None
):
pass
def __reset_auth(self):
pass
def _run(self, req):
'''Run the request.'''
pass
def post(self, url, data=None, json=None, **kwargs):
'''HTTP POST Method.'''
pass
def get(self, url, **kwargs):
'''HTTP GET Method.'''
pass
def put(self, url, data=None, **kwargs):
'''HTTP PUT Method.'''
pass
def head(self, url, **kwargs):
'''HTTP HEAD Method.'''
pass
def options(self, url, **kwargs):
'''HTTP OPTIONS Method.'''
pass
def patch(self, url, data=None, **kwargs):
'''HTTP PATCH Method.'''
pass
def delete(self, url, **kwargs):
'''HTTP DELETE Method.'''
pass
def disconnect(self):
pass
| 12 | 9 | 9 | 0 | 8 | 1 | 3 | 0.1 | 1 | 6 | 2 | 0 | 11 | 4 | 11 | 11 | 115 | 14 | 92 | 27 | 77 | 9 | 78 | 24 | 66 | 9 | 1 | 3 | 29 |
6,490 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/graph.py
|
pyArango.graph.EdgeDefinition
|
class EdgeDefinition(object):
"""An edge definition for a graph"""
def __init__(self, edgesCollection, fromCollections, toCollections):
self.name = edgesCollection
self.edgesCollection = edgesCollection
self.fromCollections = fromCollections
self.toCollections = toCollections
def toJson(self):
return { 'collection' : self.edgesCollection, 'from' : self.fromCollections, 'to' : self.toCollections }
def __str__(self):
return '<ArangoED>'+ str(self.toJson())
def __repr__(self):
return str(self)
|
class EdgeDefinition(object):
'''An edge definition for a graph'''
def __init__(self, edgesCollection, fromCollections, toCollections):
pass
def toJson(self):
pass
def __str__(self):
pass
def __repr__(self):
pass
| 5 | 1 | 3 | 0 | 3 | 0 | 1 | 0.08 | 1 | 1 | 0 | 0 | 4 | 4 | 4 | 4 | 17 | 4 | 12 | 9 | 7 | 1 | 12 | 9 | 7 | 1 | 1 | 0 | 4 |
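Note: a minimal sketch of how EdgeDefinition is typically used when declaring a Graph subclass; collection and graph names are illustrative, and the referenced Collection/Edges subclasses must be declared (and the collections created on the database) before the graph is built.

from pyArango.collection import Collection, Edges
from pyArango.graph import Graph, EdgeDefinition

class Person(Collection):
    pass

class Friends(Edges):
    pass

class Social(Graph):
    _edgeDefinitions = [
        EdgeDefinition("Friends", fromCollections=["Person"], toCollections=["Person"]),
    ]
    _orphanedCollections = []

social = db.createGraph("Social")   # 'db' as in the earlier Database sketch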
6,491 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/graph.py
|
pyArango.graph.Graph
|
class Graph(with_metaclass(Graph_metaclass, object)):
"""The class from witch all your graph types must derive"""
_edgeDefinitions = []
_orphanedCollections = []
def __init__(self, database, jsonInit):
self.database = database
self.connection = self.database.connection
        try:
            self._key = jsonInit["_key"]
        except KeyError:
            # a second 'except KeyError' on the same try block is unreachable; fall back to 'name' explicitly
            try:
                self._key = jsonInit["name"]
            except KeyError:
                raise KeyError("'jsonInit' must have a field '_key' or a field 'name'")
self.name = self._key
self._rev = jsonInit["_rev"]
self._id = jsonInit["_id"]
orfs = set(self._orphanedCollections)
for o in jsonInit["orphanCollections"]:
if o not in orfs:
self._orphanedCollections.append(o)
if self.connection.verbose:
print("Orphan collection %s is not in graph definition. Added it" % o)
self.definitions = {}
edNames = set()
for ed in self._edgeDefinitions:
self.definitions[ed.edgesCollection] = ed.edgesCollection
for ed in jsonInit["edgeDefinitions"]:
if ed["collection"] not in self.definitions:
self.definitions[ed["collection"]] = EdgeDefinition(ed["collection"], fromCollections = ed["from"], toCollections = ed["to"])
if self.connection.verbose:
print("Edge definition %s is not in graph definition. Added it" % ed)
for de in self._edgeDefinitions:
if de.edgesCollection not in self.database.collections and not COL.isEdgeCollection(de.edgesCollection):
raise KeyError("'%s' is not a valid edge collection" % de.edgesCollection)
self.definitions[de.edgesCollection] = de
def getURL(self):
return "%s/%s" % (self.database.getGraphsURL(), self._key)
def createVertex(self, collectionName, docAttributes, waitForSync = False):
"""adds a vertex to the graph and returns it"""
url = "%s/vertex/%s" % (self.getURL(), collectionName)
store = DOC.DocumentStore(self.database[collectionName], validators=self.database[collectionName]._fields, initDct=docAttributes)
# self.database[collectionName].validateDct(docAttributes)
store.validate()
r = self.connection.session.post(url, data = json.dumps(docAttributes, default=str), params = {'waitForSync' : waitForSync})
data = r.json()
if r.status_code == 201 or r.status_code == 202:
return self.database[collectionName][data["vertex"]["_key"]]
raise CreationError("Unable to create vertice, %s" % data["errorMessage"], data)
def deleteVertex(self, document, waitForSync = False):
"""deletes a vertex from the graph as well as al linked edges"""
url = "%s/vertex/%s" % (self.getURL(), document._id)
r = self.connection.session.delete(url, params = {'waitForSync' : waitForSync})
data = r.json()
if r.status_code == 200 or r.status_code == 202:
return True
raise DeletionError("Unable to delete vertice, %s" % document._id, data)
def createEdge(self, collectionName, _fromId, _toId, edgeAttributes, waitForSync = False):
"""creates an edge between two documents"""
if not _fromId:
raise ValueError("Invalid _fromId: %s" % _fromId)
if not _toId:
raise ValueError("Invalid _toId: %s" % _toId)
if collectionName not in self.definitions:
raise KeyError("'%s' is not among the edge definitions" % collectionName)
url = "%s/edge/%s" % (self.getURL(), collectionName)
self.database[collectionName].validatePrivate("_from", _fromId)
self.database[collectionName].validatePrivate("_to", _toId)
ed = self.database[collectionName].createEdge()
ed.set(edgeAttributes)
ed.validate()
payload = ed.getStore()
payload.update({'_from' : _fromId, '_to' : _toId})
r = self.connection.session.post(url, data = json.dumps(payload, default=str), params = {'waitForSync' : waitForSync})
data = r.json()
if r.status_code == 201 or r.status_code == 202:
return self.database[collectionName][data["edge"]["_key"]]
# print "\ngraph 160, ", data, payload, _fromId
raise CreationError("Unable to create edge, %s" % r.json()["errorMessage"], data)
def link(self, definition, doc1, doc2, edgeAttributes, waitForSync = False):
"""A shorthand for createEdge that takes two documents as input"""
if type(doc1) is DOC.Document:
if not doc1._id:
doc1.save()
doc1_id = doc1._id
else:
doc1_id = doc1
if type(doc2) is DOC.Document:
if not doc2._id:
doc2.save()
doc2_id = doc2._id
else:
doc2_id = doc2
return self.createEdge(definition, doc1_id, doc2_id, edgeAttributes, waitForSync)
def unlink(self, definition, doc1, doc2):
"""deletes all links between doc1 and doc2"""
links = self.database[definition].fetchByExample( {"_from": doc1._id,"_to" : doc2._id}, batchSize = 100)
for l in links:
self.deleteEdge(l)
def deleteEdge(self, edge, waitForSync = False):
"""removes an edge from the graph"""
url = "%s/edge/%s" % (self.getURL(), edge._id)
r = self.connection.session.delete(url, params = {'waitForSync' : waitForSync})
if r.status_code == 200 or r.status_code == 202:
return True
raise DeletionError("Unable to delete edge, %s" % edge._id, r.json())
def delete(self):
"""deletes the graph"""
r = self.connection.session.delete(self.getURL())
data = r.json()
if r.status_code < 200 or r.status_code > 202 or data["error"]:
raise DeletionError(data["errorMessage"], data)
def traverse(self, startVertex, **kwargs):
"""Traversal! see: https://docs.arangodb.com/HttpTraversal/README.html for a full list of the possible kwargs.
The function must have as argument either: direction = "outbound"/"any"/"inbound" or expander = "custom JS (see arangodb's doc)".
The function can't have both 'direction' and 'expander' as arguments.
"""
url = "%s/traversal" % self.database.getURL()
if type(startVertex) is DOC.Document:
startVertex_id = startVertex._id
else:
startVertex_id = startVertex
payload = {"startVertex": startVertex_id, "graphName" : self.name}
if "expander" in kwargs:
if "direction" in kwargs:
raise ValueError("""The function can't have both 'direction' and 'expander' as arguments""")
elif "direction" not in kwargs:
raise ValueError("""The function must have as argument either: direction = "outbout"/"any"/"inbound" or expander = "custom JS (see arangodb's doc)" """)
payload.update(kwargs)
r = self.connection.session.post(url, data = json.dumps(payload, default=str))
data = r.json()
if r.status_code < 200 or r.status_code > 202 or data["error"]:
raise TraversalError(data["errorMessage"], data)
return data["result"]
def __str__(self):
return "ArangoGraph: %s" % self.name
|
class Graph(with_metaclass(Graph_metaclass, object)):
'''The class from which all your graph types must derive'''
def __init__(self, database, jsonInit):
pass
def getURL(self):
pass
def createVertex(self, collectionName, docAttributes, waitForSync = False):
'''adds a vertex to the graph and returns it'''
pass
def deleteVertex(self, document, waitForSync = False):
'''deletes a vertex from the graph as well as all linked edges'''
pass
def createEdge(self, collectionName, _fromId, _toId, edgeAttributes, waitForSync = False):
'''creates an edge between two documents'''
pass
def link(self, definition, doc1, doc2, edgeAttributes, waitForSync = False):
'''A shorthand for createEdge that takes two documents as input'''
pass
def unlink(self, definition, doc1, doc2):
'''deletes all links between doc1 and doc2'''
pass
def deleteEdge(self, edge, waitForSync = False):
'''removes an edge from the graph'''
pass
def delete(self):
'''deletes the graph'''
pass
def traverse(self, startVertex, **kwargs):
'''Traversal! see: https://docs.arangodb.com/HttpTraversal/README.html for a full list of the possible kwargs.
The function must have as argument either: direction = "outbound"/"any"/"inbound" or expander = "custom JS (see arangodb's doc)".
The function can't have both 'direction' and 'expander' as arguments.
'''
pass
def __str__(self):
pass
| 12 | 9 | 14 | 2 | 11 | 1 | 4 | 0.12 | 1 | 11 | 6 | 3 | 11 | 7 | 11 | 11 | 172 | 37 | 121 | 51 | 109 | 14 | 117 | 51 | 105 | 12 | 1 | 3 | 40 |
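Taken together with EdgeDefinition above, the Graph class is meant to be subclassed. The sketch below is a minimal illustration under stated assumptions: the database object db already holds a "person" vertex collection and a "knows" edge collection, and db.createGraph() (pyArango's usual factory, not shown in this record) returns an instance of the named graph class.

# Minimal Graph subclass sketch; `db`, the collection names and db.createGraph() are assumptions.
from pyArango.graph import Graph, EdgeDefinition

class Knowledge(Graph):
    _edgeDefinitions = (EdgeDefinition("knows", fromCollections=["person"], toCollections=["person"]),)
    _orphanedCollections = []

g = db.createGraph("Knowledge")                       # assumed factory on the Database object
alice = g.createVertex("person", {"name": "Alice"})   # returns the stored vertex document
bob = g.createVertex("person", {"name": "Bob"})
g.link("knows", alice, bob, {"since": 2024})          # shorthand for createEdge()
print(g.traverse(alice, direction="outbound"))        # either 'direction' or 'expander', never both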
6,492 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/graph.py
|
pyArango.graph.Graph_metaclass
|
class Graph_metaclass(type):
"""Keeps track of all graph classes and does basic validations on fields"""
graphClasses = {}
def __new__(cls, name, bases, attrs):
clsObj = type.__new__(cls, name, bases, attrs)
if name != 'Graph':
try:
if len(attrs['_edgeDefinitions']) < 1:
raise CreationError("Graph class '%s' has no edge definition" % name)
except KeyError:
raise CreationError("Graph class '%s' has no field _edgeDefinition" % name)
if name != "Graph":
Graph_metaclass.graphClasses[name] = clsObj
return clsObj
@classmethod
def getGraphClass(cls, name):
"""return a graph class by its name"""
try:
return cls.graphClasses[name]
except KeyError:
raise KeyError("There's no child of Graph by the name of: %s" % name)
@classmethod
def isGraph(cls, name):
"""returns true/false depending if there is a graph called name"""
return name in cls.graphClasses
|
class Graph_metaclass(type):
'''Keeps track of all graph classes and does basic validations on fields'''
def __new__(cls, name, bases, attrs):
pass
@classmethod
def getGraphClass(cls, name):
'''return a graph class by its name'''
pass
@classmethod
def isGraph(cls, name):
'''returns true/false depending if there is a graph called name'''
pass
| 6 | 3 | 7 | 0 | 6 | 1 | 3 | 0.14 | 1 | 2 | 1 | 0 | 1 | 0 | 3 | 16 | 29 | 4 | 22 | 8 | 16 | 3 | 20 | 6 | 16 | 5 | 2 | 3 | 8 |
6,493 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.pyArangoException
|
class pyArangoException(Exception):
"""The calss from witch all Exceptions inherit"""
def __init__(self, message, errors = None):
Exception.__init__(self, message)
if errors is None:
errors = {}
self.message = message
self.errors = errors
def __str__(self):
return self.message + ". Errors: " + str(self.errors)
|
class pyArangoException(Exception):
'''The class from which all Exceptions inherit'''
def __init__(self, message, errors = None):
pass
def __str__(self):
pass
| 3 | 1 | 4 | 0 | 4 | 0 | 2 | 0.11 | 1 | 1 | 0 | 18 | 2 | 2 | 2 | 12 | 11 | 1 | 9 | 5 | 6 | 1 | 9 | 5 | 6 | 2 | 3 | 1 | 3 |
6,494 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/users.py
|
pyArango.users.User
|
class User(object):
"""This class represents a user"""
def __init__(self, users, jsonData = None):
if jsonData is None:
jsonData = {}
self._store = {}
self.users = users
self.connection = self.users.connection
self._store = {
"username": None,
"active": True,
"extra": None,
"changePassword": None,
"password": None,
}
self.isSet = False
if len(jsonData) > 0:
self._set(jsonData)
def _set(self, jsonData):
"""Initialize all fields at once. If no password is specified, it will be set as an empty string"""
self["username"] = jsonData["user"]
self["active"] = jsonData["active"]
self["extra"] = jsonData["extra"]
try:
self["changePassword"] = jsonData["changePassword"]
except Exception as e:
pass
# self["changePassword"] = ""
try:
self["password"] = jsonData["passwd"]
except KeyError:
self["password"] = ""
self.isSet = True
def getURL(self):
return "%s/user/%s" % (self.connection.getURL(), self["username"])
def save(self):
"""Save/updates the user"""
import json
payload = {}
payload.update(self._store)
payload["user"] = payload["username"]
payload["passwd"] = payload["password"]
del(payload["username"])
del(payload["password"])
payload = json.dumps(payload, default=str)
if not self.isSet:
if "username" not in self._store or "password" not in self._store:
raise KeyError("You must define self['name'] and self['password'] to be able to create a new user")
r = self.connection.session.post(self.users.getURL(), data = payload)
data = r.json()
if r.status_code == 201:
self._set(data)
else:
raise CreationError("Unable to create new user", data)
else:
r = self.connection.session.put(self.getURL(), data = payload)
data = r.json()
if r.status_code == 200:
self._set(data)
else:
raise UpdateError("Unable to update user, status: %s" %r.status_code, data)
def setPermissions(self, dbName, access):
"""Grant revoke rights on a database, 'access' is supposed to be boolean. ArangoDB grants/revokes both read and write rights at the same time"""
import json
if not self.isSet:
raise CreationError("Please save user first", None, None)
rights = []
if access:
rights.append("rw")
rights = ''.join(rights)
if not self.connection.hasDatabase(dbName):
raise KeyError("Unknown database: %s" % dbName)
url = "%s/database/%s" % (self.getURL(), dbName)
r = self.connection.session.put(url, data = json.dumps({"grant": rights}, default=str))
if r.status_code < 200 or r.status_code > 202:
raise CreationError("Unable to grant rights", r.content)
def delete(self):
"""Permanently remove the user"""
if not self.isSet:
raise CreationError("Please save user first", None, None)
r = self.connection.session.delete(self.getURL())
if r.status_code < 200 or r.status_code > 202:
raise DeletionError("Unable to delete user, url: %s, status: %s" %(r.url, r.status_code), r.content )
self.isSet = False
def __repr__(self):
return "ArangoUser: %s" % (self._store)
def __setitem__(self, k, v):
if k not in list(self._store.keys()):
raise KeyError("The only keys available for user are: %s" % (list(self._store.keys())))
self._store[k] = v
def __getitem__(self, k):
return self._store[k]
|
class User(object):
'''This class represents a user'''
def __init__(self, users, jsonData = None):
pass
def _set(self, jsonData):
'''Initialize all fields at once. If no password is specified, it will be set as an empty string'''
pass
def getURL(self):
pass
def save(self):
'''Save/updates the user'''
pass
def setPermissions(self, dbName, access):
'''Grant revoke rights on a database, 'access' is supposed to be boolean. ArangoDB grants/revokes both read and write rights at the same time'''
pass
def delete(self):
'''Permanently remove the user'''
pass
def __repr__(self):
pass
def __setitem__(self, k, v):
pass
def __getitem__(self, k):
pass
| 10 | 5 | 12 | 2 | 9 | 1 | 3 | 0.07 | 1 | 7 | 3 | 0 | 9 | 4 | 9 | 9 | 115 | 23 | 86 | 24 | 74 | 6 | 77 | 23 | 65 | 5 | 1 | 2 | 24 |
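A short lifecycle sketch for the User class above; users_manager stands in for pyArango's Users helper (it only needs to expose .connection and .getURL(), neither of which is shown in this record), and the credentials and database name are placeholders.

# User lifecycle sketch; `users_manager`, the credentials and "myDB" are placeholders.
from pyArango.users import User

user = User(users_manager)           # assumed Users helper wrapping the connection
user["username"] = "jdoe"
user["password"] = "s3cret"
user["active"] = True
user.save()                          # POSTs a new user; PUTs an update once isSet is True
user.setPermissions("myDB", True)    # grants "rw" on the database
user.delete()                        # permanently removes the user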
6,495 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/theExceptions.py
|
pyArango.theExceptions.CreationError
|
class CreationError(pyArangoException):
"""Something went wrong when creating something"""
def __init__(self, message, errors = None):
if errors is None:
errors = {}
pyArangoException.__init__(self, message, errors)
|
class CreationError(pyArangoException):
'''Something went wrong when creating something'''
def __init__(self, message, errors = None):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 2 | 0.2 | 1 | 0 | 0 | 1 | 1 | 0 | 1 | 13 | 6 | 0 | 5 | 2 | 3 | 1 | 5 | 2 | 3 | 2 | 4 | 1 | 2 |
6,496 |
ArangoDB-Community/pyArango
|
ArangoDB-Community_pyArango/pyArango/validation.py
|
pyArango.validation.Bool
|
class Bool(Validator):
"""The value must be a boolean"""
def validate(self, value):
if not isinstance(value, bool):
raise ValidationError("%s is not a valid boolean" % value)
return True
|
class Bool(Validator):
'''The value must be a boolean'''
def validate(self, value):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 2 | 0.2 | 1 | 2 | 1 | 0 | 1 | 0 | 1 | 4 | 6 | 0 | 5 | 2 | 3 | 1 | 5 | 2 | 3 | 2 | 2 | 1 | 2 |
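The Bool validator above either returns True or raises; a quick check, assuming ValidationError is importable from pyArango.theExceptions and that the Validator base class accepts no-argument construction.

# Quick check of the Bool validator; the ValidationError import path is an assumption.
from pyArango.validation import Bool
from pyArango.theExceptions import ValidationError

assert Bool().validate(True) is True
try:
    Bool().validate("yes")           # not a bool, so validate() raises
except ValidationError as err:
    print(err)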
6,497 |
ArangoDB-Community/pyArango
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/ArangoDB-Community_pyArango/examples/createSocialGraph.py
|
createSocialGraph.Social.relation
|
class relation(Edges):
_fields = {
"number": Field()
}
|
class relation(Edges):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 59 | 4 | 0 | 4 | 2 | 3 | 0 | 2 | 2 | 1 | 0 | 3 | 0 | 0 |
6,498 |
ArangoDB-Community/pyArango
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/ArangoDB-Community_pyArango/examples/createSocialGraph.py
|
createSocialGraph.Social.social
|
class social(Graph):
_edgeDefinitions = (EdgeDefinition('relation',
fromCollections=["female", "male"],
toCollections=["female", "male"]),)
_orphanedCollections = []
|
class social(Graph):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 11 | 6 | 1 | 5 | 3 | 4 | 0 | 3 | 3 | 2 | 0 | 2 | 0 | 0 |
6,499 |
ArangoDB-Community/pyArango
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/ArangoDB-Community_pyArango/pyArango/connection.py
|
pyArango.connection.AikidoSession.Holder
|
class Holder(object):
def __init__(self, fct, auth, max_conflict_retries=5, verify=True, timeout=30):
self.fct = fct
self.auth = auth
self.max_conflict_retries = max_conflict_retries
if not isinstance(verify, bool) and not isinstance(verify, CA_Certificate) and not not isinstance(verify, str):
raise ValueError(
"'verify' argument can only be of type: bool, CA_Certificate or str ")
self.verify = verify
self.timeout = timeout
def __call__(self, *args, **kwargs):
if self.auth:
kwargs["auth"] = self.auth
if isinstance(self.verify, CA_Certificate):
kwargs["verify"] = self.verify.get_file_path()
else:
kwargs["verify"] = self.verify
kwargs["timeout"] = self.timeout
try:
do_retry = True
retry = 0
while do_retry and retry < self.max_conflict_retries:
ret = self.fct(*args, **kwargs)
do_retry = ret.status_code == 1200
try:
data = ret.json()
do_retry = do_retry or (
"errorNum" in data and data["errorNum"] == 1200)
except JSONDecodeError:
pass
retry += 1
except:
print(
"===\nUnable to establish connection, perhaps arango is not running.\n===")
raise
if len(ret.content) < 1:
raise ConnectionError(
"Empty server response", ret.url, ret.status_code, ret.content)
elif ret.status_code == 401:
raise ConnectionError(
"Unauthorized access, you must supply a (username, password) with the correct credentials", ret.url, ret.status_code, ret.content)
ret.json = JsonHook(ret)
return ret
|
class Holder(object):
def __init__(self, fct, auth, max_conflict_retries=5, verify=True, timeout=30):
pass
def __call__(self, *args, **kwargs):
pass
| 3 | 0 | 21 | 3 | 19 | 0 | 5 | 0 | 1 | 7 | 2 | 0 | 2 | 5 | 2 | 2 | 44 | 6 | 38 | 12 | 35 | 0 | 36 | 12 | 33 | 8 | 1 | 3 | 10 |
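The Holder class above wraps a single HTTP verb of a requests session with authentication, TLS verification and conflict retries. A sketch of how it is applied, assuming the class (and its module-level imports such as JsonHook) is in scope; the URL and credentials are placeholders.

# Holder usage sketch; the URL and credentials are placeholders.
import requests

session = requests.Session()
get = Holder(session.get, auth=("root", "password"), max_conflict_retries=5, verify=True)
response = get("http://127.0.0.1:8529/_api/version")  # retried while ArangoDB reports conflict (1200)
print(response.json())                                # .json is swapped for a JsonHook by __call__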