text stringlengths 6–947k | repo_name stringlengths 5–100 | path stringlengths 4–231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6–947k | score float64 0–0.34
---|---|---|---|---|---|---|
"""`Factory of Factories` pattern."""
from dependency_injector import containers, providers
class SqlAlchemyDatabaseService:
def __init__(self, session, base_class):
self.session = session
self.base_class = base_class
class TokensService:
def __init__(self, id_generator, database):
self.id_generator = id_generator
self.database = database
class Token:
...
class UsersService:
def __init__(self, id_generator, database):
self.id_generator = id_generator
self.database = database
class User:
...
# Sample objects
session = object()
id_generator = object()
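# Calling database_factory(base_class=...) builds a new providers.Factory for
# SqlAlchemyDatabaseService that is already bound to `session`; injecting that
# inner factory into a service provider gives each service a database object
# created with its own base_class.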
class Container(containers.DeclarativeContainer):
database_factory = providers.Factory(
providers.Factory,
SqlAlchemyDatabaseService,
session=session,
)
token_service = providers.Factory(
TokensService,
id_generator=id_generator,
database=database_factory(base_class=Token),
)
user_service = providers.Factory(
UsersService,
id_generator=id_generator,
database=database_factory(base_class=User),
)
if __name__ == '__main__':
container = Container()
token_service = container.token_service()
assert token_service.database.base_class is Token
user_service = container.user_service()
assert user_service.database.base_class is User
| rmk135/objects | examples/miniapps/factory-patterns/factory_of_factories.py | Python | bsd-3-clause | 1,394 | 0 |
"""Local Media Source Implementation."""
from __future__ import annotations
import mimetypes
from pathlib import Path
from aiohttp import web
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.media_player.const import MEDIA_CLASS_DIRECTORY
from homeassistant.components.media_player.errors import BrowseError
from homeassistant.core import HomeAssistant, callback
from homeassistant.util import raise_if_invalid_path
from .const import DOMAIN, MEDIA_CLASS_MAP, MEDIA_MIME_TYPES
from .error import Unresolvable
from .models import BrowseMediaSource, MediaSource, MediaSourceItem, PlayMedia
@callback
def async_setup(hass: HomeAssistant) -> None:
"""Set up local media source."""
source = LocalSource(hass)
hass.data[DOMAIN][DOMAIN] = source
hass.http.register_view(LocalMediaView(hass, source))
class LocalSource(MediaSource):
"""Provide local directories as media sources."""
name: str = "Local Media"
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize local source."""
super().__init__(DOMAIN)
self.hass = hass
@callback
def async_full_path(self, source_dir_id: str, location: str) -> Path:
"""Return full path."""
return Path(self.hass.config.media_dirs[source_dir_id], location)
@callback
def async_parse_identifier(self, item: MediaSourceItem) -> tuple[str, str]:
"""Parse identifier."""
if not item.identifier:
# Empty source_dir_id and location
return "", ""
source_dir_id, location = item.identifier.split("/", 1)
if source_dir_id not in self.hass.config.media_dirs:
raise Unresolvable("Unknown source directory.")
try:
raise_if_invalid_path(location)
except ValueError as err:
raise Unresolvable("Invalid path.") from err
return source_dir_id, location
async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
"""Resolve media to a url."""
source_dir_id, location = self.async_parse_identifier(item)
if source_dir_id == "" or source_dir_id not in self.hass.config.media_dirs:
raise Unresolvable("Unknown source directory.")
mime_type, _ = mimetypes.guess_type(
str(self.async_full_path(source_dir_id, location))
)
assert isinstance(mime_type, str)
return PlayMedia(f"/media/{item.identifier}", mime_type)
async def async_browse_media(self, item: MediaSourceItem) -> BrowseMediaSource:
"""Return media."""
try:
source_dir_id, location = self.async_parse_identifier(item)
except Unresolvable as err:
raise BrowseError(str(err)) from err
result = await self.hass.async_add_executor_job(
self._browse_media, source_dir_id, location
)
return result
def _browse_media(self, source_dir_id: str, location: str) -> BrowseMediaSource:
"""Browse media."""
# If only one media dir is configured, use that as the local media root
if source_dir_id == "" and len(self.hass.config.media_dirs) == 1:
source_dir_id = list(self.hass.config.media_dirs)[0]
# Multiple folders configured, root is requested
if source_dir_id == "":
if location:
raise BrowseError("Folder not found.")
base = BrowseMediaSource(
domain=DOMAIN,
identifier="",
media_class=MEDIA_CLASS_DIRECTORY,
media_content_type=None,
title=self.name,
can_play=False,
can_expand=True,
children_media_class=MEDIA_CLASS_DIRECTORY,
)
base.children = [
self._browse_media(source_dir_id, "")
for source_dir_id in self.hass.config.media_dirs
]
return base
full_path = Path(self.hass.config.media_dirs[source_dir_id], location)
if not full_path.exists():
if location == "":
raise BrowseError("Media directory does not exist.")
raise BrowseError("Path does not exist.")
if not full_path.is_dir():
raise BrowseError("Path is not a directory.")
result = self._build_item_response(source_dir_id, full_path)
if not result:
raise BrowseError("Unknown source directory.")
return result
def _build_item_response(
self, source_dir_id: str, path: Path, is_child: bool = False
) -> BrowseMediaSource | None:
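"""Build a BrowseMediaSource for a path, or return None if it is not a media file or directory."""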
mime_type, _ = mimetypes.guess_type(str(path))
is_file = path.is_file()
is_dir = path.is_dir()
# Make sure it's a file or directory
if not is_file and not is_dir:
return None
# Check that it's a media file
if is_file and (
not mime_type or mime_type.split("/")[0] not in MEDIA_MIME_TYPES
):
return None
title = path.name
if is_dir:
title += "/"
media_class = MEDIA_CLASS_DIRECTORY
if mime_type:
media_class = MEDIA_CLASS_MAP.get(
mime_type.split("/")[0], MEDIA_CLASS_DIRECTORY
)
media = BrowseMediaSource(
domain=DOMAIN,
identifier=f"{source_dir_id}/{path.relative_to(self.hass.config.media_dirs[source_dir_id])}",
media_class=media_class,
media_content_type=mime_type or "",
title=title,
can_play=is_file,
can_expand=is_dir,
)
if is_file or is_child:
return media
# Append first level children
media.children = []
for child_path in path.iterdir():
child = self._build_item_response(source_dir_id, child_path, True)
if child:
media.children.append(child)
# Sort children showing directories first, then by name
media.children.sort(key=lambda child: (child.can_play, child.title))
return media
class LocalMediaView(HomeAssistantView):
"""
Local Media Finder View.
Returns media files in config/media.
"""
url = "/media/{source_dir_id}/{location:.*}"
name = "media"
def __init__(self, hass: HomeAssistant, source: LocalSource) -> None:
"""Initialize the media view."""
self.hass = hass
self.source = source
async def get(
self, request: web.Request, source_dir_id: str, location: str
) -> web.FileResponse:
"""Start a GET request."""
try:
raise_if_invalid_path(location)
except ValueError as err:
raise web.HTTPBadRequest() from err
if source_dir_id not in self.hass.config.media_dirs:
raise web.HTTPNotFound()
media_path = self.source.async_full_path(source_dir_id, location)
# Check that the file exists
if not media_path.is_file():
raise web.HTTPNotFound()
# Check that it's a media file
mime_type, _ = mimetypes.guess_type(str(media_path))
if not mime_type or mime_type.split("/")[0] not in MEDIA_MIME_TYPES:
raise web.HTTPNotFound()
return web.FileResponse(media_path)
| mezz64/home-assistant | homeassistant/components/media_source/local_source.py | Python | apache-2.0 | 7,330 | 0.000546 |
import django.dispatch
# Whenever a permission object is saved, it sends out the signal. This allows
# models to keep their permissions in sync
permission_changed = django.dispatch.Signal(providing_args=('to_whom', 'to_what'))
| callowayproject/django-objectpermissions | objectpermissions/signals.py | Python | apache-2.0 | 227 | 0.008811 |
# Copyright 2022 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import subprocess
from setup_product.setup_cleanup import (
create_bq_dataset,
create_bq_table,
delete_bq_table,
upload_data_to_bq_table,
)
def test_import_products_bq(table_id_prefix):
dataset = "products"
valid_products_table = f"{table_id_prefix}products"
product_schema = "../resources/product_schema.json"
valid_products_source_file = "../resources/products.json"
create_bq_dataset(dataset)
create_bq_table(dataset, valid_products_table, product_schema)
upload_data_to_bq_table(
dataset, valid_products_table, valid_products_source_file, product_schema
)
output = str(
subprocess.check_output(
f"python import_products_big_query_table.py {dataset} {valid_products_table}",
shell=True,
)
)
delete_bq_table(dataset, valid_products_table)
assert re.match(".*import products from big query table request.*", output)
assert re.match(".*the operation was started.*", output)
assert re.match(
".*projects/.*/locations/global/catalogs/default_catalog/branches/0/operations/import-products.*",
output,
)
assert re.match(".*number of successfully imported products.*?316.*", output)
assert re.match(".*number of failures during the importing.*?0.*", output)
| googleapis/python-retail | samples/interactive-tutorials/product/import_products_bq_test.py | Python | apache-2.0 | 1,917 | 0.002087 |
import pytest
from conftest import DeSECAPIV1Client
@pytest.mark.parametrize("init_rrsets", [
{
('www', 'A'): (3600, {'1.2.3.4'}),
('www', 'AAAA'): (3600, {'::1'}),
('one', 'CNAME'): (3600, {'some.example.net.'}),
('other', 'TXT'): (3600, {'"foo" "bar"', '"bar" "foo"'}),
}
])
@pytest.mark.parametrize("rrsets", [
{ # create three RRsets
('a' * 63, 'A'): (7000, {'4.3.2.1', '7.6.5.4'}),
('b', 'PTR'): (7000, {'1.foo.bar.com.', '2.bar.foo.net.'}),
('c.' + 'a' * 63, 'MX'): (7000, {'10 mail.something.net.'}),
},
{ # update three RRsets
('www', 'A'): None, # ensure value from init_rrset is still there
('www', 'AAAA'): (7000, {'6666::6666', '7777::7777'}),
('one', 'CNAME'): (7000, {'other.example.net.'}),
('other', 'TXT'): (7000, {'"foobar"'}),
},
{ # delete three RRsets
('www', 'A'): (7000, {}),
('www', 'AAAA'): None, # ensure value from init_rrset is still there
('one', 'CNAME'): (7000, {}),
('other', 'TXT'): (7000, {}),
},
{ # create, update, delete
('a' * 63, 'A'): (7000, {'4.3.2.1', '7.6.5.4'}),
('www', 'A'): None, # ensure value from init_rrset is still there
('www', 'AAAA'): (7000, {'6666::6666', '7777::7777'}),
('one', 'CNAME'): None, # ensure value from init_rrset is still there
('other', 'TXT'): (7000, {}),
},
{ # complex usecase
('', 'A'): (3600, {'1.2.3.4', '255.254.253.252'}), # create apex record
('*', 'MX'): (3601, {'0 mx.example.net.'}), # create wildcard record
('www', 'AAAA'): (3602, {}), # remove existing record
('www', 'A'): (7000, {'4.3.2.1', '7.6.5.4'}), # update existing record
('one', 'A'): (3603, {'1.1.1.1'}), # configure A instead of ...
('one', 'CNAME'): (3603, {}), # ... CNAME
('other', 'CNAME'): (3603, {'cname.example.com.'}), # configure CNAME instead of ...
('other', 'TXT'): (3600, {}), # ... TXT
('nonexistent', 'DNAME'): (3600, {}), # delete something that doesn't exist
('sub', 'CDNSKEY'): (3600, {'257 3 15 l02Woi0iS8Aa25FQkUd9RMzZHJpBoRQwAQEX1SxZJA4='}), # non-apex DNSSEC
('sub', 'CDS'): (3600, {'35217 15 2 401781b934e392de492ec77ae2e15d70f6575a1c0bc59c5275c04ebe80c6614c'}), # ditto
# ('sub', 'DNSKEY'): (3600, {'257 3 15 l02Woi0iS8Aa25FQkUd9RMzZHJpBoRQwAQEX1SxZJA4='}) # no pdns support >= 4.6
},
])
def test(api_user_domain_rrsets: DeSECAPIV1Client, rrsets: dict):
api_user_domain_rrsets.patch(f"/domains/{api_user_domain_rrsets.domain}/rrsets/", data=[
{"subname": k[0], "type": k[1], "ttl": v[0], "records": list(v[1])}
for k, v in rrsets.items()
if v is not None
])
api_user_domain_rrsets.assert_rrsets(rrsets)
| desec-io/desec-stack | test/e2e2/spec/test_api_rrset.py | Python | mit | 2,838 | 0.002467 |
'''
t4_adm.py - this file is part of S3QL (http://s3ql.googlecode.com)
Copyright (C) 2008-2009 Nikolaus Rath <Nikolaus@rath.org>
This program can be distributed under the terms of the GNU GPLv3.
'''
from __future__ import division, print_function, absolute_import
from s3ql.backends import local
from s3ql.backends.common import BetterBackend
import shutil
import sys
import tempfile
import unittest2 as unittest
import subprocess
import os.path
if __name__ == '__main__':
mypath = sys.argv[0]
else:
mypath = __file__
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(mypath), '..'))
class AdmTests(unittest.TestCase):
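"""Tests for the s3qladm command-line tool."""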
def setUp(self):
self.cache_dir = tempfile.mkdtemp()
self.backend_dir = tempfile.mkdtemp()
self.storage_url = 'local://' + self.backend_dir
self.passphrase = 'oeut3d'
def tearDown(self):
shutil.rmtree(self.cache_dir)
shutil.rmtree(self.backend_dir)
def mkfs(self):
proc = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'bin', 'mkfs.s3ql'),
'-L', 'test fs', '--max-obj-size', '500',
'--cachedir', self.cache_dir, '--quiet',
self.storage_url ], stdin=subprocess.PIPE)
print(self.passphrase, file=proc.stdin)
print(self.passphrase, file=proc.stdin)
proc.stdin.close()
self.assertEqual(proc.wait(), 0)
def test_passphrase(self):
self.mkfs()
passphrase_new = 'sd982jhd'
proc = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'bin', 's3qladm'),
'--quiet', 'passphrase',
self.storage_url ], stdin=subprocess.PIPE)
print(self.passphrase, file=proc.stdin)
print(passphrase_new, file=proc.stdin)
print(passphrase_new, file=proc.stdin)
proc.stdin.close()
self.assertEqual(proc.wait(), 0)
plain_backend = local.Backend(self.storage_url, None, None)
backend = BetterBackend(passphrase_new, 'bzip2', plain_backend)
self.assertTrue(isinstance(backend['s3ql_passphrase'], str))
# Somehow important according to pyunit documentation
def suite():
return unittest.makeSuite(AdmTests)
# Allow calling from command line
if __name__ == "__main__":
unittest.main()
| thefirstwind/s3qloss | tests/t4_adm.py | Python | gpl-3.0 | 2,378 | 0.002103 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
from django.http import HttpResponse
def hello_world_view(request):
return HttpResponse("hello world", content_type="text/plain")
| ludovic-bouguerra/tutorial-travis-docker | webservice/views.py | Python | gpl-3.0 | 237 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import contents.models
class Migration(migrations.Migration):
dependencies = [
('contents', '0017_auto_20170329_1504'),
]
operations = [
migrations.AlterField(
model_name='frontpageimage',
name='image',
field=models.ImageField(null=True, upload_to=contents.models.get_front_page_image_path),
),
]
| andersonjonathan/Navitas | navitas/contents/migrations/0018_auto_20170329_1549.py | Python | mit | 483 | 0.00207 |
# coding: utf-8
#
# This file is part of Progdupeupl.
#
# Progdupeupl is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Progdupeupl is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Progdupeupl. If not, see <http://www.gnu.org/licenses/>.
"""Tests for utils app."""
import unittest
import hashlib
from django.contrib.auth.models import User
from django_dynamic_fixture import G
from pdp.member.models import Profile, ActivationToken
from pdp.utils.templatetags.profile import profile
from pdp.utils.templatetags.interventions import interventions_topics
from pdp.utils.paginator import paginator_range
from pdp.utils import mail
class TemplateTagsTests(unittest.TestCase):
"""Test for the custom template tags about users."""
def setUp(self):
self.user = G(User)
def test_profile_none(self):
"""Test the output of profile templatetag if profile does not exist."""
self.assertEqual(None, profile(self.user))
def test_profile_existing(self):
"""Test the output of profile templatetag if profile does exist."""
p = G(Profile, user=self.user)
self.assertEqual(p, profile(self.user))
def test_interventions_none(self):
"""Test templatetags when no topic should match."""
self.assertEqual(interventions_topics(self.user), {'unread': [],
'read': []})
class PaginatorRangeTests(unittest.TestCase):
"""Tests for the paginator_range function."""
def test_out_of_range(self):
self.assertRaises(ValueError, lambda: paginator_range(3, 2))
def test_one(self):
result = paginator_range(1, 1)
self.assertEqual(result, [1])
def test_small(self):
result = paginator_range(2, 3)
self.assertEqual(result, [1, 2, 3])
def test_small_limit(self):
result = paginator_range(1, 4)
self.assertEqual(result, [1, 2, 3, 4])
def test_big_start(self):
result = paginator_range(1, 10)
self.assertEqual(result, [1, 2, None, 10])
def test_big_start_limit(self):
result = paginator_range(3, 10)
self.assertEqual(result, [1, 2, 3, 4, None, 10])
def test_big_middle(self):
result = paginator_range(5, 10)
self.assertEqual(result, [1, None, 4, 5, 6, None, 10])
def test_big_end(self):
result = paginator_range(10, 10)
self.assertEqual(result, [1, None, 9, 10])
def test_big_end_limit(self):
result = paginator_range(7, 10)
self.assertEqual(result, [1, None, 6, 7, 8, 9, 10])
class MailTests(unittest.TestCase):
"""Tests for the mail utilities."""
def test_send_templated_mail(self):
recipients = ['test1@localhost']
result = mail.send_templated_mail(
subject='Fake subject',
template='base.txt',
context={},
recipients=recipients
)
self.assertEqual(result, 1)
def test_send_mail_to_confirm_registration(self):
user = G(User, username='Blaireau1', email='test1@localhost')
link = hashlib.sha1('blbl'.encode('ascii')).hexdigest()
token = G(ActivationToken, user=user, token=link)
result = mail.send_mail_to_confirm_registration(token)
self.assertEqual(result, 1)
| progdupeupl/pdp_website | pdp/utils/tests.py | Python | agpl-3.0 | 3,776 | 0 |
# Partname: ATmega16U4
# generated automatically, do not edit
MCUREGS = {
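# Values prefixed with '&' are register addresses (decimal); values prefixed
# with '$' are bit masks or constants (hexadecimal).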
'WDTCSR': '&96',
'WDTCSR_WDIF': '$80',
'WDTCSR_WDIE': '$40',
'WDTCSR_WDP': '$27',
'WDTCSR_WDCE': '$10',
'WDTCSR_WDE': '$08',
'PORTD': '&43',
'DDRD': '&42',
'PIND': '&41',
'SPCR': '&76',
'SPCR_SPIE': '$80',
'SPCR_SPE': '$40',
'SPCR_DORD': '$20',
'SPCR_MSTR': '$10',
'SPCR_CPOL': '$08',
'SPCR_CPHA': '$04',
'SPCR_SPR': '$03',
'SPSR': '&77',
'SPSR_SPIF': '$80',
'SPSR_WCOL': '$40',
'SPSR_SPI2X': '$01',
'SPDR': '&78',
'UDR1': '&206',
'UCSR1A': '&200',
'UCSR1A_RXC1': '$80',
'UCSR1A_TXC1': '$40',
'UCSR1A_UDRE1': '$20',
'UCSR1A_FE1': '$10',
'UCSR1A_DOR1': '$08',
'UCSR1A_UPE1': '$04',
'UCSR1A_U2X1': '$02',
'UCSR1A_MPCM1': '$01',
'UCSR1B': '&201',
'UCSR1B_RXCIE1': '$80',
'UCSR1B_TXCIE1': '$40',
'UCSR1B_UDRIE1': '$20',
'UCSR1B_RXEN1': '$10',
'UCSR1B_TXEN1': '$08',
'UCSR1B_UCSZ12': '$04',
'UCSR1B_RXB81': '$02',
'UCSR1B_TXB81': '$01',
'UCSR1C': '&202',
'UCSR1C_UMSEL1': '$C0',
'UCSR1C_UPM1': '$30',
'UCSR1C_USBS1': '$08',
'UCSR1C_UCSZ1': '$06',
'UCSR1C_UCPOL1': '$01',
'UBRR1': '&204',
'SPMCSR': '&87',
'SPMCSR_SPMIE': '$80',
'SPMCSR_RWWSB': '$40',
'SPMCSR_SIGRD': '$20',
'SPMCSR_RWWSRE': '$10',
'SPMCSR_BLBSET': '$08',
'SPMCSR_PGWRT': '$04',
'SPMCSR_PGERS': '$02',
'SPMCSR_SPMEN': '$01',
'EEAR': '&65',
'EEDR': '&64',
'EECR': '&63',
'EECR_EEPM': '$30',
'EECR_EERIE': '$08',
'EECR_EEMPE': '$04',
'EECR_EEPE': '$02',
'EECR_EERE': '$01',
'OCR0B': '&72',
'OCR0A': '&71',
'TCNT0': '&70',
'TCCR0B': '&69',
'TCCR0B_FOC0A': '$80',
'TCCR0B_FOC0B': '$40',
'TCCR0B_WGM02': '$08',
'TCCR0B_CS0': '$07',
'TCCR0A': '&68',
'TCCR0A_COM0A': '$C0',
'TCCR0A_COM0B': '$30',
'TCCR0A_WGM0': '$03',
'TIMSK0': '&110',
'TIMSK0_OCIE0B': '$04',
'TIMSK0_OCIE0A': '$02',
'TIMSK0_TOIE0': '$01',
'TIFR0': '&53',
'TIFR0_OCF0B': '$04',
'TIFR0_OCF0A': '$02',
'TIFR0_TOV0': '$01',
'GTCCR': '&67',
'GTCCR_TSM': '$80',
'GTCCR_PSRSYNC': '$01',
'TCCR3A': '&144',
'TCCR3A_COM3A': '$C0',
'TCCR3A_COM3B': '$30',
'TCCR3A_COM3C': '$0C',
'TCCR3A_WGM3': '$03',
'TCCR3B': '&145',
'TCCR3B_ICNC3': '$80',
'TCCR3B_ICES3': '$40',
'TCCR3B_WGM3': '$18',
'TCCR3B_CS3': '$07',
'TCCR3C': '&146',
'TCCR3C_FOC3A': '$80',
'TCCR3C_FOC3B': '$40',
'TCCR3C_FOC3C': '$20',
'TCNT3': '&148',
'OCR3A': '&152',
'OCR3B': '&154',
'OCR3C': '&156',
'ICR3': '&150',
'TIMSK3': '&113',
'TIMSK3_ICIE3': '$20',
'TIMSK3_OCIE3C': '$08',
'TIMSK3_OCIE3B': '$04',
'TIMSK3_OCIE3A': '$02',
'TIMSK3_TOIE3': '$01',
'TIFR3': '&56',
'TIFR3_ICF3': '$20',
'TIFR3_OCF3C': '$08',
'TIFR3_OCF3B': '$04',
'TIFR3_OCF3A': '$02',
'TIFR3_TOV3': '$01',
'TCCR1A': '&128',
'TCCR1A_COM1A': '$C0',
'TCCR1A_COM1B': '$30',
'TCCR1A_COM1C': '$0C',
'TCCR1A_WGM1': '$03',
'TCCR1B': '&129',
'TCCR1B_ICNC1': '$80',
'TCCR1B_ICES1': '$40',
'TCCR1B_WGM1': '$18',
'TCCR1B_CS1': '$07',
'TCCR1C': '&130',
'TCCR1C_FOC1A': '$80',
'TCCR1C_FOC1B': '$40',
'TCCR1C_FOC1C': '$20',
'TCNT1': '&132',
'OCR1A': '&136',
'OCR1B': '&138',
'OCR1C': '&140',
'ICR1': '&134',
'TIMSK1': '&111',
'TIMSK1_ICIE1': '$20',
'TIMSK1_OCIE1C': '$08',
'TIMSK1_OCIE1B': '$04',
'TIMSK1_OCIE1A': '$02',
'TIMSK1_TOIE1': '$01',
'TIFR1': '&54',
'TIFR1_ICF1': '$20',
'TIFR1_OCF1C': '$08',
'TIFR1_OCF1B': '$04',
'TIFR1_OCF1A': '$02',
'TIFR1_TOV1': '$01',
'OCDR': '&81',
'MCUCR': '&85',
'MCUCR_JTD': '$80',
'MCUSR': '&84',
'MCUSR_JTRF': '$10',
'EICRA': '&105',
'EICRA_ISC3': '$C0',
'EICRA_ISC2': '$30',
'EICRA_ISC1': '$0C',
'EICRA_ISC0': '$03',
'EICRB': '&106',
'EICRB_ISC7': '$C0',
'EICRB_ISC6': '$30',
'EICRB_ISC5': '$0C',
'EICRB_ISC4': '$03',
'EIMSK': '&61',
'EIMSK_INT': '$FF',
'EIFR': '&60',
'EIFR_INTF': '$FF',
'PCMSK0': '&107',
'PCIFR': '&59',
'PCIFR_PCIF0': '$01',
'PCICR': '&104',
'PCICR_PCIE0': '$01',
'TCCR4A': '&192',
'TCCR4A_COM4A': '$C0',
'TCCR4A_COM4B': '$30',
'TCCR4A_FOC4A': '$08',
'TCCR4A_FOC4B': '$04',
'TCCR4A_PWM4A': '$02',
'TCCR4A_PWM4B': '$01',
'TCCR4B': '&193',
'TCCR4B_PWM4X': '$80',
'TCCR4B_PSR4': '$40',
'TCCR4B_DTPS4': '$30',
'TCCR4B_CS4': '$0F',
'TCCR4C': '&194',
'TCCR4C_COM4A1S': '$80',
'TCCR4C_COM4A0S': '$40',
'TCCR4C_COM4B1S': '$20',
'TCCR4C_COM4B0S': '$10',
'TCCR4C_COM4D': '$0C',
'TCCR4C_FOC4D': '$02',
'TCCR4C_PWM4D': '$01',
'TCCR4D': '&195',
'TCCR4D_FPIE4': '$80',
'TCCR4D_FPEN4': '$40',
'TCCR4D_FPNC4': '$20',
'TCCR4D_FPES4': '$10',
'TCCR4D_FPAC4': '$08',
'TCCR4D_FPF4': '$04',
'TCCR4D_WGM4': '$03',
'TCCR4E': '&196',
'TCCR4E_TLOCK4': '$80',
'TCCR4E_ENHC4': '$40',
'TCCR4E_OC4OE': '$3F',
'TCNT4': '&190',
'TC4H': '&191',
'OCR4A': '&207',
'OCR4B': '&208',
'OCR4C': '&209',
'OCR4D': '&210',
'TIMSK4': '&114',
'TIMSK4_OCIE4D': '$80',
'TIMSK4_OCIE4A': '$40',
'TIMSK4_OCIE4B': '$20',
'TIMSK4_TOIE4': '$04',
'TIFR4': '&57',
'TIFR4_OCF4D': '$80',
'TIFR4_OCF4A': '$40',
'TIFR4_OCF4B': '$20',
'TIFR4_TOV4': '$04',
'DT4': '&212',
'DT4_DT4L': '$FF',
'PORTB': '&37',
'DDRB': '&36',
'PINB': '&35',
'PORTC': '&40',
'DDRC': '&39',
'PINC': '&38',
'PORTE': '&46',
'DDRE': '&45',
'PINE': '&44',
'PORTF': '&49',
'DDRF': '&48',
'PINF': '&47',
'ADMUX': '&124',
'ADMUX_REFS': '$C0',
'ADMUX_ADLAR': '$20',
'ADMUX_MUX': '$1F',
'ADCSRA': '&122',
'ADCSRA_ADEN': '$80',
'ADCSRA_ADSC': '$40',
'ADCSRA_ADATE': '$20',
'ADCSRA_ADIF': '$10',
'ADCSRA_ADIE': '$08',
'ADCSRA_ADPS': '$07',
'ADC': '&120',
'ADCSRB': '&123',
'ADCSRB_ADHSM': '$80',
'ADCSRB_MUX5': '$20',
'ADCSRB_ADTS': '$17',
'DIDR0': '&126',
'DIDR0_ADC7D': '$80',
'DIDR0_ADC6D': '$40',
'DIDR0_ADC5D': '$20',
'DIDR0_ADC4D': '$10',
'DIDR0_ADC3D': '$08',
'DIDR0_ADC2D': '$04',
'DIDR0_ADC1D': '$02',
'DIDR0_ADC0D': '$01',
'DIDR2': '&125',
'DIDR2_ADC13D': '$20',
'DIDR2_ADC12D': '$10',
'DIDR2_ADC11D': '$08',
'DIDR2_ADC10D': '$04',
'DIDR2_ADC9D': '$02',
'DIDR2_ADC8D': '$01',
'ACSR': '&80',
'ACSR_ACD': '$80',
'ACSR_ACBG': '$40',
'ACSR_ACO': '$20',
'ACSR_ACI': '$10',
'ACSR_ACIE': '$08',
'ACSR_ACIC': '$04',
'ACSR_ACIS': '$03',
'DIDR1': '&127',
'DIDR1_AIN1D': '$02',
'DIDR1_AIN0D': '$01',
'SREG': '&95',
'SREG_I': '$80',
'SREG_T': '$40',
'SREG_H': '$20',
'SREG_S': '$10',
'SREG_V': '$08',
'SREG_N': '$04',
'SREG_Z': '$02',
'SREG_C': '$01',
'SP': '&93',
'OSCCAL': '&102',
'RCCTRL': '&103',
'RCCTRL_RCFREQ': '$01',
'CLKPR': '&97',
'CLKPR_CLKPCE': '$80',
'CLKPR_CLKPS': '$0F',
'SMCR': '&83',
'SMCR_SM': '$0E',
'SMCR_SE': '$01',
'EIND': '&92',
'GPIOR2': '&75',
'GPIOR2_GPIOR': '$FF',
'GPIOR1': '&74',
'GPIOR1_GPIOR': '$FF',
'GPIOR0': '&62',
'GPIOR0_GPIOR07': '$80',
'GPIOR0_GPIOR06': '$40',
'GPIOR0_GPIOR05': '$20',
'GPIOR0_GPIOR04': '$10',
'GPIOR0_GPIOR03': '$08',
'GPIOR0_GPIOR02': '$04',
'GPIOR0_GPIOR01': '$02',
'GPIOR0_GPIOR00': '$01',
'PRR1': '&101',
'PRR1_PRUSB': '$80',
'PRR1_PRTIM3': '$08',
'PRR1_PRUSART1': '$01',
'PRR0': '&100',
'PRR0_PRTWI': '$80',
'PRR0_PRTIM2': '$40',
'PRR0_PRTIM0': '$20',
'PRR0_PRTIM1': '$08',
'PRR0_PRSPI': '$04',
'PRR0_PRUSART0': '$02',
'PRR0_PRADC': '$01',
'CLKSTA': '&199',
'CLKSTA_RCON': '$02',
'CLKSTA_EXTON': '$01',
'CLKSEL1': '&198',
'CLKSEL1_RCCKSEL': '$F0',
'CLKSEL1_EXCKSEL': '$0F',
'CLKSEL0': '&197',
'CLKSEL0_RCSUT': '$C0',
'CLKSEL0_EXSUT': '$30',
'CLKSEL0_RCE': '$08',
'CLKSEL0_EXTE': '$04',
'CLKSEL0_CLKS': '$01',
'PLLCSR': '&73',
'PLLCSR_PINDIV': '$10',
'PLLCSR_PLLE': '$02',
'PLLCSR_PLOCK': '$01',
'PLLFRQ': '&82',
'PLLFRQ_PINMUX': '$80',
'PLLFRQ_PLLUSB': '$40',
'PLLFRQ_PLLTM': '$30',
'PLLFRQ_PDIV': '$0F',
'UEINT': '&244',
'UEBCHX': '&243',
'UEBCLX': '&242',
'UEDATX': '&241',
'UEDATX_DAT': '$FF',
'UEIENX': '&240',
'UEIENX_FLERRE': '$80',
'UEIENX_NAKINE': '$40',
'UEIENX_NAKOUTE': '$10',
'UEIENX_RXSTPE': '$08',
'UEIENX_RXOUTE': '$04',
'UEIENX_STALLEDE': '$02',
'UEIENX_TXINE': '$01',
'UESTA1X': '&239',
'UESTA1X_CTRLDIR': '$04',
'UESTA1X_CURRBK': '$03',
'UESTA0X': '&238',
'UESTA0X_CFGOK': '$80',
'UESTA0X_OVERFI': '$40',
'UESTA0X_UNDERFI': '$20',
'UESTA0X_DTSEQ': '$0C',
'UESTA0X_NBUSYBK': '$03',
'UECFG1X': '&237',
'UECFG1X_EPSIZE': '$70',
'UECFG1X_EPBK': '$0C',
'UECFG1X_ALLOC': '$02',
'UECFG0X': '&236',
'UECFG0X_EPTYPE': '$C0',
'UECFG0X_EPDIR': '$01',
'UECONX': '&235',
'UECONX_STALLRQ': '$20',
'UECONX_STALLRQC': '$10',
'UECONX_RSTDT': '$08',
'UECONX_EPEN': '$01',
'UERST': '&234',
'UERST_EPRST': '$7F',
'UENUM': '&233',
'UEINTX': '&232',
'UEINTX_FIFOCON': '$80',
'UEINTX_NAKINI': '$40',
'UEINTX_RWAL': '$20',
'UEINTX_NAKOUTI': '$10',
'UEINTX_RXSTPI': '$08',
'UEINTX_RXOUTI': '$04',
'UEINTX_STALLEDI': '$02',
'UEINTX_TXINI': '$01',
'UDMFN': '&230',
'UDMFN_FNCERR': '$10',
'UDFNUM': '&228',
'UDADDR': '&227',
'UDADDR_ADDEN': '$80',
'UDADDR_UADD': '$7F',
'UDIEN': '&226',
'UDIEN_UPRSME': '$40',
'UDIEN_EORSME': '$20',
'UDIEN_WAKEUPE': '$10',
'UDIEN_EORSTE': '$08',
'UDIEN_SOFE': '$04',
'UDIEN_SUSPE': '$01',
'UDINT': '&225',
'UDINT_UPRSMI': '$40',
'UDINT_EORSMI': '$20',
'UDINT_WAKEUPI': '$10',
'UDINT_EORSTI': '$08',
'UDINT_SOFI': '$04',
'UDINT_SUSPI': '$01',
'UDCON': '&224',
'UDCON_LSM': '$04',
'UDCON_RSTCPU': '$08',
'UDCON_RMWKUP': '$02',
'UDCON_DETACH': '$01',
'USBCON': '&216',
'USBCON_USBE': '$80',
'USBCON_FRZCLK': '$20',
'USBCON_OTGPADE': '$10',
'USBCON_VBUSTE': '$01',
'USBINT': '&218',
'USBINT_VBUSTI': '$01',
'USBSTA': '&217',
'USBSTA_SPEED': '$08',
'USBSTA_VBUS': '$01',
'UHWCON': '&215',
'UHWCON_UVREGE': '$01',
'INT0Addr': '2',
'INT1Addr': '4',
'INT2Addr': '6',
'INT3Addr': '8',
'Reserved1Addr': '10',
'Reserved2Addr': '12',
'INT6Addr': '14',
'Reserved3Addr': '16',
'PCINT0Addr': '18',
'USB_GENAddr': '20',
'USB_COMAddr': '22',
'WDTAddr': '24',
'Reserved4Addr': '26',
'Reserved5Addr': '28',
'Reserved6Addr': '30',
'TIMER1_CAPTAddr': '32',
'TIMER1_COMPAAddr': '34',
'TIMER1_COMPBAddr': '36',
'TIMER1_COMPCAddr': '38',
'TIMER1_OVFAddr': '40',
'TIMER0_COMPAAddr': '42',
'TIMER0_COMPBAddr': '44',
'TIMER0_OVFAddr': '46',
'SPI__STCAddr': '48',
'USART1__RXAddr': '50',
'USART1__UDREAddr': '52',
'USART1__TXAddr': '54',
'ANALOG_COMPAddr': '56',
'ADCAddr': '58',
'EE_READYAddr': '60',
'TIMER3_CAPTAddr': '62',
'TIMER3_COMPAAddr': '64',
'TIMER3_COMPBAddr': '66',
'TIMER3_COMPCAddr': '68',
'TIMER3_OVFAddr': '70',
'TWIAddr': '72',
'SPM_READYAddr': '74',
'TIMER4_COMPAAddr': '76',
'TIMER4_COMPBAddr': '78',
'TIMER4_COMPDAddr': '80',
'TIMER4_OVFAddr': '82',
'TIMER4_FPFAddr': '84'
}
| hickey/amforth | core/devices/atmega16u4/device.py | Python | gpl-2.0 | 10,989 | 0.070252 |
# -*- coding: utf-8 -*-
# Copyright (C) 2013 Fabio Falcinelli
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import socket
from tornado import httpclient
from tornado.ioloop import IOLoop
from tornado.tcpserver import TCPServer
from tornado.websocket import WebSocketClientConnection
from wstunnel.toolbox import tuple_to_address
from wstunnel.exception import EndpointNotAvailableException
from wstunnel.filters import FilterException
__author__ = "fabio"
logger = logging.getLogger(__name__)
def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None, **kwargs):
"""Client-side websocket support.
Takes a url and returns a Future whose result is a
`WebSocketClientConnection`.
"""
options = httpclient.HTTPRequest._DEFAULTS.copy()
options.update(kwargs)
if io_loop is None:
io_loop = IOLoop.current()
request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout,
validate_cert=kwargs.get("validate_cert", True))
request = httpclient._RequestProxy(request, options)
conn = WebSocketClientConnection(io_loop, request)
if callback is not None:
io_loop.add_future(conn.connect_future, callback)
return conn.connect_future
class WebSocketProxy(TCPServer):
"""
Listen on a port and delegate accepted connections to a WebSocketProxyConnection
"""
def __init__(self, port, ws_url, **kwargs):
super(WebSocketProxy, self).__init__(kwargs.get("io_loop"),
kwargs.get("ssl_options"))
self.bind(port,
kwargs.get("address", ''),
kwargs.get("family", socket.AF_UNSPEC),
kwargs.get("backlog", 128))
self.ws_url = ws_url
self.ws_options = kwargs.get("ws_options", {})
self.filters = kwargs.get("filters", [])
self.serving = False
self.ws_conn = None
self._address_list = []
@property
def address_list(self):
return self._address_list
def handle_stream(self, stream, address):
"""
Handle a new client connection with a proxy over websocket
"""
logger.info("Got connection from %s on %s" % (tuple_to_address(stream.socket.getpeername()),
tuple_to_address(stream.socket.getsockname())))
self.ws_conn = WebSocketProxyConnection(self.ws_url, stream, address,
filters=self.filters,
ws_options=self.ws_options)
self.ws_conn.connect()
def start(self, num_processes=1):
super(WebSocketProxy, self).start(num_processes)
self._address_list = [(s.getsockname()[0], s.getsockname()[1]) for s in self._sockets.values()]
self.serving = True
def stop(self):
super(WebSocketProxy, self).stop()
self.serving = False
def __str__(self):
return "WebSocketProxy %s" % (" | ".join(["%s --> %s" %
("%s:%d" % (a, p), self.ws_url) for (a, p) in self.address_list]))
class WebSocketProxyConnection(object):
"""
Handles the client connection and works as a proxy over a websocket connection
"""
def __init__(self, url, io_stream, address, ws_options=None, **kwargs):
self.url = url
self.io_loop = kwargs.get("io_loop")
self.connect_timeout = kwargs.get("connect_timeout", None)
self.keep_alive = kwargs.get("keep_alive", None)
self.ws_options = ws_options
self.io_stream, self.address = io_stream, address
self.filters = kwargs.get("filters", [])
self.io_stream.set_close_callback(self.on_close)
self.ws_conn = None
def connect(self):
logger.info("Connecting WebSocket at url %s" % self.url)
websocket_connect(self.url,
self.io_loop,
callback=self.on_open,
connect_timeout=self.connect_timeout,
**self.ws_options)
def on_open(self, ws_conn):
"""
When the websocket connection is handshaked, start reading for data over the client socket
connection
"""
try:
self.ws_conn = ws_conn.result()
except httpclient.HTTPError as e:
#TODO: change with raise EndpointNotAvailableException(message="The server endpoint is not available") from e
raise EndpointNotAvailableException("The server endpoint is not available", cause=e)
self.ws_conn.on_message = self.on_message
self.ws_conn.release_callback = self.on_close
self.io_stream.read_until_close(self.on_close, streaming_callback=self.on_peer_message)
def on_message(self, message):
"""
On a message received from websocket, send back to client peer
"""
try:
data = None if message is None else bytes(message)
for filtr in self.filters:
data = filtr.ws_to_socket(data=data)
if data:
self.io_stream.write(data)
except FilterException as e:
logger.exception(e)
self.on_close()
def on_close(self, *args, **kwargs):
"""
Handles the close event from the client socket
"""
logger.info("Closing connection with client at {0}:{1}".format(*self.address))
logger.debug("Received args %s and %s", args, kwargs)
if not self.io_stream.closed():
self.io_stream.close()
def on_peer_message(self, message):
"""
On data received from client peer, forward through WebSocket
"""
try:
data = None if message is None else bytes(message)
for filtr in self.filters:
data = filtr.socket_to_ws(data=data)
if data:
self.ws_conn.write_message(data, binary=True)
except FilterException as e:
logger.exception(e)
self.on_close()
class WSTunnelClient(object):
"""
Manages redirects from local ports to remote websocket servers
"""
def __init__(self, proxies=None, address='', family=socket.AF_UNSPEC, io_loop=None, ssl_options=None,
ws_options=None):
self.stream_options = {
"address": address,
"family": family,
"io_loop": io_loop,
"ssl_options": ssl_options,
}
self.ws_options = ws_options or {}
self.proxies = proxies or {}
self.serving = False
self._num_proc = 1
if proxies:
for port, ws_url in proxies.items():
self.add_proxy(port, WebSocketProxy(port=port,
ws_url=ws_url,
ws_options=self.ws_options,
**self.stream_options))
def add_proxy(self, key, ws_proxy):
"""
Adds a proxy to the list.
If the tunnel is serving connections, the proxy gets started.
"""
self.proxies[key] = ws_proxy
if self.serving:
ws_proxy.start(self._num_proc)
logger.info("Started %s" % ws_proxy)
def remove_proxy(self, key):
"""
Removes a proxy from the list.
If the tunnel is serving connections, the proxy gets stopped.
"""
ws_proxy = self.proxies.get(key)
if ws_proxy:
if self.serving:
ws_proxy.stop()
logger.info("Removing %s" % ws_proxy)
del self.proxies[key]
def get_proxy(self, key):
"""
Return the proxy associated to the given name.
"""
return self.proxies.get(key)
@property
def address_list(self):
"""
Returns the address (<host>, <port> tuple) list of all the addresses used
"""
l = []
for service in self.proxies.values():
l.extend(service.address_list)
return l
def install_filter(self, filtr):
"""
Install the given filter to all the current mapped services
"""
for ws_proxy in self.proxies.values():
ws_proxy.filters.append(filtr)
def uninstall_filter(self, filtr):
"""
Uninstall the given filter from all the current mapped services
"""
for ws_proxy in self.proxies.values():
ws_proxy.filters.remove(filtr)
def start(self, num_processes=1):
"""
Start the client tunnel service by starting each configured proxy
"""
logger.info("Starting %d %s processes" % (num_processes, self.__class__.__name__))
self._num_proc = num_processes
for key, ws_proxy in self.proxies.items():
ws_proxy.start(num_processes)
logger.info("Started %s" % ws_proxy)
self.serving = True
def stop(self):
"""
Stop the client tunnel service by stopping each configured proxy
"""
logger.info("Stopping {}".format(self.__class__.__name__))
for key, ws_proxy in self.proxies.items():
ws_proxy.stop()
logger.info("Stopped %s" % ws_proxy)
self.serving = False
| ffalcinelli/wstunnel | wstunnel/client.py | Python | lgpl-3.0 | 10,007 | 0.001799 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Classes and methods related to model_fn."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import six
from tensorflow.python.estimator.export.export_output import ExportOutput
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.training import monitored_session
from tensorflow.python.training import session_run_hook
from tensorflow.python.util import nest
class ModeKeys(object):
"""Standard names for model modes.
The following standard keys are defined:
* `TRAIN`: training mode.
* `EVAL`: evaluation mode.
* `PREDICT`: inference mode.
"""
TRAIN = 'train'
EVAL = 'eval'
PREDICT = 'infer'
LOSS_METRIC_KEY = 'loss'
AVERAGE_LOSS_METRIC_KEY = 'average_loss'
class EstimatorSpec(
collections.namedtuple('EstimatorSpec', [
'predictions', 'loss', 'train_op', 'eval_metric_ops',
'export_outputs', 'training_chief_hooks', 'training_hooks',
'scaffold', 'evaluation_hooks'
])):
"""Ops and objects returned from a `model_fn` and passed to `Estimator`.
`EstimatorSpec` fully defines the model to be run by `Estimator`.
"""
def __new__(cls,
mode,
predictions=None,
loss=None,
train_op=None,
eval_metric_ops=None,
export_outputs=None,
training_chief_hooks=None,
training_hooks=None,
scaffold=None,
evaluation_hooks=None):
"""Creates a validated `EstimatorSpec` instance.
Depending on the value of `mode`, different arguments are required. Namely
* For `mode == ModeKeys.TRAIN`: required fields are `loss` and `train_op`.
* For `mode == ModeKeys.EVAL`: required field is `loss`.
* For `mode == ModeKeys.PREDICT`: required fields are `predictions`.
model_fn can populate all arguments independent of mode. In this case, some
arguments will be ignored by `Estimator`. E.g. `train_op` will be ignored
in eval and infer modes. Example:
```python
def my_model_fn(mode, features, labels):
predictions = ...
loss = ...
train_op = ...
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op)
```
Alternatively, model_fn can just populate the arguments appropriate to the
given mode. Example:
```python
def my_model_fn(mode, features, labels):
if (mode == tf.estimator.ModeKeys.TRAIN or
mode == tf.estimator.ModeKeys.EVAL):
loss = ...
else:
loss = None
if mode == tf.estimator.ModeKeys.TRAIN:
train_op = ...
else:
train_op = None
if mode == tf.estimator.ModeKeys.PREDICT:
predictions = ...
else:
predictions = None
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op)
```
Args:
mode: A `ModeKeys`. Specifies if this is training, evaluation or
prediction.
predictions: Predictions `Tensor` or dict of `Tensor`.
loss: Training loss `Tensor`. Must be either scalar, or with shape `[1]`.
train_op: Op for the training step.
eval_metric_ops: Dict of metric results keyed by name. The values of the
dict are the results of calling a metric function, namely a
`(metric_tensor, update_op)` tuple.
export_outputs: Describes the output signatures to be exported to
`SavedModel` and used during serving.
A dict `{name: output}` where:
* name: An arbitrary name for this output.
* output: an `ExportOutput` object such as `ClassificationOutput`,
`RegressionOutput`, or `PredictOutput`.
Single-headed models only need to specify one entry in this dictionary.
Multi-headed models should specify one entry for each head, one of
which must be named using
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY.
training_chief_hooks: Iterable of `tf.train.SessionRunHook` objects to
run on the chief worker during training.
training_hooks: Iterable of `tf.train.SessionRunHook` objects to run
on all workers during training.
scaffold: A `tf.train.Scaffold` object that can be used to set
initialization, saver, and more to be used in training.
evaluation_hooks: Iterable of `tf.train.SessionRunHook` objects to
run during evaluation.
Returns:
A validated `EstimatorSpec` object.
Raises:
ValueError: If validation fails.
TypeError: If any of the arguments is not the expected type.
"""
# Validate train_op.
if train_op is None:
if mode == ModeKeys.TRAIN:
raise ValueError('Missing train_op.')
else:
_check_is_tensor_or_operation(train_op, 'train_op')
# Validate loss.
if loss is None:
if mode in (ModeKeys.TRAIN, ModeKeys.EVAL):
raise ValueError('Missing loss.')
else:
loss = _check_is_tensor(loss, 'loss')
loss_shape = loss.get_shape()
if loss_shape.num_elements() not in (None, 1):
raise ValueError('Loss must be scalar, given: {}'.format(loss))
if not loss_shape.is_compatible_with(tensor_shape.scalar()):
loss = array_ops.reshape(loss, [])
# Validate predictions.
if predictions is None:
if mode == ModeKeys.PREDICT:
raise ValueError('Missing predictions.')
predictions = {}
else:
if isinstance(predictions, dict):
predictions = {
k: _check_is_tensor(v, 'predictions[{}]'.format(k))
for k, v in six.iteritems(predictions)
}
else:
predictions = _check_is_tensor(predictions, 'predictions')
# Validate eval_metric_ops.
if eval_metric_ops is None:
eval_metric_ops = {}
else:
if not isinstance(eval_metric_ops, dict):
raise TypeError(
'eval_metric_ops must be a dict, given: {}'.format(eval_metric_ops))
for key, metric_value_and_update in six.iteritems(eval_metric_ops):
if (not isinstance(metric_value_and_update, tuple) or
len(metric_value_and_update) != 2):
raise TypeError(
'Values of eval_metric_ops must be (metric_value, update_op) '
'tuples, given: {} for key: {}'.format(
metric_value_and_update, key))
metric_value, metric_update = metric_value_and_update
for metric_value_member in nest.flatten(metric_value):
# Allow (possibly nested) tuples for metric values, but require that
# each of them be Tensors or Operations.
_check_is_tensor_or_operation(metric_value_member,
'eval_metric_ops[{}]'.format(key))
_check_is_tensor_or_operation(metric_update,
'eval_metric_ops[{}]'.format(key))
# Validate export_outputs.
if export_outputs is not None:
if not isinstance(export_outputs, dict):
raise TypeError('export_outputs must be dict, given: {}'.format(
export_outputs))
for v in six.itervalues(export_outputs):
if not isinstance(v, ExportOutput):
raise TypeError(
'Values in export_outputs must be ExportOutput objects. '
'Given: {}'.format(export_outputs))
# Note export_outputs is allowed to be empty.
if len(export_outputs) == 1:
(key, value), = export_outputs.items()
if key != signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
export_outputs[
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = value
if len(export_outputs) > 1:
if (signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
not in export_outputs):
raise ValueError(
'Multiple export_outputs were provided, but none of them is '
'specified as the default. Do this by naming one of them with '
'signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY.')
# Validate that all tensors and ops are from the default graph.
default_graph = ops.get_default_graph()
# We enumerate possible error causes here to aid in debugging.
error_message_template = (
'{0} with "{1}" must be from the default graph. '
'Possible causes of this error include: \n\n'
'1) {0} was created outside the context of the default graph.'
'\n\n'
'2) The object passed through to EstimatorSpec was not created '
'in the most recent call to "model_fn".')
if isinstance(predictions, dict):
for key, value in six.iteritems(predictions):
if value.graph is not default_graph:
raise ValueError(error_message_template.format(
'prediction values',
'{0}: {1}'.format(key, value.name)))
elif predictions is not None:
# 'predictions' must be a single Tensor.
if predictions.graph is not default_graph:
raise ValueError(error_message_template.format(
'prediction values', predictions.name))
if loss is not None and loss.graph is not default_graph:
raise ValueError(error_message_template.format('loss', loss.name))
if train_op is not None and train_op.graph is not default_graph:
raise ValueError(error_message_template.format('train_op', train_op.name))
for key, value in list(six.iteritems(eval_metric_ops)):
values = nest.flatten(value)
for value in values:
if value.graph is not default_graph:
raise ValueError(error_message_template.format(
'eval_metric_ops',
'{0}: {1}'.format(key, value.name)))
# Validate hooks.
training_chief_hooks = tuple(training_chief_hooks or [])
training_hooks = tuple(training_hooks or [])
evaluation_hooks = tuple(evaluation_hooks or [])
for hook in training_hooks + training_chief_hooks + evaluation_hooks:
if not isinstance(hook, session_run_hook.SessionRunHook):
raise TypeError(
'All hooks must be SessionRunHook instances, given: {}'.format(
hook))
scaffold = scaffold or monitored_session.Scaffold()
# Validate scaffold.
if not isinstance(scaffold, monitored_session.Scaffold):
raise TypeError(
'scaffold must be tf.train.Scaffold. Given: {}'.format(scaffold))
return super(EstimatorSpec, cls).__new__(
cls,
predictions=predictions,
loss=loss,
train_op=train_op,
eval_metric_ops=eval_metric_ops,
export_outputs=export_outputs,
training_chief_hooks=training_chief_hooks,
training_hooks=training_hooks,
scaffold=scaffold,
evaluation_hooks=evaluation_hooks)
def _check_is_tensor_or_operation(x, name):
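"""Raises TypeError if `x` is neither an Operation nor a Tensor."""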
if not (isinstance(x, ops.Operation) or isinstance(x, ops.Tensor)):
raise TypeError('{} must be Operation or Tensor, given: {}'.format(name, x))
def _check_is_tensor(x, tensor_name):
"""Returns `x` if it is a `Tensor`, raises TypeError otherwise."""
if not isinstance(x, ops.Tensor):
raise TypeError('{} must be Tensor, given: {}'.format(tensor_name, x))
return x
| mixturemodel-flow/tensorflow | tensorflow/python/estimator/model_fn.py | Python | apache-2.0 | 12,164 | 0.004357 |
import logging
from airflow.contrib.hooks.bigquery_hook import BigQueryHook
from airflow.models import BaseOperator
from airflow.utils import apply_defaults
class BigQueryOperator(BaseOperator):
"""
Executes BigQuery SQL queries in a specific BigQuery database
"""
template_fields = ('bql', 'destination_dataset_table')
template_ext = ('.sql',)
ui_color = '#e4f0e8'
@apply_defaults
def __init__(self,
bql,
destination_dataset_table=False,
write_disposition='WRITE_EMPTY',
bigquery_conn_id='bigquery_default',
delegate_to=None,
*args,
**kwargs):
"""
Create a new BigQueryOperator.
:param bql: the sql code to be executed
:type bql: Can receive a str representing a sql statement,
a list of str (sql statements), or reference to a template file.
Template reference are recognized by str ending in '.sql'
:param destination_dataset_table: A dotted dataset.table that, if set,
will store the results of the query.
:type destination_dataset_table: string
:param bigquery_conn_id: reference to a specific BigQuery hook.
:type bigquery_conn_id: string
:param delegate_to: The account to impersonate, if any.
For this to work, the service account making the request must have domain-wide delegation enabled.
:type delegate_to: string
"""
super(BigQueryOperator, self).__init__(*args, **kwargs)
self.bql = bql
self.destination_dataset_table = destination_dataset_table
self.write_disposition = write_disposition
self.bigquery_conn_id = bigquery_conn_id
self.delegate_to = delegate_to
def execute(self, context):
logging.info('Executing: %s', str(self.bql))
hook = BigQueryHook(bigquery_conn_id=self.bigquery_conn_id, delegate_to=self.delegate_to)
conn = hook.get_conn()
cursor = conn.cursor()
cursor.run_query(self.bql, self.destination_dataset_table, self.write_disposition)
| Chedi/airflow | airflow/contrib/operators/bigquery_operator.py | Python | apache-2.0 | 2,155 | 0.003712 |
#!/usr/bin/python
# coding: utf-8
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess, time, sys
import json
import datetime
from optparse import OptionParser
SKIP_TEST="-DskipTests"
AMBARI_AUTH_HEADERS = "--header 'Authorization:Basic YWRtaW46YWRtaW4=' --header 'X-Requested-By: PIVOTAL'"
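# 'YWRtaW46YWRtaW4=' is the base64 encoding of the default credentials 'admin:admin'.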
AMBARI_BUILD_DOCKER_ROOT = "/tmp/ambari-build-docker"
NO_EXIT_SLEEP_TIME=60
RETRY_MAX=20
def git_deep_cleaning():
proc = subprocess.Popen("git clean -xdf",
shell=True,
cwd="/tmp/ambari")
return proc.wait()
def ambariUnitTest():
proc = subprocess.Popen("mvn -fae clean install",
shell=True,
cwd="/tmp/ambari")
return proc.wait()
def buildAmbari(stack_distribution, supplemental_distribution=None):
stack_distribution_param = ("-Dstack.distribution=" + stack_distribution) if stack_distribution is not None else ""
supplemental_distribution_param = ("-Dsupplemental.distribution=" + supplemental_distribution) if supplemental_distribution is not None else ""
proc = subprocess.Popen("mvn -B clean install package rpm:rpm -Dmaven.clover.skip=true -Dfindbugs.skip=true "
+ SKIP_TEST + " "
+ stack_distribution_param + " "
+ supplemental_distribution_param + " "
+ " -Dpython.ver=\"python >= 2.6\"",
shell=True,
cwd="/tmp/ambari")
return proc.wait()
def install_ambari_server():
proc = subprocess.Popen("sudo yum install -y ambari-server-*.x86_64.rpm",
shell=True,
cwd="/tmp/ambari/ambari-server/target/rpm/ambari-server/RPMS/x86_64")
return proc.wait()
def install_ambari_agent():
proc = subprocess.Popen("sudo yum install -y ambari-agent-*.x86_64.rpm",
shell=True,
cwd="/tmp/ambari/ambari-agent/target/rpm/ambari-agent/RPMS/x86_64")
return proc.wait()
def setup_ambari_server():
proc = subprocess.Popen("echo -e '\n\n\n\n' | sudo ambari-server setup",
shell=True)
return proc.wait()
def start_ambari_server(debug=False):
proc = subprocess.Popen("sudo ambari-server start" + (" --debug" if debug else ""),
shell=True)
return proc.wait()
def start_dependant_services():
retcode = 0
proc = subprocess.Popen("sudo service sshd start", shell=True)
retcode += proc.wait()
proc = subprocess.Popen("sudo service ntpd start", shell=True)
retcode += proc.wait()
return retcode
def configure_ambari_agent():
proc = subprocess.Popen("hostname -f", stdout=subprocess.PIPE, shell=True)
hostname = proc.stdout.read().rstrip()
proc = subprocess.Popen("sudo sed -i 's/hostname=localhost/hostname=" + hostname + "/g' /etc/ambari-agent/conf/ambari-agent.ini",
shell=True)
return proc.wait()
def start_ambari_agent(wait_until_registered = True):
retcode = 0
proc = subprocess.Popen("service ambari-agent start",
shell=True)
retcode += proc.wait()
if wait_until_registered:
if not wait_until_ambari_agent_registered():
print "ERROR: ambari-agent was not registered."
sys.exit(1)
return retcode
def wait_until_ambari_agent_registered():
'''
Return True if the ambari agent is found registered, False on timeout.
'''
count = 0
while count < RETRY_MAX:
count += 1
proc = subprocess.Popen("curl " +
"http://localhost:8080/api/v1/hosts " +
AMBARI_AUTH_HEADERS,
stdout=subprocess.PIPE,
shell=True)
hosts_result_string = proc.stdout.read()
hosts_result_json = json.loads(hosts_result_string)
if len(hosts_result_json["items"]) != 0:
return True
time.sleep(5)
return False
def post_blueprint():
proc = subprocess.Popen("curl -X POST -D - " +
"-d @single-node-HDP-2.1-blueprint1.json http://localhost:8080/api/v1/blueprints/myblueprint1 " +
AMBARI_AUTH_HEADERS ,
cwd=AMBARI_BUILD_DOCKER_ROOT + "/blueprints",
shell=True)
return proc.wait()
def create_cluster():
proc = subprocess.Popen("curl -X POST -D - " +
"-d @single-node-hostmapping1.json http://localhost:8080/api/v1/clusters/mycluster1 " +
AMBARI_AUTH_HEADERS ,
cwd=AMBARI_BUILD_DOCKER_ROOT + "/blueprints",
shell=True)
return proc.wait()
# Loop to not to exit Docker container
def no_exit():
print ""
print "loop to not to exit docker container..."
print ""
while True:
time.sleep(NO_EXIT_SLEEP_TIME)
class ParseResult:
is_deep_clean = False
is_rebuild = False
stack_distribution = None
supplemental_distribution = None
is_test = False
is_install_server = False
is_install_agent = False
is_deploy = False
is_server_debug = False
def parse(argv):
result = ParseResult()
if len(argv) >=2:
parser = OptionParser()
parser.add_option("-c", "--clean",
dest="is_deep_clean",
action="store_true",
default=False,
help="if this option is set, git clean -xdf is executed for the ambari local git repo")
parser.add_option("-b", "--rebuild",
dest="is_rebuild",
action="store_true",
default=False,
help="set this flag if you want to rebuild Ambari code")
parser.add_option("-s", "--stack_distribution",
dest="stack_distribution",
help="set a stack distribution. [HDP|PHD|BIGTOP]. Make sure -b is also set when you set a stack distribution")
parser.add_option("-x", "--supplemental_distribution",
dest="supplemental_distribution",
help="set a supplement stack distribution in addition to the primary one. [BigInsights]. Make sure -b is also set when you set a supplement stack distribution")
parser.add_option("-d", "--server_debug",
dest="is_server_debug",
action="store_true",
default=False,
help="set a debug option for ambari-server")
(options, args) = parser.parse_args(argv[1:])
if options.is_deep_clean:
result.is_deep_clean = True
if options.is_rebuild:
result.is_rebuild = True
if options.stack_distribution:
result.stack_distribution = options.stack_distribution
if options.supplemental_distribution:
result.supplemental_distribution = options.supplemental_distribution
if options.is_server_debug:
result.is_server_debug = True
if argv[0] == "test":
result.is_test = True
if argv[0] == "server":
result.is_install_server = True
if argv[0] == "agent":
result.is_install_server = True
result.is_install_agent = True
if argv[0] == "deploy":
result.is_install_server = True
result.is_install_agent = True
result.is_deploy = True
return result
if __name__ == "__main__":
if len(sys.argv) == 1:
print "specify one of test, server, agent or deploy"
sys.exit(1)
start = datetime.datetime.utcnow()
# test: execute unit test
# server: install ambari-server
# with or without rebuild
# agent: install ambari-server and ambari-agent
# with or without rebuild
# deploy: install ambari-server, ambari-agent and deploy Hadoop
# with or without rebuild
parsed_args = parse(sys.argv[1:])
if parsed_args.is_deep_clean:
retcode = git_deep_cleaning()
if retcode != 0: sys.exit(retcode)
if parsed_args.is_test:
retcode = ambariUnitTest()
end = datetime.datetime.utcnow()
print ""
print "Duration: " + str((end-start).seconds) + " seconds"
sys.exit(retcode)
if parsed_args.is_rebuild:
retcode = buildAmbari(parsed_args.stack_distribution, supplemental_distribution=parsed_args.supplemental_distribution)
if retcode != 0: sys.exit(retcode)
if parsed_args.is_install_server:
retcode = install_ambari_server()
if retcode != 0: sys.exit(retcode)
retcode = setup_ambari_server()
if retcode != 0: sys.exit(retcode)
retcode = start_ambari_server(parsed_args.is_server_debug)
if retcode != 0: sys.exit(retcode)
retcode = start_dependant_services()
if retcode != 0: sys.exit(retcode)
if parsed_args.is_install_agent:
retcode = install_ambari_agent()
if retcode != 0: sys.exit(retcode)
retcode = configure_ambari_agent()
if retcode != 0: sys.exit(retcode)
retcode = start_ambari_agent()
if retcode != 0: sys.exit(retcode)
if parsed_args.is_deploy:
retcode = post_blueprint()
if retcode != 0: sys.exit(retcode)
retcode = create_cluster()
if retcode != 0: sys.exit(retcode)
end = datetime.datetime.utcnow()
print ""
print "Duration: " + str((end-start).seconds) + " seconds"
print "Parameters: " + str(sys.argv)
no_exit()
|
arenadata/ambari
|
dev-support/docker/docker/bin/ambaribuild.py
|
Python
|
apache-2.0
| 8,584 | 0.033667 |
#!/usr/bin/env python
from setuptools import setup, find_packages
with open('pypd/version.py') as version_file:
exec(compile(version_file.read(), version_file.name, 'exec'))
options = {
'name': 'pypd',
'version': __version__,
'packages': find_packages(),
'scripts': [],
'description': 'A python client for PagerDuty API',
'author': 'JD Cumpson',
'author_email': 'jdc@pagerduty.com',
'maintainer_email': 'jdc@pagerduty.com',
'license': 'MIT',
'url': 'https://github.com/PagerDuty/pypd',
'download_url': 'https://github.com/PagerDuty/pypd/archive/master.tar.gz',
'classifiers': [
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
],
'install_requires': ['ujson', 'requests'],
'tests_require': [],
'cmdclass': {}
}
setup(**options)
|
ryplo/helpme
|
setup.py
|
Python
|
mit
| 959 | 0 |
import unittest
from autosklearn.pipeline.components.classification.extra_trees import \
ExtraTreesClassifier
from autosklearn.pipeline.util import _test_classifier, \
_test_classifier_iterative_fit, _test_classifier_predict_proba
import numpy as np
import sklearn.metrics
import sklearn.ensemble
class ExtraTreesComponentTest(unittest.TestCase):
def test_default_configuration(self):
for i in range(10):
predictions, targets = \
_test_classifier(ExtraTreesClassifier)
self.assertAlmostEqual(0.95999999999999996,
sklearn.metrics.accuracy_score(targets, predictions))
def test_default_configuration_predict_proba(self):
for i in range(10):
predictions, targets = \
_test_classifier_predict_proba(ExtraTreesClassifier)
self.assertAlmostEqual(0.12052046298054782,
sklearn.metrics.log_loss(
targets, predictions))
def test_default_configuration_sparse(self):
for i in range(10):
predictions, targets = \
_test_classifier(ExtraTreesClassifier, sparse=True)
self.assertAlmostEqual(0.71999999999999997,
sklearn.metrics.accuracy_score(targets,
predictions))
def test_default_configuration_iterative_fit(self):
for i in range(10):
predictions, targets = \
_test_classifier_iterative_fit(ExtraTreesClassifier)
self.assertAlmostEqual(0.93999999999999995,
sklearn.metrics.accuracy_score(targets,
predictions))
def test_default_configuration_binary(self):
for i in range(10):
predictions, targets = \
_test_classifier(ExtraTreesClassifier, make_binary=True)
self.assertAlmostEqual(1,
sklearn.metrics.accuracy_score(targets,
predictions))
def test_default_configuration_multilabel(self):
for i in range(10):
predictions, targets = \
_test_classifier(ExtraTreesClassifier, make_multilabel=True)
self.assertAlmostEqual(0.97060428849902536,
sklearn.metrics.average_precision_score(
targets, predictions))
def test_default_configuration_predict_proba_multilabel(self):
for i in range(10):
predictions, targets = \
_test_classifier_predict_proba(ExtraTreesClassifier,
make_multilabel=True)
self.assertEqual(predictions.shape, ((50, 3)))
self.assertAlmostEqual(0.98976738180772728,
sklearn.metrics.average_precision_score(
targets, predictions))
def test_target_algorithm_multioutput_multiclass_support(self):
cls = sklearn.ensemble.ExtraTreesClassifier()
X = np.random.random((10, 10))
y = np.random.randint(0, 1, size=(10, 10))
# Running this without an exception is the purpose of this test!
cls.fit(X, y)
|
hmendozap/auto-sklearn
|
test/test_pipeline/components/classification/test_extra_trees.py
|
Python
|
bsd-3-clause
| 3,415 | 0.000586 |
import unittest
from os import mkdir
from os.path import exists
from shutil import rmtree
from whoosh import fields, index, qparser, store, writing
class TestIndexing(unittest.TestCase):
def make_index(self, dirname, schema):
if not exists(dirname):
mkdir(dirname)
st = store.FileStorage(dirname)
ix = index.Index(st, schema, create = True)
return ix
def destroy_index(self, dirname):
if exists(dirname):
rmtree(dirname)
def test_creation(self):
s = fields.Schema()
s.add("content", fields.TEXT(phrase = True))
s.add("title", fields.TEXT(stored = True))
s.add("path", fields.ID(stored = True))
s.add("tags", fields.KEYWORD(stored = True))
s.add("quick", fields.NGRAM)
s.add("note", fields.STORED)
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = writing.IndexWriter(ix)
w.add_document(title = u"First", content = u"This is the first document", path = u"/a",
tags = u"first second third", quick = u"First document", note = u"This is the first document")
w.start_document()
w.add_field("content", u"Let's try this again")
w.add_field("title", u"Second")
w.add_field("path", u"/b")
w.add_field("tags", u"Uno Dos Tres")
w.add_field("quick", u"Second document")
w.add_field("note", u"This is the second document")
w.end_document()
w.commit()
def test_integrity(self):
s = fields.Schema(name = fields.TEXT, value = fields.TEXT)
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = writing.IndexWriter(ix)
w.add_document(name = u"Yellow brown", value = u"Blue red green purple?")
w.add_document(name = u"Alpha beta", value = u"Gamma delta epsilon omega.")
w.commit()
w = writing.IndexWriter(ix)
w.add_document(name = u"One two", value = u"Three four five.")
w.commit()
tr = ix.term_reader()
self.assertEqual(ix.doc_count_all(), 3)
self.assertEqual(list(tr.lexicon("name")), ["alpha", "beta", "brown", "one", "two", "yellow"])
def test_lengths(self):
s = fields.Schema(f1 = fields.KEYWORD(stored = True, scorable = True),
f2 = fields.KEYWORD(stored = True, scorable = True))
ix = self.make_index("testindex", s)
try:
w = ix.writer()
tokens = u"ABCDEFG"
from itertools import cycle, islice
lengths = [10, 20, 2, 102, 45, 3, 420, 2]
for length in lengths:
w.add_document(f2 = u" ".join(islice(cycle(tokens), length)))
w.commit()
dr = ix.doc_reader()
ls1 = [dr.doc_field_length(i, "f1") for i in xrange(0, len(lengths))]
ls2 = [dr.doc_field_length(i, "f2") for i in xrange(0, len(lengths))]
self.assertEqual(ls1, [0]*len(lengths))
self.assertEqual(ls2, lengths)
dr.close()
ix.close()
finally:
self.destroy_index("testindex")
def test_lengths_ram(self):
s = fields.Schema(f1 = fields.KEYWORD(stored = True, scorable = True),
f2 = fields.KEYWORD(stored = True, scorable = True))
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = writing.IndexWriter(ix)
w.add_document(f1 = u"A B C D E", f2 = u"X Y Z")
w.add_document(f1 = u"B B B B C D D Q", f2 = u"Q R S T")
w.add_document(f1 = u"D E F", f2 = u"U V A B C D E")
w.commit()
dr = ix.doc_reader()
ls1 = [dr.doc_field_length(i, "f1") for i in xrange(0, 3)]
ls2 = [dr.doc_field_length(i, "f2") for i in xrange(0, 3)]
self.assertEqual(dr[0]["f1"], "A B C D E")
self.assertEqual(dr.doc_field_length(0, "f1"), 5)
self.assertEqual(dr.doc_field_length(1, "f1"), 8)
self.assertEqual(dr.doc_field_length(2, "f1"), 3)
self.assertEqual(dr.doc_field_length(0, "f2"), 3)
self.assertEqual(dr.doc_field_length(1, "f2"), 4)
self.assertEqual(dr.doc_field_length(2, "f2"), 7)
self.assertEqual(ix.field_length("f1"), 16)
self.assertEqual(ix.field_length("f2"), 14)
def test_merged_lengths(self):
s = fields.Schema(f1 = fields.KEYWORD(stored = True, scorable = True),
f2 = fields.KEYWORD(stored = True, scorable = True))
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = writing.IndexWriter(ix)
w.add_document(f1 = u"A B C", f2 = u"X")
w.add_document(f1 = u"B C D E", f2 = u"Y Z")
w.commit()
w = writing.IndexWriter(ix)
w.add_document(f1 = u"A", f2 = u"B C D E X Y")
w.add_document(f1 = u"B C", f2 = u"X")
w.commit(writing.NO_MERGE)
w = writing.IndexWriter(ix)
w.add_document(f1 = u"A B X Y Z", f2 = u"B C")
w.add_document(f1 = u"Y X", f2 = u"A B")
w.commit(writing.NO_MERGE)
dr = ix.doc_reader()
self.assertEqual(dr[0]["f1"], u"A B C")
self.assertEqual(dr.doc_field_length(0, "f1"), 3)
self.assertEqual(dr.doc_field_length(2, "f2"), 6)
self.assertEqual(dr.doc_field_length(4, "f1"), 5)
def test_frequency_keyword(self):
s = fields.Schema(content = fields.KEYWORD)
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = ix.writer()
w.add_document(content = u"A B C D E")
w.add_document(content = u"B B B B C D D")
w.add_document(content = u"D E F")
w.commit()
tr = ix.term_reader()
self.assertEqual(tr.doc_frequency("content", u"B"), 2)
self.assertEqual(tr.frequency("content", u"B"), 5)
self.assertEqual(tr.doc_frequency("content", u"E"), 2)
self.assertEqual(tr.frequency("content", u"E"), 2)
self.assertEqual(tr.doc_frequency("content", u"A"), 1)
self.assertEqual(tr.frequency("content", u"A"), 1)
self.assertEqual(tr.doc_frequency("content", u"D"), 3)
self.assertEqual(tr.frequency("content", u"D"), 4)
self.assertEqual(tr.doc_frequency("content", u"F"), 1)
self.assertEqual(tr.frequency("content", u"F"), 1)
self.assertEqual(tr.doc_frequency("content", u"Z"), 0)
self.assertEqual(tr.frequency("content", u"Z"), 0)
self.assertEqual(list(tr), [(0, u"A", 1, 1), (0, u"B", 2, 5),
(0, u"C", 2, 2), (0, u"D", 3, 4),
(0, u"E", 2, 2), (0, u"F", 1, 1)])
def test_frequency_text(self):
s = fields.Schema(content = fields.KEYWORD)
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = ix.writer()
w.add_document(content = u"alfa bravo charlie delta echo")
w.add_document(content = u"bravo bravo bravo bravo charlie delta delta")
w.add_document(content = u"delta echo foxtrot")
w.commit()
tr = ix.term_reader()
self.assertEqual(tr.doc_frequency("content", u"bravo"), 2)
self.assertEqual(tr.frequency("content", u"bravo"), 5)
self.assertEqual(tr.doc_frequency("content", u"echo"), 2)
self.assertEqual(tr.frequency("content", u"echo"), 2)
self.assertEqual(tr.doc_frequency("content", u"alfa"), 1)
self.assertEqual(tr.frequency("content", u"alfa"), 1)
self.assertEqual(tr.doc_frequency("content", u"delta"), 3)
self.assertEqual(tr.frequency("content", u"delta"), 4)
self.assertEqual(tr.doc_frequency("content", u"foxtrot"), 1)
self.assertEqual(tr.frequency("content", u"foxtrot"), 1)
self.assertEqual(tr.doc_frequency("content", u"zulu"), 0)
self.assertEqual(tr.frequency("content", u"zulu"), 0)
self.assertEqual(list(tr), [(0, u"alfa", 1, 1), (0, u"bravo", 2, 5),
(0, u"charlie", 2, 2), (0, u"delta", 3, 4),
(0, u"echo", 2, 2), (0, u"foxtrot", 1, 1)])
def test_deletion(self):
s = fields.Schema(key = fields.ID, name = fields.TEXT, value = fields.TEXT)
st = store.RamStorage()
ix = index.Index(st, s, create = True)
w = writing.IndexWriter(ix)
w.add_document(key = u"A", name = u"Yellow brown", value = u"Blue red green purple?")
w.add_document(key = u"B", name = u"Alpha beta", value = u"Gamma delta epsilon omega.")
w.add_document(key = u"C", name = u"One two", value = u"Three four five.")
w.commit()
count = ix.delete_by_term("key", u"B")
self.assertEqual(count, 1)
ix.commit()
self.assertEqual(ix.doc_count_all(), 3)
self.assertEqual(ix.doc_count(), 2)
ix.optimize()
self.assertEqual(ix.doc_count(), 2)
tr = ix.term_reader()
self.assertEqual(list(tr.lexicon("name")), ["brown", "one", "two", "yellow"])
def test_update(self):
# Test update with multiple unique keys
SAMPLE_DOCS = [{"id": u"test1", "path": u"/test/1", "text": u"Hello"},
{"id": u"test2", "path": u"/test/2", "text": u"There"},
{"id": u"test3", "path": u"/test/3", "text": u"Reader"},
]
schema = fields.Schema(id=fields.ID(unique=True, stored=True),
path=fields.ID(unique=True, stored=True),
text=fields.TEXT)
ix = self.make_index("testindex", schema)
try:
writer = ix.writer()
for doc in SAMPLE_DOCS:
writer.add_document(**doc)
writer.commit()
writer = ix.writer()
writer.update_document(**{"id": u"test2",
"path": u"test/1",
"text": u"Replacement"})
writer.commit()
ix.close()
finally:
self.destroy_index("testindex")
def test_reindex(self):
SAMPLE_DOCS = [
{'id': u'test1', 'text': u'This is a document. Awesome, is it not?'},
{'id': u'test2', 'text': u'Another document. Astounding!'},
{'id': u'test3', 'text': u'A fascinating article on the behavior of domestic steak knives.'},
]
schema = fields.Schema(text=fields.TEXT(stored=True),
id=fields.ID(unique=True, stored=True))
ix = self.make_index("testindex", schema)
try:
def reindex():
writer = ix.writer()
for doc in SAMPLE_DOCS:
writer.update_document(**doc)
writer.commit()
reindex()
self.assertEqual(ix.doc_count_all(), 3)
reindex()
ix.close()
finally:
self.destroy_index("testindex")
if __name__ == '__main__':
unittest.main()
|
tallstreet/Whoosh-AppEngine
|
tests/test_indexing.py
|
Python
|
apache-2.0
| 11,352 | 0.019204 |
from django.conf import settings
from datetime import timedelta
# Endpoint settings
OAI_BASE_URL="http"
if settings.HTTPS == "on":
OAI_BASE_URL="https"
OAI_BASE_URL=OAI_BASE_URL+"://"+settings.SITE_NAME
REPOSITORY_NAME = settings.PLATFORM_NAME
ADMIN_EMAIL = settings.TECH_SUPPORT_EMAIL
OAI_ENDPOINT_NAME = 'oai'
RESULTS_LIMIT = 100
RESUMPTION_TOKEN_VALIDITY = timedelta(hours=6)
METADATA_FORMAT = 'oai_dc'
OWN_SET_PREFIX = settings.PLATFORM_NAME
DISABLE_PRINT_OWN_SET_PREFIX = True
RESUMPTION_TOKEN_SALT = 'change_me' # salt used to generate resumption tokens
if hasattr(settings, 'OAI_SETTINGS'):
    OAI_ENDPOINT_NAME = settings.OAI_SETTINGS.get('OAI_ENDPOINT_NAME') or OAI_ENDPOINT_NAME
RESULTS_LIMIT = settings.OAI_SETTINGS.get('RESULTS_LIMIT') or RESULTS_LIMIT
METADATA_FORMAT = settings.OAI_SETTINGS.get('METADATA_FORMAT') or METADATA_FORMAT
RESUMPTION_TOKEN_SALT = settings.OAI_SETTINGS.get('RESUMPTION_TOKEN_SALT') or RESUMPTION_TOKEN_SALT
DISABLE_PRINT_OWN_SET_PREFIX = settings.OAI_SETTINGS.get('DISABLE_PRINT_OWN_SET_PREFIX') or DISABLE_PRINT_OWN_SET_PREFIX
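# A minimal sketch (not part of the original module) of how OAI_SETTINGS could be declared in
# the Django settings file; every key is optional and falls back to the defaults above:
#
#     OAI_SETTINGS = {
#         'OAI_ENDPOINT_NAME': 'oai',
#         'RESULTS_LIMIT': 200,
#         'METADATA_FORMAT': 'oai_dc',
#         'RESUMPTION_TOKEN_SALT': 'replace-with-a-secret-value',
#         'DISABLE_PRINT_OWN_SET_PREFIX': True,
#     }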
|
dcosentino/edx-platform
|
lms/djangoapps/oai/settings.py
|
Python
|
agpl-3.0
| 1,073 | 0.008388 |
from djoser.conf import settings
__all__ = ['settings']
def get_user_email(user):
email_field_name = get_user_email_field_name(user)
return getattr(user, email_field_name, None)
def get_user_email_field_name(user):
return user.get_email_field_name()
|
akalipetis/djoser
|
djoser/compat.py
|
Python
|
mit
| 267 | 0 |
from trackers import *
|
MikeDMorgan/scRNAseq
|
pipeline_docs/pipeline_scRnaseq/__init__.py
|
Python
|
mit
| 24 | 0 |
# -*- coding: utf-8 -*-
# This file is part of emesene.
#
# emesene is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# emesene is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with emesene; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
''' This module contains the tray icon's class'''
import sys
import PyQt4.QtGui as QtGui
import gui
import extension
from e3 import status
class TrayIcon (QtGui.QSystemTrayIcon, gui.BaseTray):
'''A class that implements the tray icon of emesene for Qt4'''
NAME = 'TrayIcon'
DESCRIPTION = 'Qt4 Tray Icon'
AUTHOR = 'Gabriele "Whisky" Visconti'
WEBSITE = ''
def __init__(self, handler, main_window=None):
'''
constructor
handler -- a e3common.Handler.TrayIconHandler object
'''
gui.BaseTray.__init__(self, handler)
QtGui.QSystemTrayIcon.__init__(self)
self._main_window = main_window
self.menu = None
self._conversations = None
self.setIcon(QtGui.QIcon(gui.theme.image_theme.logo))
self.activated.connect(self._on_tray_icon_clicked)
self.set_login()
# TODO: this is for mac os, and should be changed in the
# future (probably no tray icon at all, just the dock icon)
if sys.platform == 'darwin':
icon = QtGui.QIcon(gui.theme.image_theme.logo)
qt_app = QtGui.QApplication.instance()
qt_app.setWindowIcon(icon)
qt_app.setApplicationName('BHAWH')
else:
self.show()
def set_login(self):
'''Called when the login window is shown. Sets a proper
context menu in the Tray Icon.'''
tray_login_menu_cls = extension.get_default('tray login menu')
self.menu = tray_login_menu_cls(self.handler, self._main_window)
self.setIcon(QtGui.QIcon(gui.theme.image_theme.logo_panel))
self.setToolTip("emesene")
if sys.platform == 'darwin':
QtGui.qt_mac_set_dock_menu(self.menu)
else:
self.setContextMenu(self.menu)
def set_main(self, session):
'''Called when the main window is shown. Stores the contact list
and registers the callback for the status_change_succeed event'''
gui.BaseTray.set_main(self, session)
if self.menu:
self.menu.unsubscribe()
tray_main_menu_cls = extension.get_default('tray main menu')
self.menu = tray_main_menu_cls(self.handler, self._main_window)
self.setToolTip("emesene - " + self.handler.session.account.account)
self._on_status_change_succeed(self.handler.session.account.status)
if sys.platform == 'darwin':
QtGui.qt_mac_set_dock_menu(self.menu)
else:
self.setContextMenu(self.menu)
def set_conversations(self, conversations):
'''Store a reference to the conversation page'''
self._conversations = conversations
def set_visible(self, visible):
'''Changes icon's visibility'''
self.setVisible(visible)
def _on_tray_icon_clicked(self, reason):
'''This slot is called when the user clicks the tray icon.
Toggles main window's visibility'''
if not self._main_window:
return
if reason == QtGui.QSystemTrayIcon.Trigger:
if not self._main_window.isVisible():
self._main_window.show()
self._main_window.activateWindow()
self._main_window.raise_()
else: # visible
if self._main_window.isActiveWindow():
self._main_window.hide()
else:
self._main_window.activateWindow()
self._main_window.raise_()
elif reason == QtGui.QSystemTrayIcon.Context:
if self.menu:
self.menu.show()
def _on_contact_attr_changed(self, *args):
"""
This is called when a contact changes something
"""
self.menu.list._on_contact_change_something(*args)
def _on_status_change_succeed(self, stat):
"""
This is called when status is successfully changed
"""
if stat not in status.ALL or stat == -1:
return
self.setIcon(QtGui.QIcon(
gui.theme.image_theme.status_icons_panel[stat]))
def hide(self):
self.unsubscribe()
QtGui.QSystemTrayIcon.setVisible(self, False)
def unsubscribe(self):
self.disconnect_signals()
if self.menu:
self.menu.unsubscribe()
|
emesene/emesene
|
emesene/gui/qt4ui/TrayIcon.py
|
Python
|
gpl-3.0
| 5,102 | 0 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import datetime
import pytz
def now():
return datetime.datetime.utcnow().replace(tzinfo=pytz.UTC)
|
Callek/build-relengapi
|
relengapi/lib/time.py
|
Python
|
mpl-2.0
| 304 | 0 |
import smbus
from time import sleep
def delay(time):
sleep(time/1000.0)
def delayMicroseconds(time):
sleep(time/1000000.0)
from hd44780 import HD44780
class Screen(HD44780):
"""A driver for MCP23008-based I2C LCD backpacks. The one tested had "WIDE.HK" written on it."""
def __init__(self, bus=1, addr=0x27, debug=False, **kwargs):
"""Initialises the ``Screen`` object.
Kwargs:
* ``bus``: I2C bus number.
* ``addr``: I2C address of the board.
* ``debug``: enables printing out LCD commands.
* ``**kwargs``: all the other arguments, get passed further to HD44780 constructor
"""
self.bus_num = bus
self.bus = smbus.SMBus(self.bus_num)
if type(addr) in [str, unicode]:
addr = int(addr, 16)
self.addr = addr
self.debug = debug
self.i2c_init()
HD44780.__init__(self, debug=self.debug, **kwargs)
def i2c_init(self):
"""Inits the MCP23017 IC for desired operation."""
self.setMCPreg(0x05, 0x0c)
self.setMCPreg(0x00, 0x00)
def write_byte(self, byte, char_mode=False):
"""Takes a byte and sends the high nibble, then the low nibble (as per HD44780 doc). Passes ``char_mode`` to ``self.write4bits``."""
if self.debug and not char_mode:
print(hex(byte))
self.write4bits(byte >> 4, char_mode)
self.write4bits(byte & 0x0F, char_mode)
def write4bits(self, data, char_mode=False):
"""Writes a nibble to the display. If ``char_mode`` is set, holds the RS line high."""
if char_mode:
data |= 0x10
self.setMCPreg(0x0a, data)
data ^= 0x80
delayMicroseconds(1.0)
self.setMCPreg(0x0a, data)
data ^= 0x80
delayMicroseconds(1.0)
self.setMCPreg(0x0a, data)
delay(1.0)
def setMCPreg(self, reg, val):
"""Sets the MCP23017 register."""
self.bus.write_byte_data(self.addr, reg, val)
if __name__ == "__main__":
screen = Screen(bus=1, addr=0x27, cols=16, rows=2, debug=True, autoscroll=False)
line = "0123456789012345"
if True:
screen.display_data(line, line[::-1])
sleep(1)
screen.display_data(line[::-1], line)
sleep(1)
screen.clear()
|
CRImier/pyLCI
|
output/drivers/mcp23008.py
|
Python
|
apache-2.0
| 2,598 | 0.007698 |
# Copyright (c) 2014 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
from blazar import exceptions
from blazar.i18n import _
LOG = logging.getLogger(__name__)
class BlazarDBException(exceptions.BlazarException):
msg_fmt = _('An unknown database exception occurred')
class BlazarDBDuplicateEntry(BlazarDBException):
msg_fmt = _('Duplicate entry for %(columns)s in %(model)s model was found')
class BlazarDBNotFound(BlazarDBException):
msg_fmt = _('%(id)s %(model)s was not found')
class BlazarDBInvalidFilter(BlazarDBException):
msg_fmt = _('%(query_filter)s is invalid')
class BlazarDBInvalidFilterOperator(BlazarDBException):
msg_fmt = _('%(filter_operator)s is invalid')
class BlazarDBExtraCapabilitiesNotEnabled(BlazarDBException):
    msg_fmt = _('%(resource_type)s does not have extra capabilities enabled.')
class BlazarDBInvalidExtraCapability(BlazarDBException):
msg_fmt = _('%(property_name)s does not exist for resource type '
'%(resource_type)s.')
class BlazarDBForbiddenExtraCapability(BlazarDBException):
msg_fmt = _('%(property_name)s cannot be set as an extra capability')
|
ChameleonCloud/blazar
|
blazar/db/exceptions.py
|
Python
|
apache-2.0
| 1,705 | 0 |
from urlparse import urljoin
from os.path import dirname, basename
from xml.etree import ElementTree
from mimetypes import guess_type
from StringIO import StringIO
import requests
def update_print(apibase, password, print_id, progress):
"""
"""
params = {'id': print_id}
data = dict(progress=progress, password=password)
res = requests.post(urljoin(apibase, '/update-atlas.php'), params=params, data=data)
    assert res.status_code == 200, 'POST to update-atlas.php resulting in status %s instead of 200' % res.status_code
def finish_print(apibase, password, print_id, print_info):
"""
"""
params = {'id': print_id}
print_info.update(dict(password=password))
res = requests.post(urljoin(apibase, '/finish-atlas.php'), params=params, data=print_info)
    assert res.status_code == 200, 'POST to finish-atlas.php resulting in status %s instead of 200' % res.status_code
def update_scan(apibase, password, scan_id, progress):
"""
"""
params = {'id': scan_id}
data = {'password': password,
'progress': progress}
res = requests.post(urljoin(apibase, '/update-scan.php'), params=params, data=data)
    assert res.status_code == 200, 'POST to update-scan.php resulting in status %s instead of 200' % res.status_code
def finish_scan(apibase, password, scan_id, uploaded_file, print_id, print_page_number, print_href, min_coord, max_coord, geojpeg_bounds):
"""
"""
params = {'id': scan_id}
data = {
'print_id': print_id,
'print_page_number': print_page_number,
'print_href': print_href,
'password': password,
'uploaded_file': uploaded_file,
'has_geotiff': 'yes',
'has_geojpeg': 'yes',
'has_stickers': 'no',
'min_row': min_coord.row, 'max_row': max_coord.row,
'min_column': min_coord.column, 'max_column': max_coord.column,
'min_zoom': min_coord.zoom, 'max_zoom': max_coord.zoom,
'geojpeg_bounds': '%.8f,%.8f,%.8f,%.8f' % geojpeg_bounds
}
res = requests.post(urljoin(apibase, '/finish-scan.php'), params=params, data=data)
    assert res.status_code == 200, 'POST to finish-scan.php resulting in status %s instead of 200' % res.status_code
def fail_scan(apibase, password, scan_id):
"""
"""
params = {'id': scan_id}
data = {'password': password}
res = requests.post(urljoin(apibase, '/fail-scan.php'), params=params, data=data)
# TODO when does this fail? this failing shouldn't be fatal
    assert res.status_code == 200, 'POST to fail-scan.php resulting in status %s instead of 200' % res.status_code
def finish_form(apibase, password, form_id, action_url, http_method, title, fields):
"""
"""
data = dict(password=password, action_url=action_url, http_method=http_method, title=title)
for (index, field) in enumerate(fields):
data['fields[%d][name]' % index] = field['name']
data['fields[%d][label]' % index] = field['label']
data['fields[%d][type]' % index] = field['type']
params = {'id': form_id}
res = requests.post(urljoin(apibase, '/finish-form.php'), params=params, data=data)
    assert res.status_code == 200, 'POST to finish-form.php resulting in status %s instead of 200' % res.status_code
def fail_form(apibase, password, form_id):
"""
"""
params = {'id': form_id}
data = {'password': password}
res = requests.post(urljoin(apibase, '/fail-form.php'), params=params, data=data)
    assert res.status_code == 200, 'POST to fail-form.php resulting in status %s instead of 200' % res.status_code
def upload(params, file_path, file_contents, apibase, password):
""" Upload a file via the API append.php form input provision thingie.
This allows uploads to either target S3 or the app itself.
"""
params.update(dict(password=password,
dirname=dirname(file_path),
mimetype=(guess_type(file_path)[0] or '')))
res = requests.get(urljoin(apibase, '/append.php'), params=params, headers=dict(Accept='application/paperwalking+xml'))
form = ElementTree.parse(StringIO(res.text)).getroot()
if form.tag == 'form':
form_action = form.attrib['action']
inputs = form.findall('.//input')
fields = {}
files = {}
for input in inputs:
if input.attrib['type'] != 'file' and 'name' in input.attrib:
fields[input.attrib['name']] = input.attrib['value']
elif input.attrib['type'] == 'file':
files[input.attrib['name']] = (basename(file_path), file_contents)
if len(files) == 1:
base_url = [el.text for el in form.findall(".//*") if el.get('id', '') == 'base-url'][0]
resource_url = urljoin(base_url, file_path)
res = requests.post(urljoin(apibase, form_action), data=fields, files=files)
assert res.status_code in range(200, 308), 'POST of file to %s resulting in status %s instead of 2XX/3XX' % (form_action, res.status_code)
return resource_url
raise Exception('Did not find a form with a file input, why is that?')
def append_print_file(print_id, file_path, file_contents, apibase, password):
""" Upload a print.
"""
params = {
"print": print_id,
}
return upload(params, file_path, file_contents, apibase, password)
def append_scan_file(scan_id, file_path, file_contents, apibase, password):
""" Upload a scan.
"""
params = {
"scan": scan_id,
}
return upload(params, file_path, file_contents, apibase, password)
def get_print_info(print_url):
"""
"""
print print_url
res = requests.get(print_url, headers=dict(Accept='application/paperwalking+xml'))
if res.status_code == 404:
raise Exception("No such atlas: %s" % print_url)
print_ = ElementTree.parse(StringIO(res.text)).getroot()
print_id = print_.attrib['id']
paper = print_.find('paper').attrib['size']
orientation = print_.find('paper').attrib['orientation']
layout = print_.find('paper').attrib.get('layout', 'full-page')
north = float(print_.find('bounds').find('north').text)
south = float(print_.find('bounds').find('south').text)
east = float(print_.find('bounds').find('east').text)
west = float(print_.find('bounds').find('west').text)
print print_id, north, west, south, east, paper, orientation, layout
return print_id, north, west, south, east, paper, orientation, layout
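# Usage sketch (hypothetical values, not from the original code): upload a scanned page and keep
# the resource URL that the API hands back.
#
#     url = append_scan_file(scan_id, 'scans/1234/scan.jpg', jpeg_bytes, apibase, password)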
|
stamen/fieldpapers
|
decoder/apiutils.py
|
Python
|
gpl-2.0
| 6,689 | 0.007325 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Provides utilities to preprocess images in CIFAR-10.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
_PADDING = 4
slim = tf.contrib.slim
def preprocess_for_train(image,
output_height,
output_width,
padding=_PADDING):
"""Preprocesses the given image for training.
  Note that the image is zero-padded by `padding` pixels on each side and then
  randomly cropped back to 32x32.
Args:
image: A `Tensor` representing an image of arbitrary size.
output_height: The height of the image after preprocessing.
output_width: The width of the image after preprocessing.
    padding: The amount of padding before and after each dimension of the image.
Returns:
A preprocessed image.
"""
tf.summary.image('image', tf.expand_dims(image, 0))
# Transform the image to floats.
image = tf.to_float(image)
if padding > 0:
image = tf.pad(image, [[padding, padding], [padding, padding], [0, 0]])
# image = tf.image.resize_images(image,(output_height,output_width))
# Randomly crop a [height, width] section of the image.
distorted_image = tf.random_crop(image,
[32, 32, 3])
# Randomly flip the image horizontally.
distorted_image = tf.image.random_flip_left_right(distorted_image)
tf.summary.image('distorted_image', tf.expand_dims(distorted_image, 0))
# Because these operations are not commutative, consider randomizing
  # the order of their operations.
distorted_image = tf.image.random_brightness(distorted_image,
max_delta=63)
distorted_image = tf.image.random_contrast(distorted_image,
lower=0.2, upper=1.8)
# Subtract off the mean and divide by the variance of the pixels.
return tf.image.per_image_standardization(distorted_image)
def preprocess_for_eval(image, output_height, output_width):
"""Preprocesses the given image for evaluation.
Args:
image: A `Tensor` representing an image of arbitrary size.
output_height: The height of the image after preprocessing.
output_width: The width of the image after preprocessing.
Returns:
A preprocessed image.
"""
tf.summary.image('image', tf.expand_dims(image, 0))
# Transform the image to floats.
image = tf.to_float(image)
# image = tf.image.resize_images(image, (output_height, output_width))
# Resize and crop if needed.
  resized_image = tf.image.resize_image_with_crop_or_pad(image,
                                                          output_height,
                                                          output_width)
tf.summary.image('resized_image', tf.expand_dims(resized_image, 0))
# Subtract off the mean and divide by the variance of the pixels.
return tf.image.per_image_standardization(resized_image)
def preprocess_image(image, output_height, output_width, is_training=False):
"""Preprocesses the given image.
Args:
image: A `Tensor` representing an image of arbitrary size.
output_height: The height of the image after preprocessing.
output_width: The width of the image after preprocessing.
is_training: `True` if we're preprocessing the image for training and
`False` otherwise.
Returns:
A preprocessed image.
"""
if is_training:
return preprocess_for_train(image, output_height, output_width)
else:
return preprocess_for_eval(image, output_height, output_width)
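if __name__ == '__main__':
  # A small usage sketch, not part of the original module: the random tensor below only stands
  # in for a real 32x32 CIFAR-10 image coming from a dataset provider.
  dummy_image = tf.random_uniform([32, 32, 3], maxval=255, dtype=tf.float32)
  train_op = preprocess_image(dummy_image, 32, 32, is_training=True)
  eval_op = preprocess_image(dummy_image, 32, 32, is_training=False)
  with tf.Session() as sess:
    train_image, eval_image = sess.run([train_op, eval_op])
    print('train image shape:', train_image.shape)
    print('eval image shape:', eval_image.shape)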
|
MingLin-home/Ming_slim
|
preprocessing/cifarnet_preprocessing.py
|
Python
|
gpl-3.0
| 4,252 | 0.007291 |
#!/usr/bin/env python
import time
from nicfit.aio import Application
async def _main(args):
print(args)
print("Sleeping 2...")
time.sleep(2)
print("Sleeping 0...")
return 0
def atexit():
print("atexit")
app = Application(_main, atexit=atexit)
app.arg_parser.add_argument("--example", help="Example cli")
app.run()
assert not"will not execute"
|
nicfit/nicfit.py
|
examples/asyncio_example.py
|
Python
|
mit
| 371 | 0.005391 |
from compare import expect
from django.contrib.auth.models import User
from django.test import TestCase
from django.test.client import Client
from django.core.management import call_command
import sys
from tardis.tardis_portal.models import \
Experiment, Dataset, Dataset_File, ExperimentACL, License, UserProfile, \
ExperimentParameterSet, ExperimentParameter, DatasetParameterSet, DatafileParameterSet
def _create_test_user():
user_ = User(username='tom',
first_name='Thomas',
last_name='Atkins',
email='tommy@atkins.net')
user_.save()
UserProfile(user=user_).save()
return user_
def _create_license():
license_ = License(name='Creative Commons Attribution-NoDerivs 2.5 Australia',
url='http://creativecommons.org/licenses/by-nd/2.5/au/',
internal_description='CC BY 2.5 AU',
allows_distribution=True)
license_.save()
return license_
def _create_test_experiment(user, license_):
experiment = Experiment(title='Norwegian Blue',
description='Parrot + 40kV',
created_by=user)
experiment.public_access = Experiment.PUBLIC_ACCESS_FULL
experiment.license = license_
experiment.save()
experiment.author_experiment_set.create(order=0,
author="John Cleese",
url="http://nla.gov.au/nla.party-1")
experiment.author_experiment_set.create(order=1,
author="Michael Palin",
url="http://nla.gov.au/nla.party-2")
acl = ExperimentACL(experiment=experiment,
pluginId='django_user',
entityId=str(user.id),
isOwner=True,
canRead=True,
canWrite=True,
canDelete=True,
aclOwnershipType=ExperimentACL.OWNER_OWNED)
acl.save()
return experiment
def _create_test_dataset(nosDatafiles):
ds_ = Dataset(description='happy snaps of plumage')
ds_.save()
    for i in range(0, nosDatafiles):
df_ = Dataset_File(dataset=ds_, url='http://planet-python.org/' + str(_next_id()))
df_.save()
ds_.save()
return ds_
def _create_test_data():
# Create 2 experiments with 3 datasets, one of which is in both experiments.
user_ = _create_test_user()
license_ = _create_license()
exp1_ = _create_test_experiment(user_, license_)
exp2_ = _create_test_experiment(user_, license_)
ds1_ = _create_test_dataset(1)
ds2_ = _create_test_dataset(2)
ds3_ = _create_test_dataset(3)
    ds1_.experiments.add(exp1_)
    ds2_.experiments.add(exp1_)
    ds2_.experiments.add(exp2_)
    ds3_.experiments.add(exp2_)
ds1_.save()
ds2_.save()
ds3_.save()
exp1_.save()
exp2_.save()
return (exp1_, exp2_)
_counter = 1
def _next_id():
global _counter
res = _counter
_counter += 1
return res
class RmExperimentTestCase(TestCase):
def setUp(self):
pass
def testList(self):
(exp1_, exp2_) = _create_test_data()
expect(Dataset_File.objects.all().count()).to_be(6)
expect(len(exp1_.get_datafiles())).to_be(3)
expect(len(exp2_.get_datafiles())).to_be(5)
# Check that --list doesn't remove anything
call_command('rmexperiment', exp1_.pk, list=True)
expect(Dataset_File.objects.all().count()).to_be(6)
expect(len(exp1_.get_datafiles())).to_be(3)
expect(len(exp2_.get_datafiles())).to_be(5)
def testRemove(self):
(exp1_, exp2_) = _create_test_data()
expect(Dataset_File.objects.all().count()).to_be(6)
expect(len(exp1_.get_datafiles())).to_be(3)
expect(len(exp2_.get_datafiles())).to_be(5)
# Remove first experiment and check that the shared dataset hasn't been removed
call_command('rmexperiment', exp1_.pk, confirmed=True)
expect(Dataset_File.objects.all().count()).to_be(5)
expect(len(exp2_.get_datafiles())).to_be(5)
#Remove second experiment
call_command('rmexperiment', exp2_.pk, confirmed=True)
expect(Dataset_File.objects.all().count()).to_be(0)
#Check that everything else has been removed too
expect(ExperimentACL.objects.all().count()).to_be(0)
expect(ExperimentParameterSet.objects.all().count()).to_be(0)
expect(ExperimentParameter.objects.all().count()).to_be(0)
expect(DatasetParameterSet.objects.all().count()).to_be(0)
expect(DatafileParameterSet.objects.all().count()).to_be(0)
def tearDown(self):
pass
|
steveandroulakis/mytardis
|
tardis/tardis_portal/tests/test_rmexperiment.py
|
Python
|
bsd-3-clause
| 4,827 | 0.008494 |
import subprocess
from pkg_resources import resource_filename
def playit(file):
"""
Function used to play a sound file
"""
filepath = resource_filename(__name__, 'sound/' + file)
subprocess.Popen(["paplay", filepath])
|
lapisdecor/bzoinq
|
bzoinq/playit.py
|
Python
|
mit
| 240 | 0 |
"""revert: add plugin event acl to the admin backend
Revision ID: 97e2d9949db
Revises: 1e5140290977
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '97e2d9949db'
down_revision = '1e5140290977'
POLICY_NAME = 'wazo_default_admin_policy'
ACL_TEMPLATES = ['events.plugin.#']
policy_table = sa.sql.table(
'auth_policy', sa.Column('uuid', sa.String(38)), sa.Column('name', sa.String(80))
)
acl_template_table = sa.sql.table(
'auth_acl_template', sa.Column('id', sa.Integer), sa.Column('template', sa.Text)
)
policy_template = sa.sql.table(
'auth_policy_template',
sa.Column('policy_uuid', sa.String(38)),
sa.Column('template_id', sa.Integer),
)
def _find_acl_template(conn, acl_template):
query = (
sa.sql.select([acl_template_table.c.id])
.where(acl_template_table.c.template == acl_template)
.limit(1)
)
return conn.execute(query).scalar()
def _find_acl_templates(conn, acl_templates):
acl_template_ids = []
for acl_template in acl_templates:
acl_template_id = _find_acl_template(conn, acl_template)
if acl_template_id:
acl_template_ids.append(acl_template_id)
return acl_template_ids
def _get_policy_uuid(conn, policy_name):
policy_query = sa.sql.select([policy_table.c.uuid]).where(
policy_table.c.name == policy_name
)
for policy in conn.execute(policy_query).fetchall():
return policy[0]
def _insert_acl_template(conn, acl_templates):
acl_template_ids = []
for acl_template in acl_templates:
acl_template_id = _find_acl_template(conn, acl_template)
if not acl_template_id:
query = (
acl_template_table.insert()
.returning(acl_template_table.c.id)
.values(template=acl_template)
)
acl_template_id = conn.execute(query).scalar()
acl_template_ids.append(acl_template_id)
return acl_template_ids
def downgrade():
conn = op.get_bind()
policy_uuid = _get_policy_uuid(conn, POLICY_NAME)
acl_template_ids = _insert_acl_template(conn, ACL_TEMPLATES)
op.bulk_insert(
policy_template,
[
{'policy_uuid': policy_uuid, 'template_id': template_id}
for template_id in acl_template_ids
],
)
def upgrade():
conn = op.get_bind()
acl_template_ids = _find_acl_templates(conn, ACL_TEMPLATES)
if acl_template_ids:
policy_uuid = _get_policy_uuid(conn, POLICY_NAME)
delete_query = policy_template.delete().where(
sa.sql.and_(
policy_template.c.policy_uuid == policy_uuid,
policy_template.c.template_id.in_(acl_template_ids),
)
)
op.execute(delete_query)
|
wazo-pbx/xivo-auth
|
alembic/versions/97e2d9949db_revert_add_plugin_event_acl_to_the_.py
|
Python
|
gpl-3.0
| 2,816 | 0.00071 |
from . import minic_ast
class PrettyGenerator(object):
def __init__(self):
# Statements start with indentation of self.indent_level spaces, using
# the _make_indent method
#
self.indent_level = 0
def _make_indent(self):
return ' ' * self.indent_level
def visit(self, node):
method = 'visit_' + node.__class__.__name__
return getattr(self, method, self.generic_visit)(node)
def generic_visit(self, node):
#~ print('generic:', type(node))
if node is None:
return ''
else:
return ''.join(self.visit(c) for c_name, c in node.children())
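# A minimal sketch, not in the original file, of how the dispatch in visit() is meant to be
# extended: a subclass adds a visit_<NodeClassName> method and it is picked up automatically.
# `Constant` and its `value` attribute are hypothetical minic AST names used only for
# illustration.
class _ExamplePrettyGenerator(PrettyGenerator):
    def visit_Constant(self, node):
        return self._make_indent() + str(node.value)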
|
martylee/Python
|
CSC410-Project-1-master/minic/pretty_minic.py
|
Python
|
gpl-2.0
| 658 | 0.00152 |
from node_view import NodeGraphView
from node_scene import NodeGraphScene
from items.node_item import NodeItem
from items.connection_item import ConnectionItem
from items.connector_item import BaseConnectorItem, IOConnectorItem, InputConnectorItem, OutputConnectorItem
import node_utils
|
allebacco/PyNodeGraph
|
pynodegraph/__init__.py
|
Python
|
mit
| 286 | 0.006993 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db.models import F, Func, Value
class AlphabeticalPaginationMixin(object):
alphabetical_pagination_field = 'name'
def get_alphabetical_pagination_field(self):
return self.alphabetical_pagination_field
def get_selected_letter(self):
return self.request.GET.get('letter', 'a')
def get_base_queryset(self):
"""
Queryset before applying pagination filters.
"""
qs = super(AlphabeticalPaginationMixin, self).get_queryset().exclude(
**{self.get_alphabetical_pagination_field(): ''}
)
return qs
def get_queryset(self):
qs = self.get_base_queryset()
# FIXME Select Umlauts (using downgrade and also downgrade sort_name field?)
# FIXME Select on TRIM/LEFT as in get_letter_choices
filter = {
"{}__istartswith".format(self.get_alphabetical_pagination_field()):
self.get_selected_letter()}
return qs.filter(**filter).order_by(self.alphabetical_pagination_field)
def get_letter_choices(self):
return self.get_base_queryset().annotate(name_lower=Func(
Func(
Func(
F(self.get_alphabetical_pagination_field()), function='LOWER'),
function='TRIM'),
Value("1"), function='LEFT')).order_by(
'name_lower').distinct('name_lower').values_list('name_lower', flat=True)
def get_context_data(self, **kwargs):
context = super(AlphabeticalPaginationMixin, self).get_context_data(**kwargs)
context['selected_letter'] = self.get_selected_letter()
context['alphabet'] = self.get_letter_choices()
return context
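# Usage sketch (not part of the original module): the mixin is meant to be combined with a
# Django ListView; `Author` and `last_name` below are hypothetical.
#
#     from django.views.generic import ListView
#
#     class AuthorListView(AlphabeticalPaginationMixin, ListView):
#         model = Author
#         alphabetical_pagination_field = 'last_name'
#
# The selected letter comes from the `letter` GET parameter (defaulting to 'a'), and the
# template receives `selected_letter` and `alphabet` in the context.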
|
sha-red/django-shared-utils
|
shared/utils/views/alphabetical_pagination.py
|
Python
|
mit
| 1,768 | 0.002828 |
"""add graphql ACL to users
Revision ID: 2d4882d39dbb
Revises: c4d0e9ec46a9
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2d4882d39dbb'
down_revision = 'dc2848563b53'
POLICY_NAME = 'wazo_default_user_policy'
ACL_TEMPLATES = ['dird.graphql.me']
policy_table = sa.sql.table(
'auth_policy', sa.Column('uuid', sa.String(38)), sa.Column('name', sa.String(80))
)
acl_template_table = sa.sql.table(
'auth_acl_template', sa.Column('id', sa.Integer), sa.Column('template', sa.Text)
)
policy_template = sa.sql.table(
'auth_policy_template',
sa.Column('policy_uuid', sa.String(38)),
sa.Column('template_id', sa.Integer),
)
def _find_acl_template(conn, acl_template):
query = (
sa.sql.select([acl_template_table.c.id])
.where(acl_template_table.c.template == acl_template)
.limit(1)
)
return conn.execute(query).scalar()
def _find_acl_templates(conn, acl_templates):
acl_template_ids = []
for acl_template in acl_templates:
acl_template_id = _find_acl_template(conn, acl_template)
if acl_template_id:
acl_template_ids.append(acl_template_id)
return acl_template_ids
def _get_policy_uuid(conn, policy_name):
policy_query = sa.sql.select([policy_table.c.uuid]).where(
policy_table.c.name == policy_name
)
for policy in conn.execute(policy_query).fetchall():
return policy[0]
def _insert_acl_template(conn, acl_templates):
acl_template_ids = []
for acl_template in acl_templates:
acl_template_id = _find_acl_template(conn, acl_template)
if not acl_template_id:
query = (
acl_template_table.insert()
.returning(acl_template_table.c.id)
.values(template=acl_template)
)
acl_template_id = conn.execute(query).scalar()
acl_template_ids.append(acl_template_id)
return acl_template_ids
def _get_acl_template_ids(conn, policy_uuid):
query = sa.sql.select([policy_template.c.template_id]).where(
policy_template.c.policy_uuid == policy_uuid
)
return [acl_template_id for (acl_template_id,) in conn.execute(query).fetchall()]
def upgrade():
conn = op.get_bind()
policy_uuid = _get_policy_uuid(conn, POLICY_NAME)
if not policy_uuid:
return
acl_template_ids = _insert_acl_template(conn, ACL_TEMPLATES)
acl_template_ids_already_associated = _get_acl_template_ids(conn, policy_uuid)
for template_id in set(acl_template_ids) - set(acl_template_ids_already_associated):
query = policy_template.insert().values(
policy_uuid=policy_uuid, template_id=template_id
)
conn.execute(query)
def downgrade():
conn = op.get_bind()
acl_template_ids = _find_acl_templates(conn, ACL_TEMPLATES)
if not acl_template_ids:
return
policy_uuid = _get_policy_uuid(conn, POLICY_NAME)
if not policy_uuid:
return
delete_query = policy_template.delete().where(
sa.sql.and_(
policy_template.c.policy_uuid == policy_uuid,
policy_template.c.template_id.in_(acl_template_ids),
)
)
op.execute(delete_query)
|
wazo-pbx/xivo-auth
|
alembic/versions/2d4882d39dbb_add_graphql_acl_to_users.py
|
Python
|
gpl-3.0
| 3,250 | 0.001538 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# ndvi_test.py
#
# Copyright 2015 rob <rob@Novu>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
'''just an annoying dummy to get rid of Gtk2 and Gtk3 incompatibilities'''
from infrapix import infrapix
import sys
infrapix.ndvi(sys.argv[1], sys.argv[2], show_histogram=True)
|
PEAT-AI/Automato
|
Surveillance/make_ndvi.py
|
Python
|
gpl-3.0
| 1,020 | 0.009804 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Flowroute LLC
# Written by Matthew Williams <matthew@flowroute.com>
# Based on yum module written by Seth Vidal <skvidal at fedoraproject.org>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: apt
short_description: Manages apt-packages
description:
- Manages I(apt) packages (such as for Debian/Ubuntu).
version_added: "0.0.2"
options:
name:
description:
- A package name, like C(foo), or package specifier with version, like C(foo=1.0). Name wildcards (fnmatch) like C(apt*) and version wildcards like C(foo=1.0*) are also supported.
required: false
default: null
state:
description:
- Indicates the desired package state. C(latest) ensures that the latest version is installed. C(build-dep) ensures the package build dependencies are installed.
required: false
default: present
choices: [ "latest", "absent", "present", "build-dep" ]
update_cache:
description:
- Run the equivalent of C(apt-get update) before the operation. Can be run as part of the package installation or as a separate step.
required: false
default: no
choices: [ "yes", "no" ]
cache_valid_time:
description:
      - If C(update_cache) is specified and the last run is less than or equal to I(cache_valid_time) seconds ago, the C(update_cache) step gets skipped.
required: false
default: no
purge:
description:
- Will force purging of configuration files if the module state is set to I(absent).
required: false
default: no
choices: [ "yes", "no" ]
default_release:
description:
- Corresponds to the C(-t) option for I(apt) and sets pin priorities
required: false
default: null
install_recommends:
description:
- Corresponds to the C(--no-install-recommends) option for I(apt). Default behavior (C(yes)) replicates apt's default behavior; C(no) does not install recommended packages. Suggested packages are never installed.
required: false
default: yes
choices: [ "yes", "no" ]
force:
description:
- If C(yes), force installs/removes.
required: false
default: "no"
choices: [ "yes", "no" ]
upgrade:
description:
- 'If yes or safe, performs an aptitude safe-upgrade.'
- 'If full, performs an aptitude full-upgrade.'
- 'If dist, performs an apt-get dist-upgrade.'
- 'Note: This does not upgrade a specific package, use state=latest for that.'
version_added: "1.1"
required: false
default: "yes"
choices: [ "yes", "safe", "full", "dist"]
dpkg_options:
description:
- Add dpkg options to apt command. Defaults to '-o "Dpkg::Options::=--force-confdef" -o "Dpkg::Options::=--force-confold"'
- Options should be supplied as comma separated list
required: false
default: 'force-confdef,force-confold'
deb:
description:
- Path to a .deb package on the remote machine.
required: false
version_added: "1.6"
requirements: [ python-apt, aptitude ]
author: Matthew Williams
notes:
- Three of the upgrade modes (C(full), C(safe) and its alias C(yes)) require C(aptitude), otherwise
C(apt-get) suffices.
'''
EXAMPLES = '''
# Update repositories cache and install "foo" package
- apt: name=foo update_cache=yes
# Remove "foo" package
- apt: name=foo state=absent
# Install the package "foo"
- apt: name=foo state=present
# Install the version '1.00' of package "foo"
- apt: name=foo=1.00 state=present
# Update the repository cache and update package "nginx" to latest version using default release squeeze-backport
- apt: name=nginx state=latest default_release=squeeze-backports update_cache=yes
# Install latest version of "openjdk-6-jdk" ignoring "install-recommends"
- apt: name=openjdk-6-jdk state=latest install_recommends=no
# Update all packages to the latest version
- apt: upgrade=dist
# Run the equivalent of "apt-get update" as a separate step
- apt: update_cache=yes
# Only run "update_cache=yes" if the last one is more than 3600 seconds ago
- apt: update_cache=yes cache_valid_time=3600
# Pass options to dpkg on run
- apt: upgrade=dist update_cache=yes dpkg_options='force-confold,force-confdef'
# Install a .deb package
- apt: deb=/tmp/mypackage.deb
# Install the build dependencies for package "foo"
- apt: pkg=foo state=build-dep
'''
import traceback
# added to stave off future warnings about apt api
import warnings
warnings.filterwarnings('ignore', "apt API not stable yet", FutureWarning)
import os
import datetime
import fnmatch
import itertools
# APT related constants
APT_ENV_VARS = dict(
DEBIAN_FRONTEND = 'noninteractive',
DEBIAN_PRIORITY = 'critical',
LANG = 'C'
)
DPKG_OPTIONS = 'force-confdef,force-confold'
APT_GET_ZERO = "0 upgraded, 0 newly installed"
APTITUDE_ZERO = "0 packages upgraded, 0 newly installed"
APT_LISTS_PATH = "/var/lib/apt/lists"
APT_UPDATE_SUCCESS_STAMP_PATH = "/var/lib/apt/periodic/update-success-stamp"
HAS_PYTHON_APT = True
try:
import apt
import apt.debfile
import apt_pkg
except ImportError:
HAS_PYTHON_APT = False
def package_split(pkgspec):
parts = pkgspec.split('=', 1)
if len(parts) > 1:
return parts[0], parts[1]
else:
return parts[0], None
def package_versions(pkgname, pkg, pkg_cache):
try:
versions = set(p.version for p in pkg.versions)
except AttributeError:
# assume older version of python-apt is installed
# apt.package.Package#versions require python-apt >= 0.7.9.
pkg_cache_list = (p for p in pkg_cache.Packages if p.Name == pkgname)
pkg_versions = (p.VersionList for p in pkg_cache_list)
versions = set(p.VerStr for p in itertools.chain(*pkg_versions))
return versions
def package_version_compare(version, other_version):
try:
return apt_pkg.version_compare(version, other_version)
except AttributeError:
return apt_pkg.VersionCompare(version, other_version)
def package_status(m, pkgname, version, cache, state):
try:
        # get the package from the cache, as well as the
        # low-level apt_pkg.Package object which contains
        # state fields not directly accessible from the
        # higher-level apt.package.Package object.
pkg = cache[pkgname]
ll_pkg = cache._cache[pkgname] # the low-level package object
except KeyError:
if state == 'install':
try:
if cache.get_providing_packages(pkgname):
return False, True, False
m.fail_json(msg="No package matching '%s' is available" % pkgname)
except AttributeError:
# python-apt version too old to detect virtual packages
# mark as upgradable and let apt-get install deal with it
return False, True, False
else:
return False, False, False
try:
has_files = len(pkg.installed_files) > 0
except UnicodeDecodeError:
has_files = True
except AttributeError:
has_files = False # older python-apt cannot be used to determine non-purged
try:
package_is_installed = ll_pkg.current_state == apt_pkg.CURSTATE_INSTALLED
except AttributeError: # python-apt 0.7.X has very weak low-level object
try:
# might not be necessary as python-apt post-0.7.X should have current_state property
package_is_installed = pkg.is_installed
except AttributeError:
# assume older version of python-apt is installed
package_is_installed = pkg.isInstalled
if version:
versions = package_versions(pkgname, pkg, cache._cache)
avail_upgrades = fnmatch.filter(versions, version)
if package_is_installed:
try:
installed_version = pkg.installed.version
except AttributeError:
installed_version = pkg.installedVersion
# Only claim the package is installed if the version is matched as well
package_is_installed = fnmatch.fnmatch(installed_version, version)
# Only claim the package is upgradable if a candidate matches the version
package_is_upgradable = False
for candidate in avail_upgrades:
if package_version_compare(candidate, installed_version) > 0:
package_is_upgradable = True
break
else:
package_is_upgradable = bool(avail_upgrades)
else:
try:
package_is_upgradable = pkg.is_upgradable
except AttributeError:
# assume older version of python-apt is installed
package_is_upgradable = pkg.isUpgradable
return package_is_installed, package_is_upgradable, has_files
def expand_dpkg_options(dpkg_options_compressed):
options_list = dpkg_options_compressed.split(',')
dpkg_options = ""
for dpkg_option in options_list:
dpkg_options = '%s -o "Dpkg::Options::=--%s"' \
% (dpkg_options, dpkg_option)
return dpkg_options.strip()
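# For the default DPKG_OPTIONS above, expand_dpkg_options() yields (illustrative):
#   -o "Dpkg::Options::=--force-confdef" -o "Dpkg::Options::=--force-confold"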
def expand_pkgspec_from_fnmatches(m, pkgspec, cache):
new_pkgspec = []
for pkgspec_pattern in pkgspec:
pkgname_pattern, version = package_split(pkgspec_pattern)
# note that none of these chars is allowed in a (debian) pkgname
if frozenset('*?[]!').intersection(pkgname_pattern):
# handle multiarch pkgnames, the idea is that "apt*" should
# only select native packages. But "apt*:i386" should still work
if not ":" in pkgname_pattern:
try:
pkg_name_cache = _non_multiarch
except NameError:
pkg_name_cache = _non_multiarch = [pkg.name for pkg in cache if not ':' in pkg.name]
else:
try:
pkg_name_cache = _all_pkg_names
except NameError:
pkg_name_cache = _all_pkg_names = [pkg.name for pkg in cache]
matches = fnmatch.filter(pkg_name_cache, pkgname_pattern)
if len(matches) == 0:
m.fail_json(msg="No package(s) matching '%s' available" % str(pkgname_pattern))
else:
new_pkgspec.extend(matches)
else:
# No wildcards in name
new_pkgspec.append(pkgspec_pattern)
return new_pkgspec
def install(m, pkgspec, cache, upgrade=False, default_release=None,
install_recommends=True, force=False,
dpkg_options=expand_dpkg_options(DPKG_OPTIONS),
build_dep=False):
pkg_list = []
packages = ""
pkgspec = expand_pkgspec_from_fnmatches(m, pkgspec, cache)
for package in pkgspec:
name, version = package_split(package)
installed, upgradable, has_files = package_status(m, name, version, cache, state='install')
if build_dep:
# Let apt decide what to install
pkg_list.append("'%s'" % package)
continue
if not installed or (upgrade and upgradable):
pkg_list.append("'%s'" % package)
if installed and upgradable and version:
# This happens when the package is installed, a newer version is
# available, and the version is a wildcard that matches both
#
# We do not apply the upgrade flag because we cannot specify both
# a version and state=latest. (This behaviour mirrors how apt
# treats a version with wildcard in the package)
pkg_list.append("'%s'" % package)
packages = ' '.join(pkg_list)
if len(packages) != 0:
if force:
force_yes = '--force-yes'
else:
force_yes = ''
if m.check_mode:
check_arg = '--simulate'
else:
check_arg = ''
for (k,v) in APT_ENV_VARS.iteritems():
os.environ[k] = v
if build_dep:
cmd = "%s -y %s %s %s build-dep %s" % (APT_GET_CMD, dpkg_options, force_yes, check_arg, packages)
else:
cmd = "%s -y %s %s %s install %s" % (APT_GET_CMD, dpkg_options, force_yes, check_arg, packages)
if default_release:
cmd += " -t '%s'" % (default_release,)
if not install_recommends:
cmd += " --no-install-recommends"
rc, out, err = m.run_command(cmd)
if rc:
return (False, dict(msg="'%s' failed: %s" % (cmd, err), stdout=out, stderr=err))
else:
return (True, dict(changed=True, stdout=out, stderr=err))
else:
return (True, dict(changed=False))
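# Install local .deb files with dpkg; any missing dependencies are first
# installed through apt so everything can be satisfied in one run.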
def install_deb(m, debs, cache, force, install_recommends, dpkg_options):
changed=False
deps_to_install = []
pkgs_to_install = []
for deb_file in debs.split(','):
try:
pkg = apt.debfile.DebPackage(deb_file)
except SystemError, e:
            m.fail_json(msg="Error: %s\nSystem Error: %s" % (deb_file, str(e)))
# Check if it's already installed
if pkg.compare_to_version_in_cache() == pkg.VERSION_SAME:
continue
# Check if package is installable
if not pkg.check() and not force:
m.fail_json(msg=pkg._failure_string)
# add any missing deps to the list of deps we need
# to install so they're all done in one shot
deps_to_install.extend(pkg.missing_deps)
# and add this deb to the list of packages to install
pkgs_to_install.append(deb_file)
# install the deps through apt
retvals = {}
if len(deps_to_install) > 0:
(success, retvals) = install(m=m, pkgspec=deps_to_install, cache=cache,
install_recommends=install_recommends,
dpkg_options=expand_dpkg_options(dpkg_options))
if not success:
m.fail_json(**retvals)
changed = retvals.get('changed', False)
if len(pkgs_to_install) > 0:
options = ' '.join(["--%s"% x for x in dpkg_options.split(",")])
if m.check_mode:
options += " --simulate"
if force:
options += " --force-all"
cmd = "dpkg %s -i %s" % (options, " ".join(pkgs_to_install))
rc, out, err = m.run_command(cmd)
if "stdout" in retvals:
stdout = retvals["stdout"] + out
else:
stdout = out
if "stderr" in retvals:
stderr = retvals["stderr"] + err
else:
stderr = err
if rc == 0:
m.exit_json(changed=True, stdout=stdout, stderr=stderr)
else:
m.fail_json(msg="%s failed" % cmd, stdout=stdout, stderr=stderr)
else:
m.exit_json(changed=changed, stdout=retvals.get('stdout',''), stderr=retvals.get('stderr',''))
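# Remove (and optionally purge) the given packages via apt-get.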
def remove(m, pkgspec, cache, purge=False,
dpkg_options=expand_dpkg_options(DPKG_OPTIONS)):
pkg_list = []
pkgspec = expand_pkgspec_from_fnmatches(m, pkgspec, cache)
for package in pkgspec:
name, version = package_split(package)
installed, upgradable, has_files = package_status(m, name, version, cache, state='remove')
if installed or (has_files and purge):
pkg_list.append("'%s'" % package)
packages = ' '.join(pkg_list)
if len(packages) == 0:
m.exit_json(changed=False)
else:
if purge:
purge = '--purge'
else:
purge = ''
for (k,v) in APT_ENV_VARS.iteritems():
os.environ[k] = v
cmd = "%s -q -y %s %s remove %s" % (APT_GET_CMD, dpkg_options, purge, packages)
if m.check_mode:
m.exit_json(changed=True)
rc, out, err = m.run_command(cmd)
if rc:
m.fail_json(msg="'apt-get remove %s' failed: %s" % (packages, err), stdout=out, stderr=err)
m.exit_json(changed=True, stdout=out, stderr=err)
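# Upgrade the whole system: mode "dist" maps to apt-get dist-upgrade, "full"
# to aptitude full-upgrade, and the default ("yes"/"safe") to aptitude safe-upgrade.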
def upgrade(m, mode="yes", force=False, default_release=None,
dpkg_options=expand_dpkg_options(DPKG_OPTIONS)):
if m.check_mode:
check_arg = '--simulate'
else:
check_arg = ''
apt_cmd = None
prompt_regex = None
if mode == "dist":
# apt-get dist-upgrade
apt_cmd = APT_GET_CMD
upgrade_command = "dist-upgrade"
elif mode == "full":
# aptitude full-upgrade
apt_cmd = APTITUDE_CMD
upgrade_command = "full-upgrade"
else:
# aptitude safe-upgrade # mode=yes # default
apt_cmd = APTITUDE_CMD
upgrade_command = "safe-upgrade"
prompt_regex = r"(^Do you want to ignore this warning and proceed anyway\?|^\*\*\*.*\[default=.*\])"
if force:
if apt_cmd == APT_GET_CMD:
force_yes = '--force-yes'
else:
force_yes = '--assume-yes --allow-untrusted'
else:
force_yes = ''
apt_cmd_path = m.get_bin_path(apt_cmd, required=True)
for (k,v) in APT_ENV_VARS.iteritems():
os.environ[k] = v
cmd = '%s -y %s %s %s %s' % (apt_cmd_path, dpkg_options,
force_yes, check_arg, upgrade_command)
if default_release:
cmd += " -t '%s'" % (default_release,)
rc, out, err = m.run_command(cmd, prompt_regex=prompt_regex)
if rc:
m.fail_json(msg="'%s %s' failed: %s" % (apt_cmd, upgrade_command, err), stdout=out)
if (apt_cmd == APT_GET_CMD and APT_GET_ZERO in out) or (apt_cmd == APTITUDE_CMD and APTITUDE_ZERO in out):
m.exit_json(changed=False, msg=out, stdout=out, stderr=err)
m.exit_json(changed=True, msg=out, stdout=out, stderr=err)
def main():
module = AnsibleModule(
argument_spec = dict(
state = dict(default='present', choices=['installed', 'latest', 'removed', 'absent', 'present', 'build-dep']),
update_cache = dict(default=False, aliases=['update-cache'], type='bool'),
cache_valid_time = dict(type='int'),
purge = dict(default=False, type='bool'),
package = dict(default=None, aliases=['pkg', 'name'], type='list'),
deb = dict(default=None),
default_release = dict(default=None, aliases=['default-release']),
install_recommends = dict(default='yes', aliases=['install-recommends'], type='bool'),
force = dict(default='no', type='bool'),
upgrade = dict(choices=['yes', 'safe', 'full', 'dist']),
dpkg_options = dict(default=DPKG_OPTIONS)
),
mutually_exclusive = [['package', 'upgrade', 'deb']],
required_one_of = [['package', 'upgrade', 'update_cache', 'deb']],
supports_check_mode = True
)
if not HAS_PYTHON_APT:
try:
module.run_command('apt-get update && apt-get install python-apt -y -q', use_unsafe_shell=True, check_rc=True)
global apt, apt_pkg
import apt
import apt.debfile
import apt_pkg
except ImportError:
module.fail_json(msg="Could not import python modules: apt, apt_pkg. Please install python-apt package.")
global APTITUDE_CMD
APTITUDE_CMD = module.get_bin_path("aptitude", False)
global APT_GET_CMD
APT_GET_CMD = module.get_bin_path("apt-get")
p = module.params
if not APTITUDE_CMD and p.get('upgrade', None) in [ 'full', 'safe', 'yes' ]:
module.fail_json(msg="Could not find aptitude. Please ensure it is installed.")
install_recommends = p['install_recommends']
dpkg_options = expand_dpkg_options(p['dpkg_options'])
# Deal with deprecated aliases
if p['state'] == 'installed':
p['state'] = 'present'
if p['state'] == 'removed':
p['state'] = 'absent'
try:
cache = apt.Cache()
if p['default_release']:
try:
apt_pkg.config['APT::Default-Release'] = p['default_release']
except AttributeError:
apt_pkg.Config['APT::Default-Release'] = p['default_release']
# reopen cache w/ modified config
cache.open(progress=None)
if p['update_cache']:
# Default is: always update the cache
cache_valid = False
if p['cache_valid_time']:
tdelta = datetime.timedelta(seconds=p['cache_valid_time'])
try:
mtime = os.stat(APT_UPDATE_SUCCESS_STAMP_PATH).st_mtime
except:
mtime = False
if mtime is False:
# Looks like the update-success-stamp is not available
# Fallback: Checking the mtime of the lists
try:
mtime = os.stat(APT_LISTS_PATH).st_mtime
except:
mtime = False
if mtime is False:
# No mtime could be read - looks like lists are not there
# We update the cache to be safe
cache_valid = False
else:
mtimestamp = datetime.datetime.fromtimestamp(mtime)
if mtimestamp + tdelta >= datetime.datetime.now():
                        # don't update the cache
# the old cache is less than cache_valid_time seconds old - so still valid
cache_valid = True
if cache_valid is not True:
cache.update()
cache.open(progress=None)
if not p['package'] and not p['upgrade'] and not p['deb']:
module.exit_json(changed=False)
force_yes = p['force']
if p['upgrade']:
upgrade(module, p['upgrade'], force_yes,
p['default_release'], dpkg_options)
if p['deb']:
if p['state'] != 'present':
module.fail_json(msg="deb only supports state=present")
install_deb(module, p['deb'], cache,
install_recommends=install_recommends,
force=force_yes, dpkg_options=p['dpkg_options'])
packages = p['package']
latest = p['state'] == 'latest'
for package in packages:
if package.count('=') > 1:
module.fail_json(msg="invalid package spec: %s" % package)
if latest and '=' in package:
module.fail_json(msg='version number inconsistent with state=latest: %s' % package)
if p['state'] in ('latest', 'present', 'build-dep'):
state_upgrade = False
state_builddep = False
if p['state'] == 'latest':
state_upgrade = True
if p['state'] == 'build-dep':
state_builddep = True
result = install(module, packages, cache, upgrade=state_upgrade,
default_release=p['default_release'],
install_recommends=install_recommends,
force=force_yes, dpkg_options=dpkg_options,
build_dep=state_builddep)
(success, retvals) = result
if success:
module.exit_json(**retvals)
else:
module.fail_json(**retvals)
elif p['state'] == 'absent':
remove(module, packages, cache, p['purge'], dpkg_options)
except apt.cache.LockFailedException:
module.fail_json(msg="Failed to lock apt for exclusive operation")
except apt.cache.FetchFailedException:
module.fail_json(msg="Could not fetch updated apt files")
# import module snippets
from ansible.module_utils.basic import *
if __name__ == "__main__":
main()
|
ssssam/ansible-modules-core
|
packaging/os/apt.py
|
Python
|
gpl-3.0
| 24,239 | 0.004868 |
#!/usr/bin/env python
###############################################################################
# $Id: gdal2grd.py 27044 2014-03-16 23:41:27Z rouault $
#
# Project: GDAL Python samples
# Purpose: Script to write out ASCII GRD rasters (used in Golden Software
# Surfer)
# from any source supported by GDAL.
# Author: Andrey Kiselev, dron@remotesensing.org
#
###############################################################################
# Copyright (c) 2003, Andrey Kiselev <dron@remotesensing.org>
# Copyright (c) 2009, Even Rouault <even dot rouault at mines-paris dot org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
try:
from osgeo import gdal
from osgeo.gdalconst import *
gdal.TermProgress = gdal.TermProgress_nocb
except ImportError:
import gdal
from gdalconst import *
try:
import numpy as Numeric
Numeric.arrayrange = Numeric.arange
except ImportError:
import Numeric
import sys
# =============================================================================
def Usage():
print('Usage: gdal2grd.py [-b band] [-quiet] infile outfile')
print('Write out ASCII GRD rasters (used in Golden Software Surfer)')
print('')
print(' -b band Select a band number to convert (1 based)')
print(' -quiet Do not report any diagnostic information')
print(' infile Name of the input GDAL supported file')
print(' outfile Name of the output GRD file')
print('')
sys.exit(1)
# =============================================================================
infile = None
outfile = None
iBand = 1
quiet = 0
# Parse command line arguments.
i = 1
while i < len(sys.argv):
arg = sys.argv[i]
if arg == '-b':
i = i + 1
iBand = int(sys.argv[i])
elif arg == '-quiet':
quiet = 1
elif infile is None:
infile = arg
elif outfile is None:
outfile = arg
else:
Usage()
i = i + 1
if infile is None:
Usage()
if outfile is None:
Usage()
indataset = gdal.Open(infile, GA_ReadOnly)
if indataset is None:
print('Cannot open', infile)
sys.exit(2)
geotransform = indataset.GetGeoTransform()
band = indataset.GetRasterBand(iBand)
if band is None:
print('Cannot load band', iBand, 'from the', infile)
sys.exit(2)
if not quiet:
print('Size is ',indataset.RasterXSize,'x',indataset.RasterYSize,'x',indataset.RasterCount)
print('Projection is ',indataset.GetProjection())
print('Origin = (',geotransform[0], ',',geotransform[3],')')
print('Pixel Size = (',geotransform[1], ',',geotransform[5],')')
print('Converting band number',iBand,'with type',gdal.GetDataTypeName(band.DataType))
# Header printing
fpout = open(outfile, "wt")
fpout.write("DSAA\n")
fpout.write(str(band.XSize) + " " + str(band.YSize) + "\n")
fpout.write(str(geotransform[0] + geotransform[1] / 2) + " " +
str(geotransform[0] + geotransform[1] * (band.XSize - 0.5)) + "\n")
if geotransform[5] < 0:
fpout.write(str(geotransform[3] + geotransform[5] * (band.YSize - 0.5)) + " " +
str(geotransform[3] + geotransform[5] / 2) + "\n")
else:
fpout.write(str(geotransform[3] + geotransform[5] / 2) + " " +
str(geotransform[3] + geotransform[5] * (band.YSize - 0.5)) + "\n")
fpout.write(str(band.ComputeRasterMinMax(0)[0]) + " " +
str(band.ComputeRasterMinMax(0)[1]) + "\n")
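# Surfer's ASCII GRD stores rows from the bottom of the grid up, so emit the
# raster scanlines in reverse order.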
for i in range(band.YSize - 1, -1, -1):
scanline = band.ReadAsArray(0, i, band.XSize, 1, band.XSize, 1)
j = 0
while j < band.XSize:
fpout.write(str(scanline[0, j]))
j = j + 1
if j % 10: # Print no more than 10 values per line
fpout.write(" ")
else:
fpout.write("\n")
fpout.write("\n")
# Display progress report on terminal
if not quiet:
gdal.TermProgress(float(band.YSize - i) / band.YSize)
|
tilemapjp/OSGeo.GDAL.Xamarin
|
gdal-1.11.0/swig/python/samples/gdal2grd.py
|
Python
|
mit
| 4,946 | 0.006066 |
#!/usr/bin/env python
'''
Faraday Penetration Test IDE - Community Version
Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/)
See the file 'doc/LICENSE' for the license information
'''
import os
import model.api
import model
import time
import datetime
from model.report import ReportManager
from model.diff import HostDiff
from model.container import ModelObjectContainer, CouchedModelObjectContainer
from model.conflict import Conflict
from model.hosts import Host
from model.guiapi import notification_center as notifier
import mockito
from config.configuration import getInstanceConfiguration
CONF = getInstanceConfiguration()
import json
import shutil
from persistence.orm import WorkspacePersister
from managers.all import PersistenceManagerFactory, CouchdbManager, FSManager
class Workspace(object):
"""
Handles a complete workspace (or project)
It contains a reference to the model and the command execution
history for all users working on the same workspace.
It has a list with all existing workspaces just in case user wants to
open a new one.
"""
def __init__(self, name, manager, shared=CONF.getAutoShareWorkspace()):
self.name = name
self.description = ""
self.customer = ""
self.start_date = datetime.date(1,1,1)
self.finish_date = datetime.date(1,1,1)
self.id = name
self._command_history = None
self._model_controller = None
self._workspace_manager = manager
self.shared = shared
self._path = os.path.join(CONF.getPersistencePath(), name)
self._persistence_excluded_filenames = ["categories.xml", "workspace.xml"]
self.container = ModelObjectContainer()
self.__conflicts = []
self._object_factory = model.common.factory
self._object_factory.register(model.hosts.Host)
self._report_path = os.path.join(CONF.getReportPath(), name)
self._report_ppath = os.path.join(self._report_path,"process")
if not os.path.exists(self._report_path):
os.mkdir(self._report_path)
if not os.path.exists(self._report_ppath):
os.mkdir(self._report_ppath)
def _notifyWorkspaceNoConnection(self):
notifier.showPopup("Couchdb Connection lost. Defaulting to memory. Fix network and try again in 5 minutes.")
def getReportPath(self):
return self._report_path
    def saveObj(self, obj): raise NotImplementedError("Abstract method")
    def delObj(self, obj): raise NotImplementedError("Abstract method")
def remove(self, host):
del self.container[host.getID()]
self.delObj(host)
def save(self): raise NotImplementedError("Abstract method")
def load(self): raise NotImplementedError("Abstract method")
def setModelController(self, model_controller):
self._model_controller = model_controller
def getContainee(self):
return self.container
def set_path(self, path):
self._path = path
def get_path(self):
return self._path
def set_report_path(self, path):
self._report_path = path
if not os.path.exists(self._report_path):
os.mkdir(self._report_path)
self._workspace_manager.report_manager.path = self.report_path
def get_report_path(self):
return self._report_path
path = property(get_path, set_path)
report_path = property(get_report_path, set_report_path)
def isActive(self):
return self.name == self._workspace_manager.getActiveWorkspace().name
def getAllHosts(self):
return self._model_controller.getAllHosts()
def getDeletedHosts(self):
return self._model_controller.getDeletedHosts()
def cleanDeletedHosts(self):
self._model_controller.cleanDeletedHosts()
def verifyConsistency(self):
hosts = self.getAllHosts()
hosts_counter = 0
for h1 in hosts[:-1]:
hosts_counter += 1
for h2 in hosts[hosts_counter:]:
if h1 == h2 :
diff = HostDiff(h1, h2)
if diff.existDiff():
self.addConflict(Conflict(h1, h2))
return len(self.getConflicts())
def getDataManager(self):
return self._dmanager
def addConflict(self, conflict):
self.__conflicts.append(conflict)
def getConflicts(self):
return self.__conflicts
def clearConflicts(self):
        del self.__conflicts[:]
def resolveConflicts(self):
pass
def conflictResolved(self, conflict):
self.__conflicts.remove(conflict)
class WorkspaceOnFS(Workspace):
def __init__(self, name, manager, shared=CONF.getAutoShareWorkspace()):
Workspace.__init__(self, name, manager, shared)
self._dmanager = FSManager(self._path)
@staticmethod
def isAvailable():
return True
def saveObj(self, obj):
host = obj.getHost()
try:
model.api.devlog("Saving host to FileSystem")
model.api.devlog("Host, %s" % host.getID())
host_as_dict = host._toDict(full=True)
filepath = os.path.join(self._path, host.getID() + ".json")
with open(filepath, "w") as outfile:
json.dump(host_as_dict, outfile, indent = 2)
except Exception:
model.api.devlog("Failed while persisting workspace to filesystem, enough perms and space?")
def delObj(self, obj):
if obj.class_signature == "Host":
self._dmanager.removeObject(obj.getID())
return
host = obj.getHost()
self.saveObj(host)
def syncFiles(self):
self.load()
def load(self):
files = os.listdir(self._path)
files = filter(lambda f: f.endswith(".json") and f not in
self._persistence_excluded_filenames, files)
modelobjectcontainer = self.getContainee()
for filename in files:
newHost = self.__loadHostFromFile(filename)
modelobjectcontainer[newHost.getID()] = newHost
notifier.workspaceLoad(self.getAllHosts())
def __loadHostFromFile(self, filename):
if os.path.basename(filename) in self._persistence_excluded_filenames:
model.api.devlog("skipping file %s" % filename)
return
else:
model.api.devlog("loading file %s" % filename)
infilepath = os.path.join(self._path, filename)
host_dict = {}
try:
with open(infilepath) as infile:
host_dict = json.load(infile)
except Exception, e:
                model.api.log("An error occurred while parsing file %s\n%s" %
(filename, str(e)), "ERROR")
return mockito.mock()
try:
newHost = Host(name=None, dic=host_dict)
return newHost
except Exception, e:
model.api.log("Could not load host from file %s" % filename, "ERROR")
model.api.devlog(str(e))
return None
class WorkspaceOnCouch(Workspace):
"""A Workspace that is syncronized in couchdb"""
def __init__(self, name, manager, *args):
super(WorkspaceOnCouch, self).__init__(name, manager)
self._is_replicated = replicated = CONF.getCouchIsReplicated()
self.cdm = self._dmanager = manager.couchdbmanager
if not self.cdm.workspaceExists(name):
self.cdm.addWorkspace(name)
if self.is_replicated():
self.cdm.replicate(self.name, *self.validate_replic_urls(CONF.getCouchReplics()), create_target = True)
self.cdm.syncWorkspaceViews(name)
self.container = CouchedModelObjectContainer(name, self.cdm)
def syncFiles(self):
self.load()
@staticmethod
def isAvailable():
return CouchdbManager.testCouch(CONF.getCouchURI())
def is_replicated(self):
return self._is_replicated
def validate_replic_urls(self, urlsString):
urls = urlsString.split(";") if urlsString is not None else ""
valid_replics = []
for url in urls:
try:
self.cdm.testCouchUrl(url)
valid_replics.append(url)
except:
pass
return valid_replics
def saveObj(self, obj):
self.cdm.saveDocument(self.name, obj._toDict())
self.cdm.compactDatabase(self.name)
def delObj(self, obj):
obj_id = obj.ancestors_path()
if self._dmanager.checkDocument(self.name, obj_id):
self._dmanager.remove(self.name, obj_id)
def save(self):
model.api.devlog("Saving workspaces")
for host in self.getContainee().itervalues():
host_as_dict = host.toDict()
for obj_dic in host_as_dict:
self.cdm.saveDocument(self.name, obj_dic)
def load(self):
self._model_controller.setSavingModel(True)
hosts = {}
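        # CouchDB returns the workspace documents flat; each document's '_id'
        # encodes its ancestry as a dotted path, so the object tree is rebuilt
        # by walking that path down nested 'subs' dictionaries.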
def find_leaf(path, sub_graph = hosts):
for i in path:
if len(path) > 1:
return find_leaf(path[1:], sub_graph['subs'][i])
else:
return sub_graph
try:
t = time.time()
model.api.devlog("load start: %s" % str(t))
docs = [i["doc"] for i in self.cdm.workspaceDocumentsIterator(self.name)]
model.api.devlog("time to get docs: %s" % str(time.time() - t))
t = time.time()
for d in docs:
id_path = d['_id'].split('.')
if d['type'] == "Host":
hosts[d['_id']] = d
subs = hosts.get('subs', {})
subs[d['_id']] = d
hosts['subs'] = subs
continue
leaf = {}
try:
leaf = find_leaf(id_path)
except Exception as e:
model.api.devlog('Object parent not found, skipping: %s' % '.'.join(id_path))
continue
subs = leaf.get('subs', {})
subs[d['obj_id']] = d
leaf['subs'] = subs
key = "%s" % d['type']
key = key.lower()
sub = leaf.get(key, {})
sub[d['obj_id']] = d
leaf[key] = sub
model.api.devlog("time to reconstruct: %s" % str(time.time() - t))
t = time.time()
self.container.clear()
for k, v in hosts.items():
                if k != "subs":
h = Host(name=None, dic=v)
self.container[k] = h
model.api.devlog("time to fill container: %s" % str(time.time() - t))
t = time.time()
except Exception, e:
model.api.devlog("Exception during load: %s" % e)
finally:
self._model_controller.setSavingModel(False)
notifier.workspaceLoad(self.getAllHosts())
class WorkspaceManager(object):
"""
This handles all workspaces. It checks for existing workspaces inside
the persistence directory.
It is in charge of starting the WorkspacesAutoSaver to persist each workspace.
This class stores information in $HOME/.faraday/config/workspacemanager.xml file
to keep track of created workspaces to be able to load them
"""
def __init__(self, model_controller, plugin_controller):
self.active_workspace = None
self._couchAvailable = False
self.report_manager = ReportManager(10, plugin_controller)
self.couchdbmanager = PersistenceManagerFactory().getInstance()
self.fsmanager = FSManager()
self._workspaces = {}
self._workspaces_types = {}
self._model_controller = model_controller
self._excluded_directories = [".svn"]
self.workspace_persister = WorkspacePersister()
def couchAvailable(self, isit):
self._couchAvailable = isit
def _notifyWorkspaceNoConnection(self):
notifier.showPopup("Couchdb Connection lost. Defaulting to memory. Fix network and try again in 5 minutes.")
def reconnect(self):
if not self.reconnectCouchManager():
self._notifyWorkspaceNoConnection()
def getCouchManager(self):
return self.couchdbmanager
def setCouchManager(self, cm):
self.couchdbmanager = cm
@staticmethod
def getAvailableWorkspaceTypes():
av = [w.__name__ for w in Workspace.__subclasses__() if w.isAvailable() ]
        model.api.devlog("Available workspaces: %s" % ", ".join(av))
return av
def reconnectCouchManager(self):
retval = True
if not self.couchdbmanager.reconnect():
retval = False
return retval
WorkspacePersister.reExecutePendingActions()
return retval
def startAutoLoader(self):
pass
def stopAutoLoader(self):
pass
def startReportManager(self):
self.report_manager.start()
def stopReportManager(self):
self.report_manager.stop()
self.report_manager.join()
def getActiveWorkspace(self):
return self.active_workspace
def saveWorkspaces(self):
pass
def addWorkspace(self, workspace):
self._workspaces[workspace.name] = workspace
def createVisualizations(self):
stat = False
url = ""
if self.couchdbmanager.isAvailable():
stat = True
url = self.couchdbmanager.pushReports()
else:
self._notifyNoVisualizationAvailable()
return stat, url
def _notifyNoVisualizationAvailable(self):
notifier.showPopup("No visualizations available, please install and configure CouchDB")
def createWorkspace(self, name, description="", workspaceClass = None, shared=CONF.getAutoShareWorkspace(),
customer="", sdate=None, fdate=None):
model.api.devlog("Creating Workspace")
if self.getWorkspaceType(name) in globals():
workspaceClass = globals()[self.getWorkspaceType(name)]
elif not workspaceClass:
# Defaulting =(
model.api.devlog("Defaulting to WorkspaceOnFS")
workspaceClass = WorkspaceOnFS
w = workspaceClass(name, self, shared)
# Register the created workspace type:
self._workspaces_types[name] = workspaceClass.__name__
w.description = description
w.customer = customer
if sdate is not None:
w.start_date = sdate
if fdate is not None:
w.finish_date = fdate
self.addWorkspace(w)
return w
def removeWorkspace(self, name):
work = self.getWorkspace(name)
if not work: return
dm = work.getDataManager()
dm.removeWorkspace(name)
datapath = CONF.getDataPath()
todelete = [i for i in os.listdir(datapath) if name in i ]
for i in todelete:
os.remove(os.path.join(datapath, i))
shutil.rmtree(self.getWorkspace(name).getReportPath())
del self._workspaces[name]
if self.getWorkspace(name) == self.getActiveWorkspace() and self.getWorkspacesCount() > 0:
self.setActiveWorkspace(self.getWorkspace(self._workspaces.keys()[0]))
def getWorkspace(self, name):
''' May return None '''
if not self._workspaces.get(name):
# Retrieve the workspace
self.loadWorkspace(name)
return self._workspaces.get(name)
def loadWorkspace(self, name):
workspaceClass = None
workspace = None
if name in self.fsmanager.getWorkspacesNames():
workspace = self.createWorkspace(name, workspaceClass = WorkspaceOnFS)
elif name in self.couchdbmanager.getWorkspacesNames():
workspace = self.createWorkspace(name, workspaceClass = WorkspaceOnCouch)
return workspace
def openWorkspace(self, name):
w = self.getWorkspace(name)
self.setActiveWorkspace(w)
return w
def getWorkspaces(self):
"""
Simply returns a list of all existing workspaces (including the active one)
"""
self.loadWorkspaces()
return [w for w in self._workspaces.itervalues()]
def getWorkspacesCount(self):
return len(self._workspaces)
def getWorkspacesNames(self):
return self._workspaces.keys()
def loadWorkspaces(self):
self._workspaces_types = {}
fsworkspaces = {name: None for name in self.fsmanager.getWorkspacesNames()}
self._workspaces.update(fsworkspaces)
couchworkspaces = {name: None for name in self.couchdbmanager .getWorkspacesNames()
if not name == 'reports'}
self._workspaces.update(couchworkspaces)
self._workspaces_types.update({name: WorkspaceOnFS.__name__ for name in fsworkspaces})
self._workspaces_types.update({name: WorkspaceOnCouch.__name__ for name in couchworkspaces})
def getWorkspaceType(self, name):
return self._workspaces_types.get(name, 'undefined')
def setActiveWorkspace(self, workspace):
try:
self.stopAutoLoader()
except : pass
if self.active_workspace is not None:
self.active_workspace.setModelController(None)
CONF.setLastWorkspace(workspace.name)
CONF.saveConfig()
self.active_workspace = workspace
self.active_workspace.setModelController(self._model_controller)
self._model_controller.setWorkspace(self.active_workspace)
self.workspace_persister.setPersister(self.active_workspace, self.active_workspace._dmanager)
self.report_manager.path = workspace.report_path
if isinstance(self.active_workspace, WorkspaceOnCouch):
self.startAutoLoader()
def isActive(self, name):
return self.active_workspace.name == name
def syncWorkspaces(self):
"""
Synchronize persistence directory using the DataManager.
We first make sure that all shared workspaces were added to the repo
"""
pass
class NotSyncronizableWorkspaceException(Exception): pass
class ConflictsPendingToSolveException(Exception): pass
class WorkspaceSyncronizer(object):
"""Object whom purpose is to correctly syncronize a workspace
Interacts with a DataManager and a Workspace Object as a mediator"""
def __init__(self, workspace):
self._workspace = workspace
self._dmanager = workspace.getDataManager()
def sync(self):
if not self.localCheck():
return False
self._workspace.save()
self._workspace.syncFiles()
return True
def localCheck(self):
return True
if (self._workspace.verifyConsistency() > 0):
if (len(self._workspace.resolveConflicts(local=True)) <
len(self._workspace.getConflicts())):
return False
return True
|
Snifer/BurpSuite-Plugins
|
faraday/model/workspace.py
|
Python
|
gpl-2.0
| 20,725 | 0.008733 |
# This file is part of EventGhost.
# Copyright (C) 2005 Lars-Peter Voss <bitmonster@eventghost.org>
#
# EventGhost is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# EventGhost is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EventGhost; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# TODO: Use of eg.SerialThread instead of eg.SerialPort
import eg
eg.RegisterPlugin(
name = "Serial Port",
author = "Bitmonster",
version = "1.1." + "$LastChangedRevision$".split()[1],
canMultiLoad = True,
description = "Arbitrary communication through a serial port.",
)
class Text:
port = "Port:"
baudrate = "Baudrate:"
bytesize = "Number of bits:"
parity = "Parity:"
parities = ['No parity', 'Odd', 'Even'] #, 'Mark', 'Space']
stopbits = "Stopbits:"
flowcontrol = "Flow control:"
handshakes = ['None', 'Xon / Xoff', 'Hardware']
generateEvents = "Generate events on incoming data"
terminator = "Terminator:"
eventPrefix = "Event prefix:"
encoding = "Encoding:"
codecChoices = [
"System code page",
"HEX",
"Latin-1",
"UTF-8",
"UTF-16",
"Python string escape",
]
class Write:
name = "Write Data"
description = (
"Writes some text through the serial port."
"\n\n<p>"
"You can use Python string escapes to send non-printable "
"characters. Some examples:<p>"
"\\n will send a Linefeed (LF)<br>"
"\\r will send a Carriage Return (CR)<br>"
"\\t will send a Horizontal Tab (TAB)<br>"
"\\x0B will send the ASCII character with the hexcode 0B<br>"
"\\\\ will send a single Backslash."
)
class Read:
name = "Read Data"
        description = (
            "Reads data from the serial port."
            "\n\n<p>"
            "This action returns the data through <i>eg.result</i>, as any "
            "action that returns data does. So you have to use "
            '<a href="http://www.eventghost.net/docs/scripting">'
            "Python scripting</a> to do anything with the result."
            "<p>"
            "This action and the plugin's event generation cannot be used "
            "at the same time, as one of them will always consume the data "
            "before the other sees it."
)
read_all = "Read as many bytes as are currently available"
read_some = "Read exactly this number of bytes:"
read_time = "and wait this maximum number of milliseconds for them:"
import wx
import threading
import win32event
import win32file
import codecs
import binascii
BAUDRATES = [
'110', '300', '600', '1200', '2400', '4800', '9600', '14400', '19200',
'38400', '57600', '115200', '128000', '256000'
]
def MyHexDecoder(input):
return (binascii.b2a_hex(input).upper(), len(input))
DECODING_FUNCS = [
codecs.getdecoder(eg.systemEncoding),
MyHexDecoder,
codecs.getdecoder("latin1"),
codecs.getdecoder("utf8"),
codecs.getdecoder("utf16"),
codecs.getencoder("string_escape"),
]
class Serial(eg.RawReceiverPlugin):
text = Text
def __init__(self):
eg.RawReceiverPlugin.__init__(self)
self.AddAction(Write)
self.AddAction(Read)
self.serial = None
self.buffer = ""
def __start__(
self,
port,
baudrate,
bytesize=8,
parity=0,
stopbits=0,
handshake=0,
generateEvents=False,
terminator="",
prefix="Serial",
encodingNum=0,
):
xonxoff = 0
rtscts = 0
if handshake == 1:
xonxoff = 1
elif handshake == 2:
rtscts = 1
try:
self.serial = eg.SerialPort(
port,
baudrate=baudrate,
bytesize=(5, 6, 7, 8)[bytesize],
stopbits=(1, 2)[stopbits],
parity=('N', 'O', 'E')[parity],
xonxoff=xonxoff,
rtscts=rtscts,
)
except:
self.serial = None
raise self.Exceptions.SerialOpenFailed
self.serial.timeout = 1.0
self.serial.setRTS()
if generateEvents:
self.decoder = DECODING_FUNCS[encodingNum]
self.terminator = eg.ParseString(terminator).decode('string_escape')
self.info.eventPrefix = prefix
self.stopEvent = win32event.CreateEvent(None, 1, 0, None)
self.receiveThread = threading.Thread(target=self.ReceiveThread, name="SerialThread")
self.receiveThread.start()
else:
self.receiveThread = None
def __stop__(self):
if self.serial is not None:
if self.receiveThread:
win32event.SetEvent(self.stopEvent)
self.receiveThread.join(1.0)
self.serial.close()
self.serial = None
def HandleChar(self, ch):
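        # Accumulate received bytes and trigger an event every time the
        # configured terminator shows up in the buffer.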
self.buffer += ch
pos = self.buffer.find(self.terminator)
if pos != -1:
eventstring = self.buffer[:pos]
if eventstring:
self.TriggerEvent(self.decoder(eventstring)[0])
self.buffer = self.buffer[pos+len(self.terminator):]
def ReceiveThread(self):
from win32event import (
ResetEvent,
MsgWaitForMultipleObjects,
QS_ALLINPUT,
WAIT_OBJECT_0,
WAIT_TIMEOUT,
)
from win32file import ReadFile, AllocateReadBuffer, GetOverlappedResult
from win32api import GetLastError
continueLoop = True
overlapped = self.serial._overlappedRead
hComPort = self.serial.hComPort
hEvent = overlapped.hEvent
stopEvent = self.stopEvent
n = 1
waitingOnRead = False
buf = AllocateReadBuffer(n)
while continueLoop:
if not waitingOnRead:
ResetEvent(hEvent)
hr, _ = ReadFile(hComPort, buf, overlapped)
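                # 997 == ERROR_IO_PENDING: the overlapped read was queued and
                # will be signalled on hEvent once a byte arrives.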
if hr == 997:
waitingOnRead = True
elif hr == 0:
pass
#n = GetOverlappedResult(hComPort, overlapped, 1)
#self.HandleChar(str(buf))
else:
self.PrintError("error")
raise
rc = MsgWaitForMultipleObjects(
(hEvent, stopEvent),
0,
1000,
QS_ALLINPUT
)
if rc == WAIT_OBJECT_0:
n = GetOverlappedResult(hComPort, overlapped, 1)
if n:
self.HandleChar(str(buf))
#else:
# print "WAIT_OBJECT_0", n, str(buf[:n])
waitingOnRead = False
elif rc == WAIT_OBJECT_0+1:
continueLoop = False
elif rc == WAIT_TIMEOUT:
pass
else:
self.PrintError("unknown message")
def Configure(
self,
port=0,
baudrate=9600,
bytesize=3,
parity=0,
stopbits=0,
handshake=0,
generateEvents=False,
terminator="\\r",
prefix="Serial",
encodingNum=0,
):
text = self.text
panel = eg.ConfigPanel()
portCtrl = panel.SerialPortChoice(port)
baudrateCtrl = panel.ComboBox(
str(baudrate),
BAUDRATES,
style=wx.CB_DROPDOWN,
validator=eg.DigitOnlyValidator()
)
bytesizeCtrl = panel.Choice(bytesize, ['5', '6', '7', '8'])
parityCtrl = panel.Choice(parity, text.parities)
stopbitsCtrl = panel.Choice(stopbits, ['1', '2'])
handshakeCtrl = panel.Choice(handshake, text.handshakes)
generateEventsCtrl = panel.CheckBox(generateEvents, text.generateEvents)
terminatorCtrl = panel.TextCtrl(terminator)
terminatorCtrl.Enable(generateEvents)
prefixCtrl = panel.TextCtrl(prefix)
prefixCtrl.Enable(generateEvents)
encodingCtrl = panel.Choice(encodingNum, text.codecChoices)
encodingCtrl.Enable(generateEvents)
def OnCheckBox(event):
flag = generateEventsCtrl.GetValue()
terminatorCtrl.Enable(flag)
prefixCtrl.Enable(flag)
encodingCtrl.Enable(flag)
event.Skip()
generateEventsCtrl.Bind(wx.EVT_CHECKBOX, OnCheckBox)
panel.SetColumnFlags(1, wx.EXPAND)
portSettingsBox = panel.BoxedGroup(
"Port settings",
(text.port, portCtrl),
(text.baudrate, baudrateCtrl),
(text.bytesize, bytesizeCtrl),
(text.parity, parityCtrl),
(text.stopbits, stopbitsCtrl),
(text.flowcontrol, handshakeCtrl),
)
eventSettingsBox = panel.BoxedGroup(
"Event generation",
(generateEventsCtrl),
(text.terminator, terminatorCtrl),
(text.eventPrefix, prefixCtrl),
(text.encoding, encodingCtrl),
)
eg.EqualizeWidths(portSettingsBox.GetColumnItems(0))
eg.EqualizeWidths(portSettingsBox.GetColumnItems(1))
eg.EqualizeWidths(eventSettingsBox.GetColumnItems(0)[1:])
eg.EqualizeWidths(eventSettingsBox.GetColumnItems(1))
panel.sizer.Add(eg.HBoxSizer(portSettingsBox, (10, 10), eventSettingsBox))
while panel.Affirmed():
panel.SetResult(
portCtrl.GetValue(),
int(baudrateCtrl.GetValue()),
bytesizeCtrl.GetValue(),
parityCtrl.GetValue(),
stopbitsCtrl.GetValue(),
handshakeCtrl.GetValue(),
generateEventsCtrl.GetValue(),
terminatorCtrl.GetValue(),
prefixCtrl.GetValue(),
encodingCtrl.GetValue(),
)
class Write(eg.ActionWithStringParameter):
def __call__(self, data):
data = eg.ParseString(data, self.replaceFunc)
data = data.decode('string_escape')
self.plugin.serial.write(str(data))
return self.plugin.serial
def replaceFunc(self, data):
data = data.strip()
if data == "CR":
return chr(13)
elif data == "LF":
return chr(10)
else:
return None
class Read(eg.ActionBase):
def __call__(self, count=None, timeout=0.0):
serial = self.plugin.serial
serial.timeout = timeout
if count is None:
count = 1024
data = serial.read(count)
return data
def GetLabel(self, *args):
return eg.ActionBase.GetLabel(self)
def Configure(self, count=None, timeout=1.0):
text = self.text
panel = eg.ConfigPanel()
if count is None:
count = 1
flag = False
else:
flag = True
if timeout is None:
timeout = 1.0
rb1 = panel.RadioButton(not flag, text.read_all, style=wx.RB_GROUP)
rb2 = panel.RadioButton(flag, text.read_some)
countCtrl = panel.SpinIntCtrl(count, 1, 1024)
countCtrl.Enable(flag)
timeCtrl = panel.SpinIntCtrl(int(timeout * 1000), 0, 10000)
timeCtrl.Enable(flag)
def OnRadioButton(event):
flag = rb2.GetValue()
countCtrl.Enable(flag)
timeCtrl.Enable(flag)
event.Skip()
rb1.Bind(wx.EVT_RADIOBUTTON, OnRadioButton)
rb2.Bind(wx.EVT_RADIOBUTTON, OnRadioButton)
Add = panel.sizer.Add
Add(rb1)
Add((5,5))
Add(rb2)
Add((5,5))
Add(countCtrl, 0, wx.LEFT, 25)
Add((5,5))
Add(panel.StaticText(text.read_time), 0, wx.LEFT, 25)
Add((5,5))
Add(timeCtrl, 0, wx.LEFT, 25)
while panel.Affirmed():
if rb1.GetValue():
panel.SetResult(None, 0.0)
else:
panel.SetResult(
countCtrl.GetValue(),
timeCtrl.GetValue() / 1000.0
)
|
garbear/EventGhost
|
plugins/Serial/__init__.py
|
Python
|
gpl-2.0
| 13,068 | 0.003367 |
import logging, os, random
from zc.buildout import UserError, easy_install
from zc.recipe.egg import Egg
SETTINGS_TEMPLATE = '''
from %(settings_module)s import *
SECRET_KEY = "%(secret)s"
%(settings_override)s
'''
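# Templates for the generated wsgi and manage scripts; both probe for the
# Django >= 1.4 API first and fall back to the pre-1.4 entry points otherwise.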
SCRIPT_TEMPLATES = {
'wsgi': easy_install.script_header + '''
%(relative_paths_setup)s
import sys
sys.path[0:0] = [
%(path)s,
]
%(initialization)s
import os
try:
from django.core.wsgi import get_wsgi_application
IS_14_PLUS = True
except ImportError:
from django.core.handlers.wsgi import WSGIHandler
IS_14_PLUS = False
os.environ['DJANGO_SETTINGS_MODULE'] = "%(module_name)s%(attrs)s"
def app_factory(global_config, **local_config):
"""This function wraps our simple WSGI app so it
can be used with paste.deploy"""
if IS_14_PLUS:
return get_wsgi_application()
else:
return WSGIHandler()
application = app_factory(%(arguments)s)
''',
'manage': easy_install.script_header + '''
%(relative_paths_setup)s
import sys
sys.path[0:0] = [
%(path)s,
]
%(initialization)s
import os
try:
from django.core.management import execute_from_command_line
IS_14_PLUS = True
except ImportError:
from django.core.management import ManagementUtility
IS_14_PLUS = False
os.environ['DJANGO_SETTINGS_MODULE'] = "%(module_name)s%(attrs)s"
if IS_14_PLUS:
execute_from_command_line(%(arguments)s)
else:
utility = ManagementUtility(%(arguments)s)
utility.execute()
'''
}
class Recipe(object):
wsgi_file = 'wsgi.py'
settings_file = 'settings.py'
sites_default = 'sites'
site_settings_template = '%(name)s_site_config'
secret_cfg = '.secret.cfg'
def __init__(self, buildout, name, options):
self.buildout, self.name, self.options = buildout, name, options
self.logger = logging.getLogger(name)
self.options['location'] = os.path.join(
self.buildout['buildout']['parts-directory'], self.name
)
self.options.setdefault('extra-paths', '')
self.options.setdefault('environment-vars', '')
self.options.setdefault('sites-directory', self.sites_default)
self.options.setdefault('settings-override', '')
self.options.setdefault('settings-file', self.settings_file)
self.options.setdefault('wsgi-file', self.wsgi_file)
self.options.setdefault('manage-py-file', 'django')
self.eggs = [ ]
if 'eggs' in self.buildout['buildout']:
self.eggs.extend(self.buildout['buildout']['eggs'].split())
if 'eggs' in self.options:
self.eggs.extend(self.options['eggs'].split())
self.working_set = None
self.extra_paths = [ self.options['location'] ]
sites_path = os.path.join(
self.buildout['buildout']['directory'],
self.options['sites-directory']
)
if os.path.isdir(sites_path):
self.extra_paths.append(sites_path)
if os.path.isdir(sites_path) and 'settings-module' not in self.options:
# Check if the user has created a module %(name)s_config
settings_module = self.site_settings_template % {
'name': self.name
}
settings_module_path = os.path.join(sites_path, settings_module)
initpy = os.path.join(settings_module_path, '__init__.py')
settingspy = os.path.join(settings_module_path, 'settings.py')
if os.path.isdir(settings_module_path) and \
os.path.isfile(initpy) and os.path.isfile(settingspy):
self.options.setdefault('settings-module',
'%s.settings' % settings_module)
self.extra_paths.extend(self.options['extra-paths'].split())
self.secret_key = None
def setup_working_set(self):
egg = Egg(
self.buildout, 'Django', self.options
)
self.working_set = egg.working_set(self.eggs)
def setup_secret(self):
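        # Reuse the secret stored in .secret.cfg when present, otherwise
        # generate a new 50-character secret and persist it for later runs.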
secret_file = os.path.join(
self.buildout['buildout']['directory'],
self.secret_cfg
)
if os.path.isfile(secret_file):
stream = open(secret_file, 'rb')
data = stream.read().decode('utf-8').strip()
stream.close()
self.logger.debug("Read secret: %s" % data)
else:
stream = open(secret_file, 'wb')
chars = u'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
data = u''.join([random.choice(chars) for __ in range(50)])
stream.write(data.encode('utf-8')+u"\n")
stream.close()
self.logger.debug(
"Generated secret: %s (and written to %s)" % (data, secret_file)
)
self.secret_key = data
return secret_file
def setup_module_file(self, module, name, data):
with open(os.path.join(module, name), 'wb') as stream:
stream.write(data)
def get_settings(self, static_directory=None, media_directory=None):
if 'settings-module' not in self.options:
raise UserError(
("You should specify 'settings-module' in %(name)s "
"or create a module named '"+self.site_settings_template+"' "
"in '%(sites)s' with a 'settings.py' file in it") % {
'name': self.name,
'sites': self.options['sites-directory']
}
)
settings_override = self.options['settings-override']
if static_directory is not None:
settings_override += '\nSTATIC_ROOT = "%s"\n' % (
static_directory,
)
if media_directory is not None:
settings_override += '\nMEDIA_ROOT = "%s"\n' % (
media_directory,
)
return SETTINGS_TEMPLATE % {
'settings_module': self.options['settings-module'],
'secret': self.secret_key,
'settings_override': settings_override
}
def setup_directories(self):
result = []
for directory in [ 'static-directory', 'media-directory' ]:
result.append(None)
if directory in self.options:
path = os.path.join(
self.buildout['buildout']['directory'],
self.options[directory]
)
if not os.path.isdir(path):
os.makedirs(path)
result[-1] = path
return result
def get_initialization(self):
# The initialization code is expressed as a list of lines
initialization = []
# Gets the initialization code: the tricky part here is to preserve
# indentation.
# Since buildout does totally waste whitespace, if one wants to
# preserve indentation must prefix its lines with '>>> ' or '... '
raw_value = self.options.get('initialization', '')
is_indented = False
indentations = ('>>> ', '... ')
for line in raw_value.splitlines():
if line != "":
if len(initialization) == 0:
if line.startswith(indentations[0]):
is_indented = True
else:
if is_indented and not line.startswith(indentations[1]):
raise UserError(
("Line '%s' should be indented "
"properly but is not") % line
)
if is_indented:
line = line[4:]
initialization.append(line)
# Gets the environment-vars option and generates code to set the
# enviroment variables via os.environ
environment_vars = []
for line in self.options.get('environment-vars', '').splitlines():
line = line.strip()
if len(line) > 0:
try:
var_name, raw_value = line.split(' ', 1)
except ValueError:
raise RuntimeError(
"Bad djc.recipe2 environment-vars contents: %s" % line
)
environment_vars.append(
'os.environ["%s"] = r"%s"' % (
var_name,
raw_value.strip()
)
)
if len(environment_vars) > 0:
initialization.append("import os")
initialization.extend(environment_vars)
if len(initialization) > 0:
return "\n"+"\n".join(initialization)+"\n"
return ""
def create_script(self, name, path, settings, template, arguments):
"""Create arbitrary script.
This script will also include the eventual code found in
``initialization`` and will also set (via ``os.environ``) the
environment variables found in ``environment-vars``
"""
self.logger.info(
"Creating script at %s" % (os.path.join(path, name),)
)
settings = settings.rsplit(".", 1)
module = settings[0]
attrs = ""
if len(settings) > 1:
attrs = "." + settings[1]
old_script_template = easy_install.script_template
easy_install.script_template = template
script = easy_install.scripts(
reqs=[(name, module, attrs)],
working_set=self.working_set[1],
executable=self.options['executable'],
dest=path,
extra_paths=self.extra_paths,
initialization=self.get_initialization(),
arguments=str(arguments)
)
easy_install.script_template = old_script_template
return script
def setup_manage_script(self, settings):
arguments = "sys.argv"
return self.create_script(
self.options['manage-py-file'],
self.buildout['buildout']['bin-directory'],
settings,
SCRIPT_TEMPLATES['manage'],
arguments
)
def setup_wsgi_script(self, module_path, settings):
arguments = "global_config={}"
return self.create_script(
self.options['wsgi-file'],
module_path,
settings,
SCRIPT_TEMPLATES['wsgi'],
arguments
)
def setup(self, static_directory=None, media_directory=None):
part_module = '%s_part_site' % self.name
part_module_path = os.path.join(self.options['location'], part_module)
settings_module = "%s.%s" % (
part_module,
os.path.splitext(self.options['settings-file'])[0]
)
if not os.path.exists(part_module_path):
os.makedirs(part_module_path)
self.setup_module_file(part_module_path, '__init__.py', "#\n")
self.setup_module_file(
part_module_path,
self.options['settings-file'],
self.get_settings(static_directory, media_directory)
)
self.setup_wsgi_script(part_module_path, settings_module)
files = [ self.options['location'] ]
files.extend(self.setup_manage_script(settings_module))
return files
def install(self):
files = []
self.setup_working_set()
# The .secret.cfg file is not reported so it doesn't get deleted
self.setup_secret()
static_directory, media_directory = self.setup_directories()
# static and media are not added to files so that updates
# won't delete them, nor reinstallations of parts
files.extend(self.setup(static_directory, media_directory))
return tuple(files)
update = install
|
abstract-open-solutions/djc.recipe2
|
djc/recipe2/recipe.py
|
Python
|
bsd-3-clause
| 11,686 | 0.00077 |
from distutils.core import setup
setup(
name = 'mirobot',
packages = ['mirobot'],
version = '1.0.3',
description = 'A Python library to control Mirobot (http://mirobot.io)',
author = 'Ben Pirt',
author_email = 'ben@pirt.co.uk',
url = 'https://github.com/mirobot/mirobot-py',
download_url = 'https://github.com/mirobot/mirobot-py/tarball/v1.0.2',
keywords = ['robotics', 'control', 'mirobot'],
classifiers = ['Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Education', 'License :: OSI Approved :: MIT License'],
install_requires=[
"websocket-client",
],
)
|
mirobot/mirobot-py
|
setup.py
|
Python
|
mit
| 623 | 0.033708 |
"""Base rope package
This package contains rope core modules that are used by other modules
and packages.
"""
__all__ = ["project", "libutils", "exceptions"]
|
python-rope/rope
|
rope/base/__init__.py
|
Python
|
lgpl-3.0
| 161 | 0 |
import scipy.sparse as ss
import warnings
warnings.simplefilter('ignore', ss.SparseEfficiencyWarning)
from sparray import FlatSparray
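# Benchmarks comparing FlatSparray with scipy's csr_matrix on common
# sparse-array operations.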
class Operations(object):
params = [['FlatSparray', 'csr_matrix']]
param_names = ['arr_type']
def setup(self, arr_type):
mat = ss.rand(3000, 4000, density=0.1, format='csr')
if arr_type == 'FlatSparray':
self.arr = FlatSparray.from_spmatrix(mat)
else:
self.arr = mat
def time_scalar_multiplication(self, arr_type):
self.arr * 3
def time_sum(self, arr_type):
self.arr.sum()
def time_getitem_scalar(self, arr_type):
self.arr[154, 145]
def time_getitem_subarray(self, arr_type):
self.arr[:5, :5]
def time_getitem_row(self, arr_type):
self.arr[876]
def time_getitem_col(self, arr_type):
self.arr[:,273]
def time_diagonal(self, arr_type):
self.arr.diagonal()
class ImpureOperations(object):
params = [['FlatSparray', 'csr_matrix']]
param_names = ['arr_type']
number = 1 # make sure we re-run setup() before each timing
def setup(self, arr_type):
mat = ss.rand(3000, 4000, density=0.1, format='csr')
if arr_type == 'FlatSparray':
self.arr = FlatSparray.from_spmatrix(mat)
else:
self.arr = mat
def time_setdiag(self, arr_type):
self.arr.setdiag(99)
|
perimosocordiae/sparray
|
bench/benchmarks/ops.py
|
Python
|
mit
| 1,302 | 0.016129 |
from pyspark import SparkConf, SparkContext
from jsonrpc.authproxy import AuthServiceProxy
import json
import sys
#This is batch processing of bitcoind (locally run bitcoin daemon)
#RPC (Remote Procedure Call) block's json stored
#in HDFS. Currently 187,990 blocks' json representation is
#stored in HDFS. The HDFS file size is around 6.5GB
#The output of this program is block_number and the corresponding
#transaction fee in units of Satoshi. This data is written to HBASE
#table.
#The program takes only 69 minutes to run. While the streaming version
#of the program takes 177 minutes.
#It is a Good illustration of time-space(memory) tradeoff
conf = SparkConf().setMaster("local").setAppName("bitcoin_TransactionFee_calcultor")
sc = SparkContext(conf=conf)
rpcuser="bitcoinrpc"
rpcpassword="5C3Y6So6sCRPgBao8KyWV2bYpTHZt5RCVAiAg5JmTnHr"
rpcip = "127.0.0.1"
bitcoinrpc = AuthServiceProxy("http://"+rpcuser+":"+rpcpassword+"@"+rpcip+":8332")
#function SaveRecord: saves tx_fee for a block to hbase database
def SaveRecord(tx_fee_rdd):
host = 'localhost' #sys.argv[1]
    table = 'tx_fee_table_sp_batch' #needs to be created beforehand in hbase shell
conf = {"hbase.zookeeper.quorum": host,
"hbase.mapred.outputtable": table,
"mapreduce.outputformat.class": "org.apache.hadoop.hbase.mapreduce.TableOutputFormat",
"mapreduce.job.output.key.class": "org.apache.hadoop.hbase.io.ImmutableBytesWritable",
"mapreduce.job.output.value.class": "org.apache.hadoop.io.Writable"}
keyConv = "org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter"
valueConv = "org.apache.spark.examples.pythonconverters.StringListToPutConverter"
#row key id,id, cfamily=tx_fee_col,column_name = tx_fee, column_value=x
#datamap = tx_fee_rdd.map(lambda x: ("tx_fee",x) )
#( rowkey , [ row key , column family , column name , value ] )
datamap = tx_fee_rdd.map(lambda x: (str(x[0]),
[str(x[0]),"tx_fee_col","tx_fee",str(x[1])])
)
datamap.saveAsNewAPIHadoopDataset(conf=conf,
keyConverter=keyConv,
valueConverter=valueConv)
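#function get_tx_fee: decodes a raw transaction (here the block's
#generation/coinbase tx) into json via the local bitcoind RPC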
def get_tx_fee(gen_tx):
gen_tx_json = bitcoinrpc.decoderawtransaction(bitcoinrpc.getrawtransaction(gen_tx))
return gen_tx_json
content_rdd = sc.textFile("hdfs://ec2-52-21-47-235.compute-1.amazonaws.com:9000/bitcoin/block_chain_full.txt")
#The file below is for testing purposes
#content_rdd = sc.textFile("file:///home/ubuntu/unix_practice/bitcoin/2_blocks.txt")
dump_rdd = content_rdd.map(lambda x: json.dumps(x)).map(lambda x : x.decode('unicode_escape').encode('ascii','ignore'))
#print dump_rdd.take(2)
load_rdd = dump_rdd.map(lambda x: json.loads(x))
#print load_rdd.take(2)
split_blk_rdd = load_rdd.map(lambda x: x.split(":"))
#tx = load_rdd.filter(lambda x: "tx" in x)
#print split_blk_rdd.take(split_blk_rdd.count())
gen_tx_rdd = split_blk_rdd.map(lambda x : (x[8][1:7],x[6][4:68]) ) #this gets generation transactions
#print "*************HERE***************"
#print gen_tx_rdd.take(gen_tx_rdd.count()) #from the blocks
tx_json_rdd = gen_tx_rdd.map(lambda x : (x[0],get_tx_fee(x[1])) ) #function call
#print tx_json_rdd.take(tx_json_rdd.count())
tx_fee_rdd = tx_json_rdd.map(lambda x : (x[0],x[1].items()
[3][1][0]["value"]-25) )#.filter(lambda x : "value" in x)
#print tx_fee_rdd.take(tx_fee_rdd.count())
SaveRecord(tx_fee_rdd) #function call
#just to display values for debugging
#val_lst = tx_fee_rdd.take(tx_fee_rdd.count()) #use [3][1]
#print val_lst
|
tariq786/datafying_bitcoin
|
sp_batch_hdfs.py
|
Python
|
gpl-3.0
| 3,652 | 0.026013 |
# encoding: utf-8
"""Provides collection of events emitters"""
import time
from . import EndPoint
def container_count(host_fqdn, docker_client, statistics):
"""
Emit events providing:
- number of containers
- number of running containers
- number of crashed containers
:param host_fqdn: FQDN of the host where the docker-zabbix-daemon is running, for instance docker.acme.com
:type host_fqdn: string
:param docker_client: instance of docker.Client see http://docker-py.readthedocs.org/en/latest/api/
:type docker_client: docker.Client
:param statistics: List of dicts providing collected container statistics. see Docker stats API call on https://docs.docker.com/reference/api/docker_remote_api_v1.17/#get-container-stats-based-on-resource-usage
:return: list of dicts providing additional events to push to Zabbix.
Each dict is composed of 4 keys:
- hostname
- timestamp
- key
- value
"""
running = 0
crashed = 0
now = int(time.time())
containers = docker_client.containers(all=True)
for container in containers:
status = container['Status']
if status.startswith('Up'):
running += 1
elif not status.startswith('Exited (0)'):
crashed += 1
data = {
'all': len(containers),
'running': running,
'crashed': crashed,
}
return [
{
'hostname': '-',
'timestamp': now,
'key': EndPoint.EVENT_KEY_PREFIX + 'count.' + key,
'value': value
}
for key, value in data.items()
]
def container_ip(host_fqdn, docker_client, statistics):
"""Emit the ip addresses of containers.
"""
for stat in statistics:
containerId = stat['id']
details = docker_client.inspect_container(containerId)
yield {
'hostname': EndPoint.container_hostname(host_fqdn, stat['name']),
'timestamp': stat['timestamp'],
'key': EndPoint.EVENT_KEY_PREFIX + 'ip',
'value': details['NetworkSettings']['IPAddress']
}
def cpu_count(host_fqdn, docker_client, statistics):
"""Emit the number of CPU available for each container.
"""
for stat in statistics:
yield {
'hostname': EndPoint.container_hostname(host_fqdn, stat['name']),
'timestamp': stat['timestamp'],
'key': EndPoint.EVENT_KEY_PREFIX + 'cpu.count',
'value': len(stat['cpu_stats']['cpu_usage']['percpu_usage'])
}
|
dockermeetupsinbordeaux/docker-zabbix-sender
|
docker_zabbix_sender/stats.py
|
Python
|
apache-2.0
| 2,565 | 0.00234 |
# From Python 3.6 functools.py
# Bug was in detecting "nonlocal" access
def not_bug():
cache_token = 5
def register():
nonlocal cache_token
return cache_token == 5
return register()
assert not_bug()
|
rocky/python-uncompyle6
|
test/simple_source/bug33/05_nonlocal.py
|
Python
|
gpl-3.0
| 230 | 0.004348 |
# coding: utf-8
from flask import Blueprint
__author__ = 'Jux.Liu'
user = Blueprint('user', __name__)
from . import views
|
MOOOWOOO/Q400K
|
app/user/__init__.py
|
Python
|
gpl-3.0
| 125 | 0.008 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import apps
from django.forms import widgets
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.utils.safestring import mark_safe
from cmsplugin_cascade.fields import PartialFormField
from cmsplugin_cascade.plugin_base import CascadePluginBase
from .forms import LinkForm
class LinkPluginBase(CascadePluginBase):
text_enabled = True
allow_children = False
parent_classes = []
require_parent = False
glossary_fields = (
PartialFormField('target',
widgets.RadioSelect(choices=(('', _("Same Window")), ('_blank', _("New Window")),
('_parent', _("Parent Window")), ('_top', _("Topmost Frame")),)),
initial='',
label=_("Link Target"),
help_text=_("Open Link in other target.")
),
PartialFormField('title',
widgets.TextInput(),
label=_("Title"),
help_text=_("Link's Title")
),
)
html_tag_attributes = {'title': 'title', 'target': 'target'}
# map field from glossary to these form fields
glossary_field_map = {'link': ('link_type', 'cms_page', 'ext_url', 'mail_to',)}
@classmethod
def get_link(cls, obj):
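        # Resolve the link glossary to a URL: external and mailto links are
        # returned directly, model references are resolved lazily and cached
        # on the instance as _link_model.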
link = obj.glossary.get('link', {})
linktype = link.get('type')
if linktype == 'exturl':
return '{url}'.format(**link)
if linktype == 'email':
return 'mailto:{email}'.format(**link)
# otherwise try to resolve by model
if 'model' in link and 'pk' in link:
if not hasattr(obj, '_link_model'):
Model = apps.get_model(*link['model'].split('.'))
try:
obj._link_model = Model.objects.get(pk=link['pk'])
except Model.DoesNotExist:
obj._link_model = None
if obj._link_model:
return obj._link_model.get_absolute_url()
def get_ring_bases(self):
bases = super(LinkPluginBase, self).get_ring_bases()
bases.append('LinkPluginBase')
return bases
def get_form(self, request, obj=None, **kwargs):
kwargs.setdefault('form', LinkForm.get_form_class())
return super(LinkPluginBase, self).get_form(request, obj, **kwargs)
@python_2_unicode_compatible
class LinkElementMixin(object):
"""
A mixin class to convert a CascadeElement into a proxy model for rendering the ``<a>`` element.
Note that a Link inside the Text Editor Plugin is rendered using ``str(instance)`` rather
than ``instance.content``.
"""
def __str__(self):
return self.content
@property
def link(self):
return self.plugin_class.get_link(self)
@property
def content(self):
return mark_safe(self.glossary.get('link_content', ''))
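# Illustrative sketch (not part of the original module): the glossary shapes
# that LinkPluginBase.get_link() resolves. The URL and e-mail values are
# invented; the model/pk variant needs a real Django model and is therefore
# only shown in a comment.
def _example_get_link_shapes():
    class FakeElement(object):
        glossary = {'link': {'type': 'exturl', 'url': 'http://example.com/'}}
    assert LinkPluginBase.get_link(FakeElement()) == 'http://example.com/'
    FakeElement.glossary = {'link': {'type': 'email', 'email': 'info@example.com'}}
    assert LinkPluginBase.get_link(FakeElement()) == 'mailto:info@example.com'
    # A CMS page link would look like {'link': {'model': 'cms.Page', 'pk': 7}}
    # and is resolved through apps.get_model() and get_absolute_url() above.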
|
jtiki/djangocms-cascade
|
cmsplugin_cascade/link/plugin_base.py
|
Python
|
mit
| 2,939 | 0.003403 |
__all__ = ['scraper', 'local_scraper', 'pw_scraper', 'uflix_scraper', 'watchseries_scraper', 'movie25_scraper', 'merdb_scraper', '2movies_scraper', 'icefilms_scraper',
'movieshd_scraper', 'yifytv_scraper', 'viooz_scraper', 'filmstreaming_scraper', 'myvideolinks_scraper', 'filmikz_scraper', 'clickplay_scraper', 'nitertv_scraper',
'iwatch_scraper', 'ororotv_scraper', 'view47_scraper', 'vidics_scraper', 'oneclickwatch_scraper', 'istreamhd_scraper', 'losmovies_scraper', 'movie4k_scraper',
'noobroom_scraper', 'solar_scraper', 'vkbox_scraper', 'directdl_scraper', 'movietv_scraper', 'moviesonline7_scraper', 'streamallthis_scraper', 'afdah_scraper',
'streamtv_scraper', 'moviestorm_scraper', 'wmo_scraper', 'zumvo_scraper', 'wso_scraper', 'tvrelease_scraper', 'hdmz_scraper', 'ch131_scraper', 'watchfree_scraper',
'pftv_scraper', 'flixanity_scraper', 'cmz_scraper', 'movienight_scraper', 'gvcenter_scraper', 'alluc_scraper', 'afdahorg_scraper', 'xmovies8_scraper',
'yifystreaming_scraper', 'mintmovies_scraper', 'playbox_scraper', 'shush_proxy', 'mvsnap_scraper', 'pubfilm_scraper', 'pctf_scraper', 'rlssource_scraper',
'couchtunerv1_scraper', 'couchtunerv2_scraper', 'tunemovie_scraper', 'watch8now_scraper', 'megabox_scraper', 'dizilab_scraper', 'beinmovie_scraper',
'dizimag_scraper', 'ayyex_scraper']
import re
import os
import xbmcaddon
import xbmc
import datetime
import time
from salts_lib import log_utils
from salts_lib.constants import VIDEO_TYPES
from . import scraper # just to avoid editor warning
from . import *
class ScraperVideo:
def __init__(self, video_type, title, year, trakt_id, season='', episode='', ep_title='', ep_airdate=''):
assert(video_type in (VIDEO_TYPES.__dict__[k] for k in VIDEO_TYPES.__dict__ if not k.startswith('__')))
self.video_type = video_type
self.title = title
self.year = year
self.season = season
self.episode = episode
self.ep_title = ep_title
self.trakt_id = trakt_id
self.ep_airdate = None
if ep_airdate:
try: self.ep_airdate = datetime.datetime.strptime(ep_airdate, "%Y-%m-%d").date()
except (TypeError, ImportError): self.ep_airdate = datetime.date(*(time.strptime(ep_airdate, '%Y-%m-%d')[0:3]))
def __str__(self):
return '|%s|%s|%s|%s|%s|%s|%s|' % (self.video_type, self.title, self.year, self.season, self.episode, self.ep_title, self.ep_airdate)
def update_xml(xml, new_settings, cat_count):
new_settings.insert(0, '<category label="Scrapers %s">' % (cat_count))
new_settings.append(' </category>')
new_str = '\n'.join(new_settings)
match = re.search('(<category label="Scrapers %s">.*?</category>)' % (cat_count), xml, re.DOTALL | re.I)
if match:
old_settings = match.group(1)
        if old_settings != new_str:
xml = xml.replace(old_settings, new_str)
else:
log_utils.log('Unable to match category: %s' % (cat_count), xbmc.LOGWARNING)
return xml
def update_settings():
path = xbmcaddon.Addon().getAddonInfo('path')
full_path = os.path.join(path, 'resources', 'settings.xml')
try:
with open(full_path, 'r') as f:
xml = f.read()
except:
raise
new_settings = []
cat_count = 1
old_xml = xml
classes = scraper.Scraper.__class__.__subclasses__(scraper.Scraper)
for cls in sorted(classes, key=lambda x: x.get_name().upper()):
new_settings += cls.get_settings()
if len(new_settings) > 90:
xml = update_xml(xml, new_settings, cat_count)
new_settings = []
cat_count += 1
if new_settings:
xml = update_xml(xml, new_settings, cat_count)
if xml != old_xml:
try:
with open(full_path, 'w') as f:
f.write(xml)
except:
raise
else:
log_utils.log('No Settings Update Needed', xbmc.LOGDEBUG)
update_settings()
|
aplicatii-romanesti/allinclusive-kodi-pi
|
.kodi/addons/plugin.video.salts/scrapers/__init__.py
|
Python
|
apache-2.0
| 4,030 | 0.005707 |
# Copyright 2016-2017 Alan F Rubin, Daniel C Esposito
#
# This file is part of Enrich2.
#
# Enrich2 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Enrich2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Enrich2. If not, see <http://www.gnu.org/licenses/>.
"""
Enrich2 aligner module
======================
Module for alignment of variants to the wild type sequence.
This module is optional, and using it will dramatically increase runtime when
counting variants. It is only recommended for users who need to count
insertion and deletion variants (i.e. not coding sequences).
"""
from ctypes import c_int
import numpy as np
import logging
import re
from ..base.utils import log_message
_AMBIVERT = False
try:
from ambivert.ambivert import gapped_alignment_to_cigar
from ambivert import align
# Reset the logging handlers after loading ambivert
for handler in logging.getLogger("ambivert").handlers:
handler.close()
logging.getLogger('ambivert').handlers = []
for handler in logging.getLogger().handlers:
handler.close()
logging.getLogger().handlers = []
logging.captureWarnings(False)
_AMBIVERT = True
except ImportError:
pass
__all__ = [
"Aligner"
]
#: Default similarity matrix used by the aligner.
#: User-defined matrices must have this format.
_simple_similarity = {
'A': {'A': 1, 'C': -1, 'G': -1, 'T': -1, 'N': 0, 'X': 0},
'C': {'A': -1, 'C': 1, 'G': -1, 'T': -1, 'N': 0, 'X': 0},
'G': {'A': -1, 'C': -1, 'G': 1, 'T': -1, 'N': 0, 'X': 0},
'T': {'A': -1, 'C': -1, 'G': -1, 'T': 1, 'N': 0, 'X': 0},
'N': {'A': 0, 'C': 0, 'G': 0, 'T': 0, 'N': 0, 'X': 0},
'X': {'A': 0, 'C': 0, 'G': 0, 'T': 0, 'N': 0, 'X': 0},
'gap_open': -1,
'gap_extend': 0
}
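#: Illustrative example (not part of the original module): a user-defined
#: similarity matrix in the same format, here with a harsher mismatch and
#: gap penalty. The numeric values are arbitrary.
_example_custom_similarity = {
    'A': {'A': 2, 'C': -2, 'G': -2, 'T': -2, 'N': 0, 'X': 0},
    'C': {'A': -2, 'C': 2, 'G': -2, 'T': -2, 'N': 0, 'X': 0},
    'G': {'A': -2, 'C': -2, 'G': 2, 'T': -2, 'N': 0, 'X': 0},
    'T': {'A': -2, 'C': -2, 'G': -2, 'T': 2, 'N': 0, 'X': 0},
    'N': {'A': 0, 'C': 0, 'G': 0, 'T': 0, 'N': 0, 'X': 0},
    'X': {'A': 0, 'C': 0, 'G': 0, 'T': 0, 'N': 0, 'X': 0},
    'gap_open': -3,
    'gap_extend': -1,
}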
class Aligner(object):
"""
    Class for performing global alignment of two DNA sequences.
    This class implements `Needleman-Wunsch <http://en.wikipedia.org/wiki/
    Needleman%E2%80%93Wunsch_algorithm>`_ global alignment.
The :py:class:`~enrich2.sequence.aligner.Aligner` requires a scoring matrix
    when created. The format is a nested dictionary, with special ``'gap_open'``
    and ``'gap_extend'`` entries for the gap opening and gap extension
    penalties.
The ``'X'`` nucleotide is a special case for unresolvable mismatches in
:py:class:`~enrich2.libraries.overlap.OverlapSeqLib` variant data.
Parameters
----------
similarity : `dict`
Similarity matrix used by the aligner, must contain a cost mapping
between each of 'A', 'C', 'G', 'T', 'N', 'X'.
backend : {'ambivert', 'enrich2'}, default: 'ambivert'
Select the alignment backend. If backend is 'ambivert' then
similarity is ignored.
Attributes
----------
similarity : `dict`
Similarity matrix used by the aligner, must contain a cost mapping
between each of 'A', 'C', 'G', 'T', 'N', 'X'.
matrix : :py:class:`~numpy.ndarray`
The dynamically computed cost matrix.
seq1 : `str`
Reference sequence.
seq2 : `str`
The sequence that is to be aligned.
calls : `int`
Number of times `align` has been performed.
Methods
-------
align
        Align two sequences using ``Needleman-Wunsch``.
Notes
-----
This class implements `Needleman-Wunsch <http://en.wikipedia.org/wiki/
    Needleman%E2%80%93Wunsch_algorithm>`_ global alignment.
"""
_MAT = 1 # match
_INS = 2 # insertion (with respect to wild type)
_DEL = 3 # deletion (with respect to wild type)
_END = 4 # end of traceback
def __init__(self, similarity=_simple_similarity, backend='ambivert'):
similarity_keys = list(similarity.keys())
if 'gap_open' in similarity_keys:
similarity_keys.remove('gap_open')
if 'gap_extend' in similarity_keys:
similarity_keys.remove('gap_extend')
for key in similarity_keys:
keys_map_to_dicts = all(x in similarity[key]
for x in similarity_keys)
            not_square = len(similarity[key]) != len(similarity_keys)
            if not keys_map_to_dicts or not_square:
raise ValueError("Asymmetrical alignment scoring matrix")
self.similarity = similarity
if 'gap_open' not in self.similarity:
raise ValueError(
"No gap_open open penalty in alignment scoring matrix.")
if 'gap_extend' not in self.similarity:
raise ValueError(
"No gap_open extend penalty in alignment scoring matrix.")
self.matrix = None
self.seq1 = None
self.seq2 = None
self.calls = 0
# TODO: uncomment aligner backend
# global _AMBIVERT
# if backend == 'ambivert' and _AMBIVERT:
# self.align = self.align_ambivert
# log_message(
# logging_callback=logging.info,
# msg="Using ambivert alignment backend.",
# extra={'oname': 'Aligner'}
# )
# else:
# self.align = self.align_enrich2
# log_message(
# logging_callback=logging.info,
# msg="Using enrich2 alignment backend.",
# extra={'oname': 'Aligner'}
# )
self.align = self.align_enrich2
log_message(
logging_callback=logging.info,
msg="Using enrich2 alignment backend.",
extra={'oname': 'Aligner'}
)
def align_ambivert(self, seq1, seq2):
"""
Aligns the two sequences, *seq1* and *seq2* and returns a list of
tuples describing the differences between the sequences.
The tuple format is ``(i, j, type, length)``, where ``i`` and ``j``
are the positions in *seq1* and *seq2*, respectively, and type is one
of ``"match"``, ``"mismatch"``, ``"insertion"``, or ``"deletion"``.
For indels, the ``length`` value is the number of bases inserted or
deleted with respect to *seq1* starting at ``i``.
Parameters
----------
seq1 : `str`
Reference sequence.
seq2 : `str`
The sequence that is to be aligned.
Returns
-------
`list`
list of tuples describing the differences between the sequences.
"""
if not isinstance(seq1, str):
raise TypeError("First sequence must be a str type")
if not isinstance(seq2, str):
raise TypeError("Second sequence must be a str type")
if not seq1:
raise ValueError("First sequence must not be empty.")
if not seq2:
raise ValueError("Second sequence must not be empty.")
self.matrix = np.ndarray(
shape=(len(seq1) + 1, len(seq2) + 1),
dtype=np.dtype([('score', np.int), ('trace', np.byte)])
)
seq1 = seq1.upper()
seq2 = seq2.upper()
a1, a2, *_ = self.needleman_wunsch(
seq1, seq2,
gap_open=self.similarity['gap_open'],
gap_extend=self.similarity['gap_extend']
)
backtrace = cigar_to_backtrace(
seq1, seq2,
gapped_alignment_to_cigar(a1, a2)[0]
)
return backtrace
def align_enrich2(self, seq1, seq2):
"""
Aligns the two sequences, *seq1* and *seq2* and returns a list of
tuples describing the differences between the sequences.
The tuple format is ``(i, j, type, length)``, where ``i`` and ``j``
are the positions in *seq1* and *seq2*, respectively, and type is one
of ``"match"``, ``"mismatch"``, ``"insertion"``, or ``"deletion"``.
For indels, the ``length`` value is the number of bases inserted or
deleted with respect to *seq1* starting at ``i``.
Parameters
----------
seq1 : `str`
Reference sequence.
seq2 : `str`
The sequence that is to be aligned.
Returns
-------
`list`
list of tuples describing the differences between the sequences.
"""
if not isinstance(seq1, str):
raise TypeError("First sequence must be a str type")
if not isinstance(seq2, str):
raise TypeError("Second sequence must be a str type")
if not seq1:
raise ValueError("First sequence must not be empty.")
if not seq2:
raise ValueError("Second sequence must not be empty.")
self.matrix = np.ndarray(
shape=(len(seq1) + 1, len(seq2) + 1),
dtype=np.dtype([('score', np.int), ('trace', np.byte)])
)
seq1 = seq1.upper()
seq2 = seq2.upper()
# build matrix of scores/traceback information
for i in range(len(seq1) + 1):
self.matrix[i, 0] = (self.similarity['gap_open'] * i, Aligner._DEL)
for j in range(len(seq2) + 1):
self.matrix[0, j] = (self.similarity['gap_open'] * j, Aligner._INS)
for i in range(1, len(seq1) + 1):
for j in range(1, len(seq2) + 1):
match = (self.matrix[i - 1, j - 1]['score'] +
self.similarity[seq1[i - 1]][seq2[j - 1]],
Aligner._MAT)
delete = (self.matrix[i - 1, j]['score'] +
self.similarity['gap_open'], Aligner._DEL)
insert = (self.matrix[i, j - 1]['score'] +
self.similarity['gap_open'], Aligner._INS)
# traces = [delete, insert, match]
# max_score = max(delete, insert, match, key=lambda x: x[0])[0]
# possible_traces = [t for t in traces if t[0] == max_score]
# priority_move = sorted(possible_traces, key=lambda x: x[1])[0]
# self.matrix[i, j] = priority_move
# def dotype(lol):
# if lol == self._MAT:
# return 'match'
# if lol == self._INS:
# return 'insertion'
# if lol == self._DEL:
# return 'deletion'
# print(i, j)
# print("Possible Scores: {}".format([t[0] for t in possible_traces]))
# print("Possible Tracebacks: {}".format([dotype(t[1]) for t in possible_traces]))
# print("Chosen Traceback: {}".format(dotype(priority_move[1])))
max_score = max(delete, insert, match, key=lambda x: x[0])
self.matrix[i, j] = max_score
self.matrix[0, 0] = (0, Aligner._END)
# calculate alignment from the traceback
i = len(seq1)
j = len(seq2)
traceback = list()
while i > 0 or j > 0:
if self.matrix[i, j]['trace'] == Aligner._MAT:
if seq1[i - 1] == seq2[j - 1]:
traceback.append((i - 1, j - 1, "match", None))
else:
traceback.append((i - 1, j - 1, "mismatch", None))
i -= 1
j -= 1
elif self.matrix[i, j]['trace'] == Aligner._INS:
pos_1 = 0 if (i - 1) < 0 else (i - 1)
traceback.append((pos_1, j - 1, "insertion", 1))
j -= 1
elif self.matrix[i, j]['trace'] == Aligner._DEL:
pos_2 = 0 if (j - 1) < 0 else (j - 1)
traceback.append((i - 1, pos_2, "deletion", 1))
i -= 1
elif self.matrix[i, j]['trace'] == Aligner._END:
pass
else:
raise RuntimeError("Invalid value in alignment traceback.")
traceback.reverse()
# combine indels
indel = None
traceback_combined = list()
for t in traceback:
if t[2] == "insertion" or t[2] == "deletion":
if indel is not None:
if t[2] == indel[2]:
indel[3] += t[3]
else:
raise RuntimeError("Aligner failed to combine indels. "
"Check 'gap_open' penalty.")
else:
indel = list(t)
else:
if indel is not None:
traceback_combined.append(tuple(indel))
indel = None
traceback_combined.append(t)
if indel is not None:
traceback_combined.append(tuple(indel))
self.calls += 1
return traceback_combined
def needleman_wunsch(self, seq1, seq2, gap_open=-1, gap_extend=0):
"""
Wrapper method for Needleman-Wunsch alignment using
the plumb.bob C implementation
Parameters
----------
seq1 : `str`
an ascii DNA sequence string. This is the query
sequence and must be all upper case
seq2 : `str`
an ascii DNA sequence string. This is the reference
sequence and may contain lower case soft masking
        gap_open : `int`
            Cost for a gap open.
        gap_extend : `int`
            Cost for a gap extension.
Returns
-------
`tuple`
A tuple containing aligned seq1, aligned seq2, start position
in seq1 and start position in seq2
"""
DNA_MAP = align.align_ctypes.make_map('ACGTNX', 'N', True)
DNA_SCORE = make_dna_scoring_matrix(self.similarity)
alignment = align.global_align(
bytes(seq1, 'ascii'),
len(seq1),
bytes(seq2.upper(), 'ascii'),
len(seq2),
DNA_MAP[0],
DNA_MAP[1],
DNA_SCORE,
gap_open, gap_extend
)
if '-' in seq1 or '-' in seq2:
raise RuntimeError('Aligning Sequences with gaps is not supported',
seq1, seq2)
start_seq1 = 0
start_seq2 = 0
frag = alignment[0].align_frag
align_seq1 = ''
align_seq2 = ''
while frag:
frag = frag[0]
if frag.type == align.MATCH:
f1 = seq1[frag.sa_start:frag.sa_start + frag.hsp_len]
f2 = seq2[frag.sb_start:frag.sb_start + frag.hsp_len]
align_seq1 += f1
align_seq2 += f2
elif frag.type == align.A_GAP:
align_seq1 += '-' * frag.hsp_len
align_seq2 += seq2[frag.sb_start:frag.sb_start + frag.hsp_len]
elif frag.type == align.B_GAP:
align_seq1 += seq1[frag.sa_start:frag.sa_start + frag.hsp_len]
align_seq2 += '-' * frag.hsp_len
frag = frag.next
assert len(align_seq1) == len(align_seq2)
align.alignment_free(alignment)
return align_seq1, align_seq2, start_seq1, start_seq2
def smith_waterman(self, seq1, seq2, gap_open=-1, gap_extend=0):
"""
Wrapper method for Smith-Waterman alignment using
the plumb.bob C implementation
Parameters
----------
seq1 : `str`
an ascii DNA sequence string. This is the query
sequence and must be all upper case
seq2 : `str`
an ascii DNA sequence string. This is the reference
sequence and may contain lower case soft masking
        gap_open : `int`
            Cost for a gap open.
        gap_extend : `int`
            Cost for a gap extension.
Returns
-------
`tuple`
A tuple containing aligned seq1, aligned seq2, start position
in seq1 and start position in seq2
"""
DNA_MAP = align.align_ctypes.make_map('ACGTNX', 'N', True)
DNA_SCORE = make_dna_scoring_matrix(self.similarity)
alignment = align.local_align(
bytes(seq1, 'ascii'), len(seq1),
bytes(seq2.upper(), 'ascii'), len(seq2),
DNA_MAP[0],
DNA_MAP[1],
DNA_SCORE,
gap_open, gap_extend
)
if '-' in seq1 or '-' in seq2:
raise RuntimeError('Aligning Sequences with gaps is not supported',
seq1, seq2)
start_seq1 = alignment.contents.align_frag.contents.sa_start
start_seq2 = alignment.contents.align_frag.contents.sb_start
frag = alignment[0].align_frag
align_seq1 = ''
align_seq2 = ''
while frag:
frag = frag[0]
if frag.type == align.MATCH:
f1 = seq1[frag.sa_start:frag.sa_start + frag.hsp_len]
f2 = seq2[frag.sb_start:frag.sb_start + frag.hsp_len]
align_seq1 += f1
align_seq2 += f2
elif frag.type == align.A_GAP:
align_seq1 += '-' * frag.hsp_len
align_seq2 += seq2[frag.sb_start:frag.sb_start + frag.hsp_len]
elif frag.type == align.B_GAP:
align_seq1 += seq1[frag.sa_start:frag.sa_start + frag.hsp_len]
align_seq2 += '-' * frag.hsp_len
frag = frag.next
assert len(align_seq1) == len(align_seq2)
align.alignment_free(alignment)
return align_seq1, align_seq2, start_seq1, start_seq2
def cigar_to_backtrace(seq1, seq2, cigar):
"""
Converts a cigar sequence into an enrich2 backtrace
The tuple format is ``(i, j, type, length)``, where ``i`` and ``j``
are the positions in *seq1* and *seq2*, respectively, and type is one
of ``"match"``, ``"mismatch"``, ``"insertion"``, or ``"deletion"``.
For indels, the ``length`` value is the number of bases inserted or
deleted with respect to *seq1* starting at ``i``.
Parameters
----------
seq1 : `str`
The string used during alignment for ``seq1``
seq2 : `str`
The string used during alignment for ``seq2``
cigar : `str`
The cigar string expecting characters {'M', 'I', 'D'}
Returns
-------
`list`
The tuple format is ``(i, j, type, length)``, where ``i`` and ``j``
are the positions in *seq1* and *seq2*, respectively, and type is one
of ``"match"``, ``"mismatch"``, ``"insertion"``, or ``"deletion"``.
For indels, the ``length`` value is the number of bases inserted or
deleted with respect to *seq1* starting at ``i``.
"""
    # Parse (count, operation) pairs with a regex so multi-digit run lengths
    # such as '12M' are handled correctly.
    pairs = re.findall(r'(\d+)([MID])', cigar)
    numbers = [int(num) for num, _letter in pairs]
    letters = [letter for _num, letter in pairs]
i = len(seq1)
j = len(seq2)
traceback = []
for num, char in reversed(list(zip(numbers, letters))):
if char == 'M':
for _ in range(num):
if seq1[i - 1] == seq2[j - 1]:
traceback.append((i - 1, j - 1, "match", None))
else:
traceback.append((i - 1, j - 1, "mismatch", None))
i -= 1
j -= 1
elif char == 'I':
pos_1 = 0 if (i - 1) < 0 else (i - 1)
traceback.append((pos_1, j - num, "insertion", num))
j -= num
elif char == 'D':
pos_2 = 0 if (j - 1) < 0 else (j - 1)
traceback.append((i - num, pos_2, "deletion", num))
i -= num
else:
raise RuntimeError("Invalid value in alignment traceback.")
traceback.reverse()
return traceback
def make_dna_scoring_matrix(similarity, ordering='ACGTNX'):
"""
Make a ctype DNA scoring matrix for alignment.
Parameters
----------
similarity : `dict`
Similarity matrix used by the aligner, must contain a cost mapping
between each of 'A', 'C', 'G', 'T', 'N', 'X'.
ordering : `str`
String representing the key order the dictionary should be
traversed to build the square similarity matrix.
Returns
-------
`list`
Matrix in single list format.
"""
similarity_matrix = []
n = len(ordering)
for key_fr in ordering:
for key_to in ordering:
cost = similarity[key_fr][key_to]
similarity_matrix.append(cost)
return (c_int * (n * n))(*similarity_matrix)
def test(seq1, seq2):
from enrich2.sequence.aligner import Aligner
amb = Aligner(backend='ambivert')
aen = Aligner(backend='enrich2')
print('Enrich2: {}'.format(aen.align(seq1, seq2)))
print('AmBiVert: {}'.format(amb.align(seq1, seq2)))
def build_aligners():
from enrich2.sequence.aligner import Aligner
amb = Aligner(backend='ambivert')
aen = Aligner(backend='enrich2')
return amb, aen
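def example_alignment():
    """Hypothetical usage sketch (not part of the original module): align two
    short invented sequences with the pure-Python enrich2 backend and return
    the traceback. Each element has the ``(i, j, type, length)`` form
    described above; the reference carries one extra base, so the result
    should include a single deletion entry.
    """
    aligner = Aligner(backend='enrich2')
    return aligner.align('ACGTACGT', 'ACGACGT')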
|
daniaki/Enrich2
|
enrich2/sequence/aligner.py
|
Python
|
gpl-3.0
| 21,237 | 0.002025 |
#!/usr/bin/env python
from distutils.core import setup, run_setup, Command
import zmq.auth
import shutil
import os
OSAD2_PATH = os.path.dirname(os.path.realpath(__file__))
OSAD2_SERVER_CERTS_DIR = "/etc/rhn/osad2-server/certs/"
OSAD2_SERVER_PUB_KEY = os.path.join(OSAD2_SERVER_CERTS_DIR, "public_keys/server.key")
OSAD2_SERVER_PRIVATE_KEY = os.path.join(OSAD2_SERVER_CERTS_DIR, "private_keys/server.key_secret")
OSAD2_CLIENT_SETUP_FILE = os.path.join(OSAD2_PATH, "setup_client.py")
PKGNAME_FILE = os.path.join(OSAD2_PATH, "PKGNAME")
class OSAD2Command(Command):
def _create_curve_certs(self, name):
print "Creating CURVE certificates for '%s'..." % name
pk_file, sk_file = zmq.auth.create_certificates(OSAD2_SERVER_CERTS_DIR,
name)
# OSAD2 certificates storage
pk_dst = os.path.join(OSAD2_SERVER_CERTS_DIR, "public_keys")
sk_dst = os.path.join(OSAD2_SERVER_CERTS_DIR, "private_keys")
shutil.move(pk_file, pk_dst)
shutil.move(sk_file, sk_dst)
pk_dst = os.path.join(pk_dst, name + ".key")
sk_dst = os.path.join(sk_dst, name + ".key_secret")
print pk_dst
print sk_dst
return pk_dst, sk_dst
class CreateServerCommand(OSAD2Command):
description = "Create and install CURVE server key"
user_options = []
def initialize_options(self):
self.name = None
def finalize_options(self):
assert os.path.isdir(OSAD2_SERVER_CERTS_DIR), \
'Certificates storage dir doesn\'t exist: %s' % OSAD2_SERVER_CERTS_DIR
server_keyfile = os.path.join(OSAD2_SERVER_CERTS_DIR, 'private_keys/server.key_secret')
assert not os.path.isfile(server_keyfile), 'Server key already exists'
def run(self):
self._create_curve_certs("server")
class CreateClientCommand(OSAD2Command):
description = "Create a new client. Generate a RPM package"
user_options = [
('name=', None, 'Specify the new client name.'),
]
def initialize_options(self):
self.name = None
def finalize_options(self):
assert self.name, 'You must specify a client name'
assert os.path.isdir(OSAD2_SERVER_CERTS_DIR), \
'Certificates storage dir doesn\'t exist: %s' % OSAD2_SERVER_CERTS_DIR
keyfile = os.path.join(OSAD2_SERVER_CERTS_DIR, "public_keys/" + self.name + '.key')
server_keyfile = os.path.join(OSAD2_SERVER_CERTS_DIR, 'private_keys/server.key_secret')
assert os.path.isfile(server_keyfile), 'Server key doesn\'t exist'
assert not os.path.isfile(keyfile), 'Client name already exists'
def run(self):
pk_file, sk_file = self._create_curve_certs(self.name)
# Temporary key storage for RPM build
import shutil
shutil.copy(pk_file, "etc/client.key_secret")
shutil.copy(OSAD2_SERVER_PUB_KEY, "etc/")
self._build_client_rpm()
def _build_client_rpm(self):
print "Creating RPM package for '%s'..." % self.name
open(PKGNAME_FILE, "w").write(self.name)
run_setup(OSAD2_CLIENT_SETUP_FILE, script_args=["bdist_rpm", "--quiet"])
os.remove(PKGNAME_FILE)
os.remove("etc/client.key_secret")
os.remove("etc/server.key")
setup(name='spacewalk-osad2-server',
version='alpha',
license='GPLv2',
description='An alternative OSA dispatcher module for Spacewalk',
long_description='This is an experiment to improve osad, a service '
'that simulates instant execution of actions in a '
'Spacewalk environment.',
platforms=['All'],
packages=['osad2', 'osad2.server'],
scripts=['bin/osad2_server.py'],
data_files=[
('/etc/rhn/osad2-server/', ['etc/osad_server.prod.cfg']),
('/etc/rhn/osad2-server/certs/private_keys/', []),
('/etc/rhn/osad2-server/certs/public_keys/', []),
],
cmdclass={'createclient': CreateClientCommand,
'createserver': CreateServerCommand})
|
SUSE/spacewalk-osad2
|
setup.py
|
Python
|
gpl-2.0
| 4,111 | 0.002189 |
import pylab as pl
import scipy as sp
from serpentine import *
from elements import *
import visualize
class AtfExt :
def __init__(self) :
print 'AtfExt:__init__'
# set twiss parameters
mytwiss = Twiss()
mytwiss.betax = 6.85338806855804
mytwiss.alphax = 1.11230788371885
mytwiss.etax = 3.89188697330735e-012
mytwiss.etaxp = 63.1945125619190e-015
mytwiss.betay = 2.94129410712918
mytwiss.alphay = -1.91105724003646
mytwiss.etay = 0
mytwiss.etayp = 0
mytwiss.nemitx = 5.08807339588144e-006
mytwiss.nemity = 50.8807339588144e-009
mytwiss.sigz = 8.00000000000000e-003
mytwiss.sigP = 1.03999991965541e-003
mytwiss.pz_cor = 0
# load beam line
self.atfFull = Serpentine(line='newATF2lat.aml',twiss=mytwiss)
self.atfExt = Serpentine(line=beamline.Line(self.atfFull.beamline[947:]),twiss=mytwiss)
# zero zero cors
self.atfExt.beamline.ZeroCors()
# Track
self.atfExt.Track()
readings = self.atfExt.GetBPMReadings()
# Visualisation
self.v = visualize.Visualize()
def moverCalibration(self, mag, bpms) :
pass
def correctorCalibration(self, corr, bpms) :
pass
def bba(self, mag, bpm) :
pass
def magMoverCalibration(self, mag, bpm) :
pass
def setMagnet(self,name, value) :
ei = self.atfExt.beamline.FindEleByName(name)
print ei
e = self.atfExt.beamline[ei[0]]
e.B = value
def plotOrbit(self) :
self.v.PlotBPMReadings(self.atfExt)
def plotTwiss(self) :
self.v.PlotTwiss(self.atfExt)
def run(self) :
self.atfExt.Track()
def jitterBeam(self) :
r = 1+sp.random.standard_normal()
# self.s.beam_in.x[5,:] = (1+r/3e4)*self.nominalE
# print r,self.s.BeamIn.x[5,:]
|
OscarES/serpentinetracker
|
examples/atf/atfExt.py
|
Python
|
gpl-3.0
| 1,975 | 0.017722 |
#! /usr/bin/env python
""" cryptopy.cipher.rijndael_test
Tests for the rijndael encryption algorithm
Copyright (c) 2002 by Paul A. Lambert
Read LICENSE.txt for license information.
"""
from cryptopy.cipher.rijndael import Rijndael
from cryptopy.cipher.base import noPadding
from binascii import a2b_hex
import unittest
class Rijndael_TestVectors(unittest.TestCase):
""" Test Rijndael algorithm using know values."""
def testGladman_dev_vec(self):
""" All 25 combinations of block and key size.
These test vectors were generated by Dr Brian Gladman
using the program aes_vec.cpp <brg@gladman.uk.net> 24th May 2001.
vectors in file: dev_vec.txt
http://fp.gladman.plus.com/cryptography_technology/rijndael/index.htm
"""
def RijndaelTestVec(i, key, pt, ct):
""" Run single AES test vector with any legal blockSize
and any legal key size. """
bkey, plainText, cipherText = a2b_hex(key), a2b_hex(pt), a2b_hex(ct)
kSize = len(bkey)
bSize = len(cipherText) # set block size to length of block
alg = Rijndael(bkey, keySize=kSize, blockSize=bSize, padding=noPadding())
self.assertEqual( alg.encrypt(plainText), cipherText )
self.assertEqual( alg.decrypt(cipherText), plainText )
RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 16 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c',
pt = '3243f6a8885a308d313198a2e0370734',
ct = '3925841d02dc09fbdc118597196a0b32')
RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 20 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160',
pt = '3243f6a8885a308d313198a2e0370734',
ct = '231d844639b31b412211cfe93712b880')
RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 24 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5',
pt = '3243f6a8885a308d313198a2e0370734',
ct = 'f9fb29aefc384a250340d833b87ebc00')
RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 28 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90',
pt = '3243f6a8885a308d313198a2e0370734',
ct = '8faa8fe4dee9eb17caa4797502fc9d3f')
RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 32 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe',
pt = '3243f6a8885a308d313198a2e0370734',
ct = '1a6e6c2c662e7da6501ffb62bc9e93f3')
RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 16 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c',
pt = '3243f6a8885a308d313198a2e03707344a409382',
ct = '16e73aec921314c29df905432bc8968ab64b1f51')
RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 20 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160',
pt = '3243f6a8885a308d313198a2e03707344a409382',
ct = '0553eb691670dd8a5a5b5addf1aa7450f7a0e587')
RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 24 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5',
pt = '3243f6a8885a308d313198a2e03707344a409382',
ct = '73cd6f3423036790463aa9e19cfcde894ea16623')
RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 28 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90',
pt = '3243f6a8885a308d313198a2e03707344a409382',
ct = '601b5dcd1cf4ece954c740445340bf0afdc048df')
RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 32 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe',
pt = '3243f6a8885a308d313198a2e03707344a409382',
ct = '579e930b36c1529aa3e86628bacfe146942882cf')
RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 16 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d',
ct = 'b24d275489e82bb8f7375e0d5fcdb1f481757c538b65148a')
RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 20 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d',
ct = '738dae25620d3d3beff4a037a04290d73eb33521a63ea568')
RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 24 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d',
ct = '725ae43b5f3161de806a7c93e0bca93c967ec1ae1b71e1cf')
RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 28 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d',
ct = 'bbfc14180afbf6a36382a061843f0b63e769acdc98769130')
RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 32 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d',
ct = '0ebacf199e3315c2e34b24fcc7c46ef4388aa475d66c194c')
RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 16 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9',
ct = 'b0a8f78f6b3c66213f792ffd2a61631f79331407a5e5c8d3793aceb1')
RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 20 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9',
ct = '08b99944edfce33a2acb131183ab0168446b2d15e958480010f545e3')
RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 24 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9',
ct = 'be4c597d8f7efe22a2f7e5b1938e2564d452a5bfe72399c7af1101e2')
RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 28 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9',
ct = 'ef529598ecbce297811b49bbed2c33bbe1241d6e1a833dbe119569e8')
RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 32 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9',
ct = '02fafc200176ed05deb8edb82a3555b0b10d47a388dfd59cab2f6c11')
RijndaelTestVec( i = 'dev_vec.txt 32 byte block, 16 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa98ec4e6c8',
ct = '7d15479076b69a46ffb3b3beae97ad8313f622f67fedb487de9f06b9ed9c8f19')
RijndaelTestVec( i = 'dev_vec.txt 32 byte block, 20 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa98ec4e6c8',
ct = '514f93fb296b5ad16aa7df8b577abcbd484decacccc7fb1f18dc567309ceeffd')
RijndaelTestVec( i = 'dev_vec.txt 32 byte block, 24 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa98ec4e6c8',
ct = '5d7101727bb25781bf6715b0e6955282b9610e23a43c2eb062699f0ebf5887b2')
RijndaelTestVec( i = 'dev_vec.txt 32 byte block, 28 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa98ec4e6c8',
ct = 'd56c5a63627432579e1dd308b2c8f157b40a4bfb56fea1377b25d3ed3d6dbf80')
RijndaelTestVec( i = 'dev_vec.txt 32 byte block, 32 byte key',
key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe',
pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa98ec4e6c8',
ct = 'a49406115dfb30a40418aafa4869b7c6a886ff31602a7dd19c889dc64f7e4e7a')
# Make this test module runnable from the command prompt
if __name__ == "__main__":
unittest.main()
|
repotvsupertuga/tvsupertuga.repository
|
script.module.cryptolib/lib/cryptopy/cipher/rijndael_test.py
|
Python
|
gpl-2.0
| 9,568 | 0.037312 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Plugin's Base Class
"""
import sys
import cli
from cli.docopt import docopt
PLUGIN_NAME = "base-plugin"
PLUGIN_CLASS = "PluginBase"
VERSION = "Mesos Plugin Base 1.0"
SHORT_HELP = "This is the base plugin from which all other plugins inherit."
USAGE = \
"""
{short_help}
Usage:
mesos {plugin} (-h | --help)
mesos {plugin} --version
mesos {plugin} <command> (-h | --help)
mesos {plugin} [options] <command> [<args>...]
Options:
-h --help Show this screen.
--version Show version info.
Commands:
{commands}
"""
SUBCOMMAND_USAGE = \
"""{short_help}
Usage:
mesos {plugin} {command} (-h | --help)
mesos {plugin} {command} --version
mesos {plugin} {command} [options] {arguments}
Options:
{flags}
Description:
{long_help}
"""
class PluginBase():
"""
Base class from which all CLI plugins should inherit.
"""
# pylint: disable=too-few-public-methods
COMMANDS = {}
def __setup__(self, command, argv):
pass
def __module_reference__(self):
return sys.modules[self.__module__]
def __init__(self, settings, config):
# pylint: disable=invalid-name
self.PLUGIN_NAME = PLUGIN_NAME
self.PLUGIN_CLASS = PLUGIN_CLASS
self.VERSION = VERSION
self.SHORT_HELP = SHORT_HELP
self.USAGE = USAGE
module = self.__module_reference__()
if hasattr(module, "PLUGIN_NAME"):
self.PLUGIN_NAME = getattr(module, "PLUGIN_NAME")
if hasattr(module, "PLUGIN_CLASS"):
self.PLUGIN_CLASS = getattr(module, "PLUGIN_CLASS")
if hasattr(module, "VERSION"):
self.VERSION = getattr(module, "VERSION")
if hasattr(module, "SHORT_HELP"):
self.SHORT_HELP = getattr(module, "SHORT_HELP")
if hasattr(module, "USAGE"):
self.USAGE = getattr(module, "USAGE")
self.settings = settings
self.config = config
def __autocomplete__(self, command, current_word, argv):
# pylint: disable=unused-variable,unused-argument,
# attribute-defined-outside-init
return ("default", [])
def __autocomplete_base__(self, current_word, argv):
option = "default"
# <command>
comp_words = list(self.COMMANDS.keys())
comp_words = cli.util.completions(comp_words, current_word, argv)
if comp_words is not None:
return (option, comp_words)
# <args>...
comp_words = self.__autocomplete__(argv[0], current_word, argv[1:])
# In general, we expect a tuple to be returned from __autocomplete__,
# with the first element being a valid autocomplete option, and the
# second being a list of completion words. However, in the common
# case we usually use the default option, so it's OK for a plugin to
# just return a list. We will add the "default" option for them.
if isinstance(comp_words, tuple):
option, comp_words = comp_words
return (option, comp_words)
def main(self, argv):
"""
        Main entry point: takes the arguments passed down from the top-level
        mesos command, parses them, and dispatches to the appropriate
        subcommand method.
"""
command_strings = cli.util.format_commands_help(self.COMMANDS)
usage = self.USAGE.format(
plugin=self.PLUGIN_NAME,
short_help=self.SHORT_HELP,
commands=command_strings)
arguments = docopt(
usage,
argv=argv,
version=self.VERSION,
program="mesos " + self.PLUGIN_NAME,
options_first=True)
cmd = arguments["<command>"]
argv = arguments["<args>"]
if cmd in self.COMMANDS.keys():
if "external" not in self.COMMANDS[cmd]:
argument_format, short_help, long_help, flag_format = \
cli.util.format_subcommands_help(self.COMMANDS[cmd])
usage = SUBCOMMAND_USAGE.format(
plugin=self.PLUGIN_NAME,
command=cmd,
arguments=argument_format,
flags=flag_format,
short_help=short_help,
long_help=long_help)
arguments = docopt(
usage,
argv=argv,
program="mesos " + self.PLUGIN_NAME + " " + cmd,
version=self.VERSION,
options_first=True)
if "alias" in self.COMMANDS[cmd]:
cmd = self.COMMANDS[cmd]["alias"]
self.__setup__(cmd, argv)
return getattr(self, cmd.replace("-", "_"))(arguments)
return self.main(["--help"])
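class ExamplePlugin(PluginBase):
    """
    Hypothetical plugin (not part of this module) sketching how a concrete
    plugin builds on PluginBase. The keys used in the COMMANDS entry
    ("arguments", "flags", "short_help", "long_help") are inferred from the
    cli.util.format_subcommands_help() call above and should be treated as an
    assumption rather than a documented contract.
    """
    COMMANDS = {
        "greet": {
            "arguments": "<name>",
            "flags": {},
            "short_help": "Print a greeting.",
            "long_help": "Print a greeting for the given <name>.",
        },
    }
    def greet(self, argv):
        # PluginBase.main() dispatches here via getattr(self, "greet")(arguments).
        print("Hello, {name}!".format(name=argv["<name>"]))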
|
reneploetz/mesos
|
src/python/cli_new/lib/cli/plugins/base.py
|
Python
|
apache-2.0
| 5,461 | 0.000366 |
"""
Django settings for dts_test_project project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
TENANT_APPS_DIR = os.path.join(BASE_DIR, os.pardir)
sys.path.insert(0, TENANT_APPS_DIR)
sys.path.insert(0, BASE_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'cl1)b#c&xmm36z3e(quna-vb@ab#&gpjtdjtpyzh!qn%bc^xxn'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
SHARED_APPS = (
'django_tenants', # mandatory
'customers', # you must list the app where your tenant model resides in
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
TENANT_APPS = (
'dts_test_app',
)
TENANT_MODEL = "customers.Client" # app.Model
TENANT_DOMAIN_MODEL = "customers.Domain" # app.Model
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
INSTALLED_APPS = list(SHARED_APPS) + [app for app in TENANT_APPS if app not in SHARED_APPS]
ROOT_URLCONF = 'dts_test_project.urls'
WSGI_APPLICATION = 'dts_test_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django_tenants.postgresql_backend',
'NAME': 'dts_test_project',
'USER': 'postgres',
'PASSWORD': os.environ.get('DATABASE_PASSWORD', 'root'),
'HOST': os.environ.get('DATABASE_HOST', 'localhost'),
'PORT': '',
}
}
DATABASE_ROUTERS = (
'django_tenants.routers.TenantSyncRouter',
)
MIDDLEWARE = (
'tenant_tutorial.middleware.TenantTutorialMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.contrib.messages.context_processors.messages',
)
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
|
sigma-geosistemas/django-tenants
|
dts_test_project/dts_test_project/settings.py
|
Python
|
mit
| 3,048 | 0.000656 |
#! /usr/bin/env python3
import argparse
import logging
import os
from utils import run
logging.basicConfig(level=logging.INFO)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('dist_dir')
parser.add_argument('version')
return parser.parse_args()
args = parse_args()
DIR = os.path.dirname(os.path.abspath(__file__))
BASE_DIR = os.path.dirname(DIR)
DIST_DIR = os.path.abspath(args.dist_dir)
DRIVE_C = os.path.join(DIST_DIR, 'drive_c')
WINE_RN_DIR = os.path.join(DRIVE_C, 'rednotebook')
WINE_RN_WIN_DIR = os.path.join(WINE_RN_DIR, 'win')
os.environ['WINEPREFIX'] = DIST_DIR
ISCC = os.path.join(DRIVE_C, 'Program Files (x86)', 'Inno Setup 5', 'ISCC.exe')
VERSION_PARAM = '/dREDNOTEBOOK_VERSION=%s' % args.version
run(['wine', ISCC, VERSION_PARAM, 'rednotebook.iss'], cwd=WINE_RN_WIN_DIR)
|
jendrikseipp/rednotebook-elementary
|
win/build-installer.py
|
Python
|
gpl-2.0
| 831 | 0.002407 |
from django.contrib import admin
from common.admin import AutoUserMixin
from licenses.models import License
class LicenseAdmin(AutoUserMixin, admin.ModelAdmin):
fieldsets = [
(None, {
'fields': ['added', 'name', 'url', 'creative_commons',
'cc_attribution', 'cc_noncommercial',
'cc_no_deriv', 'cc_share_alike']
}),
]
# fields
readonly_fields = ['added']
list_display = ['name', 'url']
# field display
list_filter = ['name', 'added']
search_fields = ['name', 'url']
admin.site.register(License, LicenseAdmin)
|
seanbell/opensurfaces
|
server/licenses/admin.py
|
Python
|
mit
| 621 | 0.00161 |
import numpy as np
from ase import Hartree
from gpaw.aseinterface import GPAW
from gpaw.lcao.overlap import NewTwoCenterIntegrals
from gpaw.utilities import unpack
from gpaw.utilities.tools import tri2full, lowdin
from gpaw.lcao.tools import basis_subset2, get_bfi, get_bfi2
from gpaw.coulomb import get_vxc as get_ks_xc
from gpaw.utilities.blas import r2k, gemm
from gpaw.lcao.projected_wannier import dots, condition_number, eigvals, \
get_bfs, get_lcao_projections_HSP
def get_rot(F_MM, V_oM, L):
eps_M, U_MM = np.linalg.eigh(F_MM)
indices = eps_M.real.argsort()[-L:]
U_Ml = U_MM[:, indices]
U_Ml /= np.sqrt(dots(U_Ml.T.conj(), F_MM, U_Ml).diagonal())
U_ow = V_oM.copy()
U_lw = np.dot(U_Ml.T.conj(), F_MM)
for col1, col2 in zip(U_ow.T, U_lw.T):
norm = np.linalg.norm(np.hstack((col1, col2)))
col1 /= norm
col2 /= norm
return U_ow, U_lw, U_Ml
def get_lcao_xc(calc, P_aqMi, bfs=None, spin=0):
nq = len(calc.wfs.ibzk_qc)
nao = calc.wfs.setups.nao
dtype = calc.wfs.dtype
if bfs is None:
bfs = get_bfs(calc)
if calc.density.nt_sg is None:
calc.density.interpolate()
nt_sg = calc.density.nt_sg
vxct_sg = calc.density.finegd.zeros(calc.wfs.nspins)
calc.hamiltonian.xc.calculate(calc.density.finegd, nt_sg, vxct_sg)
vxct_G = calc.wfs.gd.zeros()
calc.hamiltonian.restrict(vxct_sg[spin], vxct_G)
Vxc_qMM = np.zeros((nq, nao, nao), dtype)
for q, Vxc_MM in enumerate(Vxc_qMM):
bfs.calculate_potential_matrix(vxct_G, Vxc_MM, q)
tri2full(Vxc_MM, 'L')
# Add atomic PAW corrections
for a, P_qMi in P_aqMi.items():
D_sp = calc.density.D_asp[a][:]
H_sp = np.zeros_like(D_sp)
calc.hamiltonian.xc.calculate_paw_correction(calc.wfs.setups[a],
D_sp, H_sp)
H_ii = unpack(H_sp[spin])
for Vxc_MM, P_Mi in zip(Vxc_qMM, P_qMi):
Vxc_MM += dots(P_Mi, H_ii, P_Mi.T.conj())
return Vxc_qMM * Hartree
def get_xc2(calc, w_wG, P_awi, spin=0):
if calc.density.nt_sg is None:
calc.density.interpolate()
nt_g = calc.density.nt_sg[spin]
vxct_g = calc.density.finegd.zeros()
calc.hamiltonian.xc.get_energy_and_potential(nt_g, vxct_g)
vxct_G = calc.wfs.gd.empty()
calc.hamiltonian.restrict(vxct_g, vxct_G)
# Integrate pseudo part
Nw = len(w_wG)
xc_ww = np.empty((Nw, Nw))
r2k(.5 * calc.wfs.gd.dv, w_wG, vxct_G * w_wG, .0, xc_ww)
tri2full(xc_ww, 'L')
# Add atomic PAW corrections
for a, P_wi in P_awi.items():
D_sp = calc.density.D_asp[a][:]
H_sp = np.zeros_like(D_sp)
calc.wfs.setups[a].xc_correction.calculate_energy_and_derivatives(
D_sp, H_sp)
H_ii = unpack(H_sp[spin])
xc_ww += dots(P_wi, H_ii, P_wi.T.conj())
return xc_ww * Hartree
class ProjectedWannierFunctionsFBL:
"""PWF in the finite band limit.
::
--N
|w_w> = > |psi_n> U_nw
--n=1
"""
def __init__(self, V_nM, No, ortho=False):
Nw = V_nM.shape[1]
assert No <= Nw
V_oM, V_uM = V_nM[:No], V_nM[No:]
F_MM = np.dot(V_uM.T.conj(), V_uM)
U_ow, U_lw, U_Ml = get_rot(F_MM, V_oM, Nw - No)
self.U_nw = np.vstack((U_ow, dots(V_uM, U_Ml, U_lw)))
# stop here ?? XXX
self.S_ww = self.rotate_matrix(np.ones(1))
if ortho:
lowdin(self.U_nw, self.S_ww)
self.S_ww = np.identity(Nw)
self.norms_n = np.dot(self.U_nw, np.linalg.solve(
self.S_ww, self.U_nw.T.conj())).diagonal()
def rotate_matrix(self, A_nn):
if A_nn.ndim == 1:
return np.dot(self.U_nw.T.conj() * A_nn, self.U_nw)
else:
return dots(self.U_nw.T.conj(), A_nn, self.U_nw)
def rotate_projections(self, P_ani):
P_awi = {}
for a, P_ni in P_ani.items():
P_awi[a] = np.tensordot(self.U_nw, P_ni, axes=[[0], [0]])
return P_awi
def rotate_function(self, psit_nG):
return np.tensordot(self.U_nw, psit_nG, axes=[[0], [0]])
class ProjectedWannierFunctionsIBL:
"""PWF in the infinite band limit.
::
--No --Nw
|w_w> = > |psi_o> U_ow + > |f_M> U_Mw
--o=1 --M=1
"""
def __init__(self, V_nM, S_MM, No, lcaoindices=None):
Nw = V_nM.shape[1]
assert No <= Nw
self.V_oM, V_uM = V_nM[:No], V_nM[No:]
F_MM = S_MM - np.dot(self.V_oM.T.conj(), self.V_oM)
U_ow, U_lw, U_Ml = get_rot(F_MM, self.V_oM, Nw - No)
self.U_Mw = np.dot(U_Ml, U_lw)
self.U_ow = U_ow - np.dot(self.V_oM, self.U_Mw)
if lcaoindices is not None:
for i in lcaoindices:
self.U_ow[:, i] = 0.0
self.U_Mw[:, i] = 0.0
self.U_Mw[i, i] = 1.0
# stop here ?? XXX
self.S_ww = self.rotate_matrix(np.ones(1), S_MM)
P_uw = np.dot(V_uM, self.U_Mw)
self.norms_n = np.hstack((
np.dot(U_ow, np.linalg.solve(self.S_ww, U_ow.T.conj())).diagonal(),
np.dot(P_uw, np.linalg.solve(self.S_ww, P_uw.T.conj())).diagonal()))
def rotate_matrix(self, A_o, A_MM):
assert A_o.ndim == 1
A_ww = dots(self.U_ow.T.conj() * A_o, self.V_oM, self.U_Mw)
A_ww += np.conj(A_ww.T)
A_ww += np.dot(self.U_ow.T.conj() * A_o, self.U_ow)
A_ww += dots(self.U_Mw.T.conj(), A_MM, self.U_Mw)
return A_ww
def rotate_projections(self, P_aoi, P_aMi, indices=None):
if indices is None:
U_ow = self.U_ow
U_Mw = self.U_Mw
else:
U_ow = self.U_ow[:, indices]
U_Mw = self.U_Mw[:, indices]
P_awi = {}
for a, P_oi in P_aoi.items():
P_awi[a] = np.tensordot(U_Mw, P_aMi[a], axes=[[0], [0]])
if len(U_ow) > 0:
P_awi[a] += np.tensordot(U_ow, P_oi, axes=[[0], [0]])
return P_awi
def rotate_function(self, psit_oG, bfs, q=-1, indices=None):
if indices is None:
U_ow = self.U_ow
U_Mw = self.U_Mw
else:
U_ow = self.U_ow[:, indices]
U_Mw = self.U_Mw[:, indices]
w_wG = np.zeros((U_ow.shape[1],) + psit_oG.shape[1:])
if len(U_ow) > 0:
gemm(1., psit_oG, U_ow.T.copy(), 0., w_wG)
bfs.lcao_to_grid(U_Mw.T.copy(), w_wG, q)
return w_wG
class PWFplusLCAO(ProjectedWannierFunctionsIBL):
def __init__(self, V_nM, S_MM, No, pwfmask, lcaoindices=None):
Nw = V_nM.shape[1]
self.V_oM = V_nM[:No]
dtype = V_nM.dtype
# Do PWF optimization for pwfbasis submatrix only!
Npwf = len(pwfmask.nonzero()[0])
pwfmask2 = np.outer(pwfmask, pwfmask)
s_MM = S_MM[pwfmask2].reshape(Npwf, Npwf)
v_oM = self.V_oM[:, pwfmask]
f_MM = s_MM - np.dot(v_oM.T.conj(), v_oM)
nw = len(s_MM)
assert No <= nw
u_ow, u_lw, u_Ml = get_rot(f_MM, v_oM, nw - No)
u_Mw = np.dot(u_Ml, u_lw)
u_ow = u_ow - np.dot(v_oM, u_Mw)
# Determine U for full lcao basis
self.U_ow = np.zeros((No, Nw), dtype)
for U_w, u_w in zip(self.U_ow, u_ow):
np.place(U_w, pwfmask, u_w)
self.U_Mw = np.identity(Nw, dtype)
np.place(self.U_Mw, pwfmask2, u_Mw.flat)
if lcaoindices is not None:
for i in lcaoindices:
self.U_ow[:, i] = 0.0
self.U_Mw[:, i] = 0.0
self.U_Mw[i, i] = 1.0
self.S_ww = self.rotate_matrix(np.ones(1), S_MM)
self.norms_n = None
def set_lcaoatoms(calc, pwf, lcaoatoms):
ind = get_bfi(calc, lcaoatoms)
for i in ind:
pwf.U_ow[:, i] = 0.0
pwf.U_Mw[:, i] = 0.0
        pwf.U_Mw[i, i] = 1.0
class PWF2:
def __init__(self, gpwfilename, fixedenergy=0., spin=0, ibl=True,
basis='sz', zero_fermi=False, pwfbasis=None, lcaoatoms=None,
projection_data=None):
calc = GPAW(gpwfilename, txt=None, basis=basis)
assert calc.wfs.gd.comm.size == 1
assert calc.wfs.kpt_comm.size == 1
assert calc.wfs.band_comm.size == 1
if zero_fermi:
try:
Ef = calc.get_fermi_level()
except NotImplementedError:
Ef = calc.get_homo_lumo().mean()
else:
Ef = 0.0
self.ibzk_kc = calc.get_ibz_k_points()
self.nk = len(self.ibzk_kc)
self.eps_kn = [calc.get_eigenvalues(kpt=q, spin=spin) - Ef
for q in range(self.nk)]
self.M_k = [sum(eps_n <= fixedenergy) for eps_n in self.eps_kn]
print 'Fixed states:', self.M_k
self.calc = calc
self.dtype = self.calc.wfs.dtype
self.spin = spin
self.ibl = ibl
self.pwf_q = []
self.norms_qn = []
self.S_qww = []
self.H_qww = []
if ibl:
if pwfbasis is not None:
pwfmask = basis_subset2(calc.atoms.get_chemical_symbols(),
basis, pwfbasis)
if lcaoatoms is not None:
lcaoindices = get_bfi2(calc.atoms.get_chemical_symbols(),
basis,
lcaoatoms)
else:
lcaoindices = None
self.bfs = get_bfs(calc)
if projection_data is None:
V_qnM, H_qMM, S_qMM, self.P_aqMi = get_lcao_projections_HSP(
calc, bfs=self.bfs, spin=spin, projectionsonly=False)
else:
V_qnM, H_qMM, S_qMM, self.P_aqMi = projection_data
H_qMM -= Ef * S_qMM
for q, M in enumerate(self.M_k):
if pwfbasis is None:
pwf = ProjectedWannierFunctionsIBL(V_qnM[q], S_qMM[q], M,
lcaoindices)
else:
pwf = PWFplusLCAO(V_qnM[q], S_qMM[q], M, pwfmask,
lcaoindices)
self.pwf_q.append(pwf)
self.norms_qn.append(pwf.norms_n)
self.S_qww.append(pwf.S_ww)
self.H_qww.append(pwf.rotate_matrix(self.eps_kn[q][:M],
H_qMM[q]))
else:
if projection_data is None:
V_qnM = get_lcao_projections_HSP(calc, spin=spin)
else:
V_qnM = projection_data
for q, M in enumerate(self.M_k):
pwf = ProjectedWannierFunctionsFBL(V_qnM[q], M, ortho=False)
self.pwf_q.append(pwf)
self.norms_qn.append(pwf.norms_n)
self.S_qww.append(pwf.S_ww)
self.H_qww.append(pwf.rotate_matrix(self.eps_kn[q]))
for S in self.S_qww:
print 'Condition number: %0.1e' % condition_number(S)
def get_hamiltonian(self, q=0, indices=None):
if indices is None:
return self.H_qww[q]
else:
return self.H_qww[q].take(indices, 0).take(indices, 1)
def get_overlap(self, q=0, indices=None):
if indices is None:
return self.S_qww[q]
else:
return self.S_qww[q].take(indices, 0).take(indices, 1)
def get_projections(self, q=0, indices=None):
kpt = self.calc.wfs.kpt_u[self.spin * self.nk + q]
if not hasattr(self, 'P_awi'):
if self.ibl:
M = self.M_k[q]
self.P_awi = self.pwf_q[q].rotate_projections(
dict([(a, P_ni[:M]) for a, P_ni in kpt.P_ani.items()]),
dict([(a, P_qMi[q]) for a, P_qMi in self.P_aqMi.items()]),
indices)
else:
                # The FBL rotation does not support index selection.
                self.P_awi = self.pwf_q[q].rotate_projections(kpt.P_ani)
return self.P_awi
def get_orbitals(self, q=0, indices=None):
self.calc.wfs.initialize_wave_functions_from_restart_file()
kpt = self.calc.wfs.kpt_u[self.spin * self.nk + q]
if not hasattr(self, 'w_wG'):
if self.ibl:
self.w_wG = self.pwf_q[q].rotate_function(
kpt.psit_nG[:self.M_k[q]], self.bfs, q, indices)
else:
                # The FBL rotation does not support index selection.
                self.w_wG = self.pwf_q[q].rotate_function(kpt.psit_nG)
return self.w_wG
def get_Fcore(self, q=0, indices=None):
if indices is None:
Fcore_ww = np.zeros_like(self.H_qww[q])
else:
Fcore_ww = np.zeros((len(indices), len(indices)))
for a, P_wi in self.get_projections(q, indices).items():
X_ii = unpack(self.calc.wfs.setups[a].X_p)
Fcore_ww -= dots(P_wi, X_ii, P_wi.T.conj())
return Fcore_ww * Hartree
def get_eigs(self, q=0):
        return eigvals(self.H_qww[q], self.S_qww[q])
def get_condition_number(self, q=0):
return condition_number(self.S_qww[q])
def get_xc(self, q=0, indices=None):
#self.calc.density.ghat.set_positions(
# self.calc.atoms.get_scaled_positions() % 1.)
#self.calc.hamiltonian.poisson.initialize()
if self.ibl:
return get_xc2(self.calc, self.get_orbitals(q, indices),
self.get_projections(q, indices), self.spin)
else:
return self.pwf_q[q].rotate_matrix(get_ks_xc(self.calc,
spin=self.spin))
class LCAOwrap:
def __init__(self, calc, spin=0):
assert calc.wfs.gd.comm.size == 1
assert calc.wfs.kpt_comm.size == 1
assert calc.wfs.band_comm.size == 1
from gpaw.lcao.tools import get_lcao_hamiltonian
H_skMM, S_kMM = get_lcao_hamiltonian(calc)
self.calc = calc
self.dtype = calc.wfs.dtype
self.spin = spin
self.H_qww = H_skMM[spin]
self.S_qww = S_kMM
self.P_aqwi = calc.wfs.P_aqMi
self.Nw = self.S_qww.shape[-1]
for S in self.S_qww:
print 'Condition number: %0.1e' % condition_number(S)
def get_hamiltonian(self, q=0, indices=None):
if indices is None:
return self.H_qww[q]
else:
return self.H_qww[q].take(indices, 0).take(indices, 1)
def get_overlap(self, q=0, indices=None):
if indices is None:
return self.S_qww[q]
else:
return self.S_qww[q].take(indices, 0).take(indices, 1)
def get_projections(self, q=0, indices=None):
if indices is None:
return dict([(a, P_qwi[q]) for a, P_qwi in self.P_aqwi.items()])
else:
return dict([(a, P_qwi[q].take(indices, 0))
for a, P_qwi in self.P_aqwi.items()])
def get_orbitals(self, q=-1, indices=None):
assert q == -1
if indices is None:
indices = range(self.Nw)
Ni = len(indices)
C_wM = np.zeros((Ni, self.Nw), self.dtype)
for i, C_M in zip(indices, C_wM):
C_M[i] = 1.0
w_wG = self.calc.wfs.gd.zeros(Ni, dtype=self.dtype)
self.calc.wfs.basis_functions.lcao_to_grid(C_wM, w_wG, q=-1)
return w_wG
def get_Fcore(self, q=0, indices=None):
if indices is None:
Fcore_ww = np.zeros_like(self.H_qww[q])
else:
Fcore_ww = np.zeros((len(indices), len(indices)))
for a, P_wi in self.get_projections(q, indices).items():
if self.calc.wfs.setups[a].type != 'ghost':
X_ii = unpack(self.calc.wfs.setups[a].X_p)
Fcore_ww -= dots(P_wi, X_ii, P_wi.T.conj())
return Fcore_ww * Hartree
def get_xc(self, q=0, indices=None):
if not hasattr(self, 'Vxc_qww'):
self.Vxc_qww = get_lcao_xc(self.calc, self.P_aqwi,
bfs=self.calc.wfs.basis_functions,
spin=self.spin)
if indices is None:
return self.Vxc_qww[q]
else:
return self.Vxc_qww[q].take(indices, 0).take(indices, 1)
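def example_pwf2_usage():
    """Hypothetical usage sketch (not part of the original module): build
    projected Wannier functions from a stored calculation and pull out the
    Hamiltonian and overlap matrices in the PWF basis for the first q-point.
    The filename, energy window and basis name are placeholders."""
    pwf = PWF2('scatterer.gpw', fixedenergy=0.1, basis='szp')
    H_ww = pwf.get_hamiltonian(q=0)  # Hamiltonian in the PWF basis
    S_ww = pwf.get_overlap(q=0)  # overlap matrix in the PWF basis
    return H_ww, S_ww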
|
ajylee/gpaw-rtxs
|
gpaw/lcao/pwf2.py
|
Python
|
gpl-3.0
| 16,299 | 0.00135 |
import unittest, uuid
from nixie.core import Nixie, KeyError
class NixieErrorsTestCase(unittest.TestCase):
def test_read_missing(self):
nx = Nixie()
self.assertIsNone(nx.read('missing'))
def test_update_missing(self):
nx = Nixie()
with self.assertRaises(KeyError):
nx.update('missing')
def test_update_with_wrong_value(self):
nx = Nixie()
key = nx.create()
with self.assertRaises(ValueError):
nx.update(key, 'a')
def test_delete_missing(self):
nx = Nixie()
with self.assertRaises(KeyError):
nx.delete('missing')
|
eiri/nixie
|
tests/test_nixie_errors.py
|
Python
|
mit
| 579 | 0.015544 |
from django.shortcuts import render_to_response
from django.core.context_processors import csrf
from django.conf import settings
def my_render(request, template, context={}):
context.update(csrf(request))
context['STATIC_URL'] = settings.STATIC_URL
context['flash'] = request.get_flash()
context['user'] = request.user
context['user_perfil'] = request.get_perfil()
context['credenciales'] = set(request.get_credenciales())
context['settings'] = settings
return render_to_response(template, context)
|
MERegistro/meregistro
|
meregistro/shortcuts.py
|
Python
|
bsd-3-clause
| 533 | 0 |
# -*- coding: utf-8 -*-
from django import forms
from django.conf import settings
from cmskit.articles.models import Index, Article
from cms.plugin_pool import plugin_pool
from cms.plugins.text.widgets.wymeditor_widget import WYMEditor
from cms.plugins.text.settings import USE_TINYMCE
def get_editor_widget():
"""
Returns the Django form Widget to be used for
the text area
"""
#plugins = plugin_pool.get_text_enabled_plugins(self.placeholder, self.page)
if USE_TINYMCE and "tinymce" in settings.INSTALLED_APPS:
from cms.plugins.text.widgets.tinymce_widget import TinyMCEEditor
return TinyMCEEditor()
else:
return WYMEditor()
class IndexForm(forms.ModelForm):
class Meta:
model = Index
def __init__(self, *args, **kwargs):
super(IndexForm, self).__init__(*args, **kwargs)
choices = [self.fields['page'].choices.__iter__().next()]
for page in self.fields['page'].queryset:
choices.append(
(page.id, ''.join(['- '*page.level, page.__unicode__()]))
)
self.fields['page'].choices = choices
class ArticleForm(forms.ModelForm):
body = forms.CharField(widget=get_editor_widget())
class Meta:
model = Article
|
ozgurgunes/django-cmskit
|
cmskit/articles/forms.py
|
Python
|
mit
| 1,271 | 0.008655 |
"""
"""
import os
import sys
import datetime
import logging
import argparse
import threading
import time
import socket
import json
import base64
import subprocess
import warnings
import numpy as np
import pandas as pd
from scipy.stats import linregress
import tables
warnings.simplefilter('ignore', category=tables.NaturalNameWarning)
from autopilot import prefs
from autopilot.core.loggers import init_logger
if __name__ == '__main__':
# Parse arguments - this should have been called with a .json prefs file passed
# We'll try to look in the default location first
parser = argparse.ArgumentParser(description="Run an autopilot")
parser.add_argument('-f', '--prefs', help="Location of .json prefs file (created during setup_autopilot.py)")
args = parser.parse_args()
if not args.prefs:
prefs_file = '/usr/autopilot/prefs.json'
if not os.path.exists(prefs_file):
raise Exception("No Prefs file passed, and file not in default location")
        warnings.warn('No prefs file passed, using default location. Should pass explicitly with -f/--prefs')
else:
prefs_file = args.prefs
prefs.init(prefs_file)
if prefs.get('AUDIOSERVER') or 'AUDIO' in prefs.get('CONFIG'):
if prefs.get('AUDIOSERVER') == 'pyo':
from autopilot.stim.sound import pyoserver
else:
from autopilot.stim.sound import jackclient
from autopilot.core.networking import Pilot_Station, Net_Node, Message
from autopilot import external
from autopilot import tasks
from autopilot.hardware import gpio
########################################
class Pilot:
"""
Drives the Raspberry Pi
Coordinates the hardware and networking objects to run tasks.
Typically used with a connection to a :class:`.Terminal` object to
coordinate multiple subjects and tasks, but a high priority for future releases
is to do the (trivial amount of) work to make this class optionally
standalone.
Called as a module with the -f flag to give the location of a prefs file, eg::
python pilot.py -f prefs_file.json
if the -f flag is not passed, looks in the default location for prefs
(ie. `/usr/autopilot/prefs.json`)
Needs the following prefs (typically established by :mod:`.setup.setup_pilot`):
* **NAME** - The name used by networking objects to address this Pilot
* **BASEDIR** - The base directory for autopilot files (/usr/autopilot)
* **PUSHPORT** - Router port used by the Terminal we connect to.
* **TERMINALIP** - IP Address of our upstream Terminal.
* **MSGPORT** - Port used by our own networking object
* **HARDWARE** - Any hardware and its mapping to GPIO pins. No pins are required to be set, instead each
task defines which pins it needs. Currently the default configuration asks for
* POKES - :class:`.hardware.Beambreak`
* LEDS - :class:`.hardware.LED_RGB`
* PORTS - :class:`.hardware.Solenoid`
* **AUDIOSERVER** - Which type, if any, audio server to use (`'jack'`, `'pyo'`, or `'none'`)
* **NCHANNELS** - Number of audio channels
* **FS** - Sampling rate of audio output
* **JACKDSTRING** - string used to start the jackd server, see `the jack manpages <https://linux.die.net/man/1/jackd>`_ eg::
jackd -P75 -p16 -t2000 -dalsa -dhw:sndrpihifiberry -P -rfs -n3 -s &
* **PIGPIOMASK** - Binary mask of pins for pigpio to control, see `the pigpio docs <http://abyz.me.uk/rpi/pigpio/pigpiod.html>`_ , eg::
1111110000111111111111110000
* **PULLUPS** - Pin (board) numbers to pull up on boot
* **PULLDOWNS** - Pin (board) numbers to pull down on boot.
Attributes:
name (str): The name used to identify ourselves in :mod:`.networking`
task (:class:`.tasks.Task`): The currently instantiated task
running (:class:`threading.Event`): Flag used to control task running state
stage_block (:class:`threading.Event`): Flag given to a task to signal when task stages finish
file_block (:class:`threading.Event`): Flag used to wait for file transfers
state (str): 'RUNNING', 'STOPPING', 'IDLE' - signals what this pilot is up to
pulls (list): list of :class:`~.hardware.Pull` objects to keep pins pulled up or down
server: Either a :func:`~.sound.pyoserver.pyo_server` or :class:`~.jackclient.JackClient` , sound server.
node (:class:`.networking.Net_Node`): Our Net_Node we use to communicate with our main networking object
networking (:class:`.networking.Pilot_Station`): Our networking object to communicate with the outside world
ip (str): Our IPv4 address
listens (dict): Dictionary mapping message keys to methods used to process them.
logger (:class:`logging.Logger`): Used to log messages and network events.
"""
logger = None
# Events for thread handling
running = None
stage_block = None
file_block = None
quitting = None
"""mp.Event to signal when process is quitting"""
# networking - our internal and external messengers
node = None
networking = None
# audio server
server = None
def __init__(self, splash=True):
if splash:
with open(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'setup', 'welcome_msg.txt'), 'r') as welcome_f:
welcome = welcome_f.read()
print('')
for line in welcome.split('\n'):
print(line)
print('')
sys.stdout.flush()
self.name = prefs.get('NAME')
if prefs.get('LINEAGE') == "CHILD":
self.child = True
self.parentid = prefs.get('PARENTID')
else:
self.child = False
self.parentid = 'T'
self.logger = init_logger(self)
self.logger.debug('pilot logger initialized')
# Locks, etc. for threading
self.running = threading.Event() # Are we running a task?
self.stage_block = threading.Event() # Are we waiting on stage triggers?
self.file_block = threading.Event() # Are we waiting on file transfer?
self.quitting = threading.Event()
self.quitting.clear()
# init pigpiod process
self.init_pigpio()
# Init audio server
if prefs.get('AUDIOSERVER') or 'AUDIO' in prefs.get('CONFIG'):
self.init_audio()
# Init Station
# Listen dictionary - what do we do when we receive different messages?
self.listens = {
'START': self.l_start, # We are being passed a task and asked to start it
'STOP' : self.l_stop, # We are being asked to stop running our task
'PARAM': self.l_param, # A parameter is being changed
'CALIBRATE_PORT': self.l_cal_port, # Calibrate a water port
'CALIBRATE_RESULT': self.l_cal_result, # Compute curve and store result
'BANDWIDTH': self.l_bandwidth # test our bandwidth
}
# spawn_network gives us the independent message-handling process
self.networking = Pilot_Station()
self.networking.start()
self.node = Net_Node(id = "_{}".format(self.name),
upstream = self.name,
port = prefs.get('MSGPORT'),
listens = self.listens,
instance=False)
self.logger.debug('pilot networking initialized')
# if we need to set pins pulled up or down, do that now
self.pulls = []
if prefs.get( 'PULLUPS'):
for pin in prefs.get('PULLUPS'):
self.pulls.append(gpio.Digital_Out(int(pin), pull='U', polarity=0))
if prefs.get( 'PULLDOWNS'):
for pin in prefs.get('PULLDOWNS'):
self.pulls.append(gpio.Digital_Out(int(pin), pull='D', polarity=1))
self.logger.debug('pullups and pulldowns set')
# check if the calibration file needs to be updated
# Set and update state
self.state = 'IDLE' # or 'Running'
self.update_state()
# Since we're starting up, handshake to introduce ourselves
self.ip = self.get_ip()
self.handshake()
self.logger.debug('handshake sent')
#self.blank_LEDs()
# TODO Synchronize system clock w/ time from terminal.
#################################################################
# Station
#################################################################
def get_ip(self):
"""
Get our IP
"""
# shamelessly stolen from https://www.w3resource.com/python-exercises/python-basic-exercise-55.php
# variables are badly named because this is just a rough unwrapping of what was a monstrous one-liner
# get ips that aren't the loopback
unwrap00 = [ip for ip in socket.gethostbyname_ex(socket.gethostname())[2] if not ip.startswith("127.")][:1]
        # connect a UDP socket to a public DNS server (8.8.8.8:53) and read back the local address the OS picked for it; that is our outward-facing IP
unwrap01 = [[(s.connect(('8.8.8.8', 53)), s.getsockname()[0], s.close()) for s in [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1]]
unwrap2 = [l for l in (unwrap00,unwrap01) if l][0][0]
return unwrap2
def handshake(self):
"""
Send the terminal our name and IP to signal that we are alive
"""
# send the terminal some information about ourselves
# TODO: Report any calibrations that we have
hello = {'pilot':self.name, 'ip':self.ip, 'state':self.state}
self.node.send(self.parentid, 'HANDSHAKE', value=hello)
def update_state(self):
"""
        Send our current state to the Terminal;
our Station object will cache this and will handle any
future requests.
"""
self.node.send(self.parentid, 'STATE', self.state, flags={'NOLOG':True})
def l_start(self, value):
"""
Start running a task.
Get the task object by using `value['task_type']` to select from
:data:`.tasks.TASK_LIST` , then feed the rest of `value` as kwargs
into the task object.
Calls :meth:`.autopilot.run_task` in a new thread
Args:
value (dict): A dictionary of task parameters
"""
# TODO: If any of the sounds are 'file,' make sure we have them. If not, request them.
# Value should be a dict of protocol params
# The networking object should have already checked that we have all the files we need
if self.state == "RUNNING" or self.running.is_set():
self.logger.warning("Asked to a run a task when already running")
return
self.state = 'RUNNING'
self.running.set()
try:
# Get the task object by its type
if 'child' in value.keys():
task_class = tasks.CHILDREN_LIST[value['task_type']]
else:
task_class = tasks.TASK_LIST[value['task_type']]
# Instantiate the task
self.stage_block.clear()
# Make a group for this subject if we don't already have one
self.subject = value['subject']
prefs.set('SUBJECT', self.subject)
# Run the task and tell the terminal we have
# self.running.set()
threading.Thread(target=self.run_task, args=(task_class, value)).start()
self.update_state()
except Exception as e:
self.state = "IDLE"
self.logger.exception("couldn't start task: {}".format(e))
# TODO: Send a message back to the terminal with the runtime if there is one so it can handle timed stops
def l_stop(self, value):
"""
Stop the task.
Clear the running event, set the stage block.
TODO:
Do a coherence check between our local file and the Terminal's data.
Args:
value: ignored
"""
# Let the terminal know we're stopping
# (not stopped yet because we'll still have to sync data, etc.)
self.state = 'STOPPING'
self.update_state()
# We just clear the stage block and reset the running flag here
# and call the cleanup routine from run_task so it can exit cleanly
self.running.clear()
self.stage_block.set()
# TODO: Cohere here before closing file
if hasattr(self, 'h5f'):
self.h5f.close()
self.state = 'IDLE'
self.update_state()
def l_param(self, value):
"""
Change a task parameter mid-run
Warning:
Not Implemented
Args:
value:
"""
pass
def l_cal_port(self, value):
"""
Initiate the :meth:`.calibrate_port` routine.
Args:
value (dict): Dictionary of values defining the port calibration to be run, including
- ``port`` - which port to calibrate
- ``n_clicks`` - how many openings should be performed
                - ``dur`` - how long the valve should be open
                - ``click_iti`` - 'inter-trial interval', or how long we should wait between valve openings.
"""
port = value['port']
n_clicks = value['n_clicks']
open_dur = value['dur']
iti = value['click_iti']
threading.Thread(target=self.calibrate_port,args=(port, n_clicks, open_dur, iti)).start()
def calibrate_port(self, port_name, n_clicks, open_dur, iti):
"""
Run port calibration routine
Open a :class:`.hardware.gpio.Solenoid` repeatedly,
measure volume of water dispersed, compute lookup table mapping
valve open times to volume.
Continuously sends progress of test with ``CAL_PROGRESS`` messages
Args:
port_name (str): Port name as specified in ``prefs``
n_clicks (int): number of times the valve should be opened
open_dur (int, float): how long the valve should be opened for in ms
iti (int, float): how long we should :func:`~time.sleep` between openings
"""
pin_num = prefs.get('HARDWARE')['PORTS'][port_name]
port = gpio.Solenoid(pin_num, duration=int(open_dur))
msg = {'click_num': 0,
'pilot': self.name,
'port': port_name
}
iti = float(iti)/1000.0
cal_name = "Cal_{}".format(self.name)
for i in range(int(n_clicks)):
port.open()
msg['click_num'] = i + 1
self.node.send(to=cal_name, key='CAL_PROGRESS',
value= msg)
time.sleep(iti)
port.release()
def l_cal_result(self, value):
"""
Save the results of a port calibration
"""
# files for storing raw and fit calibration results
cal_fn = os.path.join(prefs.get('BASEDIR'), 'port_calibration.json')
if os.path.exists(cal_fn):
try:
with open(cal_fn, 'r') as cal_file:
calibration = json.load(cal_file)
except ValueError:
                # usually means no json could be decoded; that's fine, calibrations aren't expensive
calibration = {}
else:
calibration = {}
for port, results in value.items():
if port in calibration.keys():
calibration[port].extend(results)
else:
calibration[port] = results
with open(cal_fn, 'w+') as cal_file:
json.dump(calibration, cal_file)
def l_bandwidth(self, value):
"""
        Send a stream of test messages at a constant rate according to the settings in value
"""
#turn off logging for now
self.networking.logger.setLevel(logging.ERROR)
self.node.logger.setLevel(logging.ERROR)
n_msg = int(value['n_msg'])
rate = float(value['rate'])
payload = int(value['payload'])
confirm = bool(value['confirm'])
payload = np.zeros(payload*1024, dtype=np.bool)
payload_size = sys.getsizeof(payload)
message = {
'pilot': self.name,
'payload': payload,
}
# make a fake message to test how large the serialized message is
        test_msg = Message(to='bandwidth', key='BANDWIDTH_MSG', value=message, repeat=confirm, flags={'MINPRINT':True},
id="test_message", sender="test_sender")
msg_size = sys.getsizeof(test_msg.serialize())
message['message_size'] = msg_size
message['payload_size'] = payload_size
if rate > 0:
spacing = 1.0/rate
else:
spacing = 0
        # wait briefly (250 ms) to let the terminal get messages out
time.sleep(0.25)
if spacing > 0:
last_message = time.perf_counter()
for i in range(n_msg):
message['n_msg'] = i
message['timestamp'] = datetime.datetime.now().isoformat()
self.node.send(to='bandwidth',key='BANDWIDTH_MSG',
value=message, repeat=confirm, flags={'MINPRINT':True})
this_message = time.perf_counter()
waitfor = np.clip(spacing-(this_message-last_message), 0, spacing)
#time.sleep(np.random.exponential(1.0/rate))
# just do linear spacing lol.
time.sleep(waitfor)
last_message = time.perf_counter()
else:
for i in range(n_msg):
message['n_msg'] = i
message['timestamp'] = datetime.datetime.now().isoformat()
self.node.send(to='bandwidth',key='BANDWIDTH_MSG',
value=message, repeat=confirm, flags={'MINPRINT':True})
self.node.send(to='bandwidth',key='BANDWIDTH_MSG', value={'pilot':self.name, 'test_end':True,
'rate': rate, 'payload':payload,
'n_msg':n_msg, 'confirm':confirm},
flags={'MINPRINT':True})
#self.networking.set_logging(True)
#self.node.do_logging.set()
def calibration_curve(self, path=None, calibration=None):
"""
        Compute a linear fit mapping desired reward volume to valve open duration.
        Args:
            calibration: Calibration results to fit; if not given, they are loaded from a file.
            path: If present, use the calibration file specified, otherwise use the default location.
"""
lut_fn = os.path.join(prefs.get('BASEDIR'), 'port_calibration_fit.json')
if not calibration:
# if we weren't given calibration results, load them
if path:
open_fn = path
else:
open_fn = os.path.join(prefs.get('BASEDIR'), "port_calibration.json")
with open(open_fn, 'r') as open_f:
calibration = json.load(open_f)
luts = {}
for port, samples in calibration.items():
sample_df = pd.DataFrame(samples)
# TODO: Filter for only most recent timestamps
# volumes are saved in mL because of how they are measured, durations are stored in ms
# but reward volumes are typically in the uL range, so we make the conversion
# by multiplying by 1000
line_fit = linregress((sample_df['vol']/sample_df['n_clicks'])*1000., sample_df['dur'])
luts[port] = {'intercept': line_fit.intercept,
'slope': line_fit.slope}
# write to file, overwriting any previous
with open(lut_fn, 'w') as lutf:
json.dump(luts, lutf)
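        # Sketch of how the saved fit could be applied later; the port name
        # and volume are illustrative, units match those used above (uL -> ms):
        #     with open(lut_fn, 'r') as lut_file:
        #         lut = json.load(lut_file)
        #     dur_ms = lut['L']['intercept'] + lut['L']['slope'] * vol_ul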
#################################################################
# Hardware Init
#################################################################
def init_pigpio(self):
try:
self.pigpiod = external.start_pigpiod()
self.logger.debug('pigpio daemon started')
except ImportError as e:
self.pigpiod = None
self.logger.exception(e)
def init_audio(self):
"""
Initialize an audio server depending on the value of
`prefs.get('AUDIOSERVER')`
* 'pyo' = :func:`.pyoserver.pyo_server`
* 'jack' = :class:`.jackclient.JackClient`
"""
if prefs.get('AUDIOSERVER') == 'pyo':
self.server = pyoserver.pyo_server()
self.logger.info("pyo server started")
elif prefs.get('AUDIOSERVER') in ('jack', True):
self.jackd = external.start_jackd()
self.server = jackclient.JackClient()
self.server.start()
self.logger.info('Started jack audio server')
def blank_LEDs(self):
"""
If any 'LEDS' are defined in `prefs.get('HARDWARE')` ,
instantiate them, set their color to [0,0,0],
and then release them.
"""
if 'LEDS' not in prefs.get('HARDWARE').keys():
return
for position, pins in prefs.get('HARDWARE')['LEDS'].items():
led = gpio.LED_RGB(pins=pins)
time.sleep(1.)
led.set_color(col=[0,0,0])
led.release()
#################################################################
# Trial Running and Management
#################################################################
def open_file(self):
"""
Setup a table to store data locally.
Opens `prefs.get('DATADIR')/local.h5`, creates a group for the current subject,
a new table for the current day.
.. todo::
This needs to be unified with a general file constructor abstracted from :class:`.Subject` so it doesn't reimplement file creation!!
Returns:
(:class:`tables.File`, :class:`tables.Table`,
:class:`tables.tableextension.Row`): The file, table, and row for the local data table
"""
local_file = os.path.join(prefs.get('DATADIR'), 'local.h5')
try:
h5f = tables.open_file(local_file, mode='a')
except (IOError, tables.HDF5ExtError) as e:
self.logger.warning("local file was broken, making new")
self.logger.warning(e)
os.remove(local_file)
h5f = tables.open_file(local_file, mode='w')
os.chmod(local_file, 0o777)
try:
h5f.create_group("/", self.subject, "Local Data for {}".format(self.subject))
except tables.NodeError:
# already made it
pass
subject_group = h5f.get_node('/', self.subject)
# Make a table for today's data, appending a conflict-avoidance int if one already exists
datestring = datetime.date.today().isoformat()
conflict_avoid = 0
while datestring in subject_group:
conflict_avoid += 1
datestring = datetime.date.today().isoformat() + '-' + str(conflict_avoid)
# Get data table descriptor
if hasattr(self.task, 'TrialData'):
table_descriptor = self.task.TrialData
table = h5f.create_table(subject_group, datestring, table_descriptor,
"Subject {} on {}".format(self.subject, datestring))
# The Row object is what we write data into as it comes in
row = table.row
return h5f, table, row
else:
return h5f, None, None
def run_task(self, task_class, task_params):
"""
Called in a new thread, run the task.
Opens a file with :meth:`~.autopilot.open_file` , then
continually calls `task.stages.next` to process stages.
Sends data back to the terminal between every stage.
Waits for the task to clear `stage_block` between stages.
"""
# TODO: give a net node to the Task class and let the task run itself.
# Run as a separate thread, just keeps calling next() and shoveling data
self.task = task_class(stage_block=self.stage_block, **task_params)
# do we expect TrialData?
trial_data = False
if hasattr(self.task, 'TrialData'):
trial_data = True
# Open local file for saving
h5f, table, row = self.open_file()
# TODO: Init sending continuous data here
self.logger.debug('Starting task loop')
while True:
# Calculate next stage data and prep triggers
data = next(self.task.stages)() # Double parens because next just gives us the function, we still have to call it
self.logger.debug('called stage method')
if data:
data['pilot'] = self.name
data['subject'] = self.subject
# Send data back to terminal (subject is identified by the networking object)
self.node.send('T', 'DATA', data)
# Store a local copy
                # the task class has a class variable TrialData that lets us know which data the row is expecting
if trial_data:
for k, v in data.items():
if k in self.task.TrialData.columns.keys():
row[k] = v
# If the trial is over (either completed or bailed), flush the row
if 'TRIAL_END' in data.keys():
row.append()
table.flush()
self.logger.debug('sent data')
# Wait on the stage lock to clear
self.stage_block.wait()
self.logger.debug('stage lock passed')
            # If the running flag has been cleared, we're closing.
if not self.running.is_set():
self.task.end()
self.task = None
row.append()
table.flush()
self.logger.debug('stopping task')
break
h5f.flush()
h5f.close()
if __name__ == "__main__":
try:
a = Pilot()
a.quitting.wait()
except KeyboardInterrupt:
a.quitting.set()
sys.exit()
|
wehr-lab/RPilot
|
autopilot/core/pilot.py
|
Python
|
gpl-3.0
| 26,194 | 0.005612 |
# -*- coding: ascii -*-
r"""
:Copyright:
Copyright 2014 - 2016
Andr\xe9 Malo or his licensors, as applicable
:License:
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
======
Jobs
======
Jobs have been entered into the scheduler once. They may even be finished
already.
"""
if __doc__: # pragma: no cover
# pylint: disable = redefined-builtin
__doc__ = __doc__.encode('ascii').decode('unicode_escape')
__author__ = r"Andr\xe9 Malo".encode('ascii').decode('unicode_escape')
__docformat__ = "restructuredtext en"
import collections as _collections
import itertools as _it
from .. import _graph
from .. import interfaces as _interfaces
from .. import _lock
#: Exception raised on cycles, when a todo DAG is resolved
DependencyCycle = _graph.DependencyCycle
#: Job ID sequence
#:
#: :Type: callable
_gen_id = _it.count(1).next
def last_job_id():
"""
Determine the largest job ID assigned until now
:Return: The ID. It's ``0``, if no job ID was assigned until now (job IDs
start with ``1``)
:Rtype: ``id``
"""
# this inspects the counter iterable by calling pickling methods and
# retrieving the next value from there and then subtracting one.
# __reduce__ returns the factory ('count') and the argument tuple
# containing the initial value (advanced with each call to next())
# pylint: disable = no-member
return _gen_id.__self__.__reduce__()[1][0] - 1
class Job(object):
"""
    Job after it has been scheduled.
:See: `JobInterface`
"""
__implements__ = [_interfaces.JobInterface]
def __init__(self, job_id, desc, group, locks, importance, not_before,
extra, predecessors, attempts):
"""
Initialization
:Parameters:
`job_id` : ``int``
Job ID
`desc` : `TodoDescription`
Job description
`group` : ``str``
Job Group
`locks` : iterable
            List of locks that need to be acquired (``(`LockInterface`, ...)``)
`importance` : ``int``
Job importance
`not_before` : various
execute job not before this time. Special formats are allowed:
``int``
Number of seconds from now (delay)
``datetime.datetime``
a specific point in time (server time). Use UTC if you can. For
naive date times, UTC is assumed.
If omitted or ``None``, ``0`` is assumed.
`extra` : ``dict``
Extra job data
`predecessors` : iterable
List of jobs to be run successfully before this one
(``(int, ...)``)
`attempts` : ``list``
execution attempts (``[ExecutionAttemptInterface, ...]``)
"""
self.id = job_id
self.desc = desc
self.group = group
self.locks = _lock.validate(locks)
self.locks_waiting = None
self.importance = importance
self.extra = extra
self.predecessors = set()
self.predecessors_waiting = None
self.attempts = attempts
self.not_before = not_before
for item in predecessors or ():
self.depend_on(item)
def depend_on(self, job_id):
"""
Add predecessor job ID
Duplicates are silently ignored.
:See: `interfaces.JobInterface.depend_on`
"""
assert self.predecessors_waiting is None
try:
job_id = int(job_id)
except TypeError:
raise ValueError("Invalid job_id: %r" % (job_id,))
if job_id < 1 or job_id >= self.id:
raise ValueError("Invalid job_id: %r" % (job_id,))
self.predecessors.add(job_id)
def job_from_todo(todo):
"""
Construct Job from Todo
:Parameters:
`todo` : `Todo`
Todo to construct from
:Return: New job instance
:Rtype: `JobInterface`
"""
return Job(
_gen_id(), todo.desc, todo.group, todo.locks, todo.importance,
todo.not_before, {}, set(), []
)
def joblist_from_todo(todo):
"""
Construct a list of jobs from Todo graph
:Parameters:
`todo` : `Todo`
todo to be inspected.
:Return: List of jobs (``[JobInterface, ...]``)
:Rtype: ``list``
"""
jobs, todos, virtuals = [], {}, {}
toinspect = _collections.deque([(todo, None)])
graph = _graph.DependencyGraph()
# 1) fill the dependency graph with the todo nodes (detects cycles, too)
try:
while toinspect:
todo, parent = toinspect.pop()
todo_id = id(todo)
if todo_id in todos:
virtual_id, pre, _ = todos[todo_id]
else:
pre = []
virtual_id = len(virtuals)
todos[todo_id] = virtual_id, pre, todo
virtuals[virtual_id] = todo_id
for parent_id in todo.predecessors():
graph.add((False, parent_id), (True, virtual_id))
pre.append((False, parent_id))
for succ in todo.successors():
toinspect.appendleft((succ, (True, virtual_id)))
if parent is not None:
graph.add(parent, (True, virtual_id))
pre.append(parent)
else:
graph.add((False, None), (True, virtual_id))
except DependencyCycle as e:
# remap to our input (todos and not some weird virtual IDs)
raise DependencyCycle([
todos[virtuals[tup[1]]][2] for tup in e.args[0]
])
# 2) resolve the graph (create topological order)
id_mapping = {}
for is_virtual, virtual_id in graph.resolve():
if is_virtual:
_, pres, todo = todos[virtuals[virtual_id]]
job = job_from_todo(todo)
for is_virtual, pre in pres:
if is_virtual:
pre = id_mapping[pre]
job.depend_on(pre)
id_mapping[virtual_id] = job.id
jobs.append(job)
return jobs
|
ndparker/wolfe
|
wolfe/scheduler/_job.py
|
Python
|
apache-2.0
| 6,561 | 0 |
from builtins import str
from builtins import object
import smtplib
import email.utils
from biomaj.workflow import Workflow
import logging
import sys
if sys.version < '3':
from email.MIMEText import MIMEText
else:
from email.mime.text import MIMEText
class Notify(object):
"""
Send notifications
"""
@staticmethod
def notifyBankAction(bank):
if not bank.config.get('mail.smtp.host') or bank.session is None:
logging.info('Notify:none')
return
admins = bank.config.get('mail.admin')
if not admins:
logging.info('Notify: no mail.admin defined')
return
admin_list = admins.split(',')
logging.info('Notify:' + bank.config.get('mail.admin'))
mfrom = bank.config.get('mail.from')
log_file = bank.config.log_file
msg = MIMEText('')
if log_file:
fp = None
if sys.version < '3':
fp = open(log_file, 'rb')
else:
fp = open(log_file, 'r')
msg = MIMEText(fp.read(2000000))
fp.close()
msg['From'] = email.utils.formataddr(('Author', mfrom))
msg['Subject'] = 'BANK[' + bank.name + '] - STATUS[' + str(bank.session.get_status(Workflow.FLOW_OVER)) + '] - UPDATE[' + str(bank.session.get('update')) + '] - REMOVE[' + str(bank.session.get('remove')) + ']' + ' - RELEASE[' + str(bank.session.get('release')) + ']'
logging.info(msg['subject'])
server = None
for mto in admin_list:
            # replace (not append) the To header on each iteration
            del msg['To']
            msg['To'] = email.utils.formataddr(('Recipient', mto))
try:
server = smtplib.SMTP(bank.config.get('mail.smtp.host'))
if bank.config.get('mail.tls') is not None and str(bank.config.get('mail.tls')) == 'true':
server.starttls()
if bank.config.get('mail.user') is not None and str(bank.config.get('mail.user')) != '':
server.login(bank.config.get('mail.user'), bank.config.get('mail.password'))
server.sendmail(mfrom, [mto], msg.as_string())
except Exception as e:
logging.error('Could not send email: ' + str(e))
finally:
if server is not None:
server.quit()
|
horkko/biomaj
|
biomaj/notify.py
|
Python
|
agpl-3.0
| 2,306 | 0.001735 |
from __future__ import print_function
import os
twyg = ximport('twyg')
# reload(twyg)
datafiles = list(filelist( os.path.abspath('example-data')))
datafile = choice(datafiles)
configs = [ 'boxes', 'bubbles', 'edge', 'flowchart', 'hive', 'ios', 'jellyfish',
'junction1', 'junction2', 'modern', 'nazca', 'rounded', 'square',
'synapse', 'tron']
colorschemes = [ 'aqua', 'azure', 'bordeaux', 'clay', 'cmyk', 'cobalt', 'colors21',
'crayons', 'earth', 'forest', 'grape', 'honey', 'inca', 'jelly', 'kelp',
'mango', 'mellow', 'merlot', 'milkshake', 'mint-gray', 'mint', 'moon',
'mustard', 'neo', 'orbit', 'pastels', 'quartz', 'salmon', 'tentacle',
'terracotta', 'turquoise', 'violet']
config = choice(configs)
colorscheme = choice(colorschemes)
margins = ['10%', '5%']
print( config )
print( colorscheme )
print( os.path.basename(datafile) )
print()
twyg.generate_output_nodebox(datafile, config, colorscheme=colorscheme, margins=margins)
|
karstenw/nodebox-pyobjc
|
examples/New Functions/twyg/demo1.py
|
Python
|
mit
| 1,032 | 0.014535 |
# SPDX-License-Identifier: MIT
"""
Unit tests for slots-related functionality.
"""
import pickle
import sys
import types
import weakref
import pytest
import attr
from attr._compat import PY2, PYPY, just_warn, make_set_closure_cell
# Pympler doesn't work on PyPy.
try:
from pympler.asizeof import asizeof
has_pympler = True
except BaseException: # Won't be an import error.
has_pympler = False
@attr.s
class C1(object):
x = attr.ib(validator=attr.validators.instance_of(int))
y = attr.ib()
def method(self):
return self.x
@classmethod
def classmethod(cls):
return "clsmethod"
@staticmethod
def staticmethod():
return "staticmethod"
if not PY2:
def my_class(self):
return __class__
def my_super(self):
"""Just to test out the no-arg super."""
return super().__repr__()
@attr.s(slots=True, hash=True)
class C1Slots(object):
x = attr.ib(validator=attr.validators.instance_of(int))
y = attr.ib()
def method(self):
return self.x
@classmethod
def classmethod(cls):
return "clsmethod"
@staticmethod
def staticmethod():
return "staticmethod"
if not PY2:
def my_class(self):
return __class__
def my_super(self):
"""Just to test out the no-arg super."""
return super().__repr__()
def test_slots_being_used():
"""
The class is really using __slots__.
"""
non_slot_instance = C1(x=1, y="test")
slot_instance = C1Slots(x=1, y="test")
assert "__dict__" not in dir(slot_instance)
assert "__slots__" in dir(slot_instance)
assert "__dict__" in dir(non_slot_instance)
assert "__slots__" not in dir(non_slot_instance)
assert set(["__weakref__", "x", "y"]) == set(slot_instance.__slots__)
if has_pympler:
assert asizeof(slot_instance) < asizeof(non_slot_instance)
non_slot_instance.t = "test"
with pytest.raises(AttributeError):
slot_instance.t = "test"
assert 1 == non_slot_instance.method()
assert 1 == slot_instance.method()
assert attr.fields(C1Slots) == attr.fields(C1)
assert attr.asdict(slot_instance) == attr.asdict(non_slot_instance)
def test_basic_attr_funcs():
"""
Comparison, `__eq__`, `__hash__`, `__repr__`, `attrs.asdict` work.
"""
a = C1Slots(x=1, y=2)
b = C1Slots(x=1, y=3)
a_ = C1Slots(x=1, y=2)
# Comparison.
assert b > a
assert a_ == a
# Hashing.
hash(b) # Just to assert it doesn't raise.
# Repr.
assert "C1Slots(x=1, y=2)" == repr(a)
assert {"x": 1, "y": 2} == attr.asdict(a)
def test_inheritance_from_nonslots():
"""
Inheritance from a non-slotted class works.
Note that a slotted class inheriting from an ordinary class loses most of
the benefits of slotted classes, but it should still work.
"""
@attr.s(slots=True, hash=True)
class C2Slots(C1):
z = attr.ib()
c2 = C2Slots(x=1, y=2, z="test")
assert 1 == c2.x
assert 2 == c2.y
assert "test" == c2.z
c2.t = "test" # This will work, using the base class.
assert "test" == c2.t
assert 1 == c2.method()
assert "clsmethod" == c2.classmethod()
assert "staticmethod" == c2.staticmethod()
assert set(["z"]) == set(C2Slots.__slots__)
c3 = C2Slots(x=1, y=3, z="test")
assert c3 > c2
c2_ = C2Slots(x=1, y=2, z="test")
assert c2 == c2_
assert "C2Slots(x=1, y=2, z='test')" == repr(c2)
hash(c2) # Just to assert it doesn't raise.
assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
def test_nonslots_these():
"""
Enhancing a dict class using 'these' works.
This will actually *replace* the class with another one, using slots.
"""
class SimpleOrdinaryClass(object):
def __init__(self, x, y, z):
self.x = x
self.y = y
self.z = z
def method(self):
return self.x
@classmethod
def classmethod(cls):
return "clsmethod"
@staticmethod
def staticmethod():
return "staticmethod"
C2Slots = attr.s(
these={"x": attr.ib(), "y": attr.ib(), "z": attr.ib()},
init=False,
slots=True,
hash=True,
)(SimpleOrdinaryClass)
c2 = C2Slots(x=1, y=2, z="test")
assert 1 == c2.x
assert 2 == c2.y
assert "test" == c2.z
with pytest.raises(AttributeError):
c2.t = "test" # We have slots now.
assert 1 == c2.method()
assert "clsmethod" == c2.classmethod()
assert "staticmethod" == c2.staticmethod()
assert set(["__weakref__", "x", "y", "z"]) == set(C2Slots.__slots__)
c3 = C2Slots(x=1, y=3, z="test")
assert c3 > c2
c2_ = C2Slots(x=1, y=2, z="test")
assert c2 == c2_
assert "SimpleOrdinaryClass(x=1, y=2, z='test')" == repr(c2)
hash(c2) # Just to assert it doesn't raise.
assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
def test_inheritance_from_slots():
"""
Inheriting from an attrs slotted class works.
"""
@attr.s(slots=True, hash=True)
class C2Slots(C1Slots):
z = attr.ib()
@attr.s(slots=True, hash=True)
class C2(C1):
z = attr.ib()
c2 = C2Slots(x=1, y=2, z="test")
assert 1 == c2.x
assert 2 == c2.y
assert "test" == c2.z
assert set(["z"]) == set(C2Slots.__slots__)
assert 1 == c2.method()
assert "clsmethod" == c2.classmethod()
assert "staticmethod" == c2.staticmethod()
with pytest.raises(AttributeError):
c2.t = "test"
non_slot_instance = C2(x=1, y=2, z="test")
if has_pympler:
assert asizeof(c2) < asizeof(non_slot_instance)
c3 = C2Slots(x=1, y=3, z="test")
assert c3 > c2
c2_ = C2Slots(x=1, y=2, z="test")
assert c2 == c2_
assert "C2Slots(x=1, y=2, z='test')" == repr(c2)
hash(c2) # Just to assert it doesn't raise.
assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
def test_inheritance_from_slots_with_attribute_override():
"""
Inheriting from a slotted class doesn't re-create existing slots
"""
class HasXSlot(object):
__slots__ = ("x",)
@attr.s(slots=True, hash=True)
class C2Slots(C1Slots):
# y re-defined here but it shouldn't get a slot
y = attr.ib()
z = attr.ib()
@attr.s(slots=True, hash=True)
class NonAttrsChild(HasXSlot):
# Parent class has slot for "x" already, so we skip it
x = attr.ib()
y = attr.ib()
z = attr.ib()
c2 = C2Slots(1, 2, "test")
assert 1 == c2.x
assert 2 == c2.y
assert "test" == c2.z
assert {"z"} == set(C2Slots.__slots__)
na = NonAttrsChild(1, 2, "test")
assert 1 == na.x
assert 2 == na.y
assert "test" == na.z
assert {"__weakref__", "y", "z"} == set(NonAttrsChild.__slots__)
def test_inherited_slot_reuses_slot_descriptor():
"""
We reuse slot descriptor for an attr.ib defined in a slotted attr.s
"""
class HasXSlot(object):
__slots__ = ("x",)
class OverridesX(HasXSlot):
@property
def x(self):
return None
@attr.s(slots=True)
class Child(OverridesX):
x = attr.ib()
assert Child.x is not OverridesX.x
assert Child.x is HasXSlot.x
c = Child(1)
assert 1 == c.x
assert set() == set(Child.__slots__)
ox = OverridesX()
assert ox.x is None
def test_bare_inheritance_from_slots():
"""
Inheriting from a bare attrs slotted class works.
"""
@attr.s(
init=False, eq=False, order=False, hash=False, repr=False, slots=True
)
class C1BareSlots(object):
x = attr.ib(validator=attr.validators.instance_of(int))
y = attr.ib()
def method(self):
return self.x
@classmethod
def classmethod(cls):
return "clsmethod"
@staticmethod
def staticmethod():
return "staticmethod"
@attr.s(init=False, eq=False, order=False, hash=False, repr=False)
class C1Bare(object):
x = attr.ib(validator=attr.validators.instance_of(int))
y = attr.ib()
def method(self):
return self.x
@classmethod
def classmethod(cls):
return "clsmethod"
@staticmethod
def staticmethod():
return "staticmethod"
@attr.s(slots=True, hash=True)
class C2Slots(C1BareSlots):
z = attr.ib()
@attr.s(slots=True, hash=True)
class C2(C1Bare):
z = attr.ib()
c2 = C2Slots(x=1, y=2, z="test")
assert 1 == c2.x
assert 2 == c2.y
assert "test" == c2.z
assert 1 == c2.method()
assert "clsmethod" == c2.classmethod()
assert "staticmethod" == c2.staticmethod()
with pytest.raises(AttributeError):
c2.t = "test"
non_slot_instance = C2(x=1, y=2, z="test")
if has_pympler:
assert asizeof(c2) < asizeof(non_slot_instance)
c3 = C2Slots(x=1, y=3, z="test")
assert c3 > c2
c2_ = C2Slots(x=1, y=2, z="test")
assert c2 == c2_
assert "C2Slots(x=1, y=2, z='test')" == repr(c2)
hash(c2) # Just to assert it doesn't raise.
assert {"x": 1, "y": 2, "z": "test"} == attr.asdict(c2)
@pytest.mark.skipif(PY2, reason="closure cell rewriting is PY3-only.")
class TestClosureCellRewriting(object):
def test_closure_cell_rewriting(self):
"""
Slotted classes support proper closure cell rewriting.
This affects features like `__class__` and the no-arg super().
"""
non_slot_instance = C1(x=1, y="test")
slot_instance = C1Slots(x=1, y="test")
assert non_slot_instance.my_class() is C1
assert slot_instance.my_class() is C1Slots
# Just assert they return something, and not an exception.
assert non_slot_instance.my_super()
assert slot_instance.my_super()
def test_inheritance(self):
"""
Slotted classes support proper closure cell rewriting when inheriting.
This affects features like `__class__` and the no-arg super().
"""
@attr.s
class C2(C1):
def my_subclass(self):
return __class__
@attr.s
class C2Slots(C1Slots):
def my_subclass(self):
return __class__
non_slot_instance = C2(x=1, y="test")
slot_instance = C2Slots(x=1, y="test")
assert non_slot_instance.my_class() is C1
assert slot_instance.my_class() is C1Slots
# Just assert they return something, and not an exception.
assert non_slot_instance.my_super()
assert slot_instance.my_super()
assert non_slot_instance.my_subclass() is C2
assert slot_instance.my_subclass() is C2Slots
@pytest.mark.parametrize("slots", [True, False])
def test_cls_static(self, slots):
"""
Slotted classes support proper closure cell rewriting for class- and
static methods.
"""
# Python can reuse closure cells, so we create new classes just for
# this test.
@attr.s(slots=slots)
class C:
@classmethod
def clsmethod(cls):
return __class__
assert C.clsmethod() is C
@attr.s(slots=slots)
class D:
@staticmethod
def statmethod():
return __class__
assert D.statmethod() is D
@pytest.mark.skipif(PYPY, reason="set_closure_cell always works on PyPy")
@pytest.mark.skipif(
sys.version_info >= (3, 8),
reason="can't break CodeType.replace() via monkeypatch",
)
def test_code_hack_failure(self, monkeypatch):
"""
Keeps working if function/code object introspection doesn't work
on this (nonstandard) interpreter.
A warning is emitted that points to the actual code.
"""
# This is a pretty good approximation of the behavior of
# the actual types.CodeType on Brython.
monkeypatch.setattr(types, "CodeType", lambda: None)
func = make_set_closure_cell()
with pytest.warns(RuntimeWarning) as wr:
func()
w = wr.pop()
assert __file__ == w.filename
assert (
"Running interpreter doesn't sufficiently support code object "
"introspection. Some features like bare super() or accessing "
"__class__ will not work with slotted classes.",
) == w.message.args
assert just_warn is func
@pytest.mark.skipif(PYPY, reason="__slots__ only block weakref on CPython")
def test_not_weakrefable():
"""
Instance is not weak-referenceable when `weakref_slot=False` in CPython.
"""
@attr.s(slots=True, weakref_slot=False)
class C(object):
pass
c = C()
with pytest.raises(TypeError):
weakref.ref(c)
@pytest.mark.skipif(
not PYPY, reason="slots without weakref_slot should only work on PyPy"
)
def test_implicitly_weakrefable():
"""
Instance is weak-referenceable even when `weakref_slot=False` in PyPy.
"""
@attr.s(slots=True, weakref_slot=False)
class C(object):
pass
c = C()
w = weakref.ref(c)
assert c is w()
def test_weakrefable():
"""
Instance is weak-referenceable when `weakref_slot=True`.
"""
@attr.s(slots=True, weakref_slot=True)
class C(object):
pass
c = C()
w = weakref.ref(c)
assert c is w()
def test_weakref_does_not_add_a_field():
"""
`weakref_slot=True` does not add a field to the class.
"""
@attr.s(slots=True, weakref_slot=True)
class C(object):
field = attr.ib()
assert [f.name for f in attr.fields(C)] == ["field"]
def tests_weakref_does_not_add_when_inheriting_with_weakref():
"""
`weakref_slot=True` does not add a new __weakref__ slot when inheriting
one.
"""
@attr.s(slots=True, weakref_slot=True)
class C(object):
pass
@attr.s(slots=True, weakref_slot=True)
class D(C):
pass
d = D()
w = weakref.ref(d)
assert d is w()
def tests_weakref_does_not_add_with_weakref_attribute():
"""
`weakref_slot=True` does not add a new __weakref__ slot when an attribute
of that name exists.
"""
@attr.s(slots=True, weakref_slot=True)
class C(object):
__weakref__ = attr.ib(
init=False, hash=False, repr=False, eq=False, order=False
)
c = C()
w = weakref.ref(c)
assert c is w()
def test_slots_empty_cell():
"""
Tests that no `ValueError: Cell is empty` exception is raised when
closure cells are present with no contents in a `slots=True` class.
(issue https://github.com/python-attrs/attrs/issues/589)
On Python 3, if a method mentions `__class__` or uses the no-arg `super()`,
the compiler will bake a reference to the class in the method itself as
`method.__closure__`. Since `attrs` replaces the class with a clone,
`_ClassBuilder._create_slots_class(self)` will rewrite these references so
it keeps working. This method was not properly covering the edge case where
the closure cell was empty, we fixed it and this is the non-regression
test.
"""
@attr.s(slots=True)
class C(object):
field = attr.ib()
def f(self, a):
super(C, self).__init__()
C(field=1)
@attr.s(getstate_setstate=True)
class C2(object):
x = attr.ib()
@attr.s(slots=True, getstate_setstate=True)
class C2Slots(object):
x = attr.ib()
class TestPickle(object):
@pytest.mark.parametrize("protocol", range(pickle.HIGHEST_PROTOCOL))
def test_pickleable_by_default(self, protocol):
"""
If nothing else is passed, slotted classes can be pickled and
unpickled with all supported protocols.
"""
i1 = C1Slots(1, 2)
i2 = pickle.loads(pickle.dumps(i1, protocol))
assert i1 == i2
assert i1 is not i2
def test_no_getstate_setstate_for_dict_classes(self):
"""
As long as getstate_setstate is None, nothing is done to dict
classes.
"""
i = C1(1, 2)
assert None is getattr(i, "__getstate__", None)
assert None is getattr(i, "__setstate__", None)
def test_no_getstate_setstate_if_option_false(self):
"""
Don't add getstate/setstate if getstate_setstate is False.
"""
@attr.s(slots=True, getstate_setstate=False)
class C(object):
x = attr.ib()
i = C(42)
assert None is getattr(i, "__getstate__", None)
assert None is getattr(i, "__setstate__", None)
@pytest.mark.parametrize("cls", [C2(1), C2Slots(1)])
def test_getstate_set_state_force_true(self, cls):
"""
If getstate_setstate is True, add them unconditionally.
"""
assert None is not getattr(cls, "__getstate__", None)
assert None is not getattr(cls, "__setstate__", None)
def test_slots_super_property_get():
"""
On Python 2/3: the `super(self.__class__, self)` works.
"""
@attr.s(slots=True)
class A(object):
x = attr.ib()
@property
def f(self):
return self.x
@attr.s(slots=True)
class B(A):
@property
def f(self):
return super(B, self).f ** 2
assert B(11).f == 121
assert B(17).f == 289
@pytest.mark.skipif(PY2, reason="shortcut super() is PY3-only.")
def test_slots_super_property_get_shortcut():
"""
On Python 3, the `super()` shortcut is allowed.
"""
@attr.s(slots=True)
class A(object):
x = attr.ib()
@property
def f(self):
return self.x
@attr.s(slots=True)
class B(A):
@property
def f(self):
return super().f ** 2
assert B(11).f == 121
assert B(17).f == 289
|
python-attrs/attrs
|
tests/test_slots.py
|
Python
|
mit
| 18,010 | 0 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from daklib.database.all import Base
Session = sessionmaker()
@pytest.fixture(scope='session')
def engine():
engine = create_engine('sqlite://', echo=True)
Base.metadata.create_all(engine)
return engine
@pytest.yield_fixture
def session(engine):
connection = engine.connect()
trans = connection.begin()
session = Session(bind=connection)
yield session
session.close()
trans.rollback()
connection.close()
|
Debian/dak
|
daklib/conftest.py
|
Python
|
gpl-2.0
| 1,325 | 0.000755 |
import re
import copy
import logging
import datetime
import objectpath
from indra.statements import *
logger = logging.getLogger(__name__)
class EidosProcessor(object):
"""This processor extracts INDRA Statements from Eidos JSON-LD output.
Parameters
----------
json_dict : dict
A JSON dictionary containing the Eidos extractions in JSON-LD format.
Attributes
----------
statements : list[indra.statements.Statement]
A list of INDRA Statements that were extracted by the processor.
"""
def __init__(self, json_dict, grounding_ns=None):
self.doc = EidosDocument(json_dict)
self.grounding_ns = grounding_ns
self.statements = []
def extract_causal_relations(self):
"""Extract causal relations as Statements."""
# Get the extractions that are labeled as directed and causal
relations = [e for e in self.doc.extractions if
'DirectedRelation' in e['labels'] and
'Causal' in e['labels']]
# For each relation, we try to extract an INDRA Statement and
# save it if its valid
for relation in relations:
stmt = self.get_causal_relation(relation)
if stmt is not None:
self.statements.append(stmt)
def extract_correlations(self):
events = [e for e in self.doc.extractions if
'UndirectedRelation' in e['labels'] and
'Correlation' in e['labels']]
for event in events:
# For now, just take the first source and first destination.
# Later, might deal with hypergraph representation.
arg_ids = find_args(event, 'argument')
if len(arg_ids) != 2:
                logger.warning('Skipping correlation that does not have exactly 2 arguments.')
                continue
# Resolve coreferences by ID
arg_ids = [self.doc.coreferences.get(arg_id, arg_id)
for arg_id in arg_ids]
# Get the actual entities
args = [self.doc.entities[arg_id] for arg_id in arg_ids]
# Make Events from the entities
members = [self.get_event(arg) for arg in args]
# Get the evidence
evidence = self.get_evidence(event)
st = Association(members, evidence=[evidence])
self.statements.append(st)
def extract_events(self):
events = [e for e in self.doc.extractions if
'Concept-Expanded' in e['labels']]
for event_entry in events:
event = self.get_event(event_entry)
evidence = self.get_evidence(event_entry)
event.evidence = [evidence]
if not event.context and evidence.context:
event.context = copy.deepcopy(evidence.context)
evidence.context = None
self.statements.append(event)
def get_event_by_id(self, event_id):
# Resolve coreferences by ID
event_id = self.doc.coreferences.get(event_id, event_id)
# Get the actual entity
event = self.doc.entities[event_id]
return self.get_event(event)
def get_event(self, event):
concept = self.get_concept(event)
states = event.get('states', [])
extracted_states = self.extract_entity_states(states)
polarity = extracted_states.get('polarity')
adjectives = extracted_states.get('adjectives')
delta = QualitativeDelta(polarity=polarity, adjectives=adjectives)
timex = extracted_states.get('time_context', None)
geo = extracted_states.get('geo_context', None)
context = WorldContext(time=timex, geo_location=geo) \
if timex or geo else None
stmt = Event(concept, delta=delta, context=context)
return stmt
def get_causal_relation(self, relation):
# For now, just take the first source and first destination.
# Later, might deal with hypergraph representation.
subj_id = find_arg(relation, 'source')
obj_id = find_arg(relation, 'destination')
if subj_id is None or obj_id is None:
return None
subj = self.get_event_by_id(subj_id)
obj = self.get_event_by_id(obj_id)
evidence = self.get_evidence(relation)
# We also put the adjectives and polarities into annotations since
# they could otherwise get squashed upon preassembly
evidence.annotations['subj_polarity'] = subj.delta.polarity
evidence.annotations['obj_polarity'] = obj.delta.polarity
evidence.annotations['subj_adjectives'] = subj.delta.adjectives
evidence.annotations['obj_adjectives'] = obj.delta.adjectives
evidence.annotations['subj_context'] = subj.context.to_json() if \
subj.context else {}
evidence.annotations['obj_context'] = obj.context.to_json() if \
obj.context else {}
st = Influence(subj, obj, evidence=[evidence])
return st
def get_evidence(self, relation):
"""Return the Evidence object for the INDRA Statment."""
provenance = relation.get('provenance')
# First try looking up the full sentence through provenance
text = None
context = None
if provenance:
sentence_tag = provenance[0].get('sentence')
if sentence_tag and '@id' in sentence_tag:
sentence_id = sentence_tag['@id']
sentence = self.doc.sentences.get(sentence_id)
if sentence is not None:
text = _sanitize(sentence['text'])
# Here we try to get the title of the document and set it
# in the provenance
doc_id = provenance[0].get('document', {}).get('@id')
if doc_id:
title = self.doc.documents.get(doc_id, {}).get('title')
if title:
provenance[0]['document']['title'] = title
annotations = {'found_by': relation.get('rule'),
'provenance': provenance}
if self.doc.dct is not None:
annotations['document_creation_time'] = self.doc.dct.to_json()
epistemics = {}
negations = self.get_negation(relation)
hedgings = self.get_hedging(relation)
if hedgings:
epistemics['hedgings'] = hedgings
if negations:
# This is the INDRA standard to show negation
epistemics['negated'] = True
# But we can also save the texts associated with the negation
# under annotations, just in case it's needed
annotations['negated_texts'] = negations
# If that fails, we can still get the text of the relation
if text is None:
text = _sanitize(relation.get('text'))
ev = Evidence(source_api='eidos', text=text, annotations=annotations,
context=context, epistemics=epistemics)
return ev
@staticmethod
def get_negation(event):
"""Return negation attached to an event.
Example: "states": [{"@type": "State", "type": "NEGATION",
"text": "n't"}]
"""
states = event.get('states', [])
if not states:
return []
negs = [state for state in states
if state.get('type') == 'NEGATION']
neg_texts = [neg['text'] for neg in negs]
return neg_texts
@staticmethod
def get_hedging(event):
"""Return hedging markers attached to an event.
Example: "states": [{"@type": "State", "type": "HEDGE",
"text": "could"}
"""
states = event.get('states', [])
if not states:
return []
hedgings = [state for state in states
if state.get('type') == 'HEDGE']
hedging_texts = [hedging['text'] for hedging in hedgings]
return hedging_texts
def extract_entity_states(self, states):
if states is None:
return {'polarity': None, 'adjectives': []}
polarity = None
adjectives = []
time_context = None
geo_context = None
for state in states:
if polarity is None:
if state['type'] == 'DEC':
polarity = -1
# Handle None entry here
mods = state.get('modifiers') if \
state.get('modifiers') else []
adjectives += [mod['text'] for mod in mods]
elif state['type'] == 'INC':
polarity = 1
mods = state.get('modifiers') if \
state.get('modifiers') else []
adjectives += [mod['text'] for mod in mods]
elif state['type'] == 'QUANT':
adjectives.append(state['text'])
if state['type'] == 'TIMEX':
time_context = self.time_context_from_ref(state)
elif state['type'] == 'LocationExp':
# TODO: here we take only the first geo_context occurrence.
# Eidos sometimes provides a list of locations, it may
# make sense to break those up into multiple statements
# each with one location
if not geo_context:
geo_context = self.geo_context_from_ref(state)
return {'polarity': polarity, 'adjectives': adjectives,
'time_context': time_context, 'geo_context': geo_context}
def get_groundings(self, entity):
"""Return groundings as db_refs for an entity."""
def get_grounding_entries(grounding):
if not grounding:
return None
entries = []
values = grounding.get('values', [])
# Values could still have been a None entry here
if values:
for entry in values:
ont_concept = entry.get('ontologyConcept')
value = entry.get('value')
if ont_concept is None or value is None:
continue
entries.append((ont_concept, value))
return entries
# Save raw text and Eidos scored groundings as db_refs
db_refs = {'TEXT': entity['text']}
groundings = entity.get('groundings')
if not groundings:
return db_refs
for g in groundings:
entries = get_grounding_entries(g)
# Only add these groundings if there are actual values listed
if entries:
key = g['name'].upper()
if self.grounding_ns is not None and \
key not in self.grounding_ns:
continue
if key == 'UN':
db_refs[key] = [(s[0].replace(' ', '_'), s[1])
for s in entries]
elif key == 'WM_FLATTENED' or key == 'WM':
db_refs['WM'] = [(s[0].strip('/'), s[1])
for s in entries]
else:
db_refs[key] = entries
return db_refs
def get_concept(self, entity):
"""Return Concept from an Eidos entity."""
# Use the canonical name as the name of the Concept
name = entity['canonicalName']
db_refs = self.get_groundings(entity)
concept = Concept(name, db_refs=db_refs)
return concept
def time_context_from_ref(self, timex):
"""Return a time context object given a timex reference entry."""
# If the timex has a value set, it means that it refers to a DCT or
# a TimeExpression e.g. "value": {"@id": "_:DCT_1"} and the parameters
# need to be taken from there
value = timex.get('value')
if value:
# Here we get the TimeContext directly from the stashed DCT
# dictionary
tc = self.doc.timexes.get(value['@id'])
return tc
return None
def geo_context_from_ref(self, ref):
"""Return a ref context object given a location reference entry."""
value = ref.get('value')
if value:
# Here we get the RefContext from the stashed geoloc dictionary
rc = self.doc.geolocs.get(value['@id'])
return rc
return None
def get_all_events(self):
"""Return a list of all standalone events from a list
of statements."""
events = []
for stmt in self.statements:
stmt = copy.deepcopy(stmt)
if isinstance(stmt, Influence):
for member in [stmt.subj, stmt.obj]:
member.evidence = stmt.evidence[:]
# Remove the context since it may be for the other member
for ev in member.evidence:
ev.context = None
events.append(member)
elif isinstance(stmt, Association):
for member in stmt.members:
member.evidence = stmt.evidence[:]
# Remove the context since it may be for the other member
for ev in member.evidence:
ev.context = None
events.append(member)
elif isinstance(stmt, Event):
events.append(stmt)
return events
class EidosDocument(object):
def __init__(self, json_dict):
self.tree = objectpath.Tree(json_dict)
self.extractions = []
self.sentences = {}
self.entities = {}
self.documents = {}
self.coreferences = {}
self.timexes = {}
self.geolocs = {}
self.dct = None
self._preprocess_extractions()
def _preprocess_extractions(self):
extractions = \
self.tree.execute("$.extractions[(@.@type is 'Extraction')]")
if not extractions:
return
# Listify for multiple reuse
self.extractions = list(extractions)
# Build a dictionary of entities
entities = [e for e in self.extractions if 'Concept' in
e.get('labels', [])]
self.entities = {entity['@id']: entity for entity in entities}
# Build a dictionary of sentences and document creation times (DCTs)
documents = self.tree.execute("$.documents[(@.@type is 'Document')]")
self.sentences = {}
for document in documents:
dct = document.get('dct')
title = document.get('title')
self.documents[document['@id']] = {'title': title}
# We stash the DCT here as a TimeContext object
if dct is not None:
self.dct = self.time_context_from_dct(dct)
self.timexes[dct['@id']] = self.dct
sentences = document.get('sentences', [])
for sent in sentences:
self.sentences[sent['@id']] = sent
timexes = sent.get('timexes')
if timexes:
for timex in timexes:
tc = time_context_from_timex(timex)
self.timexes[timex['@id']] = tc
geolocs = sent.get('geolocs')
if geolocs:
for geoloc in geolocs:
rc = ref_context_from_geoloc(geoloc)
self.geolocs[geoloc['@id']] = rc
# Build a dictionary of coreferences
for extraction in self.extractions:
if 'Coreference' in extraction['labels']:
reference = find_arg(extraction, 'reference')
anchor = find_arg(extraction, 'anchor')
self.coreferences[reference] = anchor
@staticmethod
def time_context_from_dct(dct):
"""Return a time context object given a DCT entry."""
time_text = dct.get('text')
start = _get_time_stamp(dct.get('start'))
end = _get_time_stamp(dct.get('end'))
duration = _get_duration(start, end)
tc = TimeContext(text=time_text, start=start, end=end,
duration=duration)
return tc
def _sanitize(text):
"""Return sanitized Eidos text field for human readability."""
d = {'-LRB-': '(', '-RRB-': ')'}
return re.sub('|'.join(d.keys()), lambda m: d[m.group(0)], text)
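# Example (illustrative, not part of the original module): with the mapping
# above, parser bracket tokens are restored to plain parentheses, e.g.
#   >>> _sanitize('rainfall -LRB-mm-RRB-')
#   'rainfall (mm)'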
def _get_time_stamp(entry):
"""Return datetime object from a timex constraint start/end entry.
Example string format to convert: 2018-01-01T00:00
"""
if not entry or entry == 'Undef':
return None
try:
dt = datetime.datetime.strptime(entry, '%Y-%m-%dT%H:%M')
except Exception as e:
logger.debug('Could not parse %s format' % entry)
return None
return dt
def _get_duration(start, end):
if not start or not end:
return None
try:
duration = int((end - start).total_seconds())
except Exception as e:
logger.debug('Failed to get duration from %s and %s' %
(str(start), str(end)))
duration = None
return duration
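# Example (illustrative, not part of the original module): a timex constraint
# spanning one day parses and measures as follows.
#   >>> start = _get_time_stamp('2018-01-01T00:00')
#   >>> end = _get_time_stamp('2018-01-02T00:00')
#   >>> _get_duration(start, end)
#   86400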
def ref_context_from_geoloc(geoloc):
"""Return a RefContext object given a geoloc entry."""
text = geoloc.get('text')
geoid = geoloc.get('geoID')
rc = RefContext(name=text, db_refs={'GEOID': geoid})
return rc
def time_context_from_timex(timex):
"""Return a TimeContext object given a timex entry."""
time_text = timex.get('text')
intervals = timex.get('intervals')
if not intervals:
start = end = duration = None
else:
constraint = intervals[0]
start = _get_time_stamp(constraint.get('start'))
end = _get_time_stamp(constraint.get('end'))
duration = _get_duration(start, end)
tc = TimeContext(text=time_text, start=start, end=end,
duration=duration)
return tc
def find_arg(event, arg_type):
"""Return ID of the first argument of a given type"""
obj_ids = find_args(event, arg_type)
if not obj_ids:
return None
else:
return obj_ids[0]
def find_args(event, arg_type):
"""Return IDs of all arguments of a given type"""
args = event.get('arguments', {})
obj_tags = [arg for arg in args if arg['type'] == arg_type]
if obj_tags:
return [o['value']['@id'] for o in obj_tags]
else:
return []
|
johnbachman/belpy
|
indra/sources/eidos/processor.py
|
Python
|
mit
| 18,394 | 0 |
#----------------------------------------------------------------------
# Copyright (c) 2008 Board of Trustees, Princeton University
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
##
# Implements SFA GID. GIDs are based on certificates, and the GID class is a
# descendant of the certificate class.
##
import xmlrpclib
import uuid
from handler.geni.v3.extensions.sfa.trust.certificate import Certificate
from handler.geni.v3.extensions.sfa.util.faults import GidInvalidParentHrn, GidParentHrn
from handler.geni.v3.extensions.sfa.util.sfalogging import logger
from handler.geni.v3.extensions.sfa.util.xrn import hrn_to_urn, urn_to_hrn, hrn_authfor_hrn
##
# Create a new uuid. Returns the UUID as a string.
def create_uuid():
return str(uuid.uuid4().int)
##
# GID is a tuple:
# (uuid, urn, public_key)
#
# UUID is a unique identifier and is created by the python uuid module
# (or the utility function create_uuid() in gid.py).
#
# HRN is a human readable name. It is a dotted form similar to a backward domain
# name. For example, planetlab.us.arizona.bakers.
#
# URN is a human readable identifier of form:
# "urn:publicid:IDN+toplevelauthority[:sub-auth.]*[\res. type]\ +object name"
# For example, urn:publicid:IDN+planetlab:us:arizona+user+bakers
#
# PUBLIC_KEY is the public key of the principal identified by the UUID/HRN.
# It is a Keypair object as defined in the cert.py module.
#
# It is expected that there is a one-to-one pairing between UUIDs and HRNs,
# but it is uncertain how this would be enforced or if it needs to be enforced.
#
# These fields are encoded using xmlrpc into the subjectAltName field of the
# x509 certificate. Note: Call encode() once the fields have been filled in
# to perform this encoding.
class GID(Certificate):
uuid = None
hrn = None
urn = None
email = None # for adding to the SubjectAltName
##
# Create a new GID object
#
# @param create If true, create the X509 certificate
# @param subject If subject!=None, create the X509 cert and set the subject name
# @param string If string!=None, load the GID from a string
# @param filename If filename!=None, load the GID from a file
# @param lifeDays life of GID in days - default is 1825==5 years
def __init__(self, create=False, subject=None, string=None, filename=None, uuid=None, hrn=None, urn=None, lifeDays=1825):
Certificate.__init__(self, lifeDays, create, subject, string, filename)
if subject:
logger.debug("Creating GID for subject: %s" % subject)
if uuid:
self.uuid = int(uuid)
if hrn:
self.hrn = hrn
self.urn = hrn_to_urn(hrn, 'unknown')
if urn:
self.urn = urn
self.hrn, type = urn_to_hrn(urn)
def set_uuid(self, uuid):
if isinstance(uuid, str):
self.uuid = int(uuid)
else:
self.uuid = uuid
def get_uuid(self):
if not self.uuid:
self.decode()
return self.uuid
def set_hrn(self, hrn):
self.hrn = hrn
def get_hrn(self):
if not self.hrn:
self.decode()
return self.hrn
def set_urn(self, urn):
self.urn = urn
self.hrn, type = urn_to_hrn(urn)
def get_urn(self):
if not self.urn:
self.decode()
return self.urn
# Will be stuffed into subjectAltName
def set_email(self, email):
self.email = email
def get_email(self):
if not self.email:
self.decode()
return self.email
def get_type(self):
if not self.urn:
self.decode()
_, t = urn_to_hrn(self.urn)
return t
##
# Encode the GID fields and package them into the subject-alt-name field
# of the X509 certificate. This must be called prior to signing the
# certificate. It may only be called once per certificate.
def encode(self):
if self.urn:
urn = self.urn
else:
urn = hrn_to_urn(self.hrn, None)
str = "URI:" + urn
if self.uuid:
str += ", " + "URI:" + uuid.UUID(int=self.uuid).urn
if self.email:
str += ", " + "email:" + self.email
self.set_data(str, 'subjectAltName')
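    # Illustrative sketch (not part of the original source): for the example
    # URN given in the header comment, with a uuid and an assumed email set,
    # encode() stores a single comma-separated subjectAltName value of roughly
    # this shape:
    #   URI:urn:publicid:IDN+planetlab:us:arizona+user+bakers, URI:urn:uuid:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx, email:bakers@example.net
    # decode() below parses exactly this layout back into urn, uuid and email.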
##
# Decode the subject-alt-name field of the X509 certificate into the
# fields of the GID. This is automatically called by the various get_*()
# functions in this class.
def decode(self):
data = self.get_data('subjectAltName')
dict = {}
if data:
if data.lower().startswith('uri:http://<params>'):
dict = xmlrpclib.loads(data[11:])[0][0]
else:
spl = data.split(', ')
for val in spl:
if val.lower().startswith('uri:urn:uuid:'):
dict['uuid'] = uuid.UUID(val[4:]).int
elif val.lower().startswith('uri:urn:publicid:idn+'):
dict['urn'] = val[4:]
elif val.lower().startswith('email:'):
# FIXME: Ensure there isn't cruft in that address...
# EG look for email:copy,....
dict['email'] = val[6:]
self.uuid = dict.get("uuid", None)
self.urn = dict.get("urn", None)
self.hrn = dict.get("hrn", None)
self.email = dict.get("email", None)
if self.urn:
self.hrn = urn_to_hrn(self.urn)[0]
##
# Dump the credential to stdout.
#
# @param indent specifies a number of spaces to indent the output
# @param dump_parents If true, also dump the parents of the GID
def dump(self, *args, **kwargs):
print self.dump_string(*args,**kwargs)
def dump_string(self, indent=0, dump_parents=False):
result=" "*(indent-2) + "GID\n"
result += " "*indent + "hrn:" + str(self.get_hrn()) +"\n"
result += " "*indent + "urn:" + str(self.get_urn()) +"\n"
result += " "*indent + "uuid:" + str(self.get_uuid()) + "\n"
if self.get_email() is not None:
result += " "*indent + "email:" + str(self.get_email()) + "\n"
filename=self.get_filename()
if filename: result += "Filename %s\n"%filename
if self.parent and dump_parents:
result += " "*indent + "parent:\n"
result += self.parent.dump_string(indent+4, dump_parents)
return result
##
# Verify the chain of authenticity of the GID. First perform the checks
# of the certificate class (verifying that each parent signs the child,
# etc). In addition, GIDs also confirm that the parent's HRN is a prefix
# of the child's HRN, and the parent is of type 'authority'.
#
# Verifying these prefixes prevents a rogue authority from signing a GID
# for a principal that is not a member of that authority. For example,
# planetlab.us.arizona cannot sign a GID for planetlab.us.princeton.foo.
def verify_chain(self, trusted_certs = None):
# do the normal certificate verification stuff
trusted_root = Certificate.verify_chain(self, trusted_certs)
if self.parent:
# make sure the parent's hrn is a prefix of the child's hrn
if not hrn_authfor_hrn(self.parent.get_hrn(), self.get_hrn()):
raise GidParentHrn("This cert HRN %s isn't in the namespace for parent HRN %s" % (self.get_hrn(), self.parent.get_hrn()))
# Parent must also be an authority (of some type) to sign a GID
# There are multiple types of authority - accept them all here
if not self.parent.get_type().find('authority') == 0:
raise GidInvalidParentHrn("This cert %s's parent %s is not an authority (is a %s)" % (self.get_hrn(), self.parent.get_hrn(), self.parent.get_type()))
# Then recurse up the chain - ensure the parent is a trusted
# root or is in the namespace of a trusted root
self.parent.verify_chain(trusted_certs)
else:
# make sure that the trusted root's hrn is a prefix of the child's
trusted_gid = GID(string=trusted_root.save_to_string())
trusted_type = trusted_gid.get_type()
trusted_hrn = trusted_gid.get_hrn()
#if trusted_type == 'authority':
# trusted_hrn = trusted_hrn[:trusted_hrn.rindex('.')]
cur_hrn = self.get_hrn()
if not hrn_authfor_hrn(trusted_hrn, cur_hrn):
raise GidParentHrn("Trusted root with HRN %s isn't a namespace authority for this cert: %s" % (trusted_hrn, cur_hrn))
# There are multiple types of authority - accept them all here
if not trusted_type.find('authority') == 0:
raise GidInvalidParentHrn("This cert %s's trusted root signer %s is not an authority (is a %s)" % (self.get_hrn(), trusted_hrn, trusted_type))
return
|
dana-i2cat/felix
|
modules/resource/manager/stitching-entity/src/handler/geni/v3/extensions/sfa/trust/gid.py
|
Python
|
apache-2.0
| 10,122 | 0.004149 |
"""
Django settings for DocumentsFlow project.
Generated by 'django-admin startproject' using Django 1.10.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '-3v-w43(q0sg$!%e+i@#f#=w(j40i=afhjrmyedj-+x36+z'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'DocumentsFlow.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'DocumentsFlow.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
|
meltiseugen/DocumentsFlow
|
DocumentsFlow/settings.py
|
Python
|
mit
| 3,117 | 0.001283 |
# -*- coding: utf-8
from __future__ import absolute_import, unicode_literals
import django
DEBUG = True
USE_TZ = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "o0fy)a6pmew*fe9b+^wf)96)2j8)%6oz555d7by7_(*i!b8wj8"
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
}
}
ROOT_URLCONF = "tests.urls"
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.staticfiles",
"django.contrib.admin",
"django.contrib.messages",
"lock_tokens.apps.LockTokensConfig",
"tests",
]
SITE_ID = 1
STATIC_URL = '/static/'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
]
}
},
]
if django.VERSION >= (1, 10):
MIDDLEWARE = (
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
)
else:
MIDDLEWARE_CLASSES = (
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
)
|
rparent/django-lock-tokens
|
tests/settings.py
|
Python
|
mit
| 1,481 | 0 |
# coding=utf-8
import pytest
@pytest.fixture
def dns_sd():
from pymachinetalk import dns_sd
return dns_sd
@pytest.fixture
def sd():
from pymachinetalk import dns_sd
sd = dns_sd.ServiceDiscovery()
return sd
def test_registeringServicesFromServiceContainerWorks(dns_sd, sd):
service = dns_sd.Service()
discoverable = dns_sd.ServiceContainer()
discoverable.services.append(service)
sd.register(discoverable)
assert service in sd.services
def test_registeringServiceDirectlyWorks(dns_sd, sd):
service = dns_sd.Service()
sd.register(service)
assert service in sd.services
def test_registeringAnythingElseFails(sd):
item = object()
try:
sd.register(item)
except TypeError:
assert True
assert item not in sd.services
def test_registeringWhenRunningThrowsError(dns_sd, sd):
service = dns_sd.Service()
def dummy():
pass
sd._start_discovery = dummy
sd.start()
try:
sd.register(service)
except RuntimeError:
assert True
assert service not in sd.services
def test_unregisteringServiceDirectlyWorks(dns_sd, sd):
service = dns_sd.Service()
sd.register(service)
sd.unregister(service)
assert service not in sd.services
def test_unregisteringServicesFromServiceContainerWorks(dns_sd, sd):
service = dns_sd.Service()
discoverable = dns_sd.ServiceContainer()
discoverable.services.append(service)
sd.register(discoverable)
sd.unregister(discoverable)
assert service not in sd.services
def test_unregisteringAnythingElseFails(sd):
item = 34
try:
sd.unregister(item)
except TypeError:
assert True
assert item not in sd.services
def test_unregisteringWhenRunningThrowsError(dns_sd, sd):
service = dns_sd.Service()
def dummy():
pass
sd._start_discovery = dummy
sd.start()
try:
sd.unregister(service)
except RuntimeError:
assert True
assert service not in sd.services
class ServiceInfoFactory(object):
def create(
self,
base_type='machinekit',
domain='local',
sd_protocol='tcp',
name='Hugo on Franz',
service=b'halrcomp',
uuid=b'12345678',
host='127.0.0.1',
protocol='tcp',
port=12345,
version=0,
properties=None,
server='127.0.0.1',
address=None,
):
from zeroconf import ServiceInfo
typestring = '_%s._%s.%s.' % (base_type, sd_protocol, domain)
dsn = b'%s://%s:%i' % (protocol.encode(), host.encode(), port)
if properties is None:
properties = {
b'uuid': uuid,
b'service': service,
b'dsn': dsn,
b'version': version,
}
return ServiceInfo(
type_=typestring,
name='%s %s.%s' % (name, host, typestring),
properties=properties,
address=(address or host).encode(),
port=port,
server=server,
)
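# Illustrative note (not part of the original tests): with the defaults above,
# create() builds a zeroconf.ServiceInfo of type '_machinekit._tcp.local.'
# whose TXT records are roughly
#   {b'uuid': b'12345678', b'service': b'halrcomp',
#    b'dsn': b'tcp://127.0.0.1:12345', b'version': 0}
# which is the layout the Service.add_service_info() and filter tests rely on.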
@pytest.fixture
def zeroconf(mocker):
from zeroconf import Zeroconf
service_info = ServiceInfoFactory().create()
zeroconf_stub = mocker.stub(name='get_service_info')
zeroconf_stub.return_value = service_info
stub_object = Zeroconf()
stub_object.get_service_info = zeroconf_stub
return stub_object
@pytest.fixture
def zeroconf_without_service_info(mocker):
from zeroconf import Zeroconf
zeroconf_stub = mocker.stub(name='get_service_info')
zeroconf_stub.return_value = None
stub_object = Zeroconf()
stub_object.get_service_info = zeroconf_stub
return stub_object
def test_serviceDiscoveredUpdatesRegisteredServices(dns_sd, sd, zeroconf):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
assert service.ready is True
def test_serviceDisappearedUpdatesRegisteredServices(dns_sd, sd, zeroconf):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
sd.remove_service(
zeroconf,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
assert service.ready is False
def test_stoppingServiceDiscoveryResetsAllServices(dns_sd, sd, zeroconf):
service1 = dns_sd.Service(type_='halrcomp')
sd.register(service1)
service2 = dns_sd.Service(type_='halrcmd')
sd.register(service2)
sd.browser = object() # dummy
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
sd.stop()
assert service1.ready is False
assert service2.ready is False
def test_serviceDiscoveredWithoutServiceInfoDoesNotUpdateRegisteredServices(
dns_sd, sd, zeroconf_without_service_info
):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.add_service(
zeroconf_without_service_info,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
assert service.ready is False
def test_serviceDisappearedWithoutServiceInfoDoesNotUpdateRegisteredServices(
dns_sd, sd, zeroconf_without_service_info
):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
service.ready = True
sd.remove_service(
zeroconf_without_service_info,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
assert service.ready is True
def test_serviceInfoSetsAllRelevantValuesOfService(dns_sd):
service = dns_sd.Service(type_='halrcomp')
service_info = ServiceInfoFactory().create(
name='Foo on Bar',
uuid=b'987654321',
version=5,
host='10.0.0.10',
protocol='tcp',
port=12456,
server='sandybox.local',
)
service.add_service_info(service_info)
assert service.uri == 'tcp://10.0.0.10:12456'
assert service.name == service_info.name
assert service.uuid == '987654321'
assert service.version == 5
assert service.host_name == 'sandybox.local'
assert service.host_address == '10.0.0.10'
def test_serviceInfoResolvesLocalHostnameIfMatched(dns_sd):
service = dns_sd.Service(type_='halrcomp')
service_info = ServiceInfoFactory().create(
host='sandybox.local',
protocol='tcp',
port=12456,
server='sandybox.local',
address='10.0.0.10',
)
service.add_service_info(service_info)
assert service.uri == 'tcp://10.0.0.10:12456'
def test_serviceInfoReturnsRawUriIfHostnameIsNotMatched(dns_sd):
service = dns_sd.Service(type_='halrcomp')
service_info = ServiceInfoFactory().create(
host='thinkpad.local',
protocol='tcp',
port=12456,
server='sandybox.local',
address='10.0.0.10',
)
service.add_service_info(service_info)
assert service.uri == 'tcp://thinkpad.local:12456'
def test_serviceInfoWithIncompleteValuesIsIgnoredByService(dns_sd):
service = dns_sd.Service(type_='launcher')
service_info = ServiceInfoFactory().create(properties={})
service.add_service_info(service_info)
assert service.uri == ''
assert service.uuid == ''
assert service.version == b''
def test_removingServiceInfoResetsAllRelevantValuesOfService(dns_sd):
service = dns_sd.Service(type_='blahus')
service_info = ServiceInfoFactory().create()
service.add_service_info(service_info)
service.remove_service_info(service_info)
assert service.uri == ''
assert service.name == ''
assert service.uuid == ''
assert service.version == 0
assert service.host_name == ''
assert service.host_address == ''
def test_clearingServiceInfosResetsValuesOfService(dns_sd):
service = dns_sd.Service(type_='foobar')
service.add_service_info(ServiceInfoFactory().create())
service.add_service_info(ServiceInfoFactory().create())
service.clear_service_infos()
assert service.ready is False
assert service.uri == ''
def test_settingReadyPropertyOfServiceTriggersCallback(dns_sd):
cb_called = [False]
def cb(_):
cb_called[0] = True
service = dns_sd.Service(type_='halrcomp')
service.on_ready_changed.append(cb)
service_info = ServiceInfoFactory().create()
service.add_service_info(service_info)
assert cb_called[0] is True
def test_discoverableAddingServiceWorks(dns_sd):
discoverable = dns_sd.ServiceContainer()
service = dns_sd.Service(type_='foo')
discoverable.add_service(service)
assert service in discoverable.services
def test_discoverableAddingAnythingElseFails(dns_sd):
discoverable = dns_sd.ServiceContainer()
item = object()
try:
discoverable.add_service(item)
assert False
except TypeError:
assert True
assert item not in discoverable.services
def test_discoverableRemovingServiceWorks(dns_sd):
discoverable = dns_sd.ServiceContainer()
service = dns_sd.Service(type_='foo')
discoverable.add_service(service)
discoverable.remove_service(service)
assert service not in discoverable.services
def test_discoverableRemovingAnythingElseFails(dns_sd):
discoverable = dns_sd.ServiceContainer()
item = object()
try:
discoverable.remove_service(item)
assert False
except TypeError:
assert True
assert item not in discoverable.services
def test_discoverableAllServicesReadySetServicesReady(dns_sd):
discoverable = dns_sd.ServiceContainer()
service1 = dns_sd.Service(type_='foo')
discoverable.add_service(service1)
service2 = dns_sd.Service(type_='bar')
discoverable.add_service(service2)
service1.ready = True
service2.ready = True
assert discoverable.services_ready is True
def test_discoverableNotAllServicesReadyUnsetsServicesReady(dns_sd):
discoverable = dns_sd.ServiceContainer()
service1 = dns_sd.Service(type_='foo')
discoverable.add_service(service1)
service2 = dns_sd.Service(type_='bar')
discoverable.add_service(service2)
service1.ready = True
service2.ready = True
service1.ready = False
assert discoverable.services_ready is False
def test_discoverableServicesReadyChangedCallsCallback(dns_sd):
cb_called = [False]
def cb(_):
cb_called[0] = True
discoverable = dns_sd.ServiceContainer()
discoverable.on_services_ready_changed.append(cb)
discoverable.services_ready = True
assert cb_called[0] is True
def test_serviceDiscoveryFilterAcceptCorrectUuid(dns_sd):
service_info = ServiceInfoFactory().create(uuid=b'987654321')
filter = dns_sd.ServiceDiscoveryFilter(txt_records={b'uuid': b'987654321'})
assert filter.matches_service_info(service_info) is True
def test_serviceDiscoveryFilterRejectWrongUuid(dns_sd):
service_info = ServiceInfoFactory().create(uuid=b'123456789')
filter = dns_sd.ServiceDiscoveryFilter(txt_records={b'uuid': b'987654321'})
assert filter.matches_service_info(service_info) is False
def test_serviceDiscoveryFilterAcceptFuzzyName(dns_sd):
service_info = ServiceInfoFactory().create(name='Hello World')
filter = dns_sd.ServiceDiscoveryFilter(name='Hello')
assert filter.matches_service_info(service_info) is True
def test_serviceDiscoveryFilterAcceptExactMatchingName(dns_sd):
service_info = ServiceInfoFactory().create(name='Foo')
filter = dns_sd.ServiceDiscoveryFilter(name='Foo')
assert filter.matches_service_info(service_info) is True
def test_serviceDiscoveryFilterRejectNonMatchingName(dns_sd):
service_info = ServiceInfoFactory().create(name='Carolus Rex')
filter = dns_sd.ServiceDiscoveryFilter(name='Adolfus Maximus')
assert filter.matches_service_info(service_info) is False
def test_serviceDiscoveryFilterPassingWrongObjectFails(dns_sd):
filter = dns_sd.ServiceDiscoveryFilter()
try:
filter.matches_service_info(object())
assert False
except TypeError:
assert True
def test_serviceDiscoveryFiltersOutDiscoveredServiceWithWrongUuid(dns_sd, sd, zeroconf):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.filter = dns_sd.ServiceDiscoveryFilter(txt_records={b'uuid': b'87654321'})
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'Machinekit on MyBox 12.0.0.1._machinekit._tcp.local.',
)
assert service.ready is False
def test_serviceDiscoveryFiltersInDiscoveredServiceWithCorrectUuid(
dns_sd, sd, zeroconf
):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.filter = dns_sd.ServiceDiscoveryFilter(txt_records={b'uuid': b'12345678'})
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'SuperPrint 192.168.7.2._machinekit._tcp.local.',
)
assert service.ready is True
def test_serviceDiscoveryFiltersInDisappearedServiceWithCorrectUuid(
dns_sd, sd, zeroconf
):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.filter = dns_sd.ServiceDiscoveryFilter(txt_records={b'uuid': b'12345678'})
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'SuperPrint 192.168.7.2._machinekit._tcp.local.',
)
sd.remove_service(
zeroconf,
'_machinekit._tcp.local.',
'SuperPrint 192.168.7.2._machinekit._tcp.local.',
)
assert service.ready is False
|
strahlex/pymachinetalk
|
pymachinetalk/tests/test_dns_sd.py
|
Python
|
mit
| 13,743 | 0.000291 |
from discord.ext import commands
class Github:
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def permrole(self, ctx, argument:str):
await self.bot.say(';py for perm in discord.utils.get(ctx.message.server.roles, name="{}").permissions: print(perm)'.format(argument))
def setup(bot):
bot.add_cog(Github(bot))
|
shivaco/selfbot
|
cogs/role_perms.py
|
Python
|
gpl-3.0
| 398 | 0.012563 |
from .command_line_mixins import CommandLineMixins
from .module import Module
from .console_app import ConsoleApp
__all__ = ['CommandLineMixins', 'Module', 'ConsoleApp']
|
mattvperry/ktane-py
|
ktane/__init__.py
|
Python
|
mit
| 173 | 0.005848 |
# -*- coding: utf-8 -*-
'''Caution:
For Python 2.7, an `__init__.py` file in each folder is necessary.
'''
|
JarryShaw/jsntlib
|
src/NTLArchive/__init__.py
|
Python
|
gpl-3.0
| 100 | 0 |
# Copyright 2015 Nicta
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
"""
Module for storing default and static values
"""
local_config_dir = '~/.clusterous'
local_session_data_dir = local_config_dir + '/' + 'session'
local_environment_dir = local_config_dir + '/' + 'environment'
cached_cluster_file = 'cluster_spec.yml'
cached_environment_file = 'environment.yml'
cached_cluster_file_path = local_environment_dir + '/' + cached_cluster_file
cached_environment_file_path = local_environment_dir + '/' + cached_environment_file
remote_environment_dir = '/home/ubuntu/environment'
current_nat_ip_file = local_config_dir + '/' + 'current_controller'
cluster_info_file = local_config_dir + '/' + 'cluster_info.yml'
taggable_name_re = re.compile('^[\w-]+$') # For user supplied strings such as cluster name
taggable_name_max_length = 64 # Arbitrary but ample, keeping in mind AWS keys can be max 127 chars
nat_name_format = '{0}-nat'
nat_name_tag_value = 'nat'
nat_instance_type = 't2.micro'
controller_name_format = '{0}-controller'
controller_name_tag_value = 'controller'
controller_instance_type = 't2.small'
node_name_format = '{0}-node-{1}'
instance_tag_key = '@clusterous'
instance_node_type_tag_key = 'NodeType'
registry_s3_path = '/docker-registry'
central_logging_name_format = '{0}-central-logging'
central_logging_name_tag_value = 'central-logging'
central_logging_instance_type = 't2.small'
default_zone = 'a'
controller_root_volume_size = 50 # GB
cluster_username = 'ubuntu'
cluster_user_home_dir = '/home/ubuntu'
shared_volume_path = '/home/data/'
shared_volume_size = 20 # GB
remote_scripts_dir = 'ansible/remote'
default_cluster_def_filename = 'default_cluster.yml'
remote_host_scripts_dir = 'clusterous'
remote_host_key_file = 'key.pem'
remote_host_vars_file = 'vars.yml'
container_id_script_file = 'container_id.sh'
mesos_port = 5050
marathon_port = 8080
central_logging_port = 8081
nat_ssh_port_forwarding = 22000
# How many seconds to wait for all Marathon applications to reach "started" state
# Currently 30 minutes
app_launch_start_timeout = 1800
app_destroy_timeout = 60
def get_script(filename):
"""
    Take a script filename relative to the scripts directory, return its absolute path
Assumes this file is in Clusterous source root, uses __file__
"""
return '{0}/{1}/{2}'.format(os.path.dirname(__file__), 'scripts', filename)
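# Example (illustrative, not part of the original source): if this module lived
# in /opt/clusterous/clusterous/ (path and filename are hypothetical), then
#   >>> get_script('setup_volume.sh')
#   '/opt/clusterous/clusterous/scripts/setup_volume.sh'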
def get_remote_dir():
"""
Return full path of remote scripts directory
"""
return '{0}/{1}/{2}'.format(os.path.dirname(__file__), 'scripts', remote_scripts_dir)
|
sirca/clusterous
|
clusterous/defaults.py
|
Python
|
apache-2.0
| 3,073 | 0.002603 |
# -*- coding: utf-8 -*-
""" License
Copyright (C) 2013 YunoHost
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses
"""
""" yunohost_hook.py
Manage hooks
"""
import os
import sys
import re
import json
import errno
import subprocess
from shlex import split as arg_split
from moulinette.core import MoulinetteError
from moulinette.utils.log import getActionLogger
hook_folder = '/usr/share/yunohost/hooks/'
custom_hook_folder = '/etc/yunohost/hooks.d/'
logger = getActionLogger('yunohost.hook')
def hook_add(app, file):
"""
    Store hook script to filesystem
Keyword argument:
app -- App to link with
file -- Script to add (/path/priority-file)
"""
path, filename = os.path.split(file)
priority, action = _extract_filename_parts(filename)
try: os.listdir(custom_hook_folder + action)
except OSError: os.makedirs(custom_hook_folder + action)
finalpath = custom_hook_folder + action +'/'+ priority +'-'+ app
os.system('cp %s %s' % (file, finalpath))
os.system('chown -hR admin: %s' % hook_folder)
return { 'hook': finalpath }
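# Example (illustrative, not part of the original source): for a hypothetical
# app 'myapp' and script /tmp/50-post_app_install,
#   hook_add('myapp', '/tmp/50-post_app_install')
# copies the file to /etc/yunohost/hooks.d/post_app_install/50-myapp and
# returns {'hook': '/etc/yunohost/hooks.d/post_app_install/50-myapp'}.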
def hook_remove(app):
"""
Remove hooks linked to a specific app
Keyword argument:
app -- Scripts related to app will be removed
"""
try:
for action in os.listdir(custom_hook_folder):
for script in os.listdir(custom_hook_folder + action):
if script.endswith(app):
os.remove(custom_hook_folder + action +'/'+ script)
except OSError: pass
def hook_list(action, list_by='name', show_info=False):
"""
List available hooks for an action
Keyword argument:
action -- Action name
list_by -- Property to list hook by
show_info -- Show hook information
"""
result = {}
# Process the property to list hook by
if list_by == 'priority':
if show_info:
def _append_hook(d, priority, name, path):
# Use the priority as key and a dict of hooks names
# with their info as value
value = { 'path': path }
try:
d[priority][name] = value
except KeyError:
d[priority] = { name: value }
else:
def _append_hook(d, priority, name, path):
# Use the priority as key and the name as value
try:
d[priority].add(name)
except KeyError:
d[priority] = set([name])
elif list_by == 'name' or list_by == 'folder':
if show_info:
def _append_hook(d, priority, name, path):
# Use the name as key and a list of hooks info - the
# executed ones with this name - as value
l = d.get(name, list())
for h in l:
# Only one priority for the hook is accepted
if h['priority'] == priority:
# Custom hooks overwrite system ones and they
                    # are appended at the end - so overwrite it
if h['path'] != path:
h['path'] = path
return
l.append({ 'priority': priority, 'path': path })
d[name] = l
else:
if list_by == 'name':
result = set()
def _append_hook(d, priority, name, path):
# Add only the name
d.add(name)
else:
raise MoulinetteError(errno.EINVAL, m18n.n('hook_list_by_invalid'))
def _append_folder(d, folder):
# Iterate over and add hook from a folder
for f in os.listdir(folder + action):
path = '%s%s/%s' % (folder, action, f)
priority, name = _extract_filename_parts(f)
_append_hook(d, priority, name, path)
try:
# Append system hooks first
if list_by == 'folder':
result['system'] = dict() if show_info else set()
_append_folder(result['system'], hook_folder)
else:
_append_folder(result, hook_folder)
except OSError:
logger.debug("system hook folder not found for action '%s' in %s",
action, hook_folder)
try:
# Append custom hooks
if list_by == 'folder':
result['custom'] = dict() if show_info else set()
_append_folder(result['custom'], custom_hook_folder)
else:
_append_folder(result, custom_hook_folder)
except OSError:
logger.debug("custom hook folder not found for action '%s' in %s",
action, custom_hook_folder)
return { 'hooks': result }
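# Example (illustrative, not part of the original source): with only the custom
# hook from the hook_add() example in place,
#   hook_list('post_app_install', list_by='priority', show_info=True)
# returns {'hooks': {'50': {'myapp': {'path':
#   '/etc/yunohost/hooks.d/post_app_install/50-myapp'}}}}, while
#   hook_list('post_app_install') returns {'hooks': {'myapp'}} (a set of names).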
def hook_callback(action, hooks=[], args=None):
"""
    Execute all scripts bound to an action
Keyword argument:
action -- Action name
hooks -- List of hooks names to execute
args -- Ordered list of arguments to pass to the script
"""
result = { 'succeed': list(), 'failed': list() }
hooks_dict = {}
# Retrieve hooks
if not hooks:
hooks_dict = hook_list(action, list_by='priority',
show_info=True)['hooks']
else:
hooks_names = hook_list(action, list_by='name',
show_info=True)['hooks']
# Iterate over given hooks names list
for n in hooks:
try:
hl = hooks_names[n]
except KeyError:
raise MoulinetteError(errno.EINVAL,
m18n.n('hook_name_unknown', n))
# Iterate over hooks with this name
for h in hl:
# Update hooks dict
d = hooks_dict.get(h['priority'], dict())
d.update({ n: { 'path': h['path'] }})
hooks_dict[h['priority']] = d
if not hooks_dict:
return result
# Format arguments
if args is None:
args = []
elif not isinstance(args, list):
args = [args]
# Iterate over hooks and execute them
for priority in sorted(hooks_dict):
for name, info in iter(hooks_dict[priority].items()):
filename = '%s-%s' % (priority, name)
try:
hook_exec(info['path'], args=args)
except:
logger.exception("error while executing hook '%s'",
info['path'])
result['failed'].append(filename)
else:
result['succeed'].append(filename)
return result
def hook_check(file):
"""
Parse the script file and get arguments
Keyword argument:
file -- File to check
"""
try:
with open(file[:file.index('scripts/')] + 'manifest.json') as f:
manifest = json.loads(str(f.read()))
except:
raise MoulinetteError(errno.EIO, m18n.n('app_manifest_invalid'))
action = file[file.index('scripts/') + 8:]
if 'arguments' in manifest and action in manifest['arguments']:
return manifest['arguments'][action]
else:
return {}
def hook_exec(file, args=None):
"""
Execute hook from a file with arguments
Keyword argument:
file -- Script to execute
args -- Arguments to pass to the script
"""
from moulinette.utils.stream import NonBlockingStreamReader
from yunohost.app import _value_for_locale
if isinstance(args, list):
arg_list = args
else:
required_args = hook_check(file)
if args is None:
args = {}
arg_list = []
for arg in required_args:
if arg['name'] in args:
if 'choices' in arg and args[arg['name']] not in arg['choices']:
raise MoulinetteError(errno.EINVAL,
m18n.n('hook_choice_invalid', args[arg['name']]))
arg_list.append(args[arg['name']])
else:
if os.isatty(1) and 'ask' in arg:
# Retrieve proper ask string
ask_string = _value_for_locale(arg['ask'])
# Append extra strings
if 'choices' in arg:
ask_string += ' ({:s})'.format('|'.join(arg['choices']))
if 'default' in arg:
ask_string += ' (default: {:s})'.format(arg['default'])
input_string = msignals.prompt(ask_string)
if input_string == '' and 'default' in arg:
input_string = arg['default']
arg_list.append(input_string)
elif 'default' in arg:
arg_list.append(arg['default'])
else:
raise MoulinetteError(errno.EINVAL,
m18n.n('hook_argument_missing', arg['name']))
file_path = "./"
if "/" in file and file[0:2] != file_path:
file_path = os.path.dirname(file)
file = file.replace(file_path +"/", "")
#TODO: Allow python script
arg_str = ''
if arg_list:
# Concatenate arguments and escape them with double quotes to prevent
# bash related issue if an argument is empty and is not the last
arg_str = '\\"{:s}\\"'.format('\\" \\"'.join(arg_list))
msignals.display(m18n.n('executing_script'))
p = subprocess.Popen(
arg_split('su - admin -c "cd \\"{:s}\\" && ' \
'/bin/bash -x \\"{:s}\\" {:s}"'.format(
file_path, file, arg_str)),
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
shell=False)
    # Wrap and get process output
stream = NonBlockingStreamReader(p.stdout)
while True:
line = stream.readline(True, 0.1)
if not line:
# Check if process has terminated
returncode = p.poll()
if returncode is not None:
break
else:
msignals.display(line.rstrip(), 'log')
stream.close()
return returncode
def _extract_filename_parts(filename):
"""Extract hook parts from filename"""
if '-' in filename:
priority, action = filename.split('-', 1)
else:
priority = '50'
action = filename
return priority, action
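# Example (illustrative, not part of the original source):
#   >>> _extract_filename_parts('50-myapp')
#   ('50', 'myapp')
#   >>> _extract_filename_parts('post_app_install')
#   ('50', 'post_app_install')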
|
JocelynDelalande/moulinette-yunohost
|
lib/yunohost/hook.py
|
Python
|
agpl-3.0
| 10,947 | 0.004111 |
import sys # used to read user input from the keyboard (stdin)
import classOne # This imports all the classOne functions
import classTwo # This imports all the classTwo functions
import classThree # This imports all the classThree functions
import classFour # This imports all the classFour functions
TIMEOUT=10 # time allowed for each answer, in seconds
MAX_CLASS=5
QUIZ_INSTRUCTIONS = """
Get ready for the quiz. You will have 10 questions out of which you
will need 8 right to win the prize. You will have """ + str(TIMEOUT) + """ seconds
to answer each question. Press Enter to start."""
def getUsersClass(): #main
''' This function will get the user's class. It will compare the class with MAX_CLASS and
will return False if it is more than the MAX_CLASS. Class also has to be a natural number '''
print("Please tell me which Class you are in? ")
try:
usersClass = int(sys.stdin.readline().strip())
if (usersClass < 1 or usersClass > MAX_CLASS) :
print("No Quiz available for Class " + str(usersClass))
return False
else :
return usersClass
except :
print("Exception")
return False
if __name__ == '__main__':
while(True) :
usersClass = getUsersClass()
if (usersClass != False) :
break
print(QUIZ_INSTRUCTIONS)
sys.stdin.readline()
if (usersClass == 1) :
classOne.classOneQuiz()
elif (usersClass == 2) :
classTwo.classTwoQuiz()
elif(usersClass == 3):
classThree.classThreeQuiz()
elif(usersClass == 4):
classFour.classFourQuiz()
|
nischal2002/m-quiz-2016
|
quiz.py
|
Python
|
mit
| 1,704 | 0.016432 |
# -*- coding: utf-8 -*-
import os
import getpass
import json
from qgis.PyQt.QtCore import QSettings
from qgis.PyQt.QtSql import QSqlDatabase, QSqlQuery
from qgis.PyQt.QtWidgets import QMessageBox
from qgis.core import *
import qgis.core
class PostNAS_AccessControl:
def __init__(self, username = None):
if(username == None):
self.username = getpass.getuser().lower()
else:
self.username = username.lower()
self.access = None
self.name = None
self.db = self.__loadDB()
def setUsername(self,username):
self.username = username
if(self.checkUserExists()):
sql = "SELECT name,access FROM public.postnas_search_access_control WHERE lower(username) = :username"
self.__openDB()
queryLoadUserData = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
queryLoadUserData.prepare(sql)
queryLoadUserData.bindValue(":username",self.getUsername())
queryLoadUserData.exec_()
if(queryLoadUserData.size() == 1):
while(queryLoadUserData.next()):
self.setName(queryLoadUserData.value(queryLoadUserData.record().indexOf("name")))
self.setAccess(queryLoadUserData.value(queryLoadUserData.record().indexOf("access")))
def setAccess(self,access):
self.access = access
def setName(self,name):
self.name = name
def getUsername(self):
return self.username.lower()
def getAccess(self):
return self.access
def getName(self):
return self.name
def __checkUsername(self):
pass
def checkAccessControlIsActive(self):
if os.path.isfile(os.path.dirname(os.path.realpath(__file__)) + '\config.json'):
with open(os.path.dirname(os.path.realpath(__file__)) + '\config.json') as config_file:
config = json.load(config_file)
accessControl = config['accessControl']
pass
else:
settings = QSettings("PostNAS", "PostNAS-Suche")
accessControl = settings.value("accessControl")
if(accessControl == 1):
if (self.checkAccessTable() == False):
accessControl = 0
else:
if (self.checkAccessTable() == True):
accessControl = 1
if os.path.isfile(os.path.dirname(os.path.realpath(__file__)) + '\config.json'):
config['accessControl'] = accessControl
with open(os.path.dirname(os.path.realpath(__file__)) + '\config.json', 'w') as config_file:
json.dump(config, config_file)
else:
settings.setValue("accessControl", accessControl)
if(accessControl == 1):
return True
else:
return False
def checkAccessTable(self):
sql = "SELECT table_name FROM information_schema.tables WHERE table_name = 'postnas_search_access_control'";
self.__openDB()
query = QSqlQuery(self.db)
query.exec_(sql)
if(query.size() > 0):
return True
else:
return False
def createAccessTable(self):
file_path = os.path.dirname(os.path.realpath(__file__)) + "/create_accesstable/create_table.sql"
sql = open(file_path).read()
self.__openDB()
query = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
query.exec_(sql)
if(query.lastError().number() == -1):
return True
else:
return False
def checkAccessTableHasAdmin(self):
sql = "SELECT lower(username) FROM public.postnas_search_access_control WHERE access = 0";
self.__openDB()
query = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
query.exec_(sql)
if(query.size() > 0):
return True
else:
return False
def insertUser(self):
if(self.getUsername() != None):
self.__openDB()
sql = "INSERT INTO public.postnas_search_access_control (username,name,access) VALUES (:username,:name,:access)"
query = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
query.prepare(sql)
query.bindValue(":username",self.getUsername().lower())
query.bindValue(":name",self.name)
query.bindValue(":access",self.access)
query.exec_()
if(query.lastError().number() == -1):
return True
else:
return False
else:
return False
def insertAdminUser(self):
self.access = 0
return self.insertUser()
def updateUser(self,username_old):
if(self.getUsername() != None):
self.__openDB()
sql = "UPDATE public.postnas_search_access_control SET username = :username, name = :name, access = :access WHERE username = :username_old"
query = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
query.prepare(sql)
query.bindValue(":username",self.getUsername().lower())
query.bindValue(":username_old",username_old)
query.bindValue(":name",self.name)
query.bindValue(":access",self.access)
query.exec_()
if(query.lastError().number() == -1):
return True
else:
QgsMessageLog.logMessage("Datenbankfehler beim Update: " + query.lastError().text(),'PostNAS-Suche', Qgis.Critical)
return False
else:
return False
def checkUserIsAdmin(self):
if(self.getUsername() != None):
self.__openDB()
sql = "SELECT lower(username) as username FROM public.postnas_search_access_control WHERE access = 0 AND lower(username) = :username"
query = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
query.prepare(sql)
query.bindValue(":username",self.getUsername())
query.exec_()
if(query.lastError().number() == -1):
if(query.size() > 0):
return True
else:
return False
else:
return False
else:
return False
def checkUserHasEigentuemerAccess(self):
if(self.getUsername() != None):
self.__openDB()
sql = "SELECT lower(username) as username FROM public.postnas_search_access_control WHERE access IN (0,1) AND lower(username) = :username"
queryEigentuemerAccess = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
queryEigentuemerAccess.prepare(sql)
queryEigentuemerAccess.bindValue(":username",self.getUsername())
queryEigentuemerAccess.exec_()
if(queryEigentuemerAccess.lastError().number() == -1):
if(queryEigentuemerAccess.size() > 0):
return True
else:
return False
else:
return False
else:
return False
def loadUserAccessTable(self):
sql = "SELECT lower(username) as username,name,bezeichnung FROM public.postnas_search_access_control LEFT JOIN public.postnas_search_accessmode ON postnas_search_access_control.access = postnas_search_accessmode.id";
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
self.__openDB()
queryLoadAccessTable = QSqlQuery(self.db)
queryLoadAccessTable.prepare(sql)
queryLoadAccessTable.exec_()
results = []
if(queryLoadAccessTable.size() > 0):
while(queryLoadAccessTable.next()):
list = {'username': queryLoadAccessTable.value(queryLoadAccessTable.record().indexOf("username")),
'name': queryLoadAccessTable.value(queryLoadAccessTable.record().indexOf("name")),
'access': queryLoadAccessTable.value(queryLoadAccessTable.record().indexOf("bezeichnung"))}
results.append(list)
return results
def deleteUser(self):
sql = "DELETE FROM public.postnas_search_access_control WHERE lower(username) = :username"
self.__openDB()
queryDeleteUser = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
queryDeleteUser.prepare(sql)
queryDeleteUser.bindValue(":username",self.getUsername())
queryDeleteUser.exec_()
if(queryDeleteUser.lastError().number() == -1):
return True
else:
QgsMessageLog.logMessage("Datenbankfehler beim Löschen: " + queryDeleteUser.lastError().text(), 'PostNAS-Suche',Qgis.Critical)
return False
def getAccessModes(self):
sql = "SELECT id,bezeichnung FROM public.postnas_search_accessmode"
self.__openDB()
queryLoadAccessModes = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
queryLoadAccessModes.prepare(sql)
queryLoadAccessModes.exec_()
results = []
if(queryLoadAccessModes.size() > 0):
while(queryLoadAccessModes.next()):
list = {'id': queryLoadAccessModes.value(queryLoadAccessModes.record().indexOf("id")),
'bezeichnung': queryLoadAccessModes.value(queryLoadAccessModes.record().indexOf("bezeichnung"))}
results.append(list)
return results
def checkUserExists(self):
sql = "SELECT lower(username) as username FROM public.postnas_search_access_control WHERE lower(username) = :username"
self.__openDB()
queryCheckUserExists = QSqlQuery(self.db)
if (self.dbSchema.lower() != "public"):
sql = sql.replace("public.", self.dbSchema + ".")
queryCheckUserExists.prepare(sql)
queryCheckUserExists.bindValue(":username",self.getUsername())
queryCheckUserExists.exec_()
if(queryCheckUserExists.lastError().number() == -1):
if(queryCheckUserExists.size() > 0):
return True
else:
return False
else:
return False
def __loadDB(self):
if os.path.isfile(os.path.dirname(os.path.realpath(__file__)) + '\config.json'):
with open(os.path.dirname(os.path.realpath(__file__)) + '\config.json') as config_file:
config = json.load(config_file)
dbHost = config['db']['host']
dbDatabasename = config['db']['database']
self.dbSchema = config['db']['schema']
dbPort = config['db']['port']
dbUsername = config['db']['user']
dbPassword = config['db']['password']
authcfg = config['authcfg']
else:
settings = QSettings("PostNAS", "PostNAS-Suche")
dbHost = settings.value("host", "")
dbDatabasename = settings.value("dbname", "")
self.dbSchema = settings.value("schema", "public")
dbPort = settings.value("port", "5432")
dbUsername = settings.value("user", "")
dbPassword = settings.value("password", "")
authcfg = settings.value( "authcfg", "" )
if authcfg != "" and hasattr(qgis.core,'QgsAuthManager'):
amc = qgis.core.QgsAuthMethodConfig()
if hasattr(qgis.core, "QGis"):
qgis.core.QgsAuthManager.instance().loadAuthenticationConfig( authcfg, amc, True)
else:
QgsApplication.instance().authManager().loadAuthenticationConfig( authcfg, amc, True)
dbUsername = amc.config( "username", dbUsername )
dbPassword = amc.config( "password", dbPassword )
db = QSqlDatabase.addDatabase("QPSQL")
db.setHostName(dbHost)
if(dbPort == ""):
dbPort = "5432"
db.setPort(int(dbPort))
db.setDatabaseName(dbDatabasename)
db.setUserName(dbUsername)
db.setPassword(dbPassword)
return db
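    # Illustrative sketch (not part of the original source): the optional
    # config.json read above is assumed to look roughly like
    #   {"accessControl": 1,
    #    "authcfg": "",
    #    "db": {"host": "...", "database": "...", "schema": "public",
    #           "port": "5432", "user": "...", "password": "..."}}
    # with the QSettings("PostNAS", "PostNAS-Suche") keys used as fallback.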
def __openDB(self):
if(self.db.isOpen() == False):
self.db.open()
|
Kreis-Unna/PostNAS_Search
|
PostNAS_AccessControl.py
|
Python
|
gpl-2.0
| 12,848 | 0.006305 |
from behave import given, when, then
from slackrest.app import SlackrestApp
from slackrest.command import Visibility, Method
import json
class GiveMeAReply:
pattern = '!givemeareply'
url_format = '/reply'
visibility = Visibility.Any
body = None
method = Method.GET
class GiveMeANotification:
pattern = '!givemeanotification'
url_format = '/notify'
visibility = Visibility.Any
body = None
method = Method.GET
class MakeAPost:
pattern = '!makeapost'
url_format = '/makeapost'
visibility = Visibility.Any
method = Method.POST
@classmethod
def body(cls, **kwargs):
return json.dumps({'param': 'value'})
commands = [GiveMeAReply, GiveMeANotification, MakeAPost]
@given(u'Slackrest is connected to Slack')
def step_impl(context):
context.app = SlackrestApp(context.chat_url, commands, context.notification_channel_id)
context.app.run_async()
context.slack_events.await_event(event_type='login')
@when(u'I send "{message}" from channel "{channel_id}"')
def step_impl(context, message, channel_id):
user_id = 'U123456'
msg = {'type': 'message', 'text': message, 'channel': channel_id, 'user': user_id}
context.slack_events.send_message(msg)
@then(u'I should get a message in channel "{channel_id}"')
def step_impl(context, channel_id):
event = context.slack_events.await_event(event_type='message')
assert event['message']['channel'] == channel_id
@then(u'I should get a message containing "{msg}"')
def step_impl(context, msg):
event = context.slack_events.await_event(event_type='message')
print("Got message containing '{}'".format(event['message']['text']))
print("Got message containing '{}'".format(event['message']['text']))
assert msg in event['message']['text']
@given(u'I set the notification channel to "{notification_channel_id}"')
def step_impl(context, notification_channel_id):
context.notification_channel_id = notification_channel_id
@given(u'I map "!givemeareply" to /reply')
def step_impl(context):
pass
@given(u'I map "!givemeanotification" to /notify')
def step_impl(context):
pass
@given(u'the chat bot is at {url}')
def step_impl(context, url):
context.chat_url = url
|
dandeliondeathray/niancat-micro
|
slackrest/features/steps/chat.py
|
Python
|
apache-2.0
| 2,243 | 0.001337 |
# -*- coding: utf-8 -*-
#
# django-getpaid documentation build configuration file, created by
# sphinx-quickstart on Mon Jul 16 21:16:46 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
import django
import getpaid
sys.path.append(
os.path.join(os.path.join(os.path.dirname(__file__), os.pardir), os.pardir)
)
sys.path.append(
os.path.join(
os.path.join(os.path.join(os.path.dirname(__file__), os.pardir), os.pardir),
"example",
)
)
sys.path.append(
os.path.join(
os.path.join(os.path.join(os.path.dirname(__file__), os.pardir), os.pardir),
"django-getpaid",
)
)
sys.path.insert(0, os.path.abspath("../example"))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example.settings")
django.setup()
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx_rtd_theme",
"sphinx.ext.autosectionlabel",
]
autodoc_member_order = "bysource"
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "django-getpaid"
copyright = "2012-2013 Krzysztof Dorosz, 2013-2020 Dominik Kozaczko"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "2.2"
# The full version, including alpha/beta/rc tags.
release = getpaid.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = "django-getpaiddoc"
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
(
"index",
"django-getpaid.tex",
"django-getpaid Documentation",
"Sunscrapers",
"manual",
)
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
("index", "django-getpaid", "django-getpaid Documentation", ["Sunscrapers"], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
"index",
"django-getpaid",
"django-getpaid Documentation",
"Sunscrapers",
"django-getpaid",
"One line description of project.",
"Miscellaneous",
)
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
|
cypreess/django-getpaid
|
docs/conf.py
|
Python
|
mit
| 8,687 | 0.001381 |
# Script loads 3d data from text file (after Gwyddion text importing of AFM file)
import re
import numpy as np
def ReadData(file_name):
'''
Load 3d data array from a text file. The text file is imported from Gwyddion (free SPM data analysis software).
Parameters
----------
file_name : str
Relative path to a text file
Returns
-------
data : ndarray
MxM matrix of SPM data
width : float
Width of image (in meters)
height : float
Height of image (in meters)
pixel_height : float
Height of one pixel (in meters)
height_unit : float
Measurement unit coefficient (in unit/meter)
'''
comments = [] # List of comments in text file
f = open(file_name)
for line in f:
if line.startswith('#'):
comments.append(line)
else:
break
f.close()
rex = r"(\d+[.]\d+)\s(\S+)" # regular expression for image size searching
width_match = re.search(rex, comments[1])
height_match = re.search(rex, comments[2])
if (width_match.group(2) == 'µm') and (height_match.group(2) == 'µm'):
width_unit = 1e-6
height_unit = 1e-6
else:
raise ValueError("Attention! The measurement units aren't micrometers!") # My data was only in micrometers :)
width = float(width_match.group(1)) * width_unit
height = float(height_match.group(1)) * height_unit
data = np.genfromtxt(file_name) # NumPy function for data importing
M = np.shape(data)[0] # ---!!--- Needs to add rectangular area ---!!---
pixel_height = height/M
return data, width, height, pixel_height, height_unit
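# Minimal usage sketch: 'example_scan.txt' is a hypothetical file name for a
# Gwyddion text export whose sizes are given in micrometers, as required above.
if __name__ == '__main__':
    data, width, height, pixel_height, height_unit = ReadData('example_scan.txt')
    # Report the grid size and physical dimensions recovered from the header
    print('Image: {} x {} pixels, {:.3e} x {:.3e} m'.format(
        data.shape[1], data.shape[0], width, height))
    print('Pixel height: {:.3e} m'.format(pixel_height))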
|
antonsergeev/tesselect
|
gwyddion_import.py
|
Python
|
mit
| 1,734 | 0.008661 |
# -------------------------------- Database models----------------------------------------------------------------------
import sys, os
import sqlalchemy
from sqlalchemy import create_engine
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
import secrets
import settings
MYSQL_USERNAME = secrets.MYSQL_USERNAME
MYSQL_PASSWORD = secrets.MYSQL_PASSWORD
MYSQL_HOSTNAME = secrets.MYSQL_HOSTNAME
MYSQL_DATABASE_NAME = secrets.MYSQL_DATABASE_NAME
MYSQL_HOST_PORT = secrets.MYSQL_HOST_PORT
MAX_MESSAGE_SIZE = settings.MAX_MESSAGE_SIZE
database_url = 'mysql://{}:{}@{}:{}/{}'.format(MYSQL_USERNAME, MYSQL_PASSWORD, MYSQL_HOSTNAME, MYSQL_HOST_PORT,
MYSQL_DATABASE_NAME)
engine = create_engine(database_url)
from sqlalchemy import Column, Integer, String, Boolean, DateTime, Text
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import ForeignKey
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class ModelManager(object):
"""
Model manager
"""
@classmethod
def create_session(cls, engine):
"""
create a session based
:param engine: engine object
:return: returns the created session object
"""
Session = sessionmaker(bind=engine)
session = Session()
return session
@classmethod
def add_to_session(cls, session, obj):
"""
add the object to the session
:param obj:
:param session: session object
:return:
"""
session.add(obj)
@classmethod
def commit_session(cls, session):
"""
commit to session
:param session:
:return:
"""
session.commit()
@classmethod
def delete_from_session(cls, session, obj):
"""
delete the object from the session
:param session:
:return:
"""
session.delete(obj)
@classmethod
def rollback_session(cls, session):
"""
rollback the current session
:param session:
:return:
"""
session.rollback()
@classmethod
def close_session(cls, session):
"""
close the current session
:param session:
:return:
"""
session.close()
class Queue(Base):
"""
Queues model class
"""
__tablename__ = "Queue"
id = Column(Integer, primary_key=True)
name = Column(String(20), unique=True)
created_timestamp = Column(DateTime)
message = relationship("Message", back_populates="queue")
def __repr__(self):
"""
representation of the Queue class
:return:
"""
return "<Queue (name: {}, created_timestamp: {})>".format(self.name, self.created_timestamp)
class Message(Base):
"""
Message model class
"""
__tablename__ = "Message"
id = Column(Integer, primary_key=True)
queue_id = Column(Integer, ForeignKey('Queue.id'))
is_fetched = Column(Boolean, default=False)
content = Column(Text)
publish_timestamp = Column(DateTime)
consumed_timestamp = Column(DateTime)
queue = relationship("Queue", back_populates="message")
# The consumed_timestamp should ideally have a null value for default but that is not feasible here so
# for checking we will first check whether the is_fetched value is true, if so we consider the consumed_timestamp
# as the date and time when the message was dequeued.
def __repr__(self):
"""
representation of the Message class
:return:
"""
return "<Message (queue_id: {}, is_fetched: {}, content: {}...{}, publish_timestamp: {}, " \
"consumed_timestamp: {})>".format(self.queue_id, self.is_fetched, self.content[:10],self.content[10:],
self.publish_timestamp, self.consumed_timestamp)
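# Minimal usage sketch, assuming the MySQL database configured in secrets.py is
# reachable and the tables already exist; the queue name 'demo-queue' is purely
# illustrative.
if __name__ == '__main__':
    from datetime import datetime
    session = ModelManager.create_session(engine)
    # Create a queue row and persist it through the ModelManager helpers
    demo_queue = Queue(name='demo-queue', created_timestamp=datetime.now())
    ModelManager.add_to_session(session, demo_queue)
    ModelManager.commit_session(session)
    print(session.query(Queue).filter_by(name='demo-queue').first())
    ModelManager.close_session(session)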
|
MQFN/MQFN
|
bbmq/server/models.py
|
Python
|
apache-2.0
| 3,993 | 0.004758 |
"""Module for parsing and testing package version predicate strings.
"""
import re
import distutils.version
import operator
re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)",
re.ASCII)
# (package) (rest)
re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses
re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$")
# (comp) (version)
def splitUp(pred):
"""Parse a single version comparison.
Return (comparison string, StrictVersion)
"""
res = re_splitComparison.match(pred)
if not res:
raise ValueError("bad package restriction syntax: %r" % pred)
comp, verStr = res.groups()
return (comp, distutils.version.StrictVersion(verStr))
compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq,
">": operator.gt, ">=": operator.ge, "!=": operator.ne}
class VersionPredicate:
"""Parse and test package version predicates.
>>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)')
The `name` attribute provides the full dotted name that is given::
>>> v.name
'pyepat.abc'
The str() of a `VersionPredicate` provides a normalized
human-readable version of the expression::
>>> print(v)
pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3)
The `satisfied_by()` method can be used to determine with a given
version number is included in the set described by the version
restrictions::
>>> v.satisfied_by('1.1')
True
>>> v.satisfied_by('1.4')
True
>>> v.satisfied_by('1.0')
False
>>> v.satisfied_by('4444.4')
False
>>> v.satisfied_by('1555.1b3')
False
`VersionPredicate` is flexible in accepting extra whitespace::
>>> v = VersionPredicate(' pat( == 0.1 ) ')
>>> v.name
'pat'
>>> v.satisfied_by('0.1')
True
>>> v.satisfied_by('0.2')
False
If any version numbers passed in do not conform to the
restrictions of `StrictVersion`, a `ValueError` is raised::
>>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)')
Traceback (most recent call last):
...
ValueError: invalid version number '1.2zb3'
    If the module or package name given does not conform to what's
allowed as a legal module or package name, `ValueError` is
raised::
>>> v = VersionPredicate('foo-bar')
Traceback (most recent call last):
...
ValueError: expected parenthesized list: '-bar'
>>> v = VersionPredicate('foo bar (12.21)')
Traceback (most recent call last):
...
ValueError: expected parenthesized list: 'bar (12.21)'
"""
def __init__(self, versionPredicateStr):
"""Parse a version predicate string.
"""
# Fields:
# name: package name
# pred: list of (comparison string, StrictVersion)
versionPredicateStr = versionPredicateStr.strip()
if not versionPredicateStr:
raise ValueError("empty package restriction")
match = re_validPackage.match(versionPredicateStr)
if not match:
raise ValueError("bad package name in %r" % versionPredicateStr)
self.name, paren = match.groups()
paren = paren.strip()
if paren:
match = re_paren.match(paren)
if not match:
raise ValueError("expected parenthesized list: %r" % paren)
str = match.groups()[0]
self.pred = [splitUp(aPred) for aPred in str.split(",")]
if not self.pred:
raise ValueError("empty parenthesized list in %r"
% versionPredicateStr)
else:
self.pred = []
def __str__(self):
if self.pred:
seq = [cond + " " + str(ver) for cond, ver in self.pred]
return self.name + " (" + ", ".join(seq) + ")"
else:
return self.name
def satisfied_by(self, version):
"""True if version is compatible with all the predicates in self.
The parameter version must be acceptable to the StrictVersion
constructor. It may be either a string or StrictVersion.
"""
for cond, ver in self.pred:
if not compmap[cond](version, ver):
return False
return True
_provision_rx = None
def split_provision(value):
"""Return the name and optional version number of a provision.
The version number, if given, will be returned as a `StrictVersion`
instance, otherwise it will be `None`.
>>> split_provision('mypkg')
('mypkg', None)
>>> split_provision(' mypkg( 1.2 ) ')
('mypkg', StrictVersion ('1.2'))
"""
global _provision_rx
if _provision_rx is None:
_provision_rx = re.compile(
"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$",
re.ASCII)
value = value.strip()
m = _provision_rx.match(value)
if not m:
raise ValueError("illegal provides specification: %r" % value)
ver = m.group(2) or None
if ver:
ver = distutils.version.StrictVersion(ver)
return m.group(1), ver
|
Orav/kbengine
|
kbe/src/lib/python/Lib/distutils/versionpredicate.py
|
Python
|
lgpl-3.0
| 5,298 | 0.002643 |
import logging
import os
import sys
import time
import re
import shutil
import itertools
import ckan.plugins as p
from pylons import config
from ckan.lib.cli import CkanCommand
from ckan.lib.helpers import OrderedDict
REQUESTS_HEADER = {'content-type': 'application/json'}
class Archiver(CkanCommand):
'''
Download and save copies of all package resources.
The result of each download attempt is saved to the CKAN task_status table,
so the information can be used later for QA analysis.
Usage:
paster archiver init
- Creates the database table archiver needs to run
paster archiver update [{package-name/id}|{group-name/id}]
- Archive all resources or just those belonging to a specific
package or group, if specified
paster archiver clean-status
- Cleans the TaskStatus records that contain the status of each
archived resource, whether it was successful or not, with errors.
It does not change the cache_url etc. in the Resource
paster archiver clean-cached-resources
- Removes all cache_urls and other references to resource files on
disk.
paster archiver view [{dataset name/id}]
            - Views archival info, in general or, if you specify one, about
a particular dataset\'s resources.
paster archiver report [outputfile]
- Generates a report on orphans, either resources where the path
does not exist, or files on disk that don't have a corresponding
orphan. The outputfile parameter is the name of the CSV output
from running the report
paster archiver delete-orphans [outputfile]
- Deletes orphans that are files on disk with no corresponding
resource. This uses the report command and will write out a
report to [outputfile]
paster archiver migrate-archive-dirs
- Migrate the layout of the archived resource directories.
Previous versions of ckanext-archiver stored resources on disk
at: {resource-id}/filename.csv and this version puts them at:
{2-chars-of-resource-id}/{resource-id}/filename.csv
Running this moves them to the new locations and updates the
cache_url on each resource to reflect the new location.
paster archiver migrate
- Updates the database schema to include new fields.
'''
# TODO
# paster archiver clean-files
# - Remove all archived resources
summary = __doc__.split('\n')[0]
usage = __doc__
min_args = 0
max_args = 2
def __init__(self, name):
super(Archiver, self).__init__(name)
self.parser.add_option('-q', '--queue',
action='store',
dest='queue',
help='Send to a particular queue')
def command(self):
"""
Parse command line arguments and call appropriate method.
"""
if not self.args or self.args[0] in ['--help', '-h', 'help']:
print self.usage
sys.exit(1)
cmd = self.args[0]
self._load_config()
# Initialise logger after the config is loaded, so it is not disabled.
self.log = logging.getLogger(__name__)
if cmd == 'update':
self.update()
elif cmd == 'clean-status':
self.clean_status()
elif cmd == 'clean-cached-resources':
self.clean_cached_resources()
elif cmd == 'view':
if len(self.args) == 2:
self.view(self.args[1])
else:
self.view()
elif cmd == 'report':
if len(self.args) != 2:
self.log.error('Command requires a parameter, the name of the output')
return
self.report(self.args[1], delete=False)
elif cmd == 'delete-orphans':
if len(self.args) != 2:
self.log.error('Command requires a parameter, the name of the output')
return
self.report(self.args[1], delete=True)
elif cmd == 'init':
import ckan.model as model
from ckanext.archiver.model import init_tables
init_tables(model.meta.engine)
self.log.info('Archiver tables are initialized')
elif cmd == 'migrate-archive-dirs':
self.migrate_archive_dirs()
elif cmd == 'migrate':
self.migrate()
else:
self.log.error('Command %s not recognized' % (cmd,))
def update(self):
from ckan import model
from ckanext.archiver import lib
packages = []
resources = []
if len(self.args) > 1:
for arg in self.args[1:]:
# try arg as a group id/name
group = model.Group.get(arg)
if group:
if group.is_organization:
packages.extend(
model.Session.query(model.Package)
.filter_by(owner_org=group.id))
else:
packages.extend(group.packages(with_private=True))
if not self.options.queue:
self.options.queue = 'bulk'
continue
# try arg as a package id/name
pkg = model.Package.get(arg)
if pkg:
packages.append(pkg)
if not self.options.queue:
self.options.queue = 'priority'
continue
# try arg as a resource id
res = model.Resource.get(arg)
if res:
resources.append(res)
if not self.options.queue:
self.options.queue = 'priority'
continue
else:
self.log.error('Could not recognize as a group, package '
'or resource: %r', arg)
sys.exit(1)
else:
# all packages
pkgs = model.Session.query(model.Package)\
.filter_by(state='active')\
.order_by('name').all()
packages.extend(pkgs)
if not self.options.queue:
self.options.queue = 'bulk'
if packages:
self.log.info('Datasets to archive: %d', len(packages))
if resources:
self.log.info('Resources to archive: %d', len(resources))
if not (packages or resources):
self.log.error('No datasets or resources to process')
sys.exit(1)
self.log.info('Queue: %s', self.options.queue)
for package in packages:
if p.toolkit.check_ckan_version(max_version='2.2.99'):
# earlier CKANs had ResourceGroup
pkg_resources = \
[res for res in
itertools.chain.from_iterable(
(rg.resources_all
for rg in package.resource_groups_all)
)
if res.state == 'active']
else:
pkg_resources = \
[res for res in package.resources_all
if res.state == 'active']
self.log.info('Queuing dataset %s (%s resources)',
package.name, len(pkg_resources))
lib.create_archiver_package_task(package, self.options.queue)
time.sleep(0.1) # to try to avoid Redis getting overloaded
for resource in resources:
if p.toolkit.check_ckan_version(max_version='2.2.99'):
package = resource.resource_group.package
else:
package = resource.package
self.log.info('Queuing resource %s/%s', package.name, resource.id)
lib.create_archiver_resource_task(resource, self.options.queue)
time.sleep(0.05) # to try to avoid Redis getting overloaded
self.log.info('Completed queueing')
def view(self, package_ref=None):
from ckan import model
from ckanext.archiver.model import Archival
r_q = model.Session.query(model.Resource).filter_by(state='active')
print 'Resources: %i total' % r_q.count()
a_q = model.Session.query(Archival)
print 'Archived resources: %i total' % a_q.count()
num_with_cache_url = a_q.filter(Archival.cache_url!='').count()
print ' %i with cache_url' % num_with_cache_url
last_updated_res = a_q.order_by(Archival.updated.desc()).first()
print 'Latest archival: %s' % (last_updated_res.updated.strftime('%Y-%m-%d %H:%M') if last_updated_res else '(no)')
if package_ref:
pkg = model.Package.get(package_ref)
print 'Package %s %s' % (pkg.name, pkg.id)
for res in pkg.resources:
print 'Resource %s' % res.id
for archival in a_q.filter_by(resource_id=res.id):
print '* %r' % archival
def clean_status(self):
from ckan import model
from ckanext.archiver.model import Archival
print 'Before:'
self.view()
q = model.Session.query(Archival)
q.delete()
model.Session.commit()
print 'After:'
self.view()
def clean_cached_resources(self):
from ckan import model
from ckanext.archiver.model import Archival
print 'Before:'
self.view()
q = model.Session.query(Archival).filter(Archival.cache_url != '')
archivals = q.all()
num_archivals = len(archivals)
progress = 0
for archival in archivals:
archival.cache_url = None
archival.cache_filepath = None
archival.size = None
archival.mimetype = None
archival.hash = None
progress += 1
if progress % 1000 == 0:
print 'Done %i/%i' % (progress, num_archivals)
model.Session.commit()
model.Session.commit()
model.Session.remove()
print 'After:'
self.view()
def report(self, output_file, delete=False):
"""
Generates a report containing orphans (either files or resources)
"""
import csv
from ckan import model
archive_root = config.get('ckanext-archiver.archive_dir')
if not archive_root:
self.log.error("Could not find archiver root")
return
# We'll use this to match the UUID part of the path
uuid_re = re.compile(".*([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}).*")
not_cached_active = 0
not_cached_deleted = 0
file_not_found_active = 0
file_not_found_deleted = 0
perm_error = 0
file_no_resource = 0
with open(output_file, "w") as f:
writer = csv.writer(f)
writer.writerow(["Resource ID", "Filepath", "Problem"])
resources = {}
for resource in model.Session.query(model.Resource).all():
resources[resource.id] = True
# Check the resource's cached_filepath
fp = resource.extras.get('cache_filepath')
if fp is None:
if resource.state == 'active':
not_cached_active += 1
else:
not_cached_deleted += 1
writer.writerow([resource.id, str(resource.extras), "Resource not cached: {0}".format(resource.state)])
continue
# Check that the cached file is there and readable
if not os.path.exists(fp):
if resource.state == 'active':
file_not_found_active += 1
else:
file_not_found_deleted += 1
writer.writerow([resource.id, fp.encode('utf-8'), "File not found: {0}".format(resource.state)])
continue
try:
os.stat(fp)
except OSError:
perm_error += 1
writer.writerow([resource.id, fp.encode('utf-8'), "File not readable"])
continue
# Iterate over the archive root and check each file by matching the
# resource_id part of the path to the resources dict
for root, _, files in os.walk(archive_root):
for filename in files:
archived_path = os.path.join(root, filename)
m = uuid_re.match(archived_path)
if not m:
writer.writerow([resource.id, archived_path, "Malformed path (no UUID)"])
continue
if not resources.get(m.groups(0)[0].strip(), False):
file_no_resource += 1
if delete:
try:
os.unlink(archived_path)
self.log.info("Unlinked {0}".format(archived_path))
os.rmdir(root)
self.log.info("Unlinked {0}".format(root))
writer.writerow([m.groups(0)[0], archived_path, "Resource not found, file deleted"])
except Exception, e:
self.log.error("Failed to unlink {0}: {1}".format(archived_path,e))
else:
writer.writerow([m.groups(0)[0], archived_path, "Resource not found"])
continue
print "General info:"
print " Permission error reading file: {0}".format(perm_error)
print " file on disk but no resource: {0}".format(file_no_resource)
print " Total resources: {0}".format(model.Session.query(model.Resource).count())
print "Active resource info:"
print " No cache_filepath: {0}".format(not_cached_active)
print " cache_filepath not on disk: {0}".format(file_not_found_active)
print "Deleted resource info:"
print " No cache_filepath: {0}".format(not_cached_deleted)
print " cache_filepath not on disk: {0}".format(file_not_found_deleted)
def migrate(self):
""" Adds any missing columns to the database table for Archival by
checking the schema and adding those that are missing.
If you wish to add a column, add the column name and sql
statement to MIGRATIONS_ADD which will check that the column is
not present before running the query.
If you wish to modify or delete a column, add the column name and
query to the MIGRATIONS_MODIFY which only runs if the column
does exist.
"""
from ckan import model
MIGRATIONS_ADD = OrderedDict({
"etag": "ALTER TABLE archival ADD COLUMN etag character varying",
"last_modified": "ALTER TABLE archival ADD COLUMN last_modified character varying"
})
MIGRATIONS_MODIFY = OrderedDict({
})
q = "select column_name from INFORMATION_SCHEMA.COLUMNS where table_name = 'archival';"
current_cols = list([m[0] for m in model.Session.execute(q)])
for k, v in MIGRATIONS_ADD.iteritems():
if not k in current_cols:
self.log.info(u"Adding column '{0}'".format(k))
self.log.info(u"Executing '{0}'".format(v))
model.Session.execute(v)
model.Session.commit()
for k, v in MIGRATIONS_MODIFY.iteritems():
if k in current_cols:
self.log.info(u"Removing column '{0}'".format(k))
self.log.info(u"Executing '{0}'".format(v))
model.Session.execute(v)
model.Session.commit()
self.log.info("Migrations complete")
def migrate_archive_dirs(self):
from ckan import model
from ckan.logic import get_action
site_user = get_action('get_site_user')(
{'model': model, 'ignore_auth': True, 'defer_commit': True}, {}
)
site_url_base = config['ckanext-archiver.cache_url_root'].rstrip('/')
old_dir_regex = re.compile(r'(.*)/([a-f0-9\-]+)/([^/]*)$')
new_dir_regex = re.compile(r'(.*)/[a-f0-9]{2}/[a-f0-9\-]{36}/[^/]*$')
for resource in model.Session.query(model.Resource).\
filter(model.Resource.state != model.State.DELETED):
if not resource.cache_url or resource.cache_url == 'None':
continue
if new_dir_regex.match(resource.cache_url):
print 'Resource with new url already: %s' % resource.cache_url
continue
match = old_dir_regex.match(resource.cache_url)
if not match:
print 'ERROR Could not match url: %s' % resource.cache_url
continue
url_base, res_id, filename = match.groups()
# check the package isn't deleted
# Need to refresh the resource's session
resource = model.Session.query(model.Resource).get(resource.id)
if p.toolkit.check_ckan_version(max_version='2.2.99'):
package = None
if resource.resource_group:
package = resource.resource_group.package
else:
package = resource.package
if package and package.state == model.State.DELETED:
print 'Package is deleted'
continue
if url_base != site_url_base:
print 'ERROR Base URL is incorrect: %r != %r' % (url_base, site_url_base)
continue
# move the file
filepath_base = config['ckanext-archiver.archive_dir']
old_path = os.path.join(filepath_base, resource.id)
new_dir = os.path.join(filepath_base, resource.id[:2])
new_path = os.path.join(filepath_base, resource.id[:2], resource.id)
new_filepath = os.path.join(new_path, filename)
if not os.path.exists(new_dir):
os.mkdir(new_dir)
if os.path.exists(new_path) and not os.path.exists(old_path):
print 'File already moved: %s' % new_path
else:
print 'File: "%s" -> "%s"' % (old_path, new_path)
try:
shutil.move(old_path, new_path)
except IOError, e:
print 'ERROR moving resource: %s' % e
continue
# change the cache_url and cache_filepath
new_cache_url = '/'.join((url_base, res_id[:2], res_id, filename))
print 'cache_filepath: "%s" -> "%s"' % (resource.extras.get('cache_filepath'), new_filepath)
print 'cache_url: "%s" -> "%s"' % (resource.cache_url, new_cache_url)
context = {'model': model, 'user': site_user['name'], 'ignore_auth': True, 'session': model.Session}
data_dict = {'id': resource.id}
res_dict = get_action('resource_show')(context, data_dict)
res_dict['cache_filepath'] = new_filepath
res_dict['cache_url'] = new_cache_url
data_dict = res_dict
result = get_action('resource_update')(context, data_dict)
if result.get('id') == res_id:
print 'Successfully updated resource'
else:
print 'ERROR updating resource: %r' % result
|
datagovuk/ckanext-archiver
|
ckanext/archiver/commands.py
|
Python
|
mit
| 19,891 | 0.001408 |
from common import bounty, peers, settings
from common.safeprint import safeprint
from multiprocessing import Queue, Value
from time import sleep, time
import pickle
def sync():
from multiprocessing import Manager
man = Manager()
items = {'config': man.dict(),
'peerList': man.list(),
'bountyList': man.list(),
'bountyLock': bounty.bountyLock,
'keyList': man.list()}
items['config'].update(settings.config)
items['peerList'].extend(peers.peerlist)
items['bountyList'].extend(bounty.bountyList)
safeprint(items)
peers.sync(items)
return items
def initParallels():
queue = Queue()
live = Value('b', True)
ear = peers.listener(settings.config['port'], settings.config['outbound'], queue, live, settings.config['server'])
ear.daemon = True
ear.items = sync()
ear.start()
mouth = peers.propagator(settings.config['port'] + 1, live)
mouth.daemon = True
mouth.items = ear.items
mouth.start()
feedback = []
stamp = time()
while queue.empty():
if time() - 15 > stamp:
break
global ext_ip, ext_port
ext_ip = ""
ext_port = -1
try:
feedback = queue.get(False)
settings.outbound = feedback[0]
if settings.outbound is not True:
ext_ip, ext_port = feedback[1:3]
except:
safeprint("No feedback received from listener")
return live
def main():
# Begin Init
settings.setup()
try:
import miniupnpc
except:
safeprint("Miniupnpc is not installed. Running in outbound only mode")
settings.config['outbound'] = True
safeprint("settings are:")
safeprint(settings.config)
live = initParallels()
global ext_ip, ext_port
peers.initializePeerConnections(settings.config['port'], ext_ip, ext_port)
# End Init
# Begin main loop
if settings.config.get('seed'):
safeprint("Seed mode activated")
try:
while True and not settings.config.get('test'):
sleep(0.1)
except KeyboardInterrupt:
safeprint("Keyboard Interrupt")
elif settings.config.get('server'):
safeprint("Server mode activated")
else:
safeprint("Client mode activated")
# End main loop
# Begin shutdown
safeprint("Beginning exit process")
live.value = False
settings.saveSettings()
peers.saveToFile()
bounty.saveToFile()
# End shutdown
if __name__ == "__main__":
main()
|
gappleto97/Senior-Project
|
main.py
|
Python
|
mit
| 2,530 | 0.001581 |
from __future__ import unicode_literals
from future.utils import native_str
from django.contrib import admin
from django.contrib.auth import logout
from django.contrib.messages import error
from django.contrib.redirects.models import Redirect
from django.core.exceptions import MiddlewareNotUsed
from django.core.urlresolvers import reverse, resolve
from django.http import (HttpResponse, HttpResponseRedirect,
HttpResponsePermanentRedirect, HttpResponseGone)
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.template import Template, RequestContext
from django.utils.cache import get_max_age
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mezzanine.conf import settings
from mezzanine.core.models import SitePermission
from mezzanine.core.management.commands.createdb import (DEFAULT_USERNAME,
DEFAULT_PASSWORD)
from mezzanine.utils.cache import (cache_key_prefix, nevercache_token,
cache_get, cache_set, cache_installed)
from mezzanine.utils.device import templates_for_device
from mezzanine.utils.sites import current_site_id, templates_for_host
from mezzanine.utils.urls import next_url
_deprecated = {
"AdminLoginInterfaceSelector": "AdminLoginInterfaceSelectorMiddleware",
"DeviceAwareUpdateCacheMiddleware": "UpdateCacheMiddleware",
"DeviceAwareFetchFromCacheMiddleware": "FetchFromCacheMiddleware",
}
class _Deprecated(object):
def __init__(self, *args, **kwargs):
from warnings import warn
msg = "mezzanine.core.middleware.%s is deprecated." % self.old
if self.new:
msg += (" Please change the MIDDLEWARE_CLASSES setting to use "
"mezzanine.core.middleware.%s" % self.new)
warn(msg)
for old, new in _deprecated.items():
globals()[old] = type(native_str(old),
(_Deprecated,),
{"old": old, "new": new})
class AdminLoginInterfaceSelectorMiddleware(object):
"""
Checks for a POST from the admin login view and if authentication is
successful and the "site" interface is selected, redirect to the site.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
login_type = request.POST.get("mezzanine_login_interface")
if login_type and not request.user.is_authenticated():
response = view_func(request, *view_args, **view_kwargs)
if request.user.is_authenticated():
if login_type == "admin":
next = request.get_full_path()
username = request.user.get_username()
if (username == DEFAULT_USERNAME and
request.user.check_password(DEFAULT_PASSWORD)):
error(request, mark_safe(_(
"Your account is using the default password, "
"please <a href='%s'>change it</a> immediately.")
% reverse("user_change_password",
args=(request.user.id,))))
else:
next = next_url(request) or "/"
return HttpResponseRedirect(next)
else:
return response
return None
class SitePermissionMiddleware(object):
"""
Marks the current user with a ``has_site_permission`` which is
used in place of ``user.is_staff`` to achieve per-site staff
access.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
has_site_permission = False
if request.user.is_superuser:
has_site_permission = True
elif request.user.is_staff:
lookup = {"user": request.user, "sites": current_site_id()}
try:
SitePermission.objects.get(**lookup)
except SitePermission.DoesNotExist:
admin_index = reverse("admin:index")
if request.path.startswith(admin_index):
logout(request)
view_func = admin.site.login
extra_context = {"no_site_permission": True}
return view_func(request, extra_context=extra_context)
else:
has_site_permission = True
request.user.has_site_permission = has_site_permission
class TemplateForDeviceMiddleware(object):
"""
Inserts device-specific templates to the template list.
"""
def process_template_response(self, request, response):
if hasattr(response, "template_name"):
if not isinstance(response.template_name, Template):
templates = templates_for_device(request,
response.template_name)
response.template_name = templates
return response
class TemplateForHostMiddleware(object):
"""
Inserts host-specific templates to the template list.
"""
def process_template_response(self, request, response):
if hasattr(response, "template_name"):
if not isinstance(response.template_name, Template):
templates = templates_for_host(request,
response.template_name)
response.template_name = templates
return response
class UpdateCacheMiddleware(object):
"""
Response phase for Mezzanine's cache middleware. Handles caching
the response, and then performing the second phase of rendering,
for content enclosed by the ``nevercache`` tag.
"""
def process_response(self, request, response):
# Caching is only applicable for text-based, non-streaming
# responses. We also skip it for non-200 statuses during
# development, so that stack traces are correctly rendered.
is_text = response.get("content-type", "").startswith("text")
valid_status = response.status_code == 200
streaming = getattr(response, "streaming", False)
if not is_text or streaming or (settings.DEBUG and not valid_status):
return response
# Cache the response if all the required conditions are met.
# Response must be marked for updating by the
# ``FetchFromCacheMiddleware`` having a cache get miss, the
# user must not be authenticated, the HTTP status must be OK
# and the response mustn't include an expiry age, indicating it
# shouldn't be cached.
marked_for_update = getattr(request, "_update_cache", False)
anon = hasattr(request, "user") and not request.user.is_authenticated()
timeout = get_max_age(response)
if timeout is None:
timeout = settings.CACHE_MIDDLEWARE_SECONDS
if anon and valid_status and marked_for_update and timeout:
cache_key = cache_key_prefix(request) + request.get_full_path()
_cache_set = lambda r: cache_set(cache_key, r.content, timeout)
if callable(getattr(response, "render", None)):
response.add_post_render_callback(_cache_set)
else:
_cache_set(response)
# Second phase rendering for non-cached template code and
# content. Split on the delimiter the ``nevercache`` tag
# wrapped its contents in, and render only the content
# enclosed by it, to avoid possible template code injection.
token = nevercache_token()
try:
token = token.encode('utf-8')
except AttributeError:
pass
parts = response.content.split(token)
# Restore csrf token from cookie - check the response
# first as it may be being set for the first time.
csrf_token = None
try:
csrf_token = response.cookies[settings.CSRF_COOKIE_NAME].value
except KeyError:
try:
csrf_token = request.COOKIES[settings.CSRF_COOKIE_NAME]
except KeyError:
pass
if csrf_token:
request.META["CSRF_COOKIE"] = csrf_token
context = RequestContext(request)
for i, part in enumerate(parts):
if i % 2:
part = Template(part).render(context).encode("utf-8")
parts[i] = part
response.content = b"".join(parts)
response["Content-Length"] = len(response.content)
if hasattr(request, '_messages'):
# Required to clear out user messages.
request._messages.update(response)
return response
class FetchFromCacheMiddleware(object):
"""
Request phase for Mezzanine cache middleware. Return a response
    from cache if found, otherwise mark the request for updating
the cache in ``UpdateCacheMiddleware``.
"""
def process_request(self, request):
if (cache_installed() and request.method == "GET" and
not request.user.is_authenticated()):
cache_key = cache_key_prefix(request) + request.get_full_path()
response = cache_get(cache_key)
# We need to force a csrf token here, as new sessions
            # won't receive one on their first request, with cache
# middleware running.
csrf_mw_name = "django.middleware.csrf.CsrfViewMiddleware"
if csrf_mw_name in settings.MIDDLEWARE_CLASSES:
csrf_mw = CsrfViewMiddleware()
csrf_mw.process_view(request, lambda x: None, None, None)
get_token(request)
if response is None:
request._update_cache = True
else:
return HttpResponse(response)
class SSLRedirectMiddleware(object):
"""
Handles redirections required for SSL when ``SSL_ENABLED`` is ``True``.
If ``SSL_FORCE_HOST`` is ``True``, and is not the current host,
redirect to it.
Also ensure URLs defined by ``SSL_FORCE_URL_PREFIXES`` are redirect
to HTTPS, and redirect all other URLs to HTTP if on HTTPS.
"""
def process_request(self, request):
settings.use_editable()
force_host = settings.SSL_FORCE_HOST
response = None
if force_host and request.get_host().split(":")[0] != force_host:
url = "http://%s%s" % (force_host, request.get_full_path())
response = HttpResponsePermanentRedirect(url)
elif settings.SSL_ENABLED and not settings.DEV_SERVER:
url = "%s%s" % (request.get_host(), request.get_full_path())
if request.path.startswith(settings.SSL_FORCE_URL_PREFIXES):
if not request.is_secure():
response = HttpResponseRedirect("https://%s" % url)
elif request.is_secure() and settings.SSL_FORCED_PREFIXES_ONLY:
response = HttpResponseRedirect("http://%s" % url)
if response and request.method == "POST":
if resolve(request.get_full_path()).url_name == "fb_do_upload":
# The handler for the flash file uploader in filebrowser
# doesn't have access to the http headers Django will use
# to determine whether the request is secure or not, so
# in this case we don't attempt a redirect - note that
# when /admin is restricted to SSL using Mezzanine's SSL
# setup, the flash uploader will post over SSL, so
                # someone would need to explicitly go out of their way to
# trigger this.
return
# Tell the client they need to re-POST.
response.status_code = 307
return response
class RedirectFallbackMiddleware(object):
"""
Port of Django's ``RedirectFallbackMiddleware`` that uses
Mezzanine's approach for determining the current site.
"""
def __init__(self):
if "django.contrib.redirects" not in settings.INSTALLED_APPS:
raise MiddlewareNotUsed
def process_response(self, request, response):
if response.status_code == 404:
lookup = {
"site_id": current_site_id(),
"old_path": request.get_full_path(),
}
try:
redirect = Redirect.objects.get(**lookup)
except Redirect.DoesNotExist:
pass
else:
if not redirect.new_path:
response = HttpResponseGone()
else:
response = HttpResponseRedirect(redirect.new_path)
return response
|
Kniyl/mezzanine
|
mezzanine/core/middleware.py
|
Python
|
bsd-2-clause
| 12,608 | 0.000317 |
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.16 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
keywords = {"refnames": git_refnames, "full": git_full}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = ""
cfg.parentdir_prefix = "waterflow-"
cfg.versionfile_source = "waterflow/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
return None
return stdout
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes
both the project name and a version string.
"""
dirname = os.path.basename(root)
if not dirname.startswith(parentdir_prefix):
if verbose:
print("guessing rootdir is '%s', but '%s' doesn't start with "
"prefix '%s'" % (root, dirname, parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None}
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs-tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None
}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags"}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
if not os.path.exists(os.path.join(root, ".git")):
if verbose:
print("no .git in %s" % root)
raise NotThisMethod("no .git directory")
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
    Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
    Like 'git describe --tags --dirty --always --long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"]}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree"}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version"}
|
ademilly/waterflow
|
waterflow/_version.py
|
Python
|
mit
| 16,749 | 0 |
# Utility functions for OpenMORA scripts
#
# Part of OpenMora - https://github.com/OpenMORA
import os, sys, string
import platform
import yaml
def get_mora_paths():
""" Returns a list of paths with MORA modules, from the env var MORA_PATH
"""
if not 'MORA_PATH' in os.environ:
print('**ERROR** Environment variable MORA_PATH not set')
sys.exit(1)
    sMoraPaths = os.environ['MORA_PATH']
if platform.system()=="Windows":
sPathDelim = ";"
else:
sPathDelim = ":"
morabase_dir="";
return sMoraPaths.split(sPathDelim)
def get_morabase_dir():
""" Returns the path of "mora-base" pkg
"""
mora_paths = get_mora_paths() # Get env vars
for p in mora_paths:
tstPath = os.path.normpath(p + "/mora-base")
if os.path.exists(tstPath):
morabase_dir = tstPath
if (len(morabase_dir)==0) or (not os.path.exists(morabase_dir)):
print("Couldn't detect mora-base in MORA_PATH!!")
sys.exit(1)
return morabase_dir
import sys, math
def progress(percent):
''' source: http://gunslingerc0de.wordpress.com/2010/08/13/python-command-line-progress-bar/ '''
width = 74
marks = math.floor(width * (percent / 100.0))
spaces = math.floor(width - marks)
loader = '[' + ('=' * int(marks)) + (' ' * int(spaces)) + ']'
if percent >= 100:
percent = 100
sys.stdout.write("%s %d%%\r" % (loader, percent))
	if percent >= 100:
		sys.stdout.write("\n")
sys.stdout.flush()
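# Usage sketch (not part of the original module): drive the progress bar over
# a fixed number of steps; the step count is arbitrary.
def demo_progress(steps=20):
	for i in range(steps + 1):
		progress(100.0 * i / steps)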
def get_pkgs_root():
'''Returns the path to the parent directory of mora-base'''
morabase_dir = get_morabase_dir()
pkgs_root = os.path.dirname(morabase_dir)
return pkgs_root
def read_distro_file():
'''Returns the yaml contents of the distro file'''
morabase_dir = get_morabase_dir()
pkgs_root = os.path.dirname(morabase_dir)
sDistroFile = os.path.normpath( morabase_dir + "/distro/openmora-pkgs.yaml")
assert os.path.exists(sDistroFile)
assert os.path.exists(pkgs_root + "/mora-base")
# Parse distro file:
fil = open(sDistroFile, 'r')
	distro = yaml.safe_load(fil)  # safe_load is sufficient for the plain distro file
fil.close()
#print distro
return distro
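# Example usage (not part of the original module): typical use of the helpers
# above from a build script. Requires the MORA_PATH environment variable.
if __name__ == "__main__":
	print("mora-base found at: " + get_morabase_dir())
	distro = read_distro_file()
	print("distro file entries: %d" % len(distro))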
|
OpenMORA/mora-base
|
scripts/morautils.py
|
Python
|
gpl-3.0
| 2,018 | 0.041625 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from foo_receiver import FooReceiver
from foo_listener_bf import FooListenerBfHelper
from PyCFFIlib_cffi import ffi, lib
import gc
class FooListenerBfImpl:
def delete_fl_in_fl(self):
print ("Not to be used")
def on_string_change(self, prs):
print ("FooListenerImpl.py: on_string_change prs", prs)
self._prs = prs
return self._prs
def get_string(self):
return self._prs
def set_listener_bf(self,fl):
self._fl = fl
def get_listener_bf(self):
return self._fl
def set_binary(self,b):
print ("setting Binary in FooListenerBfImpl ", b)
self._b = b
def get_binary(self):
return self._b
def send_return(self,fl):
return fl
def create():
        # TODO: decide whether to keep this stub here, or make the check in
        # helper.fromPy (that all interface methods exist as attributes on the
        # class) more lenient.
        print ("Unused stub: present only because the +p +c options and the fromPy check for all interface methods require it")
def fr_set_get(fr, fl, s):
fr.add_listener_bf(fl)
assert fr.set_private_bf_string(s) == s, "test_interface_back_forth failed"
# assert fl._prs == s, "test_interface_back_forth failed"
assert fr.get_listener_bf_string() == s, "test_interface_back_forth failed"
# back and forth via regular calls from python to cpp
def test_interface_back_forth():
print ("start test len ", len(FooListenerBfHelper.c_data_set))
fr = FooReceiver.create()
fl = FooListenerBfImpl() # python implementation of listener
fl_cpp = fr.get_foo_listener_bf() # cpp implementation of listener
# both direct and indirect test for python impl of FooListenerBf
fr_set_get(fr, fl, "Hello world!")
# both direct and indirect test for cpp impl of FooListenerBf
fr_set_get(fr, fl_cpp, "Goodbye world!")
fr_set_get(fr, fl_cpp, "Goodbye world!")
# send python implementation back and forth and see that it can still be used, and that no wrapper was added
fl_1 = fr.send_return(fl)
fl_2 = fr.send_return(fl_1)
fr_set_get(fr, fl_2, "Hello")
assert fl == fl_1 and fl_1 == fl_2, "test_interface_back_forth failed"
# send cpp implementation back and forth and see that is can still be used, and handles hold same implementation
fl_cpp_1 = fr.send_return(fl_cpp)
fl_cpp_2 = fr.send_return(fl_cpp_1)
fr_set_get(fr, fl_cpp_2, "Goodbye")
assert lib.equal_handles_cw__foo_listener_bf(fl_cpp._cpp_impl, fl_cpp_1._cpp_impl) and \
lib.equal_handles_cw__foo_listener_bf(fl_cpp_1._cpp_impl, fl_cpp_2._cpp_impl)
fl = fl_1 = fl_2 = fl_cpp = fl_cpp_1 = fl_cpp_2 = None
gc.collect()
fr = None
gc.collect()
assert 0 == len(FooListenerBfHelper.c_data_set)
def fr_fl_set_get(fr, fl_in_fl, b):
fr.set_listener_bf_in_listener_bf(fl_in_fl)
fr.set_binary_in_listener_bf_in_listener_bf(b)
assert b == fr.get_binary_in_listener_bf_in_listener_bf(), "test_interface_back_forth failed"
# back and forth via callbacks cpp to python
def test_interface_callback_back_forth():
fr = FooReceiver.create()
fl = FooListenerBfImpl()
fr.add_listener_bf(fl)
fl_in_fl = FooListenerBfImpl()
b = b'Some Binary 11'
fr_fl_set_get(fr, fl_in_fl, b) # listener 1 in python, listener 2 in python
fl_in_fl_1 = fr.in_listener_bf_send_return(fl_in_fl)
fl_in_fl_2 = fr.in_listener_bf_send_return(fl_in_fl_1)
assert fl_in_fl == fl_in_fl_1 and fl_in_fl_1 == fl_in_fl_2, "test_interface_back_forth failed"
fr_fl_set_get(fr, fl_in_fl_2, b) # listener 1 in python, listener 2 in python after back&forth
fl_in_fl = fr.get_foo_listener_bf()
b = b'Some Other Binary 12'
fr_fl_set_get(fr, fl_in_fl, b) # listener 1 in python, listener 2 in cpp
fl_in_fl_1 = fr.in_listener_bf_send_return(fl_in_fl)
fl_in_fl_2 = fr.in_listener_bf_send_return(fl_in_fl_1)
assert lib.equal_handles_cw__foo_listener_bf(fl_in_fl._cpp_impl, fl_in_fl_1._cpp_impl) and \
lib.equal_handles_cw__foo_listener_bf(fl_in_fl_1._cpp_impl, fl_in_fl_2._cpp_impl)
fr_fl_set_get(fr, fl_in_fl_2, b) # listener 1 in python, listener 2 in cpp after back&forth
fl = fr.get_foo_listener_bf()
fr.add_listener_bf(fl)
fl_in_fl = FooListenerBfImpl()
b = b'Some Binary 21'
fr_fl_set_get(fr, fl_in_fl, b) # listener 1 in cpp, listener 2 in python
fl_in_fl_1 = fr.in_listener_bf_send_return(fl_in_fl)
fl_in_fl_2 = fr.in_listener_bf_send_return(fl_in_fl_1)
assert fl_in_fl == fl_in_fl_1 and fl_in_fl_1 == fl_in_fl_2, "test_interface_back_forth failed"
fr_fl_set_get(fr, fl_in_fl_2, b) # listener 1 in cpp, listener 2 in python after back&forth
fl_in_fl = fr.get_foo_listener_bf()
b = b'Some Other Binary 22'
fr_fl_set_get(fr, fl_in_fl, b) # listener 1 in cpp, listener 2 in cpp
fl_in_fl_1 = fr.in_listener_bf_send_return(fl_in_fl)
fl_in_fl_2 = fr.in_listener_bf_send_return(fl_in_fl_1)
assert lib.equal_handles_cw__foo_listener_bf(fl_in_fl._cpp_impl, fl_in_fl_1._cpp_impl) and \
lib.equal_handles_cw__foo_listener_bf(fl_in_fl_1._cpp_impl, fl_in_fl_2._cpp_impl)
fr_fl_set_get(fr, fl_in_fl_2, b) # listener 1 in cpp, listener 2 in cpp after back&forth
fl = fl_in_fl = fl_in_fl_1 = fl_in_fl_2 = None
gc.collect()
fr = None
gc.collect()
assert 0 == len(FooListenerBfHelper.c_data_set)
|
trafi/djinni
|
test-suite/handwritten-src/python/test_proxying.py
|
Python
|
apache-2.0
| 5,511 | 0.009254 |
# Tests for source4/libnet/py_net_dckeytab.c
#
# Copyright (C) David Mulder <dmulder@suse.com> 2018
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
import string
from samba.net import Net
from samba import enable_net_export_keytab
from samba import tests
from samba.param import LoadParm
enable_net_export_keytab()
def open_bytes(filename):
if sys.version_info[0] == 3:
return open(filename, errors='ignore')
else:
return open(filename, 'rb')
class DCKeytabTests(tests.TestCase):
def setUp(self):
super(DCKeytabTests, self).setUp()
self.lp = LoadParm()
self.lp.load_default()
self.creds = self.insta_creds(template=self.get_credentials())
self.ktfile = os.path.join(self.lp.get('private dir'), 'test.keytab')
self.principal = self.creds.get_principal()
def tearDown(self):
super(DCKeytabTests, self).tearDown()
os.remove(self.ktfile)
def test_export_keytab(self):
net = Net(None, self.lp)
net.export_keytab(keytab=self.ktfile, principal=self.principal)
assert os.path.exists(self.ktfile), 'keytab was not created'
with open_bytes(self.ktfile) as bytes_kt:
result = ''
for c in bytes_kt.read():
if c in string.printable:
result += c
principal_parts = self.principal.split('@')
assert principal_parts[0] in result and \
principal_parts[1] in result, \
'Principal not found in generated keytab'
|
kernevil/samba
|
python/samba/tests/dckeytab.py
|
Python
|
gpl-3.0
| 2,161 | 0 |
from __future__ import unicode_literals
import json
import socket
from uuid import uuid4
from time import sleep
from urllib import urlencode
from random import randrange
from unittest import TestCase
from Queue import Queue, Empty
from contextlib import closing
from urlparse import urlparse, parse_qsl
import pytest
import cherrypy
import requests
from rpctools.jsonrpc import ServerProxy
from ws4py.server.cherrypyserver import WebSocketPlugin
import sideboard.websockets
from sideboard.lib import log, config, subscribes, notifies, services, cached_property, WebSocket
from sideboard.tests import service_patcher, config_patcher
from sideboard.tests.test_sa import Session
@pytest.mark.functional
class SideboardServerTest(TestCase):
port = config['cherrypy']['server.socket_port']
jsonrpc_url = 'http://127.0.0.1:{}/jsonrpc'.format(port)
jsonrpc = ServerProxy(jsonrpc_url)
rsess_username = 'unit_tests'
@staticmethod
def assert_port_open(port):
with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(('0.0.0.0', port))
@staticmethod
def assert_can_connect_to_localhost(port):
for i in range(50):
try:
socket.create_connection(('127.0.0.1', port)).close()
except Exception as e:
sleep(0.1)
else:
break
else:
raise e
@classmethod
def start_cherrypy(cls):
class Root(object):
@cherrypy.expose
def index(self):
cherrypy.session['username'] = cls.rsess_username
return cls.rsess_username
cherrypy.tree.apps.pop('/mock_login', None)
cherrypy.tree.mount(Root(), '/mock_login')
cls.assert_port_open(cls.port)
cherrypy.config.update({'engine.autoreload_on': False})
cherrypy.engine.start()
cherrypy.engine.wait(cherrypy.engine.states.STARTED)
cls.assert_can_connect_to_localhost(cls.port)
@classmethod
def stop_cherrypy(cls):
cherrypy.engine.stop()
cherrypy.engine.wait(cherrypy.engine.states.STOPPED)
cherrypy.engine.state = cherrypy.engine.states.EXITING
# ws4py does not support stopping and restarting CherryPy
sideboard.websockets.websocket_plugin.unsubscribe()
sideboard.websockets.websocket_plugin = WebSocketPlugin(cherrypy.engine)
sideboard.websockets.websocket_plugin.subscribe()
@classmethod
def setUpClass(cls):
super(SideboardServerTest, cls).setUpClass()
cls.start_cherrypy()
cls.ws = cls.patch_websocket(services.get_websocket())
cls.ws.connect(max_wait=5)
assert cls.ws.connected
@classmethod
def tearDownClass(cls):
cls.stop_cherrypy()
super(SideboardServerTest, cls).tearDownClass()
@staticmethod
def patch_websocket(ws):
ws.q = Queue()
ws.fallback = ws.q.put
return ws
def wait_for(self, func, *args, **kwargs):
for i in range(50):
cherrypy.engine.publish('main') # since our unit tests don't call cherrypy.engine.block, we must publish this event manually
try:
result = func(*args, **kwargs)
assert result or result is None
except:
sleep(0.1)
else:
break
else:
raise AssertionError('wait timed out')
def wait_for_eq(self, target, func, *args, **kwargs):
try:
self.wait_for(lambda: target == func(*args, **kwargs))
except:
raise AssertionError('{!r} != {!r}'.format(target, func(*args, **kwargs)))
def wait_for_ne(self, target, func, *args, **kwargs):
try:
self.wait_for(lambda: target != func(*args, **kwargs))
except:
raise AssertionError('{!r} == {!r}'.format(target, func(*args, **kwargs)))
@cached_property
def rsess(self):
rsess = requests.Session()
rsess.trust_env = False
self._get(rsess, '/mock_login')
return rsess
def url(self, path, **query_params):
params = dict(parse_qsl(urlparse(path).query))
params.update(query_params)
url = 'http://127.0.0.1:{}{}'.format(self.port, urlparse(path).path)
if params:
url += '?' + urlencode(params)
return url
def _get(self, rsess, path, **params):
return rsess.get(self.url(path, **params))
def get(self, path, **params):
return self._get(self.rsess, path, **params).content
def get_json(self, path, **params):
return self._get(self.rsess, path, **params).json()
def open_ws(self):
return self.patch_websocket(WebSocket(connect_immediately=True, max_wait=5))
def next(self, ws=None, timeout=2):
return (ws or self.ws).q.get(timeout=timeout)
def assert_incoming(self, ws=None, client=None, timeout=1, **params):
data = self.next(ws, timeout)
assert (client or self.client) == data.get('client')
for key, val in params.items():
assert val == data[key]
def assert_no_response(self):
pytest.raises(Empty, self.next)
def assert_error_with(self, *args, **kwargs):
if args:
self.ws.ws.send(str(args[0]))
else:
self.ws._send(**kwargs)
assert 'error' in self.next()
def call(self, **params):
callback = 'callback{}'.format(randrange(1000000))
self.ws._send(callback=callback, **params)
result = self.next()
assert callback == result['callback']
return result
def subscribe(self, **params):
params.setdefault('client', self.client)
return self.call(**params)
def unsubscribe(self, client=None):
self.call(action='unsubscribe', client=client or self.client)
class JsonrpcTest(SideboardServerTest):
@pytest.fixture(autouse=True)
def override(self, service_patcher):
service_patcher('testservice', self)
def get_message(self, name):
return 'Hello {}!'.format(name)
def send_json(self, body, content_type='application/json'):
if isinstance(body, dict):
body['id'] = self._testMethodName
resp = requests.post(self.jsonrpc_url, data=json.dumps(body),
                             headers={'Content-Type': content_type})
assert resp.json
return resp.json()
def test_rpctools(self):
assert 'Hello World!' == self.jsonrpc.testservice.get_message('World')
def test_content_types(self):
for ct in ['text/html', 'text/plain', 'application/javascript', 'text/javascript', 'image/gif']:
response = self.send_json({
'method': 'testservice.get_message',
'params': ['World']
}, content_type=ct)
            assert 'Hello World!' == response['result'], 'Expected success with valid request using Content-Type {}'.format(ct)
class TestWebsocketSubscriptions(SideboardServerTest):
@pytest.fixture(autouse=True)
def override(self, service_patcher, config_patcher):
config_patcher(1, 'ws.call_timeout')
service_patcher('self', self)
def echo(self, s):
self.echoes.append(s)
return s
def slow_echo(self, s):
sleep(2)
return s
@subscribes('names')
def get_names(self):
return self.names
@notifies('names')
def change_name(self, name=None):
self.names[-1] = name or uuid4().hex
@notifies('names')
def change_name_then_error(self):
self.names[:] = reversed(self.names)
self.fail()
def indirectly_change_name(self):
self.change_name(uuid4().hex)
@subscribes('places')
def get_places(self):
return self.places
@notifies('places')
def change_place(self):
self.places[0] = uuid4().hex
@subscribes('names', 'places')
def get_names_and_places(self):
return self.names + self.places
def setUp(self):
SideboardServerTest.setUp(self)
self.echoes = []
self.places = ['Here']
self.names = ['Hello', 'World']
self.client = self._testMethodName
def test_echo(self):
self.ws._send(method='self.echo', params='hello')
self.ws._send(method='self.echo', params=['hello'])
self.ws._send(method='self.echo', params={'s': 'hello'})
self.assert_no_response()
self.ws._send(method='self.echo', params='hello', callback='cb123')
self.next()
assert ['hello'] * 4 == self.echoes
def test_errors(self):
self.assert_error_with(0)
self.assert_error_with([])
self.assert_error_with('')
self.assert_error_with('x')
self.assert_error_with(None)
self.assert_error_with(method='missing')
self.assert_error_with(method='close_all')
self.assert_error_with(method='crud.missing')
self.assert_error_with(method='too.many.dots')
self.assert_error_with(method='self.echo.extra')
self.assert_error_with(method='self.echo')
self.assert_error_with(method='self.echo', params=['too', 'many'])
self.assert_error_with(method='self.echo', params={'invalid': 'name'})
self.assertEqual([], self.echoes)
self.assert_error_with(method='self.fail')
def test_callback(self):
result = self.call(method='self.echo', params='hello')
assert 'hello' == result['data']
assert 'client' not in result
result = self.call(method='crud.echo', params='hello', client='ds123')
assert 'ds123' == result['client']
def test_client_and_callback(self):
self.call(method='self.get_name', client=self.client)
self.assert_no_response()
def test_triggered(self):
self.subscribe(method='self.get_names')
with self.open_ws() as other_ws:
other_ws._send(method='self.change_name', params=['Kitty'])
self.assert_incoming()
def test_indirect_trigger(self):
self.subscribe(method='self.get_names')
with self.open_ws() as other_ws:
other_ws._send(method='self.indirectly_change_name')
self.assert_incoming()
def test_unsubscribe(self):
self.test_triggered()
self.unsubscribe()
self.call(method='self.change_name', params=[uuid4().hex])
self.assert_no_response()
def test_errors_still_triggers(self):
with self.open_ws() as other_ws:
self.subscribe(method='self.get_names')
other_ws._send(method='self.change_name_then_error')
self.assert_incoming()
def test_triggered_error(self):
with self.open_ws() as other_ws:
self.subscribe(method='self.get_names')
self.names.append(object())
other_ws._send(method='self.change_name_then_error')
self.names[:] = ['Hello'] * 2
other_ws._send(method='self.change_name')
self.assert_incoming()
def test_multiple_subscriptions(self):
self.subscribe(method='self.get_names')
self.subscribe(method='self.get_places')
self.assert_no_response()
with self.open_ws() as other_ws:
other_ws._send(method='self.change_name')
self.assert_incoming()
other_ws._send(method='self.change_place')
self.assert_incoming()
other_ws._send(method='self.echo', params='Hello')
self.assert_no_response()
def test_multiple_triggers(self):
self.subscribe(method='self.get_names_and_places')
self.assert_no_response()
with self.open_ws() as other_ws:
other_ws._send(method='self.change_name')
self.assert_incoming()
other_ws._send(method='self.change_place')
self.assert_incoming()
other_ws._send(method='self.echo', params='Hello')
self.assert_no_response()
def test_multiple_clients(self):
self.subscribe(method='self.get_names', client='client1')
self.subscribe(method='self.get_names', client='client2')
self.assert_no_response()
with self.open_ws() as other_ws:
other_ws._send(method='self.change_name')
assert {'client1', 'client2'} == {self.next()['client'], self.next()['client']}
def test_nonlocking_echo(self):
self.ws._send(method='self.slow_echo', params=['foo'],
client='client1', callback='cb11')
sleep(1)
self.ws._send(method='self.echo', params=['bar'], client='client2',
callback='cb22')
self.assert_incoming(data='bar', client='client2')
self.assert_incoming(data='foo', client='client1', timeout=2)
def test_client_locking(self):
self.ws._send(method='self.slow_echo', params=['foo'],
client=self.client, callback='cb1')
sleep(1)
self.ws._send(method='self.echo', params=['bar'],
client=self.client, callback='cb2')
self.assert_incoming(data='foo', timeout=2)
self.assert_incoming(data='bar')
def test_jsonrpc_notification(self):
self.subscribe(method='self.get_names')
self.jsonrpc.self.change_name()
self.assert_incoming()
def test_jsonrpc_websocket_client(self):
self.addCleanup(setattr, self.jsonrpc, "_prepare_request",
self.jsonrpc._prepare_request)
self.jsonrpc._prepare_request = lambda data, headers: data.update(
{'websocket_client': self.client})
self.jsonrpc.self.change_name()
self.assert_no_response()
class TestWebsocketCall(SideboardServerTest):
@pytest.fixture(autouse=True)
def override(self, service_patcher, config_patcher):
config_patcher(1, 'ws.call_timeout')
service_patcher('test', self)
def fast(self):
return 'fast'
def slow(self):
sleep(2)
return 'slow'
def test_fast(self):
assert self.ws.call('test.fast') == 'fast'
def test_slow(self):
pytest.raises(Exception, self.ws.call, 'test.slow')
class TestWebsocketsCrudSubscriptions(SideboardServerTest):
@pytest.fixture(autouse=True)
def override(self, service_patcher):
class MockCrud: pass
mr = self.mr = MockCrud()
for name in ['create', 'update', 'delete']:
setattr(mr, name, Session.crud.crud_notifies(self.make_crud_method(name), delay=0.5))
for name in ['read', 'count']:
setattr(mr, name, Session.crud.crud_subscribes(self.make_crud_method(name)))
service_patcher('crud', mr)
def setUp(self):
SideboardServerTest.setUp(self)
self.ws.close()
self.ws = self.open_ws()
self.client = self._testMethodName
def make_crud_method(self, name):
def crud_method(*args, **kwargs):
log.debug('mocked crud.{}'.format(name))
assert not getattr(self.mr, name + '_error', False)
return uuid4().hex
crud_method.__name__ = name.encode('utf-8')
return crud_method
def models(self, *models):
return [{'_model': model} for model in models]
def read(self, *models):
self.ws._send(method='crud.read', client=self.client, params=self.models(*models))
self.assert_incoming(trigger='subscribe')
def update(self, *models, **kwargs):
client = kwargs.get('client', 'unique_client_' + uuid4().hex)
self.ws._send(method='crud.update', client=client, params=self.models(*models))
self.assert_incoming(client=client)
def test_read(self):
self.read('User')
self.assert_no_response()
def test_triggered_read(self):
self.read('User')
self.update('User')
self.assert_incoming(trigger='update')
def test_unsubscribe(self):
self.test_triggered_read()
self.unsubscribe()
self.update('User')
self.assert_no_response()
def test_triggered_error(self):
self.mr.update_error = True
with self.open_ws() as other_ws:
other_ws._send(method='crud.read', client='other_tte', params=self.models('User'))
self.assert_incoming(other_ws, client='other_tte')
self.update('User')
self.ws._send(method='crud.update', client=self.client, params=self.models('User'))
assert 'error' in self.next()
self.assert_incoming(other_ws, client='other_tte', trigger='update')
def test_indirect_trigger(self):
def account(*attrs):
if len(attrs) == 1:
return {'_model': 'Account', 'field': attrs[0]}
else:
return {'_model': 'Account',
'or': [{'field': attr} for attr in attrs]}
def call(*attrs):
self.call(method='crud.read', client=self.client, params=account(*attrs))
def assert_update_triggers(model):
self.update(model)
self.assert_incoming()
call('xxx')
assert_update_triggers('Account')
self.unsubscribe()
call('user.xxx')
assert_update_triggers('User')
assert_update_triggers('Account')
self.unsubscribe()
call('user.xxx', 'boss.xxx')
assert_update_triggers('Account')
assert_update_triggers('User')
assert_update_triggers('Account')
self.unsubscribe()
call('user.tags.xxx')
assert_update_triggers('Account')
assert_update_triggers('User')
assert_update_triggers('Tag')
self.update('Boss')
self.assert_no_response()
def test_trigger_and_callback(self):
result = self.call(method='crud.read', params=self.models('User'), client='ds_ttac')
self.assert_no_response()
def test_multiple_triggers(self):
self.read('User', 'Boss')
self.update('User')
self.assert_incoming()
self.update('Boss')
self.assert_incoming()
self.update('Account')
self.assert_no_response()
def test_trigger_changed(self):
self.read('User')
self.read('Boss')
self.update('User')
self.assert_no_response()
self.update('Boss')
self.assert_incoming()
self.assert_no_response()
def test_multiple_clients(self):
self.read('Boss')
self.ws._send(method='crud.read', client='other_tmc', params=self.models('Boss'))
self.assert_incoming(client='other_tmc')
self.update('User')
self.assert_no_response()
self.read('Boss')
self.ws._send(method='crud.update', client='unused_client', params=self.models('Boss'))
self.next()
assert {self.client, 'other_tmc'} == {self.next()['client'], self.next()['client']}
def test_broadcast_error(self):
with self.open_ws() as other_ws:
self.read('User')
other_ws._send(method='crud.count', client='other_tbe', params=self.models('User'))
self.assert_incoming(other_ws, client='other_tbe')
self.mr.count_error = True
self.update('User', client='other_client_so_everything_will_trigger')
self.assert_incoming(trigger='update', timeout=5)
def test_jsonrpc_notifications(self):
self.read('User')
self.jsonrpc.crud.delete({'_model': 'User', 'field': 'name', 'value': 'Does Not Exist'})
self.assert_incoming(trigger='delete')
self.jsonrpc._prepare_request = lambda data, headers: data.update({'websocket_client': self.client})
self.jsonrpc.crud.delete({'_model': 'User', 'field': 'name', 'value': 'Does Not Exist'})
self.assert_no_response()
|
robdennis/sideboard
|
sideboard/tests/test_server.py
|
Python
|
bsd-3-clause
| 19,922 | 0.001757 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
""" astropy.cosmology contains classes and functions for cosmological
distance measures and other cosmology-related calculations.
See the `Astropy documentation
<https://docs.astropy.org/en/latest/cosmology/index.html>`_ for more
detailed usage examples and references.
"""
from . import core, flrw, funcs, parameter, units, utils
from . import io # needed before 'realizations' # isort: split
from . import realizations
from .core import *
from .flrw import *
from .funcs import *
from .parameter import *
from .realizations import *
from .utils import *
__all__ = (core.__all__ + flrw.__all__ # cosmology classes
+ realizations.__all__ # instances thereof
+ funcs.__all__ + parameter.__all__ + utils.__all__) # utils
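# Minimal usage sketch (not part of the original file): pick one of the bundled
# realizations and evaluate a distance measure. Kept as a comment so importing
# the package stays free of extra side effects.
#
#     from astropy.cosmology import Planck18
#     d_L = Planck18.luminosity_distance(1.0)   # astropy Quantity, in Mpc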
|
mhvk/astropy
|
astropy/cosmology/__init__.py
|
Python
|
bsd-3-clause
| 830 | 0 |
from django.core.urlresolvers import reverse
from django.test import Client
from amcat.models import ArticleSet, Sentence, Article, Role
from amcat.tools import amcattest, sbd
import navigator.forms
from navigator.views.article_views import ArticleSplitView, handle_split, get_articles, ArticleDetailsView
class TestSplitArticles(amcattest.AmCATTestCase):
def create_test_sentences(self):
article = amcattest.create_test_article(byline="foo", text="Dit is. Tekst.\n\n"*3 + "Einde.")
sbd.create_sentences(article)
return article, article.sentences.all()
@amcattest.use_elastic
def test_article_split_view(self):
from amcat.models import Role, ProjectRole
article, sentences = self.create_test_sentences()
aset = amcattest.create_test_set(0)
aset.add_articles([article])
user = amcattest.create_test_user(username="fred", password="secret")
ProjectRole.objects.create(user=user, project=aset.project, role=Role.objects.get(label="admin"))
        # Only test the very basics; if a simple split works we trust the view
# to use handle_split(), which is tested more extensively below.
url = reverse("navigator:" + ArticleSplitView.get_view_name(), args=[aset.project.id, article.id])
client = Client()
client.login(username="fred", password="secret")
response = client.post(url, {
"add_to_new_set": "test_article_split_view_set",
"remove_from_all_sets": "on",
"add_splitted_to_new_set": "",
"sentence-%s" % sentences[1].id: "on"
})
new_set = ArticleSet.objects.filter(name="test_article_split_view_set")
self.assertEqual(response.status_code, 200)
self.assertTrue(new_set.exists())
self.assertEqual(article, new_set[0].articles.all()[0])
@amcattest.use_elastic
def test_handle_split(self):
from amcat.tools import amcattest
from functools import partial
article, sentences = self.create_test_sentences()
project = amcattest.create_test_project()
aset1 = amcattest.create_test_set(4, project=project)
aset2 = amcattest.create_test_set(5, project=project)
aset3 = amcattest.create_test_set(0)
# Creates a codingjob for each articleset, as handle_split should account
# for "codedarticlesets" as well.
cj1 = amcattest.create_test_job(articleset=aset1)
cj2 = amcattest.create_test_job(articleset=aset2)
cj3 = amcattest.create_test_job(articleset=aset3)
for _set in [aset1, aset2]:
for _article in _set.articles.all():
sbd.create_sentences(_article)
a1, a2 = aset1.articles.all()[0], aset2.articles.all()[0]
aset1.add_articles([article])
aset3.add_articles([a1])
form = partial(navigator.forms.SplitArticleForm, project, article, initial={
"remove_from_sets": False
})
# Test form defaults (should do nothing!)
f = form(dict())
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
self.assertEquals(5, aset1.articles.all().count())
self.assertEquals(5, aset2.articles.all().count())
self.assertEquals(1, aset3.articles.all().count())
self.assertTrue(self.article_in(cj1, aset1, article))
self.assertFalse(self.article_in(cj2, aset2, article))
self.assertFalse(self.article_in(cj3, aset3, article))
# Passing invalid form should raise exception
f = form(dict(add_to_sets=[-1]))
self.assertFalse(f.is_valid())
self.assertRaises(ValueError, handle_split, f, project, article, Sentence.objects.none())
# Test add_to_new_set
f = form(dict(add_to_new_set="New Set 1"))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
aset = project.all_articlesets().filter(name="New Set 1")
self.assertTrue(aset.exists())
self.assertEquals(project, aset[0].project)
# Test add_to_sets
f = form(dict(add_to_sets=[aset3.id]))
self.assertFalse(f.is_valid())
f = form(dict(add_to_sets=[aset2.id]))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
self.assertTrue(self.article_in(cj2, aset2, article))
# Test add_splitted_to_new_set
f = form(dict(add_splitted_to_new_set="New Set 2"))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
aset = project.all_articlesets().filter(name="New Set 2")
self.assertTrue(aset.exists())
self.assertEquals(project, aset[0].project)
self.assertEquals(1, aset[0].articles.count())
self.assertFalse(self.article_in(None, aset[0], article))
# Test add_splitted_to_sets
f = form(dict(add_splitted_to_sets=[aset2.id]))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
self.assertTrue(article in aset2.articles.all())
# Test remove_from_sets
f = form(dict(remove_from_sets=[aset1.id]))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
self.assertTrue(article not in aset1.articles.all())
# Test remove_from_all_sets
aset1.add_articles([article])
aset2.add_articles([article])
aset3.add_articles([article])
f = form(dict(remove_from_all_sets=True))
self.assertTrue(f.is_valid())
handle_split(f, project, article, Sentence.objects.none())
self.assertTrue(aset1 in project.all_articlesets())
self.assertTrue(aset2 in project.all_articlesets())
self.assertFalse(aset3 in project.all_articlesets())
self.assertFalse(self.article_in(cj1, aset1, article))
self.assertFalse(self.article_in(cj2, aset2, article))
self.assertTrue(self.article_in(cj3, aset3, article))
def article_in(self, codingjob, articleset, article):
from amcat.tools.amcates import ES
ES().refresh()
if codingjob is not None:
if not codingjob.coded_articles.filter(article=article):
return False
return article.id in (articleset.get_article_ids() | articleset.get_article_ids(use_elastic=True))
class TestArticleViews(amcattest.AmCATTestCase):
@amcattest.use_elastic
def create_test_sentences(self):
article = amcattest.create_test_article(text="foo\n\nDit is. Tekst.\n\n"*3 + "Einde.")
sbd.create_sentences(article)
return article, article.sentences.all()
@amcattest.use_elastic
def test_get_articles(self):
from amcat.models import Sentence
_get_articles = lambda a, s : list(get_articles(a, s))
# Should raise exception if sentences not in article
article, sentences = self.create_test_sentences()
s1 = Sentence.objects.filter(id=amcattest.create_test_sentence().id)
self.assertRaises(ValueError, _get_articles, article, s1)
# Should raise an exception if we try to split on title
self.assertRaises(ValueError, _get_articles, article, sentences.filter(parnr=1))
# Should return a "copy", with byline in "text" property
arts = _get_articles(article, Sentence.objects.none())
Article.create_articles(arts)
self.assertEquals(len(arts), 1)
sbd.create_sentences(arts[0])
self.assertEquals(
[s.sentence for s in sentences[1:]],
[s.sentence for s in arts[0].sentences.all()[1:]]
)
self.assertTrue("foo" in arts[0].text)
# Should be able to split on byline
self.assertEquals(2, len(_get_articles(article, sentences[1:2])))
a, b = _get_articles(article, sentences[4:5])
# Check if text on splitted articles contains expected
self.assertTrue("Einde" not in a.text)
self.assertTrue("Einde" in b.text)
@amcattest.use_elastic
def test_permissions(self):
# articles should be visible if any of the sets it is in has the correct permissions
role_metareader = Role.objects.get(label="metareader")
role_reader = Role.objects.get(label="reader")
user = amcattest.create_test_user(username="fred", password="secret")
p1 = amcattest.create_test_project(name="p1")
p2 = amcattest.create_test_project(name="p2", owner=user)
s1 = amcattest.create_test_set(project=p1)
a1 = amcattest.create_test_article(project=p1, articleset=s1, text="Dit is de tekst", title="hoofdlijn")
client = Client()
client.login(username="fred", password="secret")
url = reverse("navigator:" + ArticleDetailsView.get_view_name(), args=[p1.id, s1.id, a1.id])
def test(url, can_view=True, can_read_article=True):
response = client.get(url)
self.assertEqual(response.status_code, 200 if can_view else 403)
if can_view:
self.assertEqual(response.context['can_view_text'], can_read_article)
return response
# fred can read it if p1 is reader
p1.guest_role = role_reader
p1.save()
response = test(url)
self.assertIn(b"Dit is de tekst", response.content)
# but not if guest role is metareader
p1.guest_role = role_metareader
p1.save()
response = test(url, can_read_article=False)
self.assertNotIn(b"Dit is de tekst", response.content)
self.assertIn(b"hoofdlijn", response.content)
# and an error if there is no guest role at all
p1.guest_role = None
p1.save()
test(url, can_view=False)
# Unless the article set is added to project 2 (where Fred is owner)
p2.articlesets.add(s1)
test(url)
# Also if project 1 has metareader as guest role
p1.guest_role = role_metareader
p1.save()
test(url)
#TODO: Test that you can only link a set on which you have read rights (i.e. on all articles? or on the project the set is currently in?)
#TODO: Test API permissions
|
amcat/amcat
|
navigator/views/tests/test_article_views.py
|
Python
|
agpl-3.0
| 10,397 | 0.003078 |
import logging
from discord.ext import commands
from bot.cooldowns import CooldownMapping, Cooldown
from bot.globals import Auth
from utils.utilities import is_owner, check_blacklist, no_dm
terminal = logging.getLogger('terminal')
def command(*args, **attrs):
if 'cls' not in attrs:
attrs['cls'] = Command
return commands.command(*args, **attrs)
def group(name=None, **attrs):
"""Uses custom Group class"""
if 'cls' not in attrs:
attrs['cls'] = Group
return commands.command(name=name, **attrs)
def cooldown(rate, per, type=commands.BucketType.default):
"""See `commands.cooldown` docs"""
def decorator(func):
if isinstance(func, Command):
func._buckets = CooldownMapping(Cooldown(rate, per, type))
else:
func.__commands_cooldown__ = Cooldown(rate, per, type)
return func
return decorator
class Command(commands.Command):
def __init__(self, func, **kwargs):
# Init called twice because commands are copied
super(Command, self).__init__(func, **kwargs)
self._buckets = CooldownMapping(self._buckets._cooldown)
self.owner_only = kwargs.pop('owner_only', False)
self.auth = kwargs.pop('auth', Auth.NONE)
self.checks.insert(0, check_blacklist)
if self.owner_only:
terminal.info(f'registered owner_only command {self.name}')
self.checks.insert(0, is_owner)
if 'no_pm' in kwargs or 'no_dm' in kwargs:
self.checks.insert(0, no_dm)
def undo_use(self, ctx):
"""Undoes one use of command"""
if self._buckets.valid:
bucket = self._buckets.get_bucket(ctx.message)
bucket.undo_one()
class Group(Command, commands.Group):
def __init__(self, *args, **attrs): # skipcq: PYL-W0231
Command.__init__(self, *args, **attrs)
self.invoke_without_command = attrs.pop('invoke_without_command', False)
def group(self, *args, **kwargs):
def decorator(func):
kwargs.setdefault('parent', self)
result = group(*args, **kwargs)(func)
self.add_command(result)
return result
return decorator
def command(self, *args, **kwargs):
def decorator(func):
if 'owner_only' not in kwargs:
kwargs['owner_only'] = self.owner_only
kwargs.setdefault('parent', self)
result = command(*args, **kwargs)(func)
self.add_command(result)
return result
return decorator
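# Usage sketch (not part of the original module): how a cog would typically
# combine the decorators defined above. The cog and command names below are
# hypothetical, chosen only for illustration.
class _ExampleCog(commands.Cog):
    @command(name='ping_example', owner_only=True)
    @cooldown(1, 5, commands.BucketType.user)
    async def ping_example(self, ctx):
        await ctx.send('pong')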
|
s0hvaperuna/Not-a-bot
|
bot/commands.py
|
Python
|
mit
| 2,568 | 0.000389 |
#!/usr/bin/python
################################################################################
# Bus Supervisor Interface
#
# - interfaces to the MCP23017 and PCF8574 IO expander chips
#
# The logic for this was ported from Dr Scott M. Baker's project:
# http://www.smbaker.com/z80-retrocomputing-4-bus-supervisor
#
################################################################################
from libArdySer import ArdySer
from lib_GenericChip import GenericChip
from GS_Timing import delay
from lib_MCP23017_IOExpander16 import MCP23017_IOExpander16
from lib_PCF8574_IOExpander8 import PCF8574_IOExpander8
class RC2014_BusSupervisor:
##################################
# class variables
ardy = None
cpuIoData = None
# A0-A7 - Data byte
# B0-B7 - Bus control
M1 = 0x01 # B0
CLK = 0x02 # B1
INT = 0x04 # B2
MREQ = 0x08 # B3
WR = 0x10 # B4
RD = 0x20 # B5
IORQ = 0x40 # B6
BUSACK = 0x80 # B7
cpuControl = None
# 0x0F - control, clock, etc
BUSREQ = 0x01
RESET = 0x02
CLKEN = 0x04
CLKOUT = 0x08
# 0xF0 - unused
cpuAddress = None
# A0-A7, B0-B7 - Address lines (reversed)
# our mirror values here
data = 0
dataControl = 0
dataAddress = 0
##############################
    def bitReverse( self, data ):
retval = 0
if( (data & 0x80) == 0x80 ): retval = retval | 0x01
if( (data & 0x40) == 0x40 ): retval = retval | 0x02
if( (data & 0x20) == 0x20 ): retval = retval | 0x04
if( (data & 0x10) == 0x10 ): retval = retval | 0x08
if( (data & 0x08) == 0x08 ): retval = retval | 0x10
if( (data & 0x04) == 0x04 ): retval = retval | 0x20
if( (data & 0x02) == 0x02 ): retval = retval | 0x40
if( (data & 0x01) == 0x01 ): retval = retval | 0x80
return retval
##################################
# Initialization
def __init__( self, _ardy, _i2cAddr8 = None ):
# set the arduino object
baseAddr = _i2cAddr8
if _i2cAddr8 is None:
baseAddr = 0x21
self.data = 0
self.dataControl = 0
self.dataAddress = 0
self.ardy = _ardy
self.cpuIoData = MCP23017_IOExpander16( _ardy, baseAddr + 0 )
self.cpuControl = PCF8574_IOExpander8( _ardy, baseAddr + 1 )
self.cpuAddress = MCP23017_IOExpander16( _ardy, baseAddr + 2 )
self.ClearAllExpanders()
    def ClearAllExpanders( self ):
        # IOALLINPUT is assumed to come from the IO-expander libraries
        # (all pins configured as inputs).
        # clear data register
        self.cpuIoData.DirectionA( IOALLINPUT )
        self.cpuIoData.SetA( 0x00 )
        self.cpuIoData.DirectionB( IOALLINPUT )
        self.cpuIoData.SetB( 0x00 )
# clear control register
self.cpuControl.Set( 0x00 )
# clear address register
self.cpuAddress.DirectionA( IOALLINPUT )
self.cpuAddress.SetA( 0x00 )
self.cpuAddress.DirectionB( IOALLINPUT )
self.cpuAddress.SetB( 0x00 )
##################################
# Low-level commands
##################################
# Package commands
def SupervisorDelay( self ):
delay( 1 )
def Reset( self ):
# RESET = 0
value = 0x00
self.cpuControl.Set( value )
self.SupervisorDelay()
# RESET = 1
value = self.RESET
self.cpuControl.Set( value )
return
    def TakeBus( self ):
        # Request the bus and wait until the CPU acknowledges (BUSACK low).
        value = self.BUSREQ
        self.cpuControl.Set( value )
        while True:
            value = self.cpuIoData.GetB( )
            if (value & self.BUSACK) == 0:
                break
        self.cpuAddress.DirectionA( IOALLINPUT )
        self.cpuAddress.DirectionB( IOALLINPUT )
        # Reconstructed from the original pseudocode: keep M1/CLK/INT/BUSACK as
        # inputs on bank B and drive MREQ/WR/RD/IORQ high (inactive) while the
        # supervisor owns the bus.
        self.cpuIoData.DirectionB( self.M1 | self.CLK | self.INT | self.BUSACK )
        self.cpuIoData.SetB( self.MREQ | self.WR | self.RD | self.IORQ )
        return
    def ReleaseBus( self ):
        # Reconstructed from the original pseudocode: float the address and
        # data buses again (all pins back to inputs / high-z), release BUSREQ
        # and wait for the CPU to raise BUSACK. The control-byte polarity is an
        # assumption carried over from Reset() and the pseudocode's 'busreq = 1'.
        self.cpuAddress.DirectionA( IOALLINPUT )   # input (high-z)
        self.cpuAddress.DirectionB( IOALLINPUT )   # input (high-z)
        self.cpuIoData.DirectionA( IOALLINPUT )
        self.cpuIoData.DirectionB( IOALLINPUT )
        self.SupervisorDelay()
        self.cpuControl.Set( self.RESET | self.BUSREQ )   # BUSREQ released, RESET inactive
        while True:
            value = self.cpuIoData.GetB( )
            if (value & self.BUSACK) != 0:
                break
        return
    def SlowClock( self, rate ):
        # Reconstructed from the original pseudocode: disable the on-board
        # clock and toggle CLKOUT from the supervisor at roughly `rate` Hz.
        # Assumes the GS_Timing delay() helper takes milliseconds; runs until
        # interrupted, as the pseudocode did.
        period = 1.0 / float( rate ) / 2.0
        self.dataControl = self.dataControl & ~self.CLKEN
        while True:
            self.cpuControl.Set( self.dataControl & ~self.CLKOUT )   # clock low
            delay( period * 1000.0 )
            self.cpuControl.Set( self.dataControl | self.CLKOUT )    # clock high
            delay( period * 1000.0 )
    def NormalClock( self ):
        # Re-enable the on-board clock oscillator (CLKEN = 1).
        self.dataControl = self.dataControl | self.CLKEN
        self.cpuControl.Set( self.dataControl )
        return
    def SetAddress( self, addr ):
        # The address lines are wired bit-reversed, so swap each byte before
        # writing it to the address expander.
        self.dataAddress = addr
        self.cpuAddress.SetA( self.bitReverse( (addr >> 8) & 0xff ) )
        self.cpuAddress.SetB( self.bitReverse( addr & 0xff ) )
        return
##############################
    def MemRead( self, addr ):
        # Reconstructed sketch: drive the address, pull /RD and /MREQ low,
        # sample the data bus, then release the strobes. Assumes the expander
        # class provides GetA() alongside the GetB() used above.
        self.SetAddress( addr )
        strobes = self.MREQ | self.WR | self.RD | self.IORQ
        self.cpuIoData.SetB( strobes & ~(self.RD | self.MREQ) )   # RD=0, MREQ=0
        result = self.cpuIoData.GetA( )
        self.cpuIoData.SetB( strobes )                            # RD=1, MREQ=1
        return result
    def MemWrite( self, addr, data ):
        # Reconstructed sketch: drive address and data, pulse /WR and /MREQ
        # low, then float the data bus again (0x00 = all outputs, 0xff = all
        # inputs, as in the original pseudocode).
        self.SetAddress( addr )
        self.cpuIoData.DirectionA( 0x00 )
        self.cpuIoData.SetA( data )
        strobes = self.MREQ | self.WR | self.RD | self.IORQ
        self.cpuIoData.SetB( strobes & ~(self.WR | self.MREQ) )   # WR=0, MREQ=0
        self.cpuIoData.SetB( strobes )                            # WR=1, MREQ=1
        self.cpuIoData.DirectionA( 0xff )
        return
    def IORead( self, addr ):
        # Reconstructed sketch: like MemRead(), but strobing /IORQ instead of /MREQ.
        self.SetAddress( addr )
        strobes = self.MREQ | self.WR | self.RD | self.IORQ
        self.cpuIoData.SetB( strobes & ~(self.RD | self.IORQ) )   # RD=0, IORQ=0
        result = self.cpuIoData.GetA( )
        self.cpuIoData.SetB( strobes )                            # RD=1, IORQ=1
        return result
    def IOWrite( self, addr, data ):
        # Reconstructed sketch: like MemWrite(), but strobing /IORQ instead of /MREQ.
        self.SetAddress( addr )
        self.cpuIoData.DirectionA( 0x00 )
        self.cpuIoData.SetA( data )
        strobes = self.MREQ | self.WR | self.RD | self.IORQ
        self.cpuIoData.SetB( strobes & ~(self.WR | self.IORQ) )   # WR=0, IORQ=0
        self.cpuIoData.SetB( strobes )                            # WR=1, IORQ=1
        self.cpuIoData.DirectionA( 0xff )
        return
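# Usage sketch (not part of the original module): a typical supervisor session,
# assuming `ardy` is an already-connected ArdySer instance.
def _example_session( ardy ):
    sup = RC2014_BusSupervisor( ardy )
    sup.Reset()
    sup.TakeBus()                     # halt the CPU and take over the bus
    value = sup.MemRead( 0x0000 )     # peek the first byte of memory
    sup.ReleaseBus()                  # hand the bus back to the CPU
    return value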
|
BleuLlama/LlamaPyArdy
|
Python/devices/lib_RC2014_BusSupervisor.py
|
Python
|
mit
| 4,512 | 0.068927 |
import sys, os
sys.path.insert(1, "../../../")
import h2o
from tests import pyunit_utils
from random import randint
import tempfile
def glm_gamma_offset_mojo():
train = h2o.import_file(path=pyunit_utils.locate("smalldata/prostate/prostate_complete.csv.zip"))
y = "DPROS"
x = ["AGE","RACE","CAPSULE","DCAPS","PSA","VOL"]
x_offset = ["AGE","RACE","CAPSULE","DCAPS","PSA","VOL", "C1"]
params = {'family':"gamma", 'offset_column':"C1"}
offset = pyunit_utils.random_dataset_real_only(train.nrow, 1, realR=3, misFrac=0, randSeed=12345)
train = train.cbind(offset)
tmpdir = tempfile.mkdtemp()
glm_gamma_model = pyunit_utils.build_save_model_generic(params, x, train, y, "glm", tmpdir) # build and save mojo model
MOJONAME = pyunit_utils.getMojoName(glm_gamma_model._id)
h2o.download_csv(train[x_offset], os.path.join(tmpdir, 'in.csv')) # save test file, h2o predict/mojo use same file
pred_h2o, pred_mojo = pyunit_utils.mojo_predict(glm_gamma_model, tmpdir, MOJONAME) # load model and perform predict
h2o.download_csv(pred_h2o, os.path.join(tmpdir, "h2oPred.csv"))
print("Comparing mojo predict and h2o predict...")
pyunit_utils.compare_frames_local(pred_h2o, pred_mojo, 0.1, tol=1e-10) # compare mojo and model predict
if __name__ == "__main__":
pyunit_utils.standalone_test(glm_gamma_offset_mojo)
else:
glm_gamma_offset_mojo()
|
h2oai/h2o-3
|
h2o-py/tests/testdir_javapredict/pyunit_PUBDEV_8330_GLM_mojo_gamma_offset.py
|
Python
|
apache-2.0
| 1,402 | 0.021398 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "timberwyck.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Dev")
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
powellc/timberwyck
|
manage.py
|
Python
|
bsd-3-clause
| 313 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import paramiko
import threading
import sys
import re
import time
import os
def start_shell(h, u, p):
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(h, 22, u, p)
s = ssh.invoke_shell()
w = threading.Thread(target=write_chanel, args=(s, ))
# r = threading.Thread(target=read_chanel, args=(s, ))
w.setDaemon(True)
w.start()
# w.start()
read_chanel(s)
# w.join()
try:
s.close()
ssh.close()
except:
pass
def read_chanel(s):
while True:
d = s.recv(4096)
if not d:
break
# for i in ['\x1b.*?m','\x0f','\x1b\[6;1H','\x1b\[K','\x1b25;1H']:
# d=re.sub(str(i),"",d)
sys.stdout.write(d)
sys.stdout.flush()
# time.sleep(0.1)
try:
s.close()
except:
pass
# os.kill(os.getpid(), 15)
# sys.exit(0)
def write_chanel(s):
try:
while True:
c = sys.stdin.read(1)
if not c:
s.close()
break
a = s.send(c)
if a == 0:
s.close()
break
except:
pass
if __name__ == '__main__':
import sys
if len(sys.argv) < 4:
print 'usage:%s host user passwd' % sys.argv[0]
sys.exit(1)
(host, user, passwd) = sys.argv[1:4]
start_shell(host, user, passwd)
|
fangdingjun/example1
|
python/ssh_c.py
|
Python
|
gpl-3.0
| 1,479 | 0.005409 |
#!/usr/bin/python3
import sys, subprocess
def main(argv=None):
if argv is None:
argv = sys.argv
experiments = {
1 : ('Continuous', 'COPD'),
        2 : ('Binary', 'COPD'),
3 : ('Continuous', 'EmphysemaExtentLung'),
4 : ('Binary', 'EmphysemaExtentLung'),
}
try:
experiment = experiments[ int(argv[1]) ]
except Exception as e:
print( 'usage: prog <experiment number>' )
return 1
prog = '../../Build/Classification/PredictClusterModel'
labels = {
'COPD' : '../../Data/Training/Labels/COPD.csv',
'EmphysemaExtentLung' : '../../Data/Training/Labels/EmphysemaExtentLung.csv',
}
instances = '../../Data/Training/Instances.csv'
bagMembership = '../../Data/Training/BagMembership.csv'
modelPattern = "Out/Training/MaxIterations1000/%s_%s_k%s_1.model"
numberOfClusters = ['5', '10', '20', ]#'15', '20', ]#'25', '30']
params = {
'histograms' : '24',
}
for k in numberOfClusters:
out = 'Out/Training/MaxIterations1000/%s_%s_k%s_' % (experiment + (k,))
cmd = [
prog,
"--instances", instances,
'--bag-membership', bagMembership,
'--model', modelPattern % (experiment + (k,)),
"--histograms", params['histograms'],
"--output", out,
]
print( ' '.join( cmd ) )
if subprocess.call( cmd ) != 0:
print( 'Error running %s : %s : k = %s' % ( experiment + (k,)) )
return 1
return 0
if __name__ == '__main__':
sys.exit( main() )
|
orting/emphysema-estimation
|
Experiments/07-MICCAI/Scripts/PredictTrain.py
|
Python
|
gpl-3.0
| 1,630 | 0.019018 |