repo_name (string, lengths 5-92) | path (string, lengths 4-232) | copies (19 classes) | size (string, lengths 4-7) | content (string, lengths 721-1.04M) | license (15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---
endlos99/xdt99 | test/dg-checkimg.py | 1 | 3903 |
#!/usr/bin/env python
import os
import random
from config import Dirs, Files
from utils import xdg, xga, error, check_files_eq, count_bytes, check_bytes
# Main test
def runtest():
"""check cross-generated output against native reference files"""
# run disassembler
for srcfile, dopts, aopts in [
('gaops.gpl', ['-a', '0', '-r', '6', '17a'], []),
('gainst.gpl', ['-a', '0', '-r', '6', 'a2', 'a3', 'aa', 'ab', 'ac', 'b2', 'b4'], []),
('gabranch.gpl', ['-a', '0', '-f', '5'], []),
('gamove.gpl', ['-a', '0', '-f', '6'], []),
('gafmt.gpl', ['-a', '0', '-f', '5', '-y', 'rag'], []),
('gacopy.gpl', ['-a', '>2000', '-r', '2000'], []),
('gaexts.gpl', ['-a', '0', '-r', '0x1e'], []),
('gapass.gpl', ['-a', '0x6030', '-r', '6030'], [])
]:
source = os.path.join(Dirs.gplsources, srcfile)
xga(*[source] + aopts + ['-o', Files.reference])
xdg(*[Files.reference] + dopts + ['-p', '-o', Files.input])
xga(*[Files.input] + aopts + ['-o', Files.output])
check_files_eq(srcfile, Files.output, Files.reference, 'PROGRAM')
check_bytes(Files.input, source)
# top-down disassembler
for srcfile, dopts, aopts in [
('gaops.gpl', ['-a', '0', '-f', '6'], []),
('gainst.gpl', ['-a', '0', '-f', '6'], []),
('gabranch.gpl', ['-a', '0', '-f', '5'], []),
('gamove.gpl', ['-a', '0', '-f', '6'], []),
('gafmt.gpl', ['-a', '0', '-f', '5', '-y', 'rag'], []),
('gadirs.gpl', ['-a', '0', '-f', '0'], []),
('gacopy.gpl', ['-a', '>2000', '-f', '>2000'], []),
('gaexts.gpl', ['-a', '0', '-f', '0x1e'], []),
('gapass.gpl', ['-a', '0x6030', '-f', '>6030'], [])
]:
source = os.path.join(Dirs.gplsources, srcfile)
xga(*[source] + aopts + ['-o', Files.reference])
xdg(*[Files.reference] + dopts + ['-p', '-o', Files.input])
xga(*[Files.input] + aopts + ['-o', Files.output])
check_files_eq(srcfile, Files.output, Files.reference, 'PROGRAM')
xdg(*[Files.reference] + dopts + ['-o', Files.output]) # -p would introduce BYTEs where not disassembled
if count_bytes(Files.output) > 0:
error('BYTE', 'Unwanted BYTE directives in result')
# disassembler run
for srcfile in ['dgruns.gpl']:
source = os.path.join(Dirs.gplsources, srcfile)
xga(*[source] + ['-o', Files.reference])
xdg(*[Files.reference] + ['-a', '0', '-r', '0x0', '-p', '-o', Files.input])
xga(*[Files.input] + ['-o', Files.output])
check_files_eq(srcfile, Files.output, Files.reference, 'PROGRAM')
check_bytes(Files.input, source)
# disassemble blob
binary = os.path.join(Dirs.refs, 'blobg.bin')
#TODO: universal character escape \x..
#xdg(binary, '-a', '0', '-f', 'start', '-p', '-o', Files.input)
#xga(Files.input, '-o', Files.output)
#check_files_eq('blobg', Files.output, binary, 'PROGRAM')
#xdg(binary, '-a', '0', '-r', 'start', '-p', '-o', Files.input)
#xga(Files.input, '-o', Files.output)
#check_files_eq('blobg-run', Files.output, binary, 'PROGRAM')
# disassemble random
randrange = [n for n in range(256) if n != 0x08 and n != 0xfb]
for r in range(16):
random.seed(r)
binary = bytes([random.choice(randrange) for i in range(2048)])
with open(Files.reference, 'wb') as fref:
fref.write(binary)
xdg(Files.reference, '-a', '1000', '-f', '1000', '-p', '-o', Files.input)
xga(Files.input, '-o', Files.output)
check_files_eq('random' + str(r), Files.reference, Files.output, 'PROGRAM')
# cleanup
os.remove(Files.input)
os.remove(Files.output)
os.remove(Files.reference)
if __name__ == '__main__':
runtest()
print('OK')
| gpl-2.0 | 8,020,362,520,169,591,000 | 41.423913 | 113 | 0.500641 | false |
adfinis-sygroup/timed-backend | timed/projects/views.py | 1 | 2618 |
"""Viewsets for the projects app."""
from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet
from rest_framework_json_api.views import PreloadIncludesMixin
from timed.permissions import IsAuthenticated, IsReadOnly, IsReviewer, IsSuperUser
from timed.projects import filters, models, serializers
class CustomerViewSet(ReadOnlyModelViewSet):
"""Customer view set."""
serializer_class = serializers.CustomerSerializer
filterset_class = filters.CustomerFilterSet
ordering = "name"
def get_queryset(self):
"""Prefetch related data.
:return: The customers
:rtype: QuerySet
"""
return models.Customer.objects.prefetch_related("projects")
class BillingTypeViewSet(ReadOnlyModelViewSet):
serializer_class = serializers.BillingTypeSerializer
ordering = "name"
def get_queryset(self):
return models.BillingType.objects.all()
class CostCenterViewSet(ReadOnlyModelViewSet):
serializer_class = serializers.CostCenterSerializer
ordering = "name"
def get_queryset(self):
return models.CostCenter.objects.all()
class ProjectViewSet(PreloadIncludesMixin, ReadOnlyModelViewSet):
"""Project view set."""
serializer_class = serializers.ProjectSerializer
filterset_class = filters.ProjectFilterSet
ordering_fields = ("customer__name", "name")
ordering = "name"
queryset = models.Project.objects.all()
prefetch_for_includes = {
"__all__": ["reviewers"],
"reviewers": ["reviewers__supervisors"],
}
def get_queryset(self):
queryset = super().get_queryset()
return queryset.select_related("customer", "billing_type", "cost_center")
class TaskViewSet(ModelViewSet):
"""Task view set."""
serializer_class = serializers.TaskSerializer
filterset_class = filters.TaskFilterSet
queryset = models.Task.objects.select_related("project", "cost_center")
permission_classes = [
# superuser may edit all tasks
IsSuperUser
# reviewer may edit all tasks
| IsReviewer
# all authenticated users may read all tasks
| IsAuthenticated & IsReadOnly
]
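# Rest framework combines the permission classes above with bitwise
# operators: '|' allows access if either side grants it, '&' requires both,
# so only superusers and reviewers may write while any authenticated user
# may read.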
ordering = "name"
def filter_queryset(self, queryset):
"""Specific filter queryset options."""
# my most frequent filter uses LIMIT so default ordering
# needs to be disabled to avoid exception
# see TODO filters.MyMostFrequentTaskFilter to avoid this
if "my_most_frequent" in self.request.query_params:
self.ordering = None
return super().filter_queryset(queryset)
| agpl-3.0 | 3,538,262,605,159,621,600 | 29.8 | 82 | 0.694041 | false |
GeoCat/QGIS | python/plugins/processing/algs/qgis/ui/FieldsMappingPanel.py | 1 | 16947 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
FieldsMappingWidget.py
---------------------
Date : October 2014
Copyright : (C) 2014 by Arnaud Morvan
Email : arnaud dot morvan at camptocamp dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from builtins import range
__author__ = 'Arnaud Morvan'
__date__ = 'October 2014'
__copyright__ = '(C) 2014, Arnaud Morvan'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from collections import OrderedDict
from qgis.PyQt import uic
from qgis.PyQt.QtCore import (
QItemSelectionModel,
QAbstractTableModel,
QModelIndex,
QVariant,
Qt,
pyqtSlot,
)
from qgis.PyQt.QtWidgets import (
QComboBox,
QHeaderView,
QLineEdit,
QSpacerItem,
QMessageBox,
QSpinBox,
QStyledItemDelegate,
)
from qgis.core import (
QgsApplication,
QgsExpression,
QgsMapLayerProxyModel,
QgsProcessingFeatureSourceDefinition,
QgsProcessingUtils,
QgsProject,
QgsVectorLayer,
)
from qgis.gui import QgsFieldExpressionWidget
from processing.gui.wrappers import WidgetWrapper, DIALOG_STANDARD, DIALOG_MODELER
from processing.tools import dataobjects
pluginPath = os.path.dirname(__file__)
WIDGET, BASE = uic.loadUiType(
os.path.join(pluginPath, 'fieldsmappingpanelbase.ui'))
class FieldsMappingModel(QAbstractTableModel):
fieldTypes = OrderedDict([
(QVariant.Date, "Date"),
(QVariant.DateTime, "DateTime"),
(QVariant.Double, "Double"),
(QVariant.Int, "Integer"),
(QVariant.LongLong, "Integer64"),
(QVariant.String, "String")])
def __init__(self, parent=None):
super(FieldsMappingModel, self).__init__(parent)
self._mapping = []
self._layer = None
self.configure()
def configure(self):
self.columns = [{
'name': 'expression',
'type': QgsExpression,
'header': self.tr("Source expression"),
'persistentEditor': True
}, {
'name': 'name',
'type': QVariant.String,
'header': self.tr("Field name")
}, {
'name': 'type',
'type': QVariant.Type,
'header': self.tr("Type"),
'persistentEditor': True
}, {
'name': 'length',
'type': QVariant.Int,
'header': self.tr("Length")
}, {
'name': 'precision',
'type': QVariant.Int,
'header': self.tr("Precision")
}]
def columnIndex(self, column_name):
for index, column in enumerate(self.columns):
if column['name'] == column_name:
return index
def mapping(self):
return self._mapping
def setMapping(self, value):
self.beginResetModel()
self._mapping = value
self.endResetModel()
def contextGenerator(self):
if self._layer:
return self._layer
return QgsProject.instance()
def layer(self):
return self._layer
def setLayer(self, layer):
self._layer = layer
def columnCount(self, parent=QModelIndex()):
if parent.isValid():
return 0
return len(self.columns)
def rowCount(self, parent=QModelIndex()):
if parent.isValid():
return 0
return self._mapping.__len__()
def headerData(self, section, orientation, role=Qt.DisplayRole):
if role == Qt.DisplayRole:
if orientation == Qt.Horizontal:
return self.columns[section]['header']
if orientation == Qt.Vertical:
return section
def flags(self, index):
return Qt.ItemFlags(Qt.ItemIsSelectable |
Qt.ItemIsEditable |
Qt.ItemIsEnabled)
def data(self, index, role=Qt.DisplayRole):
field = self._mapping[index.row()]
column_def = self.columns[index.column()]
if role == Qt.DisplayRole:
value = field[column_def['name']]
if column_def['type'] == QVariant.Type:
if value == QVariant.Invalid:
return ''
return self.fieldTypes[value]
return value
if role == Qt.EditRole:
return field[column_def['name']]
if role == Qt.TextAlignmentRole:
if column_def['type'] in [QVariant.Int]:
hAlign = Qt.AlignRight
else:
hAlign = Qt.AlignLeft
return hAlign + Qt.AlignVCenter
def setData(self, index, value, role=Qt.EditRole):
field = self._mapping[index.row()]
column_def = self.columns[index.column()]
if role == Qt.EditRole:
field[column_def['name']] = value
self.dataChanged.emit(index, index)
return True
def insertRows(self, row, count, index=QModelIndex()):
self.beginInsertRows(index, row, row + count - 1)
for i in range(count):
field = self.newField()
self._mapping.insert(row + i, field)
self.endInsertRows()
return True
def removeRows(self, row, count, index=QModelIndex()):
self.beginRemoveRows(index, row, row + count - 1)
for i in range(row + count - 1, row - 1, -1):
self._mapping.pop(i)
self.endRemoveRows()
return True
def newField(self, field=None):
if field is None:
return {'name': '',
'type': QVariant.Invalid,
'length': 0,
'precision': 0,
'expression': ''}
return {'name': field.name(),
'type': field.type(),
'length': field.length(),
'precision': field.precision(),
'expression': QgsExpression.quotedColumnRef(field.name())}
def loadLayerFields(self, layer):
self.beginResetModel()
self._mapping = []
if layer is not None:
dp = layer.dataProvider()
for field in dp.fields():
self._mapping.append(self.newField(field))
self.endResetModel()
class FieldTypeDelegate(QStyledItemDelegate):
def createEditor(self, parent, option, index):
editor = QComboBox(parent)
for key, text in list(FieldsMappingModel.fieldTypes.items()):
editor.addItem(text, key)
return editor
def setEditorData(self, editor, index):
if not editor:
return
value = index.model().data(index, Qt.EditRole)
editor.setCurrentIndex(editor.findData(value))
def setModelData(self, editor, model, index):
if not editor:
return
value = editor.currentData()
if value is None:
value = QVariant.Invalid
model.setData(index, value)
class ExpressionDelegate(QStyledItemDelegate):
def createEditor(self, parent, option, index):
editor = QgsFieldExpressionWidget(parent)
editor.setLayer(index.model().layer())
editor.registerExpressionContextGenerator(index.model().contextGenerator())
editor.fieldChanged.connect(self.on_expression_fieldChange)
editor.setAutoFillBackground(True)
return editor
def setEditorData(self, editor, index):
if not editor:
return
value = index.model().data(index, Qt.EditRole)
editor.setField(value)
def setModelData(self, editor, model, index):
if not editor:
return
(value, isExpression, isValid) = editor.currentField()
if isExpression is True:
model.setData(index, value)
else:
model.setData(index, QgsExpression.quotedColumnRef(value))
def on_expression_fieldChange(self, fieldName):
self.commitData.emit(self.sender())
class FieldsMappingPanel(BASE, WIDGET):
def __init__(self, parent=None):
super(FieldsMappingPanel, self).__init__(parent)
self.setupUi(self)
self.addButton.setIcon(QgsApplication.getThemeIcon("/mActionNewAttribute.svg"))
self.deleteButton.setIcon(QgsApplication.getThemeIcon('/mActionDeleteAttribute.svg'))
self.upButton.setIcon(QgsApplication.getThemeIcon('/mActionArrowUp.svg'))
self.downButton.setIcon(QgsApplication.getThemeIcon('/mActionArrowDown.svg'))
self.resetButton.setIcon(QgsApplication.getThemeIcon('/mIconClearText.svg'))
self.configure()
self.model.modelReset.connect(self.on_model_modelReset)
self.model.rowsInserted.connect(self.on_model_rowsInserted)
self.layerCombo.setAllowEmptyLayer(True)
self.layerCombo.setFilters(QgsMapLayerProxyModel.VectorLayer)
def configure(self):
self.model = FieldsMappingModel()
self.fieldsView.setModel(self.model)
self.setDelegate('expression', ExpressionDelegate(self))
self.setDelegate('type', FieldTypeDelegate(self))
def setDelegate(self, column_name, delegate):
self.fieldsView.setItemDelegateForColumn(
self.model.columnIndex(column_name),
delegate)
def setLayer(self, layer):
self.model.setLayer(layer)
if layer is None:
return
if self.model.rowCount() == 0:
self.on_resetButton_clicked()
return
dlg = QMessageBox(self)
dlg.setText("Do you want to reset the field mapping?")
dlg.setStandardButtons(
QMessageBox.StandardButtons(QMessageBox.Yes |
QMessageBox.No))
dlg.setDefaultButton(QMessageBox.No)
if dlg.exec_() == QMessageBox.Yes:
self.on_resetButton_clicked()
def value(self):
return self.model.mapping()
def setValue(self, value):
self.model.setMapping(value)
@pyqtSlot(bool, name='on_addButton_clicked')
def on_addButton_clicked(self, checked=False):
rowCount = self.model.rowCount()
self.model.insertRows(rowCount, 1)
index = self.model.index(rowCount, 0)
self.fieldsView.selectionModel().select(
index,
QItemSelectionModel.SelectionFlags(
QItemSelectionModel.Clear |
QItemSelectionModel.Select |
QItemSelectionModel.Current |
QItemSelectionModel.Rows))
self.fieldsView.scrollTo(index)
@pyqtSlot(bool, name='on_deleteButton_clicked')
def on_deleteButton_clicked(self, checked=False):
sel = self.fieldsView.selectionModel()
if not sel.hasSelection():
return
indexes = sel.selectedRows()
for index in indexes:
self.model.removeRows(index.row(), 1)
@pyqtSlot(bool, name='on_upButton_clicked')
def on_upButton_clicked(self, checked=False):
sel = self.fieldsView.selectionModel()
if not sel.hasSelection():
return
row = sel.selectedRows()[0].row()
if row == 0:
return
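# Move the selected row up by inserting a blank row above it, copying
# every column value from the old position (now shifted down by one)
# into the new row, removing the old row, and reselecting the moved row.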
self.model.insertRows(row - 1, 1)
for column in range(self.model.columnCount()):
srcIndex = self.model.index(row + 1, column)
dstIndex = self.model.index(row - 1, column)
value = self.model.data(srcIndex, Qt.EditRole)
self.model.setData(dstIndex, value, Qt.EditRole)
self.model.removeRows(row + 1, 1)
sel.select(
self.model.index(row - 1, 0),
QItemSelectionModel.SelectionFlags(
QItemSelectionModel.Clear |
QItemSelectionModel.Select |
QItemSelectionModel.Current |
QItemSelectionModel.Rows))
@pyqtSlot(bool, name='on_downButton_clicked')
def on_downButton_clicked(self, checked=False):
sel = self.fieldsView.selectionModel()
if not sel.hasSelection():
return
row = sel.selectedRows()[0].row()
if row == self.model.rowCount() - 1:
return
self.model.insertRows(row + 2, 1)
for column in range(self.model.columnCount()):
srcIndex = self.model.index(row, column)
dstIndex = self.model.index(row + 2, column)
value = self.model.data(srcIndex, Qt.EditRole)
self.model.setData(dstIndex, value, Qt.EditRole)
self.model.removeRows(row, 1)
sel.select(
self.model.index(row + 1, 0),
QItemSelectionModel.SelectionFlags(
QItemSelectionModel.Clear |
QItemSelectionModel.Select |
QItemSelectionModel.Current |
QItemSelectionModel.Rows))
@pyqtSlot(bool, name='on_resetButton_clicked')
def on_resetButton_clicked(self, checked=False):
self.model.loadLayerFields(self.model.layer())
def resizeColumns(self):
header = self.fieldsView.horizontalHeader()
header.resizeSections(QHeaderView.ResizeToContents)
for section in range(header.count()):
size = header.sectionSize(section)
fieldType = self.model.columns[section]['type']
if fieldType == QgsExpression:
header.resizeSection(section, size + 100)
else:
header.resizeSection(section, size + 20)
def openPersistentEditors(self, row):
for index, column in enumerate(self.model.columns):
if 'persistentEditor' in column.keys() and column['persistentEditor']:
self.fieldsView.openPersistentEditor(self.model.index(row, index))
continue
editor = self.fieldsView.indexWidget(self.model.index(row, index))
if isinstance(editor, QLineEdit):
editor.deselect()
if isinstance(editor, QSpinBox):
lineEdit = editor.findChild(QLineEdit)
lineEdit.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
lineEdit.deselect()
def on_model_modelReset(self):
for row in range(0, self.model.rowCount()):
self.openPersistentEditors(row)
self.resizeColumns()
def on_model_rowsInserted(self, parent, start, end):
for row in range(start, end + 1):
self.openPersistentEditors(row)
@pyqtSlot(bool, name='on_loadLayerFieldsButton_clicked')
def on_loadLayerFieldsButton_clicked(self, checked=False):
layer = self.layerCombo.currentLayer()
if layer is None:
return
self.model.loadLayerFields(layer)
class FieldsMappingWidgetWrapper(WidgetWrapper):
def __init__(self, *args, **kwargs):
super(FieldsMappingWidgetWrapper, self).__init__(*args, **kwargs)
self._layer = None
def createWidget(self):
return FieldsMappingPanel()
def postInitialize(self, wrappers):
for wrapper in wrappers:
if wrapper.param.name() == self.param.parentLayerParameter():
self.setLayer(wrapper.value())
wrapper.widgetValueHasChanged.connect(self.parentLayerChanged)
break
# remove existing spacers to get FieldsMappingPanel fully expanded
if self.dialogType in (DIALOG_STANDARD, DIALOG_MODELER):
layout = self.widget.parent().layout()
spacer = layout.itemAt(layout.count() - 1)
if isinstance(spacer, QSpacerItem):
layout.removeItem(spacer)
def parentLayerChanged(self, layer=None):
self.setLayer(self.sender().value())
def setLayer(self, layer):
context = dataobjects.createContext()
if layer == self._layer:
return
if isinstance(layer, QgsProcessingFeatureSourceDefinition):
layer, ok = layer.source.valueAsString(context.expressionContext())
if isinstance(layer, str):
layer = QgsProcessingUtils.mapLayerFromString(layer, context)
if not isinstance(layer, QgsVectorLayer):
layer = None
self._layer = layer
self.widget.setLayer(self._layer)
def setValue(self, value):
self.widget.setValue(value)
def value(self):
return self.widget.value()
| gpl-2.0 | 8,780,736,930,080,279,000 | 32.164384 | 93 | 0.586593 | false |
tribut/vdirsyncer | vdirsyncer/storage/memory.py | 1 | 2008 |
# -*- coding: utf-8 -*-
import random
import vdirsyncer.exceptions as exceptions
from vdirsyncer.storage.base import Storage
def _random_string():
return '{:.9f}'.format(random.random())
class MemoryStorage(Storage):
'''
Saves data in RAM, only useful for testing.
'''
def __init__(self, fileext='', **kwargs):
if kwargs.get('collection') is not None:
raise exceptions.UserError('MemoryStorage does not support '
'collections.')
self.items = {} # href => (etag, item)
self.metadata = {}
self.fileext = fileext
super(MemoryStorage, self).__init__(**kwargs)
def _get_href(self, item):
return item.ident + self.fileext
def list(self):
for href, (etag, item) in self.items.items():
yield href, etag
def get(self, href):
etag, item = self.items[href]
return item, etag
def has(self, href):
return href in self.items
def upload(self, item):
href = self._get_href(item)
if href in self.items:
raise exceptions.AlreadyExistingError(existing_href=href)
etag = _random_string()
self.items[href] = (etag, item)
return href, etag
def update(self, href, item, etag):
if href not in self.items:
raise exceptions.NotFoundError(href)
actual_etag, _ = self.items[href]
if etag != actual_etag:
raise exceptions.WrongEtagError(etag, actual_etag)
new_etag = _random_string()
self.items[href] = (new_etag, item)
return new_etag
def delete(self, href, etag):
if not self.has(href):
raise exceptions.NotFoundError(href)
if etag != self.items[href][0]:
raise exceptions.WrongEtagError(etag)
del self.items[href]
def get_meta(self, key):
return self.metadata.get(key)
def set_meta(self, key, value):
self.metadata[key] = value
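# A minimal usage sketch of the href/etag protocol above (illustrative only;
# 'item' stands for any vdirsyncer item object exposing an '.ident' attribute,
# an assumption here rather than a concrete import):
#
#     storage = MemoryStorage(fileext='.ics')
#     href, etag = storage.upload(item)        # new href plus a fresh etag
#     etag = storage.update(href, item, etag)  # a stale etag raises WrongEtagError
#     storage.delete(href, etag)               # delete performs the same etag check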
| mit | -8,198,893,486,883,892,000 | 26.888889 | 72 | 0.583167 | false |
alandmoore/qtscrot | qtscrot.py | 1 | 5340 |
"""
QTScrot is a gui for the scrot screenshot utility.
Written by Alan D Moore, c 2014.
Released under the GPL v. 3.0
Requires scrot and QT5.
"""
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from collections import namedtuple
from functools import partial
import subprocess
import sys
from shutil import which, copy
from datetime import datetime
import os
settingItem = namedtuple("settingItem", ["label", "args"])
preview_scale = .50
class ScrotGUI(QMainWindow):
"""The main GUI for the app"""
def __init__(self, scrot_cmd):
super(ScrotGUI, self).__init__()
self.setWindowFlags(Qt.Dialog)
self.scrot = scrot_cmd
self.form = QWidget()
self.formlayout = QVBoxLayout()
self.form.setLayout(self.formlayout)
self.setCentralWidget(self.form)
self.command_args = []
#ss display
self.image_view = QLabel()
self.formlayout.addWidget(self.image_view)
screen = QApplication.desktop().availableGeometry()
self.image_view_size = (screen.width() * preview_scale, screen.height() * preview_scale)
self.image_view.setMaximumSize(*self.image_view_size)
self.image_view.setSizePolicy(QSizePolicy(QSizePolicy.Maximum))
#The execute button
self.gobutton = QPushButton ("&Take Screenshot")
self.gobutton.clicked.connect(self.take_screenshot)
self.formlayout.addWidget(self.gobutton)
#Type of shot
shot_type_gb = QGroupBox("Type of shot")
shot_type_gb.setLayout(QGridLayout())
self.shot_type_button_group = QButtonGroup()
self.shot_types = [
settingItem("&Full", ["-m"]),
settingItem("&Current Screen", []),
settingItem("Select &Window", ["-s", "-b"]),
settingItem("Select W&indow (no border)", ["-s"])
]
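# The option lists above translate to scrot flags: '-m' grabs all monitors
# of a multi-head setup, '-s' switches to interactive window/rectangle
# selection, and '-b' also captures the window manager border; an empty
# list is a plain grab of the current screen.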
self.shot_type = self.shot_types[0]
for i, shot_type in enumerate(self.shot_types):
button = QRadioButton(shot_type.label)
self.shot_type_button_group.addButton(button)
shot_type_gb.layout().addWidget(button, i // 2, i % 2)
button.toggled[bool].connect(partial(self.set_shot_type, i, shot_type.args))
if i == 0: # Set the first radio button selected
button.setChecked(True)
self.formlayout.addWidget(shot_type_gb)
# Countdown
countdown_section = QWidget()
countdown_section.setSizePolicy(QSizePolicy(QSizePolicy.Minimum))
countdown_section.setLayout(QHBoxLayout())
countdown_section.layout().addWidget(QLabel("Countdown"))
self.countdown_time = QSpinBox()
countdown_section.layout().addWidget(self.countdown_time)
self.formlayout.addWidget(countdown_section)
# Save button
self.savebutton = QPushButton("Save Screenshot")
self.savebutton.setShortcut(QKeySequence(QKeySequence.Save))
self.savebutton.setEnabled(False)
self.savebutton.clicked.connect(self.save_shot)
self.formlayout.addWidget(self.savebutton)
#Expander
# expand the bottom so we don't look derpy
self.expander = QWidget()
self.expander.setSizePolicy(QSizePolicy(QSizePolicy.Expanding))
self.formlayout.addWidget(self.expander)
#show the app
self.show()
def set_shot_type(self, option_number, checked):
if checked:
self.shot_type = self.shot_types[option_number]
def save_shot(self):
save_filename = QFileDialog.getSaveFileName(None, "Screenshot Name", "", "Images (*.png)")
copy(self.filename, save_filename[0])
def take_screenshot(self):
self.command_args = [self.scrot]
self.command_args += self.shot_type.args
countdown = self.countdown_time.value()
if countdown > 0:
self.command_args.append("-c")
self.command_args.append("-d")
self.command_args.append(countdown.__str__())
else:
if "-c" in self.command_args:
self.command_args.remove("-c")
if "-d" in self.command_args:
ci = self.command_args.index("-d")
del self.command_args[ci]
if self.command_args[ci].isdigit():
del self.command_args[ci]
self.filename = os.path.join("/tmp",
datetime.now().strftime("qtscrot-%Y-%m-%d-%H%M%s%f.png"))
self.command_args.append(self.filename)
self.hide()
print("Command executed: {}".format(" ".join(self.command_args)))
subprocess.check_call(self.command_args)
preview = QImage(self.filename).scaled(self.image_view_size[0], self.image_view_size[1], Qt.KeepAspectRatio, Qt.SmoothTransformation)
self.image_view.setPixmap(QPixmap.fromImage(preview))
self.savebutton.setEnabled(True)
self.show()
if __name__ == '__main__':
# check for existence of scrot
scrot_cmd = which("scrot")
if not scrot_cmd:
sys.stderr.write("scrot was not found on your system. Please install scrot to use this app.")
sys.exit(1)
app = QApplication(sys.argv)
sg = ScrotGUI(scrot_cmd)
app.exec_()
| gpl-3.0 | 7,572,024,369,834,528,000 | 36.342657 | 141 | 0.62191 | false |
citrix-openstack-build/cinder | cinder/volume/drivers/solidfire.py | 1 | 18015 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import httplib
import json
import math
import random
import socket
import string
import time
import uuid
from cinder import context
from cinder import exception
from cinder import flags
from cinder.openstack.common import cfg
from cinder.openstack.common import log as logging
from cinder.volume.drivers.san.san import SanISCSIDriver
from cinder.volume import volume_types
VERSION = 1.1
LOG = logging.getLogger(__name__)
sf_opts = [
cfg.BoolOpt('sf_emulate_512',
default=True,
help='Set 512 byte emulation on volume creation; '),
cfg.BoolOpt('sf_allow_tenant_qos',
default=False,
help='Allow tenants to specify QOS on create'), ]
FLAGS = flags.FLAGS
FLAGS.register_opts(sf_opts)
class SolidFire(SanISCSIDriver):
"""OpenStack driver to enable SolidFire cluster.
Version history:
1.0 - Initial driver
1.1 - Refactor, clone support, qos by type and minor bug fixes
"""
sf_qos_dict = {'slow': {'minIOPS': 100,
'maxIOPS': 200,
'burstIOPS': 200},
'medium': {'minIOPS': 200,
'maxIOPS': 400,
'burstIOPS': 400},
'fast': {'minIOPS': 500,
'maxIOPS': 1000,
'burstIOPS': 1000},
'performant': {'minIOPS': 2000,
'maxIOPS': 4000,
'burstIOPS': 4000},
'off': None}
sf_qos_keys = ['minIOPS', 'maxIOPS', 'burstIOPS']
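# QoS for a new volume is resolved from two places below: with
# sf_allow_tenant_qos enabled, volume metadata may either name one of the
# presets above via the 'sf-qos' key or set the individual
# minIOPS/maxIOPS/burstIOPS keys; a volume type whose extra_specs carry
# those keys takes precedence.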
GB = math.pow(10, 9)
def __init__(self, *args, **kwargs):
super(SolidFire, self).__init__(*args, **kwargs)
def _issue_api_request(self, method_name, params):
"""All API requests to SolidFire device go through this method.
Simple json-rpc web based API calls.
each call takes a set of parameters (dict)
and returns results in a dict as well.
"""
host = FLAGS.san_ip
# For now 443 is the only port our server accepts requests on
port = 443
cluster_admin = FLAGS.san_login
cluster_password = FLAGS.san_password
# NOTE(jdg): We're wrapping a retry loop for a known XDB issue
# Shows up at very high request rates (i.e. create 1000 volumes)
# we have to wrap the whole sequence because the request_id
# can't be re-used
retry_count = 5
while retry_count > 0:
request_id = int(uuid.uuid4()) # just generate a random number
command = {'method': method_name,
'id': request_id}
if params is not None:
command['params'] = params
payload = json.dumps(command, ensure_ascii=False)
payload.encode('utf-8')
header = {'Content-Type': 'application/json-rpc; charset=utf-8'}
if cluster_password is not None:
# base64.encodestring includes a newline character
# in the result, make sure we strip it off
auth_key = base64.encodestring('%s:%s' % (cluster_admin,
cluster_password))[:-1]
header['Authorization'] = 'Basic %s' % auth_key
LOG.debug(_("Payload for SolidFire API call: %s"), payload)
connection = httplib.HTTPSConnection(host, port)
connection.request('POST', '/json-rpc/1.0', payload, header)
response = connection.getresponse()
data = {}
if response.status != 200:
connection.close()
raise exception.SolidFireAPIException(status=response.status)
else:
data = response.read()
try:
data = json.loads(data)
except (TypeError, ValueError), exc:
connection.close()
msg = _("Call to json.loads() raised "
"an exception: %s") % exc
raise exception.SfJsonEncodeFailure(msg)
connection.close()
LOG.debug(_("Results of SolidFire API call: %s"), data)
if ('error' in data and
'xDBVersionMismatch' in data['error']['name']):
LOG.debug(_('Detected xDBVersionMismatch, '
'retry %s of 5') % (5 - retry_count))
time.sleep(1)
retry_count -= 1
else:
retry_count = 0
return data
def _get_volumes_by_sfaccount(self, account_id):
"""Get all volumes on cluster for specified account."""
params = {'accountID': account_id}
data = self._issue_api_request('ListVolumesForAccount', params)
if 'result' in data:
return data['result']['volumes']
def _get_sfaccount_by_name(self, sf_account_name):
"""Get SolidFire account object by name."""
sfaccount = None
params = {'username': sf_account_name}
data = self._issue_api_request('GetAccountByName', params)
if 'result' in data and 'account' in data['result']:
LOG.debug(_('Found solidfire account: %s'), sf_account_name)
sfaccount = data['result']['account']
return sfaccount
def _get_sf_account_name(self, project_id):
"""Build the SolidFire account name to use."""
return ('%s-%s' % (socket.gethostname(), project_id))
def _get_sfaccount(self, project_id):
sf_account_name = self._get_sf_account_name(project_id)
sfaccount = self._get_sfaccount_by_name(sf_account_name)
if sfaccount is None:
raise exception.SfAccountNotFound(account_name=sf_account_name)
return sfaccount
def _create_sfaccount(self, project_id):
"""Create account on SolidFire device if it doesn't already exist.
We're first going to check if the account already exists; if it does
just return it. If not, then create it.
"""
sf_account_name = self._get_sf_account_name(project_id)
sfaccount = self._get_sfaccount_by_name(sf_account_name)
if sfaccount is None:
LOG.debug(_('solidfire account: %s does not exist, create it...'),
sf_account_name)
chap_secret = self._generate_random_string(12)
params = {'username': sf_account_name,
'initiatorSecret': chap_secret,
'targetSecret': chap_secret,
'attributes': {}}
data = self._issue_api_request('AddAccount', params)
if 'result' in data:
sfaccount = self._get_sfaccount_by_name(sf_account_name)
return sfaccount
def _get_cluster_info(self):
"""Query the SolidFire cluster for some property info."""
params = {}
data = self._issue_api_request('GetClusterInfo', params)
if 'result' not in data:
raise exception.SolidFireAPIDataException(data=data)
return data['result']
def _do_export(self, volume):
"""Gets the associated account, retrieves CHAP info and updates."""
sfaccount = self._get_sfaccount(volume['project_id'])
model_update = {}
model_update['provider_auth'] = ('CHAP %s %s'
% (sfaccount['username'],
sfaccount['targetSecret']))
return model_update
def _generate_random_string(self, length):
"""Generates random_string to use for CHAP password."""
char_set = string.ascii_uppercase + string.digits
return ''.join(random.sample(char_set, length))
def _get_model_info(self, sfaccount, sf_volume_id):
"""Gets the connection info for specified account and volume."""
cluster_info = self._get_cluster_info()
iscsi_portal = cluster_info['clusterInfo']['svip'] + ':3260'
chap_secret = sfaccount['targetSecret']
volume_list = self._get_volumes_by_sfaccount(sfaccount['accountID'])
iqn = None
for v in volume_list:
if v['volumeID'] == sf_volume_id:
iqn = v['iqn']
break
model_update = {}
# NOTE(john-griffith): SF volumes are always at lun 0
model_update['provider_location'] = ('%s %s %s'
% (iscsi_portal, iqn, 0))
model_update['provider_auth'] = ('CHAP %s %s'
% (sfaccount['username'],
chap_secret))
return model_update
def _do_clone_volume(self, src_uuid, src_project_id, v_ref):
"""Create a clone of an existing volume.
Currently snapshots are the same as clones on the SF cluster.
Due to the way the SF cluster works there's no loss in efficiency
or space usage between the two. The only thing different right
now is the restore snapshot functionality which has not been
implemented in the pre-release version of the SolidFire Cluster.
"""
attributes = {}
qos = {}
sfaccount = self._get_sfaccount(src_project_id)
params = {'accountID': sfaccount['accountID']}
sf_vol = self._get_sf_volume(src_uuid, params)
if sf_vol is None:
raise exception.VolumeNotFound(volume_id=src_uuid)
if 'qos' in sf_vol:
qos = sf_vol['qos']
attributes = {'uuid': v_ref['id'],
'is_clone': 'True',
'src_uuid': 'src_uuid'}
if qos:
attributes['qos'] = qos
params = {'volumeID': int(sf_vol['volumeID']),
'name': 'UUID-%s' % v_ref['id'],
'attributes': attributes,
'qos': qos}
data = self._issue_api_request('CloneVolume', params)
if (('result' not in data) or ('volumeID' not in data['result'])):
raise exception.SolidFireAPIDataException(data=data)
sf_volume_id = data['result']['volumeID']
model_update = self._get_model_info(sfaccount, sf_volume_id)
return (data, sfaccount, model_update)
def _do_volume_create(self, project_id, params):
sfaccount = self._create_sfaccount(project_id)
params['accountID'] = sfaccount['accountID']
data = self._issue_api_request('CreateVolume', params)
if (('result' not in data) or ('volumeID' not in data['result'])):
raise exception.SolidFireAPIDataException(data=data)
sf_volume_id = data['result']['volumeID']
return self._get_model_info(sfaccount, sf_volume_id)
def _set_qos_presets(self, volume):
qos = {}
valid_presets = self.sf_qos_dict.keys()
#First look to see if they included a preset
presets = [i.value for i in volume.get('volume_metadata')
if i.key == 'sf-qos' and i.value in valid_presets]
if len(presets) > 0:
if len(presets) > 1:
LOG.warning(_('More than one valid preset was '
'detected, using %s') % presets[0])
qos = self.sf_qos_dict[presets[0]]
else:
#look for explicit settings
for i in volume.get('volume_metadata'):
if i.key in self.sf_qos_keys:
qos[i.key] = int(i.value)
return qos
def _set_qos_by_volume_type(self, type_id, ctxt):
qos = {}
volume_type = volume_types.get_volume_type(ctxt, type_id)
specs = volume_type.get('extra_specs')
for key, value in specs.iteritems():
if key in self.sf_qos_keys:
qos[key] = int(value)
return qos
def _get_sf_volume(self, uuid, params):
data = self._issue_api_request('ListVolumesForAccount', params)
if 'result' not in data:
raise exception.SolidFireAPIDataException(data=data)
found_count = 0
sf_volref = None
for v in data['result']['volumes']:
if uuid in v['name']:
found_count += 1
sf_volref = v
LOG.debug(_("Mapped SolidFire volumeID %(sfid)s "
"to cinder ID %(uuid)s.") %
{'sfid': v['volumeID'],
'uuid': uuid})
if found_count == 0:
# NOTE(jdg): Previously we would raise here, but there are cases
# where this might be a cleanup for a failed delete.
# Until we get better states we'll just log an error
LOG.error(_("Volume %s, not found on SF Cluster."), uuid)
if found_count > 1:
LOG.error(_("Found %(count)s volumes mapped to id: %(uuid)s.") %
{'count': found_count,
'uuid': uuid})
raise exception.DuplicateSfVolumeNames(vol_name=uuid)
return sf_volref
def create_volume(self, volume):
"""Create volume on SolidFire device.
The account is where CHAP settings are derived from, volume is
created and exported. Note that the new volume is immediately ready
for use.
One caveat here is that an existing user account must be specified
in the API call to create a new volume. We use a set algorithm to
determine account info based on passed in cinder volume object. First
we check to see if the account already exists (and use it), or if it
does not already exist, we'll go ahead and create it.
"""
slice_count = 1
attributes = {}
qos = {}
if (FLAGS.sf_allow_tenant_qos and
volume.get('volume_metadata') is not None):
qos = self._set_qos_presets(volume)
ctxt = context.get_admin_context()
type_id = volume['volume_type_id']
if type_id is not None:
qos = self._set_qos_by_volume_type(type_id, ctxt)
attributes = {'uuid': volume['id'],
'is_clone': 'False'}
if qos:
attributes['qos'] = qos
params = {'name': 'UUID-%s' % volume['id'],
'accountID': None,
'sliceCount': slice_count,
'totalSize': int(volume['size'] * self.GB),
'enable512e': FLAGS.sf_emulate_512,
'attributes': attributes,
'qos': qos}
return self._do_volume_create(volume['project_id'], params)
def create_cloned_volume(self, volume, src_vref):
"""Create a clone of an existing volume."""
(data, sfaccount, model) = self._do_clone_volume(
src_vref['id'],
src_vref['project_id'],
volume)
return model
def delete_volume(self, volume):
"""Delete SolidFire Volume from device.
SolidFire allows multiple volumes with the same name,
volumeID is what's guaranteed unique.
"""
LOG.debug(_("Enter SolidFire delete_volume..."))
sfaccount = self._get_sfaccount(volume['project_id'])
params = {'accountID': sfaccount['accountID']}
sf_vol = self._get_sf_volume(volume['id'], params)
if sf_vol is not None:
params = {'volumeID': sf_vol['volumeID']}
data = self._issue_api_request('DeleteVolume', params)
if 'result' not in data:
raise exception.SolidFireAPIDataException(data=data)
else:
LOG.error(_("Volume ID %s was not found on "
"the SolidFire Cluster!"), volume['id'])
LOG.debug(_("Leaving SolidFire delete_volume"))
def ensure_export(self, context, volume):
"""Verify the iscsi export info."""
LOG.debug(_("Executing SolidFire ensure_export..."))
return self._do_export(volume)
def create_export(self, context, volume):
"""Setup the iscsi export info."""
LOG.debug(_("Executing SolidFire create_export..."))
return self._do_export(volume)
def delete_snapshot(self, snapshot):
"""Delete the specified snapshot from the SolidFire cluster."""
self.delete_volume(snapshot)
def create_snapshot(self, snapshot):
"""Create a snapshot of a volume on the SolidFire cluster.
Note that for SolidFire Clusters currently there is no snapshot
implementation. Due to the way SF does cloning there's no performance
hit or extra space used. The only thing that's lacking from this is
the ability to restore snaps.
After GA a true snapshot implementation will be available with
restore at which time we'll rework this appropriately.
"""
(data, sfaccount, model) = self._do_clone_volume(
snapshot['volume_id'],
snapshot['project_id'],
snapshot)
def create_volume_from_snapshot(self, volume, snapshot):
"""Create a volume from the specified snapshot."""
(data, sfaccount, model) = self._do_clone_volume(
snapshot['id'],
snapshot['project_id'],
volume)
return model
| apache-2.0 | 5,604,339,479,747,918,000 | 35.840491 | 78 | 0.56181 | false |
tonnrueter/pymca_devel | PyMca/Object3D/Object3DRedBookFont.py | 1 | 9580 |
import Object3DQt as qt
import SceneGLWidget
import OpenGL.GL as GL
import numpy
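# Each row below holds the 13-byte, 8x13-pixel bitmap (bottom row first, as
# OpenGL bitmaps are stored) for one printable ASCII glyph, codes 32..126 in
# order; makeRasterFont() compiles each one into a display list via glBitmap().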
rasters = numpy.array([
[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0x18, 0x18, 0x00, 0x00, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18],
[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x36, 0x36, 0x36, 0x36],
[0x00, 0x00, 0x00, 0x66, 0x66, 0xff, 0x66, 0x66, 0xff, 0x66, 0x66, 0x00, 0x00],
[0x00, 0x00, 0x18, 0x7e, 0xff, 0x1b, 0x1f, 0x7e, 0xf8, 0xd8, 0xff, 0x7e, 0x18],
[0x00, 0x00, 0x0e, 0x1b, 0xdb, 0x6e, 0x30, 0x18, 0x0c, 0x76, 0xdb, 0xd8, 0x70],
[0x00, 0x00, 0x7f, 0xc6, 0xcf, 0xd8, 0x70, 0x70, 0xd8, 0xcc, 0xcc, 0x6c, 0x38],
[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x18, 0x1c, 0x0c, 0x0e],
[0x00, 0x00, 0x0c, 0x18, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x18, 0x0c],
[0x00, 0x00, 0x30, 0x18, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x18, 0x30],
[0x00, 0x00, 0x00, 0x00, 0x99, 0x5a, 0x3c, 0xff, 0x3c, 0x5a, 0x99, 0x00, 0x00],
[0x00, 0x00, 0x00, 0x18, 0x18, 0x18, 0xff, 0xff, 0x18, 0x18, 0x18, 0x00, 0x00],
[0x00, 0x00, 0x30, 0x18, 0x1c, 0x1c, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0x00, 0x38, 0x38, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x60, 0x60, 0x30, 0x30, 0x18, 0x18, 0x0c, 0x0c, 0x06, 0x06, 0x03, 0x03],
[0x00, 0x00, 0x3c, 0x66, 0xc3, 0xe3, 0xf3, 0xdb, 0xcf, 0xc7, 0xc3, 0x66, 0x3c],
[0x00, 0x00, 0x7e, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x78, 0x38, 0x18],
[0x00, 0x00, 0xff, 0xc0, 0xc0, 0x60, 0x30, 0x18, 0x0c, 0x06, 0x03, 0xe7, 0x7e],
[0x00, 0x00, 0x7e, 0xe7, 0x03, 0x03, 0x07, 0x7e, 0x07, 0x03, 0x03, 0xe7, 0x7e],
[0x00, 0x00, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0xff, 0xcc, 0x6c, 0x3c, 0x1c, 0x0c],
[0x00, 0x00, 0x7e, 0xe7, 0x03, 0x03, 0x07, 0xfe, 0xc0, 0xc0, 0xc0, 0xc0, 0xff],
[0x00, 0x00, 0x7e, 0xe7, 0xc3, 0xc3, 0xc7, 0xfe, 0xc0, 0xc0, 0xc0, 0xe7, 0x7e],
[0x00, 0x00, 0x30, 0x30, 0x30, 0x30, 0x18, 0x0c, 0x06, 0x03, 0x03, 0x03, 0xff],
[0x00, 0x00, 0x7e, 0xe7, 0xc3, 0xc3, 0xe7, 0x7e, 0xe7, 0xc3, 0xc3, 0xe7, 0x7e],
[0x00, 0x00, 0x7e, 0xe7, 0x03, 0x03, 0x03, 0x7f, 0xe7, 0xc3, 0xc3, 0xe7, 0x7e],
[0x00, 0x00, 0x00, 0x38, 0x38, 0x00, 0x00, 0x38, 0x38, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0x30, 0x18, 0x1c, 0x1c, 0x00, 0x00, 0x1c, 0x1c, 0x00, 0x00, 0x00],
[0x00, 0x00, 0x06, 0x0c, 0x18, 0x30, 0x60, 0xc0, 0x60, 0x30, 0x18, 0x0c, 0x06],
[0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0x00, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0x60, 0x30, 0x18, 0x0c, 0x06, 0x03, 0x06, 0x0c, 0x18, 0x30, 0x60],
[0x00, 0x00, 0x18, 0x00, 0x00, 0x18, 0x18, 0x0c, 0x06, 0x03, 0xc3, 0xc3, 0x7e],
[0x00, 0x00, 0x3f, 0x60, 0xcf, 0xdb, 0xd3, 0xdd, 0xc3, 0x7e, 0x00, 0x00, 0x00],
[0x00, 0x00, 0xc3, 0xc3, 0xc3, 0xc3, 0xff, 0xc3, 0xc3, 0xc3, 0x66, 0x3c, 0x18],
[0x00, 0x00, 0xfe, 0xc7, 0xc3, 0xc3, 0xc7, 0xfe, 0xc7, 0xc3, 0xc3, 0xc7, 0xfe],
[0x00, 0x00, 0x7e, 0xe7, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xe7, 0x7e],
[0x00, 0x00, 0xfc, 0xce, 0xc7, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3, 0xc7, 0xce, 0xfc],
[0x00, 0x00, 0xff, 0xc0, 0xc0, 0xc0, 0xc0, 0xfc, 0xc0, 0xc0, 0xc0, 0xc0, 0xff],
[0x00, 0x00, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xfc, 0xc0, 0xc0, 0xc0, 0xff],
[0x00, 0x00, 0x7e, 0xe7, 0xc3, 0xc3, 0xcf, 0xc0, 0xc0, 0xc0, 0xc0, 0xe7, 0x7e],
[0x00, 0x00, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3, 0xff, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3],
[0x00, 0x00, 0x7e, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x7e],
[0x00, 0x00, 0x7c, 0xee, 0xc6, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06],
[0x00, 0x00, 0xc3, 0xc6, 0xcc, 0xd8, 0xf0, 0xe0, 0xf0, 0xd8, 0xcc, 0xc6, 0xc3],
[0x00, 0x00, 0xff, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0],
[0x00, 0x00, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3, 0xdb, 0xff, 0xff, 0xe7, 0xc3],
[0x00, 0x00, 0xc7, 0xc7, 0xcf, 0xcf, 0xdf, 0xdb, 0xfb, 0xf3, 0xf3, 0xe3, 0xe3],
[0x00, 0x00, 0x7e, 0xe7, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3, 0xe7, 0x7e],
[0x00, 0x00, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xfe, 0xc7, 0xc3, 0xc3, 0xc7, 0xfe],
[0x00, 0x00, 0x3f, 0x6e, 0xdf, 0xdb, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3, 0x66, 0x3c],
[0x00, 0x00, 0xc3, 0xc6, 0xcc, 0xd8, 0xf0, 0xfe, 0xc7, 0xc3, 0xc3, 0xc7, 0xfe],
[0x00, 0x00, 0x7e, 0xe7, 0x03, 0x03, 0x07, 0x7e, 0xe0, 0xc0, 0xc0, 0xe7, 0x7e],
[0x00, 0x00, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0xff],
[0x00, 0x00, 0x7e, 0xe7, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3],
[0x00, 0x00, 0x18, 0x3c, 0x3c, 0x66, 0x66, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3],
[0x00, 0x00, 0xc3, 0xe7, 0xff, 0xff, 0xdb, 0xdb, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3],
[0x00, 0x00, 0xc3, 0x66, 0x66, 0x3c, 0x3c, 0x18, 0x3c, 0x3c, 0x66, 0x66, 0xc3],
[0x00, 0x00, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x3c, 0x3c, 0x66, 0x66, 0xc3],
[0x00, 0x00, 0xff, 0xc0, 0xc0, 0x60, 0x30, 0x7e, 0x0c, 0x06, 0x03, 0x03, 0xff],
[0x00, 0x00, 0x3c, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x3c],
[0x00, 0x03, 0x03, 0x06, 0x06, 0x0c, 0x0c, 0x18, 0x18, 0x30, 0x30, 0x60, 0x60],
[0x00, 0x00, 0x3c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x3c],
[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc3, 0x66, 0x3c, 0x18],
[0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x18, 0x38, 0x30, 0x70],
[0x00, 0x00, 0x7f, 0xc3, 0xc3, 0x7f, 0x03, 0xc3, 0x7e, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0xfe, 0xc3, 0xc3, 0xc3, 0xc3, 0xfe, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0],
[0x00, 0x00, 0x7e, 0xc3, 0xc0, 0xc0, 0xc0, 0xc3, 0x7e, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0x7f, 0xc3, 0xc3, 0xc3, 0xc3, 0x7f, 0x03, 0x03, 0x03, 0x03, 0x03],
[0x00, 0x00, 0x7f, 0xc0, 0xc0, 0xfe, 0xc3, 0xc3, 0x7e, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0x30, 0x30, 0x30, 0x30, 0x30, 0xfc, 0x30, 0x30, 0x30, 0x33, 0x1e],
[0x7e, 0xc3, 0x03, 0x03, 0x7f, 0xc3, 0xc3, 0xc3, 0x7e, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3, 0xc3, 0xfe, 0xc0, 0xc0, 0xc0, 0xc0],
[0x00, 0x00, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x00, 0x00, 0x18, 0x00],
[0x38, 0x6c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x00, 0x00, 0x0c, 0x00],
[0x00, 0x00, 0xc6, 0xcc, 0xf8, 0xf0, 0xd8, 0xcc, 0xc6, 0xc0, 0xc0, 0xc0, 0xc0],
[0x00, 0x00, 0x7e, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x78],
[0x00, 0x00, 0xdb, 0xdb, 0xdb, 0xdb, 0xdb, 0xdb, 0xfe, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0xc6, 0xc6, 0xc6, 0xc6, 0xc6, 0xc6, 0xfc, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0x7c, 0xc6, 0xc6, 0xc6, 0xc6, 0xc6, 0x7c, 0x00, 0x00, 0x00, 0x00],
[0xc0, 0xc0, 0xc0, 0xfe, 0xc3, 0xc3, 0xc3, 0xc3, 0xfe, 0x00, 0x00, 0x00, 0x00],
[0x03, 0x03, 0x03, 0x7f, 0xc3, 0xc3, 0xc3, 0xc3, 0x7f, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0xc0, 0xc0, 0xc0, 0xc0, 0xc0, 0xe0, 0xfe, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0xfe, 0x03, 0x03, 0x7e, 0xc0, 0xc0, 0x7f, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0x1c, 0x36, 0x30, 0x30, 0x30, 0x30, 0xfc, 0x30, 0x30, 0x30, 0x00],
[0x00, 0x00, 0x7e, 0xc6, 0xc6, 0xc6, 0xc6, 0xc6, 0xc6, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0x18, 0x3c, 0x3c, 0x66, 0x66, 0xc3, 0xc3, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0xc3, 0xe7, 0xff, 0xdb, 0xc3, 0xc3, 0xc3, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0xc3, 0x66, 0x3c, 0x18, 0x3c, 0x66, 0xc3, 0x00, 0x00, 0x00, 0x00],
[0xc0, 0x60, 0x60, 0x30, 0x18, 0x3c, 0x66, 0x66, 0xc3, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0xff, 0x60, 0x30, 0x18, 0x0c, 0x06, 0xff, 0x00, 0x00, 0x00, 0x00],
[0x00, 0x00, 0x0f, 0x18, 0x18, 0x18, 0x38, 0xf0, 0x38, 0x18, 0x18, 0x18, 0x0f],
[0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18, 0x18],
[0x00, 0x00, 0xf0, 0x18, 0x18, 0x18, 0x1c, 0x0f, 0x1c, 0x18, 0x18, 0x18, 0xf0],
[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x8f, 0xf1, 0x60, 0x00, 0x00, 0x00]]).astype(numpy.uint8)
class Object3DRedBookFont:
def __init__(self):
self.rasters = rasters
self.__initialized = 0
def initialize(self):
self.makeRasterFont()
def makeRasterFont(self):
GL.glPixelStorei(GL.GL_UNPACK_ALIGNMENT, 1)
self.fontOffset = GL.glGenLists(128)
for i in range(32, 127):
GL.glNewList(i + self.fontOffset, GL.GL_COMPILE)
GL.glBitmap(8, 13, 0.0, 2.0, 10.0, 0.0, self.rasters[i-32])
GL.glEndList()
def printString(self, text):
GL.glPushAttrib(GL.GL_LIST_BIT)
GL.glListBase(self.fontOffset)
GL.glCallLists(text)
GL.glPopAttrib()
class TestWidget(qt.QGLWidget):
def initializeGL(self):
qt.QGLWidget.initializeGL(self)
self.redBookFont = Object3DRedBookFont()
self.redBookFont.initialize()
def resizeGL(self, width, height):
side = min(width, height)
GL.glViewport((width - side) / 2, (height - side) / 2, side, side)
GL.glMatrixMode(GL.GL_PROJECTION)
GL.glLoadIdentity()
GL.glOrtho(0.0, width, 0.0, height, -1.0, 1.0)
GL.glMatrixMode(GL.GL_MODELVIEW)
GL.glLoadIdentity()
def paintGL(self):
GL.glClear(GL.GL_COLOR_BUFFER_BIT)
GL.glColor4f(1.0, 1.0, 1.0, 1.0)
for i in range(32, 127, 32):
teststring = ""
GL.glRasterPos2i(20, 200 -18 * i/32)
for j in range(32):
teststring += "%c" % (i+j)
self.redBookFont.printString(teststring)
GL.glRasterPos2f(20, 100)
self.redBookFont.printString("The quick brown fox jumps")
GL.glRasterPos2i(20, 82)
self.redBookFont.printString("over a lazy dog")
def test():
app = qt.QApplication([])
w = TestWidget()
w.show()
app.exec_()
if __name__ == "__main__":
test()
| gpl-2.0 | 4,970,306,696,342,846,000 | 57.060606 | 101 | 0.639248 | false |
HewlettPackard/oneview-ansible | library/module_utils/oneview.py | 1 | 60678 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
###
# Copyright (2016-2021) Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
from __future__ import (absolute_import, division, print_function, unicode_literals)
import abc
import collections
import json
import logging
import os
import traceback
try:
from hpeOneView.oneview_client import OneViewClient
HAS_HPE_ONEVIEW = True
except ImportError:
HAS_HPE_ONEVIEW = False
try:
from ansible.module_utils import six
from ansible.module_utils._text import to_native
except ImportError:
import six
to_native = str
from ansible.module_utils.basic import AnsibleModule
# NOTE: VALIDATE IF REQUIRED
from copy import deepcopy
from collections import OrderedDict
# NOTE: VALIDATE IF REQUIRED
logger = logging.getLogger(__name__) # Logger for development purposes only
def get_logger(mod_name):
"""
To activate logs, set up the environment var LOGFILE
e.g.: export LOGFILE=/tmp/ansible-oneview.log
Args:
mod_name: module name
Returns: Logger instance
"""
logger = logging.getLogger(os.path.basename(mod_name))
global LOGFILE
LOGFILE = os.environ.get('LOGFILE')
if not LOGFILE:
logger.addHandler(logging.NullHandler())
else:
logging.basicConfig(level=logging.DEBUG, datefmt='%Y-%m-%d %H:%M:%S',
format='%(asctime)s %(levelname)s %(name)s %(message)s',
filename=LOGFILE, filemode='a')
return logger
def transform_list_to_dict(list_):
"""
Transforms a list into a dictionary, putting values as keys.
:arg list list_: List of values
:return: dict: dictionary built
"""
ret = {}
if not list_:
return ret
for value in list_:
if isinstance(value, collections.Mapping):
ret.update(value)
else:
ret[to_native(value)] = True
return ret
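# For example, transform_list_to_dict(['a', {'b': False}, 'c']) returns
# {'a': True, 'b': False, 'c': True}.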
# Makes a deep merge of 2 dictionaries and returns the merged dictionary
def dict_merge(original_resource_dict, data_dict):
resource_dict = deepcopy(original_resource_dict)
for key, val in data_dict.items():
if not resource_dict.get(key):
resource_dict[key] = val
elif isinstance(resource_dict[key], dict) and isinstance(data_dict[key], collections.Mapping):
resource_dict[key] = dict_merge(resource_dict[key], data_dict[key])
elif isinstance(resource_dict[key], list) and isinstance(data_dict[key], list):
resource_dict[key] = data_dict[key]
else:
resource_dict[key] = val
return resource_dict
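# For example, dict_merge({'a': {'x': 1}, 'b': 1}, {'a': {'y': 2}, 'c': 3})
# returns {'a': {'x': 1, 'y': 2}, 'b': 1, 'c': 3}; note that list values are
# replaced by the updated list rather than merged element-wise.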
def merge_list_by_key(original_list, updated_list, key, ignore_when_null=None, replace_key=None, replace_value=None):
"""
Merge two lists by the key. It basically:
1. Adds the items that are present on updated_list and are absent on original_list.
2. Removes items that are absent on updated_list and are present on original_list.
3. For all items that are in both lists, overwrites the values from the original item by the updated item.
:arg list original_list: original list.
:arg list updated_list: list with changes.
:arg str key: unique identifier.
:arg list ignore_when_null: list with the keys from the updated items that should be ignored in the merge,
if their values are null.
:return: list: Lists merged.
"""
ignore_when_null = [] if ignore_when_null is None else ignore_when_null
if not original_list:
return updated_list
items_map = collections.OrderedDict([(i[key], i.copy()) for i in original_list])
merged_items = collections.OrderedDict()
for item in updated_list:
item_key = item[key]
if item_key in items_map:
for ignored_key in ignore_when_null:
if ignored_key in item and item[ignored_key] is None:
item.pop(ignored_key)
if replace_key and item.get(replace_key) == replace_value:
item[replace_key] = items_map[item_key][replace_key]
merged_items[item_key] = items_map[item_key]
merged_items[item_key].update(item)
# merged_items[item_key] = dict_merge(merged_items[item_key], item)
else:
merged_items[item_key] = item
return list(merged_items.values())
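# For example, merging by key='name':
#   original: [{'name': 'e0', 'mtu': 1500, 'enabled': True}, {'name': 'e9'}]
#   updated:  [{'name': 'e0', 'mtu': 9000}, {'name': 'e1'}]
#   result:   [{'name': 'e0', 'mtu': 9000, 'enabled': True}, {'name': 'e1'}]
# ('e9' is dropped because it is absent from the updated list).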
def _sort_by_keys(resource1, resource2):
keys = ['name', 'enclosureIndex']
if isinstance(resource1, list) and isinstance(resource1[0], dict):
for key in keys:
if key in resource1[0]:
resource1 = sorted(resource1, key=lambda k: k[key])
resource2 = sorted(resource2, key=lambda k: k[key])
return resource1, resource2
def _str_sorted(obj):
if isinstance(obj, collections.Mapping):
return json.dumps(obj, sort_keys=True)
else:
return str(obj)
def _standardize_value(value):
"""
Convert value to string to enhance the comparison.
:arg value: Any object type.
:return: str: Converted value.
"""
if isinstance(value, float) and value.is_integer():
# Workaround to avoid erroneous comparison between int and float
# Removes zero from integer floats
value = int(value)
return str(value)
def compare_lig(first_resource, second_resource):
"""
Recursively compares dictionary contents for equivalence, ignoring types and element order.
Particularities of the comparison:
- Inexistent key = None
- These values are considered equal: None, empty, False
- Lists are compared value by value after a sort, if they have same size.
- Each element is converted to str before the comparison.
:arg dict first_resource: first dictionary
:arg dict second_resource: second dictionary
:return: bool: True when equal, False when different.
"""
resource1 = first_resource
resource2 = second_resource
debug_resources = "resource1 = {0}, resource2 = {1}".format(resource1, resource2)
# The first resource is True / Not Null and the second resource is False / Null
if resource1 and not resource2:
logger.debug("resource1 and not resource2. " + debug_resources)
return False
# Checks all keys in first dict against the second dict
for key in resource1:
# compare uplinkset property logicalPortConfigInfos
if key == 'logicalPortConfigInfos':
if sort_by_uplink_set_location(resource1[key], resource2[key]):
continue
else:
logger.debug(OneViewModuleBase.MSG_DIFF_AT_KEY.format(key) + debug_resources)
return False
if key not in resource2:
if resource1[key] is not None:
# Inexistent key is equivalent to exist with value None
logger.debug(OneViewModuleBase.MSG_DIFF_AT_KEY.format(key) + debug_resources)
return False
# If both values are null, empty or False it will be considered equal.
elif not resource1[key] and not resource2[key]:
continue
elif isinstance(resource1[key], collections.Mapping):
# recursive call
if not compare_lig(resource1[key], resource2[key]):
logger.debug(OneViewModuleBase.MSG_DIFF_AT_KEY.format(key) + debug_resources)
return False
elif isinstance(resource1[key], list):
# change comparison function to compare_list
if not compare_list_lig(resource1[key], resource2[key]):
logger.debug(OneViewModuleBase.MSG_DIFF_AT_KEY.format(key) + debug_resources)
return False
elif _standardize_value(resource1[key]) != _standardize_value(resource2[key]):
logger.debug(OneViewModuleBase.MSG_DIFF_AT_KEY.format(key) + debug_resources)
return False
# Checks all keys in the second dict, looking for missing elements
for key in resource2.keys():
if key not in resource1:
if resource2[key] is not None:
# Inexistent key is equivalent to exist with value None
logger.debug(OneViewModuleBase.MSG_DIFF_AT_KEY.format(key) + debug_resources)
return False
return True
def compare(first_resource, second_resource):
"""
Recursively compares dictionary contents for equivalence, ignoring types and element order.
Particularities of the comparison:
- A nonexistent key is treated as a key with value None.
- These values are considered equal: None, empty, False.
- Lists of the same size are compared value by value after a sort.
- Each element is converted to str before the comparison.
:arg dict first_resource: first dictionary
:arg dict second_resource: second dictionary
:return: bool: True when equal, False when different.
"""
resource1 = first_resource
resource2 = second_resource
debug_resources = "resource1 = {0}, resource2 = {1}".format(resource1, resource2)
# The first resource is True / Not Null and the second resource is False / Null
if resource1 and not resource2:
logger.debug("resource1 and not resource2. " + debug_resources)
return False
# Checks all keys in first dict against the second dict
for key in resource1:
if key not in resource2:
if resource1[key] is not None:
# A nonexistent key is equivalent to one that exists with value None
logger.debug(OneViewModuleBase.MSG_DIFF_AT_KEY.format(key) + debug_resources)
return False
# If both values are null, empty or False it will be considered equal.
elif not resource1[key] and not resource2[key]:
continue
elif isinstance(resource1[key], collections.Mapping):
# recursive call
if not compare(resource1[key], resource2[key]):
logger.debug(OneViewModuleBase.MSG_DIFF_AT_KEY.format(key) + debug_resources)
return False
elif isinstance(resource1[key], list):
# change comparison function to compare_list
if not compare_list(resource1[key], resource2[key]):
logger.debug(OneViewModuleBase.MSG_DIFF_AT_KEY.format(key) + debug_resources)
return False
elif _standardize_value(resource1[key]) != _standardize_value(resource2[key]):
logger.debug(OneViewModuleBase.MSG_DIFF_AT_KEY.format(key) + debug_resources)
return False
# Checks all keys in the second dict, looking for missing elements
for key in resource2.keys():
if key not in resource1:
if resource2[key] is not None:
# A nonexistent key is equivalent to one that exists with value None
logger.debug(OneViewModuleBase.MSG_DIFF_AT_KEY.format(key) + debug_resources)
return False
return True
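# A minimal sketch of compare() semantics with invented data (not from the
# original module): missing keys equal None, falsy values match each other,
# and values are compared as strings.
#
#   compare({'name': 'lig', 'uri': None}, {'name': 'lig'})   # -> True
#   compare({'enabled': False}, {'enabled': None})           # -> True
#   compare({'count': 3}, {'count': '3'})                    # -> True
#   compare({'name': 'lig'}, {'name': 'other'})              # -> False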
def compare_list(first_resource, second_resource):
"""
Recursively compares the contents of two lists for equivalence, ignoring types and element order.
Lists of the same size are compared value by value after a sort;
each element is converted to str before the comparison.
:arg list first_resource: first list
:arg list second_resource: second list
:return: True when equal; False when different.
"""
resource1 = first_resource
resource2 = second_resource
debug_resources = "resource1 = {0}, resource2 = {1}".format(resource1, resource2)
# The second list is null / empty / False
if not resource2:
logger.debug("resource 2 is null. " + debug_resources)
return False
if len(resource1) != len(resource2):
logger.debug("resources have different length. " + debug_resources)
return False
resource1 = sorted(resource1, key=_str_sorted)
resource2 = sorted(resource2, key=_str_sorted)
for i, val in enumerate(resource1):
if isinstance(val, collections.Mapping):
# change comparison function to compare dictionaries
if not compare(val, resource2[i]):
logger.debug("resources are different. " + debug_resources)
return False
elif isinstance(val, list):
# recursive call
if not compare_list(val, resource2[i]):
logger.debug("lists are different. " + debug_resources)
return False
elif _standardize_value(val) != _standardize_value(resource2[i]):
logger.debug("values are different. " + debug_resources)
return False
# no differences found
return True
def compare_list_lig(first_resource, second_resource):
"""
Recursively compares the contents of two lists for equivalence, ignoring types and element order.
Lists of the same size are compared value by value after a sort;
each element is converted to str before the comparison.
:arg list first_resource: first list
:arg list second_resource: second list
:return: True when equal; False when different.
"""
resource1 = first_resource
resource2 = second_resource
debug_resources = "resource1 = {0}, resource2 = {1}".format(resource1, resource2)
# The second list is null / empty / False
if not resource2:
logger.debug("resource 2 is null. " + debug_resources)
return False
if len(resource1) != len(resource2):
logger.debug("resources have different length. " + debug_resources)
return False
resource1 = sorted(resource1, key=_str_sorted)
resource2 = sorted(resource2, key=_str_sorted)
# sort resources by specific keys
resource1, resource2 = _sort_by_keys(resource1, resource2)
for i, val in enumerate(resource1):
if isinstance(val, collections.Mapping):
# change comparison function to compare dictionaries
if not compare_lig(val, resource2[i]):
logger.debug("resources are different. " + debug_resources)
return False
elif isinstance(val, list):
# recursive call
if not compare_list_lig(val, resource2[i]):
logger.debug("lists are different. " + debug_resources)
return False
elif _standardize_value(val) != _standardize_value(resource2[i]):
logger.debug("values are different. " + debug_resources)
return False
# no differences found
return True
def sort_by_uplink_set_location(resource1, resource2):
"""
Compares the contents of two lists for equivalence after sorting their elements.
Inner dict elements (Bay, Enclosure, Port) are concatenated to build a unique value for each entry.
:arg list resource1: first list of dicts
:arg list resource2: second list of dicts
:return: True when equal; False when different.
"""
# Check first list elements
for config_dict in resource1:
location_entries = config_dict["logicalLocation"]["locationEntries"]
# Append all types together ['Bay_3', 'Enclosure_1', 'Port_75']
each_location = []
for local_entry in location_entries:
# Combine the values for comparison, 'Bay_3' if type='Bay' and relative value=3
value = local_entry.get('type', '') + "_" + str(local_entry.get('relativeValue', ''))
each_location.append(value)
# Check second elements and add each entry in all_entries list
all_entries = []
for config_dict_res2 in resource2:
location_entries_res2 = config_dict_res2["logicalLocation"]["locationEntries"]
each_location_res2 = []
for local_entry_res2 in location_entries_res2:
value_res2 = local_entry_res2.get('type', '') + "_" + str(local_entry_res2.get('relativeValue', ''))
each_location_res2.append(value_res2)
if each_location_res2 not in all_entries:
all_entries.append(sorted(each_location_res2))
# Check first list element is present in second list
if not sorted(each_location) in all_entries:
return False
return True
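# Hypothetical illustration of the flattening above (values invented): each
# location entry becomes '<type>_<relativeValue>', e.g.
#
#   {'logicalLocation': {'locationEntries': [
#       {'type': 'Bay', 'relativeValue': 3},
#       {'type': 'Enclosure', 'relativeValue': 1},
#       {'type': 'Port', 'relativeValue': 75}]}}
#
# yields the sorted list ['Bay_3', 'Enclosure_1', 'Port_75']; the function
# returns True only if every such list built from resource1 also appears among
# the lists built from resource2.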
class OneViewModuleException(Exception):
"""
OneView base Exception.
Attributes:
msg (str): Exception message.
oneview_response (dict): OneView rest response.
"""
def __init__(self, data):
self.msg = None
self.oneview_response = None
if isinstance(data, six.string_types):
self.msg = data
else:
self.oneview_response = data
if data and isinstance(data, dict):
self.msg = data.get('message')
if self.oneview_response:
Exception.__init__(self, self.msg, self.oneview_response)
else:
Exception.__init__(self, self.msg)
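# Usage sketch (hypothetical messages): the exception accepts either a plain
# string or a OneView REST response dict carrying a 'message' key.
#
#   raise OneViewModuleException('Resource could not be updated')
#   raise OneViewModuleException({'message': 'Resource could not be updated',
#                                 'errorCode': 'EXAMPLE_CODE'})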
class OneViewModuleTaskError(OneViewModuleException):
"""
OneView Task Error Exception.
Attributes:
msg (str): Exception message.
error_code (str): A code which uniquely identifies the specific error.
"""
def __init__(self, msg, error_code=None):
super(OneViewModuleTaskError, self).__init__(msg)
self.error_code = error_code
class OneViewModuleValueError(OneViewModuleException):
"""
OneView Value Error.
The exception is raised when the data contains an inappropriate value.
Attributes:
msg (str): Exception message.
"""
pass
class OneViewModuleResourceNotFound(OneViewModuleException):
"""
OneView Resource Not Found Exception.
The exception is raised when an associated resource was not found.
Attributes:
msg (str): Exception message.
"""
pass
# @six.add_metaclass(abc.ABCMeta)
class OneViewModule(object):
MSG_CREATED = 'Resource created successfully.'
MSG_UPDATED = 'Resource updated successfully.'
MSG_DELETED = 'Resource deleted successfully.'
MSG_ALREADY_PRESENT = 'Resource is already present.'
MSG_ALREADY_ABSENT = 'Resource is already absent.'
MSG_DIFF_AT_KEY = 'Difference found at key \'{0}\'. '
MSG_MANDATORY_FIELD_MISSING = 'Missing mandatory field: name'
HPE_ONEVIEW_SDK_REQUIRED = 'HPE OneView Python SDK is required for this module.'
ONEVIEW_COMMON_ARGS = dict(
api_version=dict(type='int'),
config=dict(type='path'),
hostname=dict(type='str'),
image_streamer_hostname=dict(type='str'),
password=dict(type='str', no_log=True),
username=dict(type='str'),
auth_login_domain=dict(type='str')
)
ONEVIEW_VALIDATE_ETAG_ARGS = dict(validate_etag=dict(type='bool', default=True))
def __init__(self, additional_arg_spec=None, validate_etag_support=False):
"""
OneViewModule constructor.
:arg dict additional_arg_spec: Additional argument spec definition.
:arg bool validate_etag_support: Enables support for eTag validation.
"""
argument_spec = self._build_argument_spec(additional_arg_spec, validate_etag_support)
self.module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
self.resource_client = None
self.current_resource = None
self.state = self.module.params.get('state')
self.data = self.module.params.get('data')
self._check_hpe_oneview_sdk()
self._create_oneview_client()
# Preload params for get_all - used by facts
self.facts_params = self.module.params.get('params') or {}
# Preload options as dict - used by facts
self.options = transform_list_to_dict(self.module.params.get('options'))
self.validate_etag_support = validate_etag_support
def _build_argument_spec(self, additional_arg_spec, validate_etag_support):
merged_arg_spec = dict()
merged_arg_spec.update(self.ONEVIEW_COMMON_ARGS)
if validate_etag_support:
merged_arg_spec.update(self.ONEVIEW_VALIDATE_ETAG_ARGS)
if additional_arg_spec:
merged_arg_spec.update(additional_arg_spec)
return merged_arg_spec
def _check_hpe_oneview_sdk(self):
if not HAS_HPE_ONEVIEW:
self.module.fail_json(msg=self.HPE_ONEVIEW_SDK_REQUIRED)
def _create_oneview_client(self):
if self.module.params.get('hostname'):
config = dict(ip=self.module.params['hostname'],
credentials=dict(userName=self.module.params['username'], password=self.module.params['password'],
authLoginDomain=self.module.params.get('auth_login_domain', '')),
api_version=self.module.params['api_version'],
image_streamer_ip=self.module.params['image_streamer_hostname'])
self.oneview_client = OneViewClient(config)
elif not self.module.params['config']:
self.oneview_client = OneViewClient.from_environment_variables()
else:
self.oneview_client = OneViewClient.from_json_file(self.module.params['config'])
def set_resource_object(self, resource_client, name=None):
self.resource_client = resource_client
uri = None
if self.data:
if self.data.get("name"):
name = self.data["name"]
elif self.data.get("uri"):
uri = self.data["uri"]
if not name and not uri:
if self.module.params.get("name"):
name = self.module.params["name"]
elif self.module.params.get("uri"):
uri = self.module.params["uri"]
if name:
self.current_resource = self.resource_client.get_by_name(name)
elif uri:
self.current_resource = self.resource_client.get_by_uri(uri)
@abc.abstractmethod
def execute_module(self):
"""
Abstract method, must be implemented by the inheritor.
This method is called from the run method. It should contain the module logic
:return: dict: It must return a dictionary with the attributes for the module result,
such as ansible_facts, msg and changed.
"""
pass
def run(self):
"""
Common implementation of the OneView run modules.
It calls the inheritor's 'execute_module' function and sends the result back to Ansible.
It handles any OneViewModuleException in order to signal a failure to Ansible, with a descriptive error message.
"""
try:
if self.validate_etag_support:
if not self.module.params.get('validate_etag'):
self.oneview_client.connection.disable_etag_validation()
result = self.execute_module()
if not result:
result = {}
if "changed" not in result:
result['changed'] = False
self.module.exit_json(**result)
except OneViewModuleException as exception:
error_msg = '; '.join(to_native(e) for e in exception.args)
self.module.fail_json(msg=error_msg, exception=traceback.format_exc())
def resource_absent(self, method='delete'):
"""
Generic implementation of the absent state for the OneView resources.
It checks if the resource needs to be removed.
:arg str method: Function of the OneView client that will be called for resource deletion.
Usually delete or remove.
:return: A dictionary with the expected arguments for the AnsibleModule.exit_json
"""
if self.current_resource:
getattr(self.current_resource, method)()
return {"changed": True, "msg": self.MSG_DELETED}
else:
return {"changed": False, "msg": self.MSG_ALREADY_ABSENT}
def get_by_name(self, name):
"""
Generic get by name implementation.
:arg str name: Resource name to search for.
:return: The resource found or None.
"""
result = self.resource_client.get_by('name', name)
return result[0] if result else None
def resource_present(self, fact_name, create_method='create'):
"""
Generic implementation of the present state for the OneView resources.
It checks if the resource needs to be created or updated.
:arg str fact_name: Name of the fact returned to Ansible.
:arg str create_method: Function of the OneView client that will be called for resource creation.
Usually create or add.
:return: A dictionary with the expected arguments for the AnsibleModule.exit_json
"""
changed = False
if "newName" in self.data:
self.data["name"] = self.data.pop("newName")
if not self.current_resource:
self.current_resource = getattr(self.resource_client, create_method)(self.data)
msg = self.MSG_CREATED
changed = True
else:
changed, msg = self._update_resource()
data = self.current_resource.data
return dict(
msg=msg,
changed=changed,
ansible_facts={fact_name: data}
)
def _update_resource(self):
updated_data = self.current_resource.data.copy()
updated_data = dict_merge(updated_data, self.data)
changed = False
if compare(self.current_resource.data, updated_data):
msg = self.MSG_ALREADY_PRESENT
else:
self.current_resource.update(updated_data)
changed = True
msg = self.MSG_UPDATED
return (changed, msg)
def resource_scopes_set(self, state, fact_name, scope_uris):
"""
Generic implementation of the scopes update PATCH for the OneView resources.
It checks if the resource needs to be updated with the current scopes.
This method is meant to be run after ensuring the present state.
:arg dict state: Dict containing the data from the last state results in the resource.
It needs to have the 'msg', 'changed', and 'ansible_facts' entries.
:arg str fact_name: Name of the fact returned to Ansible.
:arg list scope_uris: List with all the scope URIs to be added to the resource.
:return: A dictionary with the expected arguments for the AnsibleModule.exit_json
"""
if scope_uris is None:
scope_uris = []
resource = state['ansible_facts'][fact_name]
if resource.get('scopeUris') is None or set(resource['scopeUris']) != set(scope_uris):
operation_data = dict(operation='replace', path='/scopeUris', value=scope_uris)
updated_resource = self.current_resource.patch(**operation_data)
state['ansible_facts'][fact_name] = updated_resource.data
state['changed'] = True
state['msg'] = self.MSG_UPDATED
return state
def check_resource_present(self, fact_name):
"""
The following implementation will work for resource_present under check mode.
Generic implementation of the present state to be run under check mode for the OneView resources.
It checks whether the resource would be created or updated, without making any change.
:arg str fact_name: Name of the fact returned to Ansible.
:return: A dictionary with the expected arguments for the AnsibleModule.exit_json
"""
changed = False
if "newName" in self.data:
self.data["name"] = self.data.pop("newName")
if not self.current_resource:
msg = self.MSG_CREATED
changed = True
else:
changed, msg = self.check_update_resource()
data = self.data
return dict(
msg=msg,
changed=changed,
ansible_facts={fact_name: data}
)
def check_update_resource(self):
"""
The following implementation will work for update_resource under check mode.
It checks if the resource needs to be updated or not.
"""
updated_data = self.current_resource.data.copy()
updated_data.update(self.data)
changed = False
if compare(self.current_resource.data, updated_data):
msg = self.MSG_ALREADY_PRESENT
else:
changed = True
msg = self.MSG_UPDATED
return (changed, msg)
def check_resource_absent(self, method='delete'):
"""
The following implementation will work for resource_absent under check mode.
Generic implementation of the absent state for OneView resources, to be run under check_mode.
It checks if the resource needs to be removed.
:return: A dictionary with the expected arguments for the AnsibleModule.exit_json
"""
if self.current_resource:
return {"changed": True, "msg": self.MSG_DELETED}
else:
return {"changed": False, "msg": self.MSG_ALREADY_ABSENT}
def check_resource_scopes_set(self, state, fact_name, scope_uris):
"""
The following implementation will work for resource_scopes_set under check mode.
Generic implementation of the scopes update PATCH for the OneView resources.
It checks if the resource needs to be updated with the current scopes.
This method is meant to be run after ensuring the present state.
:arg dict state: Dict containing the data from the last state results in the resource.
It needs to have the 'msg', 'changed', and 'ansible_facts' entries.
:arg str fact_name: Name of the fact returned to Ansible.
:arg list scope_uris: List with all the scope URIs to be added to the resource.
:return: A dictionary with the expected arguments for the AnsibleModule.exit_json
"""
if scope_uris is None:
scope_uris = []
resource = state['ansible_facts'][fact_name]
if resource.get('scopeUris') is None or set(resource['scopeUris']) != set(scope_uris):
state['changed'] = True
state['msg'] = self.MSG_UPDATED
return state
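# A minimal sketch of how a concrete module would subclass OneViewModule
# (module name, argument spec and resource client are hypothetical, not taken
# from the original code):
#
#   class FcNetworkModule(OneViewModule):
#       def __init__(self):
#           additional_arg_spec = dict(data=dict(required=True, type='dict'),
#                                      state=dict(default='present',
#                                                 choices=['present', 'absent']))
#           super(FcNetworkModule, self).__init__(additional_arg_spec=additional_arg_spec,
#                                                 validate_etag_support=True)
#           self.set_resource_object(self.oneview_client.fc_networks)
#
#       def execute_module(self):
#           if self.state == 'present':
#               return self.resource_present('fc_network')
#           return self.resource_absent()
#
#   # FcNetworkModule().run() then drives the full Ansible module life cycle.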
# @six.add_metaclass(abc.ABCMeta)
class OneViewModuleBase(object):
MSG_CREATED = 'Resource created successfully.'
MSG_UPDATED = 'Resource updated successfully.'
MSG_DELETED = 'Resource deleted successfully.'
MSG_ALREADY_PRESENT = 'Resource is already present.'
MSG_ALREADY_ABSENT = 'Resource is already absent.'
MSG_DIFF_AT_KEY = 'Difference found at key \'{0}\'. '
HPE_ONEVIEW_SDK_REQUIRED = 'HPE OneView Python SDK is required for this module.'
ONEVIEW_COMMON_ARGS = dict(
api_version=dict(type='int'),
config=dict(type='path'),
hostname=dict(type='str'),
image_streamer_hostname=dict(type='str'),
password=dict(type='str', no_log=True),
username=dict(type='str'),
auth_login_domain=dict(type='str')
)
resource_client = None
ONEVIEW_VALIDATE_ETAG_ARGS = dict(validate_etag=dict(type='bool', default=True))
def __init__(self, additional_arg_spec=None, validate_etag_support=False):
"""
OneViewModuleBase constructor.
:arg dict additional_arg_spec: Additional argument spec definition.
:arg bool validate_etag_support: Enables support for eTag validation.
"""
argument_spec = self._build_argument_spec(additional_arg_spec, validate_etag_support)
self.module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
self._check_hpe_oneview_sdk()
self._create_oneview_client()
self.state = self.module.params.get('state')
self.data = self.module.params.get('data')
# Preload params for get_all - used by facts
self.facts_params = self.module.params.get('params') or {}
# Preload options as dict - used by facts
self.options = transform_list_to_dict(self.module.params.get('options'))
self.validate_etag_support = validate_etag_support
def _build_argument_spec(self, additional_arg_spec, validate_etag_support):
merged_arg_spec = dict()
merged_arg_spec.update(self.ONEVIEW_COMMON_ARGS)
if validate_etag_support:
merged_arg_spec.update(self.ONEVIEW_VALIDATE_ETAG_ARGS)
if additional_arg_spec:
merged_arg_spec.update(additional_arg_spec)
return merged_arg_spec
def _check_hpe_oneview_sdk(self):
if not HAS_HPE_ONEVIEW:
self.module.fail_json(msg=self.HPE_ONEVIEW_SDK_REQUIRED)
def _create_oneview_client(self):
if self.module.params.get('hostname'):
config = dict(ip=self.module.params['hostname'],
credentials=dict(userName=self.module.params['username'], password=self.module.params['password'],
authLoginDomain=self.module.params.get('auth_login_domain', '')),
api_version=self.module.params['api_version'],
image_streamer_ip=self.module.params['image_streamer_hostname'])
self.oneview_client = OneViewClient(config)
elif not self.module.params['config']:
self.oneview_client = OneViewClient.from_environment_variables()
else:
self.oneview_client = OneViewClient.from_json_file(self.module.params['config'])
@abc.abstractmethod
def execute_module(self):
"""
Abstract method, must be implemented by the inheritor.
This method is called from the run method. It should contain the module logic
:return: dict: It must return a dictionary with the attributes for the module result,
such as ansible_facts, msg and changed.
"""
pass
def run(self):
"""
Common implementation of the OneView run modules.
It calls the inheritor's 'execute_module' function and sends the result back to Ansible.
It handles any OneViewModuleException in order to signal a failure to Ansible, with a descriptive error message.
"""
try:
if self.validate_etag_support:
if not self.module.params.get('validate_etag'):
self.oneview_client.connection.disable_etag_validation()
result = self.execute_module()
if not result:
result = {}
if "changed" not in result:
result['changed'] = False
self.module.exit_json(**result)
except OneViewModuleException as exception:
error_msg = '; '.join(to_native(e) for e in exception.args)
self.module.fail_json(msg=error_msg, exception=traceback.format_exc())
def resource_absent(self, resource, method='delete'):
"""
Generic implementation of the absent state for the OneView resources.
It checks if the resource needs to be removed.
:arg dict resource: Resource to delete.
:arg str method: Function of the OneView client that will be called for resource deletion.
Usually delete or remove.
:return: A dictionary with the expected arguments for the AnsibleModule.exit_json
"""
if resource:
getattr(self.resource_client, method)(resource)
return {"changed": True, "msg": self.MSG_DELETED}
else:
return {"changed": False, "msg": self.MSG_ALREADY_ABSENT}
def get_by_name(self, name):
"""
Generic get by name implementation.
:arg str name: Resource name to search for.
:return: The resource found or None.
"""
result = self.resource_client.get_by('name', name)
return result[0] if result else None
def resource_present(self, resource, fact_name, create_method='create'):
"""
Generic implementation of the present state for the OneView resources.
It checks if the resource needs to be created or updated.
:arg dict resource: Resource to create or update.
:arg str fact_name: Name of the fact returned to Ansible.
:arg str create_method: Function of the OneView client that will be called for resource creation.
Usually create or add.
:return: A dictionary with the expected arguments for the AnsibleModule.exit_json
"""
changed = False
if "newName" in self.data:
self.data["name"] = self.data.pop("newName")
if not resource:
resource = getattr(self.resource_client, create_method)(self.data)
msg = self.MSG_CREATED
changed = True
else:
merged_data = resource.copy()
merged_data.update(self.data)
if compare(resource, merged_data):
msg = self.MSG_ALREADY_PRESENT
else:
resource = self.resource_client.update(merged_data)
changed = True
msg = self.MSG_UPDATED
return dict(
msg=msg,
changed=changed,
ansible_facts={fact_name: resource}
)
def resource_scopes_set(self, state, fact_name, scope_uris):
"""
Generic implementation of the scopes update PATCH for the OneView resources.
It checks if the resource needs to be updated with the current scopes.
This method is meant to be run after ensuring the present state.
:arg dict state: Dict containing the data from the last state results in the resource.
It needs to have the 'msg', 'changed', and 'ansible_facts' entries.
:arg str fact_name: Name of the fact returned to Ansible.
:arg list scope_uris: List with all the scope URIs to be added to the resource.
:return: A dictionary with the expected arguments for the AnsibleModule.exit_json
"""
if scope_uris is None:
scope_uris = []
resource = state['ansible_facts'][fact_name]
operation_data = dict(operation='replace', path='/scopeUris', value=scope_uris)
if resource['scopeUris'] is None or set(resource['scopeUris']) != set(scope_uris):
state['ansible_facts'][fact_name] = self.resource_client.patch(resource['uri'], **operation_data)
state['changed'] = True
state['msg'] = self.MSG_UPDATED
return state
class LIGMerger(object):
# merges uplinksets in current resource and existing resource
def merge_data(self, current_data, data):
merged_data = dict_merge(current_data, data)
if current_data.get('uplinkSets') and data.get('uplinkSets'):
# merged_data['uplinkSets'] = merge_list_by_key(current_uplinksets, existing_uplinksets, key="name")
merged_data['uplinkSets'] = self._merge_uplink_set(current_data, data)
return merged_data
# updates the attributes of an uplink set in the existing resource if it already exists
# appends the uplink sets which are present in the current resource but not in the existing resource
def _merge_uplink_set(self, current_resource, data):
existing_uplinksets = data['uplinkSets']
current_uplinksets = current_resource['uplinkSets']
current_uplinks_left = deepcopy(current_uplinksets)
for index, existing_uplink in enumerate(existing_uplinksets):
for current_uplink in current_uplinksets:
if current_uplink['name'] == existing_uplink['name']:
current_uplinks_left.remove(current_uplink) # removes the common uplinksets from current uplinksets
if not compare_lig(current_uplink, existing_uplink):
existing_uplinksets[index] = dict_merge(current_uplink, existing_uplink)
# checks to ignore extra parameters in uplink set to achieve idempotency
if existing_uplink.get('logicalPortConfigInfos') and isinstance(existing_uplink['logicalPortConfigInfos'], list):
for port_config in existing_uplink['logicalPortConfigInfos']:
if not port_config.get('desiredFecMode'):
port_config['desiredFecMode'] = "Auto"
# appends the missing uplinks from current resource to existing resource based on name
existing_uplinksets += current_uplinks_left
return existing_uplinksets
class SPKeys(object):
ID = 'id'
NAME = 'name'
DEVICE_SLOT = 'deviceSlot'
CONNECTION_SETTINGS = 'connectionSettings'
CONNECTIONS = 'connections'
OS_DEPLOYMENT = 'osDeploymentSettings'
OS_DEPLOYMENT_URI = 'osDeploymentPlanUri'
ATTRIBUTES = 'osCustomAttributes'
SAN = 'sanStorage'
VOLUMES = 'volumeAttachments'
PATHS = 'storagePaths'
CONN_ID = 'connectionId'
BOOT = 'boot'
BIOS = 'bios'
BOOT_MODE = 'bootMode'
LOCAL_STORAGE = 'localStorage'
SAS_LOGICAL_JBODS = 'sasLogicalJBODs'
CONTROLLERS = 'controllers'
LOGICAL_DRIVES = 'logicalDrives'
SAS_LOGICAL_JBOD_URI = 'sasLogicalJBODUri'
SAS_LOGICAL_JBOD_ID = 'sasLogicalJBODId'
MODE = 'mode'
MAC_TYPE = 'macType'
MAC = 'mac'
SERIAL_NUMBER_TYPE = 'serialNumberType'
UUID = 'uuid'
SERIAL_NUMBER = 'serialNumber'
DRIVE_NUMBER = 'driveNumber'
WWPN_TYPE = 'wwpnType'
WWNN = 'wwnn'
WWPN = 'wwpn'
LUN_TYPE = 'lunType'
LUN = 'lun'
class ServerProfileMerger(object):
def merge_data(self, resource, data):
merged_data = deepcopy(resource)
merged_data = dict_merge(merged_data, data)
merged_data = self._merge_bios_and_boot(merged_data, resource, data)
merged_data = self._merge_connections(merged_data, resource, data)
merged_data = self._merge_san_storage(merged_data, data, resource)
merged_data = self._merge_os_deployment_settings(merged_data, resource, data)
merged_data = self._merge_local_storage(merged_data, resource, data)
return merged_data
def _merge_bios_and_boot(self, merged_data, resource, data):
if self._should_merge(data, resource, key=SPKeys.BIOS):
merged_data = self._merge_dict(merged_data, resource, data, key=SPKeys.BIOS)
if self._should_merge(data, resource, key=SPKeys.BOOT):
merged_data = self._merge_dict(merged_data, resource, data, key=SPKeys.BOOT)
if self._should_merge(data, resource, key=SPKeys.BOOT_MODE):
merged_data = self._merge_dict(merged_data, resource, data, key=SPKeys.BOOT_MODE)
return merged_data
def _merge_connections(self, merged_data, resource, data):
# The below condition is added to handle connectionSettings in server profile json
if data.get(SPKeys.CONNECTION_SETTINGS) and SPKeys.CONNECTIONS in data.get(SPKeys.CONNECTION_SETTINGS):
existing_connections = resource[SPKeys.CONNECTION_SETTINGS][SPKeys.CONNECTIONS]
params_connections = data[SPKeys.CONNECTION_SETTINGS][SPKeys.CONNECTIONS]
merged_data[SPKeys.CONNECTION_SETTINGS][SPKeys.CONNECTIONS] = merge_list_by_key(
existing_connections,
params_connections,
key=SPKeys.ID,
replace_key='portId',
replace_value='Auto'
)
merged_data[SPKeys.CONNECTION_SETTINGS] = self._merge_connections_boot(
merged_data[SPKeys.CONNECTION_SETTINGS],
resource[SPKeys.CONNECTION_SETTINGS]
)
if self._should_merge(data, resource, key=SPKeys.CONNECTIONS):
existing_connections = resource[SPKeys.CONNECTIONS]
params_connections = data[SPKeys.CONNECTIONS]
merged_data[SPKeys.CONNECTIONS] = merge_list_by_key(existing_connections, params_connections, key=SPKeys.ID)
merged_data = self._merge_connections_boot(merged_data, resource)
return merged_data
def _merge_connections_boot(self, merged_data, resource):
existing_connection_map = {x[SPKeys.ID]: x.copy() for x in resource[SPKeys.CONNECTIONS]}
for merged_connection in merged_data[SPKeys.CONNECTIONS]:
conn_id = merged_connection[SPKeys.ID]
existing_conn_has_boot = conn_id in existing_connection_map and SPKeys.BOOT in existing_connection_map[
conn_id]
if existing_conn_has_boot and SPKeys.BOOT in merged_connection:
current_connection = existing_connection_map[conn_id]
boot_settings_merged = deepcopy(current_connection[SPKeys.BOOT])
boot_settings_merged = dict_merge(boot_settings_merged, merged_connection[SPKeys.BOOT])
merged_connection[SPKeys.BOOT] = boot_settings_merged
return merged_data
def _merge_san_storage(self, merged_data, data, resource):
if self._removed_data(data, resource, key=SPKeys.SAN):
merged_data[SPKeys.SAN] = dict(volumeAttachments=[], manageSanStorage=False)
elif self._should_merge(data, resource, key=SPKeys.SAN):
merged_data = self._merge_dict(merged_data, resource, data, key=SPKeys.SAN)
merged_data = self._merge_san_volumes(merged_data, resource, data)
return merged_data
def _merge_san_volumes(self, merged_data, resource, data):
if self._should_merge(data[SPKeys.SAN], resource[SPKeys.SAN], key=SPKeys.VOLUMES):
existing_volumes = resource[SPKeys.SAN][SPKeys.VOLUMES]
params_volumes = data[SPKeys.SAN][SPKeys.VOLUMES]
merged_volumes = merge_list_by_key(existing_volumes, params_volumes, key=SPKeys.ID)
merged_data[SPKeys.SAN][SPKeys.VOLUMES] = merged_volumes
merged_data = self._merge_san_storage_paths(merged_data, resource)
return merged_data
def _merge_san_storage_paths(self, merged_data, resource):
existing_volumes_map = OrderedDict([(i[SPKeys.ID], i) for i in resource[SPKeys.SAN][SPKeys.VOLUMES]])
merged_volumes = merged_data[SPKeys.SAN][SPKeys.VOLUMES]
for merged_volume in merged_volumes:
volume_id = merged_volume[SPKeys.ID]
if volume_id in existing_volumes_map:
if SPKeys.PATHS in merged_volume and SPKeys.PATHS in existing_volumes_map[volume_id]:
existent_paths = existing_volumes_map[volume_id][SPKeys.PATHS]
paths_from_merged_volume = merged_volume[SPKeys.PATHS]
merged_paths = merge_list_by_key(existent_paths, paths_from_merged_volume, key=SPKeys.CONN_ID)
merged_volume[SPKeys.PATHS] = merged_paths
return merged_data
def _merge_os_deployment_settings(self, merged_data, resource, data):
if self._should_merge(data, resource, key=SPKeys.OS_DEPLOYMENT):
merged_data = self._merge_os_deployment_custom_attr(merged_data, resource, data)
return merged_data
def _merge_os_deployment_custom_attr(self, merged_data, resource, data):
if SPKeys.ATTRIBUTES in data[SPKeys.OS_DEPLOYMENT]:
existing_os_deployment = resource[SPKeys.OS_DEPLOYMENT]
params_os_deployment = data[SPKeys.OS_DEPLOYMENT]
merged_os_deployment = merged_data[SPKeys.OS_DEPLOYMENT]
if self._removed_data(params_os_deployment, existing_os_deployment, key=SPKeys.ATTRIBUTES):
merged_os_deployment[SPKeys.ATTRIBUTES] = params_os_deployment[SPKeys.ATTRIBUTES]
else:
existing_attributes = existing_os_deployment[SPKeys.ATTRIBUTES]
params_attributes = params_os_deployment[SPKeys.ATTRIBUTES]
merged_data[SPKeys.OS_DEPLOYMENT][SPKeys.ATTRIBUTES] = merge_list_by_key(
existing_attributes,
params_attributes,
key='name',
)
# if compare_list(existing_attributes, params_attributes):
# merged_os_deployment[SPKeys.ATTRIBUTES] = existing_attributes
return merged_data
def _merge_local_storage(self, merged_data, resource, data):
if self._removed_data(data, resource, key=SPKeys.LOCAL_STORAGE):
merged_data[SPKeys.LOCAL_STORAGE] = dict(sasLogicalJBODs=[], controllers=[])
elif self._should_merge(data, resource, key=SPKeys.LOCAL_STORAGE):
merged_data = self._merge_sas_logical_jbods(merged_data, resource, data)
merged_data = self._merge_controllers(merged_data, resource, data)
return merged_data
def _merge_sas_logical_jbods(self, merged_data, resource, data):
if data.get(SPKeys.LOCAL_STORAGE) and SPKeys.SAS_LOGICAL_JBODS in data.get(SPKeys.LOCAL_STORAGE):
existing_items = resource[SPKeys.LOCAL_STORAGE][SPKeys.SAS_LOGICAL_JBODS]
provided_items = data[SPKeys.LOCAL_STORAGE][SPKeys.SAS_LOGICAL_JBODS]
merged_jbods = merge_list_by_key(existing_items, provided_items, key=SPKeys.ID, ignore_when_null=[SPKeys.SAS_LOGICAL_JBOD_URI])
merged_data[SPKeys.LOCAL_STORAGE][SPKeys.SAS_LOGICAL_JBODS] = merged_jbods
return merged_data
def _merge_controllers(self, merged_data, resource, data):
if self._should_merge(data[SPKeys.LOCAL_STORAGE], resource[SPKeys.LOCAL_STORAGE], key=SPKeys.CONTROLLERS):
existing_items = resource[SPKeys.LOCAL_STORAGE][SPKeys.CONTROLLERS]
provided_items = merged_data[SPKeys.LOCAL_STORAGE][SPKeys.CONTROLLERS]
merged_controllers = merge_list_by_key(existing_items, provided_items, key=SPKeys.DEVICE_SLOT)
merged_data[SPKeys.LOCAL_STORAGE][SPKeys.CONTROLLERS] = merged_controllers
merged_data = self._merge_controller_drives(merged_data, resource)
return merged_data
def _merge_controller_drives(self, merged_data, resource):
for current_controller in merged_data[SPKeys.LOCAL_STORAGE][SPKeys.CONTROLLERS][:]:
for existing_controller in resource[SPKeys.LOCAL_STORAGE][SPKeys.CONTROLLERS][:]:
same_slot = current_controller.get(SPKeys.DEVICE_SLOT) == existing_controller.get(SPKeys.DEVICE_SLOT)
same_mode = current_controller.get(SPKeys.MODE) == existing_controller.get(SPKeys.MODE)
if same_slot and same_mode and current_controller[SPKeys.LOGICAL_DRIVES]:
key_merge = self._define_key_to_merge_drives(current_controller)
if key_merge:
merged_drives = merge_list_by_key(existing_controller[SPKeys.LOGICAL_DRIVES],
current_controller[SPKeys.LOGICAL_DRIVES],
key=key_merge)
current_controller[SPKeys.LOGICAL_DRIVES] = merged_drives
return merged_data
def _define_key_to_merge_drives(self, controller):
has_name = True
has_logical_jbod_id = True
for drive in controller[SPKeys.LOGICAL_DRIVES]:
if not drive.get(SPKeys.NAME):
has_name = False
if not drive.get(SPKeys.SAS_LOGICAL_JBOD_ID):
has_logical_jbod_id = False
if has_name:
return SPKeys.NAME
elif has_logical_jbod_id:
return SPKeys.SAS_LOGICAL_JBOD_ID
return None
def _removed_data(self, data, resource, key):
return key in data and not data[key] and key in resource
def _should_merge(self, data, resource, key):
data_has_value = key in data and data[key]
existing_resource_has_value = key in resource and resource[key]
return data_has_value and existing_resource_has_value
def _merge_dict(self, merged_data, resource, data, key):
if resource[key]:
merged_dict = deepcopy(resource[key])
merged_dict.update(deepcopy(data[key]))
merged_data[key] = merged_dict
return merged_data
class ServerProfileReplaceNamesByUris(object):
SCOPE_NOT_FOUND = 'Scope not found: '
SERVER_PROFILE_OS_DEPLOYMENT_NOT_FOUND = 'OS Deployment Plan not found: '
SERVER_PROFILE_ENCLOSURE_GROUP_NOT_FOUND = 'Enclosure Group not found: '
SERVER_PROFILE_NETWORK_NOT_FOUND = 'Network not found: '
SERVER_HARDWARE_TYPE_NOT_FOUND = 'Server Hardware Type not found: '
VOLUME_NOT_FOUND = 'Volume not found: '
STORAGE_POOL_NOT_FOUND = 'Storage Pool not found: '
STORAGE_SYSTEM_NOT_FOUND = 'Storage System not found: '
STORAGE_VOLUME_TEMPLATE_NOT_FOUND = 'Storage volume template not found: '
INTERCONNECT_NOT_FOUND = 'Interconnect not found: '
FIRMWARE_DRIVER_NOT_FOUND = 'Firmware Driver not found: '
SAS_LOGICAL_JBOD_NOT_FOUND = 'SAS logical JBOD not found: '
ENCLOSURE_NOT_FOUND = 'Enclosure not found: '
def replace(self, oneview_client, data):
self.oneview_client = oneview_client
self._replace_os_deployment_name_by_uri(data)
self._replace_enclosure_group_name_by_uri(data)
self._replace_networks_name_by_uri(data)
self._replace_server_hardware_type_name_by_uri(data)
self._replace_volume_attachment_names_by_uri(data)
self._replace_enclosure_name_by_uri(data)
self._replace_interconnect_name_by_uri(data)
self._replace_firmware_baseline_name_by_uri(data)
self._replace_sas_logical_jbod_name_by_uri(data)
self._replace_initial_scope_name_by_uri(data)
def _get_resource_uri_from_name(self, name, message, resource_client):
resource_by_name = resource_client.get_by('name', name)
if resource_by_name:
return resource_by_name[0]['uri']
else:
raise OneViewModuleResourceNotFound(message + name)
def _replace_name_by_uri(self, data, attr_name, message, resource_client,
replace_name_with='Uri'):
attr_uri = attr_name.replace("Name", replace_name_with)
if attr_name in data:
name = data.pop(attr_name)
uri = self._get_resource_uri_from_name(name, message, resource_client)
data[attr_uri] = uri
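# Illustrative (hypothetical) transformation performed by the helper above:
# the '...Name' key is popped from the data and replaced by the matching
# '...Uri' key, resolved through the given resource client.
#
#   data = {'enclosureGroupName': 'EG-1'}
#   # after _replace_name_by_uri(data, 'enclosureGroupName', msg, client):
#   # data == {'enclosureGroupUri': '/rest/enclosure-groups/<id>'}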
def _replace_initial_scope_name_by_uri(self, data):
if data.get("initialScopeNames"):
scope_uris = []
resource_client = self.oneview_client.scopes
for name in data.pop("initialScopeNames", []):
scope = resource_client.get_by_name(name)
if not scope:
raise OneViewModuleResourceNotFound(self.SCOPE_NOT_FOUND + name)
scope_uris.append(scope["uri"])
data["initialScopeUris"] = scope_uris
def _replace_os_deployment_name_by_uri(self, data):
if SPKeys.OS_DEPLOYMENT in data and data[SPKeys.OS_DEPLOYMENT]:
self._replace_name_by_uri(data[SPKeys.OS_DEPLOYMENT], 'osDeploymentPlanName',
self.SERVER_PROFILE_OS_DEPLOYMENT_NOT_FOUND,
self.oneview_client.os_deployment_plans)
def _replace_enclosure_group_name_by_uri(self, data):
self._replace_name_by_uri(data, 'enclosureGroupName', self.SERVER_PROFILE_ENCLOSURE_GROUP_NOT_FOUND,
self.oneview_client.enclosure_groups)
def _replace_networks_name_by_uri(self, data):
if data.get("connections"):
connections = data["connections"]
elif data.get("connectionSettings") and data["connectionSettings"].get("connections"):
connections = data["connectionSettings"]["connections"]
else:
return
for connection in connections:
if 'networkName' in connection:
name = connection.pop('networkName')
if name is not None:
connection['networkUri'] = self._get_network_by_name(name)['uri']
def _replace_server_hardware_type_name_by_uri(self, data):
self._replace_name_by_uri(data, 'serverHardwareTypeName', self.SERVER_HARDWARE_TYPE_NOT_FOUND,
self.oneview_client.server_hardware_types)
def _replace_volume_attachment_names_by_uri(self, data):
volume_attachments = (data.get('sanStorage') or {}).get('volumeAttachments') or []
if len(volume_attachments) > 0:
for volume in volume_attachments:
if not volume.get('volumeUri') and volume.get('volumeName'):
resource_by_name = self.oneview_client.volumes.get_by('name', volume['volumeName'])
if resource_by_name:
volume['volumeUri'] = resource_by_name[0]['uri']
del volume['volumeName']
else:
logger.debug("The volumeUri is null in the volumeAttachments list, it will be understood "
"that the volume does not exist, so it will be created along with the server "
"profile. Be warned that it will always trigger a new creation, so it will not "
"be idempotent.")
self._replace_name_by_uri(volume, 'volumeStoragePoolName', self.STORAGE_POOL_NOT_FOUND,
self.oneview_client.storage_pools)
self._replace_name_by_uri(volume, 'volumeStorageSystemName', self.STORAGE_SYSTEM_NOT_FOUND,
self.oneview_client.storage_systems)
# Support for API version 600 schema changes
if volume.get('volume'):
self._replace_name_by_uri(volume['volume'], 'templateName',
self.STORAGE_VOLUME_TEMPLATE_NOT_FOUND,
self.oneview_client.storage_volume_templates)
if volume['volume'].get('properties'):
self._replace_name_by_uri(volume['volume']['properties'],
'storagePoolName',
self.STORAGE_POOL_NOT_FOUND,
self.oneview_client.storage_pools,
replace_name_with='')
def _replace_enclosure_name_by_uri(self, data):
self._replace_name_by_uri(data, 'enclosureName', self.ENCLOSURE_NOT_FOUND, self.oneview_client.enclosures)
def _replace_interconnect_name_by_uri(self, data):
connections = data.get('connections') or []
if len(connections) > 0:
for connection in connections:
self._replace_name_by_uri(connection, 'interconnectName', self.INTERCONNECT_NOT_FOUND,
self.oneview_client.interconnects)
def _replace_firmware_baseline_name_by_uri(self, data):
firmware = data.get('firmware') or {}
self._replace_name_by_uri(firmware, 'firmwareBaselineName', self.FIRMWARE_DRIVER_NOT_FOUND,
self.oneview_client.firmware_drivers)
def _replace_sas_logical_jbod_name_by_uri(self, data):
sas_logical_jbods = (data.get('localStorage') or {}).get('sasLogicalJBODs') or []
if len(sas_logical_jbods) > 0:
for jbod in sas_logical_jbods:
self._replace_name_by_uri(jbod, 'sasLogicalJBODName', self.SAS_LOGICAL_JBOD_NOT_FOUND,
self.oneview_client.sas_logical_jbods)
def _get_network_by_name(self, name):
fc_networks = self.oneview_client.fc_networks.get_by('name', name)
if fc_networks:
return fc_networks[0]
fcoe_networks = self.oneview_client.fcoe_networks.get_by('name', name)
if fcoe_networks:
return fcoe_networks[0]
network_sets = self.oneview_client.network_sets.get_by('name', name)
if network_sets:
return network_sets[0]
ethernet_networks = self.oneview_client.ethernet_networks.get_by('name', name)
if not ethernet_networks:
raise OneViewModuleResourceNotFound(self.SERVER_PROFILE_NETWORK_NOT_FOUND + name)
return ethernet_networks[0]
|
apache-2.0
| -624,029,130,914,641,500 | 41.670886 | 139 | 0.632783 | false |
amueller/advanced_training
|
plots/plot_interactive_tree.py
|
1
|
2695
|
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_blobs
from sklearn.tree import DecisionTreeClassifier
from sklearn.externals.six import StringIO # doctest: +SKIP
from sklearn.tree import export_graphviz
from scipy.misc import imread
from scipy import ndimage
import os
import re
GRAPHVIS_PATH = r"C:\Program Files (x86)\Graphviz2.38\bin"
if GRAPHVIS_PATH not in os.environ['PATH']:
os.environ['PATH'] += ";" + GRAPHVIS_PATH
X, y = make_blobs(centers=[[0, 0], [1, 1]], random_state=61526, n_samples=50)
def tree_image(tree, fout=None):
try:
import graphviz
except ImportError:
# make a hacky white plot
x = np.ones((10, 10))
x[0, 0] = 0
return x
dot_data = StringIO()
export_graphviz(tree, out_file=dot_data, max_depth=3, impurity=False)
data = dot_data.getvalue()
#data = re.sub(r"gini = 0\.[0-9]+\\n", "", dot_data.getvalue())
data = re.sub(r"samples = [0-9]+\\n", "", data)
data = re.sub(r"\\nsamples = [0-9]+", "", data)
data = re.sub(r"value", "counts", data)
graph = graphviz.Source(data, format="png")
if fout is None:
fout = "tmp"
graph.render(fout)
return imread(fout + ".png")
def plot_tree(max_depth=1):
fig, ax = plt.subplots(1, 2, figsize=(15, 7))
h = 0.02
x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
if max_depth != 0:
tree = DecisionTreeClassifier(max_depth=max_depth, random_state=1).fit(X, y)
Z = tree.predict_proba(np.c_[xx.ravel(), yy.ravel()])[:, 1]
Z = Z.reshape(xx.shape)
faces = tree.tree_.apply(np.c_[xx.ravel(), yy.ravel()].astype(np.float32))
faces = faces.reshape(xx.shape)
border = ndimage.laplace(faces) != 0
ax[0].contourf(xx, yy, Z, alpha=.4)
ax[0].scatter(xx[border], yy[border], marker='.', s=1)
ax[0].set_title("max_depth = %d" % max_depth)
img = tree_image(tree)
if img is not None:
ax[1].imshow(img)
ax[1].axis("off")
else:
ax[1].set_visible(False)
else:
ax[0].set_title("data set")
ax[1].set_visible(False)
ax[0].scatter(X[:, 0], X[:, 1], c=np.array(['b', 'r'])[y], s=60)
ax[0].set_xlim(x_min, x_max)
ax[0].set_ylim(y_min, y_max)
ax[0].set_xticks(())
ax[0].set_yticks(())
def plot_tree_interactive():
from IPython.html.widgets import interactive, IntSlider
slider = IntSlider(min=0, max=8, step=1, value=0)
return interactive(plot_tree, max_depth=slider)
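# Usage sketch (assumes a Jupyter/IPython notebook front-end): the widget
# returned above re-runs plot_tree whenever the max_depth slider changes.
#
#   widget = plot_tree_interactive()
#   widget  # display in the notebook to get the slider and the two plots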
|
bsd-2-clause
| 2,661,079,370,298,128,400 | 32.271605 | 84 | 0.586642 | false |
pavanky/arrayfire-python
|
arrayfire/tests/simple/array_test.py
|
1
|
2068
|
#!/usr/bin/python
#######################################################
# Copyright (c) 2015, ArrayFire
# All rights reserved.
#
# This file is distributed under 3-clause BSD license.
# The complete license agreement can be obtained at:
# http://arrayfire.com/licenses/BSD-3-Clause
########################################################
import arrayfire as af
import array as host
from . import _util
def simple_array(verbose=False):
display_func = _util.display_func(verbose)
print_func = _util.print_func(verbose)
a = af.Array([1, 2, 3])
display_func(a)
display_func(a.T)
display_func(a.H)
print_func(a.shape)
b = a.as_type(af.Dtype.s32)
display_func(b)
print_func(a.elements(), a.type(), a.dims(), a.numdims())
print_func(a.is_empty(), a.is_scalar(), a.is_column(), a.is_row())
print_func(a.is_complex(), a.is_real(), a.is_double(), a.is_single())
print_func(a.is_real_floating(), a.is_floating(), a.is_integer(), a.is_bool())
a = af.Array(host.array('i', [4, 5, 6]))
display_func(a)
print_func(a.elements(), a.type(), a.dims(), a.numdims())
print_func(a.is_empty(), a.is_scalar(), a.is_column(), a.is_row())
print_func(a.is_complex(), a.is_real(), a.is_double(), a.is_single())
print_func(a.is_real_floating(), a.is_floating(), a.is_integer(), a.is_bool())
a = af.Array(host.array('I', [7, 8, 9] * 3), (3,3))
display_func(a)
print_func(a.elements(), a.type(), a.dims(), a.numdims())
print_func(a.is_empty(), a.is_scalar(), a.is_column(), a.is_row())
print_func(a.is_complex(), a.is_real(), a.is_double(), a.is_single())
print_func(a.is_real_floating(), a.is_floating(), a.is_integer(), a.is_bool())
c = a.to_ctype()
for n in range(a.elements()):
print_func(c[n])
c,s = a.to_ctype(True, True)
for n in range(a.elements()):
print_func(c[n])
print_func(s)
arr = a.to_array()
lst = a.to_list(True)
print_func(arr)
print_func(lst)
print_func(a.is_sparse())
_util.tests['array'] = simple_array
|
bsd-3-clause
| -1,466,352,537,016,812,500 | 30.815385 | 82 | 0.571567 | false |
andrewiggins/Textbook-Price-Aggregator
|
src/server/search.py
|
1
|
2644
|
#!/usr/bin/env python
#-------------------------------------------------------------------------------
# Name: search.py
# Purpose: Contains all Request Handlers relating to generic search
# functions
#
# Author: Andre Wiggins
#
# Created: 04/07/2011
# Copyright: (c) Jacob Marsh, Andrew Stewart, Andre Wiggins 2011
# License:
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
import parsers
import server
from google.appengine.ext.webapp import template
from google.appengine.ext import webapp
class SearchResultsPage(webapp.RequestHandler):
'''Handles requests for the /search path, which returns the HTML for
a user searching for a textbook by a generic search term'''
def get(self):
query = self.request.get('q')
type = self.request.get('type')
if type and query:
if type=="isbn":
newurl = "book/%s"%query
self.redirect(newurl)
else:
path = '../static/templates/search.html'
retailer = "halfdotcom"
newurl = "/search/%s?q=%s&type=%s"%(retailer,query,type)
url2 = "/book"
template_values={"url":newurl,"url2":url2,"retailer":retailer}
self.response.out.write(template.render(path, template_values, True))
else:
self.redirect("/")
class SearchRetailer(webapp.RequestHandler):
'''Handles requests for the /search/retailer path, which returns the JSON for
searching a retailer for a generic search term'''
def get(self):
retailer_name = self.request.path.rstrip('/').split('/')[-1]
retailer = parsers.import_parser(retailer_name)
query = self.request.get('q')
type = self.request.get('type')
textbooks = server.getjson(retailer.search(query, type))
self.response.headers['Content-Type'] = 'application/json'
self.response.out.write(textbooks)
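# Example request (hypothetical retailer and query): a GET such as
#   /search/halfdotcom?q=introduction+to+algorithms&type=title
# loads the 'halfdotcom' parser and returns its search results as JSON.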
|
apache-2.0
| -3,334,603,341,803,677,000 | 36.785714 | 98 | 0.58472 | false |
waveform80/compoundfiles
|
compoundfiles/mmap.py
|
1
|
7498
|
#!/usr/bin/env python
# vim: set et sw=4 sts=4 fileencoding=utf-8:
#
# A library for reading Microsoft's OLE Compound Document format
# Copyright (c) 2014 Dave Jones <dave@waveform.org.uk>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
str = type('')
try:
range = xrange
except NameError:
pass
import sys
PY2 = sys.version_info[0] == 2
import io
import threading
class FakeMemoryMap(object):
"""
Provides an mmap-style interface for streams without a file descriptor.
The :class:`FakeMemoryMap` class can be used to emulate a memory-mapped
file in cases where a seekable file-like object is provided that doesn't
have a file descriptor (e.g. in-memory streams), or where a file descriptor
exists but "real" mmap cannot be used for other reasons (e.g.
>2Gb files on a 32-bit OS).
The emulated mapping is thread-safe, read-only, but obviously will be
considerably slower than using a "real" mmap. All methods of a real
read-only mmap are emulated (:meth:`find`, :meth:`read_byte`,
:meth:`close`, etc.) so instances can be used as drop-in replacements.
Currently the emulation only covers the entire file (it cannot be limited
to a sub-range of the file as with real mmap).
"""
def __init__(self, f):
self._lock = threading.Lock()
self._file = f
with self._lock:
f.seek(0, io.SEEK_END)
self._size = f.tell()
f.seek(0)
def _read_only(self):
raise TypeError('fake mmap is read-only')
def __len__(self):
return self._size
def __getitem__(self, key):
with self._lock:
save_pos = self._file.tell()
try:
if not isinstance(key, slice):
if key < 0:
key += self._size
if not (0 <= key < self._size):
raise IndexError('fake mmap index out of range')
self._file.seek(key)
if PY2:
return self._file.read(1)
return ord(self._file.read(1))
step = 1 if key.step is None else key.step
if step > 0:
start = min(self._size, max(0, (
0 if key.start is None else
key.start + self._size if key.start < 0 else
key.start
)))
stop = min(self._size, max(0, (
self._size if key.stop is None else
key.stop + self._size if key.stop < 0 else
key.stop
)))
self._file.seek(start)
if start >= stop:
return b''
return self._file.read(stop - start)[::step]
elif step < 0:
start = min(self._size, max(0, (
-1 if key.stop is None else
key.stop + self._size if key.stop < 0 else
key.stop
) + 1))
stop = min(self._size, max(0, (
self._size - 1 if key.start is None else
key.start + self._size if key.start < 0 else
key.start
) + 1))
self._file.seek(start)
if start >= stop:
return b''
return self._file.read(stop - start)[::-1][::-step]
else:
raise ValueError('slice step cannot be zero')
finally:
self._file.seek(save_pos)
def __contains__(self, value):
# This operates rather oddly with memory-maps; it returns a valid
# answer if value is a single byte. Otherwise, it returns False
if len(value) == 1:
return self.find(value) != -1
return False
def __setitem__(self, index, value):
self._read_only()
def close(self):
pass
def find(self, string, start=None, end=None):
# XXX Naive find; replace with Boyer-Moore?
l = len(string)
start = min(self._size, max(0,
0 if start is None else
self._size + start if start < 0 else
start
))
end = min(self._size, max(0,
self._size if end is None else
self._size + end if end < 0 else
end
))
for i in range(start, end - l + 1):
if self[i:i + l] == string:
return i
return -1
def flush(self, offset=None, size=None):
# Seems like this should raise a read-only error, but real read-only
# mmaps don't so we don't either
pass
def move(self, dest, src, count):
self._read_only()
def read(self, num):
with self._lock:
return self._file.read(num)
def read_byte(self):
# XXX Beyond EOF = ValueError
with self._lock:
if PY2:
return self._file.read(1)
return ord(self._file.read(1))
def readline(self):
with self._lock:
return self._file.readline()
def resize(self, newsize):
self._read_only()
def rfind(self, string, start=None, end=None):
# XXX Naive find; replace with Boyer-Moore?
l = len(string)
start = min(self._size, max(0,
0 if start is None else
self._size + start if start < 0 else
start
))
end = min(self._size, max(0,
self._size if end is None else
self._size + end if end < 0 else
end
))
for i in range(end - l, start - 1, -1):
if self[i:i + l] == string:
return i
return -1
def seek(self, pos, whence=io.SEEK_SET):
with self._lock:
self._file.seek(pos, whence)
return self._file.tell()
def size(self):
return self._size
def tell(self):
with self._lock:
return self._file.tell()
def write(self, string):
self._read_only()
def write_byte(self, byte):
self._read_only()
|
mit
| 2,056,833,574,935,714,600 | 33.081818 | 79 | 0.537743 | false |
fmichea/bracoujl
|
bracoujl/processor/gb_z80.py
|
1
|
9775
|
# gb_z80.py - GameBoy z80 Disassembler + configuration.
import struct
import re
from functools import partial as P
class GBZ80Disassembler:
def __init__(self):
def _disassemble_cb(op):
return self._cb_ops[op // 8] + self._cb_regs[op % 8]
def r(reg): return '%{reg}'.format(reg=reg)
def inc_reg(reg):
a = 'inc {reg}'.format(reg=reg)
return lambda _: a
def dec_reg(reg):
a = 'dec {reg}'.format(reg=reg)
return lambda _: a
def push_reg(reg):
a = 'push {reg}'.format(reg=reg)
return lambda _: a
def pop_reg(reg):
a = 'pop {reg}'.format(reg=reg)
return lambda _: a
def ld_a_mreg(reg):
a = 'ld %a, ({})'.format(reg)
return lambda _: a
def ld_mreg_a(reg):
a = 'ld ({}), %a'.format(reg)
return lambda _: a
def call_flag_a16(flag, inst):
addr = struct.unpack('<H', inst['mem'])[0]
return 'call {}, $0x{:04X}'.format(flag, addr)
def jmp_flag_a16(flag, inst):
addr = struct.unpack('<H', inst['mem'])[0]
return 'jmp {}, $0x{:04X}'.format(flag, addr)
def jr_flag_r8(flag, inst):
addr = struct.unpack('b', inst['mem'][:1])[0]
return 'jr {}, $0x{:02X} ; (${:d})'.format(flag, addr & 0xff, addr)
def ret_flag(flag):
a = 'ret {}'.format(flag)
return lambda _: a
def ld_reg_reg(reg1, reg2):
a = 'ld {reg1}, {reg2}'.format(reg1=reg1, reg2=reg2)
return lambda _: a
def op_a_reg(op, reg):
a = '{} %a, {}'.format(op, reg)
return lambda _: a
def rst_nn(nn):
a = 'rst {:02X}h'.format(nn)
return lambda _: a
def jmp_a16(inst):
addr = struct.unpack('<H', inst['mem'])[0]
return 'jmp $0x{:04X}'.format(addr)
def ld_mc_a(_):
return 'ld ($0xFF00 + %c), %a'
def ld_a_mc(_):
return 'ld %a, ($0xFF00 + %c)'
def cb(inst):
op = inst['mem'][0]
return 'cb $0x{:02X} ; {}'.format(op, _disassemble_cb(op))
def call_a16(inst):
addr = struct.unpack('<H', inst['mem'])[0]
return 'call $0x{:04X}'.format(addr)
def jr_r8(inst):
addr = struct.unpack('b', inst['mem'][:1])[0]
return 'jr $0x{:02X} ; (${:d})'.format(addr & 0xff, addr)
def ld_ma16_sp(inst):
addr = struct.unpack('<H', inst['mem'])[0]
return 'ld ($0x{:04X}), %sp'.format(addr)
def ld_reg_d8(reg, inst):
val = struct.unpack('B', inst['mem'][:1])[0]
return 'ld {}, $0x{:02X}'.format(reg, val)
def ld_reg_d16(reg, inst):
val = struct.unpack('<H', inst['mem'])[0]
return 'ld {}, $0x{:04X}'.format(reg, val)
def add_hl_reg(reg):
a = 'add %hl, {}'.format(reg)
return lambda _: a
def ld_ma16_a(inst):
addr = struct.unpack('<H', inst['mem'])[0]
return 'ld ($0x{:04X}), %a'.format(addr)
def ld_a_ma16(inst):
addr = struct.unpack('<H', inst['mem'])[0]
return 'ld %a, ($0x{:04X})'.format(addr)
def ldh_a_ma8(inst):
addr = inst['mem'][0]
return 'ldh %a, ($0x{:04X})'.format(0xFF00 + addr)
def ldh_ma8_a(inst):
addr = inst['mem'][0]
return 'ldh ($0x{:04X}), %a'.format(0xFF00 + addr)
def op_a_d8(op, inst):
d8 = inst['mem'][0]
return '{} %a, $0x{:X}'.format(op, d8)
def add_sp_r8(inst):
r8 = struct.unpack('b', inst['mem'][:1])[0]
return 'add %sp, $0x{:02X} ; (${:d})'.format(r8 & 0xff, r8)
def ld_hl_sppr8(inst):
            a = struct.unpack('b', inst['mem'][:1])[0]
return 'ld %hl, %sp + $0x{:02X} ; (${:d})'.format(a & 0xff, a)
def ld_sp_hl(_):
return 'ld %sp, %hl'
def jmp_mhl(_):
return 'jmp (%hl)'
self._opcodes = dict()
# PREFIX CB
self._cb_ops = []
self._cb_regs = [r(a) for a in ['b', 'c', 'd', 'e', 'h', 'l']] + ['(%hl)', r('a')]
for o in ['rlc', 'rrc', 'rl', 'rr', 'sla', 'sra', 'swap', 'srl']:
self._cb_ops.append(o + ' ')
for o in ["bit", "res", "set"]:
for i in range(8):
self._cb_ops.append(o + ' $' + str(i) + ', ')
self._opcodes[0xCB] = cb
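        # Illustrative decode note (derived from the tables above): a CB-prefixed opcode maps to
        # _cb_ops[op // 8] + _cb_regs[op % 8]; e.g. op 0x11 -> 'rl ' + '%c' == 'rl %c'.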
# LD (a16), SP
self._opcodes[0x08] = ld_ma16_sp
# LDH (a8), A / LDH A, (a8)
self._opcodes[0xE0] = ldh_ma8_a
self._opcodes[0xF0] = ldh_a_ma8
# LD (a16), A / LD A, (a16)
self._opcodes[0xEA] = ld_ma16_a
self._opcodes[0xFA] = ld_a_ma16
# LD SP, HL / LD HL, SP + r8
self._opcodes[0xF9] = ld_sp_hl
self._opcodes[0xF8] = ld_hl_sppr8
# ADD SP, r8
self._opcodes[0xE8] = add_sp_r8
# JP (HL)
self._opcodes[0xE9] = jmp_mhl
for i, reg in enumerate(['bc', 'de', 'hl']):
# INC
self._opcodes[0x10 * i + 0x3] = inc_reg(r(reg))
self._opcodes[0x10 * i + 0x4] = inc_reg(r(reg[0]))
self._opcodes[0x10 * i + 0xC] = inc_reg(r(reg[1]))
# DEC
self._opcodes[0x10 * i + 0x5] = dec_reg(r(reg[0]))
self._opcodes[0x10 * i + 0xB] = dec_reg(r(reg))
self._opcodes[0x10 * i + 0xD] = dec_reg(r(reg[1]))
# INC
self._opcodes[0x33] = inc_reg('%sp')
self._opcodes[0x34] = inc_reg('(%hl)')
self._opcodes[0x3C] = inc_reg(r('a'))
# DEC
self._opcodes[0x35] = dec_reg('(%hl)')
self._opcodes[0x3B] = dec_reg('%sp')
self._opcodes[0x3D] = dec_reg(r('a'))
# PUSH/POP
for i, reg in enumerate(['bc', 'de', 'hl', 'af']):
self._opcodes[0xC0 + 0x10 * i + 0x1] = pop_reg(r(reg))
self._opcodes[0xC0 + 0x10 * i + 0x5] = push_reg(r(reg))
# ADD/ADC/SUB/SBC/AND/XOR/OR/CP
for i1, op in enumerate(['add', 'adc', 'sub', 'sbc', 'and', 'xor', 'or', 'cp']):
for i2, reg in enumerate([r(a) for a in 'bcdehl'] + ['(%hl)', r('a')]):
self._opcodes[0x80 + 0x8 * i1 + i2] = op_a_reg(op, reg)
self._opcodes[0xC6 + 0x8 * i1] = P(op_a_d8, op)
# LD REG, d16
for i, reg in enumerate(['bc', 'de', 'hl', 'sp']):
self._opcodes[0x10 * i + 0x1] = P(ld_reg_d16, r(reg))
# ADD HL, REG
for i, reg in enumerate(['bc', 'de', 'hl', 'sp']):
self._opcodes[0x09 + 0x10 * i] = add_hl_reg(r(reg))
# LD REG, REG / LD REG, d8
for i1, reg1 in enumerate([r(a) for a in 'bcdehl'] + ['(%hl)', r('a')]):
for i2, reg2 in enumerate([r(a) for a in 'bcdehl'] + ['(%hl)', r('a')]):
self._opcodes[0x40 + 0x8 * i1 + i2] = ld_reg_reg(reg1, reg2)
self._opcodes[0x06 + 0x08 * i1] = P(ld_reg_d8, reg1)
# LD A, (REG)
for i, reg in enumerate(['bc', 'de', 'hl+', 'hl-']):
self._opcodes[0x10 * i + 0x2] = ld_mreg_a(r(reg))
self._opcodes[0x10 * i + 0xA] = ld_a_mreg(r(reg))
# LD A, (C) / LD (C), A
self._opcodes[0xE2] = ld_mc_a
self._opcodes[0xF2] = ld_a_mc
# RST
for i in range(0x00, 0x40, 0x8):
self._opcodes[0xC7 + i] = rst_nn(i)
# CALL, JMP, JR
self._opcodes[0x18] = jr_r8
self._opcodes[0xC3] = jmp_a16
self._opcodes[0xCD] = call_a16
for i, flag in enumerate(['nzf', 'zf', 'ncy', 'cy']):
self._opcodes[0xC0 + 0x8 * i] = ret_flag(flag)
self._opcodes[0x20 + 0x8 * i] = P(jr_flag_r8, flag)
self._opcodes[0xC2 + 0x8 * i] = P(jmp_flag_a16, flag)
self._opcodes[0xC2 + 0x8 * i + 0x2] = P(call_flag_a16, flag)
# Simple ops
for addr, op in [(0x00, 'nop'), (0x10, 'stop'), (0xFB, 'ei'),
(0xF3, 'di'), (0x76, 'halt'), (0xC9, 'ret'),
(0xD9, 'reti')]:
self._opcodes[addr] = P(lambda x, _: x, op)
for i, op in enumerate(['rlca', 'rrca', 'rla', 'rra', 'daa', 'cpl', 'scf', 'ccf']):
self._opcodes[0x07 + 0x08 * i] = P(lambda x, _: x, op)
def disassemble(self, inst):
try:
return self._opcodes[inst['opcode'][0]](inst)
except KeyError:
return '[unknown: {!r}]'.format(inst['opcode'])
except Exception as e:
return '[error: {!r} -> {}]'.format(inst['opcode'], str(e))
_RGX = '.*'
_RGX += 'PC: (?P<pc>[0-9A-Fa-f]{4}) \\| '
_RGX += 'OPCODE: (?P<opcode>[0-9A-Fa-f]{2}) \\| '
_RGX += 'MEM: (?P<mem>[0-9A-Fa-f]{4})$'
_LOG_LINE = re.compile(_RGX)
def _parse_line(line):
m = _LOG_LINE.match(line)
if m:
opcode = bytes.fromhex(m.group('opcode'))
pc = int(m.group('pc'), 16)
mem = bytes.fromhex(m.group('mem'))
return {'pc': pc, 'opcode': opcode, 'mem': mem}
return None
def chrlst(lst): return [struct.pack('B', c) for c in lst]
CPU_CONF = {
'parse_line': _parse_line,
'addr_width': 16,
'opcode_size': 3,
'interrupts': range(0x0, 0x60 + 1, 0x8),
'int_opcodes': chrlst(range(0xc7, 0x100, 0x8)),
'int_opcodes_size': 1,
'call_opcodes': chrlst([0xc4, 0xcc, 0xcd, 0xd4, 0xdc]),
'call_opcodes_size': 3,
'jump_opcodes': chrlst([0xc2, 0xc3, 0xca, 0xd2, 0xda, 0xe9]),
'jump_opcodes_size': 3,
'jr_opcodes': chrlst([0x18, 0x20, 0x28, 0x30, 0x38]),
'jr_opcodes_size': 2,
'ret_opcodes': chrlst([0xc9, 0xd9, 0xc0, 0xc8, 0xd0, 0xd8]),
'ret_opcodes_size': 1,
'disassembler': GBZ80Disassembler,
}
|
bsd-3-clause
| 271,786,114,187,145,340 | 36.596154 | 91 | 0.467519 | false |
indautgrp/erpnext
|
erpnext/selling/report/sales_person_wise_transaction_summary/sales_person_wise_transaction_summary.py
|
1
|
3777
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import msgprint, _
from frappe.utils import flt
def execute(filters=None):
if not filters: filters = {}
columns = get_columns(filters)
entries = get_entries(filters)
item_details = get_item_details()
data = []
total_contribution_amount = 0
for d in entries:
total_contribution_amount += flt(d.contribution_amt)
data.append([
d.name, d.customer, d.territory, d.posting_date, d.item_code,
item_details.get(d.item_code, {}).get("item_group"), item_details.get(d.item_code, {}).get("brand"),
d.qty, d.base_net_amount, d.sales_person, d.allocated_percentage, d.contribution_amt
])
if data:
total_row = [""]*len(data[0])
total_row[0] = _("Total")
total_row[-1] = total_contribution_amount
data.append(total_row)
return columns, data
def get_columns(filters):
if not filters.get("doc_type"):
msgprint(_("Please select the document type first"), raise_exception=1)
return [filters["doc_type"] + ":Link/" + filters["doc_type"] + ":100",
_("Customer") + ":Link/Customer:120", _("Territory") + ":Link/Territory:120", _("Posting Date") + ":Date:100",
_("Item Code") + ":Link/Item:120", _("Item Group") + ":Link/Item Group:120",
_("Brand") + ":Link/Brand:120", _("Qty") + ":Float:100", _("Amount") + ":Currency:120",
_("Sales Person") + ":Link/Sales Person:140", _("Contribution %") + "::110",
_("Contribution Amount") + ":Currency:140"]
def get_entries(filters):
date_field = filters["doc_type"] == "Sales Order" and "transaction_date" or "posting_date"
conditions, values = get_conditions(filters, date_field)
entries = frappe.db.sql("""
select
dt.name, dt.customer, dt.territory, dt.%s as posting_date, dt_item.item_code,
dt_item.qty, dt_item.base_net_amount, st.sales_person, st.allocated_percentage,
dt_item.base_net_amount*st.allocated_percentage/100 as contribution_amt
from
`tab%s` dt, `tab%s Item` dt_item, `tabSales Team` st
where
st.parent = dt.name and dt.name = dt_item.parent and st.parenttype = %s
and dt.docstatus = 1 %s order by st.sales_person, dt.name desc
""" %(date_field, filters["doc_type"], filters["doc_type"], '%s', conditions),
tuple([filters["doc_type"]] + values), as_dict=1)
return entries
def get_conditions(filters, date_field):
conditions = [""]
values = []
for field in ["company", "customer", "territory"]:
if filters.get(field):
conditions.append("dt.{0}=%s".format(field))
values.append(filters[field])
if filters.get("sales_person"):
lft, rgt = frappe.get_value("Sales Person", filters.get("sales_person"), ["lft", "rgt"])
conditions.append("exists(select name from `tabSales Person` where lft >= {0} and rgt <= {1} and name=st.sales_person)".format(lft, rgt))
if filters.get("from_date"):
conditions.append("dt.{0}>=%s".format(date_field))
values.append(filters["from_date"])
if filters.get("to_date"):
conditions.append("dt.{0}<=%s".format(date_field))
values.append(filters["to_date"])
items = get_items(filters)
if items:
conditions.append("dt_item.item_code in (%s)" % ', '.join(['%s']*len(items)))
values += items
return " and ".join(conditions), values
def get_items(filters):
if filters.get("item_group"): key = "item_group"
elif filters.get("brand"): key = "brand"
else: key = ""
items = []
if key:
items = frappe.db.sql_list("""select name from tabItem where %s = %s""" %
(key, '%s'), (filters[key]))
return items
def get_item_details():
item_details = {}
for d in frappe.db.sql("""select name, item_group, brand from `tabItem`""", as_dict=1):
item_details.setdefault(d.name, d)
return item_details
|
gpl-3.0
| -7,740,807,535,441,435,000 | 33.972222 | 139 | 0.664813 | false |
dekked/dynamodb-mock
|
ddbmock/database/item.py
|
1
|
9060
|
# -*- coding: utf-8 -*-
from ddbmock.errors import ConditionalCheckFailedException, ValidationException
from ddbmock import config
from decimal import Decimal
from math import ceil
from . import comparison
def _decode_field(field):
return field.items()[0]
class ItemSize(int):
def __add__(self, value):
return ItemSize(int.__add__(self, value))
def as_units(self):
"""Get item size in terms of capacity units. This does *not* include the
        index overhead. Units can *not* be below 1, i.e. a "delete" on a
        non-existing item is *not* free
"""
return max(1, int(ceil((self) / 1024.0)))
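        # Worked example of the formula above: ItemSize(3000).as_units() == 3
        # (ceil(3000 / 1024.0)), and ItemSize(0).as_units() == 1 because of the max(1, ...) floor.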
def with_indexing_overhead(self):
"""Take the indexing overhead into account. this is especially usefull
to compute the table disk size as DynamoDB would but it's not included
in the capacity unit calculation.
"""
return self + config.INDEX_OVERHEAD
class Item(dict):
def __init__(self, dico={}):
self.update(dico)
self.size = None
def filter(self, fields):
"""
Return a dict containing only the keys specified in ``fields``. If
``fields`` evaluates to False (None, empty, ...), the original dict is
returned untouched.
:ivar fields: array of name of keys to keep
:return: filtered ``item``
"""
if fields:
filtered = Item((k, v) for k, v in self.items() if k in fields)
filtered.size = self.get_size() # Filtered or not, you pay for actual size
return filtered
return self
def _apply_action(self, fieldname, action):
        # Rewrite this function, it's disgusting code
if action[u'Action'] == u"PUT":
self[fieldname] = action[u'Value']
if action[u'Action'] == u"DELETE": # Starts to be anoying
if not fieldname in self:
return #shortcut
if u'Value' not in action:
del self[fieldname] # Nice and easy part
return
typename, value = _decode_field(action[u'Value'])
ftypename, fvalue = _decode_field(self[fieldname])
if len(ftypename) != 2:
raise ValidationException(u"Can not DELETE elements from a non set type. Got {}".format(ftypename))
if ftypename != typename:
raise ValidationException(u"Expected type {t} for DELETE from type {t}. Got {}".format(typename, t=ftypename))
# do the dirty work
data = set(fvalue).difference(value)
# if data empty => remove the key
if not data:
del self[fieldname]
else:
self[fieldname] = {ftypename: list(data)}
if action[u'Action'] == u"ADD": # Realy anoying to code :s
#FIXME: not perfect, action should be different if the item was new
typename, value = _decode_field(action[u'Value'])
if fieldname in self:
ftypename, fvalue = _decode_field(self[fieldname])
if ftypename == u"N":
data = Decimal(value) + Decimal(fvalue)
self[fieldname][u"N"] = unicode(data)
elif ftypename in [u"NS", u"SS", u"BS"]:
if ftypename != typename:
raise ValidationException(u"Expected type {t} for ADD in type {t}. Got {}".format(typename, t=ftypename))
data = set(fvalue).union(value)
self[fieldname][typename] = list(data)
else:
raise ValidationException(u"Only N, NS, SS and BS types supports ADD operation. Got {}".format(ftypename))
else:
if typename not in [u"N", u"NS"]:
raise ValidationException(u"When performing ADD operation on new field, only Numbers or Numbers set are allowed. Got {} of type {}".format(value, typename))
self[fieldname] = action[u'Value']
def apply_actions(self, actions):
map(self._apply_action, actions.keys(), actions.values())
self.size = None # reset cache
def assert_match_expected(self, expected):
"""
Raise ConditionalCheckFailedException if ``self`` does not match ``expected``
values. ``expected`` schema is raw conditions as defined by DynamoDb.
:ivar expected: conditions to validate
:raises: ConditionalCheckFailedException
"""
for fieldname, condition in expected.iteritems():
if u'Exists' in condition and not condition[u'Exists']:
if fieldname in self:
raise ConditionalCheckFailedException(
"Field '{}' should not exist".format(fieldname))
# *IS* executed but coverage bug
continue # pragma: no cover
if fieldname not in self:
raise ConditionalCheckFailedException(
"Field '{}' should exist".format(fieldname))
if self[fieldname] != condition[u'Value']:
raise ConditionalCheckFailedException(
"Expected field '{}'' = '{}'. Got '{}'".format(
fieldname, condition[u'Value'], self[fieldname]))
def match(self, conditions):
for name, condition in conditions.iteritems():
if not self.field_match(name, condition):
return False
return True
def field_match(self, name, condition):
"""Check if a field matches a condition. Return False when field not
        found or when it does not match. If condition is None, it is considered to match.
:ivar name: name of the field to test
:ivar condition: raw dict describing the condition {"OPERATOR": FIELDDEFINITION}
:return: True on success
"""
        # According to the spec, no condition means match
if condition is None:
return True
# read the item
if name not in self:
value = None
else:
value = self[name]
        # Load the test operator from the comparison module. Thanks to input
# validation, no try/except required
condition_operator = condition[u'ComparisonOperator'].lower()
operator = getattr(comparison, condition_operator)
return operator(value, *condition[u'AttributeValueList'])
def read_key(self, key, name=None, max_size=0):
"""Provided ``key``, read field value at ``name`` or ``key.name`` if not
specified. If the field does not exist, this is a "ValueError". In case
it exists, also check the type compatibility. If it does not match, raise
TypeError.
:ivar key: ``Key`` or ``PrimaryKey`` to read
:ivar name: override name field of key
        :ivar max_size: if specified, check that the item is below a threshold
:return: field value
"""
if key is None:
return False
if name is None:
name = key.name
try:
field = self[name]
except KeyError:
raise ValidationException(u'Field {} not found'.format(name))
if max_size:
size = self.get_field_size(name)
if size > max_size:
raise ValidationException(u'Field {} is over {} bytes limit. Got {}'.format(name, max_size, size))
return key.read(field)
def _internal_item_size(self, base_type, value):
if base_type == 'N': return 8 # assumes "double" internal type on ddb side
if base_type == 'S': return len(value.encode('utf-8'))
        if base_type == 'B': return len(value.encode('utf-8'))*3/4 # base64 overhead
def get_field_size(self, key):
"""Return value size in bytes or 0 if not found"""
if not key in self:
return 0
typename, value = _decode_field(self[key])
base_type = typename[0]
if len(typename) == 1:
value_size = self._internal_item_size(base_type, value)
else:
value_size = 0
for v in value:
value_size += self._internal_item_size(base_type, v)
return value_size
def get_size(self):
"""Compute Item size as DynamoDB would. This is especially useful for
enforcing the 64kb per item limit as well as the capacityUnit cost.
        Note: the result is cached for efficiency. If you ever happen to directly
edit values for any reason, do not forget to invalidate it: ``self.size=None``
:return: the computed size
"""
# Check cache and compute
if self.size is None:
size = 0
for key in self.keys():
size += self._internal_item_size('S', key)
size += self.get_field_size(key)
self.size = size
return ItemSize(self.size)
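        # Illustrative example of the rules above: for an item such as
        # {u'id': {u'N': u'42'}, u'name': {u'S': u'abc'}} the size is
        # len('id') + 8 + len('name') + len('abc') == 2 + 8 + 4 + 3 == 17 bytes.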
def __sub__(self, other):
# Thanks mnoel :)
return {k:v for k,v in self.iteritems() if k not in other or v != other[k]}
|
lgpl-3.0
| -7,427,540,030,551,715,000 | 37.88412 | 176 | 0.579249 | false |
perfectsearch/sandman
|
code/sadm/repoinfo.py
|
1
|
4090
|
#!/usr/bin/env python
# -*- coding: utf-8 -*
from __future__ import print_function
import os
import sys
try:
from standardoptions import set_up_logging, add_standard_arguments
except:
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'buildscripts'))
from standardoptions import set_up_logging, add_standard_arguments
import argparse
import logging
import traceback
import subprocess
import string
MASTER_SERVER = 'bazaar.example.com' ## TODO make part of conf
INDENT = ' '
BRANCH = 0
COMPONENT = 1
ASPECT = 2
def main(args):
handle_arguments(args)
set_up_logging(OPTIONS)
branchinfo = _run('bzr fast-branches ' + OPTIONS.reporoot)
branchinfo = [b.split() for b in branchinfo.split('\n') if b.strip()]
if OPTIONS.branch:
branchinfo = [b for b in branchinfo if b[0].lower() == OPTIONS.branch.lower()]
if OPTIONS.component:
branchinfo = [b for b in branchinfo if b[1].lower() == OPTIONS.component.lower()]
if OPTIONS.branches:
show(branchinfo, BRANCH)
elif OPTIONS.components:
show(branchinfo, COMPONENT)
elif OPTIONS.aspects:
show(branchinfo, ASPECT)
else:
print('Branches:')
show(branchinfo, BRANCH, indent=True)
print('\nComponents:')
show(branchinfo, COMPONENT, indent=True)
print('\nAspects:')
show(branchinfo, ASPECT, indent=True)
def show(branchinfo, column, indent=False):
items = set()
[items.add(b[column]) for b in branchinfo]
[print((INDENT if indent else '') + c) for c in sorted(list(items), key=string.lower)]
def show_branches(branchinfo, indent=False):
branches = set()
[branches.add(b[0]) for b in branchinfo]
[print((INDENT if indent else '') +c) for c in sorted(list(branches), key=string.lower)]
def _run(command):
logging.debug('running: %s' % command)
process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
stdout, stderr = process.communicate()
if process.returncode != 0:
logging.error('failed to run %s: %s' % (command, stderr))
raise Exception()
return stdout
def get_branch_info(server, branchname):
branchinfos = []
branches = [l for l in get_revisions(server).split('\n') if l.strip() and l.startswith(branchname)]
for b in branches:
branch = decompose(b)
if branch:
branchinfos.append(branch)
return branchinfos
def handle_arguments(args):
parser = argparse.ArgumentParser(args[0])
add_standard_arguments(parser)
parser.add_argument('-r', '--reporoot', type=str,
default='bzr+ssh://bazaar.example.com/reporoot',## TODO make part of conf
help='root of repository')
parser.add_argument('-b', '--branches', help='only show branches', action='store_true')
parser.add_argument('--branch', help='only show information about this branch')
parser.add_argument('-c', '--components', help='only show components', action='store_true')
parser.add_argument('--component', help='only show information for this component')
parser.add_argument('-a', '--aspects', help = 'only show aspects', action='store_true')
global OPTIONS
OPTIONS = parser.parse_args(args[1:])
class Branch:
def __init__(self, branch, component, aspect, revid):
self.branch = branch
self.component = component
self.aspect = aspect
self.revid = revid
self.site_revid = None
def decompose(entry):
parts = entry.split()
if len(parts) != 4:
print('entry ERROR:', entry)
else:
return Branch(parts[0], parts[1], parts[2], parts[3])
def find_branch(master_branches, branch):
for b in master_branches:
if b.branch == branch.branch and b.component == branch.component and b.aspect == branch.aspect:
return b
if __name__ == "__main__":
sys.exit(main(sys.argv))
|
mit
| -8,831,123,763,097,423,000 | 30.204724 | 103 | 0.62934 | false |
yashdsaraf/scancode-toolkit
|
src/cluecode/finder.py
|
1
|
15886
|
#
# Copyright (c) 2015 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import, print_function
import logging
import string
import re
import url as urlpy
import ipaddress
from textcode import analysis
from cluecode import finder_data
LOG = logging.getLogger(__name__)
DEBUG = False
"""
Find patterns in text lines such as a emails and URLs.
Optionally apply filters to pattern matches.
"""
def find(location, patterns):
"""
Yield match and matched lines for patterns found in file at location as a
tuple of (key, found text, text line). Pattern is list of tuples (key,
compiled regex).
Note: the location can be a list of lines for testing convenience.
"""
if DEBUG:
from pprint import pformat
loc = pformat(location)
print('find(location=%(loc)r,\n patterns=%(patterns)r)' % locals())
for i, line in enumerate(analysis.text_lines(location)):
lineno = i + 1
for key, pattern in patterns:
for match in pattern.findall(line):
if DEBUG:
print('find: yielding match: key=%(key)r, '
'match=%(match)r,\n line=%(line)r' % locals())
yield key, unicode(match), line, lineno
def find_and_filter(location, patterns, filters, unique=True):
"""
Yield match and matched line number for patterns found in file at location
as a tuple of (found text, line number). Pattern is list of tuples (key,
compiled regex).
Note: the location can be a list of lines for testing convenience.
"""
def unique_filter(matches):
"""
Iterate over matches and yield unique matches.
"""
uniques = set()
for key, match, line, lineno in matches:
if (key, match,) in uniques:
continue
uniques.add((key, match,))
yield key, match, line, lineno
def apply_filters(matches, *filters):
"""
Apply a sequence of `filters` to a `matches` iterable. Return a new filtered
matches iterable.
A filter must accept a single arg: an iterable of tuples of (key, match,
line, lineno) and must return an iterable of tuples of (key, match, line,
lineno).
"""
for filt in filters:
matches = filt(matches)
return matches
def build_regex_filter(pattern):
"""
Return a filter function using regex pattern, filtering out matches
matching this regex. The pattern should be text, not a compiled re.
"""
def re_filt(matches):
for key, match, line, lineno in matches:
if re.match(regex, match):
if DEBUG:
print('build_regex_filter(pattern=%(pattern)r: '
'filtering match: %(match)r' % locals())
continue
yield key, match, line, lineno
regex = re.compile(pattern, re.UNICODE | re.I)
return re_filt
# A good reference page of email address regex is:
# http://fightingforalostcause.net/misc/2006/compare-email-regex.php email
# regex from http://www.regular-expressions.info/regexbuddy/email.html
def emails_regex():
return re.compile(r'\b[A-Z0-9._%-]+@[A-Z0-9.-]+\.[A-Z]{2,4}\b', re.IGNORECASE)
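# For illustration, this pattern matches addresses such as 'jane.doe@lists.example.org'
# anywhere in a line of text; note the TLD part is limited to 2-4 letters by {2,4}.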
def find_emails(location, unique=True):
"""
Yield emails found in file at location.
Only return unique items if unique is True.
"""
patterns = [('emails', emails_regex(),)]
matches = find(location, patterns)
filters = (junk_email_domains_filter,)
if unique:
filters += (unique_filter,)
matches = apply_filters(matches, *filters)
for _key, email, _line, lineno in matches:
yield email, lineno
def junk_email_domains_filter(matches):
"""
Given an iterable of email matches, return an iterable where email with
common uninteresting domains have been removed, such as local, non public
or example.com emails.
"""
for key, email, line, lineno in matches:
domain = email.split('@')[-1]
if not is_good_host(domain):
continue
yield key, email, line, lineno
def uninteresting_emails_filter(matches):
"""
Given an iterable of emails matches, return an iterable where common
uninteresting emails have been removed.
"""
for key, email, line, lineno in matches:
good_email = finder_data.classify_email(email)
if not good_email:
continue
yield key, email, line, lineno
# TODO: consider: http://www.regexguru.com/2008/11/detecting-urls-in-a-block-of-text/
# TODO: consider: http://blog.codinghorror.com/the-problem-with-urls/
schemes = 'https?|ftps?|sftp|rsync|ssh|svn|git|hg|https?\+git|https?\+svn|https?\+hg'
url_body = '[^\s<>\[\]"]'
def urls_regex():
# no space, no < >, no [ ] and no double quote
return re.compile(r'''
(
# URLs with schemes
(?:%(schemes)s)://%(url_body)s+
|
# common URLs prefix without schemes
(?:www|ftp)\.%(url_body)s+
|
# git style git@github.com:christophercantu/pipeline.git
git\@%(url_body)s+:%(url_body)s+\.git
)''' % globals()
, re.UNICODE | re.VERBOSE | re.IGNORECASE)
INVALID_URLS_PATTERN = '((?:' + schemes + ')://([$%*/_])+)'
def find_urls(location, unique=True):
"""
Yield urls found in file at location.
Only return unique items if unique is True.
"""
patterns = [('urls', urls_regex(),)]
matches = find(location, patterns)
# the order of filters IS important
filters = (
verbatim_crlf_url_cleaner,
end_of_url_cleaner,
empty_urls_filter,
scheme_adder,
user_pass_cleaning_filter,
build_regex_filter(INVALID_URLS_PATTERN),
canonical_url_cleaner,
junk_url_hosts_filter,
junk_urls_filter,
)
if unique:
filters += (unique_filter,)
matches = apply_filters(matches, *filters)
for _key, url, _line, lineno in matches:
yield unicode(url), lineno
EMPTY_URLS = set(['https', 'http', 'ftp', 'www', ])
def empty_urls_filter(matches):
"""
Given an iterable of URL matches, return an iterable without empty URLs.
"""
for key, match, line, lineno in matches:
junk = match.lower().strip(string.punctuation).strip()
if not junk or junk in EMPTY_URLS:
if DEBUG:
print('empty_urls_filter: filtering match: %(match)r'
% locals())
continue
yield key, match, line, lineno
def verbatim_crlf_url_cleaner(matches):
"""
Given an iterable of URL matches, return an iterable where literal end of
lines and carriage return characters that may show up as-is, un-encoded in
a URL have been removed.
"""
# FIXME: when is this possible and could happen?
for key, url, line, lineno in matches:
if not url.endswith('/'):
url = url.replace(r'\n', '')
url = url.replace(r'\r', '')
yield key, url, line, lineno
def end_of_url_cleaner(matches):
"""
Given an iterable of URL matches, return an iterable where junk characters
commonly found at the end of a URL are removed.
This is not entirely correct, but works practically.
"""
for key, url, line, lineno in matches:
if not url.endswith('/'):
url = url.replace(u'<', u'<')
url = url.replace(u'>', u'>')
url = url.replace(u'&', u'&')
url = url.rstrip(string.punctuation)
url = url.split(u'\\')[0]
url = url.split(u'<')[0]
url = url.split(u'>')[0]
url = url.split(u'(')[0]
url = url.split(u')')[0]
url = url.split(u'[')[0]
url = url.split(u']')[0]
url = url.split(u'"')[0]
url = url.split(u"'")[0]
yield key, url, line, lineno
non_standard_urls_prefix = ('git@',)
def is_filterable(url):
"""
Return True if a url is eligible for filtering. Certain URLs should not pass
through certain filters (such as a git@github.com style urls)
"""
return not url.startswith(non_standard_urls_prefix)
def scheme_adder(matches):
"""
Add a fake http:// scheme if there was none.
"""
for key, match, line, lineno in matches:
if is_filterable(match):
match = add_fake_scheme(match)
yield key, match, line, lineno
def add_fake_scheme(url):
"""
Add a fake http:// scheme to URL if has none.
"""
if not has_scheme(url):
url = u'http://' + url.lstrip(u':/').strip()
return url
def has_scheme(url):
"""
Return True if url has a scheme.
"""
return re.match('^(?:%(schemes)s)://.*' % globals(), url)
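# e.g. has_scheme('http://example.com/x') is truthy while has_scheme('www.example.com')
# returns None, which is why scheme_adder() prepends a fake 'http://' scheme.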
def user_pass_cleaning_filter(matches):
"""
Given an iterable of URL matches, return an iterable where user and
password are removed from the URLs host.
"""
for key, match, line, lineno in matches:
if is_filterable(match):
host, _domain = url_host_domain(match)
if not host:
if DEBUG:
print('user_pass_cleaning_filter: '
'filtering match(no host): %(match)r' % locals())
continue
if '@' in host:
# strips any user/pass
host = host.split(u'@')[-1]
yield key, match, line, lineno
def canonical_url(uri):
"""
Return the canonical representation of a given URI.
This assumes the `uri` has a scheme.
* When a default port corresponding for the scheme is explicitly declared
(such as port 80 for http), the port will be removed from the output.
* Fragments '#' are not removed.
* Params and query string arguments are not reordered.
"""
normalized = urlpy.parse(uri).sanitize().punycode()
if normalized._port == urlpy.PORTS.get(normalized._scheme, None):
normalized._port = None
return normalized.utf8()
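# Illustrative example of the default-port rule above: for u'http://example.com:80/x' the
# explicit port equals urlpy.PORTS.get('http') (presumably 80), so it is dropped from the result.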
def canonical_url_cleaner(matches):
"""
Given an iterable of URL matches, return an iterable where URLs have been
canonicalized.
"""
for key, match, line, lineno in matches:
        if is_filterable(match):
            canonic = canonical_url(match)
            if DEBUG:
                print('canonical_url_cleaner: '
                      'match=%(match)r, canonic=%(canonic)r' % locals())
            match = canonic
        yield key, match, line, lineno
IP_V4_RE = r'^(\d{1,3}\.){0,3}\d{1,3}$'
def is_ip_v4(s):
return re.compile(IP_V4_RE).match(s)
IP_V6_RE = (
r'^([0-9a-f]{0,4}:){2,7}[0-9a-f]{0,4}$'
'|'
r'^([0-9a-f]{0,4}:){2,6}(\d{1,3}\.){0,3}\d{1,3}$'
)
def is_ip_v6(s):
"""
Return True is string s is an IP V6 address
"""
return re.compile(IP_V6_RE).match(s)
def is_ip(s):
"""
    Return True if string s is an IP address
"""
return is_ip_v4(s) or is_ip_v6(s)
def get_ip(s):
"""
    Return an ipaddress object if string s is a valid IP address, False otherwise
"""
if not is_ip(s):
return False
try:
ip = ipaddress.ip_address(unicode(s))
return ip
except ValueError:
return False
def is_private_ip(ip):
"""
Return true if ip object is a private or local IP.
"""
if ip:
if isinstance(ip, ipaddress.IPv4Address):
private = (
ip.is_reserved
or ip.is_private
or ip.is_multicast
or ip.is_unspecified
or ip.is_loopback
or ip.is_link_local
)
else:
            private = (
ip.is_multicast
or ip.is_reserved
or ip.is_link_local
or ip.is_site_local
or ip.is_private
or ip.is_unspecified
or ip.is_loopback
)
return private
def is_good_host(host):
"""
Return True if the host is not some local or uninteresting host.
"""
if not host:
return False
ip = get_ip(host)
if ip:
if is_private_ip(ip):
return False
return finder_data.classify_ip(host)
# at this stage we have a host name, not an IP
if '.' not in host:
# private hostnames not in a domain, including localhost
return False
good_host = finder_data.classify_host(host)
return good_host
def url_host_domain(url):
"""
Return a tuple of the (host, domain) of a URL or None. Assumes that the
URL has a scheme.
"""
parsed = urlpy.parse(url)
host = parsed._host
if not host:
return None, None
host = host.lower()
domain = parsed.pld().lower()
return host, domain
def junk_url_hosts_filter(matches):
"""
Given an iterable of URL matches, return an iterable where URLs with
common uninteresting hosts or domains have been removed, such as local,
non public or example.com URLs.
"""
for key, match, line, lineno in matches:
if is_filterable(match):
host, domain = url_host_domain(match)
if not is_good_host(host):
if DEBUG:
print('junk_url_hosts_filter: '
'!is_good_host:%(host)r): %(match)r' % locals())
continue
if not is_good_host(domain) and not is_ip(host):
if DEBUG:
print('junk_url_hosts_filter: ''!is_good_host:%(domain)r '
'and !is_ip:%(host)r: %(match)r' % locals())
continue
yield key, match, line, lineno
def junk_urls_filter(matches):
"""
Given an iterable of URL matches, return an iterable where URLs with
common uninteresting URLs, or uninteresting URL hosts or domains have been
removed, such as local, non public or example.com URLs.
"""
for key, match, line, lineno in matches:
good_url = finder_data.classify_url(match)
if not good_url:
if DEBUG:
print('junk_url_filter: %(match)r' % locals())
continue
yield key, match, line, lineno
def find_pattern(location, pattern, unique=False):
"""
Find regex pattern in the text lines of file at location.
Return all match groups joined as one unicode string.
Only return unique items if unique is True.
"""
pattern = re.compile(pattern, re.UNICODE | re.I)
matches = find(location, [(None, pattern,)])
if unique:
matches = unique_filter(matches)
for _key, match , _line, lineno in matches:
yield match, lineno
|
apache-2.0
| -7,716,903,587,129,529,000 | 29.201521 | 85 | 0.600403 | false |
rsheftel/pandas_market_calendars
|
tests/test_bse_calendar.py
|
1
|
1192
|
import datetime
import pandas as pd
import pytz
from pandas_market_calendars.exchange_calendar_bse import BSEExchangeCalendar, BSEClosedDay
def test_time_zone():
assert BSEExchangeCalendar().tz == pytz.timezone('Asia/Calcutta')
assert BSEExchangeCalendar().name == 'BSE'
def test_holidays():
bse_calendar = BSEExchangeCalendar()
trading_days = bse_calendar.valid_days(pd.Timestamp('2004-01-01'), pd.Timestamp('2018-12-31'))
for session_label in BSEClosedDay:
assert session_label not in trading_days
def test_open_close_time():
bse_calendar = BSEExchangeCalendar()
india_time_zone = pytz.timezone('Asia/Calcutta')
bse_schedule = bse_calendar.schedule(
start_date=india_time_zone.localize(datetime.datetime(2015, 1, 14)),
end_date=india_time_zone.localize(datetime.datetime(2015, 1, 16))
)
assert BSEExchangeCalendar.open_at_time(
schedule=bse_schedule,
timestamp=india_time_zone.localize(datetime.datetime(2015, 1, 14, 11, 0))
)
assert not BSEExchangeCalendar.open_at_time(
schedule=bse_schedule,
timestamp=india_time_zone.localize(datetime.datetime(2015, 1, 9, 12, 0))
)
|
mit
| -823,092,407,218,533,600 | 29.564103 | 98 | 0.703859 | false |
dipanjanS/text-analytics-with-python
|
Old-First-Edition/Ch04_Text_Classification/classifier_evaluation_demo.py
|
1
|
3277
|
# -*- coding: utf-8 -*-
"""
Created on Fri Sep 02 12:36:55 2016
@author: DIP
"""
from sklearn import metrics
import numpy as np
import pandas as pd
from collections import Counter
actual_labels = ['spam', 'ham', 'spam', 'spam', 'spam',
'ham', 'ham', 'spam', 'ham', 'spam',
'spam', 'ham', 'ham', 'ham', 'spam',
'ham', 'ham', 'spam', 'spam', 'ham']
predicted_labels = ['spam', 'spam', 'spam', 'ham', 'spam',
'spam', 'ham', 'ham', 'spam', 'spam',
'ham', 'ham', 'spam', 'ham', 'ham',
'ham', 'spam', 'ham', 'spam', 'spam']
ac = Counter(actual_labels)
pc = Counter(predicted_labels)
print 'Actual counts:', ac.most_common()
print 'Predicted counts:', pc.most_common()
cm = metrics.confusion_matrix(y_true=actual_labels,
y_pred=predicted_labels,
labels=['spam','ham'])
print pd.DataFrame(data=cm,
columns=pd.MultiIndex(levels=[['Predicted:'],
['spam','ham']],
labels=[[0,0],[0,1]]),
index=pd.MultiIndex(levels=[['Actual:'],
['spam','ham']],
labels=[[0,0],[0,1]]))
positive_class = 'spam'
true_positive = 5.
false_positive = 6.
false_negative = 5.
true_negative = 4.
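# With the counts above, the metrics computed below should come out to roughly:
# accuracy = (5 + 4) / 20 = 0.45, precision = 5 / 11 ~= 0.45, recall = 5 / 10 = 0.5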
accuracy = np.round(
metrics.accuracy_score(y_true=actual_labels,
y_pred=predicted_labels),2)
accuracy_manual = np.round(
(true_positive + true_negative) /
(true_positive + true_negative +
false_negative + false_positive),2)
print 'Accuracy:', accuracy
print 'Manually computed accuracy:', accuracy_manual
precision = np.round(
metrics.precision_score(y_true=actual_labels,
y_pred=predicted_labels,
pos_label=positive_class),2)
precision_manual = np.round(
(true_positive) /
(true_positive + false_positive),2)
print 'Precision:', precision
print 'Manually computed precision:', precision_manual
recall = np.round(
metrics.recall_score(y_true=actual_labels,
y_pred=predicted_labels,
pos_label=positive_class),2)
recall_manual = np.round(
(true_positive) /
(true_positive + false_negative),2)
print 'Recall:', recall
print 'Manually computed recall:', recall_manual
f1_score = np.round(
metrics.f1_score(y_true=actual_labels,
y_pred=predicted_labels,
pos_label=positive_class),2)
f1_score_manual = np.round(
(2 * precision * recall) /
(precision + recall),2)
print 'F1 score:', f1_score
print 'Manually computed F1 score:', f1_score_manual
|
apache-2.0
| -7,227,046,085,735,694,000 | 36.25 | 91 | 0.46567 | false |
awesto/django-shop
|
shop/forms/auth.py
|
1
|
7249
|
from django.conf import settings
from django.contrib.auth import get_user_model, authenticate, login, password_validation
from django.contrib.auth.forms import PasswordResetForm
from django.contrib.sites.shortcuts import get_current_site
from django.core.mail import EmailMultiAlternatives
from django.core.exceptions import ValidationError
from django.forms import widgets, ModelForm
from django.template.loader import get_template, select_template, render_to_string
from django.utils.html import format_html
from django.utils.translation import gettext_lazy as _
from djng.forms import fields, NgModelFormMixin, NgFormValidationMixin
from djng.styling.bootstrap3.forms import Bootstrap3ModelForm
from post_office import mail as post_office_mail
from post_office.models import EmailTemplate
from shop.conf import app_settings
from shop.forms.base import UniqueEmailValidationMixin
from shop.models.customer import CustomerModel
from shop.signals import email_queued
class RegisterUserForm(NgModelFormMixin, NgFormValidationMixin, UniqueEmailValidationMixin, Bootstrap3ModelForm):
form_name = 'register_user_form'
scope_prefix = 'form_data'
field_css_classes = 'input-group has-feedback'
email = fields.EmailField(
label=_("Your e-mail address"),
widget=widgets.EmailInput(attrs={'placeholder': _("E-mail address")})
)
preset_password = fields.BooleanField(
label=_("Preset password"),
widget=widgets.CheckboxInput(attrs={'class': 'form-check-input'}),
required=False,
help_text=_("Send a randomly generated password to your e-mail address."),
)
error_messages = {
'password_mismatch': _("The two password fields didn't match."),
}
password1 = fields.CharField(
label=_("New password"),
widget=widgets.PasswordInput(attrs={'placeholder': _("Password")}),
strip=False,
help_text=password_validation.password_validators_help_text_html(),
)
password2 = fields.CharField(
label=_("New password confirmation"),
strip=False,
widget=widgets.PasswordInput(attrs={'placeholder': _("Password")}),
help_text=format_html('<ul><li>{}</li></ul>', _("Confirm the password.")),
)
class Meta:
model = CustomerModel
fields = ['email', 'password1', 'password2']
def __init__(self, data=None, instance=None, *args, **kwargs):
if data and data.get('preset_password', False):
pwd_length = max(self.base_fields['password1'].min_length or 8, 8)
password = get_user_model().objects.make_random_password(pwd_length)
data['password1'] = data['password2'] = password
super().__init__(data=data, instance=instance, *args, **kwargs)
def clean(self):
cleaned_data = super().clean()
password1 = cleaned_data.get('password1')
password2 = cleaned_data.get('password2')
if password1 and password2:
if password1 != password2:
raise ValidationError(
self.error_messages['password_mismatch'],
code='password_mismatch',
)
password_validation.validate_password(password2)
return cleaned_data
def save(self, request=None, commit=True):
self.instance.user.is_active = True
self.instance.user.email = self.cleaned_data['email']
self.instance.user.set_password(self.cleaned_data['password1'])
self.instance.recognize_as_registered(request, commit=False)
customer = super().save(commit)
password = self.cleaned_data['password1']
if self.cleaned_data['preset_password']:
self._send_password(request, customer.user, password)
user = authenticate(username=customer.user.username, password=password)
login(request, user)
return customer
def _send_password(self, request, user, password):
current_site = get_current_site(request)
context = {
'site_name': current_site.name,
'absolute_base_uri': request.build_absolute_uri('/'),
'email': user.email,
'password': password,
'user': user,
}
subject_template = select_template([
'{}/email/register-user-subject.txt'.format(app_settings.APP_LABEL),
'shop/email/register-user-subject.txt',
])
# Email subject *must not* contain newlines
subject = ''.join(subject_template.render(context).splitlines())
body_text_template = select_template([
'{}/email/register-user-body.txt'.format(app_settings.APP_LABEL),
'shop/email/register-user-body.txt',
])
body_html_template = select_template([
'{}/email/register-user-body.html'.format(app_settings.APP_LABEL),
'shop/email/register-user-body.html',
], using='post_office')
message = body_text_template.render(context)
html_message = body_html_template.render(context)
from_email = getattr(settings, 'DEFAULT_FROM_EMAIL')
user.email_user(subject, message, from_email=from_email, html_message=html_message)
email_queued()
class ContinueAsGuestForm(ModelForm):
"""
Handles Customer's decision to order as guest.
"""
form_name = 'continue_as_guest_form'
scope_prefix = 'form_data'
class Meta:
model = CustomerModel
fields = () # this form doesn't show any fields
def save(self, request=None, commit=True):
self.instance.recognize_as_guest(request, commit=False)
self.instance.user.is_active = app_settings.GUEST_IS_ACTIVE_USER
if self.instance.user.is_active:
# set a usable password, otherwise the user later can not reset its password
password = get_user_model().objects.make_random_password(length=30)
self.instance.user.set_password(password)
return super().save(commit)
class PasswordResetRequestForm(PasswordResetForm):
def send_mail(self, subject_template_name, email_template_name,
context, from_email, to_email, html_email_template_name=None):
try:
email_template = EmailTemplate.objects.get(name='password-reset-inform')
except EmailTemplate.DoesNotExist:
subject = render_to_string(subject_template_name, context)
# Email subject *must not* contain newlines
subject = ''.join(subject.splitlines())
body = render_to_string(email_template_name, context)
email_message = EmailMultiAlternatives(subject, body, from_email, [to_email])
if html_email_template_name:
template = get_template(html_email_template_name, using='post_office')
html = template.render(context)
email_message.attach_alternative(html, 'text/html')
template.attach_related(email_message)
email_message.send()
else:
context['user'] = str(context['user'])
context['uid'] = context['uid'].decode('utf-8')
post_office_mail.send(to_email, template=email_template, context=context, render_on_delivery=True)
email_queued()
|
bsd-3-clause
| -4,549,912,536,911,705,000 | 42.668675 | 113 | 0.653883 | false |
webbers/dongle.net
|
bld/libs/builder/src/steps/copyfilesstep.py
|
1
|
1151
|
import os
import shutil
from steps.abstractstep import *
def copyfile(filename1, filename2):
if not os.path.exists(os.path.dirname(filename2)):
os.makedirs(os.path.dirname(filename2))
shutil.copy( filename1, os.path.dirname(filename2) )
if os.path.isfile (filename2): return True
return False
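# Minimal usage sketch (paths are hypothetical): copyfile('/src/a/b.txt', '/dest/a/b.txt')
# creates '/dest/a' if needed, copies b.txt into it and returns True on success; note it
# implicitly assumes both paths share the same basename.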
class CopyFilesStep(AbstractStep):
"""Copy Files Step"""
def __init__( self, files, srcDir, destDir ):
AbstractStep.__init__( self, "Copy Files" )
self.srcDir = srcDir
self.destDir = destDir
self.files = files
def do( self ):
self.reporter.message( "COPY FILES: %s => %s" % ( self.srcDir, self.destDir ) )
for fp in self.files:
relPath = fp.lower().replace( os.path.realpath( self.srcDir ).lower(), "" )
destPath = os.path.realpath( self.destDir ) + relPath
self.reporter.message(fp)
if not copyfile( fp, destPath ):
self.reporter.failure("copying %s to %s" % (fp, destPath))
return False
return True
def setFiles( self, files ):
self.files = files
|
mit
| 7,843,665,492,706,745,000 | 30.135135 | 87 | 0.596003 | false |
lmazuel/azure-sdk-for-python
|
azure-mgmt-web/azure/mgmt/web/models/auto_heal_triggers.py
|
1
|
1747
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AutoHealTriggers(Model):
"""Triggers for auto-heal.
:param requests: A rule based on total requests.
:type requests: ~azure.mgmt.web.models.RequestsBasedTrigger
:param private_bytes_in_kb: A rule based on private bytes.
:type private_bytes_in_kb: int
:param status_codes: A rule based on status codes.
:type status_codes: list[~azure.mgmt.web.models.StatusCodesBasedTrigger]
:param slow_requests: A rule based on request execution time.
:type slow_requests: ~azure.mgmt.web.models.SlowRequestsBasedTrigger
"""
_attribute_map = {
'requests': {'key': 'requests', 'type': 'RequestsBasedTrigger'},
'private_bytes_in_kb': {'key': 'privateBytesInKB', 'type': 'int'},
'status_codes': {'key': 'statusCodes', 'type': '[StatusCodesBasedTrigger]'},
'slow_requests': {'key': 'slowRequests', 'type': 'SlowRequestsBasedTrigger'},
}
def __init__(self, requests=None, private_bytes_in_kb=None, status_codes=None, slow_requests=None):
super(AutoHealTriggers, self).__init__()
self.requests = requests
self.private_bytes_in_kb = private_bytes_in_kb
self.status_codes = status_codes
self.slow_requests = slow_requests
|
mit
| -7,658,815,519,248,362,000 | 42.675 | 103 | 0.63194 | false |
Lapeth/timeline
|
Timeline/data/migrations/0002_auto_20150823_0913.py
|
1
|
1872
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def lang(apps, schema_editor):
Language = apps.get_model("Timeline_data", "Language")
en = Language()
en.id = 1
en.code = 'en'
en.indexing = 1
en.name = "English"
en.save()
da = Language()
da.id = 2
da.code = 'da'
da.indexing = 2
da.name = "Dansk"
da.save()
class Migration(migrations.Migration):
dependencies = [
('Timeline_data', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Language',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('code', models.CharField(max_length=5)),
('indexing', models.IntegerField(unique=True)),
('name', models.CharField(max_length=20)),
],
options={
},
bases=(models.Model,),
),
migrations.RunPython(lang),
migrations.AddField(
model_name='eventbase',
name='group',
field=models.IntegerField(default=0),
preserve_default=True,
),
migrations.AddField(
model_name='eventbase',
name='language',
field=models.ForeignKey(default=1, to='Timeline_data.Language'),
preserve_default=False,
),
migrations.AddField(
model_name='tagbase',
name='group',
field=models.IntegerField(default=0),
preserve_default=True,
),
migrations.AddField(
model_name='tagbase',
name='language',
field=models.ForeignKey(default=1, to='Timeline_data.Language'),
preserve_default=False,
),
]
|
apache-2.0
| -8,127,956,987,461,323,000 | 27.8 | 114 | 0.532585 | false |
vlegoff/tsunami
|
src/secondaires/navigation/cherchables/__init__.py
|
1
|
1694
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Ce package contient les objets cherchables du module.
Modules :
navire -- cherchable navire
"""
from . import navire
|
bsd-3-clause
| 1,609,541,058,728,453,000 | 43.578947 | 79 | 0.775679 | false |
amaurywalbert/twitter
|
net_structure/ks_test/ks_test_models.py
|
1
|
5706
|
# -*- coding: latin1 -*-
################################################################################################
#
#
import snap, datetime, sys, time, json, os, os.path, shutil, time, struct, random
from scipy import stats  # needed for stats.ks_2samp below
reload(sys)
sys.setdefaultencoding('utf-8')
######################################################################################################################################################################
## Status - Version 1 - Two-Sided Kolmogorov-Smirnov Tests - Test whether two groups of samples were drawn from sets with the same distribution.
## Compares them two at a time. E.g.: 03 folds - A,B,C - compares A-B, A-C, and B-C.
## Test performed between the sets of alters of each model of the ego network.
##
######################################################################################################################################################################
######################################################################################################################################################################
#
#Two-Sided Kolmogorov-Smirnov Tests
#
######################################################################################################################################################################
def ks_test(data1,data2):
statistical, p_value = stats.ks_2samp(data1, data2)
print statistical, p_value
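# Interpretation sketch (standard two-sample KS reading, not specific to this script):
# a small p_value (e.g. < 0.05) suggests the two samples were not drawn from the same distribution.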
def prepare(graphs_dir,output_dir,net,isdir):
    i = 0
    for file in os.listdir(graphs_dir):
        i+=1
        print (str(graphs_dir)+str(file)+" - Retrieving graph: "+str(i))
        if isdir is True:
            G = snap.LoadEdgeList(snap.PNGraph, graphs_dir+file, 0, 1)   # load from a text file - may require a separator, e.g.: snap.LoadEdgeList(snap.PNGraph, file, 0, 1, '\t')
        else:
            G = snap.LoadEdgeList(snap.PUNGraph, graphs_dir+file, 0, 1)  # load from a text file - may require a separator
        n_nodes = G.GetNodes()   # Number of vertices
        n_edges = G.GetEdges()   # Number of edges
    # TODO: check whether this is really the right test to use here...
######################################################################################################################################################################
######################################################################################################################################################################
#
# Método principal do programa.
#
######################################################################################################################################################################
######################################################################################################################################################################
def main():
os.system('clear')
print "################################################################################"
print" "
print" Script para cálculo de significância estatística dos modelos "
print" "
print"#################################################################################"
print
print" 1 - Follow"
print" 9 - Follwowers"
print" 2 - Retweets"
print" 3 - Likes"
print" 3 - Mentions"
print " "
print" 5 - Co-Follow"
print" 10 - Co-Followers"
print" 6 - Co-Retweets"
print" 7 - Co-Likes"
print" 8 - Co-Mentions"
print
    op = int(raw_input("Choose one of the options above: "))
    if op in (5,6,7,8,10):   # Check whether the graph is directed or not
isdir = False
elif op in (1,2,3,4,9):
isdir = True
else:
print("Opção inválida! Saindo...")
sys.exit()
######################################################################
net = "n"+str(op)
######################################################################
######################################################################
graphs_dir = "/home/amaury/graphs/"+str(net)+"/graphs_with_ego/" # Diretório dos grafos - antes da conversão com hashmap
if not os.path.isdir(graphs_dir):
print("Diretório dos grafos não encontrado: "+str(graphs_dir))
else:
output_dir = "/home/amaury/Dropbox/ks_test/graphs_with_ego/"
if not os.path.exists(output_dir):
os.makedirs(output_dir)
prepare(graphs_dir,output_dir,net,isdir) # Inicia os cálculos...
######################################################################
######################################################################
graphs_dir2 = "/home/amaury/graphs/"+str(net)+"/graphs_without_ego/" # Diretório dos grafos - antes da conversão com hashmap
if not os.path.isdir(graphs_dir2):
print("Diretório dos grafos não encontrado: "+str(graphs_dir2))
else:
output_dir2 = "/home/amaury/Dropbox/ks_test/graphs_with_ego/"
if not os.path.exists(output_dir2):
os.makedirs(output_dir2)
        prepare(graphs_dir2,output_dir2,net,isdir)   # Start the calculations...
######################################################################
######################################################################
print("\n######################################################################\n")
print("Script finalizado!")
print("\n######################################################################\n")
######################################################################################################################################################################
#
# PROGRAM START
#
######################################################################################################################################################################
# Run the main method
if __name__ == "__main__": main()
|
gpl-3.0
| -8,505,254,690,200,277,000 | 45.532787 | 172 | 0.350423 | false |
endlisnis/weather-records
|
winterAverages.py
|
1
|
4494
|
#!/usr/bin/python3
import time, posix, daily, gnuplot, linear, sys, datetime
from dailyFilters import *
from monthName import monthName
START_MONTH=7
START_DAY=1
END_MONTH=7
END_DAY=1
def START_DATE(year):
return datetime.date(year,START_MONTH,START_DAY)
def END_DATE(year):
return datetime.date(year+1,END_MONTH,END_DAY)
TOTAL_COUNT = 30
def yearlySum(cityData, year, field):
sum = 0
count = 0
fakeCount = 0
for date in daily.dayRange(START_DATE(year), END_DATE(year)):
#print date
try:
val = field(cityData[date])
if val != None:
sum += val
count += 1
elif cityData[date].MAX_TEMP is not None:
fakeCount += 1
except KeyError:
pass
if count > 30:
count += fakeCount
return (sum, count)
def yearlyAverage(cityData, year, field):
(sum, count) = yearlySum(cityData, year, field)
avg = None
if count:
avg = sum/count
return avg
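# Usage sketch (cityData/field stand for the objects built under __main__ below):
# yearlyAverage(cityData, 2013, field) averages the field over the window from
# START_DATE(2013) to END_DATE(2013) and returns None when no readings are available.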
def normalYearlyAverage(cityData, beforeYear, field):
sum = 0
count = 0
for year in range(beforeYear-31, beforeYear):
(ysum, ycount) = yearlySum(cityData, year, field)
sum += ysum
count += ycount
avg = None
if count:
avg = sum/count
#print 'debug: m=%d, f=%d, s=%d, c=%d, a=%d' % (month, field, sum, count, avg)
return avg
def normalYearlySum(cityData, beforeYear, field):
sum = 0
count = 0
for year in range(beforeYear-31, beforeYear):
(ysum, ycount) = yearlySum(cityData, year, field)
sum += ysum
count += 1
avg = None
if count:
avg = sum/count
return avg
def getAnnualValues(cityData, field, cumulative):
data = []
for year in range(cityData.minYear, cityData.maxYear+1):
thisYearSum, thisYearCount = yearlySum(cityData, year, field)
if thisYearCount >= TOTAL_COUNT*8/10:
v = thisYearSum
if not cumulative:
v /= thisYearCount
data.append((year, v))
return data
if __name__=="__main__":
city=sys.argv[1]
for arg in sys.argv[2:]:
if arg.startswith('-start='):
(START_MONTH, START_DAY) = map(int, arg.split('=')[1].split(','))
if arg.startswith('-end='):
(END_MONTH, END_DAY) = map(int, arg.split('=')[1].split(','))
print(START_MONTH, START_DAY, END_MONTH, END_DAY)
cityData = daily.load(city)
for field in [
#FractionVal(daily.MIN_WINDCHILL, "windchill"),
Avg(daily.MAX_TEMP, daily.MIN_TEMP, 'average temperature'),
FractionVal(daily.AVG_WIND, 'wind'),
#FractionVal(daily.SNOW_ON_GRND_CM, 'average snow depth'),
]:
fname = field.name
print(fname)
data = getAnnualValues(cityData, field, False)
assert len(data)
for year, val in data:
print('%u: %.1f' % (year, val))
lineFit = linear.linearTrend(data)
plot = gnuplot.Plot("%s/svg/Winter_%s" % (city, fname), yaxis=2)
plot.open(title='%s %s per winter (%s %u to %s %u)'
% (city.capitalize(), field.title,
monthName(START_MONTH), START_DAY,
monthName(END_MONTH), END_DAY) )
plot.addLine(gnuplot.Line("Linear", lineFit, lineColour='green', plot='lines'))
plot.addLine(gnuplot.Line("Temp", data, lineColour='purple'))
plot.plot()
plot.close()
print('30-year average: %.1f' % normalYearlyAverage(cityData, 2014, field))
for field in [FractionVal(daily.TOTAL_SNOW_CM, 'snow')]:
fname = field.name
print(fname)
data = getAnnualValues(cityData, field, True)
assert len(data)
for year, val in data:
print('%u: %8.1f' % (year, val))
lineFit = linear.linearTrend(data)
plot = gnuplot.Plot("%s/svg/Winter_%s" % (city, fname), yaxis=2)
plot.open(title='%s %s per winter (%s %u to %s %u)'
% (city.capitalize(), field.title,
monthName(START_MONTH), START_DAY,
monthName(END_MONTH), END_DAY),
ymin=0 )
plot.addLine(gnuplot.Line("Linear", lineFit, lineColour='green', plot='lines'))
plot.addLine(gnuplot.Line("Amount", data, lineColour='purple'))
plot.plot()
plot.close()
print('30-year average: %.1f' % normalYearlySum(cityData, 2014, field))
|
gpl-3.0
| 8,903,026,221,375,308,000 | 29.780822 | 87 | 0.568981 | false |
krislig/rumblepad2py
|
pad.py
|
1
|
1240
|
import usb.core
import usb.util
from time import sleep
dev = usb.core.find(idVendor=0x046d, idProduct=0xc218)
if dev is None:
raise ValueError('Device not found')
interface = 0
endpoint = dev[0][(0,0)][0]
if dev.is_kernel_driver_active(interface) is True:
dev.detach_kernel_driver(interface)
usb.util.claim_interface(dev, interface)
print 'claimed'
collected = 0
attempts = 100
print endpoint.bEndpointAddress
try:
# strong vibration - left side - vibration level from 0x00 to 0xff
dev.write(1, [0x51, 0, 0, 0, 0xff, 0, 0, 0], interface)
sleep(1)
# stop vibration
dev.write(1, [0xf3, 0, 0, 0, 0, 0, 0, 0], interface)
sleep(1)
# weak vibration - right side - vibration level from 0x00 to 0xff
dev.write(1, [0x51, 0, 0xff, 0, 0, 0, 0, 0], interface)
sleep(1)
dev.write(1, [0xf3, 0, 0, 0, 0, 0, 0, 0], interface)
except:
    print 'exception occurred'
while collected < attempts:
try:
data = dev.read(endpoint.bEndpointAddress, endpoint.wMaxPacketSize)
collected += 1
print data
except usb.core.USBError as e:
data = None
if e.args == ('Operation timed out',):
print 'time out'
print 'releasing interface'
usb.util.release_interface(dev, interface)
dev.attach_kernel_driver(interface)
print 'done'
|
mit
| 2,519,202,103,770,096,600 | 23.313725 | 71 | 0.698387 | false |
Sixshaman/networkx
|
networkx/algorithms/shortest_paths/tests/test_weighted.py
|
1
|
16774
|
from __future__ import division
from nose.tools import assert_equal
from nose.tools import assert_false
from nose.tools import assert_raises
from nose.tools import raises
import networkx as nx
def _setUp(self):
cnlti = nx.convert_node_labels_to_integers
self.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted")
self.cycle = nx.cycle_graph(7)
self.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph())
self.XG = nx.DiGraph()
self.XG.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5),
('u', 'v', 1), ('u', 'x', 2),
('v', 'y', 1), ('x', 'u', 3),
('x', 'v', 5), ('x', 'y', 2),
('y', 's', 7), ('y', 'v', 6)])
self.MXG = nx.MultiDiGraph(self.XG)
self.MXG.add_edge('s', 'u', weight=15)
self.XG2 = nx.DiGraph()
self.XG2.add_weighted_edges_from([[1, 4, 1], [4, 5, 1],
[5, 6, 1], [6, 3, 1],
[1, 3, 50], [1, 2, 100], [2, 3, 100]])
self.XG3 = nx.Graph()
self.XG3.add_weighted_edges_from([[0, 1, 2], [1, 2, 12],
[2, 3, 1], [3, 4, 5],
[4, 5, 1], [5, 0, 10]])
self.XG4 = nx.Graph()
self.XG4.add_weighted_edges_from([[0, 1, 2], [1, 2, 2],
[2, 3, 1], [3, 4, 1],
[4, 5, 1], [5, 6, 1],
[6, 7, 1], [7, 0, 1]])
self.MXG4 = nx.MultiGraph(self.XG4)
self.MXG4.add_edge(0, 1, weight=3)
self.G = nx.DiGraph() # no weights
self.G.add_edges_from([('s', 'u'), ('s', 'x'),
('u', 'v'), ('u', 'x'),
('v', 'y'), ('x', 'u'),
('x', 'v'), ('x', 'y'),
('y', 's'), ('y', 'v')])
def validate_path(G, s, t, soln_len, path):
assert_equal(path[0], s)
assert_equal(path[-1], t)
if not G.is_multigraph():
assert_equal(
soln_len, sum(G[u][v].get('weight', 1)
for u, v in zip(path[:-1], path[1:])))
else:
assert_equal(
soln_len, sum(min(e.get('weight', 1) for e in G[u][v].values())
for u, v in zip(path[:-1], path[1:])))
def validate_length_path(G, s, t, soln_len, length, path):
assert_equal(soln_len, length)
validate_path(G, s, t, length, path)
class TestWeightedPath:
setUp = _setUp
def test_dijkstra(self):
(D, P) = nx.single_source_dijkstra(self.XG, 's')
validate_path(self.XG, 's', 'v', 9, P['v'])
assert_equal(D['v'], 9)
validate_path(
self.XG, 's', 'v', 9, nx.single_source_dijkstra_path(self.XG, 's')['v'])
assert_equal(dict(
nx.single_source_dijkstra_path_length(self.XG, 's'))['v'], 9)
validate_path(
self.XG, 's', 'v', 9, nx.single_source_dijkstra(self.XG, 's')[1]['v'])
validate_path(
self.MXG, 's', 'v', 9, nx.single_source_dijkstra_path(self.MXG, 's')['v'])
GG = self.XG.to_undirected()
# make sure we get lower weight
# to_undirected might choose either edge with weight 2 or weight 3
GG['u']['x']['weight'] = 2
(D, P) = nx.single_source_dijkstra(GG, 's')
validate_path(GG, 's', 'v', 8, P['v'])
assert_equal(D['v'], 8) # uses lower weight of 2 on u<->x edge
validate_path(GG, 's', 'v', 8, nx.dijkstra_path(GG, 's', 'v'))
assert_equal(nx.dijkstra_path_length(GG, 's', 'v'), 8)
validate_path(self.XG2, 1, 3, 4, nx.dijkstra_path(self.XG2, 1, 3))
validate_path(self.XG3, 0, 3, 15, nx.dijkstra_path(self.XG3, 0, 3))
assert_equal(nx.dijkstra_path_length(self.XG3, 0, 3), 15)
validate_path(self.XG4, 0, 2, 4, nx.dijkstra_path(self.XG4, 0, 2))
assert_equal(nx.dijkstra_path_length(self.XG4, 0, 2), 4)
validate_path(self.MXG4, 0, 2, 4, nx.dijkstra_path(self.MXG4, 0, 2))
validate_path(
self.G, 's', 'v', 2, nx.single_source_dijkstra(self.G, 's', 'v')[1]['v'])
validate_path(
self.G, 's', 'v', 2, nx.single_source_dijkstra(self.G, 's')[1]['v'])
validate_path(self.G, 's', 'v', 2, nx.dijkstra_path(self.G, 's', 'v'))
assert_equal(nx.dijkstra_path_length(self.G, 's', 'v'), 2)
# NetworkXError: node s not reachable from moon
assert_raises(nx.NetworkXNoPath, nx.dijkstra_path, self.G, 's', 'moon')
assert_raises(
nx.NetworkXNoPath, nx.dijkstra_path_length, self.G, 's', 'moon')
validate_path(self.cycle, 0, 3, 3, nx.dijkstra_path(self.cycle, 0, 3))
validate_path(self.cycle, 0, 4, 3, nx.dijkstra_path(self.cycle, 0, 4))
assert_equal(
nx.single_source_dijkstra(self.cycle, 0, 0), ({0: 0}, {0: [0]}))
def test_bidirectional_dijkstra(self):
validate_length_path(
self.XG, 's', 'v', 9, *nx.bidirectional_dijkstra(self.XG, 's', 'v'))
validate_length_path(
self.G, 's', 'v', 2, *nx.bidirectional_dijkstra(self.G, 's', 'v'))
validate_length_path(
self.cycle, 0, 3, 3, *nx.bidirectional_dijkstra(self.cycle, 0, 3))
validate_length_path(
self.cycle, 0, 4, 3, *nx.bidirectional_dijkstra(self.cycle, 0, 4))
validate_length_path(
self.XG3, 0, 3, 15, *nx.bidirectional_dijkstra(self.XG3, 0, 3))
validate_length_path(
self.XG4, 0, 2, 4, *nx.bidirectional_dijkstra(self.XG4, 0, 2))
# need more tests here
P = nx.single_source_dijkstra_path(self.XG, 's')['v']
validate_path(self.XG, 's', 'v', sum(self.XG[u][v]['weight'] for u, v in zip(
P[:-1], P[1:])), nx.dijkstra_path(self.XG, 's', 'v'))
@raises(nx.NetworkXNoPath)
def test_bidirectional_dijkstra_no_path(self):
G = nx.Graph()
nx.add_path(G, [1, 2, 3])
nx.add_path(G, [4, 5, 6])
path = nx.bidirectional_dijkstra(G, 1, 6)
def test_dijkstra_predecessor(self):
G = nx.path_graph(4)
assert_equal(nx.dijkstra_predecessor_and_distance(G, 0),
({0: [], 1: [0], 2: [1], 3: [2]}, {0: 0, 1: 1, 2: 2, 3: 3}))
G = nx.grid_2d_graph(2, 2)
pred, dist = nx.dijkstra_predecessor_and_distance(G, (0, 0))
assert_equal(sorted(pred.items()),
[((0, 0), []), ((0, 1), [(0, 0)]),
((1, 0), [(0, 0)]), ((1, 1), [(0, 1), (1, 0)])])
assert_equal(sorted(dist.items()),
[((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 2)])
XG = nx.DiGraph()
XG.add_weighted_edges_from([('s', 'u', 10), ('s', 'x', 5),
('u', 'v', 1), ('u', 'x', 2),
('v', 'y', 1), ('x', 'u', 3),
('x', 'v', 5), ('x', 'y', 2),
('y', 's', 7), ('y', 'v', 6)])
(P, D) = nx.dijkstra_predecessor_and_distance(XG, 's')
assert_equal(P['v'], ['u'])
assert_equal(D['v'], 9)
(P, D) = nx.dijkstra_predecessor_and_distance(XG, 's', cutoff=8)
assert_false('v' in D)
def test_single_source_dijkstra_path_length(self):
pl = nx.single_source_dijkstra_path_length
assert_equal(dict(pl(self.MXG4, 0))[2], 4)
spl = pl(self.MXG4, 0, cutoff=2)
assert_false(2 in spl)
def test_bidirectional_dijkstra_multigraph(self):
G = nx.MultiGraph()
G.add_edge('a', 'b', weight=10)
G.add_edge('a', 'b', weight=100)
dp = nx.bidirectional_dijkstra(G, 'a', 'b')
assert_equal(dp, (10, ['a', 'b']))
def test_dijkstra_pred_distance_multigraph(self):
G = nx.MultiGraph()
G.add_edge('a', 'b', key='short', foo=5, weight=100)
G.add_edge('a', 'b', key='long', bar=1, weight=110)
p, d = nx.dijkstra_predecessor_and_distance(G, 'a')
assert_equal(p, {'a': [], 'b': ['a']})
assert_equal(d, {'a': 0, 'b': 100})
def test_negative_edge_cycle(self):
G = nx.cycle_graph(5, create_using=nx.DiGraph())
assert_equal(nx.negative_edge_cycle(G), False)
G.add_edge(8, 9, weight=-7)
G.add_edge(9, 8, weight=3)
graph_size = len(G)
assert_equal(nx.negative_edge_cycle(G), True)
assert_equal(graph_size, len(G))
assert_raises(ValueError, nx.single_source_dijkstra_path_length, G, 8)
assert_raises(ValueError, nx.single_source_dijkstra, G, 8)
assert_raises(ValueError, nx.dijkstra_predecessor_and_distance, G, 8)
G.add_edge(9, 10)
assert_raises(ValueError, nx.bidirectional_dijkstra, G, 8, 10)
def test_weight_function(self):
"""Tests that a callable weight is interpreted as a weight
function instead of an edge attribute.
"""
# Create a triangle in which the edge from node 0 to node 2 has
# a large weight and the other two edges have a small weight.
G = nx.complete_graph(3)
G.edge[0][2]['weight'] = 10
G.edge[0][1]['weight'] = 1
G.edge[1][2]['weight'] = 1
# The weight function will take the multiplicative inverse of
# the weights on the edges. This way, weights that were large
# before now become small and vice versa.
weight = lambda u, v, d: 1 / d['weight']
# The shortest path from 0 to 2 using the actual weights on the
# edges should be [0, 1, 2].
distances, paths = nx.single_source_dijkstra(G, 0, 2)
assert_equal(distances[2], 2)
assert_equal(paths[2], [0, 1, 2])
# However, with the above weight function, the shortest path
# should be the [0, 2], since that has a very small weight.
distances, paths = nx.single_source_dijkstra(G, 0, 2, weight=weight)
assert_equal(distances[2], 1 / 10)
assert_equal(paths[2], [0, 2])
class TestBellmanFordAndGoldbergRadizk:
setUp = _setUp
def test_single_node_graph(self):
G = nx.DiGraph()
G.add_node(0)
assert_equal(nx.bellman_ford(G, 0), ({0: None}, {0: 0}))
assert_equal(nx.goldberg_radzik(G, 0), ({0: None}, {0: 0}))
assert_raises(KeyError, nx.bellman_ford, G, 1)
assert_raises(KeyError, nx.goldberg_radzik, G, 1)
def test_negative_weight_cycle(self):
G = nx.cycle_graph(5, create_using=nx.DiGraph())
G.add_edge(1, 2, weight=-7)
for i in range(5):
assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, i)
assert_raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, i)
G = nx.cycle_graph(5) # undirected Graph
G.add_edge(1, 2, weight=-3)
for i in range(5):
assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, i)
assert_raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, i)
G = nx.DiGraph([(1, 1, {'weight': -1})])
assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, 1)
assert_raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, 1)
# no negative cycle but negative weight
G = nx.cycle_graph(5, create_using=nx.DiGraph())
G.add_edge(1, 2, weight=-3)
assert_equal(nx.bellman_ford(G, 0),
({0: None, 1: 0, 2: 1, 3: 2, 4: 3},
{0: 0, 1: 1, 2: -2, 3: -1, 4: 0}))
assert_equal(nx.goldberg_radzik(G, 0),
({0: None, 1: 0, 2: 1, 3: 2, 4: 3},
{0: 0, 1: 1, 2: -2, 3: -1, 4: 0}))
def test_not_connected(self):
G = nx.complete_graph(6)
G.add_edge(10, 11)
G.add_edge(10, 12)
assert_equal(nx.bellman_ford(G, 0),
({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0},
{0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}))
assert_equal(nx.goldberg_radzik(G, 0),
({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0},
{0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}))
# not connected, with a component not containing the source that
# contains a negative cost cycle.
G = nx.complete_graph(6)
G.add_edges_from([('A', 'B', {'load': 3}),
('B', 'C', {'load': -10}),
('C', 'A', {'load': 2})])
assert_equal(nx.bellman_ford(G, 0, weight='load'),
({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0},
{0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}))
assert_equal(nx.goldberg_radzik(G, 0, weight='load'),
({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0},
{0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}))
def test_multigraph(self):
P, D = nx.bellman_ford(self.MXG, 's')
assert_equal(P['v'], 'u')
assert_equal(D['v'], 9)
P, D = nx.goldberg_radzik(self.MXG, 's')
assert_equal(P['v'], 'u')
assert_equal(D['v'], 9)
P, D = nx.bellman_ford(self.MXG4, 0)
assert_equal(P[2], 1)
assert_equal(D[2], 4)
P, D = nx.goldberg_radzik(self.MXG4, 0)
assert_equal(P[2], 1)
assert_equal(D[2], 4)
def test_others(self):
(P, D) = nx.bellman_ford(self.XG, 's')
assert_equal(P['v'], 'u')
assert_equal(D['v'], 9)
(P, D) = nx.goldberg_radzik(self.XG, 's')
assert_equal(P['v'], 'u')
assert_equal(D['v'], 9)
G = nx.path_graph(4)
assert_equal(nx.bellman_ford(G, 0),
({0: None, 1: 0, 2: 1, 3: 2}, {0: 0, 1: 1, 2: 2, 3: 3}))
assert_equal(nx.goldberg_radzik(G, 0),
({0: None, 1: 0, 2: 1, 3: 2}, {0: 0, 1: 1, 2: 2, 3: 3}))
assert_equal(nx.bellman_ford(G, 3),
({0: 1, 1: 2, 2: 3, 3: None}, {0: 3, 1: 2, 2: 1, 3: 0}))
assert_equal(nx.goldberg_radzik(G, 3),
({0: 1, 1: 2, 2: 3, 3: None}, {0: 3, 1: 2, 2: 1, 3: 0}))
G = nx.grid_2d_graph(2, 2)
pred, dist = nx.bellman_ford(G, (0, 0))
assert_equal(sorted(pred.items()),
[((0, 0), None), ((0, 1), (0, 0)),
((1, 0), (0, 0)), ((1, 1), (0, 1))])
assert_equal(sorted(dist.items()),
[((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 2)])
pred, dist = nx.goldberg_radzik(G, (0, 0))
assert_equal(sorted(pred.items()),
[((0, 0), None), ((0, 1), (0, 0)),
((1, 0), (0, 0)), ((1, 1), (0, 1))])
assert_equal(sorted(dist.items()),
[((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 2)])
class TestJohnsonAlgorithm:
setUp = _setUp
@raises(nx.NetworkXError)
def test_single_node_graph(self):
G = nx.DiGraph()
G.add_node(0)
nx.johnson(G)
def test_negative_cycle(self):
G = nx.DiGraph()
G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5), ('1', '0', -5),
('0', '2', 2), ('1', '2', 4),
('2', '3', 1)])
assert_raises(nx.NetworkXUnbounded, nx.johnson, G)
G = nx.Graph()
G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5), ('1', '0', -5),
('0', '2', 2), ('1', '2', 4),
('2', '3', 1)])
assert_raises(nx.NetworkXUnbounded, nx.johnson, G)
def test_negative_weights(self):
G = nx.DiGraph()
G.add_weighted_edges_from([('0', '3', 3), ('0', '1', -5),
('0', '2', 2), ('1', '2', 4),
('2', '3', 1)])
paths = nx.johnson(G)
assert_equal(paths, {'1': {'1': ['1'], '3': ['1', '2', '3'],
'2': ['1', '2']}, '0': {'1': ['0', '1'],
'0': ['0'], '3': ['0', '1', '2', '3'],
'2': ['0', '1', '2']}, '3': {'3': ['3']},
'2': {'3': ['2', '3'], '2': ['2']}})
@raises(nx.NetworkXError)
def test_unweighted_graph(self):
G = nx.path_graph(5)
nx.johnson(G)
def test_graphs(self):
validate_path(self.XG, 's', 'v', 9, nx.johnson(self.XG)['s']['v'])
validate_path(self.MXG, 's', 'v', 9, nx.johnson(self.MXG)['s']['v'])
validate_path(self.XG2, 1, 3, 4, nx.johnson(self.XG2)[1][3])
validate_path(self.XG3, 0, 3, 15, nx.johnson(self.XG3)[0][3])
validate_path(self.XG4, 0, 2, 4, nx.johnson(self.XG4)[0][2])
validate_path(self.MXG4, 0, 2, 4, nx.johnson(self.MXG4)[0][2])
|
bsd-3-clause
| -4,696,423,949,973,894,000 | 42.455959 | 86 | 0.474842 | false |
RonnyPfannschmidt/setuptools_scm
|
testing/test_integration.py
|
1
|
2328
|
import sys
import pytest
from setuptools_scm.utils import do
from setuptools_scm import PRETEND_KEY, PRETEND_KEY_NAMED
@pytest.fixture
def wd(wd):
wd("git init")
wd("git config user.email test@example.com")
wd('git config user.name "a test"')
wd.add_command = "git add ."
wd.commit_command = "git commit -m test-{reason}"
return wd
def test_pyproject_support(tmpdir, monkeypatch):
pytest.importorskip("toml")
monkeypatch.delenv("SETUPTOOLS_SCM_DEBUG")
pkg = tmpdir.ensure("package", dir=42)
pkg.join("pyproject.toml").write(
"""[tool.setuptools_scm]
fallback_version = "12.34"
"""
)
pkg.join("setup.py").write("__import__('setuptools').setup()")
res = do((sys.executable, "setup.py", "--version"), pkg)
assert res == "12.34"
def test_pyproject_support_with_git(tmpdir, monkeypatch, wd):
pytest.importorskip("toml")
pkg = tmpdir.join("wd")
pkg.join("pyproject.toml").write("""[tool.setuptools_scm]""")
pkg.join("setup.py").write(
"__import__('setuptools').setup(name='setuptools_scm_example')"
)
res = do((sys.executable, "setup.py", "--version"), pkg)
assert res.endswith("0.1.dev0")
def test_pretend_version(tmpdir, monkeypatch, wd):
monkeypatch.setenv(PRETEND_KEY, "1.0.0")
assert wd.get_version() == "1.0.0"
assert wd.get_version(dist_name="ignored") == "1.0.0"
def test_pretend_version_named_pyproject_integration(tmpdir, monkeypatch, wd):
test_pyproject_support_with_git(tmpdir, monkeypatch, wd)
monkeypatch.setenv(
PRETEND_KEY_NAMED.format(name="setuptools_scm_example".upper()), "3.2.1"
)
res = do((sys.executable, "setup.py", "--version"), tmpdir / "wd")
assert res.endswith("3.2.1")
def test_pretend_version_named(tmpdir, monkeypatch, wd):
monkeypatch.setenv(PRETEND_KEY_NAMED.format(name="test".upper()), "1.0.0")
monkeypatch.setenv(PRETEND_KEY_NAMED.format(name="test2".upper()), "2.0.0")
assert wd.get_version(dist_name="test") == "1.0.0"
assert wd.get_version(dist_name="test2") == "2.0.0"
def test_pretend_version_name_takes_precedence(tmpdir, monkeypatch, wd):
monkeypatch.setenv(PRETEND_KEY_NAMED.format(name="test".upper()), "1.0.0")
monkeypatch.setenv(PRETEND_KEY, "2.0.0")
assert wd.get_version(dist_name="test") == "1.0.0"
|
mit
| -78,044,208,471,752,530 | 32.257143 | 80 | 0.659364 | false |
pferreir/indico-backup
|
indico/core/logger.py
|
1
|
10500
|
# -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico;if not, see <http://www.gnu.org/licenses/>.
import copy
import os
import logging
import logging.handlers
import logging.config
import ConfigParser
from flask import request, session
from ZODB.POSException import POSError
from indico.core.config import Config
from MaKaC.common.contextManager import ContextManager
class AddIDFilter(logging.Filter):
def filter(self, record):
if not logging.Filter.filter(self, record):
return False
# Add request ID if available
try:
record.request_id = request.id
except RuntimeError:
record.request_id = '0' * 12
return True
class ExtraIndicoFilter(AddIDFilter):
def filter(self, record):
if record.name.split('.')[0] == 'indico':
return False
return AddIDFilter.filter(self, record)
class IndicoMailFormatter(logging.Formatter):
def format(self, record):
s = logging.Formatter.format(self, record)
if isinstance(s, unicode):
s = s.encode('utf-8')
return s + self._getRequestInfo()
def _getRequestInfo(self):
rh = ContextManager.get('currentRH', None)
info = ['Additional information:']
try:
info.append('Request: %s' % request.id)
info.append('URL: %s' % request.url)
if request.url_rule:
info.append('Endpoint: {0}'.format(request.url_rule.endpoint))
info.append('Method: %s' % request.method)
if rh:
info.append('Params: %s' % rh._getTruncatedParams())
if session:
try:
info.append('User: {0}'.format(session.user))
except POSError:
# If the DB connection is closed getting the avatar may fail
info.append('User id: {0}'.format(session.get('_avatarId')))
info.append('IP: %s' % request.remote_addr)
info.append('User Agent: %s' % request.user_agent)
info.append('Referer: %s' % (request.referrer or 'n/a'))
except RuntimeError, e:
info.append('Not available: %s' % e)
return '\n\n%s' % '\n'.join(x.encode('utf-8') if isinstance(x, unicode) else x for x in info)
class LoggerUtils:
@classmethod
def _bootstrap_cp(cls, cp, defaultArgs):
"""
Creates a very basic logging config for cases in which
logging.conf does not yet exist
"""
if not cp.has_section('loggers'):
cp.add_section('loggers')
cp.add_section('logger_root')
cp.add_section('handlers')
cp.set('loggers', 'keys', 'root')
cp.set('logger_root', 'handlers', ','.join(defaultArgs))
cp.set('handlers', 'keys', ','.join(defaultArgs))
for handler_name in defaultArgs:
section_name = 'handler_' + handler_name
cp.add_section(section_name)
cp.set(section_name, 'formatter', 'defaultFormatter')
@classmethod
def configFromFile(cls, fname, defaultArgs, filters):
"""
Read the logging configuration from the logging.conf file.
Fetch default values if the logging.conf file is not set.
"""
cp = ConfigParser.ConfigParser()
parsed_files = cp.read(fname)
if cp.has_section('formatters'):
formatters = logging.config._create_formatters(cp)
else:
formatters = {}
# Really ugly.. but logging fails to import MaKaC.common.logger.IndicoMailFormatter
# when using it in the class= option...
if 'mailFormatter' in formatters:
f = formatters.get('mailFormatter')
if f:
formatters['mailFormatter'] = IndicoMailFormatter(f._fmt, f.datefmt)
# if there is a problem with the config file, set some sane defaults
if not parsed_files:
formatters['defaultFormatter'] = logging.Formatter(
'%(asctime)s %(levelname)-7s %(request_id)s %(name)-25s %(message)s')
cls._bootstrap_cp(cp, defaultArgs)
logging._acquireLock()
try:
logging._handlers.clear()
del logging._handlerList[:]
handlers = cls._install_handlers(cp, defaultArgs, formatters, filters)
logging.config._install_loggers(cp, handlers, False)
finally:
logging._releaseLock()
return handlers
@classmethod
def _install_handlers(cls, cp, defaultArgs, formatters, filters=None):
"""
Install and return handlers. If a handler configuration
is missing its args, fetches the default values from the
indico.conf file
"""
hlist = cp.get("handlers", "keys")
hlist = hlist.split(",")
handlers = {}
fixups = [] # for inter-handler references
for hand in hlist:
sectname = "handler_%s" % hand.strip()
opts = cp.options(sectname)
if "class" in opts:
klass = cp.get(sectname, "class")
else:
klass = defaultArgs[hand.strip()][0]
if "formatter" in opts:
fmt = cp.get(sectname, "formatter")
else:
fmt = ""
klass = eval(klass, vars(logging))
if "args" in opts:
# if the args are not present in the file,
# take default values
args = cp.get(sectname, "args")
else:
try:
args = defaultArgs[hand.strip()][1]
except KeyError:
continue
args = eval(args, vars(logging))
h = apply(klass, args)
if "level" in opts:
level = cp.get(sectname, "level")
h.setLevel(logging._levelNames[level])
else:
h.setLevel(logging._levelNames[defaultArgs[hand.strip()][2]])
if len(fmt):
h.setFormatter(formatters[fmt])
if filters and hand.strip() in filters:
for fltr in filters[hand.strip()]:
h.addFilter(fltr)
#temporary hack for FileHandler and MemoryHandler.
if klass == logging.handlers.MemoryHandler:
if "target" in opts:
target = cp.get(sectname,"target")
else:
target = ""
if len(target): #the target handler may not be loaded yet, so keep for later...
fixups.append((h, target))
handlers[hand] = h
#now all handlers are loaded, fixup inter-handler references...
for h, t in fixups:
h.setTarget(handlers[t])
return handlers
class Logger:
"""
Encapsulates the features provided by the standard logging module
"""
handlers = {}
@classmethod
def initialize(cls):
# Lists of filters for each handler
filters = {'indico': [AddIDFilter('indico')],
'other': [ExtraIndicoFilter()],
'smtp': [AddIDFilter('indico')]}
config = Config.getInstance()
if 'files' in config.getLoggers():
logConfFilepath = os.path.join(config.getConfigurationDir(), 'logging.conf')
smtpServer = config.getSmtpServer()
serverName = config.getWorkerName()
if not serverName:
serverName = config.getHostNameURL()
# Default arguments for the handlers, taken mostly for the configuration
defaultArgs = {
'indico': ("FileHandler", "('%s', 'a')" % cls._log_path('indico.log'), 'DEBUG'),
'other': ("FileHandler", "('%s', 'a')" % cls._log_path('other.log'), 'DEBUG'),
'smtp': (
"handlers.SMTPHandler", "(%s, 'logger@%s', ['%s'], 'Unexpected Exception occurred at %s')"
% (smtpServer, serverName, config.getSupportEmail(), serverName), "ERROR")
}
cls.handlers.update(LoggerUtils.configFromFile(logConfFilepath, defaultArgs, filters))
@classmethod
def init_app(cls, app):
"""
Initialize Flask app logging (add Sentry if needed)
"""
config = Config.getInstance()
if 'sentry' in config.getLoggers():
from raven.contrib.flask import Sentry
app.config['SENTRY_DSN'] = config.getSentryDSN()
# Plug into both Flask and `logging`
Sentry(app, logging=True, level=getattr(logging, config.getSentryLoggingLevel()))
@classmethod
def reset(cls):
"""
Reset the config, using new paths, etc (useful for testing)
"""
if cls.handlers:
for handler in copy.copy(cls.handlers):
cls.removeHandler(handler)
cls.initialize()
@classmethod
def removeHandler(cls, handlerName):
if cls.handlers:
handler = cls.handlers.get(handlerName)
if handler and handler in cls.handlers:
del cls.handlers[handlerName]
logging.root.handlers.remove(handler)
@classmethod
def get(cls, module=None):
return logging.getLogger('indico' if module is None else 'indico.' + module)
@classmethod
def _log_path(cls, fname):
config = Config.getInstance()
configDir = config.getLogDir()
fpath = os.path.join(configDir, fname)
if not os.access(os.path.dirname(fpath), os.W_OK):
# if the file in the config is not accessible, use a "local" one
fpath = os.path.join(os.getcwd(), '.indico.log')
return fpath.replace('\\', '\\\\')
Logger.initialize()
|
gpl-3.0
| -3,335,148,615,844,740,600 | 34.958904 | 110 | 0.57381 | false |
jolyonb/edx-platform
|
lms/djangoapps/badges/backends/badgr.py
|
1
|
6775
|
"""
Badge Awarding backend for Badgr-Server.
"""
from __future__ import absolute_import
import hashlib
import logging
import mimetypes
import requests
import six
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from eventtracking import tracker
from lazy import lazy
from requests.packages.urllib3.exceptions import HTTPError
from badges.backends.base import BadgeBackend
from badges.models import BadgeAssertion
MAX_SLUG_LENGTH = 255
LOGGER = logging.getLogger(__name__)
class BadgrBackend(BadgeBackend):
"""
Backend for Badgr-Server by Concentric Sky. http://info.badgr.io/
"""
badges = []
def __init__(self):
super(BadgrBackend, self).__init__()
if not settings.BADGR_API_TOKEN:
raise ImproperlyConfigured("BADGR_API_TOKEN not set.")
@lazy
def _base_url(self):
"""
Base URL for all API requests.
"""
return "{}/v1/issuer/issuers/{}".format(settings.BADGR_BASE_URL, settings.BADGR_ISSUER_SLUG)
@lazy
def _badge_create_url(self):
"""
URL for generating a new Badge specification
"""
return "{}/badges".format(self._base_url)
def _badge_url(self, slug):
"""
Get the URL for a course's badge in a given mode.
"""
return "{}/{}".format(self._badge_create_url, slug)
def _assertion_url(self, slug):
"""
URL for generating a new assertion.
"""
return "{}/assertions".format(self._badge_url(slug))
def _slugify(self, badge_class):
"""
Get a compatible badge slug from the specification.
"""
slug = badge_class.issuing_component + badge_class.slug
if badge_class.issuing_component and badge_class.course_id:
# Make this unique to the course, and down to 64 characters.
# We don't do this to badges without issuing_component set for backwards compatibility.
slug = hashlib.sha256(slug + six.text_type(badge_class.course_id)).hexdigest()
if len(slug) > MAX_SLUG_LENGTH:
# Will be 64 characters.
slug = hashlib.sha256(slug).hexdigest()
return slug
def _log_if_raised(self, response, data):
"""
Log server response if there was an error.
"""
try:
response.raise_for_status()
except HTTPError:
LOGGER.error(
u"Encountered an error when contacting the Badgr-Server. Request sent to %r with headers %r.\n"
u"and data values %r\n"
u"Response status was %s.\n%s",
response.request.url, response.request.headers,
data,
response.status_code, response.content
)
raise
def _create_badge(self, badge_class):
"""
Create the badge class on Badgr.
"""
image = badge_class.image
# We don't want to bother validating the file any further than making sure we can detect its MIME type,
# for HTTP. The Badgr-Server should tell us if there's anything in particular wrong with it.
content_type, __ = mimetypes.guess_type(image.name)
if not content_type:
raise ValueError(
u"Could not determine content-type of image! Make sure it is a properly named .png file. "
u"Filename was: {}".format(image.name)
)
files = {'image': (image.name, image, content_type)}
data = {
'name': badge_class.display_name,
'criteria': badge_class.criteria,
'slug': self._slugify(badge_class),
'description': badge_class.description,
}
result = requests.post(
self._badge_create_url, headers=self._get_headers(), data=data, files=files,
timeout=settings.BADGR_TIMEOUT
)
self._log_if_raised(result, data)
def _send_assertion_created_event(self, user, assertion):
"""
Send an analytics event to record the creation of a badge assertion.
"""
tracker.emit(
'edx.badge.assertion.created', {
'user_id': user.id,
'badge_slug': assertion.badge_class.slug,
'badge_name': assertion.badge_class.display_name,
'issuing_component': assertion.badge_class.issuing_component,
'course_id': six.text_type(assertion.badge_class.course_id),
'enrollment_mode': assertion.badge_class.mode,
'assertion_id': assertion.id,
'assertion_image_url': assertion.image_url,
'assertion_json_url': assertion.assertion_url,
'issuer': assertion.data.get('issuer'),
}
)
def _create_assertion(self, badge_class, user, evidence_url):
"""
Register an assertion with the Badgr server for a particular user for a specific class.
"""
data = {
'email': user.email,
'evidence': evidence_url,
}
response = requests.post(
self._assertion_url(self._slugify(badge_class)), headers=self._get_headers(), data=data,
timeout=settings.BADGR_TIMEOUT
)
self._log_if_raised(response, data)
assertion, __ = BadgeAssertion.objects.get_or_create(user=user, badge_class=badge_class)
assertion.data = response.json()
assertion.backend = 'BadgrBackend'
assertion.image_url = assertion.data['image']
assertion.assertion_url = assertion.data['json']['id']
assertion.save()
self._send_assertion_created_event(user, assertion)
return assertion
@staticmethod
def _get_headers():
"""
Headers to send along with the request-- used for authentication.
"""
return {'Authorization': u'Token {}'.format(settings.BADGR_API_TOKEN)}
def _ensure_badge_created(self, badge_class):
"""
Verify a badge has been created for this badge class, and create it if not.
"""
slug = self._slugify(badge_class)
if slug in BadgrBackend.badges:
return
response = requests.get(self._badge_url(slug), headers=self._get_headers(), timeout=settings.BADGR_TIMEOUT)
if response.status_code != 200:
self._create_badge(badge_class)
BadgrBackend.badges.append(slug)
def award(self, badge_class, user, evidence_url=None):
"""
Make sure the badge class has been created on the backend, and then award the badge class to the user.
"""
self._ensure_badge_created(badge_class)
return self._create_assertion(badge_class, user, evidence_url)
|
agpl-3.0
| 3,786,731,688,811,023,400 | 35.820652 | 115 | 0.600886 | false |
wacax/AvazuCTR
|
csv_to_vw.py
|
1
|
3065
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
########################################################
# __Author__: Xueer Chen <snowwing922@gmail.com> #
# Kaggle competition "Click-Through Rate Prediction": #
# https://www.kaggle.com/c/avazu-ctr-prediction #
# Credit: Triskelion <info@mlwave.com> #
########################################################
from datetime import datetime
from csv import DictReader
import sys
def csv_to_vw(loc_csv, loc_output, train=True):
"""
Munges a CSV file (loc_csv) to a VW file (loc_output). Set "train"
to False when munging a test set.
TODO: Too slow for a daily cron job. Try optimize, Pandas or Go.
"""
start = datetime.now()
print("\nTurning %s into %s. Is_train_set? %s"%(loc_csv,loc_output,train))
with open(loc_output,"wb") as outfile:
for e, row in enumerate( DictReader(open(loc_csv)) ):
#Creating the features
numerical_features = ""
categorical_features = ""
for k,v in row.items():
if k not in ["id","click"]:
if len(str(v)) > 0:
categorical_features += " %s" % v
#Creating the labels
if train: #we care about labels
if row['click'] == "1":
label = 1
else:
label = -1 #we set negative label to -1
outfile.write( "%s '%s |i%s |c%s\n" % (label,row['id'],numerical_features,categorical_features) )
else: #we dont care about labels
outfile.write( "1 '%s |i%s |c%s\n" % (row['id'],numerical_features,categorical_features) )
#Reporting progress
if e % 100000 == 0:
print("%s\t%s"%(e, str(datetime.now() - start)))
print("\n %s Task execution time:\n\t%s"%(e, str(datetime.now() - start)))
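# Example of the VW lines written by csv_to_vw (id and feature values below are hypothetical):
#   train: -1 '10000169349117863715 |i |c 14102100 1005 0 1fbe01fe f3845767
#   test:   1 '10000169349117863715 |i |c 14102100 1005 0 1fbe01fe f3845767
# The |i (numerical) namespace stays empty because this script treats every column as categorical.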
def main():
return 0
if __name__ == '__main__':
# main should return 0 for success, something else (usually 1) for error.
sys.exit(main())
#csv_to_vw("/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/trainOriginal.csv",
# "/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/vw/trainOriginal.vw", train=True)
#csv_to_vw("/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/testOriginal.csv",
# "/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/vw/testOriginal.vw", train=False)
#csv_to_vw("/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/trainProbs.csv",
# "/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/vw/trainProbs.vw", train=True)
#csv_to_vw("/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/testProbs.csv",
# "/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/vw/testProbs.vw", train=False)
#csv_to_vw("/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/ProbsTfidf.csv",
# "/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/vw/ProbsTfidf.vw", train=True)
#csv_to_vw("/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/testProbsTfidf.csv",
# "/home/wacax/Wacax/Kaggle/Avazu-CTR-Prediction/AvazuCTR/Data/vw/testProbsTfidf.vw", train=False)
|
gpl-2.0
| -1,451,433,367,091,311,400 | 42.785714 | 107 | 0.630995 | false |
JMSkelton/Transformer
|
Transformer/IO/_VASP.py
|
1
|
5760
|
# Transformer/IO/_VASP.py
# -------
# Imports
# -------
import warnings;
from Transformer import Constants;
from Transformer import Structure;
from Transformer.Utilities import StructureTools;
# ---------
# Functions
# ---------
def ReadPOSCARFile(inputReader, atomicSymbolLookupTable = None):
# Variables to collect.
systemName = None;
scaleFactor = None;
latticeVectors = None;
atomTypes, atomCounts = None, None;
coordinateType, atomPositions = None, None;
# Read the system name.
systemName = next(inputReader).strip();
# Read the scale factor.
scaleFactor = float(next(inputReader).strip());
# Read the lattice vectors.
latticeVectors = [];
for i in range(0, 3):
latticeVectors.append(
[float(element) for element in next(inputReader).strip().split()][:3]
);
    # Although we sliced the list returned from split(), this does not guarantee that there were at least three elements.
for latticeVector in latticeVectors:
if len(latticeVector) != 3:
raise Exception("Error: The lattice vector specification in the supplied VASP POSCAR file is invalid.");
# Read the atom types and/or atom counts.
atomTypes = [element for element in next(inputReader).strip().split()];
atomCounts = None;
if atomTypes[0].isdigit():
atomCounts = [int(item) for item in atomTypes];
atomTypes = None;
else:
atomCounts = [int(element) for element in next(inputReader).strip().split()];
# If atom types were given in the file, check the number of atom types listed is consistent with the number of atom counts.
if atomTypes != None and len(atomTypes) != len(atomCounts):
raise Exception("Error: The atom-type and atom-count lines in the supplied VASP POSCAR file contain different numbers of entries.");
# Read the coordinate type.
coordinateType = None;
keyword = next(inputReader).strip().lower();
# Check for and skip the "selective dynamics" keyword.
if keyword[0] == "s":
keyword = next(inputReader).strip().lower();
if keyword[0] == 'd':
coordinateType = 'd';
elif keyword[0] == 'c' or keyword[0] == 'k':
coordinateType = 'c';
else:
raise Exception("Error: The coordinate-type line in the supplied VASP POSCAR file contains an unexpected keyword.");
# Read the atom positions.
totalAtomCount = 0;
for atomCount in atomCounts:
totalAtomCount = totalAtomCount + atomCount;
atomPositions = [];
for i in range(0, totalAtomCount):
elements = next(inputReader).strip().split();
atomPositions.append(
[float(element) for element in elements[:3]]
);
for atomPosition in atomPositions:
if len(atomPosition) != 3:
raise Exception("Error: One or more atomic position specifications in the supplied VASP POSCAR file is invalid.");
# If a scale factor other than 1 has been set, adjust the lattice vectors.
if scaleFactor != 1.0:
for i, vector in enumerate(latticeVectors):
latticeVectors[i] = [scaleFactor * x for x in vector];
# Build a list of atom-type numbers.
atomTypeNumbers = None;
if atomTypes != None:
# If atom types were read from the POSCAR file, convert these to atomic numbers.
atomicSymbols = [];
for atomType, atomCount in zip(atomTypes, atomCounts):
atomicSymbols = atomicSymbols + [atomType] * atomCount;
# Convert the atomic symbols to atom-type numbers.
atomTypeNumbers = [
Structure.AtomTypeToAtomTypeNumber(symbol, atomicSymbolLookupTable = atomicSymbolLookupTable)
for symbol in atomicSymbols
];
else:
# If not, issue a warning and assign negative type numbers from -1.
warnings.warn("Structure objects returned by reading VASP 4-format POSCAR files numbers will be initialised with negative atomic numbers from -1.", UserWarning);
atomTypeNumbers = [];
for i, atomCount in enumerate(atomCounts):
atomTypeNumbers = atomTypeNumbers + [-1 * (i + 1)] * atomCount;
# If the atom positions are given in Cartesian coordinates, convert them to fractional coordinates.
if coordinateType == 'c':
atomPositions = StructureTools.CartesianToFractionalCoordinates(latticeVectors, atomPositions);
# Return a Structure object.
return Structure.Structure(latticeVectors, atomPositions, atomTypeNumbers, name = systemName);
def WritePOSCARFile(structure, outputWriter, atomicSymbolLookupTable = None):
# Write the system name; Structure.GetName() returns a sensible default value if a name is not set.
outputWriter.write("{0}\n".format(structure.GetName()));
# Write the scale factor.
outputWriter.write(" {0: >19.16f}\n".format(1.0));
# Write the lattice vectors.
for ax, ay, az in structure.GetLatticeVectors():
outputWriter.write(" {0: >21.16f} {1: >21.16f} {2: >21.16f}\n".format(ax, ay, az));
# Write the atom types and counts.
atomicSymbols, atomCounts = structure.GetAtomicSymbolsCounts(atomicSymbolLookupTable = atomicSymbolLookupTable);
for atomicSymbol in atomicSymbols:
outputWriter.write(" {0: >3}".format(atomicSymbol));
outputWriter.write("\n");
for atomCount in atomCounts:
outputWriter.write(" {0: >3}".format(atomCount));
outputWriter.write("\n");
# Write the coordinate type.
outputWriter.write("Direct\n");
# Write the atom positions.
for x, y, z in structure.GetAtomPositions():
outputWriter.write(" {0: >21.16f} {1: >21.16f} {2: >21.16f}\n".format(x, y, z));
|
gpl-3.0
| -6,654,405,562,310,765,000 | 30.304348 | 169 | 0.660764 | false |
Jeff1995/oj
|
judge/ojroot.py
|
1
|
1215
|
#!/usr/bin/env python
import ConfigParser
import socket
import os
import json
def main():
cf = ConfigParser.ConfigParser()
cf.read("conf/oj.conf")
judge_host = cf.get('sandbox', 'judgeHost')
userid = int(cf.get('sandbox', 'userid'))
server = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
if os.path.exists("/tmp/judge_root.sock"):
os.unlink("/tmp/judge_root.sock")
server.bind("/tmp/judge_root.sock")
os.system('chmod 777 /tmp/judge_root.sock')
server.listen(0)
while True:
connection, address = server.accept()
infor = json.loads(connection.recv(1024).decode())
work_dir, bin, usrout, errout, input_dir, stdin, time_limit, mem_limit = infor
cmd = "%s %s %s %s %s %s %s %s %s %s"%(judge_host, work_dir, bin, usrout, errout, input_dir, stdin, time_limit, mem_limit, userid)
print cmd
tmp = os.popen(cmd).read()
result, time_used, mem_used = [int(s) for s in tmp.split()]
success = result == 0
time_exceeded = result == 2
mem_exceeded = result == 3
connection.send(json.dumps([success, time_exceeded, mem_exceeded, time_used, mem_used]).encode())
if __name__ == '__main__':
main()
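# Example of the JSON exchange over /tmp/judge_root.sock (values are hypothetical):
#   request : ["/tmp/job1", "a.out", "user.out", "err.out", "data/p1", "1.in", 1000, 65536]
#             i.e. [work_dir, bin, usrout, errout, input_dir, stdin, time_limit, mem_limit]
#   response: [true, false, false, 312, 2048]
#             i.e. [success, time_exceeded, mem_exceeded, time_used, mem_used]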
|
apache-2.0
| 2,691,872,429,397,832,000 | 32.75 | 138 | 0.622222 | false |
FiratGundogdu/dagitik
|
odeve05/message_server.py
|
1
|
8494
|
__author__ = 'firatlepirate'
import socket
import threading
import Queue
import time
import errno
class WriteThread (threading.Thread):
def __init__(self, name, cSocket, address,fihrist,threadQueue, logQueue ):
threading.Thread.__init__(self)
self.name = name
self.cSocket = cSocket
self.address = address
self.lQueue = logQueue
self.tQueue = threadQueue
self.fihrist = fihrist
self.nickname=""
self.flag=False
def run(self):
self.lQueue.put("Starting " + self.name)
while True:
			# this is used to send the messages that are
			# next in this user's queue
if self.tQueue.qsize()>0 or self.flag:
if self.nickname=="":
self.nickname=self.tQueue.get()
self.flag=True
if self.fihrist[self.nickname].qsize()>0:
queue_message = self.fihrist[self.nickname].get()
				# if it is a private message being forwarded
if not queue_message[0]=="SAY" and len(queue_message)==3 and not queue_message=="QUI" :
message_to_send = "MSG "+str(queue_message[0])+":"+str(queue_message[1])+";"+str(queue_message[2])
self.cSocket.send(str(message_to_send))
				# if it is a broadcast message
elif queue_message[0]=="SAY":
message_to_send = "SAY "+str(queue_message[1])+":"+str(queue_message[2])
self.cSocket.send(str(message_to_send))
print(message_to_send)
elif queue_message=="QUI":
					# remove the user from the directory (fihrist)
del self.fihrist[self.nickname]
break
				# otherwise it is a system message
else:
message_to_send = "SYS "+str(queue_message[1])
self.cSocket.send(str(message_to_send))
self.lQueue.put("Exiting " + self.name)
class ReadThread (threading.Thread):
def __init__(self, name, cSocket, address,fihrist,threadQueue,logQueue):
threading.Thread.__init__(self)
self.name = name
self.cSocket = cSocket
self.address = address
self.lQueue = logQueue
self.fihrist = fihrist
self.tQueue = threadQueue
self.nickname=""
def parser(self, data):
#data = data.strip()
		# if the client has not logged in yet
if not self.nickname and not data[0:3] == "USR":
response="ERL"
self.cSocket.send(response)
else:
			# if the data is malformed
if len(data)<3:
response = "ERR"
self.cSocket.send(response)
return 0
if data[0:3] == "USR":
if len(data)>4 and data[3]==" " and not data[3:len(data)]==" ":
self.nickname = data[4:]
if not self.nickname in self.fihrist:
						# if the nickname is not taken yet
response = "HEL " + self.nickname
self.cSocket.send(response)
self.fihrist[self.nickname]=Queue.Queue(10)
						# update the directory (fihrist)
#self.fihrist.update(...)
self.lQueue.put(self.nickname + " has joined.")
self.tQueue.put(self.nickname)
queue_message = ("SYS", self.nickname)
for items in self.fihrist.keys():
self.fihrist[items].put(queue_message)
return 0
else:
						# the user will be rejected
response = "REJ " + self.nickname
self.cSocket.send(response)
						# close the connection
# self.cSocket.close()
return 1
else:
response = "ERR"
self.cSocket.send(response)
elif data[0:3] == "QUI":
response = "BYE " + self.nickname
self.cSocket.send(response)
queue_message="QUI"
self.fihrist[self.nickname].put(queue_message)
			# send a log message
self.lQueue.put(self.nickname + " has left.")
			# close the connection
self.cSocket.close()
return queue_message
elif data[0:3] == "LSQ":
a=" "
for i in self.fihrist.keys():
a=a+i+":"
response="LSA"+a[:-1]
self.cSocket.send(response)
elif data[0:3] == "TIC":
response="TOC"
self.cSocket.send(response)
elif data[0:3] == "SAY":
if len(data)>4 and data[3]==" " and not data[4:]==" ":
message=data[4:]
queue_message = ("SAY", self.nickname, message)
for items in self.fihrist.keys():
self.fihrist[items].put(queue_message)
response="SOK"
self.cSocket.send(response)
elif data[0:3] == "MSG":
c=":"
if not data[4:]==" " and c in data[4:]:
to_nickname=data[4:data.index(":")]
message=data[data.index(":")+1:]
if not to_nickname in self.fihrist.keys():
response = "MNO"
else:
queue_message = (to_nickname, self.nickname, message)
					# get the recipient's queue from the directory (fihrist) and put the message into it
self.fihrist[to_nickname].put(queue_message)
response = "MOK"
self.cSocket.send(response)
else:
			# if nothing matched, report a protocol error
response = "ERR"
self.cSocket.send(response)
def run(self):
self.lQueue.put("Starting " + self.name)
while True:
try:
incoming_data=self.cSocket.recv(1024)
except socket.error ,e:
err=e.args[0]
if err == errno.EAGAIN or err == errno.EWOULDBLOCK:
time.sleep(1)
print 'No data available'
continue
else:
print("ERROR"+str(e))
queue_message = self.parser(incoming_data)
if(queue_message)=="QUI":
break
self.lQueue.put("Exiting " + self.name)
print(threading.activeCount())
class LoggerThread (threading.Thread):
def __init__(self, name, logQueue, logFileName):
threading.Thread.__init__(self)
self.name = name
self.lQueue = logQueue
self.fileName=logFileName
		# open the file in append mode
self.fid = open(self.fileName, "a")
def log(self,message):
		# write the incoming message together with a timestamp
t = time.ctime()
self.fid.write(t+":"+" "+ message+"\n")
self.fid.flush()
def run(self):
self.log("Starting " + self.name)
while True:
if self.lQueue.qsize() > 0:
				# if there is a new message in lQueue
				# call the self.log() method
to_be_logged = self.lQueue.get()
self.log(to_be_logged)
self.log("Exiting" + self.name)
self.fid.close()
userList={}
loggerQueue= Queue.Queue()
thread3=LoggerThread("LoggerThread",loggerQueue,"log.txt")
thread3.start()
s = socket.socket()
#host = socket.gethostname()
host="127.0.0.1"
print("host"+host)
port = 12345
s.bind((host, port))
s.listen(5)
threadCounter=0
threadCounter2=0
while True:
loggerQueue.put("Waiting for connection")
print "Waiting for connection"
c, addr = s.accept()
workQueue = Queue.Queue()
loggerQueue.put("Got a connection from " + str(addr))
print "Got a connection from ", addr
threadCounter += 1
thread = ReadThread("ReadThread"+str(threadCounter), c, addr,userList,workQueue,loggerQueue)
threadCounter2 += 1
thread2 = WriteThread("WriteThread"+str(threadCounter2), c, addr,userList,workQueue,loggerQueue)
thread.start()
thread2.start()
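# Protocol summary (illustrative; inferred from parser() and WriteThread above):
#   USR <nick>       -> "HEL <nick>" on success, "REJ <nick>" if the nickname is taken
#   LSQ              -> "LSA <nick1>:<nick2>:..." listing connected users
#   TIC              -> "TOC" (keep-alive)
#   SAY <text>       -> "SOK" to the sender; everyone receives "SAY <nick>:<text>"
#   MSG <to>:<text>  -> "MOK" (or "MNO" if <to> is unknown); <to> receives "MSG <to>:<from>;<text>"
#   QUI              -> "BYE <nick>" and the connection is closed
#   Malformed input gets "ERR"; any command before USR gets "ERL".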
|
gpl-2.0
| -7,592,662,664,281,051,000 | 32.050584 | 122 | 0.494467 | false |
nlhepler/freetype-py3
|
examples/glyph-vector-2.py
|
1
|
3414
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
#
# FreeType high-level python API - Copyright 2011 Nicolas P. Rougier
# Distributed under the terms of the new BSD license.
#
# -----------------------------------------------------------------------------
'''
Show how to access glyph outline description.
'''
from freetype import *
if __name__ == '__main__':
import numpy
import matplotlib.pyplot as plt
from matplotlib.path import Path
import matplotlib.patches as patches
face = Face(b'./Vera.ttf')
face.set_char_size( 32*64 )
face.load_char('g')
slot = face.glyph
bitmap = face.glyph.bitmap
width = face.glyph.bitmap.width
rows = face.glyph.bitmap.rows
pitch = face.glyph.bitmap.pitch
data = []
for i in range(rows):
data.extend(bitmap.buffer[i*pitch:i*pitch+width])
Z = numpy.array(data,dtype=numpy.ubyte).reshape(rows, width)
outline = slot.outline
points = numpy.array(outline.points, dtype=[('x',float), ('y',float)])
x, y = points['x'], points['y']
figure = plt.figure(figsize=(8,10))
axis = figure.add_subplot(111)
#axis.scatter(points['x'], points['y'], alpha=.25)
start, end = 0, 0
VERTS, CODES = [], []
# Iterate over each contour
for i in range(len(outline.contours)):
end = outline.contours[i]
points = outline.points[start:end+1]
points.append(points[0])
tags = outline.tags[start:end+1]
tags.append(tags[0])
segments = [ [points[0],], ]
for j in range(1, len(points) ):
segments[-1].append(points[j])
if tags[j] & (1 << 0) and j < (len(points)-1):
segments.append( [points[j],] )
verts = [points[0], ]
codes = [Path.MOVETO,]
for segment in segments:
if len(segment) == 2:
verts.extend(segment[1:])
codes.extend([Path.LINETO])
elif len(segment) == 3:
verts.extend(segment[1:])
codes.extend([Path.CURVE3, Path.CURVE3])
else:
verts.append(segment[1])
codes.append(Path.CURVE3)
for i in range(1,len(segment)-2):
A,B = segment[i], segment[i+1]
C = ((A[0]+B[0])/2.0, (A[1]+B[1])/2.0)
verts.extend([ C, B ])
codes.extend([ Path.CURVE3, Path.CURVE3])
verts.append(segment[-1])
codes.append(Path.CURVE3)
VERTS.extend(verts)
CODES.extend(codes)
start = end+1
# Draw glyph
path = Path(VERTS, CODES)
glyph = patches.PathPatch(path, fill = True, facecolor=(0.8,0.5,0.8), alpha=.25, lw=0)
glyph_outline = patches.PathPatch(path, fill = False, edgecolor='black', lw=3)
plt.imshow(Z, extent=[x.min(), x.max(),y.min(), y.max()],
interpolation='nearest', cmap = plt.cm.gray_r, vmin=0, vmax=400)
plt.xticks(numpy.linspace(x.min(), x.max(), Z.shape[1]+1), ())
plt.yticks(numpy.linspace(y.min(), y.max(), Z.shape[0]+1), ())
plt.grid(color='k', linewidth=1, linestyle='-')
axis.add_patch(glyph)
axis.add_patch(glyph_outline)
axis.set_xlim(x.min(), x.max())
axis.set_ylim(y.min(), y.max())
plt.savefig('test.pdf')
plt.show()
|
bsd-3-clause
| 3,459,714,015,790,568,400 | 32.80198 | 90 | 0.52812 | false |
victorvde/dota2_nohats
|
binary.py
|
1
|
9922
|
# Copyright (c) Victor van den Elzen
# Released under the Expat license, see LICENSE file for details
from struct import pack, unpack, calcsize
from collections import OrderedDict
def getbytes(s, n):
b = s.read(n)
assert len(b) == n, "Unexpected EOF"
return b
def getbyte(s):
return getbytes(s, 1)
class Seek(object):
def __init__(self, s, *args, **kwargs):
self.old_pos = None
self.s = s
self.args = args
self.kwargs = kwargs
def __enter__(self):
self.old_pos = self.s.tell()
self.s.seek(*self.args, **self.kwargs)
def __exit__(self, exc_type, exc_value, traceback):
self.s.seek(self.old_pos)
class FakeWriteStream(object):
def __init__(self, offset=0):
self.offset = offset
def seek(self, offset):
self.offset = offset
def tell(self):
return self.offset
def write(self, data):
self.offset += len(data)
return len(data)
class BaseField(object):
def unpack(self, s):
self.data = self.unpack_data(s)
def unpack_data(self, s):
        raise NotImplementedError(self)
def pack(self, s):
self.pack_data(s, self.data)
def pack_data(self, s, data):
raise NotImplementedError(self)
def full_pack(self, s):
new_data = self.data
while True:
old_data = new_data
self.pack(FakeWriteStream(s.tell()))
new_data = self.data
if old_data == new_data:
break
self.pack(s)
def serialize(self):
return self.data
class ContainerField(BaseField):
def __getitem__(self, key):
return self.field[key]
def __setitem__(self, key, value):
self.field[key] = value
def __delitem__(self, key):
del self.field[key]
def __len__(self):
return len(self.field)
def __iter__(self):
return iter(self.field)
def __contains__(self, key):
return key in self.field
def serialize(self):
return self.field.serialize()
class Struct(ContainerField):
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def add_field(self, name, f):
assert name not in self, name
self[name] = f
input_type, v = self.input
if input_type == "data":
f.data = v.get(name, None)
elif input_type == "stream":
f.unpack(v)
else:
assert False, input_type
return f
def F(self, name, f):
return self.add_field(name, f)
def unpack(self, s):
self.field = OrderedDict()
self.input = ("stream", s)
self.fields(*self.args, **self.kwargs)
del self.input
def pack(self, s):
for name, f in self.field.items():
f.pack(s)
@property
def data(self):
data = OrderedDict()
for k, v in self.field.items():
data[k] = v.data
return data
@data.setter
def data(self, v):
self.field = OrderedDict()
self.input = ("data", v)
self.fields(*self.args, **self.kwargs)
del self.input
def serialize(self):
data = OrderedDict()
for k, v in self.field.items():
if self.should_serialize(k, v):
data[k] = v.serialize()
return data
def should_serialize(self, k, v):
return True
def fields(self):
raise NotImplementedError(self)
class Magic(BaseField):
def __init__(self, magic):
if isinstance(magic, str):
magic = magic.encode()
self.magic = magic
def unpack(self, s):
data = getbytes(s, len(self.magic))
assert data == self.magic
def pack(self, s):
s.write(self.magic)
@property
def data(self):
return self.magic.decode()
@data.setter
def data(self, v):
assert v == self.magic or v is None, v
class Format(BaseField):
def __init__(self, fmt):
if fmt[0] in "@=<>!":
bosa = fmt[0]
fmt = fmt[1:]
else:
bosa = "<"
self.bosa = bosa
self.fmt = fmt
self.single = len(fmt) == 1
def unpack_data(self, s):
fmt = self.bosa + self.fmt
size = calcsize(fmt)
b = getbytes(s, size)
data = unpack(fmt, b)
if self.single:
assert len(data) == 1
data = data[0]
return data
def pack_data(self, s, data):
if self.single:
data = (data,)
s.write(pack(self.fmt, *data))
class BaseArray(ContainerField):
def __init__(self, field_maker=None, field_function=None):
if field_function is None:
field_function = lambda i, f: field_maker()
self.field_fun = field_function
self._dict = None
def unpack(self, s):
self.field = [self.field_fun(i, self) for i in range(self.size)]
for f in self:
f.unpack(s)
def pack(self, s):
for f in self:
f.pack(s)
@property
def data(self):
return [f.data for f in self]
def index(self, field):
if self._dict is None:
self._dict = {}
for i in range(len(self.field)):
self._dict[self.field[i]] = i
return self._dict[field]
@data.setter
def data(self, v):
self.field = [self.field_fun(i, self) for i in range(len(v))]
for f, fv in zip(self.field, v):
f.data = fv
self._dict = None
def serialize(self):
return [f.serialize() for f in self]
def append_data(self, v):
idx = len(self.field)
f = self.field_fun(idx, self)
self.field.append(f)
f.data = v
if self._dict is not None:
self._dict[f] = idx
class Array(BaseArray):
def __init__(self, size, *args, **kwargs):
self.size = size
BaseArray.__init__(self, *args, **kwargs)
class PrefixedArray(BaseArray):
def __init__(self, prefix_field, *args, **kwargs):
self.prefix_field = prefix_field
BaseArray.__init__(self, *args, **kwargs)
@property
def size(self):
return self.prefix_field.data
def unpack(self, s):
self.prefix_field.unpack(s)
BaseArray.unpack(self, s)
def pack(self, s):
self.prefix_field.data = len(self)
self.prefix_field.pack(s)
BaseArray.pack(self, s)
class BaseBlob(BaseField):
def unpack_data(self, s):
return getbytes(s, self.size)
def pack_data(self, s, data):
s.write(data)
class Blob(BaseBlob):
def __init__(self, size):
self.size = size
def serialize(self):
return None
class PrefixedBlob(BaseBlob):
def __init__(self, prefix_field, *args, **kwargs):
self.prefix_field = prefix_field
BaseBlob.__init__(self, *args, **kwargs)
@property
def size(self):
return self.prefix_field.data
def unpack(self, s):
self.prefix_field.unpack(s)
BaseBlob.unpack(self, s)
def pack(self, s):
self.prefix_field.data = len(self)
self.prefix_field.pack(s)
BaseBlob.pack(self, s)
class String(BaseField):
def unpack_data(self, s):
lc = []
c = getbyte(s)
while c != b"\0":
lc.append(c)
c = getbyte(s)
return b"".join(lc).decode()
def pack_data(self, s, data):
s.write(data.encode())
s.write(b"\0")
class FixedString(BaseField):
def __init__(self, size):
self.size = size
def unpack_data(self, s):
data = getbytes(s, self.size)
data = data.rstrip(b"\0").decode()
return data
def pack_data(self, s, data):
data = data.encode().ljust(self.size, b"\0")
s.write(data)
class Index(BaseField):
def __init__(self, array, index_field):
self.array = array
self.index_field = index_field
def unpack_data(self, s):
self.index_field.unpack(s)
return self.array[self.index_field.data].data
def pack_data(self, s, data):
try:
index = self.array.data.index(data)
except ValueError:
index = len(self.array)
self.array.append_data(data)
self.index_field.data = index
self.index_field.pack(s)
class Offset(BaseField):
def unpack_data(self, s):
return s.tell()
def pack_data(self, s, data):
self.data = s.tell()
class Pointer(ContainerField):
def __init__(self, offset, field):
self.offset = offset
self.field = field
def unpack(self, s):
with Seek(s, self.offset):
self.field.unpack(s)
@property
def data(self):
return self.field.data
@data.setter
def data(self, v):
self.field.data = v
def pack_data(self, s, data):
pass
class DataPointer(ContainerField):
def __init__(self, offset_field, field):
self.offset_field = offset_field
self.field = field
def unpack(self, s):
self.offset_field.unpack(s)
with Seek(s, self.offset_field.data):
self.field.unpack(s)
@property
def data(self):
return self.field.data
@data.setter
def data(self, v):
self.field.data = v
class Mapping(BaseField):
def __init__(self, field, mapping):
self.field = field
self.mapping = mapping
def unpack_data(self, s):
data = self.field.unpack_data(s)
return self.mapping[data]
class Flags(BaseField):
def __init__(self, field, flags):
self.field = field
self.flags = flags
def unpack_data(self, s):
data = self.field.unpack_data(s)
flag_data = []
for mask, name in self.flags:
if mask & data:
flag_data.append(name)
return flag_data
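# Minimal self-check sketch: FixedString.pack_data() relies only on the stream's
# write(), so it can be exercised without the getbytes()/getbyte() helpers defined
# earlier in this module (whose exact behaviour is assumed, not shown here).
if __name__ == "__main__":
  import io
  _buf = io.BytesIO()
  FixedString(8).pack_data(_buf, "demo")  # pads with NULs up to the fixed size
  assert _buf.getvalue() == b"demo\x00\x00\x00\x00"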
|
mit
| 739,687,719,123,784,300 | 23.559406 | 72 | 0.551199 | false |
siliconsmiley/QGIS
|
python/plugins/GdalTools/tools/doPctRgb.py
|
1
|
6670
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
doPctRgb.py
---------------------
Date : June 2010
Copyright : (C) 2010 by Giuseppe Sucameli
Email : brush dot tyler at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Giuseppe Sucameli'
__date__ = 'June 2010'
__copyright__ = '(C) 2010, Giuseppe Sucameli'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from PyQt4.QtCore import QObject, SIGNAL, QCoreApplication
from PyQt4.QtGui import QWidget
from ui_widgetConvert import Ui_GdalToolsWidget as Ui_Widget
from widgetBatchBase import GdalToolsBaseBatchWidget as BaseBatchWidget
import GdalTools_utils as Utils
class GdalToolsDialog(QWidget, Ui_Widget, BaseBatchWidget):
def __init__(self, iface):
QWidget.__init__(self)
self.iface = iface
self.setupUi(self)
BaseBatchWidget.__init__(self, self.iface, "pct2rgb.py")
# we use one widget for two tools
self.base.setWindowTitle( self.tr( "Convert paletted image to RGB" ) )
self.outSelector.setType( self.outSelector.FILE )
# set the default QSpinBoxes and QProgressBar value
self.bandSpin.setValue(1)
self.progressBar.setValue(0)
self.progressBar.hide()
self.outputFormat = Utils.fillRasterOutputFormat()
self.setParamsStatus([
(self.inSelector, SIGNAL("filenameChanged()")),
(self.outSelector, SIGNAL("filenameChanged()")),
(self.colorsSpin, SIGNAL("valueChanged(int)"), self.colorsCheck, "-1"), # hide this option
(self.bandSpin, SIGNAL("valueChanged(int)"), self.bandCheck)
])
self.connect(self.inSelector, SIGNAL("selectClicked()"), self.fillInputFile)
self.connect(self.outSelector, SIGNAL("selectClicked()"), self.fillOutputFileEdit)
self.connect( self.batchCheck, SIGNAL( "stateChanged( int )" ), self.switchToolMode )
# switch to batch or normal mode
def switchToolMode( self ):
self.setCommandViewerEnabled( not self.batchCheck.isChecked() )
self.progressBar.setVisible( self.batchCheck.isChecked() )
self.inSelector.setType( self.inSelector.FILE if self.batchCheck.isChecked() else self.inSelector.FILE_LAYER )
self.outSelector.clear()
if self.batchCheck.isChecked():
self.inFileLabel = self.label.text()
self.outFileLabel = self.label_2.text()
self.label.setText( QCoreApplication.translate( "GdalTools", "&Input directory" ) )
self.label_2.setText( QCoreApplication.translate( "GdalTools", "&Output directory" ) )
QObject.disconnect( self.inSelector, SIGNAL( "selectClicked()" ), self.fillInputFile )
QObject.disconnect( self.outSelector, SIGNAL( "selectClicked()" ), self.fillOutputFileEdit )
QObject.connect( self.inSelector, SIGNAL( "selectClicked()" ), self.fillInputDir )
QObject.connect( self.outSelector, SIGNAL( "selectClicked()" ), self.fillOutputDir )
else:
self.label.setText( self.inFileLabel )
self.label_2.setText( self.outFileLabel )
QObject.disconnect( self.inSelector, SIGNAL( "selectClicked()" ), self.fillInputDir )
QObject.disconnect( self.outSelector, SIGNAL( "selectClicked()" ), self.fillOutputDir )
QObject.connect( self.inSelector, SIGNAL( "selectClicked()" ), self.fillInputFile )
QObject.connect( self.outSelector, SIGNAL( "selectClicked()" ), self.fillOutputFileEdit )
def onLayersChanged(self):
self.inSelector.setLayers( Utils.LayerRegistry.instance().getRasterLayers() )
def fillInputFile(self):
lastUsedFilter = Utils.FileFilter.lastUsedRasterFilter()
inputFile = Utils.FileDialog.getOpenFileName(self, self.tr( "Select the input file for convert" ), Utils.FileFilter.allRastersFilter(), lastUsedFilter )
if not inputFile:
return
Utils.FileFilter.setLastUsedRasterFilter(lastUsedFilter)
self.inSelector.setFilename(inputFile)
def fillOutputFileEdit(self):
lastUsedFilter = Utils.FileFilter.lastUsedRasterFilter()
outputFile = Utils.FileDialog.getSaveFileName(self, self.tr( "Select the raster file to save the results to" ), Utils.FileFilter.saveRastersFilter(), lastUsedFilter )
if not outputFile:
return
Utils.FileFilter.setLastUsedRasterFilter(lastUsedFilter)
self.outputFormat = Utils.fillRasterOutputFormat( lastUsedFilter, outputFile )
self.outSelector.setFilename(outputFile)
def fillInputDir( self ):
inputDir = Utils.FileDialog.getExistingDirectory( self, self.tr( "Select the input directory with files for convert" ))
if not inputDir:
return
self.inSelector.setFilename( inputDir )
def fillOutputDir( self ):
outputDir = Utils.FileDialog.getExistingDirectory( self, self.tr( "Select the output directory to save the results to" ))
if not outputDir:
return
self.outSelector.setFilename( outputDir )
def getArguments(self):
arguments = []
if self.bandCheck.isChecked():
arguments.append( "-b")
arguments.append( unicode( self.bandSpin.value() ))
if self.isBatchEnabled():
return arguments
outputFn = self.getOutputFileName()
if outputFn:
arguments.append( "-of")
arguments.append( self.outputFormat)
arguments.append( self.getInputFileName())
arguments.append( outputFn)
return arguments
def getInputFileName(self):
return self.inSelector.filename()
def getOutputFileName(self):
return self.outSelector.filename()
def addLayerIntoCanvas(self, fileInfo):
self.iface.addRasterLayer(fileInfo.filePath())
def isBatchEnabled(self):
return self.batchCheck.isChecked()
def setProgressRange(self, maximum):
self.progressBar.setRange(0, maximum)
def updateProgress(self, index, total):
if index < total:
self.progressBar.setValue( index + 1 )
else:
self.progressBar.setValue( 0 )
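# Rough shape of the resulting command line (the output format value is an assumption,
# it comes from Utils.fillRasterOutputFormat): with band 2 checked in a non-batch run,
# getArguments() yields something like
#   ['-b', '2', '-of', 'GTiff', '/path/input.tif', '/path/output.tif']
# which BaseBatchWidget presumably combines with the tool name passed in __init__
# ("pct2rgb.py") to build the actual process invocation.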
|
gpl-2.0
| 2,120,766,154,415,502,600 | 39.920245 | 172 | 0.647526 | false |
widowild/messcripts
|
exercice/python3/solutions_exercices/exercice_10_33.py
|
1
|
1110
|
#! /usr/bin/env python
# -*- coding:Utf8 -*-
## This variant uses a list of lists ##
## (which could easily be replaced by two separate lists)
# The list below contains two elements that are themselves lists.
# Element 0 holds the number of days in each month, while
# element 1 holds the names of the twelve months:
mois = [[31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31],
['Janvier', 'Février', 'Mars', 'Avril', 'Mai', 'Juin', 'Juillet',
'Août', 'Septembre', 'Octobre', 'Novembre', 'Décembre']]
jour = ['Dimanche','Lundi','Mardi','Mercredi','Jeudi','Vendredi','Samedi']
ja, jm, js, m = 0, 0, 0, 0
while ja <365:
    ja, jm = ja +1, jm +1 # ja = day of the year, jm = day of the month
    js = (ja +3) % 7 # js = day of the week; the added offset
                     # selects the starting weekday
    if jm > mois[0][m]: # element m of element 0 of the list
        jm, m = 1, m+1
    print(jour[js], jm, mois[1][m]) # element m of element 1 of the list
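# Worked example of the weekday offset above: jour[0] is 'Dimanche' (Sunday), so
# day ja = 1 gives js = (1 + 3) % 7 = 4, i.e. 'Jeudi' (Thursday): the year starts
# on a Thursday, and changing the constant 3 shifts the starting weekday.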
|
gpl-3.0
| 3,744,835,426,236,196,400 | 40.807692 | 77 | 0.586937 | false |
santisiri/popego
|
envs/ALPHA-POPEGO/lib/python2.5/site-packages/twisted/internet/tcp.py
|
1
|
31318
|
# -*- test-case-name: twisted.test.test_tcp -*-
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
"""Various asynchronous TCP/IP classes.
End users shouldn't use this module directly - use the reactor APIs instead.
Maintainer: U{Itamar Shtull-Trauring<mailto:twisted@itamarst.org>}
"""
# System Imports
import os
import stat
import types
import exceptions
import socket
import sys
import select
import operator
import warnings
try:
import fcntl
except ImportError:
fcntl = None
from zope.interface import implements, classImplements
try:
from OpenSSL import SSL
except ImportError:
SSL = None
from twisted.python.runtime import platform, platformType
if platformType == 'win32':
# no such thing as WSAEPERM or error code 10001 according to winsock.h or MSDN
EPERM = object()
from errno import WSAEINVAL as EINVAL
from errno import WSAEWOULDBLOCK as EWOULDBLOCK
from errno import WSAEINPROGRESS as EINPROGRESS
from errno import WSAEALREADY as EALREADY
from errno import WSAECONNRESET as ECONNRESET
from errno import WSAEISCONN as EISCONN
from errno import WSAENOTCONN as ENOTCONN
from errno import WSAEINTR as EINTR
from errno import WSAENOBUFS as ENOBUFS
from errno import WSAEMFILE as EMFILE
# No such thing as WSAENFILE, either.
ENFILE = object()
# Nor ENOMEM
ENOMEM = object()
EAGAIN = EWOULDBLOCK
from errno import WSAECONNRESET as ECONNABORTED
else:
from errno import EPERM
from errno import EINVAL
from errno import EWOULDBLOCK
from errno import EINPROGRESS
from errno import EALREADY
from errno import ECONNRESET
from errno import EISCONN
from errno import ENOTCONN
from errno import EINTR
from errno import ENOBUFS
from errno import EMFILE
from errno import ENFILE
from errno import ENOMEM
from errno import EAGAIN
from errno import ECONNABORTED
from errno import errorcode
# Twisted Imports
from twisted.internet import protocol, defer, base, address
from twisted.persisted import styles
from twisted.python import log, failure, reflect
from twisted.python.util import unsignedID
from twisted.internet.error import CannotListenError
# Sibling Imports
import abstract
import main
import interfaces
import error
class _SocketCloser:
_socketShutdownMethod = 'shutdown'
def _closeSocket(self):
# socket.close() doesn't *really* close if there's another reference
    # to it in the TCP/IP stack, e.g. if it was inherited by a
# subprocess. And we really do want to close the connection. So we
# use shutdown() instead, and then close() in order to release the
# filedescriptor.
skt = self.socket
try:
getattr(skt, self._socketShutdownMethod)(2)
except socket.error:
pass
try:
skt.close()
except socket.error:
pass
class _TLSMixin:
_socketShutdownMethod = 'sock_shutdown'
writeBlockedOnRead = 0
readBlockedOnWrite = 0
_userWantRead = _userWantWrite = True
def getPeerCertificate(self):
return self.socket.get_peer_certificate()
def doRead(self):
if self.writeBlockedOnRead:
self.writeBlockedOnRead = 0
self._resetReadWrite()
try:
return Connection.doRead(self)
except SSL.ZeroReturnError:
return main.CONNECTION_DONE
except SSL.WantReadError:
return
except SSL.WantWriteError:
self.readBlockedOnWrite = 1
Connection.startWriting(self)
Connection.stopReading(self)
return
except SSL.SysCallError, (retval, desc):
if ((retval == -1 and desc == 'Unexpected EOF')
or retval > 0):
return main.CONNECTION_LOST
log.err()
return main.CONNECTION_LOST
except SSL.Error, e:
return e
def doWrite(self):
# Retry disconnecting
if self.disconnected:
return self._postLoseConnection()
if self._writeDisconnected:
return self._closeWriteConnection()
if self.readBlockedOnWrite:
self.readBlockedOnWrite = 0
self._resetReadWrite()
return Connection.doWrite(self)
def writeSomeData(self, data):
try:
return Connection.writeSomeData(self, data)
except SSL.WantWriteError:
return 0
except SSL.WantReadError:
self.writeBlockedOnRead = 1
Connection.stopWriting(self)
Connection.startReading(self)
return 0
except SSL.ZeroReturnError:
return main.CONNECTION_LOST
except SSL.SysCallError, e:
if e[0] == -1 and data == "":
# errors when writing empty strings are expected
# and can be ignored
return 0
else:
return main.CONNECTION_LOST
except SSL.Error, e:
return e
def _postLoseConnection(self):
"""Gets called after loseConnection(), after buffered data is sent.
We try to send an SSL shutdown alert, but if it doesn't work, retry
when the socket is writable.
"""
self.disconnected=1
if hasattr(self.socket, 'set_shutdown'):
self.socket.set_shutdown(SSL.RECEIVED_SHUTDOWN)
return self._sendCloseAlert()
_first=False
def _sendCloseAlert(self):
# Okay, *THIS* is a bit complicated.
# Basically, the issue is, OpenSSL seems to not actually return
# errors from SSL_shutdown. Therefore, the only way to
# determine if the close notification has been sent is by
# SSL_shutdown returning "done". However, it will not claim it's
# done until it's both sent *and* received a shutdown notification.
# I don't actually want to wait for a received shutdown
# notification, though, so, I have to set RECEIVED_SHUTDOWN
# before calling shutdown. Then, it'll return True once it's
# *SENT* the shutdown.
# However, RECEIVED_SHUTDOWN can't be left set, because then
# reads will fail, breaking half close.
# Also, since shutdown doesn't report errors, an empty write call is
# done first, to try to detect if the connection has gone away.
# (*NOT* an SSL_write call, because that fails once you've called
# shutdown)
try:
os.write(self.socket.fileno(), '')
except OSError, se:
if se.args[0] in (EINTR, EWOULDBLOCK, ENOBUFS):
return 0
# Write error, socket gone
return main.CONNECTION_LOST
try:
if hasattr(self.socket, 'set_shutdown'):
laststate = self.socket.get_shutdown()
self.socket.set_shutdown(laststate | SSL.RECEIVED_SHUTDOWN)
done = self.socket.shutdown()
if not (laststate & SSL.RECEIVED_SHUTDOWN):
self.socket.set_shutdown(SSL.SENT_SHUTDOWN)
else:
#warnings.warn("SSL connection shutdown possibly unreliable, "
# "please upgrade to ver 0.XX", category=UserWarning)
self.socket.shutdown()
done = True
except SSL.Error, e:
return e
if done:
self.stopWriting()
# Note that this is tested for by identity below.
return main.CONNECTION_DONE
else:
self.startWriting()
return None
def _closeWriteConnection(self):
result = self._sendCloseAlert()
if result is main.CONNECTION_DONE:
return Connection._closeWriteConnection(self)
return result
def startReading(self):
self._userWantRead = True
if not self.readBlockedOnWrite:
return Connection.startReading(self)
def stopReading(self):
self._userWantRead = False
if not self.writeBlockedOnRead:
return Connection.stopReading(self)
def startWriting(self):
self._userWantWrite = True
if not self.writeBlockedOnRead:
return Connection.startWriting(self)
def stopWriting(self):
self._userWantWrite = False
if not self.readBlockedOnWrite:
return Connection.stopWriting(self)
def _resetReadWrite(self):
# After changing readBlockedOnWrite or writeBlockedOnRead,
# call this to reset the state to what the user requested.
if self._userWantWrite:
self.startWriting()
else:
self.stopWriting()
if self._userWantRead:
self.startReading()
else:
self.stopReading()
def _getTLSClass(klass, _existing={}):
if klass not in _existing:
class TLSConnection(_TLSMixin, klass):
implements(interfaces.ISSLTransport)
_existing[klass] = TLSConnection
return _existing[klass]
class Connection(abstract.FileDescriptor, _SocketCloser):
"""I am the superclass of all socket-based FileDescriptors.
This is an abstract superclass of all objects which represent a TCP/IP
connection based socket.
"""
implements(interfaces.ITCPTransport, interfaces.ISystemHandle)
TLS = 0
def __init__(self, skt, protocol, reactor=None):
abstract.FileDescriptor.__init__(self, reactor=reactor)
self.socket = skt
self.socket.setblocking(0)
self.fileno = skt.fileno
self.protocol = protocol
if SSL:
def startTLS(self, ctx):
assert not self.TLS
error=False
if self.dataBuffer or self._tempDataBuffer:
self.dataBuffer += "".join(self._tempDataBuffer)
self._tempDataBuffer = []
self._tempDataLen = 0
written = self.writeSomeData(buffer(self.dataBuffer, self.offset))
offset = self.offset
dataLen = len(self.dataBuffer)
self.offset = 0
self.dataBuffer = ""
if isinstance(written, Exception) or (offset + written != dataLen):
error=True
self.stopReading()
self.stopWriting()
self._startTLS()
self.socket = SSL.Connection(ctx.getContext(), self.socket)
self.fileno = self.socket.fileno
self.startReading()
if error:
warnings.warn("startTLS with unwritten buffered data currently doesn't work right. See issue #686. Closing connection.", category=RuntimeWarning, stacklevel=2)
self.loseConnection()
return
def _startTLS(self):
self.TLS = 1
self.__class__ = _getTLSClass(self.__class__)
def getHandle(self):
"""Return the socket for this connection."""
return self.socket
def doRead(self):
"""Calls self.protocol.dataReceived with all available data.
This reads up to self.bufferSize bytes of data from its socket, then
calls self.dataReceived(data) to process it. If the connection is not
lost through an error in the physical recv(), this function will return
the result of the dataReceived call.
"""
try:
data = self.socket.recv(self.bufferSize)
except socket.error, se:
if se.args[0] == EWOULDBLOCK:
return
else:
return main.CONNECTION_LOST
if not data:
return main.CONNECTION_DONE
return self.protocol.dataReceived(data)
def writeSomeData(self, data):
"""Connection.writeSomeData(data) -> #of bytes written | CONNECTION_LOST
This writes as much data as possible to the socket and returns either
    the number of bytes written (which is positive) or a connection error code
(which is negative)
"""
try:
# Limit length of buffer to try to send, because some OSes are too
# stupid to do so themselves (ahem windows)
return self.socket.send(buffer(data, 0, self.SEND_LIMIT))
except socket.error, se:
if se.args[0] == EINTR:
return self.writeSomeData(data)
elif se.args[0] in (EWOULDBLOCK, ENOBUFS):
return 0
else:
return main.CONNECTION_LOST
def _closeWriteConnection(self):
try:
getattr(self.socket, self._socketShutdownMethod)(1)
except socket.error:
pass
p = interfaces.IHalfCloseableProtocol(self.protocol, None)
if p:
try:
p.writeConnectionLost()
except:
f = failure.Failure()
log.err()
self.connectionLost(f)
def readConnectionLost(self, reason):
p = interfaces.IHalfCloseableProtocol(self.protocol, None)
if p:
try:
p.readConnectionLost()
except:
log.err()
self.connectionLost(failure.Failure())
else:
self.connectionLost(reason)
def connectionLost(self, reason):
"""See abstract.FileDescriptor.connectionLost().
"""
abstract.FileDescriptor.connectionLost(self, reason)
self._closeSocket()
protocol = self.protocol
del self.protocol
del self.socket
del self.fileno
protocol.connectionLost(reason)
logstr = "Uninitialized"
def logPrefix(self):
"""Return the prefix to log with when I own the logging thread.
"""
return self.logstr
def getTcpNoDelay(self):
return operator.truth(self.socket.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY))
def setTcpNoDelay(self, enabled):
self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, enabled)
def getTcpKeepAlive(self):
return operator.truth(self.socket.getsockopt(socket.SOL_SOCKET,
socket.SO_KEEPALIVE))
def setTcpKeepAlive(self, enabled):
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, enabled)
if SSL:
classImplements(Connection, interfaces.ITLSTransport)
class BaseClient(Connection):
"""A base class for client TCP (and similiar) sockets.
"""
addressFamily = socket.AF_INET
socketType = socket.SOCK_STREAM
def _finishInit(self, whenDone, skt, error, reactor):
"""Called by base classes to continue to next stage of initialization."""
if whenDone:
Connection.__init__(self, skt, None, reactor)
self.doWrite = self.doConnect
self.doRead = self.doConnect
reactor.callLater(0, whenDone)
else:
reactor.callLater(0, self.failIfNotConnected, error)
def startTLS(self, ctx, client=1):
holder = Connection.startTLS(self, ctx)
if client:
self.socket.set_connect_state()
else:
self.socket.set_accept_state()
return holder
def stopConnecting(self):
"""Stop attempt to connect."""
self.failIfNotConnected(error.UserError())
def failIfNotConnected(self, err):
"""
    Generic method called when the attempt to connect failed. It basically
    cleans everything it can: calls connectionFailed, stops reading and writing,
    and deletes socket-related members.
"""
if (self.connected or self.disconnected or
not hasattr(self, "connector")):
return
self.connector.connectionFailed(failure.Failure(err))
if hasattr(self, "reactor"):
# this doesn't happen if we failed in __init__
self.stopReading()
self.stopWriting()
del self.connector
try:
self._closeSocket()
except AttributeError:
pass
else:
del self.socket, self.fileno
def createInternetSocket(self):
"""(internal) Create a non-blocking socket using
self.addressFamily, self.socketType.
"""
s = socket.socket(self.addressFamily, self.socketType)
s.setblocking(0)
if fcntl and hasattr(fcntl, 'FD_CLOEXEC'):
old = fcntl.fcntl(s.fileno(), fcntl.F_GETFD)
fcntl.fcntl(s.fileno(), fcntl.F_SETFD, old | fcntl.FD_CLOEXEC)
return s
def resolveAddress(self):
if abstract.isIPAddress(self.addr[0]):
self._setRealAddress(self.addr[0])
else:
d = self.reactor.resolve(self.addr[0])
d.addCallbacks(self._setRealAddress, self.failIfNotConnected)
def _setRealAddress(self, address):
self.realAddress = (address, self.addr[1])
self.doConnect()
def doConnect(self):
"""I connect the socket.
Then, call the protocol's makeConnection, and start waiting for data.
"""
if not hasattr(self, "connector"):
# this happens when connection failed but doConnect
# was scheduled via a callLater in self._finishInit
return
err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
if err:
self.failIfNotConnected(error.getConnectError((err, os.strerror(err))))
return
# doConnect gets called twice. The first time we actually need to
# start the connection attempt. The second time we don't really
# want to (SO_ERROR above will have taken care of any errors, and if
# it reported none, the mere fact that doConnect was called again is
# sufficient to indicate that the connection has succeeded), but it
# is not /particularly/ detrimental to do so. This should get
# cleaned up some day, though.
try:
connectResult = self.socket.connect_ex(self.realAddress)
except socket.error, se:
connectResult = se.args[0]
if connectResult:
if connectResult == EISCONN:
pass
# on Windows EINVAL means sometimes that we should keep trying:
# http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winsock/winsock/connect_2.asp
elif ((connectResult in (EWOULDBLOCK, EINPROGRESS, EALREADY)) or
(connectResult == EINVAL and platformType == "win32")):
self.startReading()
self.startWriting()
return
else:
self.failIfNotConnected(error.getConnectError((connectResult, os.strerror(connectResult))))
return
# If I have reached this point without raising or returning, that means
# that the socket is connected.
del self.doWrite
del self.doRead
# we first stop and then start, to reset any references to the old doRead
self.stopReading()
self.stopWriting()
self._connectDone()
def _connectDone(self):
self.protocol = self.connector.buildProtocol(self.getPeer())
self.connected = 1
self.protocol.makeConnection(self)
self.logstr = self.protocol.__class__.__name__+",client"
self.startReading()
def connectionLost(self, reason):
if not self.connected:
self.failIfNotConnected(error.ConnectError(string=reason))
else:
Connection.connectionLost(self, reason)
self.connector.connectionLost(reason)
class Client(BaseClient):
"""A TCP client."""
def __init__(self, host, port, bindAddress, connector, reactor=None):
# BaseClient.__init__ is invoked later
self.connector = connector
self.addr = (host, port)
whenDone = self.resolveAddress
err = None
skt = None
try:
skt = self.createInternetSocket()
except socket.error, se:
err = error.ConnectBindError(se[0], se[1])
whenDone = None
if whenDone and bindAddress is not None:
try:
skt.bind(bindAddress)
except socket.error, se:
err = error.ConnectBindError(se[0], se[1])
whenDone = None
self._finishInit(whenDone, skt, err, reactor)
def getHost(self):
"""Returns an IPv4Address.
This indicates the address from which I am connecting.
"""
return address.IPv4Address('TCP', *(self.socket.getsockname() + ('INET',)))
def getPeer(self):
"""Returns an IPv4Address.
This indicates the address that I am connected to.
"""
return address.IPv4Address('TCP', *(self.addr + ('INET',)))
def __repr__(self):
s = '<%s to %s at %x>' % (self.__class__, self.addr, unsignedID(self))
return s
class Server(Connection):
"""Serverside socket-stream connection class.
I am a serverside network connection transport; a socket which came from an
accept() on a server.
"""
def __init__(self, sock, protocol, client, server, sessionno):
"""Server(sock, protocol, client, server, sessionno)
Initialize me with a socket, a protocol, a descriptor for my peer (a
tuple of host, port describing the other end of the connection), an
instance of Port, and a session number.
"""
Connection.__init__(self, sock, protocol)
self.server = server
self.client = client
self.sessionno = sessionno
self.hostname = client[0]
self.logstr = "%s,%s,%s" % (self.protocol.__class__.__name__, sessionno, self.hostname)
self.repstr = "<%s #%s on %s>" % (self.protocol.__class__.__name__, self.sessionno, self.server.port)
self.startReading()
self.connected = 1
def __repr__(self):
"""A string representation of this connection.
"""
return self.repstr
def startTLS(self, ctx, server=1):
holder = Connection.startTLS(self, ctx)
if server:
self.socket.set_accept_state()
else:
self.socket.set_connect_state()
return holder
def getHost(self):
"""Returns an IPv4Address.
This indicates the server's address.
"""
return address.IPv4Address('TCP', *(self.socket.getsockname() + ('INET',)))
def getPeer(self):
"""Returns an IPv4Address.
This indicates the client's address.
"""
return address.IPv4Address('TCP', *(self.client + ('INET',)))
class Port(base.BasePort, _SocketCloser):
"""I am a TCP server port, listening for connections.
When a connection is accepted, I will call my factory's buildProtocol with
the incoming connection as an argument, according to the specification
described in twisted.internet.interfaces.IProtocolFactory.
If you wish to change the sort of transport that will be used, my
`transport' attribute will be called with the signature expected for
Server.__init__, so it can be replaced.
"""
implements(interfaces.IListeningPort)
addressFamily = socket.AF_INET
socketType = socket.SOCK_STREAM
transport = Server
sessionno = 0
interface = ''
backlog = 50
# Actual port number being listened on, only set to a non-None
# value when we are actually listening.
_realPortNumber = None
def __init__(self, port, factory, backlog=50, interface='', reactor=None):
"""Initialize with a numeric port to listen on.
"""
base.BasePort.__init__(self, reactor=reactor)
self.port = port
self.factory = factory
self.backlog = backlog
self.interface = interface
def __repr__(self):
if self._realPortNumber is not None:
return "<%s of %s on %s>" % (self.__class__, self.factory.__class__,
self._realPortNumber)
else:
return "<%s of %s (not listening)>" % (self.__class__, self.factory.__class__)
def createInternetSocket(self):
s = base.BasePort.createInternetSocket(self)
if platformType == "posix" and sys.platform != "cygwin":
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
return s
def startListening(self):
"""Create and bind my socket, and begin listening on it.
This is called on unserialization, and must be called after creating a
server to begin listening on the specified port.
"""
try:
skt = self.createInternetSocket()
skt.bind((self.interface, self.port))
except socket.error, le:
raise CannotListenError, (self.interface, self.port, le)
# Make sure that if we listened on port 0, we update that to
# reflect what the OS actually assigned us.
self._realPortNumber = skt.getsockname()[1]
log.msg("%s starting on %s" % (self.factory.__class__, self._realPortNumber))
# The order of the next 6 lines is kind of bizarre. If no one
# can explain it, perhaps we should re-arrange them.
self.factory.doStart()
skt.listen(self.backlog)
self.connected = 1
self.socket = skt
self.fileno = self.socket.fileno
self.numberAccepts = 100
self.startReading()
def _buildAddr(self, (host, port)):
return address._ServerFactoryIPv4Address('TCP', host, port)
def doRead(self):
"""Called when my socket is ready for reading.
This accepts a connection and calls self.protocol() to handle the
wire-level protocol.
"""
try:
if platformType == "posix":
numAccepts = self.numberAccepts
else:
# win32 event loop breaks if we do more than one accept()
# in an iteration of the event loop.
numAccepts = 1
for i in range(numAccepts):
# we need this so we can deal with a factory's buildProtocol
# calling our loseConnection
if self.disconnecting:
return
try:
skt, addr = self.socket.accept()
except socket.error, e:
if e.args[0] in (EWOULDBLOCK, EAGAIN):
self.numberAccepts = i
break
elif e.args[0] == EPERM:
# Netfilter on Linux may have rejected the
# connection, but we get told to try to accept()
# anyway.
continue
elif e.args[0] in (EMFILE, ENOBUFS, ENFILE, ENOMEM, ECONNABORTED):
# Linux gives EMFILE when a process is not allowed
# to allocate any more file descriptors. *BSD and
# Win32 give (WSA)ENOBUFS. Linux can also give
# ENFILE if the system is out of inodes, or ENOMEM
# if there is insufficient memory to allocate a new
# dentry. ECONNABORTED is documented as possible on
# both Linux and Windows, but it is not clear
# whether there are actually any circumstances under
# which it can happen (one might expect it to be
# possible if a client sends a FIN or RST after the
# server sends a SYN|ACK but before application code
# calls accept(2), however at least on Linux this
# _seems_ to be short-circuited by syncookies.
log.msg("Could not accept new connection (%s)" % (
errorcode[e.args[0]],))
break
raise
protocol = self.factory.buildProtocol(self._buildAddr(addr))
if protocol is None:
skt.close()
continue
s = self.sessionno
self.sessionno = s+1
transport = self.transport(skt, protocol, addr, self, s)
transport = self._preMakeConnection(transport)
protocol.makeConnection(transport)
else:
self.numberAccepts = self.numberAccepts+20
except:
# Note that in TLS mode, this will possibly catch SSL.Errors
# raised by self.socket.accept()
#
# There is no "except SSL.Error:" above because SSL may be
# None if there is no SSL support. In any case, all the
# "except SSL.Error:" suite would probably do is log.deferr()
# and return, so handling it here works just as well.
log.deferr()
def _preMakeConnection(self, transport):
return transport
def loseConnection(self, connDone=failure.Failure(main.CONNECTION_DONE)):
"""Stop accepting connections on this port.
This will shut down my socket and call self.connectionLost().
It returns a deferred which will fire successfully when the
port is actually closed.
"""
self.disconnecting = 1
self.stopReading()
if self.connected:
self.deferred = defer.Deferred()
self.reactor.callLater(0, self.connectionLost, connDone)
return self.deferred
stopListening = loseConnection
def connectionLost(self, reason):
"""Cleans up my socket.
"""
log.msg('(Port %s Closed)' % self._realPortNumber)
self._realPortNumber = None
base.BasePort.connectionLost(self, reason)
self.connected = 0
self._closeSocket()
del self.socket
del self.fileno
self.factory.doStop()
if hasattr(self, "deferred"):
self.deferred.callback(None)
del self.deferred
def logPrefix(self):
"""Returns the name of my class, to prefix log entries with.
"""
return reflect.qual(self.factory.__class__)
def getHost(self):
"""Returns an IPv4Address.
This indicates the server's address.
"""
return address.IPv4Address('TCP', *(self.socket.getsockname() + ('INET',)))
class Connector(base.BaseConnector):
def __init__(self, host, port, factory, timeout, bindAddress, reactor=None):
self.host = host
if isinstance(port, types.StringTypes):
try:
port = socket.getservbyname(port, 'tcp')
except socket.error, e:
raise error.ServiceNameUnknownError(string="%s (%r)" % (e, port))
self.port = port
self.bindAddress = bindAddress
base.BaseConnector.__init__(self, factory, timeout, reactor)
def _makeTransport(self):
return Client(self.host, self.port, self.bindAddress, self, self.reactor)
def getDestination(self):
return address.IPv4Address('TCP', self.host, self.port, 'INET')
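# Usage sketch (for orientation only; these classes are normally reached through the
# reactor API rather than instantiated directly). A minimal echo server built on the
# Port/Server classes above would look roughly like:
#
#   from twisted.internet import reactor, protocol
#
#   class Echo(protocol.Protocol):
#       def dataReceived(self, data):
#           self.transport.write(data)   # buffered, eventually Connection.writeSomeData
#
#   factory = protocol.ServerFactory()
#   factory.protocol = Echo
#   reactor.listenTCP(8000, factory)     # listenTCP creates a Port from this module
#   reactor.run()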
|
bsd-3-clause
| 375,367,185,941,382,200 | 34.268018 | 175 | 0.598729 | false |
jonmaur/draggin-framework
|
tools/scons_zip.py
|
1
|
1497
|
import os
import os.path
import glob
import shutil
import datetime
from fnmatch import fnmatch
PROJECT_NAME = "draggin"
if 'PROJECT_NAME' in ARGUMENTS:
PROJECT_NAME = ARGUMENTS["PROJECT_NAME"]
env = Environment(tools = [])
#the full path to this SConscript file
this_sconscript_file = (lambda x:x).func_code.co_filename
# matches anything in the list
def fnmatchList(_name, _filters):
for f in _filters:
if fnmatch(_name, f):
# found one match so get out of here
return True
# no matches
return False
###################################################################################################
# copy files
zipfiles = []
def zipAction( target, source, env ):
print "zipAction"
fpair = os.path.splitext(str(target[0]))
print "Zipping "+fpair[0]
try:
shutil.make_archive(fpair[0], "zip", "package/windows")
except (IOError, os.error), why:
#easygui.exceptionbox(str(source[0])+", "+str(target[0])+" FAILED")
raw_input(str(source[0])+", "+str(target[0])+" FAILED: "+str(why))
copyBuilder = Builder(action = zipAction)
env.Append(BUILDERS = {'zipComplier' : copyBuilder})
inputfiles = []
for root, dirs, files in os.walk("package/windows"):
for f in files:
filename = os.path.join(root, f)
inputfiles.append(str(filename))
# the exe
outputfiles = []
outputfiles.append(str("package/" + PROJECT_NAME + "-windows-" + str(datetime.date.today()) + ".zip"))
zipfiles.append(env.zipComplier(outputfiles, inputfiles))
if len(zipfiles) > 0:
Default(zipfiles)
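# Assumed invocation (PROJECT_NAME is read from the SCons command line above):
#   scons -f scons_zip.py PROJECT_NAME=mygame
# would zip the package/windows tree into package/mygame-windows-<today's date>.zip.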
|
mit
| 7,152,029,935,408,573,000 | 24.372881 | 102 | 0.654643 | false |
MorellatoAriel/PyGobstones
|
interpreter/vgbs/__init__.py
|
1
|
2114
|
from ..interpreterWorker import *
from common.tools import tools
from lang.gbs_board import Board
import common.utils
import common.i18n as i18n
import lang
import logging
class GUIGobstonesApi(lang.GobstonesApi):
def __init__(self, communicator):
self.comm = communicator
def read(self):
self.comm.send('READ_REQUEST')
message = self.comm.receive()
if message.header != 'READ_DONE': assert False
return message.body
def show(self, board):
self.comm.send('PARTIAL', tools.board_format.to_string(board))
def log(self, msg):
self.comm.send('LOG', msg)
class Interpreter(InterpreterWorker):
def prepare(self):
api = GUIGobstonesApi(self.communicator)
options = lang.GobstonesOptions()
self.gobstones = lang.Gobstones(options, api)
def start(self, filename, program_text, initial_board_string, run_mode):
board = tools.board_format.from_string(initial_board_string)
try:
if run_mode == Interpreter.RunMode.FULL:
self.success(self.gobstones.run(filename, program_text, board))
else:
# Parse gobstones script
self.gobstones.api.log(i18n.i18n('Parsing.'))
tree = self.gobstones.parse(program_text, filename)
assert tree
# Explode macros
self.gobstones.api.log(i18n.i18n('Exploding program macros.'))
self.gobstones.explode_macros(tree)
# Check semantics, liveness and types
self.gobstones.check(tree)
self.success()
except Exception as exception:
self.failure(exception)
def success(self, gbs_run=None):
if gbs_run is None:
self.communicator.send('OK', (None, None))
else:
self.communicator.send('OK', (tools.board_format.to_string(gbs_run.final_board), gbs_run.result))
def failure(self, exception):
self.communicator.send('FAIL', (exception.__class__, (exception.msg, exception.area)))
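# Message flow implied by the calls above (a summary, not a formal protocol spec):
# the GUI peer answers 'READ_REQUEST' with a 'READ_DONE' message carrying the value
# read, and receives 'PARTIAL' (board snapshot), 'LOG' (progress text) and a final
# 'OK'/'FAIL' whose body is (final_board_string, result) on success or
# (exception_class, (message, area)) on failure.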
|
gpl-3.0
| 6,555,255,423,196,050,000 | 40.470588 | 109 | 0.614475 | false |
ST-Data-Mining/crater
|
wei/tdivwhere2.py
|
1
|
11047
|
from __future__ import division
import sys
from table import *
from where2 import *
from dtree import *
def csv2py(f):
sym2num = {} # hold all the characters with assinged numbers that never seen
def str2num(t, p=0):
for r,row in enumerate(t._rows):
for c, cell in enumerate(row.cells):
if isinstance(cell, str) and c <t.depen[0].col and isinstance(t.headers[c], Sym):
if sym2num.get(cell, 0) == 0:
sym2num[cell] = p
p +=1
t._rows[r].cells[c]= sym2num[cell] # update cell with num
return t
tbl = table(f)
tbl_num = str2num(tbl)
x = data(indep = [ x.name for x in tbl_num.indep],
less = [x.name for x in tbl_num.depen],
_rows = [row.cells for row in tbl_num._rows])
return x, sym2num
def savetbl(t, fname):
def writetofile(f,lst):
f.write(",".join(map(str,lst))+'\n')
f = open(fname, 'wb')
writetofile(f, [i.name for i in t.headers]) # write header
for num, i in enumerate(t._rows):
writetofile(f, (i.cells))
def apex(test,tree,opt=The.tree): # from Dr. Menzies
"""apex= leaf at end of biggest (most supported)
branch that is selected by test in a tree"""
def equals(val,span):
if val == opt.missing or val==span:
return True
else:
if isinstance(span,tuple):
lo,hi = span
return lo <= val < hi
else:
return span == val
def apex1(cells,tree):
found = False
for kid in tree.kids:
val = cells[kid.f.col]
if equals(val,kid.val):
for leaf in apex1(cells,kid):
found = True
yield leaf
if not found:
yield tree
leaves= [(len(leaf.rows),leaf)
for leaf in apex1(opt.cells(test),tree)]
a = second(last(sorted(leaves)))
try:
oldtestdata = a.testdata
except Exception, e:
oldtestdata = []
newtestdata = oldtestdata +[test]
a.__dict__.update(testdata = newtestdata) # append testdata to nodes in the tree
return a
def buildtestdata(t, num = 1, data =[]):
  data +=[t._rows.pop(random.randint(0,len(t._rows)-1)) for _ in range(num)] # pop a random sample of rows to use as test data
return data
def buildtdiv(tbl):
t= discreteNums(tbl, map(lambda x: x.cells, tbl._rows))
tree = tdiv(t)
# showTdiv(tree)
return tree
def buildcontrast(tree):
allleaves = [i for i in dtleaves(tree)] # all the leaves in dtree
contrastset = []
gl = globalleaf(tree) # the global best contrast set
testscorelst = []
for leaf in allleaves:
testscore = leafscore(leaf)
testscorelst +=[testscore]
tcon = contrast(tree,leaf, testscore)
if tcon==[]:
contrastset+=[gl]
else:
contrastset += [tcon]
print contrastset
print testscorelst
printcontrastset(contrastset, testscorelst)
def gotoleaf(testdata, tree, opt = The.tree):
goleaf = []
for row in testdata:
goleaf += [apex(row, tree, opt)]
return goleaf
def findleaves(tree, leaves = []):
for i,leaf in enumerate(dtleaves(tree)):
leaf.__dict__.update(score=leafscore(leaf), leafid=i)
leaves+=[leaf]
return leaves
def buildcontrast1(tree, allleaves=[]):
def addtoleaf(leaf,contrastset):
leaf.__dict__.update(contrastset=contrastset[-1]) # add contrast set to this leaf
leaf.__dict__.update(target=contrastset[-1]['targetscore'])
return leaf
def findset(leavesdic, l, i=0, contrastset=[], branch = None):
gl,bestscore = globalleaf(allleaves) # the global best contrast set
while True:
if(l.lvl+abs(i)>max(leavesdic) or l.lvl-abs(i) <0):
branch = findbetter(leavesdic, -l.lvl, l) # find the better leaves on the level 0
if branch:
contrastset+=[branch]
elif bestscore == l.score:
contrastset+=[{"This is the best one!":"No Contrast", "targetscore":l.score}]
else:
contrastset+=[gl] # not found, give the global best contrast set
l = addtoleaf(l, contrastset)
break
branch = findbetter(leavesdic, -i, l) # go up level
if branch:
contrastset+=[branch]
l=addtoleaf(l, contrastset)
break
i = -i #up
branch = findbetter(leavesdic, -i, l) # go down i level
if branch:
contrastset+=[branch]
l=addtoleaf(l, contrastset)
break
i = abs(i)+1
return contrastset
def br(node, score):
if not node:
return
contrastdic = {}
for i, b in enumerate(node.branch):
contrastdic[b[0].name]= contrastdic.get(b[0].name,"")+str(b[1])
contrastdic.update({"targetscore":score})
return contrastdic
def findbetter1(kids, testscore, betternode = None):
target =testscore
for bro in kids:
if bro.kids:
continue
if bro.score < target:
target=bro.score # find the better brother
betternode=bro
return br(betternode, target)
def findbetter(leavesdic, i,l):
if not int(i+l.lvl) in leavesdic:
return
if len(l.up.kids)>1: # priority1: find in brothers/Sisters
branch = findbetter1(l.up.kids, l.score)
if branch:
return branch
if l.up.up and len(l.up.up.kids)>1:# priority2: find in aunts and uncles
branch = findbetter1(l.up.up.kids,l.score)
if branch:
return branch
for node in leavesdic[i+l.lvl]: # priority3: find in cousins
# tempscore = leafscore(node)
if node.score < l.score:
branch = br(node,node.score)
return branch
contrastset = []
for sub in tree.kids:
subleaves= [i for i in dtleaves(sub)]
leavesdic = {}
    for l in subleaves: # build the sub-leaves dict
      leavesdic[l.lvl] = leavesdic.get(l.lvl, []) +[l] # group all leaves under one subtree root by level,
                                                  # e.g. {1: [leaf1, leaf2, leaf4], 2: [...]}
for l in subleaves: # build contrast set
contrastset = findset(leavesdic, l)
showTdiv(tree)
printcontrastset(contrastset, allleaves)
return tree
def globalleaf(allleaves, node= None):
mins = 10**10
contrastset= {}
for leaf in allleaves:
if leaf.score < mins:
node = leaf
mins = leaf.score
for i in node.branch:
contrastset[i[0].name]= i[1]
contrastset["targetscore"]=mins
return contrastset, mins
def leafscore(leaf):
score =[]
# rows = map(lambda x:x.cells, leaf.rows)
for row in leaf.rows:
score += [row.cells[-1]]
n = len(score)
p= q = max(0, int(n*0.5) - 1)
if len(score)%2==0:p = q+1
median = (score[p]+score[q])*0.5
return median
def printcontrastset(contrastset,allleaves):
print "\n"+ "+"*20+"\nCONSTRAST SET:"+ "\n"+ "+"*20
for k, adit in enumerate(contrastset):
out = "leaf #"+str(k)+" score:" + str(allleaves[k].score)
# sortdic = dict(sorted(adit.iteritems(), key= lambda x:x[1]))
# sortdic = dict(sorted(adit.iteritems(), key = adit.get))
for key, val in adit.iteritems(): # sort dict by key
out += " ==>"+str(key) +"="+str(val)
print out
out = ""
def printtogo(nodelst):
if not nodelst:
return
print "\n"+ "+"*20+"\nTEST DATA:"+ "\n"+ "+"*20
for i, node in enumerate(nodelst):
out ="testdata "+str(i)+ " will go to"
try:
out +=" leaf #"+str(node.leafid) +": "
except Exception, e:
out+= " node # "+str(node.mode)+": "
for i, b in enumerate(node.branch):
out +=b[0].name+"="+str(b[1])+" "
print out
def contrast(tree, testleaf, testscore):
def myup(node, testleaf, testscore, conset=[]):
if node.lvl==0:
return []
if len(node.up.kids)>1: # have local neighbors, here ,can't go down
for neigh in node.up.kids:
if leafscore(neigh)< testscore:
return [neigh]
if node.up.up and node.up.lvl!=0:
conset +=myup(node.up, testleaf, testscore, conset)
return [node]
else:
return ["noset"]
contrastdic = {}
# testscore = leafscore(testleaf)
temp = myup(testleaf,testleaf, testscore)
if "noset" in temp:
return []
if temp ==[]:
return []
for s in reversed(temp):
# contrastdic+=[s.f.name +":"+s.val]
contrastdic[s.f.name]= contrastdic.get(s.f.name,"")+str(s.val)
contrastdic["clusterID"]= contrastdic.get("clusterID", 0)+ int(temp[0].mode)
return contrastdic
def showTdiv(n,lvl=-1, ):
if n.f:
say( ('|..' * lvl) + str(n.f.name)+ "="+str(n.val) + \
"\t:" + str(n.mode) + " #" + str(nmodes(n)))
if n.kids:
nl();
for k in n.kids:
showTdiv(k, lvl+1)
else:
s=classStats(n)
print ' '+str(int(100*s.counts[s.mode()]/len(n.rows)))+'% * '+str(len(n.rows))+' leaf #'+str(n.leafid) +' score:'+str(n.score)
def clustertbl(f,tree, num2sym, row=[]):
tbl1 = tbl = table(f)
newheader = Num()
newheader.col = len(tbl.headers)
newheader.name = "=klass"
tbl1.headers +=[newheader] # tbl1 : the new table with cluster ID
for k,_ in leaves(tree):
for j in k.val:
for i, cell in enumerate(j.cells):
if isinstance(tbl.headers[i], Sym):
j.cells[i] = num2sym.get(cell, cell)
tmp=j.cells
tmp.append(id(k) % 1000)
tmp.append(j.cells[tbl1.depen[0].col]) # add the FIRST objective into the last cell of the row
# j.__dict__.update({'cells': tmp})
j.update(cells=tmp)
row.append(j.cells)
tbl1 = clone(tbl1, row)
return tbl1, row
def summerize(leaves, Dtree, befscore = 0, aftscore=0):
for leaf in leaves:
try:
leaf.testdata
befscore +=leaf.score * len(leaf.testdata)
try:
leaf.contrastset["This is the best one!"]
aftscore += leaf.score * len(leaf.testdata)
except Exception, e:
aftscore += len(leaf.testdata)*(leaf.contrastset["targetscore"])
except Exception, e:
continue
# try:
# befscore +=leaf.score * len(leaf.testdata)
# except Exception, e:
# # befscore +=0
# try:
# leaf.contrastset["This is the best one!"]
# aftscore += leaf.score * len(leaf.testdata)
# except Exception, e:
# try:
# aftscore +=len(leaf.testdata)*int(leaf.contrastset["targetscore"])
# except Exception, e:
# aftscore+=0
print "\n"+ "+"*20+"\nSummerize:"+ "\n"+ "+"*20
print "before appying contrastset: %s"%str(befscore)
print "after appying contrastset: %s"%str(aftscore)
def main():
random.seed(1)
data = o(src = "data/nasa93train.csv")
# data = o(src = "data/ant-1.3.csv")
m, sym2num= csv2py(data.src)
num2sym = dict(zip(sym2num.values(), sym2num.keys()))
Init(m) # init The class
tree= where2(m, m._rows) # tree generated by clustering
tbl1, row = clustertbl(data.src, tree, num2sym) # new table with cluster ID
fname = data.src[:-4]+'_copy'+data.src[-4:]
savetbl(tbl1,fname) # write new table to a file
# clusterscore = calScore(tree)
testdata = buildtestdata(tbl1, 30) # select the testdata
Dtree = buildtdiv(tbl1)
leaves=findleaves(Dtree)
  testleaf = gotoleaf(testdata, Dtree) # all the leaves the test data should go to
buildcontrast1(Dtree, leaves)
printtogo(testleaf)
summerize(leaves, Dtree)
if __name__ =="__main__": eval(cmd())
|
mit
| -916,340,450,791,656,300 | 31.30117 | 133 | 0.611659 | false |
jberci/resolwe
|
resolwe/flow/managers/workload_connectors/slurm.py
|
1
|
2517
|
""".. Ignore pydocstyle D400.
===============
Slurm Connector
===============
"""
import logging
import os
import shlex
import subprocess
from django.conf import settings
from resolwe.utils import BraceMessage as __
from .base import BaseConnector
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
# We add this much to the memory limit to account for executor overhead,
# since the executor is running in the same environment as the process.
EXECUTOR_MEMORY_OVERHEAD = 200
class Connector(BaseConnector):
"""Slurm-based connector for job execution."""
def submit(self, data, runtime_dir, argv):
"""Run process with SLURM.
For details, see
:meth:`~resolwe.flow.managers.workload_connectors.base.BaseConnector.submit`.
"""
limits = data.process.get_resource_limits()
logger.debug(__(
"Connector '{}' running for Data with id {} ({}).",
self.__class__.__module__,
data.id,
repr(argv)
))
# Compute target partition.
partition = getattr(settings, 'FLOW_SLURM_PARTITION_DEFAULT', None)
if data.process.slug in getattr(settings, 'FLOW_SLURM_PARTITION_OVERRIDES', {}):
partition = settings.FLOW_SLURM_PARTITION_OVERRIDES[data.process.slug]
try:
# Make sure the resulting file is executable on creation.
script_path = os.path.join(runtime_dir, 'slurm.sh')
file_descriptor = os.open(script_path, os.O_WRONLY | os.O_CREAT, mode=0o555)
with os.fdopen(file_descriptor, 'wt') as script:
script.write('#!/bin/bash\n')
script.write('#SBATCH --mem={}M\n'.format(limits['memory'] + EXECUTOR_MEMORY_OVERHEAD))
script.write('#SBATCH --cpus-per-task={}\n'.format(limits['cores']))
if partition:
script.write('#SBATCH --partition={}\n'.format(partition))
# Render the argument vector into a command line.
line = ' '.join(map(shlex.quote, argv))
script.write(line + '\n')
command = ['/usr/bin/env', 'sbatch', script_path]
subprocess.Popen(
command,
cwd=runtime_dir,
stdin=subprocess.DEVNULL
).wait()
except OSError as err:
logger.error(__(
"OSError occurred while preparing SLURM script for Data {}: {}",
data.id, err
))
|
apache-2.0
| 1,765,178,422,453,275,600 | 33.479452 | 103 | 0.581645 | false |
nephila/djangocms-page-meta
|
tests/test_toolbar.py
|
1
|
6609
|
from cms.toolbar.items import Menu, ModalItem, SubMenu
from cms.utils.i18n import get_language_object
from django.contrib.auth.models import Permission, User
from django.test.utils import override_settings
from django.urls import reverse
from django.utils.encoding import force_text
from djangocms_page_meta.cms_toolbars import PAGE_META_ITEM_TITLE, PAGE_META_MENU_TITLE
from djangocms_page_meta.models import PageMeta, TitleMeta
from . import BaseTest
class ToolbarTest(BaseTest):
def test_no_page(self):
"""
Test that no page menu is present if request not in a page
"""
from cms.toolbar.toolbar import CMSToolbar
request = self.get_page_request(None, self.user, "/", edit=True)
toolbar = CMSToolbar(request)
toolbar.get_left_items()
page_menu = toolbar.find_items(Menu, name="Page")
self.assertEqual(page_menu, [])
def test_no_perm(self):
"""
Test that no page menu is present if user has no perm
"""
from cms.toolbar.toolbar import CMSToolbar
page1, __ = self.get_pages()
request = self.get_page_request(page1, self.user_staff, "/", edit=True)
toolbar = CMSToolbar(request)
toolbar.get_left_items()
page_menu = toolbar.find_items(Menu, name="Page")
try:
self.assertEqual(page_menu, [])
except AssertionError:
meta_menu = page_menu[0].item.find_items(SubMenu, name=force_text(PAGE_META_MENU_TITLE))
self.assertEqual(meta_menu, [])
def test_perm(self):
"""
Test that page meta menu is present if user has Page.change_perm
"""
from cms.toolbar.toolbar import CMSToolbar
page1, __ = self.get_pages()
self.user_staff.user_permissions.add(Permission.objects.get(codename="change_page"))
self.user_staff = User.objects.get(pk=self.user_staff.pk)
request = self.get_page_request(page1, self.user_staff, "/", edit=True)
toolbar = CMSToolbar(request)
toolbar.get_left_items()
page_menu = toolbar.menus["page"]
meta_menu = page_menu.find_items(SubMenu, name=force_text(PAGE_META_MENU_TITLE))[0].item
self.assertEqual(
len(meta_menu.find_items(ModalItem, name="{}...".format(force_text(PAGE_META_ITEM_TITLE)))), 1
)
@override_settings(CMS_PERMISSION=True)
def test_perm_permissions(self):
"""
Test that no page menu is present if user has general page Page.change_perm but not permission on current page
"""
from cms.toolbar.toolbar import CMSToolbar
page1, __ = self.get_pages()
self.user_staff.user_permissions.add(Permission.objects.get(codename="change_page"))
self.user_staff = User.objects.get(pk=self.user_staff.pk)
request = self.get_page_request(page1, self.user_staff, "/", edit=True)
toolbar = CMSToolbar(request)
toolbar.get_left_items()
page_menu = toolbar.find_items(Menu, name="Page")
try:
self.assertEqual(page_menu, [])
except AssertionError:
meta_menu = page_menu[0].item.find_items(SubMenu, name=force_text(PAGE_META_MENU_TITLE))
self.assertEqual(meta_menu, [])
def test_toolbar(self):
"""
Test that PageMeta/TitleMeta items are present for superuser
"""
from cms.toolbar.toolbar import CMSToolbar
NEW_CMS_LANGS = { # noqa: N806
1: [
{
"code": "en",
"name": "English",
"public": True,
},
{
"code": "it",
"name": "Italiano",
"public": True,
},
],
"default": {
"hide_untranslated": False,
},
}
page1, __ = self.get_pages()
with self.settings(CMS_LANGUAGES=NEW_CMS_LANGS):
request = self.get_page_request(page1, self.user, "/", edit=True)
toolbar = CMSToolbar(request)
toolbar.get_left_items()
page_menu = toolbar.menus["page"]
meta_menu = page_menu.find_items(SubMenu, name=force_text(PAGE_META_MENU_TITLE))[0].item
self.assertEqual(
len(meta_menu.find_items(ModalItem, name="{}...".format(force_text(PAGE_META_ITEM_TITLE)))), 1
)
self.assertEqual(len(meta_menu.find_items(ModalItem)), len(NEW_CMS_LANGS[1]) + 1)
def test_toolbar_with_items(self):
"""
Test that PageMeta/TitleMeta items are present for superuser if PageMeta/TitleMeta exists for current page
"""
from cms.toolbar.toolbar import CMSToolbar
page1, __ = self.get_pages()
page_ext = PageMeta.objects.create(extended_object=page1)
title_meta = TitleMeta.objects.create(extended_object=page1.get_title_obj("en"))
request = self.get_page_request(page1, self.user, "/", edit=True)
toolbar = CMSToolbar(request)
toolbar.get_left_items()
page_menu = toolbar.menus["page"]
meta_menu = page_menu.find_items(SubMenu, name=force_text(PAGE_META_MENU_TITLE))[0].item
pagemeta_menu = meta_menu.find_items(ModalItem, name="{}...".format(force_text(PAGE_META_ITEM_TITLE)))
self.assertEqual(len(pagemeta_menu), 1)
self.assertTrue(
pagemeta_menu[0].item.url.startswith(
reverse("admin:djangocms_page_meta_pagemeta_change", args=(page_ext.pk,))
)
)
url_change = False
url_add = False
for title in page1.title_set.all():
language = get_language_object(title.language)
titlemeta_menu = meta_menu.find_items(ModalItem, name="{}...".format(language["name"]))
self.assertEqual(len(titlemeta_menu), 1)
try:
title_ext = TitleMeta.objects.get(extended_object_id=title.pk)
self.assertEqual(title_ext, title_meta)
self.assertTrue(
titlemeta_menu[0].item.url.startswith(
reverse("admin:djangocms_page_meta_titlemeta_change", args=(title_ext.pk,))
)
)
url_change = True
except TitleMeta.DoesNotExist:
self.assertTrue(
titlemeta_menu[0].item.url.startswith(reverse("admin:djangocms_page_meta_titlemeta_add"))
)
url_add = True
self.assertTrue(url_change and url_add)
|
bsd-3-clause
| -7,438,808,034,730,485,000 | 40.566038 | 119 | 0.590256 | false |
dpgaspar/Flask-AppBuilder
|
flask_appbuilder/filemanager.py
|
1
|
8533
|
import logging
import os
import os.path as op
import re
import uuid
from flask.globals import _request_ctx_stack
from werkzeug.datastructures import FileStorage
from werkzeug.utils import secure_filename
from wtforms import ValidationError
try:
from flask import _app_ctx_stack
except ImportError:
_app_ctx_stack = None
app_stack = _app_ctx_stack or _request_ctx_stack
log = logging.getLogger(__name__)
try:
from PIL import Image, ImageOps
except ImportError:
Image = None
ImageOps = None
class FileManager(object):
def __init__(
self,
base_path=None,
relative_path="",
namegen=None,
allowed_extensions=None,
permission=0o755,
**kwargs
):
ctx = app_stack.top
if "UPLOAD_FOLDER" in ctx.app.config and not base_path:
base_path = ctx.app.config["UPLOAD_FOLDER"]
if not base_path:
raise Exception("Config key UPLOAD_FOLDER is mandatory")
self.base_path = base_path
self.relative_path = relative_path
self.namegen = namegen or uuid_namegen
if not allowed_extensions and "FILE_ALLOWED_EXTENSIONS" in ctx.app.config:
self.allowed_extensions = ctx.app.config["FILE_ALLOWED_EXTENSIONS"]
else:
self.allowed_extensions = allowed_extensions
self.permission = permission
self._should_delete = False
def is_file_allowed(self, filename):
if not self.allowed_extensions:
return True
return (
"." in filename
and filename.rsplit(".", 1)[1].lower() in self.allowed_extensions
)
def generate_name(self, obj, file_data):
return self.namegen(file_data)
def get_path(self, filename):
if not self.base_path:
raise ValueError("FileUploadField field requires base_path to be set.")
return op.join(self.base_path, filename)
def delete_file(self, filename):
path = self.get_path(filename)
if op.exists(path):
os.remove(path)
def save_file(self, data, filename):
filename_ = secure_filename(filename)
path = self.get_path(filename_)
if not op.exists(op.dirname(path)):
os.makedirs(os.path.dirname(path), self.permission)
data.save(path)
return filename_
class ImageManager(FileManager):
"""
    Image Manager will manage image files referenced on a SQLAlchemy Model.
    Files are saved under IMG_UPLOAD_FOLDER as <uuid>_sep_<filename>.
"""
keep_image_formats = ("PNG",)
def __init__(
self,
base_path=None,
relative_path=None,
max_size=None,
namegen=None,
allowed_extensions=None,
thumbgen=None,
thumbnail_size=None,
permission=0o755,
**kwargs
):
# Check if PIL is installed
if Image is None:
raise Exception("PIL library was not found")
ctx = app_stack.top
if "IMG_SIZE" in ctx.app.config and not max_size:
self.max_size = ctx.app.config["IMG_SIZE"]
if "IMG_UPLOAD_URL" in ctx.app.config and not relative_path:
relative_path = ctx.app.config["IMG_UPLOAD_URL"]
if not relative_path:
raise Exception("Config key IMG_UPLOAD_URL is mandatory")
if "IMG_UPLOAD_FOLDER" in ctx.app.config and not base_path:
base_path = ctx.app.config["IMG_UPLOAD_FOLDER"]
if not base_path:
raise Exception("Config key IMG_UPLOAD_FOLDER is mandatory")
self.thumbnail_fn = thumbgen or thumbgen_filename
self.thumbnail_size = thumbnail_size
self.image = None
if not allowed_extensions:
allowed_extensions = ("gif", "jpg", "jpeg", "png", "tiff")
super(ImageManager, self).__init__(
base_path=base_path,
relative_path=relative_path,
namegen=namegen,
allowed_extensions=allowed_extensions,
permission=permission,
**kwargs
)
def get_url(self, filename):
if isinstance(filename, FileStorage):
return filename.filename
return self.relative_path + filename
def get_url_thumbnail(self, filename):
if isinstance(filename, FileStorage):
return filename.filename
return self.relative_path + thumbgen_filename(filename)
# Deletion
def delete_file(self, filename):
super(ImageManager, self).delete_file(filename)
self.delete_thumbnail(filename)
def delete_thumbnail(self, filename):
path = self.get_path(self.thumbnail_fn(filename))
if op.exists(path):
os.remove(path)
# Saving
def save_file(self, data, filename, size=None, thumbnail_size=None):
"""
Saves an image File
:param data: FileStorage from Flask form upload field
:param filename: Filename with full path
"""
max_size = size or self.max_size
thumbnail_size = thumbnail_size or self.thumbnail_size
if data and isinstance(data, FileStorage):
try:
self.image = Image.open(data)
except Exception as e:
raise ValidationError("Invalid image: %s" % e)
path = self.get_path(filename)
# If Path does not exist, create it
if not op.exists(op.dirname(path)):
os.makedirs(os.path.dirname(path), self.permission)
# Figure out format
filename, format = self.get_save_format(filename, self.image)
if self.image and (self.image.format != format or max_size):
if max_size:
image = self.resize(self.image, max_size)
else:
image = self.image
self.save_image(image, self.get_path(filename), format)
else:
data.seek(0)
data.save(path)
self.save_thumbnail(data, filename, format, thumbnail_size)
return filename
def save_thumbnail(self, data, filename, format, thumbnail_size=None):
thumbnail_size = thumbnail_size or self.thumbnail_size
if self.image and thumbnail_size:
path = self.get_path(self.thumbnail_fn(filename))
self.save_image(self.resize(self.image, thumbnail_size), path, format)
def resize(self, image, size):
"""
Resizes the image
:param image: The image object
        :param size: size is PIL tuple (width, height, force) ex: (200,100,True)
"""
(width, height, force) = size
if image.size[0] > width or image.size[1] > height:
if force:
return ImageOps.fit(self.image, (width, height), Image.ANTIALIAS)
else:
thumb = self.image.copy()
thumb.thumbnail((width, height), Image.ANTIALIAS)
return thumb
return image
def save_image(self, image, path, format="JPEG"):
if image.mode not in ("RGB", "RGBA"):
image = image.convert("RGBA")
with open(path, "wb") as fp:
image.save(fp, format)
def get_save_format(self, filename, image):
if image.format not in self.keep_image_formats:
name, ext = op.splitext(filename)
filename = "%s.jpg" % name
return filename, "JPEG"
return filename, image.format
def uuid_namegen(file_data):
return str(uuid.uuid1()) + "_sep_" + file_data.filename
def get_file_original_name(name):
"""
Use this function to get the user's original filename.
Filename is concatenated with <UUID>_sep_<FILE NAME>, to avoid collisions.
    Use this function in your models as an additional method
::
class ProjectFiles(Base):
id = Column(Integer, primary_key=True)
file = Column(FileColumn, nullable=False)
def file_name(self):
return get_file_original_name(str(self.file))
:param name:
The file name from model
:return:
        Returns the user's original filename with the <UUID>_sep_ prefix removed
"""
re_match = re.findall(".*_sep_(.*)", name)
if re_match:
return re_match[0]
else:
return "Not valid"
def uuid_originalname(uuid_filename):
return uuid_filename.split("_sep_")[1]
def thumbgen_filename(filename):
name, ext = op.splitext(filename)
return "%s_thumb%s" % (name, ext)
|
bsd-3-clause
| -2,775,965,913,715,415,000 | 30.029091 | 84 | 0.597445 | false |
stmobo/Machine-Learning
|
OpenAI-Gym/agents/dqn.py
|
1
|
2613
|
import tensorflow as tf
import numpy as np
from agents import mixed_network, spaces
tensorType = tf.float32
class DeepQNetwork:
def __init__(self, graph, session, state_in, netGen, \
target_mix_factor=0.001, gradient_clipping=None,\
discount_factor=0.99, learning_rate=0.001, prefix=""):
self.graph = graph
self.session = session
self.state_in = state_in
self.discount_factor = discount_factor
self.net = mixed_network.MixedNetwork(self.graph, self.session,\
self.state_in, netGen, target_mix_factor=target_mix_factor,\
prefix=prefix)
self.targets = tf.placeholder(tensorType, [None]) # TD-targets Y[i]
self.target_actions = tf.placeholder(tf.int32, [None]) # Actions to train on A[j]
self.N = tf.range(0, tf.shape(self.target_actions)[0])
self.net_q = tf.gather_nd(self.net.main_out, tf.pack([self.N, self.target_actions], 1)) # Q(s, A[j]) for all A[j] in minibatch
self.loss = tf.reduce_mean( tf.square( self.targets - self.net_q ) )
optimizer = tf.train.AdamOptimizer(learning_rate)
if isinstance(gradient_clipping, tuple):
gradients = optimizer.compute_gradients(self.loss, self.net.main_parameters)
clipped_grads = [ \
( tf.clip_by_value(gv[0], gradient_clipping[0], gradient_clipping[1]), gv[1]) \
for gv in gradients ]
self.optimize = optimizer.apply_gradients(clipped_grads)
else:
self.optimize = optimizer.minimize(self.loss, var_list=self.net.main_parameters)
def predict_main(self, state):
return self.net.get_main({self.state_in:state})
def predict_target(self, state):
return self.net.get_target({self.state_in:state})
def train(self, target_states, states, actions, term_state, rewards):
target_q = self.net.get_target({ self.state_in:target_states })
td_targets = []
for i, t in enumerate(target_q):
if term_state[i]:
td_targets.append(rewards[i])
else:
td_targets.append(rewards[i] + (self.discount_factor * np.amax(t)))
_, crit_loss = self.session.run([self.optimize, self.loss], {
self.state_in: states,
self.target_actions: np.squeeze(actions),
self.targets: np.squeeze(td_targets)
})
return crit_loss
def target_copy(self):
return self.net.update_target()
def target_mix(self):
return self.net.mix_target()
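# --- Illustrative sketch (editor's addition, not part of the original module). ---
# DeepQNetwork.train() builds standard Q-learning TD targets:
#     Y[i] = r[i]                                        if the transition is terminal
#     Y[i] = r[i] + discount * max_a Q_target(s'[i], a)  otherwise
# A hypothetical standalone version of that loop, using plain numpy:
def _td_targets_sketch(target_q, rewards, term_state, discount=0.99):
    targets = []
    for q_next, reward, done in zip(target_q, rewards, term_state):
        # terminal transitions contribute only the immediate reward
        targets.append(reward if done else reward + discount * np.amax(q_next))
    return targets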
|
mit
| -1,506,260,545,065,428,500 | 37.426471 | 134 | 0.603521 | false |
rsalmaso/django-cms
|
cms/admin/forms.py
|
1
|
49721
|
from django import forms
from django.apps import apps
from django.contrib.auth import get_user_model, get_permission_codename
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from django.forms.utils import ErrorList
from django.forms.widgets import HiddenInput
from django.template.defaultfilters import slugify
from django.utils.encoding import force_str
from django.utils.translation import gettext, gettext_lazy as _
from cms import api
from cms.apphook_pool import apphook_pool
from cms.cache.permissions import clear_permission_cache
from cms.exceptions import PluginLimitReached
from cms.extensions import extension_pool
from cms.constants import PAGE_TYPES_ID, PUBLISHER_STATE_DIRTY, ROOT_USER_LEVEL
from cms.forms.validators import (validate_relative_url, validate_url_uniqueness,
validate_overwrite_url)
from cms.forms.widgets import UserSelectAdminWidget, AppHookSelect, ApplicationConfigSelect
from cms.models import (CMSPlugin, Page, PageType, PagePermission, PageUser, PageUserGroup, Title,
Placeholder, GlobalPagePermission, TreeNode)
from cms.models.permissionmodels import User
from cms.plugin_pool import plugin_pool
from cms.signals.apphook import set_restart_trigger
from cms.utils.conf import get_cms_setting
from cms.utils.compat.forms import UserChangeForm
from cms.utils.i18n import get_language_list, get_language_object
from cms.utils.permissions import (
get_current_user,
get_subordinate_users,
get_subordinate_groups,
get_user_permission_level,
)
from menus.menu_pool import menu_pool
def get_permission_accessor(obj):
User = get_user_model()
if isinstance(obj, (PageUser, User,)):
rel_name = 'user_permissions'
else:
rel_name = 'permissions'
return getattr(obj, rel_name)
def get_page_changed_by_filter_choices():
# This is not site-aware
# Been like this forever
# Would be nice for it to filter out by site
values = (
Page
.objects
.filter(publisher_is_draft=True)
.distinct()
.order_by('changed_by')
.values_list('changed_by', flat=True)
)
yield ('', _('All'))
for value in values:
yield (value, value)
def get_page_template_filter_choices():
yield ('', _('All'))
for value, name in get_cms_setting('TEMPLATES'):
yield (value, name)
def save_permissions(data, obj):
models = (
(Page, 'page'),
(PageUser, 'pageuser'),
(PageUserGroup, 'pageuser'),
(PagePermission, 'pagepermission'),
)
if not obj.pk:
# save obj, otherwise we can't assign permissions to him
obj.save()
permission_accessor = get_permission_accessor(obj)
for model, name in models:
content_type = ContentType.objects.get_for_model(model)
for key in ('add', 'change', 'delete'):
# add permission `key` for model `model`
codename = get_permission_codename(key, model._meta)
permission = Permission.objects.get(content_type=content_type, codename=codename)
field = 'can_%s_%s' % (key, name)
if data.get(field):
permission_accessor.add(permission)
elif field in data:
permission_accessor.remove(permission)
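# Illustrative note (editor's addition, not part of the original module):
# save_permissions() maps checkbox fields named "can_<action>_<name>" to Django
# auth Permission rows. For example, with model Page and action "add":
#     get_permission_codename('add', Page._meta)  # -> 'add_page'
# so a truthy data['can_add_page'] grants the 'add_page' permission on the Page
# content type to the user/group, and a falsy value present in data revokes it.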
class CopyPermissionForm(forms.Form):
"""
Holds the specific field for permissions
"""
copy_permissions = forms.BooleanField(
label=_('Copy permissions'),
required=False,
initial=True,
)
class BasePageForm(forms.ModelForm):
_user = None
_site = None
_language = None
title = forms.CharField(label=_("Title"), max_length=255, widget=forms.TextInput(),
help_text=_('The default title'))
slug = forms.CharField(label=_("Slug"), max_length=255, widget=forms.TextInput(),
help_text=_('The part of the title that is used in the URL'))
menu_title = forms.CharField(label=_("Menu Title"), widget=forms.TextInput(),
help_text=_('Overwrite what is displayed in the menu'), required=False)
page_title = forms.CharField(label=_("Page Title"), widget=forms.TextInput(),
help_text=_('Overwrites what is displayed at the top of your browser or in bookmarks'),
required=False)
meta_description = forms.CharField(label=_('Description meta tag'), required=False,
widget=forms.Textarea(attrs={'maxlength': '320', 'rows': '4'}),
help_text=_('A description of the page used by search engines.'),
max_length=320)
class Meta:
model = Page
fields = []
def clean_slug(self):
slug = slugify(self.cleaned_data['slug'])
if not slug:
raise ValidationError(_("Slug must not be empty."))
return slug
class AddPageForm(BasePageForm):
source = forms.ModelChoiceField(
label=_(u'Page type'),
queryset=Page.objects.filter(
is_page_type=True,
publisher_is_draft=True,
),
required=False,
)
parent_node = forms.ModelChoiceField(
queryset=TreeNode.objects.all(),
required=False,
widget=forms.HiddenInput(),
)
class Meta:
model = Page
fields = ['source']
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
source_field = self.fields.get('source')
if not source_field or source_field.widget.is_hidden:
return
root_page = PageType.get_root_page(site=self._site)
if root_page:
# Set the choicefield's choices to the various page_types
descendants = root_page.get_descendant_pages().filter(is_page_type=True)
titles = Title.objects.filter(page__in=descendants, language=self._language)
choices = [('', '---------')]
choices.extend((title.page_id, title.title) for title in titles)
source_field.choices = choices
else:
choices = []
if len(choices) < 2:
source_field.widget = forms.HiddenInput()
def clean(self):
data = self.cleaned_data
if self._errors:
# Form already has errors, best to let those be
# addressed first.
return data
parent_node = data.get('parent_node')
if parent_node:
slug = data['slug']
parent_path = parent_node.item.get_path(self._language)
path = u'%s/%s' % (parent_path, slug) if parent_path else slug
else:
path = data['slug']
try:
# Validate the url
validate_url_uniqueness(
self._site,
path=path,
language=self._language,
)
except ValidationError as error:
self.add_error('slug', error)
else:
data['path'] = path
return data
def clean_parent_node(self):
parent_node = self.cleaned_data.get('parent_node')
if parent_node and parent_node.site_id != self._site.pk:
raise ValidationError("Site doesn't match the parent's page site")
return parent_node
def create_translation(self, page):
data = self.cleaned_data
title_kwargs = {
'page': page,
'language': self._language,
'slug': data['slug'],
'path': data['path'],
'title': data['title'],
}
if 'menu_title' in data:
title_kwargs['menu_title'] = data['menu_title']
if 'page_title' in data:
title_kwargs['page_title'] = data['page_title']
if 'meta_description' in data:
title_kwargs['meta_description'] = data['meta_description']
return api.create_title(**title_kwargs)
def from_source(self, source, parent=None):
new_page = source.copy(
site=self._site,
parent_node=parent,
language=self._language,
translations=False,
permissions=False,
extensions=False,
)
new_page.update(is_page_type=False, in_navigation=True)
return new_page
def get_template(self):
return Page.TEMPLATE_DEFAULT
def save(self, *args, **kwargs):
source = self.cleaned_data.get('source')
parent = self.cleaned_data.get('parent_node')
if source:
new_page = self.from_source(source, parent=parent)
for lang in source.get_languages():
source._copy_contents(new_page, lang)
else:
new_page = super().save(commit=False)
new_page.template = self.get_template()
new_page.set_tree_node(self._site, target=parent, position='last-child')
new_page.save()
translation = self.create_translation(new_page)
if source:
extension_pool.copy_extensions(
source_page=source,
target_page=new_page,
languages=[translation.language],
)
is_first = not (
TreeNode
.objects
.get_for_site(self._site)
.exclude(pk=new_page.node_id)
.exists()
)
new_page.rescan_placeholders()
if is_first and not new_page.is_page_type:
# its the first page. publish it right away
new_page.publish(translation.language)
new_page.set_as_homepage(self._user)
new_page.clear_cache(menu=True)
return new_page
class AddPageTypeForm(AddPageForm):
menu_title = None
meta_description = None
page_title = None
source = forms.ModelChoiceField(
queryset=Page.objects.drafts(),
required=False,
widget=forms.HiddenInput(),
)
def get_or_create_root(self):
"""
Creates the root node used to store all page types
for the current site if it doesn't exist.
"""
root_page = PageType.get_root_page(site=self._site)
if not root_page:
root_page = Page(
publisher_is_draft=True,
in_navigation=False,
is_page_type=True,
)
root_page.set_tree_node(self._site)
root_page.save()
if not root_page.has_translation(self._language):
api.create_title(
language=self._language,
title=gettext('Page Types'),
page=root_page,
slug=PAGE_TYPES_ID,
path=PAGE_TYPES_ID,
)
return root_page.node
def clean_parent_node(self):
parent_node = super().clean_parent_node()
if parent_node and not parent_node.item.is_page_type:
raise ValidationError("Parent has to be a page type.")
if not parent_node:
# parent was not explicitly selected.
# fallback to the page types root
parent_node = self.get_or_create_root()
return parent_node
def from_source(self, source, parent=None):
new_page = source.copy(
site=self._site,
parent_node=parent,
language=self._language,
translations=False,
permissions=False,
extensions=False,
)
new_page.update(is_page_type=True, in_navigation=False)
return new_page
def save(self, *args, **kwargs):
new_page = super().save(*args, **kwargs)
if not self.cleaned_data.get('source'):
# User has created a page-type via "Add page"
# instead of from another page.
new_page.update(
draft_only=True,
is_page_type=True,
in_navigation=False,
)
return new_page
class DuplicatePageForm(AddPageForm):
source = forms.ModelChoiceField(
queryset=Page.objects.drafts(),
required=True,
widget=forms.HiddenInput(),
)
class ChangePageForm(BasePageForm):
translation_fields = (
'slug',
'title',
'meta_description',
'menu_title',
'page_title',
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.title_obj = self.instance.get_title_obj(
language=self._language,
fallback=False,
force_reload=True,
)
for field in self.translation_fields:
if field in self.fields:
self.fields[field].initial = getattr(self.title_obj, field)
def clean(self):
data = super().clean()
if self._errors:
# Form already has errors, best to let those be
# addressed first.
return data
page = self.instance
if page.is_home:
data['path'] = ''
return data
if self.title_obj.has_url_overwrite:
data['path'] = self.title_obj.path
return data
if 'slug' not in self.fields:
# the {% edit_title_fields %} template tag
# allows users to edit specific fields for a translation.
# as a result, slug might not always be there.
return data
if page.parent_page:
slug = data['slug']
parent_path = page.parent_page.get_path(self._language)
path = u'%s/%s' % (parent_path, slug) if parent_path else slug
else:
path = data['slug']
try:
# Validate the url
validate_url_uniqueness(
self._site,
path=path,
language=self._language,
exclude_page=page,
)
except ValidationError as error:
self.add_error('slug', error)
else:
data['path'] = path
return data
def save(self, commit=True):
data = self.cleaned_data
cms_page = super().save(commit=False)
translation_data = {field: data[field]
for field in self.translation_fields if field in data}
if 'path' in data:
# The path key is set if
# the slug field is present in the form,
# or if the page being edited is the home page,
# or if the translation has a url override.
translation_data['path'] = data['path']
update_count = cms_page.update_translations(
self._language,
publisher_state=PUBLISHER_STATE_DIRTY,
**translation_data
)
if self._language in cms_page.title_cache:
del cms_page.title_cache[self._language]
if update_count == 0:
api.create_title(language=self._language, page=cms_page, **translation_data)
# _update_title_path_recursive should be called if the new page is the parent
# of already created children in multilingual sites.
cms_page._update_title_path_recursive(self._language, slug=self.data['slug'])
cms_page.clear_cache(menu=True)
return cms_page
class PublicationDatesForm(forms.ModelForm):
class Meta:
model = Page
fields = ['publication_date', 'publication_end_date']
def save(self, *args, **kwargs):
page = super().save(*args, **kwargs)
page.clear_cache(menu=True)
return page
class AdvancedSettingsForm(forms.ModelForm):
from cms.forms.fields import PageSmartLinkField
_user = None
_site = None
_language = None
application_urls = forms.ChoiceField(label=_('Application'),
choices=(), required=False,
help_text=_('Hook application to this page.'))
overwrite_url = forms.CharField(label=_('Overwrite URL'), max_length=255, required=False,
help_text=_('Keep this field empty if standard path should be used.'))
xframe_options = forms.ChoiceField(
choices=Page._meta.get_field('xframe_options').choices,
label=_('X Frame Options'),
help_text=_('Whether this page can be embedded in other pages or websites'),
initial=Page._meta.get_field('xframe_options').default,
required=False
)
redirect = PageSmartLinkField(label=_('Redirect'), required=False,
help_text=_('Redirects to this URL.'),
placeholder_text=_('Start typing...'),
ajax_view='admin:cms_page_get_published_pagelist',
)
# This is really a 'fake' field which does not correspond to any Page attribute
    # But creates a stub field to be populated by js
application_configs = forms.CharField(
label=_('Application configurations'),
required=False,
widget=ApplicationConfigSelect,
)
fieldsets = (
(None, {
'fields': ('overwrite_url', 'redirect'),
}),
(_('Language independent options'), {
'fields': ('template', 'reverse_id', 'soft_root', 'navigation_extenders',
'application_urls', 'application_namespace', 'application_configs',
'xframe_options',)
})
)
class Meta:
model = Page
fields = [
'template', 'reverse_id', 'overwrite_url', 'redirect', 'soft_root', 'navigation_extenders',
'application_urls', 'application_namespace', "xframe_options",
]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.title_obj = self.instance.get_title_obj(
language=self._language,
fallback=False,
force_reload=True,
)
if 'navigation_extenders' in self.fields:
navigation_extenders = self.get_navigation_extenders()
self.fields['navigation_extenders'].widget = forms.Select(
{}, [('', "---------")] + navigation_extenders)
if 'application_urls' in self.fields:
# Prepare a dict mapping the apps by class name ('PollApp') to
# their app_name attribute ('polls'), if any.
app_namespaces = {}
app_configs = {}
for hook in apphook_pool.get_apphooks():
app = apphook_pool.get_apphook(hook[0])
if app.app_name:
app_namespaces[hook[0]] = app.app_name
if app.app_config:
app_configs[hook[0]] = app
self.fields['application_urls'].widget = AppHookSelect(
attrs={'id': 'application_urls'},
app_namespaces=app_namespaces
)
self.fields['application_urls'].choices = [('', "---------")] + apphook_pool.get_apphooks()
page_data = self.data if self.data else self.initial
if app_configs:
self.fields['application_configs'].widget = ApplicationConfigSelect(
attrs={'id': 'application_configs'},
app_configs=app_configs,
)
if page_data.get('application_urls', False) and page_data['application_urls'] in app_configs:
configs = app_configs[page_data['application_urls']].get_configs()
self.fields['application_configs'].widget.choices = [(config.pk, force_str(config)) for config in configs]
try:
config = configs.get(namespace=self.initial['application_namespace'])
self.fields['application_configs'].initial = config.pk
except ObjectDoesNotExist:
# Provided apphook configuration doesn't exist (anymore),
# just skip it
# The user will choose another value anyway
pass
if 'redirect' in self.fields:
self.fields['redirect'].widget.language = self._language
self.fields['redirect'].initial = self.title_obj.redirect
if 'overwrite_url' in self.fields and self.title_obj.has_url_overwrite:
self.fields['overwrite_url'].initial = self.title_obj.path
def get_apphooks(self):
for hook in apphook_pool.get_apphooks():
yield (hook[0], apphook_pool.get_apphook(hook[0]))
def get_apphooks_with_config(self):
return {key: app for key, app in self.get_apphooks() if app.app_config}
def get_navigation_extenders(self):
return menu_pool.get_menus_by_attribute("cms_enabled", True)
def _check_unique_namespace_instance(self, namespace):
return Page.objects.drafts().on_site(self._site).filter(
application_namespace=namespace
).exclude(pk=self.instance.pk).exists()
def clean(self):
cleaned_data = super().clean()
if cleaned_data.get("overwrite_url"):
# Assuming that the user enters a full URL in the overwrite_url input.
# Here we validate it before publishing the page and if it contains
# reserved characters (e.g. $?:#), we add error in the form.
# issue 6934
url = cleaned_data.get("overwrite_url")
if url and not validate_overwrite_url(value=url):
self._errors['overwrite_url'] = self.error_class([_('You entered an invalid URL.')])
if self._errors:
# Fail fast if there's errors in the form
return cleaned_data
# Language has been validated already
# so we know it exists.
language_name = get_language_object(
self._language,
site_id=self._site.pk,
)['name']
if not self.title_obj.slug:
# This covers all cases where users try to edit
# page advanced settings without setting a title slug
# for page titles that already exist.
message = _("Please set the %(language)s slug "
"before editing its advanced settings.")
raise ValidationError(message % {'language': language_name})
if 'reverse_id' in self.fields:
reverse_id = cleaned_data['reverse_id']
if reverse_id:
lookup = Page.objects.drafts().on_site(self._site).filter(reverse_id=reverse_id)
if lookup.exclude(pk=self.instance.pk).exists():
self._errors['reverse_id'] = self.error_class(
[_('A page with this reverse URL id exists already.')])
apphook = cleaned_data.get('application_urls', None)
# The field 'application_namespace' is a misnomer. It should be
# 'instance_namespace'.
instance_namespace = cleaned_data.get('application_namespace', None)
application_config = cleaned_data.get('application_configs', None)
if apphook:
apphooks_with_config = self.get_apphooks_with_config()
# application_config wins over application_namespace
if apphook in apphooks_with_config and application_config:
# the value of the application config namespace is saved in
# the 'usual' namespace field to be backward compatible
# with existing apphooks
try:
appconfig_pk = forms.IntegerField(required=True).to_python(application_config)
except ValidationError:
self._errors['application_configs'] = ErrorList([
_('Invalid application config value')
])
return self.cleaned_data
try:
config = apphooks_with_config[apphook].get_configs().get(pk=appconfig_pk)
except ObjectDoesNotExist:
self._errors['application_configs'] = ErrorList([
_('Invalid application config value')
])
return self.cleaned_data
if self._check_unique_namespace_instance(config.namespace):
# Looks like there's already one with the default instance
# namespace defined.
self._errors['application_configs'] = ErrorList([
_('An application instance using this configuration already exists.')
])
else:
self.cleaned_data['application_namespace'] = config.namespace
else:
if instance_namespace:
if self._check_unique_namespace_instance(instance_namespace):
self._errors['application_namespace'] = ErrorList([
_('An application instance with this name already exists.')
])
else:
# The attribute on the apps 'app_name' is a misnomer, it should be
# 'application_namespace'.
application_namespace = apphook_pool.get_apphook(apphook).app_name
if application_namespace and not instance_namespace:
if self._check_unique_namespace_instance(application_namespace):
# Looks like there's already one with the default instance
# namespace defined.
self._errors['application_namespace'] = ErrorList([
_('An application instance with this name already exists.')
])
else:
# OK, there are zero instances of THIS app that use the
# default instance namespace, so, since the user didn't
# provide one, we'll use the default. NOTE: The following
# line is really setting the "instance namespace" of the
# new app to the app’s "application namespace", which is
# the default instance namespace.
self.cleaned_data['application_namespace'] = application_namespace
if instance_namespace and not apphook:
self.cleaned_data['application_namespace'] = None
if application_config and not apphook:
self.cleaned_data['application_configs'] = None
return self.cleaned_data
def clean_xframe_options(self):
if 'xframe_options' not in self.fields:
return # nothing to do, field isn't present
xframe_options = self.cleaned_data['xframe_options']
if xframe_options == '':
return Page._meta.get_field('xframe_options').default
return xframe_options
def clean_overwrite_url(self):
path_override = self.cleaned_data.get('overwrite_url')
if path_override:
path = path_override.strip('/')
else:
path = self.instance.get_path_for_slug(self.title_obj.slug, self._language)
validate_url_uniqueness(
self._site,
path=path,
language=self._language,
exclude_page=self.instance,
)
self.cleaned_data['path'] = path
return path_override
def has_changed_apphooks(self):
changed_data = self.changed_data
if 'application_urls' in changed_data:
return True
return 'application_namespace' in changed_data
def update_apphooks(self):
# User has changed the apphooks on the page.
# Update the public version of the page to reflect this change immediately.
public_id = self.instance.publisher_public_id
self._meta.model.objects.filter(pk=public_id).update(
application_urls=self.instance.application_urls,
application_namespace=(self.instance.application_namespace or None),
)
# Connects the apphook restart handler to the request finished signal
set_restart_trigger()
def save(self, *args, **kwargs):
data = self.cleaned_data
page = super().save(*args, **kwargs)
page.update_translations(
self._language,
path=data['path'],
redirect=(data.get('redirect') or None),
publisher_state=PUBLISHER_STATE_DIRTY,
has_url_overwrite=bool(data.get('overwrite_url')),
)
is_draft_and_has_public = page.publisher_is_draft and page.publisher_public_id
if is_draft_and_has_public and self.has_changed_apphooks():
self.update_apphooks()
page.clear_cache(menu=True)
return page
class PagePermissionForm(forms.ModelForm):
class Meta:
model = Page
fields = ['login_required', 'limit_visibility_in_menu']
def save(self, *args, **kwargs):
page = super().save(*args, **kwargs)
page.clear_cache(menu=True)
clear_permission_cache()
return page
class PageTreeForm(forms.Form):
position = forms.IntegerField(initial=0, required=True)
target = forms.ModelChoiceField(queryset=Page.objects.none(), required=False)
def __init__(self, *args, **kwargs):
self.page = kwargs.pop('page')
self._site = kwargs.pop('site', Site.objects.get_current())
super().__init__(*args, **kwargs)
self.fields['target'].queryset = Page.objects.drafts().filter(
node__site=self._site,
is_page_type=self.page.is_page_type,
)
def get_root_nodes(self):
# TODO: this needs to avoid using the pages accessor directly
nodes = TreeNode.get_root_nodes()
return nodes.exclude(cms_pages__is_page_type=not(self.page.is_page_type))
def get_tree_options(self):
position = self.cleaned_data['position']
target_page = self.cleaned_data.get('target')
parent_node = target_page.node if target_page else None
if parent_node:
return self._get_tree_options_for_parent(parent_node, position)
return self._get_tree_options_for_root(position)
def _get_tree_options_for_root(self, position):
siblings = self.get_root_nodes().filter(site=self._site)
try:
target_node = siblings[position]
except IndexError:
# The position requested is not occupied.
# Add the node as the last root node,
# relative to the current site.
return (siblings.reverse()[0], 'right')
return (target_node, 'left')
def _get_tree_options_for_parent(self, parent_node, position):
if position == 0:
return (parent_node, 'first-child')
siblings = parent_node.get_children().filter(site=self._site)
try:
target_node = siblings[position]
except IndexError:
# The position requested is not occupied.
            # Add the node as the parent's last child
return (parent_node, 'last-child')
return (target_node, 'left')
class MovePageForm(PageTreeForm):
def clean(self):
cleaned_data = super().clean()
if self.page.is_home and cleaned_data.get('target'):
self.add_error('target', force_str(_('You can\'t move the home page inside another page')))
return cleaned_data
def get_tree_options(self):
options = super().get_tree_options()
target_node, target_node_position = options
if target_node_position != 'left':
return (target_node, target_node_position)
node = self.page.node
node_is_first = node.path < target_node.path
if node_is_first and node.is_sibling_of(target_node):
# The node being moved appears before the target node
# and is a sibling of the target node.
# The user is moving from left to right.
target_node_position = 'right'
elif node_is_first:
# The node being moved appears before the target node
# but is not a sibling of the target node.
# The user is moving from right to left.
target_node_position = 'left'
else:
# The node being moved appears after the target node.
# The user is moving from right to left.
target_node_position = 'left'
return (target_node, target_node_position)
def move_page(self):
self.page.move_page(*self.get_tree_options())
class CopyPageForm(PageTreeForm):
source_site = forms.ModelChoiceField(queryset=Site.objects.all(), required=True)
copy_permissions = forms.BooleanField(initial=False, required=False)
def copy_page(self):
target, position = self.get_tree_options()
copy_permissions = self.cleaned_data.get('copy_permissions', False)
new_page = self.page.copy_with_descendants(
target_node=target,
position=position,
copy_permissions=copy_permissions,
target_site=self._site,
)
new_page.clear_cache(menu=True)
return new_page
def _get_tree_options_for_root(self, position):
try:
return super()._get_tree_options_for_root(position)
except IndexError:
# The user is copying a page to a site with no pages
# Add the node as the last root node.
siblings = self.get_root_nodes().reverse()
return (siblings[0], 'right')
class ChangeListForm(forms.Form):
BOOLEAN_CHOICES = (
('', _('All')),
('1', _('Yes')),
('0', _('No')),
)
q = forms.CharField(required=False, widget=forms.HiddenInput())
in_navigation = forms.ChoiceField(required=False, choices=BOOLEAN_CHOICES)
template = forms.ChoiceField(required=False)
changed_by = forms.ChoiceField(required=False)
soft_root = forms.ChoiceField(required=False, choices=BOOLEAN_CHOICES)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['changed_by'].choices = get_page_changed_by_filter_choices()
self.fields['template'].choices = get_page_template_filter_choices()
def is_filtered(self):
data = self.cleaned_data
if self.cleaned_data.get('q'):
return True
return any(bool(data.get(field.name)) for field in self.visible_fields())
def get_filter_items(self):
for field in self.visible_fields():
value = self.cleaned_data.get(field.name)
if value:
yield (field.name, value)
def run_filters(self, queryset):
for field, value in self.get_filter_items():
query = {'{}__exact'.format(field): value}
queryset = queryset.filter(**query)
return queryset
class BasePermissionAdminForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
permission_fields = self._meta.model.get_all_permissions()
for field in permission_fields:
if field not in self.base_fields:
setattr(self.instance, field, False)
class PagePermissionInlineAdminForm(BasePermissionAdminForm):
"""
Page permission inline admin form used in inline admin. Required, because
user and group queryset must be changed. User can see only users on the same
    level or below in the chosen page tree, and users created by him that
    aren't assigned to a higher page level than the current user.
"""
page = forms.ModelChoiceField(
queryset=Page.objects.all(),
label=_('user'),
widget=HiddenInput(),
required=True,
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
user = get_current_user() # current user from threadlocals
site = Site.objects.get_current()
sub_users = get_subordinate_users(user, site)
limit_choices = True
use_raw_id = False
# Unfortunately, if there are > 500 users in the system, non-superusers
# won't see any benefit here because if we ask Django to put all the
# user PKs in limit_choices_to in the query string of the popup we're
# in danger of causing 414 errors so we fall back to the normal input
# widget.
if get_cms_setting('RAW_ID_USERS'):
if sub_users.count() < 500:
# If there aren't too many users, proceed as normal and use a
# raw id field with limit_choices_to
limit_choices = True
use_raw_id = True
elif get_user_permission_level(user, site) == ROOT_USER_LEVEL:
# If there are enough choices to possibly cause a 414 request
# URI too large error, we only proceed with the raw id field if
# the user is a superuser & thus can legitimately circumvent
# the limit_choices_to condition.
limit_choices = False
use_raw_id = True
# We don't use the fancy custom widget if the admin form wants to use a
# raw id field for the user
if use_raw_id:
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
# This check will be False if the number of users in the system
# is less than the threshold set by the RAW_ID_USERS setting.
if isinstance(self.fields['user'].widget, ForeignKeyRawIdWidget):
# We can't set a queryset on a raw id lookup, but we can use
# the fact that it respects the limit_choices_to parameter.
if limit_choices:
self.fields['user'].widget.rel.limit_choices_to = dict(
id__in=list(sub_users.values_list('pk', flat=True))
)
else:
self.fields['user'].widget = UserSelectAdminWidget()
self.fields['user'].queryset = sub_users
self.fields['user'].widget.user = user # assign current user
self.fields['group'].queryset = get_subordinate_groups(user, site)
class Meta:
fields = [
'user',
'group',
'can_add',
'can_change',
'can_delete',
'can_publish',
'can_change_advanced_settings',
'can_change_permissions',
'can_move_page',
'grant_on',
]
model = PagePermission
class ViewRestrictionInlineAdminForm(BasePermissionAdminForm):
page = forms.ModelChoiceField(
queryset=Page.objects.all(),
label=_('user'),
widget=HiddenInput(),
required=True,
)
can_view = forms.BooleanField(
label=_('can_view'),
widget=HiddenInput(),
initial=True,
)
class Meta:
fields = [
'user',
'group',
'grant_on',
'can_view',
]
model = PagePermission
def clean_can_view(self):
return True
class GlobalPagePermissionAdminForm(BasePermissionAdminForm):
class Meta:
fields = [
'user',
'group',
'can_add',
'can_change',
'can_delete',
'can_publish',
'can_change_advanced_settings',
'can_change_permissions',
'can_move_page',
'can_view',
'sites',
]
model = GlobalPagePermission
class GenericCmsPermissionForm(forms.ModelForm):
"""Generic form for User & Grup permissions in cms
"""
_current_user = None
can_add_page = forms.BooleanField(label=_('Add'), required=False, initial=True)
can_change_page = forms.BooleanField(label=_('Change'), required=False, initial=True)
can_delete_page = forms.BooleanField(label=_('Delete'), required=False)
# pageuser is for pageuser & group - they are combined together,
# and read out from PageUser model
can_add_pageuser = forms.BooleanField(label=_('Add'), required=False)
can_change_pageuser = forms.BooleanField(label=_('Change'), required=False)
can_delete_pageuser = forms.BooleanField(label=_('Delete'), required=False)
can_add_pagepermission = forms.BooleanField(label=_('Add'), required=False)
can_change_pagepermission = forms.BooleanField(label=_('Change'), required=False)
can_delete_pagepermission = forms.BooleanField(label=_('Delete'), required=False)
def __init__(self, *args, **kwargs):
instance = kwargs.get('instance')
initial = kwargs.get('initial') or {}
if instance:
initial = initial or {}
initial.update(self.populate_initials(instance))
kwargs['initial'] = initial
super().__init__(*args, **kwargs)
def clean(self):
data = super().clean()
# Validate Page options
if not data.get('can_change_page'):
if data.get('can_add_page'):
message = _("Users can't create a page without permissions "
"to change the created page. Edit permissions required.")
raise ValidationError(message)
if data.get('can_delete_page'):
message = _("Users can't delete a page without permissions "
"to change the page. Edit permissions required.")
raise ValidationError(message)
if data.get('can_add_pagepermission'):
message = _("Users can't set page permissions without permissions "
"to change a page. Edit permissions required.")
raise ValidationError(message)
if data.get('can_delete_pagepermission'):
message = _("Users can't delete page permissions without permissions "
"to change a page. Edit permissions required.")
raise ValidationError(message)
# Validate PagePermission options
if not data.get('can_change_pagepermission'):
if data.get('can_add_pagepermission'):
message = _("Users can't create page permissions without permissions "
"to change the created permission. Edit permissions required.")
raise ValidationError(message)
if data.get('can_delete_pagepermission'):
message = _("Users can't delete page permissions without permissions "
"to change permissions. Edit permissions required.")
raise ValidationError(message)
def populate_initials(self, obj):
"""Read out permissions from permission system.
"""
initials = {}
permission_accessor = get_permission_accessor(obj)
for model in (Page, PageUser, PagePermission):
name = model.__name__.lower()
content_type = ContentType.objects.get_for_model(model)
permissions = permission_accessor.filter(content_type=content_type).values_list('codename', flat=True)
for key in ('add', 'change', 'delete'):
codename = get_permission_codename(key, model._meta)
initials['can_%s_%s' % (key, name)] = codename in permissions
return initials
def save(self, commit=True):
instance = super().save(commit=False)
instance.save()
save_permissions(self.cleaned_data, instance)
return instance
class PageUserAddForm(forms.ModelForm):
_current_user = None
user = forms.ModelChoiceField(queryset=User.objects.none())
class Meta:
fields = ['user']
model = PageUser
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['user'].queryset = self.get_subordinates()
def get_subordinates(self):
subordinates = get_subordinate_users(self._current_user, self._current_site)
return subordinates.filter(pageuser__isnull=True)
def save(self, commit=True):
user = self.cleaned_data['user']
instance = super().save(commit=False)
instance.created_by = self._current_user
for field in user._meta.fields:
# assign all the fields - we can do this, because object is
# subclassing User (one to one relation)
value = getattr(user, field.name)
setattr(instance, field.name, value)
if commit:
instance.save()
return instance
class PageUserChangeForm(UserChangeForm):
_current_user = None
class Meta:
fields = '__all__'
model = PageUser
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if not self._current_user.is_superuser:
# Limit permissions to include only
# the permissions available to the manager.
permissions = self.get_available_permissions()
self.fields['user_permissions'].queryset = permissions
# Limit groups to include only those where
# the manager is a member.
self.fields['groups'].queryset = self.get_available_groups()
def get_available_permissions(self):
permissions = self._current_user.get_all_permissions()
permission_codes = (perm.rpartition('.')[-1] for perm in permissions)
return Permission.objects.filter(codename__in=permission_codes)
def get_available_groups(self):
return self._current_user.groups.all()
class PageUserGroupForm(GenericCmsPermissionForm):
class Meta:
model = PageUserGroup
fields = ('name', )
def save(self, commit=True):
if not self.instance.pk:
self.instance.created_by = self._current_user
return super().save(commit=commit)
class PluginAddValidationForm(forms.Form):
placeholder_id = forms.ModelChoiceField(
queryset=Placeholder.objects.all(),
required=True,
)
plugin_language = forms.CharField(required=True)
plugin_parent = forms.ModelChoiceField(
CMSPlugin.objects.all(),
required=False,
)
plugin_type = forms.CharField(required=True)
def clean_plugin_type(self):
plugin_type = self.cleaned_data['plugin_type']
try:
plugin_pool.get_plugin(plugin_type)
except KeyError:
message = gettext("Invalid plugin type '%s'") % plugin_type
raise ValidationError(message)
return plugin_type
def clean(self):
from cms.utils.plugins import has_reached_plugin_limit
data = self.cleaned_data
if self.errors:
return data
language = data['plugin_language']
placeholder = data['placeholder_id']
parent_plugin = data.get('plugin_parent')
if language not in get_language_list():
message = gettext("Language must be set to a supported language!")
self.add_error('plugin_language', message)
return self.cleaned_data
if parent_plugin:
if parent_plugin.language != language:
message = gettext("Parent plugin language must be same as language!")
self.add_error('plugin_language', message)
return self.cleaned_data
if parent_plugin.placeholder_id != placeholder.pk:
message = gettext("Parent plugin placeholder must be same as placeholder!")
self.add_error('placeholder_id', message)
return self.cleaned_data
page = placeholder.page
template = page.get_template() if page else None
try:
has_reached_plugin_limit(
placeholder,
data['plugin_type'],
language,
template=template,
parent_plugin=parent_plugin
)
except PluginLimitReached as error:
self.add_error(None, force_str(error))
return self.cleaned_data
class RequestToolbarForm(forms.Form):
obj_id = forms.CharField(required=False)
obj_type = forms.CharField(required=False)
cms_path = forms.CharField(required=False)
def clean(self):
data = self.cleaned_data
obj_id = data.get('obj_id')
obj_type = data.get('obj_type')
if not bool(obj_id or obj_type):
return data
if (obj_id and not obj_type) or (obj_type and not obj_id):
message = 'Invalid object lookup. Both obj_id and obj_type are required'
raise forms.ValidationError(message)
app, sep, model = obj_type.rpartition('.')
try:
model_class = apps.get_model(app_label=app, model_name=model)
except LookupError:
message = 'Invalid object lookup. Both obj_id and obj_type are required'
raise forms.ValidationError(message)
try:
generic_obj = model_class.objects.get(pk=obj_id)
except model_class.DoesNotExist:
message = 'Invalid object lookup. Both obj_id and obj_type are required'
raise forms.ValidationError(message)
else:
data['attached_obj'] = generic_obj
return data
def clean_cms_path(self):
path = self.cleaned_data.get('cms_path')
if path:
validate_relative_url(path)
return path
|
bsd-3-clause
| -7,345,646,420,611,951,000 | 35.397511 | 126 | 0.582373 | false |
mzdaniel/django-selenium-test-runner
|
dstest/test_runner.py
|
1
|
13197
|
"""Django Selenium test runner.
Incorporate functional testing into Django's manage.py test subcommand
using Selenium web testing tools."""
__author__ = 'Daniel Mizyrycki'
__copyright__ = 'Copyright 2009, Daniel Mizyrycki'
__license__ = 'BSD'
__version__ = '0.1.0'
__maintainer__ = __author__
__email__ = 'mzdaniel@gmail.com'
__status__ = 'Development'
__url__ = 'http://pypi.python.org/pypi/django-selenium-test-runner'
__summary__ = __doc__
from django.conf import settings
from django.core.management import setup_environ, import_module, call_command
if not hasattr(settings, 'SETTINGS_MODULE'):
settings.configure()
else:
PROJECT_PATH = setup_environ(import_module(settings.SETTINGS_MODULE),
settings.SETTINGS_MODULE)
import os, sys, re, threading, unittest, shutil
from urlparse import urlparse
from subprocess import Popen, PIPE
from signal import SIGHUP
from time import sleep
from django.db import connection
from django.db.models import get_app, get_apps
from django.test.simple import run_tests as base_run_tests
from django.core.handlers.wsgi import WSGIHandler
from django.contrib import admin
from wsgiserver import CherryPyWSGIServer, WSGIPathInfoDispatcher
from mediahandler import MediaHandler
SELENIUM_TESTS_PATH = 'tests/selenium'
FIXTURES = ['tests/data.json']
DSTEST_PATH = os.path.dirname(__file__)
TEST_DB_NAME = 'test_fixture_db'
SELENIUM_RC_PATH = os.path.join(DSTEST_PATH, 'selenium-server.jar')
CPSERVER_OPTIONS = {'host': 'localhost', 'port': 8000, 'threads': 10,
'request_queue_size': 15}
# Overwrite default settings from settings.py if they are defined.
if hasattr(settings, 'SELENIUM_TESTS_PATH'):
SELENIUM_TESTS_PATH = settings.SELENIUM_TESTS_PATH
if hasattr(settings, 'FIXTURES'):
FIXTURES = settings.FIXTURES
if hasattr(settings, 'SELENIUM_PATH'):
SELENIUM_RC_PATH = os.path.join(settings.SELENIUM_PATH,
'selenium-server.jar')
sys.path += [settings.SELENIUM_PATH]
sys.path += [DSTEST_PATH]
class SeleniumRCThread(threading.Thread):
"""Selenium RC control thread."""
def __init__(self, server_filepath):
super(SeleniumRCThread, self).__init__()
self.server_filepath = server_filepath
self.process = None
def run(self):
"""Launch Selenium server."""
self.process = Popen(('java -jar %s' % self.server_filepath).split(),
shell=False, stdout=PIPE, stderr=PIPE)
def stop(self):
"""Stop Selenium server."""
os.kill(self.process.pid, SIGHUP)
class TestDB(object):
"""Encapsulate fixtured database handling for tests to be used by
Django web server. As the Django connection is global, this class will
    set up TEST_DB_NAME as the database in use."""
def __init__(self, db_name, fixtures, verbosity=0):
"""Initialize TestDB."""
self.db_name = db_name
self.fixtures = fixtures
self.verbosity = verbosity
# Save the real database names for later connection restore.
self.database_name = settings.DATABASE_NAME
self.test_database_name = settings.TEST_DATABASE_NAME
self.db_path = None
self.db_backup_path = None
def initialize_test_db(self):
"""Establish a connection to a fresh TEST_DB_NAME database with the
test fixtures on it."""
# Create a test database and sync it with models.py
# Handle a second test database for selenium use. Postgres uses
# transactions which interfere with the Django server thread.
settings.TEST_DATABASE_NAME = self.db_name
connection.creation.create_test_db(verbosity=self.verbosity,
autoclobber=True)
# Hook for doing any extra initialization
self.extra_init()
# Load fixture data.
call_command('loaddata', *self.fixtures, verbosity=self.verbosity)
# Sync data and close connection
connection.close()
# If sqlite3 or Postgres is used, create a backup database to speed up
# fixture reloading.
if settings.DATABASE_ENGINE == 'postgresql_psycopg2':
# connection.creation is used to overcome transaction management,
# allowing to execute DROP and CREATE db commands.
cursor = connection.cursor()
connection.creation.set_autocommit()
cursor.execute("DROP DATABASE IF EXISTS %s_backup" % self.db_name)
cursor.execute("CREATE DATABASE %s_backup WITH TEMPLATE %s" % (
self.db_name, self.db_name))
if settings.DATABASE_ENGINE == 'sqlite3':
self.db_path = os.path.join(PROJECT_PATH, settings.DATABASE_NAME)
self.db_backup_path = '%s_backup' % self.db_path
if self.db_path[-3:] == '.db':
self.db_backup_path = '%s_backup.db' % self.db_path[:-3]
shutil.copyfile(self.db_path, self.db_backup_path)
# Restore the database names as create_test_db changed it.
settings.TEST_DATABASE_NAME = self.test_database_name
settings.DATABASE_NAME = self.database_name
def extra_init(self):
"""Hook for doing any extra initialization. After subclassing TestDB,
and overriding this method, initialize_test_db will call it."""
pass
def reload_db(self):
"""Reload fixtures into test database. This is a database dependant
method. For now, only works on Postgres."""
if not settings.DATABASE_ENGINE in ['sqlite3', 'postgresql_psycopg2']:
return None
# Close connection to cleanly swap databases.
connection.close()
if settings.DATABASE_ENGINE == 'sqlite3':
shutil.copyfile(self.db_backup_path, self.db_path)
if settings.DATABASE_ENGINE == 'postgresql_psycopg2':
# Establish a temporal connection to template1 database and
# recreate TEST_DB_NAME.
connection.settings_dict["DATABASE_NAME"] = 'template1'
cursor = connection.cursor()
connection.creation.set_autocommit()
cursor.execute("DROP DATABASE IF EXISTS %s" % self.db_name)
cursor.execute("CREATE DATABASE %s WITH TEMPLATE %s_backup" % (
self.db_name, self.db_name))
connection.close()
# Change the connection to the new test database.
settings.DATABASE_NAME = self.db_name
connection.settings_dict["DATABASE_NAME"] = self.db_name
# Get a cursor (even though we don't need one yet). This has
# the side effect of initializing the test database.
connection.cursor()
return True
def drop(self):
"""Drop test database. This is a database dependant method. For now,
only works on Postgres."""
def drop_db(name):
"""TestDB.drop helper function"""
try:
connection.creation._destroy_test_db(name, verbosity=0)
except:
return None
return True
if not settings.DATABASE_ENGINE in ['sqlite3', 'postgresql_psycopg2']:
return None
connection.close()
if settings.DATABASE_ENGINE == 'postgresql_psycopg2':
connection.settings_dict["DATABASE_NAME"] = 'template1'
drop_db('%s_backup' % self.db_name)
drop_db(self.db_name)
drop_db(self.test_database_name)
# restore the connection to the original database.
settings.TEST_DATABASE_NAME = self.test_database_name
settings.DATABASE_NAME = self.database_name
connection.settings_dict["DATABASE_NAME"] = self.database_name
connection.cursor()
class DjangoThread(threading.Thread):
"""Django server control thread."""
def __init__(self, testdb):
"""Initialize CherryPy Django web server."""
super(DjangoThread, self).__init__()
testdb.initialize_test_db()
self.setDaemon(True)
def run(self):
"""Launch CherryPy Django web server."""
options = CPSERVER_OPTIONS
server = CherryPyWSGIServer(
(options['host'], int(options['port'])),
WSGIPathInfoDispatcher({
'/': WSGIHandler(),
urlparse(settings.MEDIA_URL).path: MediaHandler(
settings.MEDIA_ROOT),
settings.ADMIN_MEDIA_PREFIX: MediaHandler(
os.path.join(admin.__path__[0], 'media'))
}),
int(options['threads']), options['host'],
request_queue_size=int(options['request_queue_size']))
try:
server.start()
except KeyboardInterrupt:
server.stop()
def get_selenium_tests(testdb, test_labels=None):
"""Import selenium tests stored on path/SELENIUM_TESTS_PATH."""
def load_tests(module_path):
"""Import selenium tests."""
def add_fixtures(ctest):
"""Monkeypatch selenium tests to add django fixtures."""
def test_setup(funct):
"""Test setUp decorator to add fixture reloading."""
def decorated_setup():
"""Decorated test setup."""
testdb.reload_db()
funct()
return decorated_setup
for test in ctest._tests:
test.setUp = test_setup(test.setUp)
# Check dependencies before loading test.
tests = []
test_path = os.path.join(module_path, SELENIUM_TESTS_PATH)
if not os.path.isdir(test_path):
return tests
sys.path += [test_path]
# Monkeypatch selenium tests to reload fixtures into Django server db.
for filename in os.listdir(test_path):
if not re.search('^test_.+\.py$', filename):
continue
test_module = __import__(filename[:-len('.py')])
# Add all unittests from module
for test_name in test_module.__dict__:
test_case = test_module.__dict__[test_name]
if not (type(test_case) is type(unittest.TestCase) and \
issubclass(test_case, unittest.TestCase)):
continue
test = unittest.TestLoader().loadTestsFromTestCase(test_case)
# Setup fixtures for the test.
add_fixtures(test)
tests.append(test)
return tests
tests = []
if test_labels:
for label in test_labels:
tests += load_tests(os.path.dirname(get_app(label).__file__))
else:
for app in get_apps():
tests += load_tests(os.path.dirname(app.__file__))
return tests
def dependencies_met():
"""Check Selenium testing dependencies are met"""
# Check Java VM command line runner.
try:
Popen(['java'], shell=False, stderr=PIPE).communicate()[1]
except:
        print 'Dependency unmet. Java virtual machine command line runner not ' \
'found.'
return False
# Check selenium-server.jar is ready to run.
output = Popen(('java -jar %s -unrecognized_argument' % SELENIUM_RC_PATH
).split(), shell=False, stderr=PIPE).communicate()[1]
if not re.search('Usage: java -jar selenium-server.jar', output):
        print 'Dependency unmet. Selenium RC server (selenium-server.jar) not ' \
'found.'
return False
# Check selenium RC python driver is available.
try:
import selenium
except:
        print 'Dependency unmet. Selenium RC python driver (selenium.py) not ' \
'found.'
return False
# Check CherryPy wsgi server is available.
try:
import wsgiserver
except:
        print 'Dependency unmet. CherryPy wsgi server (wsgiserver.py) not found.'
return False
# Check fixture support is implemented for the database engine.
if not settings.DATABASE_ENGINE in ['sqlite3', 'postgresql_psycopg2']:
        print 'Dependency unmet. Fixture support for database engine %s not ' \
'implemented.' % settings.DATABASE_ENGINE
return False
return True
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=None):
"""Selenium Test runner."""
if not extra_tests:
extra_tests = []
dependencies = dependencies_met()
if dependencies and not extra_tests:
# Obtain a database test handler.
testdb = TestDB(TEST_DB_NAME, FIXTURES, verbosity=0)
extra_tests = get_selenium_tests(testdb, test_labels)
if dependencies and extra_tests:
print 'Preparing to run unittests and selenium tests.'
# Start selenium rc and Django servers.
selenium_rc = SeleniumRCThread(SELENIUM_RC_PATH)
selenium_rc.start()
django_server = DjangoThread(testdb)
django_server.start()
# Wait a couple of seconds for the servers to initialize.
sleep(5)
else:
extra_tests = []
print 'Running unittests but not selenium tests.'
results = base_run_tests(test_labels, verbosity, interactive, extra_tests)
if extra_tests:
# Stop selenium server, and drop test database
selenium_rc.stop()
testdb.drop()
return results
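# --- Illustrative sketch (editor's addition, not part of the original module). ---
# A hypothetical settings.py snippet wiring this runner into `manage.py test`;
# the first three names mirror the optional settings read near the top of this
# module, while the TEST_RUNNER path is an assumption based on this package layout:
#
#     SELENIUM_TESTS_PATH = 'tests/selenium'      # per-app selenium test_*.py files
#     FIXTURES = ['tests/data.json']              # fixtures loaded into the test DB
#     SELENIUM_PATH = '/opt/selenium'             # directory with selenium-server.jar
#     TEST_RUNNER = 'dstest.test_runner.run_tests'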
|
bsd-3-clause
| 8,599,218,500,867,330,000 | 38.75 | 80 | 0.626582 | false |
idegtiarov/ceilometer
|
ceilometer/network/floatingip.py
|
1
|
2715
|
# Copyright 2016 Sungard Availability Services
# Copyright 2016 Red Hat
# Copyright 2012 eNovance <licensing@enovance.com>
# Copyright 2013 IBM Corp
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log
from oslo_utils import timeutils
from ceilometer.agent import plugin_base
from ceilometer.i18n import _LW
from ceilometer import neutron_client
from ceilometer import sample
LOG = log.getLogger(__name__)
cfg.CONF.import_group('service_types', 'ceilometer.neutron_client')
class FloatingIPPollster(plugin_base.PollsterBase):
STATUS = {
'inactive': 0,
'active': 1,
'pending_create': 2,
}
def __init__(self):
self.neutron_cli = neutron_client.Client()
@property
def default_discovery(self):
return 'endpoint:%s' % cfg.CONF.service_types.neutron
@staticmethod
def _form_metadata_for_fip(fip):
"""Return a metadata dictionary for the fip usage data."""
metadata = {
'router_id': fip.get("router_id"),
'status': fip.get("status"),
'floating_network_id': fip.get("floating_network_id"),
'fixed_ip_address': fip.get("fixed_ip_address"),
'port_id': fip.get("port_id"),
'floating_ip_address': fip.get("floating_ip_address")
}
return metadata
def get_samples(self, manager, cache, resources):
for fip in self.neutron_cli.fip_get_all():
status = self.STATUS.get(fip['status'].lower())
if status is None:
LOG.warning(_LW("Invalid status, skipping IP address %s") %
fip['floating_ip_address'])
continue
res_metadata = self._form_metadata_for_fip(fip)
yield sample.Sample(
name='ip.floating',
type=sample.TYPE_GAUGE,
unit='ip',
volume=status,
user_id=fip.get('user_id'),
project_id=fip['tenant_id'],
resource_id=fip['id'],
timestamp=timeutils.utcnow().isoformat(),
resource_metadata=res_metadata
)
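# Illustrative note (added comment, not part of the original module): a floating
# IP whose fip['status'] is 'ACTIVE' maps to volume 1 via the STATUS table above
# and is emitted as an 'ip.floating' gauge sample, while an unrecognised status
# such as 'ERROR' is skipped with a warning instead of producing a sample.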
|
apache-2.0
| 4,804,211,110,120,645,000 | 32.9375 | 75 | 0.622468 | false |
miyucy/oppia
|
core/domain/stats_domain.py
|
1
|
4522
|
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Domain object for statistics models."""
__author__ = 'Sean Lip'
import copy
import operator
import re
from core.platform import models
(stats_models,) = models.Registry.import_models([models.NAMES.statistics])
class StateCounter(object):
"""Domain object that keeps counts associated with states.
All methods and properties in this file should be independent of the
specific storage model used.
"""
def __init__(self, first_entry_count, subsequent_entries_count,
resolved_answer_count, active_answer_count):
self.first_entry_count = first_entry_count
self.subsequent_entries_count = subsequent_entries_count
self.resolved_answer_count = resolved_answer_count
self.active_answer_count = active_answer_count
@property
def total_entry_count(self):
"""Total number of entries to the state."""
return self.first_entry_count + self.subsequent_entries_count
@property
def no_answer_count(self):
"""Number of times a reader left without entering an answer."""
return (self.first_entry_count + self.subsequent_entries_count
- self.resolved_answer_count - self.active_answer_count)
@classmethod
def get(cls, exploration_id, state_name):
state_counter_model = stats_models.StateCounterModel.get_or_create(
exploration_id, state_name)
return cls(
state_counter_model.first_entry_count,
state_counter_model.subsequent_entries_count,
state_counter_model.resolved_answer_count,
state_counter_model.active_answer_count
)
class StateRuleAnswerLog(object):
"""Domain object that stores answers which match different state rules.
All methods and properties in this file should be independent of the
specific storage model used.
"""
def __init__(self, answers):
# This dict represents a log of answers that hit this rule and that
# have not been resolved. The keys of this dict are the answers encoded
# as HTML strings, and the values are integer counts representing how
# many times the answer has been entered.
self.answers = copy.deepcopy(answers)
@property
def total_answer_count(self):
"""Total count of answers for this rule that have not been resolved."""
# TODO(sll): Cache this computed property.
total_count = 0
for answer, count in self.answers.iteritems():
total_count += count
return total_count
@classmethod
def get_multi(cls, exploration_id, rule_data):
"""Gets domain objects corresponding to the given rule data.
Args:
exploration_id: the exploration id
rule_data: a list of dicts, each with the following keys:
(state_name, handler_name, rule_str).
"""
# TODO(sll): Should each rule_str be unicode instead?
answer_log_models = (
stats_models.StateRuleAnswerLogModel.get_or_create_multi(
exploration_id, rule_data))
return [cls(answer_log_model.answers)
for answer_log_model in answer_log_models]
@classmethod
def get(cls, exploration_id, state_name, handler_name, rule_str):
# TODO(sll): Deprecate this method.
return cls.get_multi(exploration_id, [{
'state_name': state_name,
'handler_name': handler_name,
'rule_str': rule_str
}])[0]
def get_top_answers(self, N):
"""Returns the top N answers.
Args:
N: the maximum number of answers to return.
Returns:
A list of (answer, count) tuples for the N answers with the highest
counts.
"""
return sorted(
self.answers.iteritems(), key=operator.itemgetter(1),
reverse=True)[:N]
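# Illustrative example (added comment, not in the original file): for a rule log
# whose answers dict is {'A': 5, 'B': 2, 'C': 7}, get_top_answers(2) returns
# [('C', 7), ('A', 5)], the two (answer, count) pairs with the highest counts.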
|
apache-2.0
| -6,014,838,141,142,006,000 | 35.467742 | 79 | 0.654135 | false |
avanzosc/avanzosc6.1
|
avanzosc_tire_management/wizard/wizard_scratch.py
|
1
|
11421
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2011-2012 Daniel (Avanzosc) <http://www.avanzosc.com>
# 28/03/2012
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
from tools.translate import _
import wizard
import pooler
import Image
class wizard_tire_scratch (wizard.interface):
form1 = '''<?xml version="1.0"?>
<form string="Tire Change">
<field name="tire" width="250" height="50"/>
<separator string="Move Tire" colspan="6"/>
<field name="origin" width="250" height="50"/>
<field name="destination" width="250" height="50"/>
<field name="odometer" />
</form>'''
form1_fields = {
'tire': {
'string': 'Tire',
'type': 'many2one',
'relation': 'stock.production.lot',
'required': True,
'readonly': True
},
'origin': {
'string': 'Origin',
'type': 'many2one',
'relation': 'stock.location',
'required': True,
'readonly': True
},
'destination': {
'string': 'Destination',
'type': 'many2one',
'relation': 'stock.location',
'required': True,
'readonly': True
},
'odometer': {
'string': 'Odometer',
'type': 'integer',
},
}
form2 = '''<?xml version="1.0"?>
<form string="Tire move">
<separator string="Tire correctly moved! " colspan="4"/>
</form>'''
form2_fields = {}
def tire_init (self,cr,uid, data,context):
move_data = {}
pool = pooler.get_pool(cr.dbname)
tire_obj = pool.get('stock.production.lot')
move_obj = pool.get('stock.move')
loc_obj = pool.get('stock.location')
company_obj = pool.get('res.company')
data_obj = pool.get('tire.stock.lot')
tire_data_obj = data_obj
tire = tire_obj.browse(cr,uid,data['id'])
company=tire.company_id
move_list = move_obj.search(cr,uid,[('prodlot_id','=',tire.id)])
locat_default = company.tire_stock
destini = company.scratch.id
if move_list == []:
origin = locat_default.id
else:
loc_id = max(move_list)
move= move_obj.browse(cr,uid, loc_id)
origin = move.location_dest_id.id
move_data={'tire':tire.id, 'origin': origin, 'destination': destini}
return move_data
def tire_scratch (self,cr,uid, data,context):
pool = pooler.get_pool(cr.dbname)
tire_obj = pool.get('stock.production.lot')
move_obj = pool.get('stock.move')
vehic_obj = pool.get('fleet.vehicles')
loc_obj = pool.get('stock.location')
company_obj = pool.get('res.company')
tire_data_obj = pool.get('tire.stock.lot')
tire = tire_obj.browse(cr,uid,data['form']['tire'])
company=tire.company_id
move_list = move_obj.search(cr,uid,[('prodlot_id','=',tire.id)])
destination = loc_obj.browse (cr,uid,data['form']['destination'])
destination_name = destination.name
origin = loc_obj.browse (cr,uid,data['form']['origin'])
origin_name = origin.name
        # Check whether the origin location belongs to a vehicle
if origin.location_id:
loc_parent_ori = origin.location_id.id
if loc_parent_ori:
vehic_list = vehic_obj.search(cr,uid,[('buslocat','=',loc_parent_ori)])
else : vehic_list = []
if vehic_list ==[]:
ori_vehicle = False
res = 'error'
else:
vehicle = vehic_obj.browse(cr,uid,vehic_list[0])
ori_vehicle = True
res = 'moved'
else:
ori_vehicle = False
res = 'moved'
        # End of origin check
if ori_vehicle : # Origin = Vehicle
if origin_name.endswith("-1"):
update ={ 'f_l_tire' : False}
elif origin_name.endswith("-2"):
update ={ 'f_r_tire' : False}
if vehicle.tires == 6:
if origin_name.endswith("-3"):
update ={ 'r_l_tire1' : False}
elif origin_name.endswith("-4"):
update ={ 'r_l_tire2' : False}
elif origin_name.endswith("-5"):
update ={ 'r_r_tire2' : False}
elif origin_name.endswith("-6"):
update ={ 'r_r_tire1' : False}
elif vehicle.tires > 6:
if origin_name.endswith("-3"):
update ={ 'm_l_tire1' : False}
elif origin_name.endswith("-4"):
update ={ 'm_l_tire2' : False}
elif origin_name.endswith("-5"):
update ={ 'm_r_tire2' : False}
elif origin_name.endswith("-6"):
update ={ 'm_r_tire1' : False}
elif origin_name.endswith("-7"):
update ={ 'r_l_tire1' : False}
elif origin_name.endswith("-8"):
update ={ 'r_r_tire1' : False}
vehic_obj.write(cr,uid,vehicle.id,update)
        # Movement data
        product_id = tire.product_id
        # update the tire odometer
odometer = data['form']['odometer']
if move_list == []:
odometer_text = str(data['form']['odometer'])
tire_odometer = 1
if odometer_text == '0':
odometer = 1
tire_val= {'tire_km' : tire_odometer,'odometers' : odometer_text}
else:
if ori_vehicle :
loc_id = max(move_list)
move= move_obj.browse(cr,uid, loc_id)
result = int(odometer) - move.odometer
tire_odometer = tire.tire_km + result
if tire.odometers:
odometer_text = tire.odometers + "\n" + str(data['form']['odometer'])
else: odometer_text = str(data['form']['odometer'])
tire_val= {'tire_km' : tire_odometer, 'odometers' : odometer_text}
else:
if tire.odometers:
odometer_text = tire.odometers + "\n" + str(data['form']['odometer'])
else: odometer_text = str(data['form']['odometer'])
tire_val= {'odometers' : odometer_text}
tire_obj.write(cr,uid, tire.id,tire_val)
        # End of tire odometer update
        # Tire data
tire_data_list = tire_data_obj.search(cr,uid,[('lot_id','=',tire.id)])
if tire_data_list== []:
tire_data_val={
'name': origin.name + ' | ' + tire.name + ' => ' + destination.name,
'lot_id': tire.id,
'origin' : origin.id,
'destination': destination.id,
# 'data':time.strftime('%Y-%m-%d %H:%M:%S'),
'odomold' : 0,
'odomnew' : 0,
'tire_km' : 0,
'tire_km_total': tire.tire_km
}
else :
tire_data_id = max(tire_data_list)
tire_data = tire_data_obj.browse(cr,uid,tire_data_id)
tire_data_val={
'name': origin.name + ' | ' + tire.name + ' => ' + destination.name,
'lot_id': tire.id,
'origin' : origin.id,
'destination': destination.id,
# 'data':time.strftime('%Y-%m-%d %H:%M:%S'),
}
if ori_vehicle: # Update odometer from vehicle
tire_data_val['odomold'] = tire_data.odomnew
tire_data_val['odomnew'] = odometer
tire_data_val['tire_km'] = odometer - tire_data.odomnew
tire_data_val['tire_km_total'] = tire_data.tire_km_total + odometer - tire_data.odomnew
else:
tire_data_val['odomold'] = tire_data.odomnew
tire_data_val['odomnew'] = odometer
tire_data_val['tire_km'] = 0
tire_data_val['tire_km_total'] = tire.tire_km
        # End of tire data
        # Movement data
move_data = {'product_id' : tire.product_id.id,
'name' : origin.name + ' | ' + tire.name + ' => ' + destination.name,
'location_id' : origin.id,
'product_uom': tire.product_id.product_tmpl_id.uom_id.id,
'prodlot_id' : tire.id,
'location_dest_id': destination.id,
'odometer': odometer
}
        # create the movement record
move_id = move_obj.create(cr,uid,move_data)
        # End of movement data
        # Update the tire
tire_obj.write(cr,uid, tire.id,{'tire_km' : tire_data_val['tire_km_total'], 'odometers' : odometer_text})
        # create the tire data record
move_data_reg = move_obj.browse(cr,uid,move_id)
tire_data_val['data']= move_data_reg.date
data_id= tire_data_obj.create(cr,uid,tire_data_val)
        # End of tire data
res = 'moved'
return res
states = {
'init': {
'actions': [tire_init],
'result': {'type': 'form', 'arch':form1, 'fields':form1_fields, 'state': [('end', 'Cancel','gtk-cancel'),('waste', 'Accept','gtk-ok')]}
},
'waste': {
'actions' : [],
'result': {'type': 'choice', 'next_state': tire_scratch}
},
'moved': {
'actions' : [],
'result': {'type': 'form', 'arch':form2, 'fields':form2_fields,'state': [('end', 'Accept','gtk-ok')]}
}
}
wizard_tire_scratch('tire.scratch')
|
agpl-3.0
| -8,667,324,019,660,210,000 | 41.136531 | 156 | 0.463917 | false |
qsnake/py2js
|
py2js/decorator.py
|
1
|
2070
|
from py2js import convert_py2js
import inspect
class JavaScript(object):
"""
Decorator that you can use to convert methods to JavaScript.
For example this code::
@JavaScript
class TestClass(object):
def __init__(self):
alert('TestClass created')
self.reset()
def reset(self):
self.value = 0
def inc(self):
alert(self.value)
self.value += 1
print str(TestClass)
prints::
function TestClass() {
return new _TestClass();
}
function _TestClass() {
this.__init__();
}
_TestClass.__name__ = 'TestClass'
_TestClass.prototype.__class__ = _TestClass
_TestClass.prototype.__init__ = function() {
alert("TestClass created");
this.reset();
}
_TestClass.prototype.reset = function() {
this.value = 0;
}
_TestClass.prototype.inc = function() {
alert(this.value);
this.value += 1;
}
Alternatively, an equivalent way is to use JavaScript() as a function:
class TestClass(object):
def __init__(self):
alert('TestClass created')
self.reset()
def reset(self):
self.value = 0
def inc(self):
alert(self.value)
self.value += 1
print str(JavaScript(TestClass))
If you want to call the original function/class as Python, use the
following syntax::
js = JavaScript(TestClass)
test_class = js() # Python instance of TestClass() will be created
js_class = str(js) # A string with the JS code
"""
def __init__(self, obj):
self._obj = obj
obj_source = inspect.getsource(obj)
self._js = convert_py2js(obj_source)
def __str__(self):
return self._js
def __call__(self, *args, **kwargs):
return self._obj(*args, **kwargs)
|
mit
| -6,334,131,527,294,003,000 | 24.243902 | 74 | 0.513043 | false |
pizzapanther/Church-Source
|
churchsource/configuration/migrations/0003_face_search.py
|
1
|
1152
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from churchsource.configuration.models import Setting
class Migration(DataMigration):
def forwards(self, orm):
s = Setting(skey='FACE_SEARCH_ENABLE', name='When True, enables check-in by facial recognition', stype='tf', value='True')
s.save()
def backwards(self, orm):
"Write your backwards methods here."
models = {
'configuration.setting': {
'Meta': {'ordering': "('skey',)", 'object_name': 'Setting'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'skey': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'}),
'stype': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['configuration']
|
gpl-3.0
| 844,289,280,137,038,200 | 37.4 | 130 | 0.591146 | false |
ECP-CANDLE/Benchmarks
|
Pilot2/P2B1/pilot2_datasets.py
|
1
|
1104
|
data_sets = {
'3k_Disordered' : ('3k_run10_10us.35fs-DPPC.10-DOPC.70-CHOL.20.dir', 'ece75b704ec63ac9c39afd74b63497dc'),
'3k_Ordered' : ('3k_run32_10us.35fs-DPPC.50-DOPC.10-CHOL.40.dir', '211e1bcf46a3f19a978e4af63f067ce0'),
'3k_Ordered_and_gel' : ('3k_run43_10us.35fs-DPPC.70-DOPC.10-CHOL.20.dir', '87032ff78e4d01739aef5c6c0f5e4f04'),
'6k_Disordered' : ('6k_run10_25us.35fs-DPPC.10-DOPC.70-CHOL.20.dir', '13404cb8225819577e4821a976e9203b'),
'6k_Ordered' : ('6k_run32_25us.35fs-DPPC.50-DOPC.10-CHOL.40.dir', '95ef068b8deb69302c97f104b631d108'),
'6k_Ordered_and_gel' : ('6k_run43_25us.35fs-DPPC.70-DOPC.10-CHOL.20.dir', '3353e86d1cc2670820678c4c0c356206')
}
from collections import OrderedDict
def gen_data_set_dict():
# Generating names for the data set
names= {'x' : 0, 'y' : 1, 'z' : 2,
'CHOL' : 3, 'DPPC' : 4, 'DIPC' : 5,
'Head' : 6, 'Tail' : 7}
for i in range(12):
temp = 'BL'+str(i+1)
names.update({temp : i+8})
# dictionary sorted by value
fields=OrderedDict(sorted(names.items(), key=lambda t: t[1]))
return fields
|
mit
| 3,668,649,857,986,627,600 | 45 | 112 | 0.653986 | false |
polyaxon/polyaxon
|
core/polyaxon/deploy/schemas/auth.py
|
1
|
1238
|
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from marshmallow import fields
from polyaxon.schemas.base import BaseCamelSchema, BaseConfig
class AuthSchema(BaseCamelSchema):
enabled = fields.Bool(allow_none=True)
external = fields.Str(allow_none=True)
use_resolver = fields.Bool(allow_none=True)
@staticmethod
def schema_config():
return AuthConfig
class AuthConfig(BaseConfig):
SCHEMA = AuthSchema
REDUCED_ATTRIBUTES = ["enabled", "external", "useResolver"]
def __init__(self, enabled=None, external=None, use_resolver=None):
self.enabled = enabled
self.external = external
self.use_resolver = use_resolver
|
apache-2.0
| 7,205,230,924,192,353,000 | 30.74359 | 74 | 0.727787 | false |
iarna/joe-editor
|
tests/joefx/rcfile.py
|
1
|
13393
|
import os
from .exceptions import *
GLOBAL_OPTS = set([
'help_is_utf8', 'mid', 'marking', 'asis', 'force', 'nolocks', 'nomodcheck', 'nocurdir',
'nobackups', 'nodeadjoe', 'break_hardlinks', 'break_links', 'lightoff', 'exask', 'beep',
'nosta', 'keepup', 'pg', 'undo_keep', 'csmode', 'backpath', 'floatmouse', 'rtbutton',
'nonotice', 'noexmsg', 'noxon', 'orphan', 'dopadding', 'lines', 'baud', 'columns',
'help', 'skiptop', 'notite', 'nolinefeeds', 'usetabs', 'assume_color', 'assume_256color',
'guess_non_utf8', 'guess_utf8', 'guess_utf16', 'guess_crlf', 'guess_indent', 'menu_above',
'transpose', 'menu_explorer', 'menu_jump', 'notagsmenu', 'icase', 'wrap', 'autoswap',
'joe_state', 'mouse', 'joexterm', 'brpaste', 'pastehack', 'square', 'text_color',
'status_color', 'help_color', 'menu_color', 'prompt_color', 'msg_color', 'restore',
'search_prompting', 'regex', 'lmsg', 'rmsg', 'smsg', 'zmsg', 'xmsg', 'highlight', 'istep',
'wordwrap', 'autoindent'
])
FILE_OPTS = set([
'cpara', 'cnotpara', 'encoding', 'syntax', 'hex', 'highlight', 'smarthome', 'indentfirst',
'smartbacks', 'tab', 'indentc', 'spaces', 'istep', 'purify', 'crlf', 'wordwrap', 'nobackup',
'autoindent', 'overwrite', 'picture', 'lmargin', 'rmargin', 'flowed', 'french', 'linums',
'rdonly', 'keymap', 'lmsg', 'rmsg', 'mfirst', 'mnew', 'mold', 'msnew', 'msold',
'highlighter_context', 'single_quoted', 'no_double_quoted', 'c_comment', 'cpp_comment',
'pound_comment', 'vhdl_comment', 'semi_comment', 'tex_comment', 'text_delimiters',
])
OPTS_WITH_ARGS = set([
# Global
'undo_keep', 'backpath', 'lines', 'baud', 'columns', 'skiptop', 'text_color', 'status_color',
'help_color', 'menu_color', 'prompt_color', 'msg_color', 'lmsg', 'rmsg', 'smsg', 'zmsg',
# File
'cpara', 'cnotpara', 'encoding', 'syntax', 'tab', 'indentc', 'istep', 'lmargin', 'rmargin',
'keymap', 'mfirst', 'mnew', 'mold', 'msnew', 'msold', 'text_delimiters'
])
class RCFile(object):
def __init__(self):
self.globalopts = Options(GLOBAL_OPTS)
self.fileopts = []
self.help = []
self.menus = []
self.macros = []
self.bindings = []
def serialize(self):
result = []
result.extend(self.globalopts.serialize())
for section in (self.fileopts, self.help, self.menus, self.macros, self.bindings):
for item in section:
result.extend(item.serialize())
return b'\n'.join((item.encode('utf-8') if isinstance(item, str) else item) for item in result)
def clone(self):
other = RCFile()
other.globalopts = self.globalopts.clone()
other.fileopts = [fopt.clone() for fopt in self.fileopts]
other.help = [help.clone() for help in self.help]
other.menus = [menu.clone() for menu in self.menus]
other.macros = [macro.clone() for macro in self.macros]
other.bindings = [binding.clone() for binding in self.bindings]
return other
class Options(object):
def __init__(self, properties):
self._properties = properties
self._values = {}
def __getattr__(self, name):
return self.getValue(name)
def __setattr__(self, name, value):
if name.startswith('_'):
object.__setattr__(self, name, value)
else:
self.setValue(name, value)
def getValue(self, name):
if name not in self._properties:
raise InvalidProperty(name)
if name not in self._values:
return None
else:
return self._values[name]
def setValue(self, name, value):
if name not in self._properties:
raise InvalidProperty(name)
else:
if (name in OPTS_WITH_ARGS) == isinstance(value, bool):
raise InvalidPropertyValue(name)
self._values[name] = value
def serialize(self):
result = []
for k, v in self._values.items():
if v is True:
result.append('-' + k)
elif v is False:
result.append('--' + k)
elif v is not None:
result.append('-%s %s' % (k, v))
return result
def clone(self):
other = Options(self._properties)
other._values.update(self._values)
return other
class FileOptions(object):
def __init__(self):
self.name = ''
self.extensions = []
self.patterns = []
self.options = Options(FILE_OPTS)
def serialize(self):
result = []
result.append('[%s]' % self.name)
result.extend(self.extensions)
result.extend('+' + pat for pat in self.patterns)
result.extend(self.options.serialize())
return result
def clone(self):
other = FileOptions()
other.name = self.name
other.extensions = self.extensions[:]
other.patterns = self.patterns[:]
other.options = self.options.clone()
return other
class Menu(object):
def __init__(self):
self.name = ''
self.back = ''
self.items = []
def serialize(self):
result = [':defmenu %s %s' % (self.name, self.back)]
result.extend(item.serialize() for item in self.items)
return result
def clone(self):
other = Menu()
other.name = self.name
other.back = self.back
other.items = [item.clone() for item in self.items]
return other
class MenuItem(object):
def __init__(self):
self.macro = ''
self.label = ''
def serialize(self):
return '%s\t%s' % (self.macro, self.label)
def clone(self):
other = MenuItem()
other.macro = self.macro
other.label = self.label
return other
class HelpScreen(object):
def __init__(self):
self.name = ''
self.content = []
def serialize(self):
return ['{' + self.name] + self.content + ['}']
def clone(self):
other = HelpScreen()
other.name = self.name
other.content = self.content[:]
return other
class KeyBindingCollection(object):
def __init__(self):
self.name = ''
self.inherits = None
self.bindings = []
def serialize(self):
if not self.name:
# Uninitialized
return []
result = [':' + self.name]
if self.inherits is not None:
result.append(':inherit ' + self.inherits)
result.extend([f.serialize() for f in self.bindings])
return result
def clone(self):
other = KeyBindingCollection()
other.name = self.name
other.inherits = self.inherits
other.bindings = [b.clone() for b in self.bindings]
return other
class Binding(object):
def __init__(self):
self.macro = None
self.keys = []
def serialize(self):
return self.macro + ' ' + ' '.join(self.keys)
def clone(self):
other = Binding()
other.macro = self.macro
other.keys = self.keys[:]
return other
class MacroDefinition(object):
def __init__(self):
self.name = None
self.macro = None
def serialize(self):
return [':def %s %s' % (self.name, self.macro)]
def clone(self):
other = MacroDefinition()
other.name = self.name
other.macro = self.macro
return other
class ParserState(object):
def __init__(self, rcfile, filegen):
self.rcfile = rcfile
self.file = filegen
self.curline = None
def begin(self):
try:
self.parseglobal()
self.parsefileopts()
self.parsemenus()
self.parsehelp()
self.parsebindings()
except StopIteration:
pass
def parseglobal(self):
while True:
line = self.nextnows()
if line.startswith('-'):
self.parseoption(self.rcfile.globalopts)
else:
break
def parseoption(self, opts):
mode = not self.curline.startswith('--')
parts = self.curline.split(None, 1)
optionName = parts[0][1:]
if len(parts) == 1 or optionName not in OPTS_WITH_ARGS:
opts.setValue(optionName, mode)
else:
opts.setValue(optionName, self.curline[len(parts[0]) + 1:].rstrip('\r\n'))
def parsemacro(self, line):
i = 0
q = False
bs = False
while i < len(line):
c = line[i]
if q:
if bs:
bs = False
elif c == '\\':
bs = True
elif c == '"':
q = False
elif c == '"':
q = True
elif c.isspace():
return line[:i], line[i:].lstrip()
i += 1
return line, ''
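    # Illustrative example (added comment, not in the original file):
    # parsemacro('"uarw,rtn" ^[ n') splits at the first unquoted space and
    # returns ('"uarw,rtn"', '^[ n').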
def parsefileopts(self):
while self.curline.startswith('['):
filetype = FileOptions()
filetype.name = self.curline.strip().strip('[]')
while True:
line = self.nextnows()
if line.startswith('*'):
filetype.extensions.append(line.strip())
elif line.startswith('+'):
filetype.patterns.append(line[1:].strip())
elif line.startswith('-'):
self.parseoption(filetype.options)
else:
break
self.rcfile.fileopts.append(filetype)
def parsemenus(self):
while self.curline.startswith(':defmenu'):
menu = Menu()
parts = self.curline.strip().split(None, 2)
menu.name = parts[1]
if len(parts) == 3:
menu.back = parts[2]
while True:
line = self.nextnows()
if line.startswith(':') or line.startswith('{'):
break
macro, rest = self.parsemacro(line)
item = MenuItem()
item.macro = macro
item.label = rest.strip()
menu.items.append(item)
self.rcfile.menus.append(menu)
def parsehelp(self):
while self.curline.startswith('{'):
screen = HelpScreen()
screen.name = self.curline[1:].strip()
while not self.nextbytes().startswith(b'}'):
screen.content.append(self.curline.rstrip(b'\r\n'))
self.rcfile.help.append(screen)
self.nextnows()
def parsebindings(self):
currentSection = None
while True:
if self.curline.startswith(':def '):
# Macro
macro = MacroDefinition()
_def, macro.name, macro.macro = self.curline.split(None, 2)
self.rcfile.macros.append(macro)
elif self.curline.startswith(':inherit '):
# Inheritance specification
currentSection.inherits = self.curline[len(':inherit '):].strip()
elif self.curline.startswith(':'):
# New section
currentSection = KeyBindingCollection()
self.rcfile.bindings.append(currentSection)
parts = self.curline.split()
currentSection.name = parts[0][1:]
else:
# Binding
binding = Binding()
binding.macro, keystr = self.parsemacro(self.curline)
# Split out keys
keys = keystr.split()
for k in keys:
if self.iskey(k):
binding.keys.append(k)
else:
break
currentSection.bindings.append(binding)
self.nextnows()
def iskey(self, k):
if len(k) == 1: return True
if k.startswith('U+'): return True
if k.startswith('^') and len(k) == 2: return True
if k.startswith('.k') and len(k) == 3: return True
        if k in ('MDOWN', 'MDRAG', 'MUP', 'M2DOWN', 'M2DRAG', 'M2UP', 'M3DOWN', 'M3DRAG',
                 'M3UP', 'MWDOWN', 'MWUP', 'SP', 'TO'):
return True
return False
def nextbytes(self):
self.curline = next(self.file)
return self.curline
def next(self):
self.curline = next(self.file).decode('utf-8').strip('\r\n')
return self.curline
def nextnows(self):
while True:
line = self.next()
if len(line.strip()) > 0 and not line[0].isspace():
return line
def readFile(filename):
with open(filename, 'rb') as f:
for line in f:
if line.startswith(b':include'):
args = line.decode('utf-8').split()
for included in readFile(os.path.join(os.path.dirname(filename), args[1])):
yield included
else:
yield line
def parse(filename):
result = RCFile()
ParserState(result, readFile(filename)).begin()
return result
|
gpl-2.0
| 3,460,974,747,108,257,300 | 31.987685 | 103 | 0.519749 | false |
mihaip/streamspigot
|
app/tweetdigest/data.py
|
1
|
8085
|
import calendar
import datetime
import itertools
import os
import re
import time
import zlib
from base.constants import CONSTANTS
from datasources import thumbnails, twitter, twitterappengine, twitterdisplay
from datasources.oauth_keys import SERVICE_PROVIDERS
TWITTER_SERVICE_PROVIDER = SERVICE_PROVIDERS['tweetdigest:twitter']
DIGEST_LENGTH_SEC = 60 * 60 * 24
TWITTER_USERNAME_RE = re.compile('^[a-zA-Z0-9_]{1,15}$')
def _get_digest_twitter_api(max_cache_age, key):
# We don't actually need to use authentication for any of the data that
# we fetch, but then we end up with IP address-based rate limiting, which
# is depleted very quickly on App Engine (where there aren't a lot of
# externally visible IP addresses). We therefore authenticate anyway, and we
# spread that load over a few accounts. To ensure consistency (since
# python-twitter incorporates the access token in the cache key), we always
# want to consistently use the same access token for the same request, hence
# the hashing based on the key that's passed in.
access_token = TWITTER_SERVICE_PROVIDER.access_tokens[
zlib.adler32(key.encode('utf-8')) %
len(TWITTER_SERVICE_PROVIDER.access_tokens)]
api = twitter.Api(
consumer_key=TWITTER_SERVICE_PROVIDER.consumer.key,
consumer_secret=TWITTER_SERVICE_PROVIDER.consumer.secret,
access_token_key=access_token.key,
access_token_secret=access_token.secret,
cache=twitterappengine.MemcacheCache())
api.SetCacheTimeout(max_cache_age)
api.SetUserAgent('StreamSpigot/%s (+%s)' % (
os.environ.get('CURRENT_VERSION_ID', '1'),
CONSTANTS.APP_URL,
))
return api
def _get_digest_timestamps():
# From the current time
now = time.gmtime()
# Go back to midnight
digest_end_time = calendar.timegm([
now.tm_year,
now.tm_mon,
now.tm_mday,
0,
0,
0,
now.tm_wday,
now.tm_yday,
now.tm_isdst
])
digest_start_time = digest_end_time - DIGEST_LENGTH_SEC
# Twitter data can be as stale as the digest end time, since we don't care
# about anything more recent (there may be some concurrency issues with
    # parallel invocations, but they're unlikely to actually matter at the load
    # we're expecting).
max_cache_age = calendar.timegm(now) - digest_end_time
return digest_start_time, digest_end_time, max_cache_age
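# Worked example (added comment, not in the original file): if the current UTC
# time is 2012-05-03 14:20, digest_end_time is midnight 2012-05-03 00:00,
# digest_start_time is 24 hours earlier (2012-05-02 00:00), and max_cache_age is
# the ~14.3 hours elapsed since midnight, expressed in seconds.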
def get_digest_dates():
digest_start_time, digest_end_time, max_cache_age = _get_digest_timestamps()
return (datetime.datetime.fromtimestamp(digest_start_time),
datetime.datetime.fromtimestamp(digest_end_time))
def _process_digest_statuses(
statuses,
digest_start_time,
digest_end_time,
error_info,
dev_mode,
timezone=None):
if not dev_mode:
# Filter them for the ones that fall in the window
digest_statuses = [
s for s in statuses
if s.created_at_in_seconds <= digest_end_time and
s.created_at_in_seconds > digest_start_time
]
else:
digest_statuses = statuses
# Order them in chronological order
digest_statuses.sort(
lambda x, y: int(x.created_at_in_seconds - y.created_at_in_seconds))
if dev_mode:
digest_statuses.reverse()
# Group them by username
status_groups = []
for username, statuses in itertools.groupby(
digest_statuses, lambda status: status.user.id):
statuses = list(statuses)
status_groups.append(twitterdisplay.DisplayStatusGroup(
user=statuses[0].user,
statuses=statuses,
thumbnail_size=thumbnails.SMALL_THUMBNAIL,
timezone=timezone))
return status_groups, error_info
class TwitterFetcher(object):
def fetch(self):
data, had_error = twitterappengine.exec_twitter_api(
self._fetch, error_detail=self._id())
return data or [], had_error
class ListTwitterFetcher(TwitterFetcher):
def __init__(self, api, list_owner, list_id, digest_start_time):
self._api = api
self._list_owner = list_owner
self._list_id = list_id
self._digest_start_time = digest_start_time
def _fetch(self):
statuses = []
while True:
max_id = len(statuses) and statuses[-1].id - 1 or None
chunk = self._api.GetListTimeline(
slug=self._list_id,
owner_screen_name=self._list_owner,
max_id=max_id,
count=40,
include_rts=True,
include_entities=True)
statuses.extend(chunk)
if not chunk or \
chunk[-1].created_at_in_seconds < self._digest_start_time:
break
return statuses
def _id(self):
return 'list "%s/%s"' % (self._list_owner, self._list_id)
class UserTwitterFetcher(TwitterFetcher):
def __init__(
self,
api,
username,
digest_start_time,
digest_end_time,
dev_mode):
self._api = api
self._username = username
self._digest_start_time = digest_start_time
self._digest_end_time = digest_end_time
self._dev_mode = dev_mode
def _fetch(self):
timeline = self._api.GetUserTimeline(
screen_name=self._username,
count=40)
if not self._dev_mode:
# We do the filtering now, so that we don't look up user objects that
# we don't need.
timeline = [
s for s in timeline
if s.created_at_in_seconds <= self._digest_end_time and
s.created_at_in_seconds > self._digest_start_time
]
return timeline
def _id(self):
return 'user "%s"' % self._username
def get_digest_for_list(list_owner, list_id, dev_mode):
digest_start_time, digest_end_time, max_cache_age = _get_digest_timestamps()
api = _get_digest_twitter_api(
max_cache_age, key='%s/%s' % (list_owner, list_id))
user, had_error = twitterappengine.exec_twitter_api(
lambda: api.GetUser(screen_name=list_owner, include_entities=False),
error_detail='user %s' % list_owner)
if not had_error:
timezone = twitterdisplay.get_timezone_for_user(user)
else:
timezone = None
fetcher = ListTwitterFetcher(api, list_owner, list_id, digest_start_time)
statuses, had_error = fetcher.fetch()
return _process_digest_statuses(
statuses,
digest_start_time,
digest_end_time,
had_error,
dev_mode,
timezone=timezone)
def get_digest_for_usernames(usernames, dev_mode):
digest_start_time, digest_end_time, max_cache_age = _get_digest_timestamps()
statuses = []
error_usernames = []
for username in usernames:
api = _get_digest_twitter_api(max_cache_age, key=username)
fetcher = UserTwitterFetcher(
api,
username,
digest_start_time,
digest_end_time,
dev_mode)
user_statuses, had_error = fetcher.fetch()
if had_error:
error_usernames.append(username)
else:
statuses.extend(user_statuses)
return _process_digest_statuses(
statuses,
digest_start_time,
digest_end_time,
error_usernames,
dev_mode,
timezone=None)
class UserListsTwitterFetcher(TwitterFetcher):
def __init__(self, api, username):
self._api = api
self._username = username
def _fetch(self):
return self._api.GetLists(screen_name=self._username)
def _id(self):
return 'lists "%s"' % self._username
def get_lists(username):
api = _get_digest_twitter_api(3600, key=username)
fetcher = UserListsTwitterFetcher(api, username)
lists, had_error = fetcher.fetch()
return had_error and None or lists
def is_valid_twitter_username(username):
return TWITTER_USERNAME_RE.match(username) is not None
|
apache-2.0
| -2,630,944,226,292,411,400 | 31.34 | 80 | 0.624861 | false |
bdang2012/taiga-back-casting
|
taiga/hooks/bitbucket/services.py
|
1
|
1732
|
# Copyright (C) 2014-2015 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014-2015 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2015 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import uuid
from django.core.urlresolvers import reverse
from django.conf import settings
from taiga.users.models import User
from taiga.base.utils.urls import get_absolute_url
# Set this in settings.PROJECT_MODULES_CONFIGURATORS["bitbucket"]
def get_or_generate_config(project):
config = project.modules_config.config
if config and "bitbucket" in config:
g_config = project.modules_config.config["bitbucket"]
else:
g_config = {
"secret": uuid.uuid4().hex,
"valid_origin_ips": settings.BITBUCKET_VALID_ORIGIN_IPS,
}
url = reverse("bitbucket-hook-list")
url = get_absolute_url(url)
url = "%s?project=%s&key=%s" % (url, project.id, g_config["secret"])
g_config["webhooks_url"] = url
return g_config
def get_bitbucket_user(user_id):
return User.objects.get(is_system=True, username__startswith="bitbucket")
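# Illustrative example (added comment, not part of the original module): for a
# project with id 7 and a freshly generated secret "abc123",
# get_or_generate_config() returns a dict of the form
# {"secret": "abc123", "valid_origin_ips": settings.BITBUCKET_VALID_ORIGIN_IPS,
#  "webhooks_url": "<absolute bitbucket-hook URL>?project=7&key=abc123"}.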
|
agpl-3.0
| 4,315,237,863,237,007,000 | 37.444444 | 77 | 0.723121 | false |
alexryndin/ambari
|
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py
|
1
|
5572
|
#!/usr/bin/python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions.check_process_status import check_process_status
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.constants import StackFeature
from resource_management.core.exceptions import Fail
from resource_management.core.resources.system import Execute
from resource_management.libraries.providers.hdfs_resource import WebHDFSUtil
from resource_management.libraries.providers.hdfs_resource import HdfsResourceProvider
from resource_management import is_empty
from resource_management import shell
from resource_management.libraries.functions.decorator import retry
from resource_management.core.logger import Logger
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions import conf_select, stack_select
from livy_service import livy_service
from setup_livy import setup_livy
class LivyServer(Script):
def install(self, env):
import params
env.set_params(params)
self.install_packages(env)
def configure(self, env, upgrade_type=None):
import params
env.set_params(params)
setup_livy(env, 'server', upgrade_type=upgrade_type, action = 'config')
def start(self, env, upgrade_type=None):
import params
env.set_params(params)
if params.has_ats and params.has_livyserver:
Logger.info("Verifying DFS directories where ATS stores time line data for active and completed applications.")
self.wait_for_dfs_directories_created([params.entity_groupfs_store_dir, params.entity_groupfs_active_dir])
self.configure(env)
livy_service('server', upgrade_type=upgrade_type, action='start')
def stop(self, env, upgrade_type=None):
import params
env.set_params(params)
livy_service('server', upgrade_type=upgrade_type, action='stop')
def status(self, env):
import status_params
env.set_params(status_params)
check_process_status(status_params.livy_server_pid_file)
# TODO move out and compose with similar method in resourcemanager.py
def wait_for_dfs_directories_created(self, dirs):
import params
ignored_dfs_dirs = HdfsResourceProvider.get_ignored_resources_list(params.hdfs_resource_ignore_file)
if params.security_enabled:
Execute(format("{kinit_path_local} -kt {livy_kerberos_keytab} {livy_principal}"),
user=params.livy_user
)
Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
user=params.hdfs_user
)
for dir_path in dirs:
self.wait_for_dfs_directory_created(dir_path, ignored_dfs_dirs)
@retry(times=8, sleep_time=20, backoff_factor=1, err_class=Fail)
def wait_for_dfs_directory_created(self, dir_path, ignored_dfs_dirs):
import params
if not is_empty(dir_path):
dir_path = HdfsResourceProvider.parse_path(dir_path)
if dir_path in ignored_dfs_dirs:
Logger.info("Skipping DFS directory '" + dir_path + "' as it's marked to be ignored.")
return
Logger.info("Verifying if DFS directory '" + dir_path + "' exists.")
dir_exists = None
if WebHDFSUtil.is_webhdfs_available(params.is_webhdfs_enabled, params.default_fs):
        # checking with webhdfs is much faster than executing hdfs dfs -test
util = WebHDFSUtil(params.hdfs_site, params.hdfs_user, params.security_enabled)
list_status = util.run_command(dir_path, 'GETFILESTATUS', method='GET', ignore_status_codes=['404'], assertable_result=False)
dir_exists = ('FileStatus' in list_status)
else:
        # have to do the time-expensive hdfs dfs -test -d check.
dfs_ret_code = shell.call(format("hdfs --config {hadoop_conf_dir} dfs -test -d " + dir_path), user=params.livy_user)[0]
dir_exists = not dfs_ret_code #dfs -test -d returns 0 in case the dir exists
if not dir_exists:
raise Fail("DFS directory '" + dir_path + "' does not exist !")
else:
Logger.info("DFS directory '" + dir_path + "' exists.")
def get_component_name(self):
return "livy-server"
def pre_upgrade_restart(self, env, upgrade_type=None):
import params
env.set_params(params)
if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
Logger.info("Executing Livy Server Stack Upgrade pre-restart")
conf_select.select(params.stack_name, "spark", params.version)
stack_select.select("livy-server", params.version)
def get_log_folder(self):
import params
return params.livy_log_dir
def get_user(self):
import params
return params.livy_user
if __name__ == "__main__":
LivyServer().execute()
|
apache-2.0
| -8,514,305,504,603,228,000 | 37.427586 | 133 | 0.727925 | false |
MasterX1582/bitcoin-becoin
|
qa/rpc-tests/nodehandling.py
|
1
|
3623
|
#!/usr/bin/env python2
# Copyright (c) 2014 The BeCoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test node handling
#
from test_framework.test_framework import BeCoinTestFramework
from test_framework.util import *
import base64
try:
import http.client as httplib
except ImportError:
import httplib
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
class NodeHandlingTest (BeCoinTestFramework):
def run_test(self):
###########################
# setban/listbanned tests #
###########################
assert_equal(len(self.nodes[2].getpeerinfo()), 4) #we should have 4 nodes at this point
self.nodes[2].setban("127.0.0.1", "add")
        time.sleep(3) #wait till the nodes are disconnected
assert_equal(len(self.nodes[2].getpeerinfo()), 0) #all nodes must be disconnected at this point
assert_equal(len(self.nodes[2].listbanned()), 1)
self.nodes[2].clearbanned()
assert_equal(len(self.nodes[2].listbanned()), 0)
self.nodes[2].setban("127.0.0.0/24", "add")
assert_equal(len(self.nodes[2].listbanned()), 1)
try:
self.nodes[2].setban("127.0.0.1", "add") #throws exception because 127.0.0.1 is within range 127.0.0.0/24
except:
pass
assert_equal(len(self.nodes[2].listbanned()), 1) #still only one banned ip because 127.0.0.1 is within the range of 127.0.0.0/24
try:
self.nodes[2].setban("127.0.0.1", "remove")
except:
pass
assert_equal(len(self.nodes[2].listbanned()), 1)
self.nodes[2].setban("127.0.0.0/24", "remove")
assert_equal(len(self.nodes[2].listbanned()), 0)
self.nodes[2].clearbanned()
assert_equal(len(self.nodes[2].listbanned()), 0)
##test persisted banlist
self.nodes[2].setban("127.0.0.0/32", "add")
self.nodes[2].setban("127.0.0.0/24", "add")
self.nodes[2].setban("192.168.0.1", "add", 1) #ban for 1 seconds
self.nodes[2].setban("2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19", "add", 1000) #ban for 1000 seconds
        listBeforeShutdown = self.nodes[2].listbanned()
assert_equal("192.168.0.1/255.255.255.255", listBeforeShutdown[2]['address']) #must be here
time.sleep(2) #make 100% sure we expired 192.168.0.1 node time
#stop node
stop_node(self.nodes[2], 2)
self.nodes[2] = start_node(2, self.options.tmpdir)
        listAfterShutdown = self.nodes[2].listbanned()
assert_equal("127.0.0.0/255.255.255.0", listAfterShutdown[0]['address'])
assert_equal("127.0.0.0/255.255.255.255", listAfterShutdown[1]['address'])
assert_equal("2001:4000::/ffff:e000:0:0:0:0:0:0", listAfterShutdown[2]['address'])
###########################
# RPC disconnectnode test #
###########################
url = urlparse.urlparse(self.nodes[1].url)
self.nodes[0].disconnectnode(url.hostname+":"+str(p2p_port(1)))
time.sleep(2) #disconnecting a node needs a little bit of time
for node in self.nodes[0].getpeerinfo():
assert(node['addr'] != url.hostname+":"+str(p2p_port(1)))
connect_nodes_bi(self.nodes,0,1) #reconnect the node
found = False
for node in self.nodes[0].getpeerinfo():
if node['addr'] == url.hostname+":"+str(p2p_port(1)):
found = True
assert(found)
if __name__ == '__main__':
NodeHandlingTest ().main ()
|
mit
| 3,107,207,026,408,182,300 | 40.643678 | 136 | 0.602263 | false |
Sasanita/nmt-keras
|
utils/config_pkl2py.py
|
1
|
2305
|
import argparse
import ast
import sys
from keras_wrapper.extra.read_write import pkl2dict
def parse_args():
parser = argparse.ArgumentParser("Rebuilds a python file (like config.py) from a given config instance.")
parser.add_argument("-c", "--config", required=False, help="Config pkl for loading the model configuration. "
"If not specified, hyperparameters "
"are read from config.py")
parser.add_argument("-d", "--dest", required=False, type=str,
default=None, help="Destination file. If unspecidied, standard output")
parser.add_argument("-ch", "--changes", nargs="*", help="Changes to the config. Following the syntax Key=Value",
default="")
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
if args.config is None:
from config import load_parameters
params = load_parameters()
else:
params = pkl2dict(args.config)
try:
for arg in args.changes:
try:
k, v = arg.split('=')
except ValueError:
                print 'Overwritten arguments must have the form key=Value.\nCurrent arguments: %s' % str(args.changes)
exit(1)
try:
params[k] = ast.literal_eval(v)
except ValueError:
params[k] = v
except ValueError:
print 'Error processing arguments: (', k, ",", v, ")"
exit(2)
if args.dest is not None:
print args.dest
output = open(args.dest, 'w')
else:
output = sys.stdout
# Print header
output.write('def load_parameters():\n')
output.write('\t"""\n')
output.write('\tLoads the defined hyperparameters\n')
output.write('\t:return parameters: Dictionary of loaded parameters\n')
output.write('\t"""\n')
for key, value in params.iteritems():
output.write('\t' + key + '=' + str(value) + '\n')
# Print ending
output.write('\t# ================================================ #\n')
output.write('\tparameters = locals().copy()\n')
output.write('\treturn parameters\n')
if args.dest is not None:
output.close()
|
mit
| -1,662,601,785,041,786,400 | 37.416667 | 116 | 0.547505 | false |
kernt/linuxtools
|
gnome3-shell/nautilus-scripts/applications/multimedia/audio/Play-In-XMMS.py
|
1
|
1810
|
#!/usr/bin/python
#
# simple script to recurse a subtree, find all the mp3s and queue them to
# XMMS.
#
# Please modify this script! My python is rusty at best.
#
# Travis Hume -- travis@usermail.com
# Thu Oct 24 11:06:54 2002
#
# Barak Korren - ifireball@yahoo.com
# Sat Apr 03 2004
# Some bugfixes, now preserves alphanumerical file-ordering in
# sub-directories
import sys, glob, os, os.path, dircache
def isAudioFile( f ):
# to support additional file types just add their appropriate
# extentions to this list (lower case).
file_types = ['.mp3','.ogg','.wav']
p,ext = os.path.splitext(f)
try:
file_types.index(ext.lower())
except:
return False
return True
# change this to something other than None to make the script
# follow symlinks
follow_links = None
def find_mp3s( dirs=None ):
""" finds all mp3 files rooted at dirs and returns them as a list """
if not dirs:
return []
mp3s = []
while dirs:
if os.path.isfile(dirs[0]) and isAudioFile(dirs[0]):
mp3s.append(dirs[0])
dirs = dirs[1:]
elif os.path.isdir(dirs[0]):
found_dirs = []
for f in dircache.listdir( dirs[0] ):
p = dirs[0] + "/" + f;
if os.path.isfile(p) and isAudioFile(p):
mp3s.append( p )
elif os.path.isdir( p ) and not f.endswith( "/proc" ):
if not os.path.islink( p ) or follow_links:
found_dirs.append( p )
dirs = found_dirs + dirs[1:]
return mp3s
dirs = sys.argv[1:]
dirs.reverse()
mp3s = find_mp3s( dirs )
#inf = "";
#for mp3 in mp3s:
# inf = inf + '"' + mp3 + '"' + "\n"
#os.execvp("zenity", ['zenity','--info','--text=' + inf] )
os.execvp("xmms", ['xmms','-p'] + mp3s )
|
gpl-3.0
| -3,455,724,984,299,591,000 | 26.014925 | 73 | 0.576796 | false |
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-2.4/Lib/warnings.py
|
1
|
8772
|
"""Python part of the warnings subsystem."""
# Note: function level imports should *not* be used
# in this module as it may cause import lock deadlock.
# See bug 683658.
import sys, types
import linecache
__all__ = ["warn", "showwarning", "formatwarning", "filterwarnings",
"resetwarnings"]
# filters contains a sequence of filter 5-tuples
# The components of the 5-tuple are:
# - an action: error, ignore, always, default, module, or once
# - a compiled regex that must match the warning message
# - a class representing the warning category
# - a compiled regex that must match the module that is being warned
# - a line number for the line being warning, or 0 to mean any line
# If either of the compiled regexes is None, match anything.
filters = []
defaultaction = "default"
onceregistry = {}
def warn(message, category=None, stacklevel=1):
"""Issue a warning, or maybe ignore it or raise an exception."""
# Check if message is already a Warning object
if isinstance(message, Warning):
category = message.__class__
# Check category argument
if category is None:
category = UserWarning
assert issubclass(category, Warning)
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
if '__name__' in globals:
module = globals['__name__']
else:
module = "<string>"
filename = globals.get('__file__')
if filename:
fnl = filename.lower()
if fnl.endswith(".pyc") or fnl.endswith(".pyo"):
filename = filename[:-1]
else:
if module == "__main__":
filename = sys.argv[0]
if not filename:
filename = module
registry = globals.setdefault("__warningregistry__", {})
warn_explicit(message, category, filename, lineno, module, registry)
def warn_explicit(message, category, filename, lineno,
module=None, registry=None):
if module is None:
module = filename
if module[-3:].lower() == ".py":
module = module[:-3] # XXX What about leading pathname?
if registry is None:
registry = {}
if isinstance(message, Warning):
text = str(message)
category = message.__class__
else:
text = message
message = category(message)
key = (text, category, lineno)
# Quick test for common case
if registry.get(key):
return
# Search the filters
for item in filters:
action, msg, cat, mod, ln = item
if ((msg is None or msg.match(text)) and
issubclass(category, cat) and
(msg is None or mod.match(module)) and
(ln == 0 or lineno == ln)):
break
else:
action = defaultaction
# Early exit actions
if action == "ignore":
registry[key] = 1
return
if action == "error":
raise message
# Other actions
if action == "once":
registry[key] = 1
oncekey = (text, category)
if onceregistry.get(oncekey):
return
onceregistry[oncekey] = 1
elif action == "always":
pass
elif action == "module":
registry[key] = 1
altkey = (text, category, 0)
if registry.get(altkey):
return
registry[altkey] = 1
elif action == "default":
registry[key] = 1
else:
# Unrecognized actions are errors
raise RuntimeError(
"Unrecognized action (%r) in warnings.filters:\n %s" %
(action, item))
# Print message and context
showwarning(message, category, filename, lineno)
def showwarning(message, category, filename, lineno, file=None):
"""Hook to write a warning to a file; replace if you like."""
if file is None:
file = sys.stderr
try:
file.write(formatwarning(message, category, filename, lineno))
except IOError:
pass # the file (probably stderr) is invalid - this warning gets lost.
def formatwarning(message, category, filename, lineno):
"""Function to format a warning the standard way."""
s = "%s:%s: %s: %s\n" % (filename, lineno, category.__name__, message)
line = linecache.getline(filename, lineno).strip()
if line:
s = s + " " + line + "\n"
return s
def filterwarnings(action, message="", category=Warning, module="", lineno=0,
append=0):
"""Insert an entry into the list of warnings filters (at the front).
Use assertions to check that all arguments have the right type."""
import re
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(message, basestring), "message must be a string"
assert isinstance(category, types.ClassType), "category must be a class"
assert issubclass(category, Warning), "category must be a Warning subclass"
assert isinstance(module, basestring), "module must be a string"
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, re.compile(message, re.I), category,
re.compile(module), lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
def simplefilter(action, category=Warning, lineno=0, append=0):
"""Insert a simple entry into the list of warnings filters (at the front).
A simple filter matches all modules and messages.
"""
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, None, category, None, lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
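# Usage sketch (added comment, not part of the original source): these helpers
# are typically called once at start-up, e.g.
#   filterwarnings("ignore", message="deprecated", category=UserWarning,
#                  module="mymodule")
#   simplefilter("error", category=DeprecationWarning)
# The first silences matching UserWarnings raised from "mymodule" (the names are
# examples only); the second turns every DeprecationWarning into an exception.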
def resetwarnings():
"""Clear the list of warning filters, so that no filters are active."""
filters[:] = []
class _OptionError(Exception):
"""Exception used by option processing helpers."""
pass
# Helper to process -W options passed via sys.warnoptions
def _processoptions(args):
for arg in args:
try:
_setoption(arg)
except _OptionError, msg:
print >>sys.stderr, "Invalid -W option ignored:", msg
# Helper for _processoptions()
def _setoption(arg):
import re
parts = arg.split(':')
if len(parts) > 5:
raise _OptionError("too many fields (max 5): %r" % (arg,))
while len(parts) < 5:
parts.append('')
action, message, category, module, lineno = [s.strip()
for s in parts]
action = _getaction(action)
message = re.escape(message)
category = _getcategory(category)
module = re.escape(module)
if module:
module = module + '$'
if lineno:
try:
lineno = int(lineno)
if lineno < 0:
raise ValueError
except (ValueError, OverflowError):
raise _OptionError("invalid lineno %r" % (lineno,))
else:
lineno = 0
filterwarnings(action, message, category, module, lineno)
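# Example (added comment, not part of the original source): the command-line
# option -W "error::DeprecationWarning" is parsed here into action="error",
# empty message and module, category DeprecationWarning and lineno 0; i.e. it
# is equivalent to calling filterwarnings("error", category=DeprecationWarning).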
# Helper for _setoption()
def _getaction(action):
if not action:
return "default"
if action == "all": return "always" # Alias
for a in ['default', 'always', 'ignore', 'module', 'once', 'error']:
if a.startswith(action):
return a
raise _OptionError("invalid action: %r" % (action,))
# Helper for _setoption()
def _getcategory(category):
import re
if not category:
return Warning
if re.match("^[a-zA-Z0-9_]+$", category):
try:
cat = eval(category)
except NameError:
raise _OptionError("unknown warning category: %r" % (category,))
else:
i = category.rfind(".")
module = category[:i]
klass = category[i+1:]
try:
m = __import__(module, None, None, [klass])
except ImportError:
raise _OptionError("invalid module name: %r" % (module,))
try:
cat = getattr(m, klass)
except AttributeError:
raise _OptionError("unknown warning category: %r" % (category,))
if (not isinstance(cat, types.ClassType) or
not issubclass(cat, Warning)):
raise _OptionError("invalid warning category: %r" % (category,))
return cat
# Module initialization
_processoptions(sys.warnoptions)
# XXX OverflowWarning should go away for Python 2.5.
simplefilter("ignore", category=OverflowWarning, append=1)
simplefilter("ignore", category=PendingDeprecationWarning, append=1)
|
mit
| -8,630,744,983,485,260,000 | 33.4 | 79 | 0.604423 | false |
jepler/linuxcnc-mirror
|
configs/by_machine/plasmac/plasmac_stats.py
|
2
|
14411
|
#!/usr/bin/env python
'''
plasmac_stats.py
Copyright (C) 2019 Phillip A Carter
Inspired by and some parts copied from the work of John
(islander261 on the LinuxCNC forum)
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
'''
import os
import gtk
import linuxcnc
import gobject
import hal
import hal_glib
import time
from gladevcp.persistence import IniFile
from gladevcp.persistence import widget_defaults
from gladevcp.persistence import select_widgets
from gmoccapy import getiniinfo
class HandlerClass:
def set_theme(self):
theme = gtk.settings_get_default().get_property('gtk-theme-name')
if os.path.exists(self.prefFile):
try:
with open(self.prefFile, 'r') as f_in:
for line in f_in:
if 'gtk_theme' in line and not 'Follow System Theme' in line:
(item, theme) = line.strip().replace(" ", "").split('=')
except:
print('*** configuration file, {} is invalid ***'.format(self.prefFile))
gtk.settings_get_default().set_property('gtk-theme-name', theme)
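    # Convention: UPPERCASE attributes (PIERCE_COUNT, CUT_TIME, ...) hold running
    # totals persisted via IniFile; lowercase counterparts track the current job
    # only and are reset by clear_job_values().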
def pierce_count_changed(self,halpin):
if hal.get_value('plasmac_stats.state') >= self.TORCH_ON:
self.PIERCE_COUNT += 1
self.pierce_count += 1
self.builder.get_object('pierce-countT').set_label('{:d}'.format(self.PIERCE_COUNT))
self.builder.get_object('pierce-count').set_label('{:d}'.format(self.pierce_count))
def cut_length_changed(self,halpin):
if halpin.get():
self.thisCutLength = halpin.get()
if hal.get_value('halui.machine.units-per-mm') == 1:
self.builder.get_object('cut-lengthT').set_label('{:.2f} M'.format((self.CUT_LENGTH + self.thisCutLength) * 0.001))
self.builder.get_object('cut-length').set_label('{:.2f} M'.format((self.cut_length + self.thisCutLength) * 0.001))
else:
self.builder.get_object('cut-lengthT').set_label('{:.2f}\"'.format(self.CUT_LENGTH + self.thisCutLength))
self.builder.get_object('cut-length').set_label('{:.2f}\"'.format(self.cut_length + self.thisCutLength))
else:
self.CUT_LENGTH += self.thisCutLength
self.cut_length += self.thisCutLength
if hal.get_value('halui.machine.units-per-mm') == 1:
self.builder.get_object('cut-lengthT').set_label('{:.2f} M'.format(self.CUT_LENGTH * 0.001))
else:
self.builder.get_object('cut-lengthT').set_label('{:.2f}\"'.format(self.CUT_LENGTH))
self.thisCutLength = 0
def cut_time_changed(self,halpin):
if halpin.get():
self.thisCutTime = halpin.get()
self.display_time('cut-timeT', self.CUT_TIME + self.thisCutTime)
self.display_time('cut-time', self.cut_time + self.thisCutTime)
else:
self.CUT_TIME += self.thisCutTime
self.cut_time += self.thisCutTime
self.display_time('cut-timeT', self.CUT_TIME)
            self.thisCutTime = 0
def torch_on_changed(self,halpin):
if halpin.get() and not self.torchOn:
self.torchStart = time.time()
elif not halpin.get() and self.torchOn:
self.TORCH_TIME += (time.time() - self.torchStart)
self.torch_time += (time.time() - self.torchStart)
self.display_time('torch-timeT', self.TORCH_TIME)
self.torchOn = halpin.get()
def prog_run_changed(self,halpin):
if halpin.get() and not self.progRun:
self.clear_job_values()
self.runStart = time.time()
self.progRun = True
def prog_idle_changed(self,halpin):
if halpin.get() and self.progRun:
self.RUN_TIME += (time.time() - self.runStart)
self.display_time('run-timeT', self.RUN_TIME)
self.progRun = False
def motion_type_changed(self,halpin):
if halpin.get() == 1 and self.oldMotionType != 1:
self.rapidStart = time.time()
self.rapidOn = True
elif halpin.get() != 1 and self.oldMotionType == 1:
self.RAPID_TIME += (time.time() - self.rapidStart)
self.rapid_time += (time.time() - self.rapidStart)
self.display_time('rapid-timeT', self.RAPID_TIME)
self.rapidOn = False
self.oldMotionType = halpin.get()
def state_changed(self,halpin):
if halpin.get() == self.PROBE_HEIGHT and self.oldState == self.IDLE:
self.probeStart = time.time()
self.probeOn = True
elif (halpin.get() > self.ZERO_HEIGHT or halpin.get() == self.IDLE) and self.probeOn:
self.PROBE_TIME += (time.time() - self.probeStart)
self.probe_time += (time.time() - self.probeStart)
self.display_time('probe-timeT', self.PROBE_TIME)
self.probeOn = False
self.oldState = halpin.get()
def pierce_reset(self,halbutton):
self.PIERCE_COUNT = 0
self.builder.get_object('pierce-countT').set_label('{:d}'.format(self.PIERCE_COUNT))
def cut_length_reset(self,halbutton):
self.CUT_LENGTH = 0.0
self.builder.get_object('cut-lengthT').set_label('{:.2f}'.format(self.CUT_LENGTH))
def cut_time_reset(self,halbutton):
self.CUT_TIME = 0.0
self.display_time('cut-timeT', self.CUT_TIME)
def torch_time_reset(self,halbutton):
self.TORCH_TIME = 0.0
self.display_time('torch-timeT', self.TORCH_TIME)
def run_time_reset(self,halbutton):
self.RUN_TIME = 0.0
self.display_time('run-timeT', self.RUN_TIME)
def rapid_time_reset(self,halbutton):
self.RAPID_TIME = 0.0
self.display_time('rapid-timeT', self.RAPID_TIME)
def probe_time_reset(self,halbutton):
self.PROBE_TIME = 0.0
self.display_time('probe-timeT', self.PROBE_TIME)
def clear_job_values(self):
self.pierce_count = 0
self.builder.get_object('pierce-count').set_label('{:d}'.format(self.pierce_count))
self.cut_length = 0
self.builder.get_object('cut-length').set_label('{:.2f}'.format(self.cut_length))
self.cut_time = 0
self.display_time('cut-time', self.cut_time)
self.torch_time = 0
self.display_time('torch-time', self.torch_time)
self.display_time('run-time', 0)
self.rapid_time = 0
self.display_time('rapid-time', self.rapid_time)
self.probe_time = 0
self.display_time('probe-time', self.probe_time)
self.torchOn = False
self.progRun = False
self.rapidOn = False
self.probeOn = False
def all_reset(self,halbutton):
self.pierce_reset(0)
self.cut_length_reset(0)
self.cut_time_reset(0)
self.torch_time_reset(0)
self.run_time_reset(0)
self.rapid_time_reset(0)
self.probe_time_reset(0)
def display_time(self,widget,time):
m, s = divmod(time, 60)
h, m = divmod(m, 60)
self.builder.get_object(widget).set_label('{:.0f}:{:02.0f}:{:02.0f}'.format(h,m,s))
def on_stats_box_destroy(self, obj, data = None):
self.ini.save_state(self)
def on_unix_signal(self,signum,stack_frame):
self.ini.save_state(self)
def periodic(self):
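        # Called every 100 ms (gobject.timeout_add in __init__) to refresh the
        # live timer labels while the corresponding activity is in progress.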
if self.torchOn:
self.display_time('torch-timeT', self.TORCH_TIME + (time.time() - self.torchStart))
self.display_time('torch-time', self.torch_time + (time.time() - self.torchStart))
if self.progRun:
self.display_time('run-timeT', self.RUN_TIME + (time.time() - self.runStart))
self.display_time('run-time', time.time() - self.runStart)
if self.rapidOn:
self.display_time('rapid-timeT', self.RAPID_TIME + (time.time() - self.rapidStart))
self.display_time('rapid-time', self.rapid_time + (time.time() - self.rapidStart))
if self.probeOn:
self.display_time('probe-timeT', self.PROBE_TIME + (time.time() - self.probeStart))
self.display_time('probe-time', self.probe_time + (time.time() - self.probeStart))
return True
def __init__(self, halcomp,builder,useropts):
self.halcomp = halcomp
self.builder = builder
self.i = linuxcnc.ini(os.environ['INI_FILE_NAME'])
self.prefFile = self.i.find('EMC', 'MACHINE') + '.pref'
self.set_theme()
self.pierceCount = hal_glib.GPin(halcomp.newpin('pierce-count', hal.HAL_S32, hal.HAL_IN))
self.pierceCount.connect('value-changed', self.pierce_count_changed)
self.cutLength = hal_glib.GPin(halcomp.newpin('cut-length', hal.HAL_FLOAT, hal.HAL_IN))
self.cutLength.connect('value-changed', self.cut_length_changed)
self.cutTime = hal_glib.GPin(halcomp.newpin('cut-time', hal.HAL_FLOAT, hal.HAL_IN))
self.cutTime.connect('value-changed', self.cut_time_changed)
self.torchOn = hal_glib.GPin(halcomp.newpin('torch-on', hal.HAL_BIT, hal.HAL_IN))
self.torchOn.connect('value-changed', self.torch_on_changed)
self.progRun = hal_glib.GPin(halcomp.newpin('program-is-running', hal.HAL_BIT, hal.HAL_IN))
self.progRun.connect('value-changed', self.prog_run_changed)
self.progIdle = hal_glib.GPin(halcomp.newpin('program-is-idle', hal.HAL_BIT, hal.HAL_IN))
self.progIdle.connect('value-changed', self.prog_idle_changed)
self.statePin = hal_glib.GPin(halcomp.newpin('state', hal.HAL_S32, hal.HAL_IN))
self.statePin.connect('value-changed', self.state_changed)
self.rapidTime = hal_glib.GPin(halcomp.newpin('motion-type', hal.HAL_S32, hal.HAL_IN))
self.rapidTime.connect('value-changed', self.motion_type_changed)
self.pierceReset = self.builder.get_object('pierce-count-reset')
self.pierceReset.connect('pressed', self.pierce_reset)
self.cutLengthReset = self.builder.get_object('cut-length-reset')
self.cutLengthReset.connect('pressed', self.cut_length_reset)
self.cutTimeReset = self.builder.get_object('cut-time-reset')
self.cutTimeReset.connect('pressed', self.cut_time_reset)
self.torchTimeReset = self.builder.get_object('torch-time-reset')
self.torchTimeReset.connect('pressed', self.torch_time_reset)
self.runTimeReset = self.builder.get_object('run-time-reset')
self.runTimeReset.connect('pressed', self.run_time_reset)
self.rapidTimeReset = self.builder.get_object('rapid-time-reset')
self.rapidTimeReset.connect('pressed', self.rapid_time_reset)
self.probeTimeReset = self.builder.get_object('probe-time-reset')
self.probeTimeReset.connect('pressed', self.probe_time_reset)
self.allReset = self.builder.get_object('all-reset')
self.allReset.connect('pressed', self.all_reset)
# plasmac states
self.IDLE = 0
self.PROBE_HEIGHT = 1
self.PROBE_DOWN = 2
self.PROBE_UP = 3
self.ZERO_HEIGHT = 4
self.PIERCE_HEIGHT = 5
self.TORCH_ON = 6
self.ARC_OK = 7
self.PIERCE_DELAY = 8
self.PUDDLE_JUMP = 9
        self.CUT_HEIGHT = 10
self.CUTTING = 11
self.SAFE_HEIGHT = 12
self.MAX_HEIGHT = 13
self.FINISH = 14
self.TORCH_PULSE = 15
self.PAUSED_MOTION = 16
self.OHMIC_TEST = 17
self.PROBE_TEST = 18
self.oldState = 0
self.oldMotionType = 0
self.pierce_count = 0
self.cut_length = 0
self.thisCutLength = 0
self.cut_time = 0.0
self.thisCutTime = 0.0
self.torch_time = 0.0
self.torchOn = False
self.progRun = False
self.rapid_time = 0.0
self.rapidOn = False
self.probe_time = 0.0
self.probeOn = False
self.defaults = {IniFile.vars:{"PIERCE_COUNT" : 0,
"CUT_LENGTH" : 0.0,
"CUT_TIME" : 0.0,
"TORCH_TIME" : 0.0,
"RUN_TIME" : 0.0,
"RAPID_TIME" : 0.0,
"PROBE_TIME" : 0.0,
},
}
get_ini_info = getiniinfo.GetIniInfo()
self.ini_filename = __name__ + ".var"
self.ini = IniFile(self.ini_filename, self.defaults, self.builder)
self.ini.restore_state(self)
self.builder.get_object('pierce-countT').set_label('{:d}'.format(self.PIERCE_COUNT))
self.builder.get_object('pierce-count').set_label('{:d}'.format(0))
if hal.get_value('halui.machine.units-per-mm') == 1:
self.builder.get_object('cut-lengthT').set_label('{:0.2f} M'.format(self.CUT_LENGTH * 0.001))
self.builder.get_object('cut-length').set_label('{:0.2f} M'.format(0))
else:
self.builder.get_object('cut-lengthT').set_label('{:0.2f}\"'.format(self.CUT_LENGTH))
self.builder.get_object('cut-length').set_label('{:0.2f}\"'.format(0))
self.display_time('cut-timeT', self.CUT_TIME)
self.display_time('torch-timeT', self.TORCH_TIME)
self.display_time('run-timeT', self.RUN_TIME)
self.display_time('rapid-timeT', self.RAPID_TIME)
self.display_time('probe-timeT', self.PROBE_TIME)
gobject.timeout_add(100, self.periodic)
def get_handlers(halcomp,builder,useropts):
return [HandlerClass(halcomp,builder,useropts)]
|
lgpl-2.1
| -3,141,769,740,846,787,000 | 44.749206 | 131 | 0.598779 | false |
whiskeysierra/paper-cut
|
papercut/core.py
|
1
|
2458
|
from __future__ import unicode_literals
import os
from clint.textui import colored, indent, puts
from glob import glob
from wand.image import Image
sizes = {
'A0': (841.0, 1189.0),
'A1': (594.0, 841.0),
'A2': (420.0, 594.0),
'A3': (297.0, 420.0),
'A4': (210.0, 297.0),
'A5': (148.0, 210.0),
'A6': (105.0, 148.0),
'A7': (74.0, 105.0),
'A8': (52.0, 74.0),
'A9': (37.0, 52.0),
'A10': (26.0, 37.0),
'Letter': (215.9, 279.4),
'Legal': (215.9, 355.6),
    'Ledger': (432.0, 279.0),
    'Tabloid': (279.0, 432.0),
}
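# Crop aspect ratio derived from the selected paper size: height/width of the
# (width, height) tuple for portrait, width/height for landscape.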
orientations = {
'portrait': lambda (w, h): h / w,
'landscape': lambda (w, h): w / h,
}
# TODO make image extensions more dynamic, versatile or configurable
extensions = ['bmp', 'gif', 'jpeg', 'jpg', 'png', 'tiff']
def exists(target):
return os.path.exists(target) or glob('%s-*%s' % os.path.splitext(target))
def run(args):
size = sizes[args.size]
ratio = orientations[args.orientation](size)
for root, _, _ in os.walk(args.input):
puts(root)
with indent(4):
for extension in extensions:
files = glob(os.path.join(root, '*.%s' % extension))
for source in files:
with Image(filename=source) as original:
with original.clone() as img:
width, height = img.size
if width < height:
height = int(width * ratio)
else:
width = int(height / ratio)
dimension = '%sx%s' % (width, height)
relative = os.path.relpath(source, args.input)
target = os.path.join(args.output, relative)
directory = os.path.dirname(target)
if not args.dryrun:
if not os.path.exists(directory):
os.makedirs(directory)
if not args.overwrite and exists(target):
puts('[%s] %s' % (colored.yellow('exists'), relative))
else:
img.transform(crop=dimension)
img.save(filename=target)
puts('[%s] %s' % (colored.green('done'), relative))
|
mit
| -1,643,918,188,222,615,000 | 31.786667 | 90 | 0.454841 | false |
luisgg/germinate
|
germinate/tests/test_archive.py
|
1
|
3426
|
#! /usr/bin/env python
"""Unit tests for germinate.archive."""
# Copyright (C) 2012 Canonical Ltd.
#
# Germinate is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2, or (at your option) any
# later version.
#
# Germinate is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Germinate; see the file COPYING. If not, write to the Free
# Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301, USA.
import gzip
import os
import textwrap
from germinate.archive import IndexType, TagFile
from germinate.tests.helpers import TestCase
class TestTagFile(TestCase):
def test_init_lists(self):
"""TagFile may be constructed with list parameters."""
tagfile = TagFile(
["dist"], ["component"], "arch", ["mirror"],
source_mirrors=["source_mirror"])
self.assertEqual(["dist"], tagfile._dists)
self.assertEqual(["component"], tagfile._components)
self.assertEqual(["mirror"], tagfile._mirrors)
self.assertEqual(["source_mirror"], tagfile._source_mirrors)
def test_init_strings(self):
"""TagFile may be constructed with string parameters."""
tagfile = TagFile(
"dist", "component", "arch", "mirror",
source_mirrors="source_mirror")
self.assertEqual(["dist"], tagfile._dists)
self.assertEqual(["component"], tagfile._components)
self.assertEqual(["mirror"], tagfile._mirrors)
self.assertEqual(["source_mirror"], tagfile._source_mirrors)
def test_sections(self):
"""Test fetching sections from a basic TagFile archive."""
self.useTempDir()
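        # Build a minimal file:// mirror layout with one gzipped Packages and
        # Sources entry under dists/unstable/main/{binary-i386,source}.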
main_dir = os.path.join("mirror", "dists", "unstable", "main")
binary_dir = os.path.join(main_dir, "binary-i386")
source_dir = os.path.join(main_dir, "source")
os.makedirs(binary_dir)
os.makedirs(source_dir)
packages = gzip.GzipFile(os.path.join(binary_dir, "Packages.gz"), "w")
try:
packages.write(textwrap.dedent("""\
Package: test
Version: 1.0
Architecture: i386
""").encode("UTF-8"))
finally:
packages.close()
sources = gzip.GzipFile(os.path.join(source_dir, "Sources.gz"), "w")
try:
sources.write(textwrap.dedent("""\
Source: test
Version: 1.0
""").encode("UTF-8"))
finally:
sources.close()
tagfile = TagFile(
"unstable", "main", "i386", "file://%s/mirror" % self.temp_dir)
sections = list(tagfile.sections())
self.assertEqual(IndexType.PACKAGES, sections[0][0])
self.assertEqual("test", sections[0][1]["Package"])
self.assertEqual("1.0", sections[0][1]["Version"])
self.assertEqual("i386", sections[0][1]["Architecture"])
self.assertEqual(IndexType.SOURCES, sections[1][0])
self.assertEqual("test", sections[1][1]["Source"])
self.assertEqual("1.0", sections[1][1]["Version"])
|
gpl-2.0
| 2,343,314,905,086,168,600 | 38.37931 | 78 | 0.624051 | false |
youngjun0528/moheeto
|
BaekjoonOnlineJudge/acmicpc_2615.py
|
1
|
6033
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Gomoku (five in a row)
# https://www.acmicpc.net/problem/2615
import sys
import copy
if __name__ == '__main__':
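    # Approach: give each player's stones unique ids, then for every stone that
    # starts a run in a direction (no same-colour stone immediately before it),
    # count the consecutive stones that follow; exactly four more means five in
    # a row (six or more in a row does not count as a win).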
matrix = []
for cnt in range(19):
row_data = [int(x) for x in sys.stdin.readline().strip().replace(' ','')]
matrix.append(row_data)
    # Case where player 1 wins
num_matrix = [[0] * 19 for x in range(19)]
num = 1
for i in range(19):
for j in range(19):
if matrix[i][j] == 1:
num_matrix[i][j] = num
num += 1
graph_first = {}
for i in range(19):
for j in range(19):
if num_matrix[i][j] > 0:
                # horizontal
result = []
if i == 0 or num_matrix[i - 1][j] == 0:
for cnt in range(1, 6):
if i + cnt < 19 and num_matrix[i + cnt][j] > 0:
result.append(str(num_matrix[i + cnt][j]))
else:
break
if len(result) == 4:
graph_first[str(num_matrix[i][j])] = result
break
                # vertical
result = []
if j == 0 or num_matrix[i][j - 1] == 0:
for cnt in range(1, 6):
if j + cnt < 19 and num_matrix[i][j + cnt] > 0:
result.append(str(num_matrix[i][j + cnt]))
else:
break
if len(result) == 4:
graph_first[str(num_matrix[i][j])] = result
break
                # down-right diagonal
result = []
if i == 18 or j == 0 or num_matrix[i - 1][j - 1] == 0:
for cnt in range(1, 6):
if j + cnt < 19 and i + cnt < 19 and num_matrix[i + cnt][j + cnt] > 0:
result.append(str(num_matrix[i + cnt][j + cnt]))
else:
break
if len(result) == 4:
graph_first[str(num_matrix[i][j])] = result
break
                # up-right diagonal
result = []
if i == 18 or j == 0 or num_matrix[i + 1][j - 1] == 0:
for cnt in range(1, 6):
if j + cnt < 19 and i - cnt >= 0 and num_matrix[i - cnt][j + cnt] > 0:
result.append(str(num_matrix[i - cnt][j + cnt]))
else:
break
if len(result) == 4:
graph_first[str(num_matrix[i][j])] = result
break
    # Case where player 2 wins
num_matrix_2 = [[0] * 19 for x in range(19)]
num = 1
for i in range(19):
for j in range(19):
if matrix[i][j] == 2:
num_matrix_2[i][j] = num
num += 1
graph_second = {}
for i in range(19):
for j in range(19):
if num_matrix_2[i][j] > 0:
                # horizontal
result = []
if i == 0 or num_matrix_2[i - 1][j] == 0:
for cnt in range(1, 6):
if i + cnt < 19 and num_matrix_2[i + cnt][j] > 0:
result.append(str(num_matrix_2[i + cnt][j]))
else:
break
if len(result) == 4:
graph_second[str(num_matrix_2[i][j])] = result
break
                # vertical
result = []
if j == 0 or num_matrix_2[i][j - 1] == 0:
for cnt in range(1, 6):
if j + cnt < 19 and num_matrix_2[i][j + cnt] > 0:
result.append(str(num_matrix_2[i][j + cnt]))
else:
break
if len(result) == 4:
graph_second[str(num_matrix_2[i][j])] = result
break
                # down-right diagonal
result = []
if i == 18 or j == 0 or num_matrix_2[i - 1][j - 1] == 0:
for cnt in range(1, 6):
if j + cnt < 19 and i + cnt < 19 and num_matrix_2[i + cnt][j + cnt] > 0:
result.append(str(num_matrix_2[i + cnt][j + cnt]))
else:
break
if len(result) == 4:
graph_second[str(num_matrix_2[i][j])] = result
break
                # up-right diagonal
result = []
if i == 18 or j == 0 or num_matrix_2[i + 1][j - 1] == 0:
for cnt in range(1, 6):
if j + cnt < 19 and i - cnt >= 0 and num_matrix_2[i - cnt][j + cnt] > 0:
result.append(str(num_matrix_2[i - cnt][j + cnt]))
else:
break
if len(result) == 4:
graph_second[str(num_matrix_2[i][j])] = result
break
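    # Print the winner (1 or 2) and the 1-based row/column of the first stone of
    # the winning run; print 0 if neither player has won.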
check_one = True
if len(graph_first) > 0:
for d in graph_first.items():
if len(d[1]) == 4:
print(1)
i = 0
for mat in num_matrix:
if mat.count(int(d[0])) > 0:
print(str(i + 1) + ' ' + str(mat.index(int(d[0])) + 1))
check_one = False
i += 1
check_sec = True
if len(graph_second) > 0:
for d in graph_second.items():
if len(d[1]) == 4:
print(2)
i = 0
for mat in num_matrix_2:
if mat.count(int(d[0])) > 0:
print(str(i + 1) + ' ' + str(mat.index(int(d[0])) + 1))
check_sec = False
i += 1
if check_one and check_sec:
print(0)
|
apache-2.0
| 1,503,235,518,156,774,100 | 35.496933 | 96 | 0.353673 | false |
btgoodwin/apgRasPi
|
server/UIs/Tornado/pi_server.py
|
1
|
9178
|
#!/usr/bin/env python
import os
import sys
import time
import pdb
import json
import socket
import argparse
import tornado.web
import tornado.websocket
import tornado.httpserver
import tornado.ioloop
from threading import Timer
from heapq import merge
from ossie.utils import redhawk
from ossie.utils.redhawk.channels import ODMListener
from ossie.utils.weakmethod import WeakBoundMethod
def getLaptopIPaddress():
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("gmail.com",80))
ipaddr = s.getsockname()[0]
s.close()
return ipaddr
def getLaptopLatLon():
dom = redhawk.attach(redhawk.scan()[0])
for node in dom.devMgrs:
        if node.name.lower() == 'laptopnode':
for dev in node.devs:
if dev.name.lower() == 'gps_receiver':
gps_port = dev.getPort('GPS_idl')
pos = gps_port._get_gps_time_pos().position
return pos.lat, pos.lon, pos.valid
lat = raw_input("WARNING: Unable to find GPS device connected to server.\nPlease enter latitude in degrees: ")
    lon = raw_input("Please enter longitude in degrees: ")
return lat, lon, True
def _parseInput():
parser = argparse.ArgumentParser(description='Launch Server')
ipaddr = getLaptopIPaddress()
parser.add_argument('-a', help='IP address to use for serving', default=ipaddr)
parser.add_argument('-p', help='Port to use for serving', default='8080')
args = vars(parser.parse_args())
return args
class MapHandler(tornado.web.RequestHandler):
def initialize(self, ipaddr, port, lat, lon):
self._ipaddr = ipaddr
self._port = port
self._lat = lat
self._lon = lon
def get(self):
print("A client pulled map_template.html to show the map")
pis = 0
dom = redhawk.attach(redhawk.scan()[0])
for node in dom.devMgrs:
print("Node name: %s" % node.name)
if 'raspberry' in node.name.lower():
pis = pis + 1
print("Number of pis: %s" % pis)
self.render("map_template.html", ipaddr=self._ipaddr, port=self._port, lat=self._lat, lon=self._lon, num_pis=0)
class WebSocketMapHandler(tornado.websocket.WebSocketHandler):
def open(self):
print("A websocket connection was opened to the Map websocket")
self._connectToDomain()
self._setupOdmListener()
Timer(1.0, self._hack).start()
""" Connect to domain, add pis found to our list, add them to the GUI. """
def _connectToDomain(self):
self.dom = redhawk.attach(redhawk.scan()[0])
rx_pis, tx_pis = self._getPisList()
for node in rx_pis:
lat, lon, valid = self._getNodeLatLon(node)
self.update_map(node.name, 'add', lat, lon)
for node in tx_pis:
lat, lon, valid = self._getNodeLatLon(node)
self.update_map(node.name, 'addTx', lat, lon)
def _setupOdmListener(self):
try:
self.odm = ODMListener()
self.odm.connect(self.dom)
self.odm.deviceManagerAdded.addListener(WeakBoundMethod(self.deviceManagerAdded))
self.odm.deviceManagerRemoved.addListener(WeakBoundMethod(self.deviceManagerRemoved))
except:
print(" ERROR: setupOdmListener failed; please make sure a REDHAWK Domain Manager is running\n")
def _hack(self):
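        # Re-send 'add' markers for all known pis shortly after the socket opens,
        # then start the periodic position update and LOB polling callbacks.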
rx_pis, tx_pis = self._getPisList()
for node in rx_pis:
self.update_map(node.name, 'add', 0.0, 0.0)
for node in tx_pis:
self.update_map(node.name, 'addTx', 0.0, 0.0)
cb = tornado.ioloop.PeriodicCallback(self._timed_pi_update, 1000)
cb.start()
lob_poll = tornado.ioloop.PeriodicCallback(self._pollLobComponents, 2000)
lob_poll.start()
def on_message(self, message):
print("\nMessage received from map client: %s" % message)
def on_close(self):
print("\nWebsocket connection to %s closed" % self)
def update_map(self, name, action, lat, lon, lob_angle=0.0):
# action: 'add', 'addTx', 'remove', 'addLob', or 'update'
data = {'nodeName': name, 'action': action, 'lat': lat, 'lon': lon, 'angle': int(lob_angle)}
msg = json.dumps(data)
self.write_message(json.dumps(msg))
        if ((action != 'update') and (action != 'addLob')):
print("Will now %s marker for node %s located at %s, %s" % (action, name, lat, lon))
def deviceManagerAdded(self, evt):
print("Added device manager %s" % evt.sourceName)
#pdb.set_trace()
if 'raspberry_pi' in evt.sourceName.lower():
#print("This is where I will call self.update_map to add a marker")
self.update_map(evt.sourceName, 'add', 0.0, 0.0)
def deviceManagerRemoved(self, evt):
print("Removed device manager %s" % evt.sourceName)
if evt.sourceName.lower() == 'raspberry_pi':
#print("This is where I will call self.update_map to remove a marker")
self.update_map(evt.sourceName, 'remove', 0.0, 0.0)
""" Timed update of node positions at runtime """
def _timed_pi_update(self):
rx_pis, tx_pis = self._getPisList()
for node in list(merge(rx_pis, tx_pis)):
try:
""" Somehow dom.devMgrs is not being updated fully when nodes join/leave"""
lat, lon, valid = self._getNodeLatLon(node)
except:
lat, lon, valid = 0.0, 0.0, False
self.update_map(node.name, 'update', lat, lon)
""" Simple function to pull all nodes whose name starts with raspberry_pi """
""" Node are sorted according to whether they are receiver or transmitter nodes """
def _getPisList(self):
rx_list = []
tx_list = []
for node in self.dom.devMgrs:
if node.name.startswith('raspberry_pi'):
for dev in node.devs:
if 'rtl_sdr' in dev.name.lower():
rx_list.append(node)
break
if 'transmit_control' in dev.name.lower():
tx_list.append(node)
break
return rx_list, tx_list
""" Fetch lat, lon, and validity of information from node. """
def _getNodeLatLon(self, node):
for dev in node.devs:
if (dev.name == 'gps_receiver'):
gps_port = dev.getPort('GPS_idl')
pos = gps_port._get_gps_time_pos().position
return pos.lat, pos.lon, pos.valid
""" Poll the LOB components """
def _pollLobComponents(self):
if len(self.dom.apps) > 0:
for wf in self.dom.apps:
for comp in wf.comps:
if 'lobCalc' in comp.name:
comp_dict = comp._query()
if comp_dict['valid']:
angle = int(comp_dict['lob'])
# TODO: This can be hardcoded for testing only
#piNum = comp_dict['streamID_prop'][-2:]
piNum = 87
adjusted_angle = self._nearestFive(angle)
print("INFO: raspberry_pi%s, LOB %d degrees (%d)" % (piNum, angle, adjusted_angle))
self.update_map('raspberry_pi'+str(piNum), 'addLob', 0, 0, adjusted_angle)
else:
#print("DEBUG: LOB valid for pi %s is not valid" % comp_dict['streamID_prop'][-2:])
pass
else:
print("No DF waveforms available")
def _nearestFive(self, value):
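        # Round an integer angle to the nearest multiple of 5 degrees.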
mod = value % 5
if mod < 3:
return value - mod
else:
return value + (5-mod)
class Application(tornado.web.Application):
def __init__(self, ipaddr, port, lat, lon):
handlers = [
(r'/', MapHandler, {'ipaddr':ipaddr, 'port':port, 'lat':lat, 'lon':lon}),
(r'/wsmap', WebSocketMapHandler),
]
settings = {
'static_path': os.path.join(os.path.dirname(__file__), "static"),
'template_path': 'templates',
'debug' : True,
}
tornado.web.Application.__init__(self, handlers, **settings)
if __name__ == '__main__':
# Get command line arguments
cfg = _parseInput()
ipaddr = cfg['a']
port = cfg['p']
laptop_lat, laptop_lon, pos_valid = getLaptopLatLon()
# Set up ODM Listener on first REDHAWK Domain Manager
pass
# Set up web server
ws_app = Application(ipaddr, port, laptop_lat, laptop_lon)
server = tornado.httpserver.HTTPServer(ws_app)
server.listen(port)
print("\nNow serving on port %s of address %s....\n" % (port, ipaddr))
print("\n****** REMOVE HARDCODED POSITION VALUES BEFORE RELEASING *****\n\n")
try:
tornado.ioloop.IOLoop.instance().start()
except KeyboardInterrupt:
print("\nReceived KeyboardInterrupt, shutting down server.")
|
lgpl-3.0
| 3,179,455,331,251,596,300 | 38.390558 | 119 | 0.569296 | false |
fugitifduck/exabgp
|
lib/exabgp/bgp/message/update/attribute/community/community.py
|
1
|
2080
|
# encoding: utf-8
"""
community.py
Created by Thomas Mangin on 2009-11-05.
Copyright (c) 2009-2015 Exa Networks. All rights reserved.
"""
from struct import pack
from struct import unpack
# ==================================================================== Community
#
class Community (object):
NO_EXPORT = pack('!L',0xFFFFFF01)
NO_ADVERTISE = pack('!L',0xFFFFFF02)
NO_EXPORT_SUBCONFED = pack('!L',0xFFFFFF03)
NO_PEER = pack('!L',0xFFFFFF04)
cache = {}
caching = True
__slots__ = ['community','_str']
def __init__ (self, community):
self.community = community
if community == self.NO_EXPORT:
self._str = 'no-export'
elif community == self.NO_ADVERTISE:
self._str = 'no-advertise'
elif community == self.NO_EXPORT_SUBCONFED:
self._str = 'no-export-subconfed'
else:
self._str = "%d:%d" % unpack('!HH',self.community)
def __cmp__ (self, other):
if not isinstance(other,self.__class__):
return -1
if self.community != other.community:
return -1
return 0
def json (self):
return "[ %d, %d ]" % unpack('!HH',self.community)
def pack (self, negotiated=None):
return self.community
def __str__ (self):
return self._str
def __len__ (self):
return 4
def __eq__ (self, other):
return self.community == other.community
def __ne__ (self, other):
return self.community != other.community
@classmethod
def unpack (cls, community, negotiated):
return cls(community)
@classmethod
def cached (cls, community):
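		# Return a memoised Community for this raw 4-byte value when caching is
		# enabled; otherwise build a fresh instance each time.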
if cls.caching and community in cls.cache:
return cls.cache[community]
instance = cls(community)
if cls.caching:
cls.cache[community] = instance
return instance
# Always cache well-known communities, they will be used a lot
if not Community.cache:
Community.cache[Community.NO_EXPORT] = Community(Community.NO_EXPORT)
Community.cache[Community.NO_ADVERTISE] = Community(Community.NO_ADVERTISE)
Community.cache[Community.NO_EXPORT_SUBCONFED] = Community(Community.NO_EXPORT_SUBCONFED)
Community.cache[Community.NO_PEER] = Community(Community.NO_PEER)
|
bsd-3-clause
| -291,284,668,875,226,200 | 24.679012 | 90 | 0.659615 | false |
UNICT-DMI/Telegram-DMI-Bot
|
module/commands/lezioni.py
|
1
|
10829
|
# -*- coding: utf-8 -*-
"""/lezioni command"""
import logging
import re
from typing import Tuple
from telegram import InlineKeyboardButton, InlineKeyboardMarkup, Update
from telegram.ext import CallbackContext
from module.data import Lesson
from module.shared import check_log, send_message
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
logger = logging.getLogger(__name__)
def lezioni(update: Update, context: CallbackContext):
"""Called by the /lezioni command.
Shows the options available to execute a lesson query.
Args:
update: update event
context: context passed by the handler
"""
check_log(update, "lezioni")
if 'lezioni' in context.user_data:
        context.user_data['lezioni'].clear()  # clear any data left over from a previous /lezioni run for this user
else:
        context.user_data['lezioni'] = {}  # create the dict that will hold this user's /lezioni data under user_data['lezioni']
user_id = update.message.from_user.id
chat_id = update.message.chat_id
    if chat_id != user_id:  # force the command to run in a private chat, also to avoid flooding a group with the results
context.bot.sendMessage(chat_id=chat_id, text="Questo comando è utilizzabile solo in privato")
context.bot.sendMessage(chat_id=user_id, text="Dal comando lezioni che hai eseguito in un gruppo")
message_text, inline_keyboard = get_lezioni_text_InLineKeyboard(context)
context.bot.sendMessage(chat_id=user_id, text=message_text, reply_markup=inline_keyboard)
def lezioni_handler(update: Update, context: CallbackContext):
"""Called by any of the buttons in the /lezioni command sub-menus.
The action will change depending on the button:
- anno -> adds / removes the selected year from the query parameters
- giorno -> adds / removes the selected day from the query parameters
- search -> executes the query with all the selected parametes and shows the result
Args:
update: update event
context: context passed by the handler
"""
callback_data = update.callback_query.data
chat_id = update.callback_query.message.chat_id
message_id = update.callback_query.message.message_id
lezioni_user_data = context.user_data['lezioni']
if "anno" in callback_data:
if callback_data[20:] not in lezioni_user_data.keys():
            # if not already present, set the ["1 anno"|"2 anno"|"3 anno"] key to True...
lezioni_user_data[callback_data[20:]] = True
else:
            # ...or remove the key if it was already there
del lezioni_user_data[callback_data[20:]]
elif "giorno" in callback_data:
if callback_data[22:] not in lezioni_user_data.keys():
            # if not already present, set the day key ("1 giorno", "2 giorno", ...) to True...
lezioni_user_data[callback_data[22:]] = True
else:
            # ...or remove the key if it was already there
del lezioni_user_data[callback_data[22:]]
elif "search" in callback_data:
        message_text = generate_lezioni_text(lezioni_user_data)  # get the result of the query matching the selected filters
context.bot.editMessageText(chat_id=chat_id, message_id=message_id, text=update.callback_query.message.text)
        send_message(update, context, message_text)  # send the query result, splitting the string into multiple messages
        lezioni_user_data.clear()  # clean up the dict
return
else:
logger.error("lezioni_handler: an error has occurred")
message_text, inline_keyboard = get_lezioni_text_InLineKeyboard(context)
context.bot.editMessageText(text=message_text, chat_id=chat_id, message_id=message_id, reply_markup=inline_keyboard)
def lezioni_button_anno(update: Update, context: CallbackContext, chat_id, message_id):
"""Called by one of the buttons of the /lezioni command.
Allows the user to choose an year among the ones proposed
Args:
update: update event
context: context passed by the handler
chat_id: id of the chat of the user
message_id: id of the sub-menu message
"""
message_text = "Seleziona l'anno che ti interessa"
keyboard = [[]]
keyboard.append([
InlineKeyboardButton("Primo anno", callback_data="lezioni_button_anno_1 anno"),
InlineKeyboardButton("Secondo anno", callback_data="lezioni_button_anno_2 anno"),
InlineKeyboardButton("Terzo anno", callback_data="lezioni_button_anno_3 anno"),
])
context.bot.editMessageText(text=message_text, chat_id=chat_id, message_id=message_id, reply_markup=InlineKeyboardMarkup(keyboard))
def lezioni_button_giorno(update: Update, context: CallbackContext, chat_id, message_id):
"""Called by one of the buttons of the /lezioni command.
Allows the user to choose a day among the ones proposed
Args:
update: update event
context: context passed by the handler
chat_id: id of the chat of the user
message_id: id of the sub-menu message
"""
message_text = "Seleziona il giorno che ti interessa"
keyboard = [[]]
keyboard.append([
InlineKeyboardButton("Lunedì", callback_data="lezioni_button_giorno_1 giorno"),
InlineKeyboardButton("Martedì", callback_data="lezioni_button_giorno_2 giorno"),
])
keyboard.append([
InlineKeyboardButton("Mercoledì", callback_data="lezioni_button_giorno_3 giorno"),
InlineKeyboardButton("Giovedì", callback_data="lezioni_button_giorno_4 giorno"),
])
keyboard.append([
InlineKeyboardButton("Venerdì", callback_data="lezioni_button_giorno_5 giorno"),
])
context.bot.editMessageText(text=message_text, chat_id=chat_id, message_id=message_id, reply_markup=InlineKeyboardMarkup(keyboard))
def lezioni_button_insegnamento(update: Update, context: CallbackContext, chat_id, message_id):
"""Called by one of the buttons of the /lezioni command.
Allows the user to write the subject they want to search for
Args:
update: update event
context: context passed by the handler
chat_id: id of the chat of the user
message_id: id of the sub-menu message
"""
context.user_data['lezioni'][
'cmd'] = "input_insegnamento" # è in attesa di un messaggio nel formato corretto che imposti il valore del campo insegnamento
message_text = "Inserire il nome della materia nel formato:\n" + \
"nome: nome insegnamento\n" + \
"Esempio:\n" +\
"nome: SisTeMi oPeRaTIvI"
context.bot.editMessageText(text=message_text, chat_id=chat_id, message_id=message_id)
def lezioni_input_insegnamento(update: Update, context: CallbackContext):
"""Called after :func:`lezioni_button_insegnamento`.
Allows the user to input the wanted subject, in the format [Nn]ome: <insegnamento>
Args:
update: update event
context: context passed by the handler
"""
    if context.user_data['lezioni'].get('cmd', 'null') == "input_insegnamento":  # only if the user actually asked to edit the subject...
check_log(update, "lezioni_input_insegnamento")
context.user_data['lezioni']['insegnamento'] = re.sub(r"^(?!=<[/])[Nn]ome:\s+", "",
                                                              update.message.text)  # extract the subject name and store it in the dict
del context.user_data['lezioni'][
            'cmd']  # disable editing of the subject until its button is pressed again
message_text, inline_keyboard = get_lezioni_text_InLineKeyboard(context)
context.bot.sendMessage(chat_id=update.message.chat_id, text=message_text, reply_markup=inline_keyboard)
def get_lezioni_text_InLineKeyboard(context: CallbackContext) -> Tuple[str, InlineKeyboardMarkup]:
"""Generates the text and the InlineKeyboard for the /lezioni command, based on the current parameters.
Args:
update: update event
context: context passed by the handler
Returns:
message_text and InlineKeyboardMarkup to use
"""
lezioni_user_data = context.user_data['lezioni']
    # string listing the years whose flag is True
text_anno = ", ".join([key for key in lezioni_user_data if "anno" in key]).replace("anno", "")
    # string listing the days whose flag is True
text_giorno = ", ".join([Lesson.INT_TO_DAY[key.replace(" giorno", "")] for key in lezioni_user_data if "giorno" in key])
    # string holding the selected subject, if any
text_insegnamento = lezioni_user_data.get("insegnamento", "")
message_text = "Anno: {}\nGiorno: {}\nInsegnamento: {}"\
.format(text_anno if text_anno else "tutti",\
text_giorno if text_giorno else "tutti",\
text_insegnamento if text_insegnamento else "tutti")
keyboard = [[]]
keyboard.append([InlineKeyboardButton(" ~ Personalizza la ricerca ~ ", callback_data="_div")])
keyboard.append([
InlineKeyboardButton(" Anno ", callback_data="sm_lezioni_button_anno"),
InlineKeyboardButton(" Giorno ", callback_data="sm_lezioni_button_giorno"),
])
keyboard.append([
InlineKeyboardButton(" Insegnamento ", callback_data="sm_lezioni_button_insegnamento"),
InlineKeyboardButton(" Cerca ", callback_data="lezioni_button_search")
])
return message_text, InlineKeyboardMarkup(keyboard)
def generate_lezioni_text(user_dict) -> str:
"""Called by :meth:`lezioni` after the search button in the /lezioni command has been pressed.
Executes the query and returns the text to send to the user
Args:
user_dict: dictionary that stores the user selected parameters to use in the query
Returns:
result of the query to send to the user
"""
output_str = []
    # string of the days for which the dict contains the key, joined by " or giorno_settimana = "
where_giorno = " or giorno_settimana = ".join([key.replace("giorno", "") for key in user_dict if "giorno" in key])
    # string of the years for which the dict contains the key, joined by " or anno = "
where_anno = " or anno = ".join([key.replace(" anno", "") for key in user_dict if "anno" in key])
    # string holding the subject, if present
where_nome = user_dict.get("insegnamento", "")
lessons = Lesson.find(where_anno=where_anno, where_giorno=where_giorno, where_nome=where_nome)
if len(lessons) > 0:
output_str = '\n'.join(map(str, lessons))
output_str += "\nRisultati trovati: " + str(len(lessons))
else:
output_str = "Nessun risultato trovato :(\n"
return output_str
|
gpl-3.0
| 4,424,277,043,675,171,300 | 45.021277 | 154 | 0.678225 | false |
tensorflow/probability
|
tensorflow_probability/python/experimental/mcmc/sample_fold_test.py
|
1
|
18337
|
# Copyright 2020 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for drivers in a Streaming Reductions Framework."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import warnings
# Dependency imports
from absl.testing import parameterized
import numpy as np
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tensorflow_probability.python.experimental.mcmc.internal import test_fixtures
from tensorflow_probability.python.internal import samplers
from tensorflow_probability.python.internal import tensorshape_util
from tensorflow_probability.python.internal import test_util
@test_util.test_all_tf_execution_regimes
class SampleFoldTest(test_util.TestCase):
def test_simple_operation(self):
fake_kernel = test_fixtures.TestTransitionKernel()
fake_reducer = test_fixtures.NaiveMeanReducer()
reduction_rslt, last_sample, kr = tfp.experimental.mcmc.sample_fold(
num_steps=5,
current_state=0.,
kernel=fake_kernel,
reducer=fake_reducer)
reduction_rslt, last_sample, kernel_results = self.evaluate([
reduction_rslt, last_sample, kr])
self.assertEqual(3, reduction_rslt)
self.assertEqual(5, last_sample)
self.assertEqual(5, kernel_results.counter_1)
self.assertEqual(10, kernel_results.counter_2)
# Warm-restart the underlying kernel but not the reduction
reduction_rslt_2, last_sample_2, kr_2 = tfp.experimental.mcmc.sample_fold(
num_steps=5,
current_state=last_sample,
kernel=fake_kernel,
reducer=fake_reducer,
previous_kernel_results=kernel_results)
reduction_rslt_2, last_sample_2, kernel_results_2 = self.evaluate([
reduction_rslt_2, last_sample_2, kr_2])
self.assertEqual(8, reduction_rslt_2)
self.assertEqual(10, last_sample_2)
self.assertEqual(10, kernel_results_2.counter_1)
self.assertEqual(20, kernel_results_2.counter_2)
def test_reducer_warm_restart(self):
fake_kernel = test_fixtures.TestTransitionKernel()
fake_reducer = test_fixtures.NaiveMeanReducer()
red_res, last_sample, kr, red_states = tfp.experimental.mcmc.sample_fold(
num_steps=5,
current_state=0.,
kernel=fake_kernel,
reducer=fake_reducer,
return_final_reducer_states=True)
red_res, last_sample, kernel_results, red_states = self.evaluate([
red_res, last_sample, kr, red_states])
self.assertEqual(3, red_res)
self.assertEqual(5, last_sample)
self.assertEqual(5, kernel_results.counter_1)
self.assertEqual(10, kernel_results.counter_2)
# Warm-restart the underlying kernel and the reduction
reduction_rslt_2, last_sample_2, kr_2 = tfp.experimental.mcmc.sample_fold(
num_steps=5,
current_state=last_sample,
previous_kernel_results=kernel_results,
kernel=fake_kernel,
reducer=fake_reducer,
previous_reducer_state=red_states)
reduction_rslt_2, last_sample_2, kernel_results_2 = self.evaluate([
reduction_rslt_2, last_sample_2, kr_2])
self.assertEqual(5.5, reduction_rslt_2)
self.assertEqual(10, last_sample_2)
self.assertEqual(10, kernel_results_2.counter_1)
self.assertEqual(20, kernel_results_2.counter_2)
@parameterized.parameters(1., 2.)
def test_current_state(self, curr_state):
fake_kernel = test_fixtures.TestTransitionKernel()
fake_reducer = test_fixtures.NaiveMeanReducer()
reduction_rslt, last_sample, kr = tfp.experimental.mcmc.sample_fold(
num_steps=5,
current_state=curr_state,
kernel=fake_kernel,
reducer=fake_reducer)
reduction_rslt, last_sample, kernel_results = self.evaluate([
reduction_rslt, last_sample, kr])
self.assertEqual(
np.mean(np.arange(curr_state + 1, curr_state + 6)), reduction_rslt)
self.assertEqual(curr_state + 5, last_sample)
self.assertEqual(5, kernel_results.counter_1)
self.assertEqual(10, kernel_results.counter_2)
def test_reducing_kernel_results(self):
kernel = test_fixtures.TestTransitionKernel()
def reduction_target(current_state, kernel_results):
del current_state
assert isinstance(
kernel_results, test_fixtures.TestTransitionKernelResults)
return kernel_results.counter_2
reduction = tfp.experimental.mcmc.ExpectationsReducer(reduction_target)
reduction_rslt, last_sample, kr = tfp.experimental.mcmc.sample_fold(
num_steps=5,
current_state=0.,
kernel=kernel,
reducer=reduction)
reduction_rslt, last_sample, kernel_results = self.evaluate([
reduction_rslt, last_sample, kr])
self.assertEqual(np.mean(np.arange(2, 12, 2)), reduction_rslt)
self.assertEqual(5, last_sample)
self.assertEqual(5, kernel_results.counter_1)
self.assertEqual(10, kernel_results.counter_2)
def test_nested_reducers(self):
fake_kernel = test_fixtures.TestTransitionKernel()
fake_reducers = [
[test_fixtures.NaiveMeanReducer(),
tfp.experimental.mcmc.CovarianceReducer()],
[test_fixtures.NaiveMeanReducer()]]
reduction_rslt, last_sample, kr = tfp.experimental.mcmc.sample_fold(
num_steps=3,
current_state=0.,
kernel=fake_kernel,
reducer=fake_reducers)
reduction_rslt, last_sample, kernel_results = self.evaluate([
reduction_rslt, last_sample, kr])
self.assertEqual(2, len(reduction_rslt))
self.assertEqual(2, len(reduction_rslt[0]))
self.assertEqual(1, len(reduction_rslt[1]))
self.assertEqual(2, reduction_rslt[0][0])
self.assertNear(2/3, reduction_rslt[0][1], err=1e-6)
self.assertEqual(3, last_sample)
self.assertEqual(3, kernel_results.counter_1)
self.assertEqual(6, kernel_results.counter_2)
def test_true_streaming_covariance(self):
seed = test_util.test_seed()
fake_kernel = test_fixtures.TestTransitionKernel(())
cov_reducer = tfp.experimental.mcmc.CovarianceReducer()
reduction_rslt, _, _ = tfp.experimental.mcmc.sample_fold(
num_steps=20,
current_state=tf.convert_to_tensor([0., 0.]),
kernel=fake_kernel,
reducer=cov_reducer,
seed=seed,)
reduction_rslt = self.evaluate(reduction_rslt)
self.assertAllClose(
np.cov(np.column_stack((np.arange(20), np.arange(20))).T, ddof=0),
reduction_rslt,
rtol=1e-5)
def test_batched_streaming_covariance(self):
fake_kernel = test_fixtures.TestTransitionKernel((2, 3))
cov_reducer = tfp.experimental.mcmc.CovarianceReducer(event_ndims=1)
reduction_rslt, last_sample, _ = tfp.experimental.mcmc.sample_fold(
num_steps=5,
current_state=tf.convert_to_tensor(
[[0., 0., 0.], [0., 0., 0.]]),
kernel=fake_kernel,
reducer=cov_reducer)
reduction_rslt = self.evaluate(reduction_rslt)
self.assertEqual((2, 3, 3), reduction_rslt.shape)
self.assertAllEqual(np.ones(reduction_rslt.shape) * 2, reduction_rslt)
self.assertAllEqualNested(last_sample, [[5., 5., 5.], [5., 5., 5.]])
def test_seed_reproducibility(self):
seed = samplers.sanitize_seed(test_util.test_seed())
fake_kernel = test_fixtures.RandomTransitionKernel()
fake_reducer = test_fixtures.NaiveMeanReducer()
first_reduction_rslt, _, _ = tfp.experimental.mcmc.sample_fold(
num_steps=3,
current_state=0.,
kernel=fake_kernel,
reducer=fake_reducer,
seed=seed)
second_reduction_rslt, _, _ = tfp.experimental.mcmc.sample_fold(
num_steps=3,
current_state=0.,
kernel=fake_kernel,
reducer=fake_reducer,
seed=seed)
first_reduction_rslt, second_reduction_rslt = self.evaluate([
first_reduction_rslt, second_reduction_rslt])
self.assertEqual(first_reduction_rslt, second_reduction_rslt)
def test_thinning_and_burnin(self):
fake_kernel = test_fixtures.TestTransitionKernel()
fake_reducer = test_fixtures.NaiveMeanReducer()
reduction_rslt, last_sample, kr = tfp.experimental.mcmc.sample_fold(
num_steps=5,
current_state=0.,
kernel=fake_kernel,
reducer=fake_reducer,
num_burnin_steps=10,
num_steps_between_results=1)
reduction_rslt, last_sample, kernel_results = self.evaluate([
reduction_rslt,
last_sample,
kr])
self.assertEqual(16, reduction_rslt)
self.assertEqual(20, last_sample)
self.assertEqual(
20, kernel_results.counter_1)
self.assertEqual(
40, kernel_results.counter_2)
def test_tensor_thinning_and_burnin(self):
fake_kernel = test_fixtures.TestTransitionKernel()
fake_reducer = test_fixtures.NaiveMeanReducer()
reduction_rslt, last_sample, kr = tfp.experimental.mcmc.sample_fold(
num_steps=tf.convert_to_tensor(5),
current_state=0.,
kernel=fake_kernel,
reducer=fake_reducer,
num_burnin_steps=tf.convert_to_tensor(10),
num_steps_between_results=tf.convert_to_tensor(1))
reduction_rslt, last_sample, kernel_results = self.evaluate([
reduction_rslt,
last_sample,
kr])
self.assertEqual(16, reduction_rslt)
self.assertEqual(20, last_sample)
self.assertEqual(
20, kernel_results.counter_1)
self.assertEqual(
40, kernel_results.counter_2)
def test_none_reducer(self):
fake_kernel = test_fixtures.TestTransitionKernel()
reduction_rslt, last_sample, kr = tfp.experimental.mcmc.sample_fold(
num_steps=5,
current_state=0.,
kernel=fake_kernel,
reducer=None,
num_burnin_steps=10,
num_steps_between_results=1)
last_sample, kernel_results = self.evaluate([
last_sample, kr])
self.assertIsNone(reduction_rslt)
self.assertEqual(20, last_sample)
self.assertEqual(20, kernel_results.counter_1)
self.assertEqual(40, kernel_results.counter_2)
def test_empty_reducer(self):
fake_kernel = test_fixtures.TestTransitionKernel()
reduction_rslt, last_sample, kr = tfp.experimental.mcmc.sample_fold(
num_steps=5,
current_state=0.,
kernel=fake_kernel,
reducer=[],
num_burnin_steps=10,
num_steps_between_results=1)
last_sample, kernel_results = self.evaluate([
last_sample, kr])
self.assertEqual([], reduction_rslt)
self.assertEqual(20, last_sample)
self.assertEqual(20, kernel_results.counter_1)
self.assertEqual(40, kernel_results.counter_2)
@test_util.test_all_tf_execution_regimes
class SampleChainTest(test_util.TestCase):
def setUp(self):
self._shape_param = 5.
self._rate_param = 10.
super(SampleChainTest, self).setUp()
def test_basic_operation(self):
kernel = test_fixtures.TestTransitionKernel()
result = tfp.experimental.mcmc.sample_chain_with_burnin(
num_results=2,
current_state=0.,
kernel=kernel,
seed=test_util.test_seed())
samples = result.trace
kernel_results = result.final_kernel_results
self.assertAllClose(
[2], tensorshape_util.as_list(samples.shape))
samples, kernel_results = self.evaluate([samples, kernel_results])
self.assertAllClose([1, 2], samples)
self.assertAllClose(2, kernel_results.counter_1)
self.assertAllClose(4, kernel_results.counter_2)
# Warm-restart the underlying kernel. The Trace does not support warm
# restart.
result_2 = tfp.experimental.mcmc.sample_chain_with_burnin(
num_results=2,
**result.resume_kwargs)
samples_2, kernel_results_2 = self.evaluate(
[result_2.trace, result_2.final_kernel_results])
self.assertAllClose([3, 4], samples_2)
self.assertAllClose(4, kernel_results_2.counter_1)
self.assertAllClose(8, kernel_results_2.counter_2)
def test_burn_in(self):
kernel = test_fixtures.TestTransitionKernel()
result = tfp.experimental.mcmc.sample_chain_with_burnin(
num_results=2,
current_state=0.,
kernel=kernel,
num_burnin_steps=1,
seed=test_util.test_seed())
samples = result.trace
kernel_results = result.final_kernel_results
self.assertAllClose([2], tensorshape_util.as_list(samples.shape))
samples, kernel_results = self.evaluate([samples, kernel_results])
self.assertAllClose([2, 3], samples)
self.assertAllClose(3, kernel_results.counter_1)
self.assertAllClose(6, kernel_results.counter_2)
def test_thinning(self):
kernel = test_fixtures.TestTransitionKernel()
result = tfp.experimental.mcmc.sample_chain_with_burnin(
num_results=2,
current_state=0.,
kernel=kernel,
num_steps_between_results=2,
seed=test_util.test_seed())
samples = result.trace
kernel_results = result.final_kernel_results
self.assertAllClose([2], tensorshape_util.as_list(samples.shape))
samples, kernel_results = self.evaluate([samples, kernel_results])
self.assertAllClose([3, 6], samples)
self.assertAllClose(6, kernel_results.counter_1)
self.assertAllClose(12, kernel_results.counter_2)
def test_custom_trace(self):
kernel = test_fixtures.TestTransitionKernel()
res = tfp.experimental.mcmc.sample_chain_with_burnin(
num_results=2,
current_state=0.,
kernel=kernel,
trace_fn=lambda *args: args,
seed=test_util.test_seed())
trace = res.trace
self.assertAllClose([2], tensorshape_util.as_list(trace[0].shape))
self.assertAllClose(
[2], tensorshape_util.as_list(trace[1].counter_1.shape))
self.assertAllClose(
[2], tensorshape_util.as_list(trace[1].counter_2.shape))
trace = self.evaluate(trace)
self.assertAllClose([1, 2], trace[0])
self.assertAllClose([1, 2], trace[1].counter_1)
self.assertAllClose([2, 4], trace[1].counter_2)
def test_is_calibrated(self):
with warnings.catch_warnings(record=True) as triggered:
kernel = test_fixtures.TestTransitionKernel(is_calibrated=False)
tfp.experimental.mcmc.sample_chain_with_burnin(
num_results=2,
current_state=0.,
kernel=kernel,
trace_fn=lambda current_state, kernel_results: kernel_results,
seed=test_util.test_seed())
self.assertTrue(
any('supplied `TransitionKernel` is not calibrated.' in str(
warning.message) for warning in triggered))
def test_reproduce_bug159550941(self):
# Reproduction for b/159550941.
input_signature = [tf.TensorSpec([], tf.int32)]
@tf.function(input_signature=input_signature)
def sample(chains):
initial_state = tf.zeros([chains, 1])
def log_prob(x):
return tf.reduce_sum(tfp.distributions.Normal(0, 1).log_prob(x), -1)
kernel = tfp.mcmc.HamiltonianMonteCarlo(
target_log_prob_fn=log_prob,
num_leapfrog_steps=3,
step_size=1e-3)
results = tfp.experimental.mcmc.sample_chain_with_burnin(
num_results=5,
num_burnin_steps=4,
current_state=initial_state,
kernel=kernel)
return results.trace
# Checking that shape inference doesn't fail.
sample(2)
def test_seed_reproducibility(self):
first_fake_kernel = test_fixtures.RandomTransitionKernel()
second_fake_kernel = test_fixtures.RandomTransitionKernel()
seed = test_util.test_seed(sampler_type='stateless')
first_trace = tfp.experimental.mcmc.sample_chain_with_burnin(
num_results=5,
current_state=0.,
kernel=first_fake_kernel,
seed=seed).trace
second_trace = tfp.experimental.mcmc.sample_chain_with_burnin(
num_results=5,
current_state=1., # difference should be irrelevant
kernel=second_fake_kernel,
seed=seed).trace
first_trace, second_trace = self.evaluate([
first_trace, second_trace])
self.assertAllCloseNested(
first_trace, second_trace, rtol=1e-6)
@test_util.test_graph_mode_only
class SampleChainGraphTest(test_util.TestCase):
def test_chain_works_correlated_multivariate(self):
dtype = np.float32
true_mean = dtype([0, 0])
true_cov = dtype([[1, 0.5],
[0.5, 1]])
true_cov_chol = np.linalg.cholesky(true_cov)
num_results = 3000
counter = collections.Counter()
@tf.function
def target_log_prob(x, y):
counter['target_calls'] += 1
# Corresponds to unnormalized MVN.
# z = matmul(inv(chol(true_cov)), [x, y] - true_mean)
z = tf.stack([x, y], axis=-1) - true_mean
z = tf.linalg.triangular_solve(true_cov_chol, z[..., tf.newaxis])[..., 0]
return -0.5 * tf.reduce_sum(z**2., axis=-1)
states = tfp.experimental.mcmc.sample_chain_with_burnin(
num_results=num_results,
current_state=[dtype(-2), dtype(2)],
kernel=tfp.mcmc.HamiltonianMonteCarlo(
target_log_prob_fn=target_log_prob,
step_size=[0.5, 0.5],
num_leapfrog_steps=2),
num_burnin_steps=20,
num_steps_between_results=1,
seed=test_util.test_seed()).trace
self.assertAllEqual(dict(target_calls=1), counter)
states = tf.stack(states, axis=-1)
self.assertEqual(num_results, tf.compat.dimension_value(states.shape[0]))
sample_mean = tf.reduce_mean(states, axis=0)
x = states - sample_mean
sample_cov = tf.matmul(x, x, transpose_a=True) / dtype(num_results)
sample_mean_, sample_cov_ = self.evaluate([sample_mean, sample_cov])
self.assertAllClose(true_mean, sample_mean_,
atol=0.1, rtol=0.)
self.assertAllClose(true_cov, sample_cov_,
atol=0., rtol=0.175)
if __name__ == '__main__':
tf.test.main()
|
apache-2.0
| 5,906,248,424,636,624,000 | 37.202083 | 82 | 0.670557 | false |
JesseBuesking/clocked
|
clocked/decorators.py
|
1
|
3351
|
""" Decorator support for clocked. """
import inspect
import new
from clocked.profiler_provider import ProfilerProvider
def _create_function_wrapper(obj, name):
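    # Wrap a plain function so that, when a profiler is active, each call is
    # recorded as a named timing step; otherwise the call passes straight through.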
def wrapper(*args, **kwargs):
profiler = ProfilerProvider.get_current_profiler()
if profiler is None:
return obj(*args, **kwargs)
else:
profiler.step_impl(name)
ret = obj(*args, **kwargs)
profiler.head.stop()
return ret
wrapper.__module__ = obj.__module__
wrapper.__name__ = obj.__name__
wrapper.__doc__ = obj.__doc__
wrapper.__dict__.update(getattr(obj, '__dict__', {}))
return wrapper
def _create_method_wrapper(obj, name):
if obj.im_self is not None:
def wrapper(*args, **kwargs):
profiler = ProfilerProvider.get_current_profiler()
if profiler is None:
return obj.__func__(*args, **kwargs)
else:
profiler.step_impl(name)
ret = obj.__func__(*args, **kwargs)
profiler.head.stop()
return ret
wrapper.__module__ = obj.__func__.__module__
wrapper.__name__ = obj.__func__.__name__
wrapper.__doc__ = obj.__func__.__doc__
wrapper.__dict__.update(getattr(obj.__func__, '__dict__', {}))
wrapper = new.instancemethod(wrapper, obj.im_self)
else:
def wrapper(*args, **kwargs):
profiler = ProfilerProvider.get_current_profiler()
if profiler is None:
return obj.__func__(*args, **kwargs)
else:
profiler.step_impl(name)
ret = obj.__func__(*args, **kwargs)
profiler.head.stop()
return ret
wrapper.__module__ = obj.__func__.__module__
wrapper.__name__ = obj.__func__.__name__
wrapper.__doc__ = obj.__func__.__doc__
wrapper.__dict__.update(getattr(obj.__func__, '__dict__', {}))
return wrapper
def clocked(obj):
"""
    Clocked decorator. Put this on a class or an individual function for its
    timing information to be tracked.
"""
_is_class = inspect.isclass(obj)
_is_func = inspect.isfunction(obj)
if not _is_class and not _is_func:
raise Exception('unsupported type {}'.format(type(obj)))
if _is_func:
return _create_function_wrapper(obj, '{}.{}:{}'.format(
obj.__module__,
obj.__name__,
obj.func_code.co_firstlineno
))
elif _is_class:
for name, method in inspect.getmembers(obj):
if inspect.isfunction(method):
wrapper = _create_function_wrapper(method, '{}.{}.{}:{}'.format(
obj.__module__,
obj.__name__,
method.__name__,
method.func_code.co_firstlineno
))
                setattr(obj, name, staticmethod(wrapper))
elif inspect.ismethod(method):
wrapper = _create_method_wrapper(method, '{}.{}.{}:{}'.format(
obj.__module__,
obj.__name__,
method.__func__.__name__,
method.func_code.co_firstlineno
))
setattr(obj, name, wrapper)
return obj
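# Hedged usage sketch (not part of the original module). It assumes
# ProfilerProvider.get_current_profiler() returns None when no profiler has been
# started, in which case the wrapper simply delegates to the wrapped callable;
# the function below is illustrative only.
if __name__ == '__main__':
    @clocked
    def expensive_call():
        """Toy function used only to demonstrate the decorator."""
        return sum(range(1000))

    print expensive_call()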
|
mit
| 8,303,731,516,610,316,000 | 32.51 | 80 | 0.509997 | false |
Molecular-Image-Recognition/Molecular-Image-Recognition
|
code/rmgpy/thermo/thermodataTest.py
|
2
|
11660
|
#!/usr/bin/env python
# encoding: utf-8
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2017 Prof. William H. Green (whgreen@mit.edu),
# Prof. Richard H. West (r.west@neu.edu) and the RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
"""
This script contains unit tests of the :mod:`rmgpy.thermo.thermodata` module.
"""
import unittest
import numpy
from rmgpy.thermo.thermodata import ThermoData
import rmgpy.constants as constants
################################################################################
class TestThermoData(unittest.TestCase):
"""
Contains unit tests of the :class:`ThermoData` class.
"""
def setUp(self):
"""
A function run before each unit test in this class.
"""
self.H298 = -32.9725
self.S298 = 27.5727
self.Tdata = numpy.array([300,400,500,600,800,1000,1500])
self.Cpdata = numpy.array([6.3827,7.80327,9.22175,10.5528,12.8323,14.6013,17.4089])
self.Cp0 = 4.0
self.CpInf = 21.5
self.Tmin = 100.
self.Tmax = 3000.
self.E0 = -782292.
self.comment = 'C2H6'
self.thermodata = ThermoData(
Tdata = (self.Tdata,"K"),
Cpdata = (self.Cpdata*constants.R,"J/(mol*K)"),
H298 = (self.H298*0.001*constants.R*298.,"kJ/mol"),
S298 = (self.S298*constants.R,"J/(mol*K)"),
Cp0 = (self.Cp0*constants.R,"J/(mol*K)"),
CpInf = (self.CpInf*constants.R,"J/(mol*K)"),
Tmin = (self.Tmin,"K"),
Tmax = (self.Tmax,"K"),
E0 = (self.E0,'J/mol'),
comment = self.comment,
)
def test_Tdata(self):
"""
Test that the ThermoData Tdata property was properly set.
"""
self.assertEqual(self.thermodata.Tdata.value_si.shape, self.Tdata.shape)
for T, T0 in zip(self.thermodata.Tdata.value_si, self.Tdata):
self.assertAlmostEqual(T, T0, 4)
def test_Cpdata(self):
"""
Test that the ThermoData Cpdata property was properly set.
"""
self.assertEqual(self.thermodata.Cpdata.value_si.shape, self.Cpdata.shape)
for Cp, Cp0 in zip(self.thermodata.Cpdata.value_si / constants.R, self.Cpdata):
self.assertAlmostEqual(Cp, Cp0, 4)
def test_H298(self):
"""
Test that the ThermoData H298 property was properly set.
"""
self.assertAlmostEqual(self.thermodata.H298.value_si / constants.R / 298., self.H298, 4)
def test_S298(self):
"""
Test that the ThermoData S298 property was properly set.
"""
self.assertAlmostEqual(self.thermodata.S298.value_si / constants.R, self.S298, 4)
def test_Cp0(self):
"""
Test that the ThermoData Cp0 property was properly set.
"""
self.assertAlmostEqual(self.thermodata.Cp0.value_si / constants.R, self.Cp0, 4)
def test_CpInf(self):
"""
Test that the ThermoData CpInf property was properly set.
"""
self.assertAlmostEqual(self.thermodata.CpInf.value_si / constants.R, self.CpInf, 4)
def test_Tmin(self):
"""
Test that the ThermoData Tmin property was properly set.
"""
self.assertAlmostEqual(self.thermodata.Tmin.value_si, self.Tmin, 6)
def test_Tmax(self):
"""
Test that the ThermoData Tmax property was properly set.
"""
self.assertAlmostEqual(self.thermodata.Tmax.value_si, self.Tmax, 6)
def test_E0(self):
"""
Test that the ThermoData E0 property was properly set.
"""
self.assertAlmostEqual(self.thermodata.E0.value_si, self.E0, 6)
def test_Comment(self):
"""
Test that the ThermoData comment property was properly set.
"""
self.assertEqual(self.thermodata.comment, self.comment)
def test_isTemperatureValid(self):
"""
Test the ThermoData.isTemperatureValid() method.
"""
Tdata = [200,400,600,800,1000,1200,1400,1600,1800,2000]
validdata = [True,True,True,True,True,True,True,True,True,True]
for T, valid in zip(Tdata, validdata):
valid0 = self.thermodata.isTemperatureValid(T)
self.assertEqual(valid0, valid)
def test_getHeatCapacity(self):
"""
Test the ThermoData.getHeatCapacity() method.
"""
Tlist = numpy.array([200,400,600,800,1000,1200,1400,1600,1800,2000])
Cpexplist = numpy.array([4.96208, 7.80327, 10.5528, 12.8323, 14.6013, 15.7243, 16.8473, 17.9704, 19.0934, 20.2165]) * constants.R
for T, Cpexp in zip(Tlist, Cpexplist):
Cpact = self.thermodata.getHeatCapacity(T)
self.assertAlmostEqual(Cpexp, Cpact, 2)
def test_getEnthalpy(self):
"""
Test the ThermoData.getEnthalpy() method.
"""
Tlist = numpy.array([200,400,600,800,1000,1200,1400,1600,1800,2000])
Hexplist = numpy.array([-51.9015, -22.7594, -12.1063, -6.15660, -2.18192, 0.708869, 2.93415, 4.74350, 6.27555, 7.61349]) * constants.R * Tlist
for T, Hexp in zip(Tlist, Hexplist):
Hact = self.thermodata.getEnthalpy(T)
self.assertAlmostEqual(Hexp, Hact, delta=1e0)
def test_getEntropy(self):
"""
Test the ThermoData.getEntropy() method.
"""
Tlist = numpy.array([200,400,600,800,1000,1200,1400,1600,1800,2000])
Sexplist = numpy.array([25.3347, 29.6460, 33.3386, 36.6867, 39.7402, 42.5016, 45.0098, 47.3328, 49.5142, 51.5841]) * constants.R
for T, Sexp in zip(Tlist, Sexplist):
Sact = self.thermodata.getEntropy(T)
self.assertAlmostEqual(Sexp, Sact, 3)
def test_getFreeEnergy(self):
"""
Test the ThermoData.getFreeEnergy() method.
"""
Tlist = numpy.array([200,400,600,800,1000,1200,1400,1600,1800,2000])
for T in Tlist:
Gexp = self.thermodata.getEnthalpy(T) - T * self.thermodata.getEntropy(T)
Gact = self.thermodata.getFreeEnergy(T)
self.assertAlmostEqual(Gexp, Gact, 3)
def test_pickle(self):
"""
Test that a ThermoData object can be successfully pickled and
unpickled with no loss of information.
"""
import cPickle
thermodata = cPickle.loads(cPickle.dumps(self.thermodata))
self.assertEqual(self.thermodata.Tdata.value.shape, thermodata.Tdata.value.shape)
for T, T0 in zip(self.thermodata.Tdata.value, thermodata.Tdata.value):
self.assertAlmostEqual(T, T0, 4)
self.assertEqual(self.thermodata.Tdata.units, thermodata.Tdata.units)
self.assertEqual(self.thermodata.Cpdata.value.shape, thermodata.Cpdata.value.shape)
for Cp, Cp0 in zip(self.thermodata.Cpdata.value, thermodata.Cpdata.value):
self.assertAlmostEqual(Cp, Cp0, 3)
self.assertEqual(self.thermodata.Cpdata.units, thermodata.Cpdata.units)
self.assertAlmostEqual(self.thermodata.H298.value, thermodata.H298.value, 4)
self.assertEqual(self.thermodata.H298.units, thermodata.H298.units)
self.assertAlmostEqual(self.thermodata.S298.value, thermodata.S298.value, 2)
self.assertEqual(self.thermodata.S298.units, thermodata.S298.units)
self.assertAlmostEqual(self.thermodata.Cp0.value, thermodata.Cp0.value, 4)
self.assertEqual(self.thermodata.Cp0.units, thermodata.Cp0.units)
self.assertAlmostEqual(self.thermodata.CpInf.value, thermodata.CpInf.value, 3)
self.assertEqual(self.thermodata.CpInf.units, thermodata.CpInf.units)
self.assertAlmostEqual(self.thermodata.Tmin.value, thermodata.Tmin.value, 4)
self.assertEqual(self.thermodata.Tmin.units, thermodata.Tmin.units)
self.assertAlmostEqual(self.thermodata.Tmax.value, thermodata.Tmax.value, 4)
self.assertEqual(self.thermodata.Tmax.units, thermodata.Tmax.units)
self.assertAlmostEqual(self.thermodata.E0.value, thermodata.E0.value, 4)
self.assertEqual(self.thermodata.E0.units, thermodata.E0.units)
self.assertEqual(self.thermodata.label,thermodata.label)
self.assertEqual(self.thermodata.comment, thermodata.comment)
def test_repr(self):
"""
Test that a ThermoData object can be successfully reconstructed from its
repr() output with no loss of information.
"""
thermodata = None
exec('thermodata = {0!r}'.format(self.thermodata))
self.assertEqual(self.thermodata.Tdata.value.shape, thermodata.Tdata.value.shape)
for T, T0 in zip(self.thermodata.Tdata.value, thermodata.Tdata.value):
self.assertAlmostEqual(T, T0, 4)
self.assertEqual(self.thermodata.Tdata.units, thermodata.Tdata.units)
self.assertEqual(self.thermodata.Cpdata.value.shape, thermodata.Cpdata.value.shape)
for Cp, Cp0 in zip(self.thermodata.Cpdata.value, thermodata.Cpdata.value):
self.assertAlmostEqual(Cp, Cp0, 3)
self.assertEqual(self.thermodata.Cpdata.units, thermodata.Cpdata.units)
self.assertAlmostEqual(self.thermodata.H298.value, thermodata.H298.value, 4)
self.assertEqual(self.thermodata.H298.units, thermodata.H298.units)
self.assertAlmostEqual(self.thermodata.S298.value, thermodata.S298.value, 2)
self.assertEqual(self.thermodata.S298.units, thermodata.S298.units)
self.assertAlmostEqual(self.thermodata.Cp0.value, thermodata.Cp0.value, 4)
self.assertEqual(self.thermodata.Cp0.units, thermodata.Cp0.units)
self.assertAlmostEqual(self.thermodata.CpInf.value, thermodata.CpInf.value, 3)
self.assertEqual(self.thermodata.CpInf.units, thermodata.CpInf.units)
self.assertAlmostEqual(self.thermodata.Tmin.value, thermodata.Tmin.value, 4)
self.assertEqual(self.thermodata.Tmin.units, thermodata.Tmin.units)
self.assertAlmostEqual(self.thermodata.Tmax.value, thermodata.Tmax.value, 4)
self.assertEqual(self.thermodata.Tmax.units, thermodata.Tmax.units)
self.assertAlmostEqual(self.thermodata.E0.value, thermodata.E0.value, 4)
self.assertEqual(self.thermodata.E0.units, thermodata.E0.units)
self.assertEqual(self.thermodata.label, thermodata.label)
self.assertEqual(self.thermodata.comment, thermodata.comment)
|
mit
| -7,168,585,736,887,390,000 | 45.454183 | 150 | 0.64434 | false |
Titulacion-Sistemas/PythonTitulacion-EV
|
Lib/site-packages/pylint/test/functional/abstract_class_instantiated_py3.py
|
1
|
1178
|
"""Check that instantiating a class with
`abc.ABCMeta` as metaclass fails if it defines
abstract methods.
"""
# pylint: disable=too-few-public-methods, missing-docstring
# pylint: disable=abstract-class-not-used, abstract-class-little-used
# pylint: disable=abstract-method
__revision__ = 0
import abc
class GoodClass(object, metaclass=abc.ABCMeta):
pass
class SecondGoodClass(object, metaclass=abc.ABCMeta):
def test(self):
""" do nothing. """
class ThirdGoodClass(object, metaclass=abc.ABCMeta):
""" This should not raise the warning. """
def test(self):
raise NotImplementedError()
class BadClass(object, metaclass=abc.ABCMeta):
@abc.abstractmethod
def test(self):
""" do nothing. """
class SecondBadClass(object, metaclass=abc.ABCMeta):
@property
@abc.abstractmethod
def test(self):
""" do nothing. """
class ThirdBadClass(SecondBadClass):
pass
def main():
""" do nothing """
GoodClass()
SecondGoodClass()
ThirdGoodClass()
BadClass() # [abstract-class-instantiated]
SecondBadClass() # [abstract-class-instantiated]
ThirdBadClass() # [abstract-class-instantiated]
|
mit
| 8,542,265,376,761,066,000 | 23.541667 | 69 | 0.689304 | false |
sequarius/DockerCenter
|
client/dockerc.py
|
1
|
6400
|
import sys
from idl.ttypes import CommandDTO
from thrift import Thrift
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from idl import CenterSynRPCService
from Command import Command
from prettytable import PrettyTable
sys.path.append("./idl")
docker_center_command = ['node-list', "create-job", "job-list", "start-job", "stop-job", 'help', 'version']
docker_center_param_name = ['--node-tag']
BILLION = 100000000
def execute_command(dc_command):
try:
# transport = TSocket.TSocket('localhost', 9047)
transport = TSocket.TSocket('192.168.30.1', 9047)
transport = TTransport.TBufferedTransport(transport)
protocol = TBinaryProtocol.TBinaryProtocol(transport)
client = CenterSynRPCService.Client(protocol)
transport.open()
        dto = CommandDTO(dc_command.command, dc_command.docker_params)
dto.nodeTag = int(dc_command.center_params['--node-tag'])
result = client.executeCommand(dto)
print(result.returnMessage)
except Thrift.TException as e:
print(e)
def parse_param(args):
center_param_map = {}
docker_param_list = []
dc_command = Command()
if len(args) < 2:
dc_command.command = 'help'
        return dc_command
dc_command.command = args[1]
skip_loop = False
for x in args[2:]:
if skip_loop:
skip_loop = False
continue
if x in docker_center_param_name:
center_param_map[x] = args[args.index(x) + 1]
skip_loop = True
else:
docker_param_list.append(x)
dc_command.center_params = center_param_map
dc_command.docker_params = docker_param_list
return dc_command
# def get_docker_engine_version(version_str):
# lines = version_str.split('\n')
# for i, line in enumerate(lines):
# if "Server:" not in line:
# continue
# if "Version:" in lines[i + 1]:
# return lines[i + 1].replace("Version:", "").strip()
# return "UNKNOWN"
def get_node_info():
try:
client, transport = get_thrift_client()
transport.open()
result = client.getNodeMap()
transport.close()
x = PrettyTable(["Tag", "Name", "Node Ip", "version", "status", "Architecture", "Free Disk", "Free Memory",
"Response Time", "Container Running/Total"])
for node in result.values():
x.add_row([node.tag, node.name, node.ip, node.dockerVersion, node.dockerStatus,
node.architecture,
node.freeDiskSpace / BILLION, node.freeMemorySpace / BILLION, node.responseTime,
str(node.RunningContainerCount)
+ '/' + str(node.containerCount)])
print(x)
except Thrift.TException as e:
print(e)
def create_job(job_name):
try:
client, transport = get_thrift_client()
transport.open()
result = client.newJob(job_name)
transport.close()
print(result.message)
except Thrift.TException as e:
print(e)
def start_job(job_name):
try:
client, transport = get_thrift_client()
transport.open()
result = client.startJob(job_name)
transport.close()
print(result.message)
except Thrift.TException as e:
print(e)
def stop_job(job_name):
try:
print(job_name)
client, transport = get_thrift_client()
transport.open()
result = client.stopJob(job_name)
transport.close()
print(result.message)
except Thrift.TException as e:
print(e)
def get_thrift_client():
transport = TSocket.TSocket('localhost', 9047)
transport = TTransport.TBufferedTransport(transport)
protocol = TBinaryProtocol.TBinaryProtocol(transport)
client = CenterSynRPCService.Client(protocol)
return client, transport
def get_job_list():
try:
client, transport = get_thrift_client()
transport.open()
result = client.getJoblist()
transport.close()
print(result)
x = PrettyTable(["ID", "Name", "Status", "Deploy Strategy", "SubName Strategy"])
for job in result:
x.add_row([job.jobId, job.jobname, job.status, job.deployStrategy, job.subNameStrategy])
print(x)
except Thrift.TException as e:
print(e)
if __name__ == '__main__':
command = parse_param(sys.argv)
if command.command not in docker_center_command:
execute_command(command)
else:
if command.command == docker_center_command[0]:
get_node_info()
if command.command == docker_center_command[1]:
if len(command.docker_params) != 1:
                print("missing job name; try 'dockerc help' for usage.")
else:
job_name = command.docker_params[0]
create_job(job_name)
if command.command == docker_center_command[2]:
get_job_list()
if command.command == docker_center_command[3]:
if len(command.docker_params) != 1:
                print("missing job name; try 'dockerc help' for usage.")
else:
job_name = command.docker_params[0]
start_job(job_name)
if command.command == docker_center_command[4]:
if len(command.docker_params) != 1:
                print("missing job name; try 'dockerc help' for usage.")
else:
job_name = command.docker_params[0]
stop_job(job_name)
if command.command == docker_center_command[5]:
            print("--node-tag ${node_tag}\t use this param to choose the node on which to run the command.")
            print("node-list\t show all nodes registered in Docker Center.")
            print("job-list\t show all jobs registered in Docker Center.")
            print("create-job ${job_name}\t create a new job in Docker Center.")
            print("start-job ${job_name}\t start a job in Docker Center.")
            print("stop-job ${job_name}\t stop a job in Docker Center.")
            print("log ${job_name}\t show all archived logs of a job.")
            print("version\t show the current Docker Center version.")
if command.command == docker_center_command[6]:
print("Docker Center 1.0.0")
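# Hedged usage sketch (the job name, node tag, and docker arguments below are
# illustrative only; any first argument not in docker_center_command is forwarded
# to the server as a docker command):
#
#   python dockerc.py node-list
#   python dockerc.py create-job demo-job
#   python dockerc.py start-job demo-job
#   python dockerc.py run --node-tag 1 hello-world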
|
mit
| 9,125,652,749,247,856,000 | 33.978142 | 115 | 0.60375 | false |
nOkuda/activetm
|
activetm/labeled.py
|
1
|
3406
|
"""LabeledDataset for labeled datasets"""
import numpy as np
import ankura.pipeline
def get_labels(filename):
"""Since labels are assumed to be normalized from 0 to 1 in computing the
augmented Q matrix, this function will scale all labels to fit this range
"""
smallest = float('inf')
largest = float('-inf')
labels = {}
with open(filename) as ifh:
for line in ifh:
data = line.strip().split()
val = float(data[1])
labels[data[0]] = val
if val < smallest:
smallest = val
if val > largest:
largest = val
difference = largest - smallest
if difference < 1e-50:
# all of the label values were essentially the same, so just assign
# everything to have the same label
for label in labels:
labels[label] = 0.5
elif abs(difference - 1) > 1e-50:
for label in labels:
labels[label] = (labels[label] - smallest) / difference
# otherwise, the labels were already spanning the range 0 to 1, so no need
# to change anything
return labels
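# Worked example (illustrative file contents): for a labels file containing
#
#     doc1 2.0
#     doc2 4.0
#     doc3 6.0
#
# the spread is 6.0 - 2.0 = 4.0, so get_labels returns
# {'doc1': 0.0, 'doc2': 0.5, 'doc3': 1.0}, i.e. every label is rescaled to the
# [0, 1] range assumed when building the augmented Q matrix.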
class LabeledDataset(ankura.pipeline.Dataset):
"""Implementation of labeled dataset
Attributes of the object with the same names as those in
ankura.pipeline.Dataset have the same behaviors. The labels are stored in a
dictionary mapping titles to floats.
"""
def __init__(self, dataset, labels):
ankura.pipeline.Dataset.__init__(self, dataset.docwords, dataset.vocab, dataset.titles)
self.labels = labels
# precompute vanilla Q beforehand (useful for semi-supervised)
ankura.pipeline.Dataset.compute_cooccurrences(self)
self._dataset_cooccurrences = self._cooccurrences
# don't keep self._cooccurrences, since we want compute_cooccurrences to
# compute the proper augmented Q later
self._cooccurrences = None
def compute_cooccurrences(self):
orig_height, orig_width = self._dataset_cooccurrences.shape
self._cooccurrences = np.zeros((orig_height, orig_width+2))
self._cooccurrences[:, :-2] = self._dataset_cooccurrences
# multiply word counts per document with corresponding regressand
regressands = []
labeled_docs = []
for i, title in enumerate(self.titles):
if title in self.labels:
regressands.append(self.labels[title])
labeled_docs.append(i)
# TODO extract information directly (indexing into matrix is slow)
labeled_docwords = self.docwords[:, np.array(labeled_docs)]
# Make weighted sum for labels
reg_sums = labeled_docwords.dot(np.array(regressands))
# summing rows of sparse matrix returns a row matrix; but we want a
# numpy array
vocab_counts = np.array(labeled_docwords.sum(axis=1).T)[0]
#pylint:disable=consider-using-enumerate
for i in range(len(vocab_counts)):
if vocab_counts[i] > 0:
# divide by vocabulary count
self._cooccurrences[i, -2] = reg_sums[i] / vocab_counts[i]
# if vocab_counts[i] == 0, reg_sums[i, :] == np.zeros
# TODO was the above sufficient for making semi-supervised work?
# fill in second augmented column with 1 - average
self._cooccurrences[:, -1] = 1.0 - self._cooccurrences[:, -2]
|
gpl-3.0
| -1,385,774,416,320,325,000 | 41.575 | 95 | 0.630945 | false |
Fost/django-boot
|
config/settings.py
|
1
|
5558
|
"""
Django settings for {{ project_name }} project.
Common settings for all environments. Don't directly use this settings file,
use environments/development.py or environments/production.py and import this
file from there.
"""
import sys
from path import path
from django.conf import global_settings
PROJECT_NAME = "Boots Django"
PROJECT_ROOT = path(__file__).abspath().dirname().dirname()
sys.path.insert(0, PROJECT_ROOT / 'apps')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'GMT'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = False
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = PROJECT_ROOT / 'public/media'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = PROJECT_ROOT / 'public/static'
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
PROJECT_ROOT / 'static',
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '{{ secret_key }}'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'config.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'config.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
PROJECT_ROOT / 'templates/',
)
INSTALLED_APPS = (
'grappelli',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
# 3rd party apps
'south',
'django_extensions',
'mainpage'
# Project specific apps go here
# 'my_app',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
TEMPLATE_CONTEXT_PROCESSORS = global_settings.TEMPLATE_CONTEXT_PROCESSORS + ()
##############################################################################
# Third-party app settings
##############################################################################
# django-grappelli
GRAPPELLI_ADMIN_TITLE = PROJECT_NAME + " Admin"
|
mit
| 3,567,302,115,650,525,000 | 30.76 | 88 | 0.685498 | false |
maistrovas/Internet-Store
|
Internet_store/home/views.py
|
1
|
1180
|
from django.shortcuts import render
from django.conf import settings
from django.core.mail import send_mail
from django.shortcuts import render
from Internet_store.products.models import ProductFeatured, Product
def home(request):
title = 'Sign up now!'
featured_image = ProductFeatured.objects.filter(active=True).first()
products = Product.objects.all().order_by('?')[:6]
featured_products = Product.objects.all().order_by('?')[:6]
context = {
'title': title,
'featured_image': featured_image,
'products': products,
'featured_products': featured_products,
}
# if form.is_valid():
# # form.save()
# # print request.POST['email'] #not recommended
# instance = form.save(commit=False)
#
# full_name = form.cleaned_data.get("full_name")
# if not full_name:
# full_name = "New full name"
# instance.full_name = full_name
# # if not instance.full_name:
# # instance.full_name = "Justin"
# instance.save()
# context = {
# "title": "Thank you"
# }
return render(request, 'pages/home.html', context)
|
mit
| 1,821,171,190,496,549,000 | 30.052632 | 72 | 0.607627 | false |
syci/domsense-agilebg-addons
|
account_vat_on_payment/account.py
|
1
|
13339
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011-2012 Domsense s.r.l. (<http://www.domsense.com>).
# Copyright (C) 2012 Agile Business Group sagl (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
from tools.translate import _
import decimal_precision as dp
class account_voucher(osv.osv):
_inherit = "account.voucher"
_columns = {
'shadow_move_id': fields.many2one('account.move','Shadow Entry', readonly=True),
}
def is_vat_on_payment(self, voucher):
vat_on_p =0
valid_lines =0
if voucher.type in ('payment', 'receipt'):
for line in voucher.line_ids:
if line.amount:
valid_lines +=1
if line.move_line_id and line.move_line_id.invoice and line.move_line_id.invoice.vat_on_payment:
vat_on_p +=1
if vat_on_p and vat_on_p != valid_lines:
raise osv.except_osv(_('Error'), _("Can't handle VAT on payment if not every invoice is on a VAT on payment treatment"))
return vat_on_p
def action_move_line_create(self, cr, uid, ids, context=None):
if context is None:
context = {}
inv_pool = self.pool.get('account.invoice')
journal_pool = self.pool.get('account.journal')
move_line_pool = self.pool.get('account.move.line')
move_pool = self.pool.get('account.move')
currency_obj = self.pool.get('res.currency')
res=False
for voucher in self.browse(cr, uid, ids, context):
entry_posted = voucher.journal_id.entry_posted
# disable the 'skip draft state' option because "mixed" entry (shadow + real) won't pass validation. Anyway every entry will be posted later (if 'entry_posted' is enabled)
if entry_posted:
journal_pool.write(cr, uid, voucher.journal_id.id, {'entry_posted': False})
res=super(account_voucher,self).action_move_line_create(cr, uid, [voucher.id], context)
voucher.refresh() # because 'move_id' has been updated by 'action_move_line_create'
if entry_posted:
journal_pool.write(cr, uid, voucher.journal_id.id, {'entry_posted': True})
if self.is_vat_on_payment(voucher):
if not voucher.journal_id.vat_on_payment_related_journal_id:
raise osv.except_osv(_('Error'), _('We are on a VAT on payment treatment but journal %s does not have a related shadow journal') % voucher.journal_id.name)
lines_to_create = []
amounts_by_invoice = super(account_voucher,self).allocated_amounts_grouped_by_invoice(cr, uid,voucher, context)
for inv_id in amounts_by_invoice:
invoice = inv_pool.browse(cr, uid, inv_id, context)
for inv_move_line in invoice.move_id.line_id:
if inv_move_line.account_id.type != 'receivable' and inv_move_line.account_id.type != 'payable':
# compute the VAT or base line proportionally to the paid amount
new_line_amount = currency_obj.round(cr, uid, voucher.company_id.currency_id, ((amounts_by_invoice[invoice.id]['allocated'] + amounts_by_invoice[invoice.id]['write-off']) / amounts_by_invoice[invoice.id]['total']) * (inv_move_line.credit or inv_move_line.debit))
if not inv_move_line.real_account_id:
raise osv.except_osv(_('Error'), _('We are on a VAT on payment treatment but move line %s does not have a related real account') % inv_move_line.name)
# prepare the real move line
vals = {
'name': inv_move_line.name,
'account_id': inv_move_line.real_account_id.id,
'credit': inv_move_line.credit and new_line_amount or 0.0,
'debit': inv_move_line.debit and new_line_amount or 0.0,
'type': 'real',
'partner_id': inv_move_line.partner_id and inv_move_line.partner_id.id or False
}
if inv_move_line.tax_code_id:
if not inv_move_line.real_tax_code_id:
raise osv.except_osv(_('Error'), _('We are on a VAT on payment treatment but move line %s does not have a related real tax code') % inv_move_line.name)
vals['tax_code_id'] = inv_move_line.real_tax_code_id.id
if inv_move_line.tax_amount < 0:
vals['tax_amount'] = -new_line_amount
else:
vals['tax_amount'] = new_line_amount
lines_to_create.append(vals)
# prepare the shadow move line
vals={
'name': inv_move_line.name,
'account_id': inv_move_line.account_id.id,
'credit': inv_move_line.debit and new_line_amount or 0.0,
'debit': inv_move_line.credit and new_line_amount or 0.0,
'type': 'shadow',
'partner_id': inv_move_line.partner_id and inv_move_line.partner_id.id or False
}
if inv_move_line.tax_code_id:
vals['tax_code_id'] = inv_move_line.tax_code_id.id
if inv_move_line.tax_amount < 0:
vals['tax_amount'] = new_line_amount
else:
vals['tax_amount'] = -new_line_amount
lines_to_create.append(vals)
context['journal_id'] = voucher.journal_id.vat_on_payment_related_journal_id.id
context['period_id'] = voucher.move_id.period_id.id
shadow_move_id = move_pool.create(cr, uid, {
'journal_id': voucher.journal_id.vat_on_payment_related_journal_id.id,
'period_id': voucher.move_id.period_id.id,
'date': voucher.move_id.date,
}, context)
# move the payment move lines to shadow entry
for line in voucher.move_ids:
if line.account_id.type != 'liquidity':
line.write({
'move_id': shadow_move_id,
}, update_check=False)
# this will allow user to see the real entry from invoice payment tab
if line.account_id.type == 'receivable' or line.account_id.type == 'payable':
line.write({
'real_payment_move_id': voucher.move_id.id,
})
for line_to_create in lines_to_create:
if line_to_create['type'] == 'real':
line_to_create['move_id'] = voucher.move_id.id
elif line_to_create['type'] == 'shadow':
line_to_create['move_id'] = shadow_move_id
del line_to_create['type']
move_line_pool.create(cr, uid, line_to_create, context)
voucher.write({'shadow_move_id': shadow_move_id})
super(account_voucher,self).balance_move(cr, uid, shadow_move_id, context)
super(account_voucher,self).balance_move(cr, uid, voucher.move_id.id, context)
return res
def cancel_voucher(self, cr, uid, ids, context=None):
res = super(account_voucher,self).cancel_voucher(cr, uid, ids, context)
reconcile_pool = self.pool.get('account.move.reconcile')
move_pool = self.pool.get('account.move')
for voucher in self.browse(cr, uid, ids, context=context):
recs = []
if voucher.shadow_move_id:
for line in voucher.shadow_move_id.line_id:
if line.reconcile_id:
recs += [line.reconcile_id.id]
if line.reconcile_partial_id:
recs += [line.reconcile_partial_id.id]
reconcile_pool.unlink(cr, uid, recs)
if voucher.shadow_move_id:
move_pool.button_cancel(cr, uid, [voucher.shadow_move_id.id])
move_pool.unlink(cr, uid, [voucher.shadow_move_id.id])
return res
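    # Worked example of the proportional split computed above (figures are
    # illustrative only): for a 122.00 'VAT on payment' invoice (100.00 base +
    # 22.00 VAT) on which a voucher allocates 61.00 with no write-off, the ratio
    # (allocated + write-off) / total = 61.00 / 122.00 = 0.5, so each shadow line
    # is halved: the real entry records 11.00 of VAT and 50.00 of base, while the
    # shadow journal keeps the remaining 11.00 / 50.00 suspended until paid.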
class account_invoice(osv.osv):
def _get_vat_on_payment(self, cr, uid, context=None):
return self.pool.get('res.users').browse(cr, uid, uid, context).company_id.vat_on_payment
def finalize_invoice_move_lines(self, cr, uid, invoice_browse, move_lines):
"""
Use shadow accounts for journal entry to be generated, according to account and tax code related records
"""
move_lines = super(account_invoice,self).finalize_invoice_move_lines(cr, uid, invoice_browse, move_lines)
acc_pool = self.pool.get('account.account')
tax_code_pool = self.pool.get('account.tax.code')
new_move_lines = []
for line_tup in move_lines:
if invoice_browse.vat_on_payment:
if line_tup[2].get('account_id', False):
account = acc_pool.browse(cr, uid, line_tup[2]['account_id'])
if account.type != 'receivable' and account.type != 'payable':
if not account.vat_on_payment_related_account_id:
raise osv.except_osv(_('Error'), _('The invoice is \'VAT on payment\' but account %s does not have a related shadow account') % account.name)
line_tup[2]['real_account_id'] = line_tup[2]['account_id']
line_tup[2]['account_id'] = account.vat_on_payment_related_account_id.id
if line_tup[2].get('tax_code_id', False):
tax_code = tax_code_pool.browse(cr, uid, line_tup[2]['tax_code_id'])
if not tax_code.vat_on_payment_related_tax_code_id:
raise osv.except_osv(_('Error'), _('The invoice is \'VAT on payment\' but tax code %s does not have a related shadow tax code') % tax_code.name)
line_tup[2]['real_tax_code_id'] = line_tup[2]['tax_code_id']
line_tup[2]['tax_code_id'] = tax_code.vat_on_payment_related_tax_code_id.id
new_move_lines.append(line_tup)
return new_move_lines
_inherit = "account.invoice"
_columns = {
'vat_on_payment': fields.boolean('Vat on payment'),
}
_defaults = {
'vat_on_payment': _get_vat_on_payment,
}
class account_move_line(osv.osv):
_inherit = "account.move.line"
_columns = {
'real_payment_move_id': fields.many2one('account.move', 'Real payment entry'),
'real_account_id': fields.many2one('account.account','Real account'),
'real_tax_code_id': fields.many2one('account.tax.code','Real tax code'),
}
class account_account(osv.osv):
_inherit = "account.account"
_columns = {
'vat_on_payment_related_account_id': fields.many2one('account.account', 'Shadow Account for VAT on payment', help='Related account used for real registrations on a VAT on payment basis. Set the shadow account here'),
}
class account_tax_code(osv.osv):
_inherit = "account.tax.code"
_columns = {
'vat_on_payment_related_tax_code_id': fields.many2one('account.tax.code', 'Shadow Tax code for VAT on payment', help='Related tax code used for real registrations on a VAT on payment basis. Set the shadow tax code here'),
}
class account_journal(osv.osv):
_inherit = "account.journal"
_columns = {
'vat_on_payment_related_journal_id': fields.many2one('account.journal', 'Shadow Journal for VAT on payment', help='Related journal used for shadow registrations on a VAT on payment basis. Set the shadow journal here'),
}
|
gpl-2.0
| -3,210,202,336,083,480,600 | 55.521186 | 290 | 0.54022 | false |
willre/homework
|
day02/shopping.py
|
1
|
6490
|
#!/usr/bin/env python
# encoding: utf-8
"""
@version:
@author: Will
@license:
@contact:
@site: http://www.timesnotes.com
@file: shopping.py
@time: 15-11-3 上午8:48
"""
from collections import OrderedDict
########################################
# Budget amount
while True:
    tmp_yusuan = raw_input("Budget amount (smallest unit: yuan): ")
if tmp_yusuan.strip().isdigit():
price_of_goods = int(tmp_yusuan.strip())
break
else:
        print "Invalid input, please re-enter the budget amount"
# print price_of_goods
# Total price of purchased goods
sum_goods_price = 0
# Read goods information from the file
with open('goods.db') as goods_file:
data_of_goods = goods_file.readlines()
# print data_of_goods
# Declare ordered dicts and load the parsed data into them
dict_of_goods = OrderedDict()
dict_of_goods['id']={'name':'name','price':'price'}
dict_of_goods_of_bought = OrderedDict()
dict_of_goods_of_bought['id']={'name':'name','price':'price','count':'count','total':'total'}
for goods_info_str in data_of_goods:
goods_info = goods_info_str.strip().split(':')
dict_of_goods[int(goods_info[0])]={'name':goods_info[1],'price':goods_info[2]}
# print dict_of_goods
# Print the goods list
for ele in dict_of_goods:
print ele,'\t',dict_of_goods[ele]['name'],'\t\t',dict_of_goods[ele]['price']
print 'Shop by following the prompts; type help for help information'
def help_info():
    print 'Help: type "help" for this message; "cx" to list purchased goods; "wc" to finish shopping; "quit" to exit the shopping cart.'
return ''
login = True
while login:
    buy_goods_id_count = raw_input("Enter the goods id and quantity, separated by a space.\nGoods: ")
buy_goods_id_count_tmp_list = buy_goods_id_count.split()
if len(buy_goods_id_count_tmp_list)>2:
        print "Invalid input, please try again"
elif len(buy_goods_id_count_tmp_list)<1:
        print "Invalid input, please try again"
elif len(buy_goods_id_count_tmp_list) ==1:
        ## Print help information
if buy_goods_id_count_tmp_list[0].lower() == 'help':
print help_info()
# elif buy_goods_id_count_tmp_list[0] == 'wc':
# login =False
# pass
        # Query goods already purchased
elif buy_goods_id_count_tmp_list[0].lower() == 'cx':
# print dict_of_goods_of_bought
for goods_info_of_bought in dict_of_goods_of_bought:
                print '{:^10}{:^10}{:^10}{:^10}{:^10}'.format(goods_info_of_bought,\
dict_of_goods_of_bought[goods_info_of_bought]['name'],\
dict_of_goods_of_bought[goods_info_of_bought]['price'],\
dict_of_goods_of_bought[goods_info_of_bought]['count'],\
dict_of_goods_of_bought[goods_info_of_bought]['total'])
# print ele,'\t',buy_goods_id_count_tmp_list[ele]['name'],'\t\t',buy_goods_id_count_tmp_list[ele]['price']
        ## Quit the system
elif buy_goods_id_count_tmp_list[0].lower() == 'quit':
            print 'Exiting the system'
break
elif buy_goods_id_count_tmp_list[0].lower() == 'wc':
            print "Shopping finished"
break
else:
            print "Invalid input, please try again"
else:
if not buy_goods_id_count_tmp_list[0].isdigit() or not buy_goods_id_count_tmp_list[1].isdigit():
            print "Invalid input, please try again"
continue
else:
            # Both id and count are valid here
choose_goods_id = int(buy_goods_id_count_tmp_list[0])
choose_goods_count = int(buy_goods_id_count_tmp_list[1])
# print dict_of_goods[choose_goods_id]['price'] , choose_goods_count ,int(dict_of_goods[choose_goods_id]['price']) * choose_goods_count
            # Add the item to the purchased-goods dict
if str(choose_goods_id) not in dict_of_goods_of_bought:
dict_of_goods_of_bought[str(choose_goods_id)]={'name':dict_of_goods[choose_goods_id]['name'],
'price':dict_of_goods[choose_goods_id]['price'],
'count':choose_goods_count,
'total':int(dict_of_goods[choose_goods_id]['price']) * choose_goods_count}
else:
                dict_of_goods_of_bought[str(choose_goods_id)]['count'] += choose_goods_count
                dict_of_goods_of_bought[str(choose_goods_id)]['total'] += int(dict_of_goods[choose_goods_id]['price']) * choose_goods_count
sum_goods_price += int(dict_of_goods[choose_goods_id]['price']) * choose_goods_count
# print sum_goods_price
print 'Budget:', tmp_yusuan
print 'Total price of purchased goods:', sum_goods_price
chae = int(tmp_yusuan)-sum_goods_price
if chae <0:
    print "Total for this purchase:", sum_goods_price, 'amount over budget:', sum_goods_price-int(tmp_yusuan)
    print 'Items purchased:\n'
for goods_info_of_bought in dict_of_goods_of_bought:
        print '{:^10}{:^10}{:^10}{:^10}{:^10}'.format(goods_info_of_bought,\
dict_of_goods_of_bought[goods_info_of_bought]['name'],\
dict_of_goods_of_bought[goods_info_of_bought]['price'],\
dict_of_goods_of_bought[goods_info_of_bought]['count'],\
dict_of_goods_of_bought[goods_info_of_bought]['total'])
else:
    print "Total for this purchase:", sum_goods_price, 'remaining amount:', int(tmp_yusuan)-sum_goods_price
    print 'Items purchased:'
for goods_info_of_bought in dict_of_goods_of_bought:
        print '{:^10}{:^10}{:^10}{:^10}{:^10}'.format(goods_info_of_bought,\
dict_of_goods_of_bought[goods_info_of_bought]['name'],\
dict_of_goods_of_bought[goods_info_of_bought]['price'],\
dict_of_goods_of_bought[goods_info_of_bought]['count'],\
dict_of_goods_of_bought[goods_info_of_bought]['total'])
# Write the purchased goods to file
with open('shopping.db','w') as wf:
for result_shopping_ele in dict_of_goods_of_bought:
# print result_shopping_ele
tmp_shopping_data = ''
if result_shopping_ele != 'id':
tmp_shopping_data='{}:{}:{}:{}:{}:{}:{}|'.format(result_shopping_ele,\
dict_of_goods_of_bought[result_shopping_ele]['name'],\
dict_of_goods_of_bought[result_shopping_ele]['price'],\
dict_of_goods_of_bought[result_shopping_ele]['count'],\
dict_of_goods_of_bought[result_shopping_ele]['total'],tmp_yusuan,chae)
# print tmp_shopping_data
wf.write(tmp_shopping_data)
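# Hedged data-format note (not part of the original script): goods.db is expected
# to contain one item per line as "id:name:price", for example
#
#   1:apple:5
#   2:milk:12
#   3:bread:8
#
# and each purchased item is appended to shopping.db as an
# "id:name:price:count:total:budget:difference|" record by the loop above.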
|
gpl-2.0
| -1,441,964,848,486,135,300 | 33.934524 | 147 | 0.57805 | false |
LEX2016WoKaGru/pyClamster
|
gui/region.py
|
1
|
5174
|
# -*- coding: utf-8 -*-
"""
Created on 09.06.16
Created for pyclamster-gui
Copyright (C) {2016}
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# System modules
import copy
# External modules
import numpy as np
# Internal modules
__version__ = "0.1"
class region(object):
# image_margins = [left,right,top,bottom]
def __init__(self,center=None,image_margins=None,image_data=None,region_data=None):
self.center = np.array([0,0])
self.data = None # [[x, y, r, g, b], ...] (x,y based on region center)
if isinstance(center, np.ndarray):
if center.size == 2:
self.center = center
elif isinstance(image_margins, np.ndarray):
if image_margins.size == 4:
self.center = self._calc_image_center(image_margins)
if isinstance(region_data,np.ndarray):
if region_data.shape[1] == 5 and len(region_data.shape) == 2:
self.data = region_data
elif isinstance(image_data,np.ndarray):
if image_data.shape[1] == 5 and len(image_data.shape) == 2:
self.data = image_data
self.data[:,0] = self.data[:,0] - self.center[0]
self.data[:,1] = self.data[:,1] - self.center[1]
def addData(self, image, x, y, center = None):
if not isinstance(center,np.ndarray):
center = self.center
img_x = center[0] + x
img_y = center[1] + y
if not isinstance(self.data,np.ndarray):
self.data = np.array([x,y,image[img_x,img_y,0],image[img_x,img_y,1],image[img_x,img_y,2]]).T
else:
# remove already added entries
not_existing_entries = np.array([not [z[0],z[1]] in self.data[:,0:2].tolist() for z in zip(x,y)])
x = x[not_existing_entries]
y = y[not_existing_entries]
# get new data that should be added
new_data = np.array([x,y,image[img_x,img_y,0],image[img_x,img_y,1],image[img_x,img_y,2]]).T
# append existing data
self.data = np.append(self.data,new_data,0)
def removeData(self, x, y, center_offset = None):
if not isinstance(center_offset,np.ndarray):
center_offset = np.array([0,0])
x = x + center_offset[0]
y = y + center_offset[1]
# find existing entries
z = np.array([x,y]).T
if len(z.shape) == 1:
z = np.array([z])
existing_entries = np.array([z[i].tolist() in self.data[:,0:2].tolist() for i in range(len(z))])
z = z[existing_entries]
# define which entries to keep
keep_entries = [not di in z.tolist() for di in self.data[:,0:2].tolist()]
if any(keep_entries):
self.data = self.data[np.array(keep_entries)]
else:
self.data = None
def addRegion(self, region, center_offset = None):
new_data = copy.copy(region.data)
if not isinstance(center_offset,np.ndarray):
center_offset = region.center - self.center
new_data[:,0] = new_data[:,0] + center_offset[0]
new_data[:,1] = new_data[:,1] + center_offset[1]
# find not existing entries
not_existing_entries = np.array([not zi in self.data[:,0:2].tolist() for zi in new_data[:,0:2].tolist()])
self.data = np.append(self.data,new_data[not_existing_entries],0)
def removeRegion(self, region, center_offset = None):
rm_x = region.data[:,0]
rm_y = region.data[:,1]
if not isinstance(center_offset, np.ndarray):
center_offset = region.center - self.center
self.removeData(rm_x, rm_y, center_offset)
def cropImageRegion(self, image, center = None): #TODO
if not isinstance(center,np.ndarray):
center = self.center
pass
def exportToMask(self, image_margins, center = None): #TODO
if not isinstance(center,np.ndarray):
center = self.center
pass
def _calc_image_center(self, image_margins):
x = (image_margins[1]-image_margins[0])*.5
y = (image_margins[3]-image_margins[2])*.5
return np.array([x,y],dtype=int)
if __name__ == '__main__':
import numpy as np
r = region(region_data=np.array([[1,1,0,99,99]]),center=np.array([960,960]))
r2 = region(region_data=np.array([[1,1,99,0,99]]),center=np.array([0,0]))
r3 = region(image_data =np.array([[1,1,99,99,0]]),image_margins = np.array([0,1920,0,1920]))
r.addRegion(r2)
r.addRegion(r2,center_offset = np.array([10,100]))
|
gpl-3.0
| -3,441,065,994,838,054,400 | 37.61194 | 113 | 0.591998 | false |
gogetdata/ggd-cli
|
ggd/search.py
|
1
|
22038
|
# -------------------------------------------------------------------------------------------------------------
## Import Statements
# -------------------------------------------------------------------------------------------------------------
from __future__ import print_function
import sys
from .utils import get_builds, get_ggd_channels, get_species
SPECIES_LIST = sorted(get_species(update_files=False))
GENOME_BUILDS = sorted(get_builds("*"))
CHANNEL_LIST = [x.encode("ascii") for x in get_ggd_channels()]
# -------------------------------------------------------------------------------------------------------------
## Argument Parser
# -------------------------------------------------------------------------------------------------------------
def add_search(p):
c = p.add_parser(
"search",
help="Search for a ggd data package",
description="Search for available ggd data packages. Results are filtered by match score from high to low. (Only 5 results will be reported unless the -dn flag is changed)",
)
c.add_argument(
"search_term",
nargs="+",
help="**Required** The term(s) to search for. Multiple terms can be used. Example: 'ggd search reference genome'",
)
c.add_argument(
"--search-type",
default="both",
choices=["both", "combined-only", "non-combined-only"],
        help=(
            "(Optional) How to search for data packages with the search terms provided. Options = 'combined-only', 'non-combined-only', and 'both'."
            " 'combined-only' will use the provided search terms as a single search term. 'non-combined-only' will use the provided search terms to search for"
            " data packages that match each search term separately. 'both' will use the search terms combined and each search term separately to search"
            " for data packages. Default = 'both'"
),
)
c.add_argument(
"-g",
"--genome-build",
default=[],
action="append",
choices=[str(x) for x in GENOME_BUILDS],
help="(Optional) Filter results by the genome build of the desired recipe",
)
c.add_argument(
"-s",
"--species",
default=[],
action="append",
help="(Optional) Filter results by the species for the desired recipe",
choices=[str(x) for x in SPECIES_LIST],
)
c.add_argument(
"-dn",
"--display-number",
default=5,
help="(Optional) The number of search results to display. (Default = 5)",
)
c.add_argument(
"-m",
"--match-score",
default="90",
help="(Optional) A score between 0 and 100 to use to filter the results by. (Default = 90). The lower the number the more results will be output",
)
c.add_argument(
"-c",
"--channel",
help="(Optional) The ggd channel to search. (Default = genomics)",
choices=[x.decode("ascii") for x in CHANNEL_LIST],
default="genomics",
)
c.set_defaults(func=search)
# -------------------------------------------------------------------------------------------------------------
## Functions/Methods
# -------------------------------------------------------------------------------------------------------------
def load_json(jfile):
"""Method to load a json file into a dictionary
load_json
=========
Method to load a json file
Parameters:
---------
1) jfile: (str) The path to the json file
Returns:
1) (dict) A dictionary of a json object
"""
import json
with open(jfile) as jsonFile:
return json.load(jsonFile)
def load_json_from_url(json_url):
"""Method to load a json file from a url
load_json_from_url
==================
Method to load a json file from a url. Uses the requests module
to get the json file from the url.
Parameters:
---------
1) json_url: (str) The url to the json path
Returns:
++++++++
1) (dict) A dictionary of a json object
"""
import json
import traceback
import requests
try:
return requests.get(json_url).json()
except ValueError as e:
sys.stderr.write("\n:ggd:search: !!ERROR!! in loading json file from url")
sys.stderr.write("\n\t Invalid URL: %s" % json_url)
sys.stderr.write(str(e))
sys.stderr.write(traceback.format_exc())
sys.exit(1)
def search_packages(json_dict, search_terms, search_type="both", score_cutoff=50):
"""Method to search for ggd packages in the ggd channeldata.json metadata file based on user provided search terms
search_packages
===============
Method to search for ggd packages/recipes
containing specific search terms
NOTE: Both the package name and the package keywords are searched
Parameters:
---------
1) json_dict: (dict) A json file loaded into a dictionary. (The file to search)
the load_json_from_url() method creates the dictionary
2) search_terms: (list) A list of terms representing package names or keywords to search for
3) search_type: (str) A string matching either 'both', 'combined-only', or 'non-combined-only',
representing how to use the search terms.
4) score_cutoff: (int) A number between 0 and 100 that represent which matches to return
(Default = 50)
Returns:
++++++++
1) (dict) A list of pkg names who's either name or keyword match score reached the score cutoff
"""
import re
from collections import defaultdict
from fuzzywuzzy import fuzz, process
pkg_score = defaultdict(lambda: defaultdict(float))
## Get final search terms based on search type
final_search_terms = []
if search_type == "both":
final_search_terms.append(" ".join(search_terms))
final_search_terms.extend(search_terms)
if search_type == "combined-only":
final_search_terms.append(" ".join(search_terms))
if search_type == "non-combined-only":
final_search_terms = search_terms
## Search for data packages
for term in final_search_terms:
for pkg in json_dict["packages"].keys():
## Get match score between name and term
score = fuzz.partial_ratio(term.lower(), pkg.lower())
## Get the max score from all keyword scores found
keyword_max_score = max(
[
fuzz.ratio(term.lower(), x.lower())
for x in [
subkeyword
for keyword in json_dict["packages"][pkg]["keywords"]
for subkeyword in re.split("-|_", keyword.strip())
]
+ json_dict["packages"][pkg]["keywords"]
]
)
## Skip any package that does not meet the match score
if score < score_cutoff and keyword_max_score < score_cutoff:
continue
## Set max score in dict
if float(pkg_score[pkg]["pkg_score"]) < float(score):
pkg_score[pkg]["pkg_score"] = float(score)
if float(pkg_score[pkg]["keyword_score"]) < float(keyword_max_score):
pkg_score[pkg]["keyword_score"] = float(keyword_max_score)
## Get a final list of pkg names
temp_pkg_list = sorted(
[
[pkg, float(max_scores["pkg_score"])]
for pkg, max_scores in pkg_score.items()
if float(max_scores["pkg_score"]) >= float(score_cutoff)
or float(max_scores["keyword_score"]) >= float(score_cutoff)
],
key=lambda x: x[1],
reverse=True,
)
final_list = [pkg_list[0] for pkg_list in temp_pkg_list]
return final_list
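# Hedged usage sketch (the package names and keywords below are illustrative, not
# a real ggd channel); only the "packages" -> <name> -> "keywords" layout read by
# search_packages is assumed:
#
#   jdict = {"packages": {
#       "hg19-reference-genome-v1": {"keywords": ["reference-genome", "fasta"]},
#       "grch38-gaps-v1": {"keywords": ["gaps", "regions"]},
#   }}
#   matches = search_packages(jdict, ["reference", "genome"], "both", 50)
#   # the hg19 reference package is expected to rank first in `matches`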
def check_installed(ggd_recipe, ggd_jdict):
"""Method to check if the recipe has already been installed and is in the conda ggd storage path.
    check_installed
    ===============
This method is used to check if the ggd package has been installed and is located in the ggd storage path.
"""
import glob
import os
from .utils import conda_root
species = ggd_jdict["packages"][ggd_recipe]["identifiers"]["species"]
build = ggd_jdict["packages"][ggd_recipe]["identifiers"]["genome-build"]
version = ggd_jdict["packages"][ggd_recipe]["version"]
CONDA_ROOT = conda_root()
path = os.path.join(CONDA_ROOT, "share", "ggd", species, build, ggd_recipe, version)
recipe_exists = glob.glob(path)
if recipe_exists:
return (True, path)
else:
return (False, None)
def filter_by_identifiers(iden_keys, json_dict, filter_terms):
"""Method to filter a dictionary by an identifier field for the certain package.
filter_by_identifiers
=====================
A method used to filter the list of data packages by information in the
identifiers field in the channeldata.json file
Parameters:
----------
    1) iden_keys: (list) A list of the identifier keys. Example = ["species","genome-build"]
2) json_dict: (dict) The json dictionary created from load_json()
3) filter_terms: (list) A list of the term(s) to filter by. Example: ["Homo_sapiens","hg19"]
NOTE: List order of iden_keys should match list order of filter_terms
Returns:
++++++++
1) (dict) Updated/filtered json_dict
"""
import copy
keys = json_dict["packages"].keys()
key_count = len(keys)
keys_to_keep = set()
if len(iden_keys) > 0 and len(iden_keys) == len(filter_terms):
for key in keys:
for i, iden_key in enumerate(iden_keys):
if iden_key in json_dict["packages"][key]["identifiers"]:
if len(filter_terms[i]) == 0:
continue
if (
filter_terms[i]
in json_dict["packages"][key]["identifiers"][iden_key]
):
keys_to_keep.add(key)
new_json_dict = copy.deepcopy(json_dict)
## Remove packages
if len(keys_to_keep) > 0:
for key in keys:
if key not in keys_to_keep:
del new_json_dict["packages"][key]
if len(new_json_dict["packages"].keys()) == key_count:
## If unable to return a filtered set return the original match list
print(
"\n:ggd:search: WARNING: Unable to filter packages using: '%s'"
% ", ".join(filter_terms)
)
print("\tThe un-filtered list will be used\n")
return new_json_dict
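# Hedged usage sketch, mirroring the docstring example above (the channel dict is
# assumed to follow the same "packages" -> <name> -> "identifiers" layout):
#
#   filtered = filter_by_identifiers(["species", "genome-build"], jdict,
#                                    ["Homo_sapiens", "hg19"])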
def print_summary(search_terms, json_dict, match_list, installed_pkgs, installed_paths):
""" Method to print the summary/results of the search
print_summary
============
Method used to print out the final set of searched packages
Parameters:
---------
1) search_terms: (list) The search terms from the user
2) json_dict: (dict) The json dictionary from the load_json() method
3) match_list: (list) The filtered and final set of searched recipes
4) installed_pkgs: (set) A set of pkg names that are installed
5) installed_paths: (dict) A dictionary with keys = pkg names, values = installed paths
Returns:
+++++++
1) (bool) True if the summary was printed successfully
"""
dash = " " + "-" * 100
if len(match_list) < 1:
print(
"\n:ggd:search: No results for %s. Update your search term(s) and try again"
% ", ".join(search_terms)
)
sys.exit()
print("\n", dash)
for pkg in match_list:
results = []
if pkg in json_dict["packages"]:
# results.append("\n\t{} {}\n".format(("\033[1m" + "GGD Package:" + "\033[0m"), pkg))
results.append(
"\n\t{}\n\t{}".format(("\033[1m" + pkg + "\033[0m"), "=" * len(pkg))
)
if (
"summary" in json_dict["packages"][pkg]
and json_dict["packages"][pkg]["summary"]
):
results.append(
"\t{} {}".format(
("\033[1m" + "Summary:" + "\033[0m"),
json_dict["packages"][pkg]["summary"],
)
)
if (
"identifiers" in json_dict["packages"][pkg]
and json_dict["packages"][pkg]["identifiers"]
):
results.append(
"\t{} {}".format(
("\033[1m" + "Species:" + "\033[0m"),
json_dict["packages"][pkg]["identifiers"]["species"],
)
)
results.append(
"\t{} {}".format(
("\033[1m" + "Genome Build:" + "\033[0m"),
json_dict["packages"][pkg]["identifiers"]["genome-build"],
)
)
if (
"keywords" in json_dict["packages"][pkg]
and json_dict["packages"][pkg]["keywords"]
):
results.append(
"\t{} {}".format(
("\033[1m" + "Keywords:" + "\033[0m"),
", ".join(json_dict["packages"][pkg]["keywords"]),
)
)
if (
"tags" in json_dict["packages"][pkg]
and json_dict["packages"][pkg]["tags"]
):
if "cache" in json_dict["packages"][pkg]["tags"]:
results.append(
"\t{} {}".format(
("\033[1m" + "Cached:" + "\033[0m"),
json_dict["packages"][pkg]["tags"]["cached"],
)
)
if "data-provider" in json_dict["packages"][pkg]["tags"]:
results.append(
"\t{} {}".format(
("\033[1m" + "Data Provider:" + "\033[0m"),
json_dict["packages"][pkg]["tags"]["data-provider"],
)
)
if "data-version" in json_dict["packages"][pkg]["tags"]:
results.append(
"\t{} {}".format(
("\033[1m" + "Data Version:" + "\033[0m"),
json_dict["packages"][pkg]["tags"]["data-version"],
)
)
if "file-type" in json_dict["packages"][pkg]["tags"]:
results.append(
"\t{} {}".format(
("\033[1m" + "File type(s):" + "\033[0m"),
", ".join(json_dict["packages"][pkg]["tags"]["file-type"]),
)
)
if "genomic-coordinate-base" in json_dict["packages"][pkg]["tags"]:
results.append(
"\t{} {}".format(
("\033[1m" + "Data file coordinate base:" + "\033[0m"),
json_dict["packages"][pkg]["tags"][
"genomic-coordinate-base"
],
)
)
if "final-files" in json_dict["packages"][pkg]["tags"]:
results.append(
"\t{} {}".format(
("\033[1m" + "Included Data Files:" + "\033[0m"),
"\n\t\t"
+ "\n\t\t".join(
json_dict["packages"][pkg]["tags"]["final-files"]
),
)
)
else:
results.append(
"\t{} {}".format(
("\033[1m" + "Prefix Install WARNING:" + "\033[0m"),
(
"This package has not been set up to use the --prefix flag when running ggd install."
" Once installed, this package will work with other ggd tools that use the --prefix flag."
),
)
)
if "final-file-sizes" in json_dict["packages"][pkg]["tags"]:
results.append(
"\t{} {}".format(
("\033[1m" + "Approximate Data File Sizes:" + "\033[0m"),
"\n\t\t"
+ "\n\t\t".join(
[
"{}: {}".format(
x,
json_dict["packages"][pkg]["tags"][
"final-file-sizes"
][x],
)
for x in json_dict["packages"][pkg]["tags"][
"final-file-sizes"
]
]
),
)
)
if pkg in installed_pkgs: ## IF installed
results.append(
"\n\tThis package is already installed on your system.\n\t You can find the installed data files here: %s"
% installed_paths[pkg]
)
else:
from .utils import check_for_meta_recipes
results.append(
"\n\tTo install run:\n\t\tggd install %s %s"
% (
pkg,
"--id <meta-recipe ID>"
if check_for_meta_recipes(pkg, json_dict)
else "",
)
)
print("\n\n".join(results))
print("\n", dash)
print("\n\033[1m>>> Scroll up to see package details and install info <<<\033[0m")
longest_pkg_name = max(map(len, match_list)) + 2
print("\n\n" + ("*" * longest_pkg_name))
print("\033[1mPackage Name Results\033[0m")
print("====================\n")
print("\n".join(match_list))
print("\nNOTE: Name order matches order of packages in detailed section above")
print("*" * longest_pkg_name + "\n")
return True
def search(parser, args):
"""Main method for ggd search.
search
=====
Main method for running a recipe/package search
Parameters:
----------
1) parser
2) args
"""
from .utils import get_builds, get_channeldata_url
## load the channeldata.json file
j_dict = load_json_from_url(get_channeldata_url(args.channel))
## Remove the ggd key if it exists
ggd_key = j_dict["packages"].pop("ggd", None)
## identify if search_terms have any species or genome build in them
species_lower = {x.lower(): x for x in SPECIES_LIST}
gb_lower = {x.lower(): x for x in GENOME_BUILDS}
filtered_search_terms = []
for term in args.search_term:
if term.lower() in species_lower.keys():
if species_lower[term.lower()] not in args.species:
args.species.append(species_lower[term.lower()])
elif term.lower() in gb_lower.keys():
if gb_lower[term.lower()] not in args.genome_build:
args.genome_build.append(gb_lower[term.lower()])
else:
## Only use search terms that are not used to filter the results by identifiers
filtered_search_terms.append(term)
## genome_build takes precedence over species (If genome build provided, species is implied)
final_species_list = list(args.species)  ## copy, since species may be removed while iterating args.species
for species in args.species:
build = get_builds(species)
if [x for x in build if x in args.genome_build]:
final_species_list.remove(species)
args.species = final_species_list
## Filter the json dict by species or genome build if applicable
if args.genome_build or args.species:
j_dict = filter_by_identifiers(
["species"] * len(args.species) + ["genome-build"] * len(args.genome_build),
j_dict,
args.species + args.genome_build,
)
## Search pkg names and keywords
match_results = search_packages(
j_dict, filtered_search_terms, args.search_type, int(args.match_score)
)
## Get installed paths
installed_dict = {}
installed_set = set()
for pkg in match_results:
isinstalled, path = check_installed(pkg, j_dict)
if isinstalled:
installed_dict[pkg] = path
installed_set.add(pkg)
## Print search results
match_result_num = str(len(match_results))
if int(match_result_num) >= int(args.display_number):
subset_match_results = match_results[0 : int(args.display_number)]
else:
subset_match_results = match_results
## Print search results to STDOUT
printed = print_summary(
args.search_term, j_dict, subset_match_results, installed_set, installed_dict
)
## Add a comment if a subset of search results are provided
if int(match_result_num) > int(args.display_number):
print(
"\n\n:ggd:search: NOTE: Only showing results for top {d} of {m} matches.".format(
d=str(args.display_number), m=match_result_num
)
)
print(
":ggd:search: To display all matches append your search command with '-dn {m}'".format(
m=match_result_num
)
)
print(
"\n\t ggd search {t} -dn {m}\n".format(
t=" ".join(args.search_term), m=match_result_num
)
)
## Return result of print_summary
return printed
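## Illustrative CLI usage (the "ggd search" wiring and the short flags for species and
## genome build are assumptions; only "-dn" is confirmed by the hint printed above):
##   ggd search reference genome -s Homo_sapiens -g hg19
##   ggd search cpg islands -dn 20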
|
mit
| -37,952,016,967,056,370 | 36.038655 | 181 | 0.493602 | false |
django-oscar/django-oscar-mws
|
oscar_mws/test/factories.py
|
1
|
4670
|
import factory
from time import time
from decimal import Decimal as D
from django.utils.timezone import now
from django.db.models import get_model
from oscar.core.loading import get_class
from oscar_mws import MWS_MARKETPLACE_US
Selector = get_class('partner.strategy', 'Selector')
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_model('auth', 'User')
first_name = 'Peter'
last_name = 'Griffin'
email = 'peter@petoria.pt'
password = 'plaintext'
class CountryFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_model('address', 'Country')
iso_3166_1_a2 = factory.Iterator(['US', 'GB', 'DE'])
iso_3166_1_a3 = factory.Iterator(['USA', 'GBR', 'DEU'])
iso_3166_1_numeric = factory.Iterator(['840', '276', '826'])
class ProductClassFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_model('catalogue', 'ProductClass')
name = factory.Sequence(lambda n: 'Dummy product class {}'.format(n))
class BasketFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_model('basket', 'Basket')
strategy = Selector().strategy()
class AmazonProfileFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_model('oscar_mws', 'AmazonProfile')
FACTORY_DJANGO_GET_OR_CREATE = ('product',)
sku = factory.Sequence(lambda n: "sku_{}".format(str(time())[:10]))
release_date = now()
product = factory.SubFactory(
'oscar_mws.test.factories.ProductFactory', amazon_profile=None)
class ProductFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_model('catalogue', 'Product')
title = 'Dummy Product'
product_class = factory.SubFactory(ProductClassFactory)
amazon_profile = factory.RelatedFactory(AmazonProfileFactory, 'product')
@factory.post_generation
def stockrecord(self, created, extracted, **kwargs):
if not created:
return
if not extracted:
kwargs.setdefault('product', self)
extracted = StockRecordFactory(**kwargs)
self.stockrecords.add(extracted)
class MerchantAccountFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_model('oscar_mws', 'MerchantAccount')
name = "Dummy Merchant"
seller_id = 'ASLLRIDHERE1J56'
aws_api_key = 'FAKE_KEY'
aws_api_secret = 'FAKE_SECRET'
class AmazonMarketplaceFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_model('oscar_mws', 'AmazonMarketplace')
name = "Dummy Marketplace"
region = MWS_MARKETPLACE_US
marketplace_id = factory.Sequence(lambda n: 'MWS_MKT_{}'.format(n))
merchant = factory.SubFactory(MerchantAccountFactory)
class FeedSubmissionFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_model('oscar_mws', 'FeedSubmission')
FACTORY_DJANGO_GET_OR_CREATE = ('submission_id',)
merchant = factory.SubFactory(MerchantAccountFactory)
date_submitted = now()
class PartnerFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_model('partner', 'Partner')
name = factory.Sequence(lambda n: 'Dummy partner {}'.format(n))
class StockRecordFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_model('partner', 'StockRecord')
price_excl_tax = D('12.99')
partner = factory.SubFactory(PartnerFactory)
product = factory.SubFactory(ProductFactory)
class ShippingAddressFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_model('order', 'ShippingAddress')
first_name = 'Peter'
last_name = 'Griffin'
line1 = '31 Spooner Street'
line4 = 'Quahog'
state = 'RI'
country = factory.SubFactory(CountryFactory)
postcode = '12345'
class OrderLineFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_model('order', 'Line')
product = factory.SubFactory(ProductFactory)
line_price_excl_tax = D('12.99')
line_price_incl_tax = D('12.99')
line_price_before_discounts_incl_tax = D('12.99')
line_price_before_discounts_excl_tax = D('12.99')
class OrderFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_model('order', 'Order')
number = factory.Sequence(lambda n: "{}".format(10000 + n))
site = factory.LazyAttribute(
lambda a: get_model('sites', 'Site').objects.all()[0]
)
total_incl_tax = D('12.99')
total_excl_tax = D('12.99')
shipping_address = factory.SubFactory(ShippingAddressFactory)
class FulfillmentOrderFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_model('oscar_mws', 'FulfillmentOrder')
fulfillment_id = 'extern_id_1154539615776'
merchant = factory.SubFactory(MerchantAccountFactory)
date_updated = now()
order = factory.SubFactory(OrderFactory)
shipping_address = factory.SubFactory(ShippingAddressFactory)
|
bsd-3-clause
| -2,751,810,532,007,086,000 | 29.324675 | 76 | 0.701285 | false |
chop-dbhi/serrano
|
serrano/resources/field/dims.py
|
1
|
7254
|
from decimal import Decimal
from django.db.models import Q, Count
from django.utils.encoding import smart_unicode
from restlib2.http import codes
from restlib2.params import Parametizer, StrParam, BoolParam, IntParam
from modeltree.tree import MODELTREE_DEFAULT_ALIAS, trees
from avocado.events import usage
from avocado.models import DataField
from avocado.query import pipeline
from avocado.stats import kmeans
from .base import FieldBase
MINIMUM_OBSERVATIONS = 500
MAXIMUM_OBSERVATIONS = 50000
class FieldDimsParametizer(Parametizer):
aware = BoolParam(False)
cluster = BoolParam(True)
n = IntParam()
nulls = BoolParam(False)
processor = StrParam('default', choices=pipeline.query_processors)
sort = StrParam()
tree = StrParam(MODELTREE_DEFAULT_ALIAS, choices=trees)
class FieldDimensions(FieldBase):
"Field Counts Resource"
parametizer = FieldDimsParametizer
def get(self, request, pk):
instance = self.get_object(request, pk=pk)
params = self.get_params(request)
tree = trees[params.get('tree')]
opts = tree.root_model._meta
tree_field = DataField(pk='{0}:{1}'.format(params.get('tree'), pk),
app_name=opts.app_label,
model_name=opts.module_name,
field_name=opts.pk.name)
        # This will eventually make its way into the parametizer, but lists
        # are not supported.
dimensions = request.GET.getlist('dimensions')
if params['aware']:
context = self.get_context(request)
else:
context = None
QueryProcessor = pipeline.query_processors[params['processor']]
processor = QueryProcessor(context=context, tree=tree)
queryset = processor.get_queryset(request=request)
        # Explicit fields to group by; ignore ones that don't exist or that the
        # user does not have permission to view. The default is to group by the
        # reference field for distinct counts.
if any(dimensions):
fields = []
groupby = []
for pk in dimensions:
f = self.get_object(request, pk=pk)
if f:
fields.append(f)
groupby.append(tree.query_string_for_field(f.field,
model=f.model))
else:
fields = [instance]
groupby = [tree.query_string_for_field(instance.field,
model=instance.model)]
# Exclude null values. Depending on the downstream use of the data,
# nulls may or may not be desirable.
if not params['nulls']:
q = Q()
for field in groupby:
q = q & Q(**{'{0}__isnull'.format(field): False})
queryset = queryset.filter(q)
queryset = queryset.values(*groupby)
# Begin constructing the response
resp = {
'data': [],
'outliers': [],
'clustered': False,
'size': 0,
}
queryset = queryset.annotate(count=Count(tree_field.field.name))\
.values_list('count', *groupby)
# Evaluate list of points
length = len(queryset)
# Nothing to do
if not length:
usage.log('dims', instance=instance, request=request, data={
'size': 0,
'clustered': False,
'aware': params['aware'],
})
return resp
if length > MAXIMUM_OBSERVATIONS:
data = {
'message': 'Data too large',
}
return self.render(request, data,
status=codes.unprocessable_entity)
# Apply ordering. If any of the fields are enumerable, ordering should
# be relative to those fields. For continuous data, the ordering is
# relative to the count of each group
if (any([d.enumerable for d in fields]) and
not params['sort'] == 'count'):
queryset = queryset.order_by(*groupby)
else:
queryset = queryset.order_by('-count')
clustered = False
points = [{
'count': point[0],
            'values': list(point[1:]),  # use a list so Decimal values can be coerced in place below
} for point in list(queryset)]
outliers = []
# For N-dimensional continuous data, check if clustering should occur
# to down-sample the data.
if all([d.simple_type == 'number' for d in fields]):
# Extract observations for clustering.
obs = []
null_points = []
numeric_points = []
for i, point in enumerate(points):
# We need to handle points that have null dimensions
# differently than those that are all numeric as the kmeans
# module currently cannot handle mixed type dimensions so we
# only allow fully numeric points to be passed to the kmeans
# module.
if None in point['values']:
null_points.append(point)
continue
for i, dim in enumerate(point['values']):
if isinstance(dim, Decimal):
point['values'][i] = float(str(dim))
numeric_points.append(point)
obs.append(point['values'])
            # Perform k-means clustering. Determine centroids and calculate
            # the weighted counts relative to the centroids and observations
            # within the kmeans module.
if params['cluster'] and length >= MINIMUM_OBSERVATIONS:
clustered = True
counts = [p['count'] for p in numeric_points]
points, outliers = kmeans.weighted_counts(
obs, counts, params['n'])
else:
indexes = kmeans.find_outliers(obs, normalized=False)
outliers = []
for idx in indexes:
outliers.append(numeric_points[idx])
numeric_points[idx] = None
points = [p for p in numeric_points if p is not None]
# Now that we have done the analysis using the purely numeric
# points, we can add the mixed/null dimensionality points back in
# to the list before returning results.
points += null_points
usage.log('dims', instance=instance, request=request, data={
'size': length,
'clustered': clustered,
'aware': params['aware'],
})
labeled_points = []
value_labels = tree_field.value_labels(queryset=queryset)
for point in points:
labeled_points.append({
'count': point['count'],
'values': [{
'label': value_labels.get(value, smart_unicode(value)),
'value': value
} for value in point['values']]
})
return {
'data': labeled_points,
'clustered': clustered,
'outliers': outliers,
'size': length,
}
|
bsd-2-clause
| 3,394,574,782,945,426,000 | 33.056338 | 78 | 0.546319 | false |
vecnet/vecnet.azure
|
VM_Mock.py
|
1
|
5058
|
from settings_local import SUBSCRIPTION_ID, STORAGE_ACCOUNT_NAME, STORAGE_ACCOUNT_KEY, EMAIL_USERNAME, EMAIL_PASSWORD
__author__ = 'Natalie Sanders'
from azure.servicemanagement import *
from azure.storage import *
from subprocess import call
from os import chdir
import os
import socket
import zipfile
import pickle
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
from email.utils import COMMASPACE, formatdate
from email import encoders
global user_info
def delete_vm():
hosted_service = sms.get_hosted_service_properties(service_name=username, embed_detail=True)
if hosted_service.deployments:
deployment = sms.get_deployment_by_name(username, username)
roles = deployment.role_list
for instance in roles:
if machine_name == instance.role_name:
if len(roles) == 1:
sms.delete_deployment(service_name=username, deployment_name=username)
else:
sms.delete_role(service_name=username, deployment_name=username, role_name=machine_name)
break
def send_mail( send_from, send_to, subject, text, files=[], server="localhost", port=587, username='', password='', isTls=True):
msg = MIMEMultipart()
msg['From'] = send_from
msg['To'] = send_to
msg['Date'] = formatdate(localtime = True)
msg['Subject'] = subject
msg.attach( MIMEText(text) )
for f in files:
part = MIMEBase('application', "octet-stream")
part.set_payload( open(f,"rb").read() )
encoders.encode_base64(part)
part.add_header('Content-Disposition', 'attachment; filename="{0}"'.format(os.path.basename(f)))
msg.attach(part)
smtp = smtplib.SMTP(server, port)
if isTls: smtp.starttls()
smtp.login(username,password)
smtp.sendmail(send_from, send_to, msg.as_string())
smtp.quit()
print "emailed\n"
def upload_results():
z = zipfile.ZipFile(user_info["sim"]+'_Results.zip', "w", zipfile.ZIP_DEFLATED)
for f in os.listdir("Input"):
chdir("c:/Users/Public/Sim/Input")
z.write(f)
chdir("c:/Users/Public/Sim/Output")
z.write("stdout.txt")
z.close()
result = 'r-' + machine_name
    blob_service.put_block_blob_from_path(container_name, result, 'c:/Users/Public/Sim/' + user_info["sim"] + '_Results.zip')
print "uploaded\n"
def download_input():
blob_service.get_blob_to_path(container_name, machine_name, 'c:/Users/Public/Sim/Input.zip')
chdir("C:/Users/Public/Sim")
z = zipfile.ZipFile('Input.zip', 'r')
z.extractall('Input')
z.close()
print "downloaded\n"
########################################################################################################################
## MAIN ##
########################################################################################################################
##### Service Management Object #####
machine_name = socket.gethostname()
split = machine_name.split('-')
container_name = '-'.join(split[:-1]).lower()
username = '-'.join(split[:-1])
subscription_id = SUBSCRIPTION_ID
certificate_path = 'CURRENT_USER\\my\\AzureCertificate'
call(['certutil', '-user', '-f', '-p', '1', '-importPFX', 'c:/temp/azure.pfx'])
sms = ServiceManagementService(subscription_id, certificate_path)
###### Redirect stdout to File ######
chdir('C:/Users/Public/Sim')
output = open("Output/stdout.txt", "w+")
####### Download Input Files ########
blob_service = BlobService(
account_name=STORAGE_ACCOUNT_NAME,
account_key=STORAGE_ACCOUNT_KEY)
try:
download_input()
f = "C:/Users/Public/Sim/Input/AzureUserInfo.pickle"
user_info = pickle.load(file(f))
output.write('Mock model executed correctly.')
output.close()
print "download input"
except:
output.write('Could not download input from the cloud.\n')
output.close()
try:
########### Upload Results ##########
upload_results()
########### Email Results ###########
send_mail( send_from = 'vecnet.results@gmail.com',
send_to = user_info["email"],
subject = 'The results for your ' + user_info["sim"] + ' simulation are ready!',
text = 'Hi ' + user_info['username'] + ',\n\nYour ' + user_info["sim"] + ' simulation has '
'finished running. Look for your results below.\n\nThanks for using VecNet Azure '
'resources!\nThe VecNet Team',
files = ['c:/Users/Public/Sim/' + user_info["sim"] + '_Results.zip'],
server = "smtp.gmail.com",
port = 587,
username = EMAIL_USERNAME,
password = EMAIL_PASSWORD,
isTls = True)
print "sent mail"
############# Exit Script #############
finally:
delete_vm()
|
mpl-2.0
| -3,948,261,241,863,998,000 | 33.408163 | 128 | 0.574931 | false |
mprunell/utils
|
url_diff.py
|
1
|
5578
|
import sys
import getopt
import traceback
import urllib2
from urlparse import urljoin, urlparse, ParseResult
from BeautifulSoup import BeautifulSoup
def connect(conn, url):
assert conn is not None, 'Input connection must be valid'
assert url, 'Input old URL cannot be empty'
response = None
try:
response = conn.open(url)
except urllib2.HTTPError as e:
error_msg = 'Error {} connecting to {}'.format(e.code, url)
sys.stderr.write(repr(error_msg) + '\n')
except urllib2.URLError as e:
error_msg = 'Error {} connecting to {}'.format(e.reason, url)
sys.stderr.write(repr(error_msg) + '\n')
except:
error_msg = 'Error connecting to {}'.format(url)
sys.stderr.write(repr(error_msg) + '\n')
return response
def crawl_page (conn, url, domain, visited_links=[]):
assert conn is not None, 'Input connection must be valid'
assert url, 'Input old URL cannot be empty'
assert domain, 'Input old domain cannot be empty'
assert isinstance(visited_links, list)
visited_links.append(url)
remaining_links = []
title = ''
meta_desc = ''
response = connect(conn, url)
if not response is None:
body = response.read()
try:
soup = BeautifulSoup(body)
except:
error_msg = 'Error parsing {}'.format(url)
sys.stderr.write(error_msg + "\n")
soup = None
if not soup is None:
if soup.html:
if soup.html.head:
title = soup.html.head.title.string or ''
else:
title =''
else:
title = ''
meta_desc = soup.findAll(attrs={"name":"description"})
if len (meta_desc) > 0:
meta_desc = meta_desc[0]['content']
else:
meta_desc = ""
if visited_links:
anchors = soup.findAll("a")
for anchor in anchors:
if anchor is None or not anchor.has_key('href'): continue
try:
href = anchor['href']
if domain in href or (not 'www' in href and not 'http' in href):
link = urljoin('http://' + domain, href).split ("#")[0].lower()
if not link in visited_links and link != '/' and not 'mailto' in link:
if not link in visited_links:
if not '.pdf' in link.lower() \
and not '.png' in link.lower() \
and not '.jpg' in link.lower():
remaining_links.append(link)
except:
print traceback.format_exc()
print '{};{};{}'.format(url.encode('utf-8'), title.encode('utf-8').strip(' \n\t\r'), meta_desc.encode ('utf-8').strip(' \n\t\r'))
assert visited_links, 'Output visited_links cannot be empty'
return remaining_links, visited_links
def clean_scheme(url):
assert url, 'Input URL cannot be empty'
scheme = 'http://'
sections = url.split(scheme)
if len(sections) == 1:
url = scheme + url
assert url, 'Output URL cannot be empty'
assert scheme in url, 'Output URL must have a scheme'
return url
def replace_domain(source_url, new_domain):
o = urlparse(source_url)
return ParseResult(o.scheme, new_domain, o.path, o.params, o.query, o.fragment).geturl()
def find_differences(old_domain, new_domain, verbose=False):
old_domain = unicode(old_domain)
new_domain = unicode(new_domain)
old_url = clean_scheme(old_domain)
conn = urllib2.build_opener()
visited_links = []
remaining_links, visited_links = crawl_page(conn, old_url, old_domain, visited_links)
new_url = replace_domain(old_url, new_domain)
crawl_page(conn, new_url, new_domain)
while True:
if remaining_links:
ln = remaining_links.pop()
more_links, visited_links = crawl_page(conn, ln, old_domain, visited_links)
new_ln = replace_domain(ln, new_domain)
crawl_page(conn, new_ln, new_domain)
remaining_links.extend(more_links)
else:
break
def main():
old_domain = ''
new_domain = ''
version = '1.0'
verbose = False
help = False
try:
        options, remainder = getopt.getopt(sys.argv[1:], 'o:n:vh', ['old_domain=',
                                                                    'new_domain=',
                                                                    'verbose',
                                                                    'help'
                                                                    ])
except getopt.GetoptError:
sys.exit(2)
option_not_found = False
for opt, arg in options:
        if opt in ('-o', '--old_domain'):
old_domain = arg
        elif opt in ('-n', '--new_domain'):
new_domain = arg
elif opt in ('-v', '--verbose'):
verbose = True
elif opt in ('-h', '--help'):
help = True
else:
option_not_found = True
if not options or option_not_found or help:
print 'Usage: {} -o <old_url> -n <new_url>'.format(sys.argv[0])
if help:
sys.exit(0)
else:
sys.exit(1)
find_differences(old_domain, new_domain, verbose)
if __name__ == "__main__":
main()
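# Illustrative invocation (Python 2; domains are placeholders):
#   python url_diff.py -o old-site.example.com -n new-site.example.com > pages.csv
# Each crawled page is printed as "url;title;meta description" for both the old
# domain and the corresponding URL on the new domain, so titles and descriptions
# can be compared between the two sites.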
|
mit
| -2,697,641,972,369,733,000 | 33.220859 | 137 | 0.512191 | false |
clawpack/clawpack-4.x
|
doc/sphinx/example-acoustics-1d/setplot_2.py
|
2
|
2095
|
"""
Single figure and axes with two items
=======================================
Only the pressure q[0] is plotted.
In this example the line and points are plotted in different colors by
specifying a second item on the same axes.
"""
#--------------------------
def setplot(plotdata):
#--------------------------
"""
Specify what is to be plotted at each frame.
Input: plotdata, an instance of pyclaw.plotters.data.ClawPlotData.
Output: a modified version of plotdata.
"""
plotdata.clearfigures() # clear any old figures,axes,items data
# Figure for q[0]
plotfigure = plotdata.new_plotfigure(name='Pressure', figno=1)
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
plotaxes.xlimits = 'auto'
plotaxes.ylimits = [-.5,1.1]
plotaxes.title = 'Pressure'
# Set up for item on these axes:
plotitem = plotaxes.new_plotitem(name='line', plot_type='1d')
plotitem.plot_var = 0
plotitem.plotstyle = '-'
plotitem.color = 'b'
# Set up for item on these axes:
plotitem = plotaxes.new_plotitem(name='points', plot_type='1d')
plotitem.plot_var = 0
plotitem.plotstyle = 'o'
plotitem.color = '#ff00ff' # any color supported by matplotlib
# Parameters used only when creating html and/or latex hardcopy
# e.g., via pyclaw.plotters.frametools.printframes:
plotdata.printfigs = True # print figures
plotdata.print_format = 'png' # file format
plotdata.print_framenos = 'all' # list of frames to print
plotdata.print_fignos = 'all' # list of figures to print
plotdata.html = True # create html files of plots?
plotdata.html_homelink = '../README.html'# pointer for index page
plotdata.latex = True # create latex file of plots?
plotdata.latex_figsperline = 1 # layout of plots
plotdata.latex_framesperline = 2 # layout of plots
plotdata.latex_makepdf = True # also run pdflatex?
return plotdata
|
bsd-3-clause
| 131,839,920,120,688,060 | 32.790323 | 74 | 0.609547 | false |
yuanyelele/solfege
|
solfege/tracebackwindow.py
|
1
|
4872
|
# GNU Solfege - free ear training software
# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2007, 2008, 2011 Tom Cato Amundsen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import sys
from gi.repository import Gtk
from solfege import gu
from solfege import reportbug
class TracebackWindow(Gtk.Dialog):
def __init__(self, show_gtk_warnings):
Gtk.Dialog.__init__(self)
self.m_show_gtk_warnings = show_gtk_warnings
self.set_default_size(630, 400)
self.vbox.set_border_width(8)
label = Gtk.Label(label=_("GNU Solfege message window"))
label.set_name('Heading2')
self.vbox.pack_start(label, False, False, 0)
label = Gtk.Label(label=_("Please report this to the bug database or send an email to bug-solfege@gnu.org if the content of the message make you believe you have found a bug."))
label.set_line_wrap(True)
self.vbox.pack_start(label, False, False, 0)
scrollwin = Gtk.ScrolledWindow()
scrollwin.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
self.vbox.pack_start(scrollwin, True, True, 0)
self.g_text = Gtk.TextView()
scrollwin.add(self.g_text)
self.g_report = Gtk.Button()
self.g_report.connect('clicked', self.do_report)
box = Gtk.HBox()
self.g_report.add(box)
im = Gtk.Image.new_from_stock('gtk-execute', Gtk.IconSize.BUTTON)
box.pack_start(im, True, True, 0)
label = Gtk.Label()
label.set_text_with_mnemonic(gu.escape(_('_Make automatic bug report')))
label.set_use_markup(True)
box.pack_start(label, True, True, 0)
self.action_area.pack_start(self.g_report, True, True, 0)
self.g_close = Gtk.Button(stock='gtk-close')
self.action_area.pack_start(self.g_close, True, True, 0)
self.g_close.connect('clicked', lambda w: self.hide())
def do_report(self, *v):
yesno = gu.dialog_yesno(_(
"Automatic bug reports are often mostly useless because "
"people omit their email address and add very little info "
"about what happened. Fixing bugs is difficult if we "
"cannot contact you and ask for more information.\n\n"
"I would prefer if you open a web browser and report your "
"bug to the bug tracker at http://bugs.solfege.org.\n\n"
"This will give your bug report higher priority and it "
"will be fixed faster.\n\nAre you willing to do that?"))
if yesno:
return
self.m_send_exception = 'Nothing'
b = self.g_text.get_buffer()
d = reportbug.ReportBugWindow(
self, b.get_text(b.get_start_iter(),
b.get_end_iter(), False))
while 1:
ret = d.run()
if ret in (Gtk.ResponseType.REJECT, Gtk.ResponseType.DELETE_EVENT):
break
elif ret == reportbug.RESPONSE_SEND:
self.m_send_exception = d.send_bugreport()
break
if self.m_send_exception != 'Nothing':
if self.m_send_exception:
m = Gtk.MessageDialog(self, Gtk.DialogFlags.MODAL,
Gtk.MessageType.ERROR, Gtk.ButtonsType.CLOSE,
"Sending bugreport failed:\n%s" % self.m_send_exception)
else:
m = Gtk.MessageDialog(self, Gtk.DialogFlags.MODAL,
Gtk.MessageType.INFO, Gtk.ButtonsType.CLOSE,
'Report sent to http://www.solfege.org')
m.run()
m.destroy()
d.destroy()
def write(self, txt):
if ("DeprecationWarning:" in txt) or \
(not self.m_show_gtk_warnings and (
"GtkWarning" in txt
or "PangoWarning" in txt
or ("Python C API version mismatch" in txt and
("solfege_c_midi" in txt or "swig" in txt))
)):
return
sys.stdout.write(txt)
if txt.strip():
self.show_all()
buffer = self.g_text.get_buffer()
buffer.insert(buffer.get_end_iter(), txt)
self.set_focus(self.g_close)
def flush(self, *v):
pass
def close(self, *v):
pass
|
gpl-3.0
| -2,274,994,143,251,863,300 | 42.891892 | 185 | 0.609401 | false |
alexkorovkov/TeXPreview
|
LaTexPreview.py
|
1
|
4013
|
import sublime, sublime_plugin
try: # python 3
from .functions import *
from .openfunctions import *
except ValueError: # python 2
from functions import *
from openfunctions import *
def plugin_loaded():
ENVIRON['PATH'] += str(
sublime.load_settings("TeXPreview.sublime-settings").get("latex_path")
)
print("Your path for TeXPrevew:", ENVIRON['PATH'])
class LatexPreviewEvent(sublime_plugin.EventListener):
def on_selection_modified_async(self, view):
global workingFiles
fileName = view.file_name()
if not(fileName in workingFiles):
return
currentProperties = workingFiles[fileName]
if (sublime.load_settings(
"TeXPreview.sublime-settings"
).get("external_view") == False):
sublime_open(view, currentProperties)
return
if ((currentProperties.runProc != None) and (currentProperties.runProc.poll() != None)):
currentProperties.runProc = None
currentProperties.isRun = False
if ((os.path.exists(currentProperties.resFileName))):
fileDelete(currentProperties.resFileName)
return
if (currentProperties.isRun == False):
return
auto_reload = sublime.load_settings("TeXPreview.sublime-settings").get("auto_reload")
if (auto_reload == False):
return
if (auto_reload == "application_reload"):
applicationReload(view, currentProperties)
return
changePic(view, currentProperties)
#def on_load_async(self, view):
# ENVIRON['PATH'] += sublime.load_settings("TeXPreview.sublime-settings").get("latex_path")
def on_pre_close(self, view):
fileName = view.file_name()
stopPrevew(fileName)
dirPath = os.path.dirname(view.file_name())+os.path.sep +r'TeX_Preview_tmp'
if ((os.path.exists(dirPath))):
try:
os.rmdir(dirPath)
except:
pass
class LatexPreviewCommand(sublime_plugin.TextCommand):
def run(self, view):
fileName = self.view.file_name()
if (fileName == None):
return
if (fileName[-4:] != '.tex'):
return
global workingFiles
if not(fileName in workingFiles):
workingFiles[fileName] = FileProperties()
currentProperties = workingFiles[fileName]
currentProperties.code = None
currentProperties.isRun = True
currentProperties.cutFunction = lambda x:cutEquation(x)
if (sublime.load_settings(
"TeXPreview.sublime-settings"
).get("external_view") == False):
sublime_open(self.view, currentProperties)
else:
applicationReload(self.view, currentProperties)
class LatexBlockPreviewCommand(sublime_plugin.TextCommand):
def run(self, view):
fileName = self.view.file_name()
if (fileName == None):
return
if (fileName[-4:] != '.tex'):
return
global workingFiles
if not(fileName in workingFiles):
workingFiles[fileName] = FileProperties()
currentProperties = workingFiles[fileName]
currentProperties.code = None
currentProperties.isRun = True
currentProperties.cutFunction = lambda x:cutBlock(x)
if (sublime.load_settings(
"TeXPreview.sublime-settings"
).get("external_view") == False):
sublime_open(self.view, currentProperties)
else:
applicationReload(self.view, currentProperties)
class LatexStopPreviewCommand(sublime_plugin.TextCommand):
def run(self, view):
fileName = self.view.file_name()
stopPrevew(fileName)
self.view.window().destroy_output_panel("tex_pr_exec")
|
gpl-3.0
| 4,334,221,548,284,516,000 | 27.260563 | 98 | 0.59083 | false |
arxiv-vanity/arxiv-vanity
|
arxiv_vanity/papers/tests/test_processor.py
|
1
|
1921
|
from io import StringIO
import unittest
from ..processor import process_render
class ProcessorTest(unittest.TestCase):
maxDiff = None
def test_basics(self):
html = """
<head>
<link href="style.css">
<style>body { }</style>
<script>blah</script>
</head>
<body>
<img src="fig.gif">
<img src="data:foo">
<a href="http://example.com">Hello</a>
<div class="ltx_abstract"><p>Science was done</p></div>
<figure class="ltx_figure"><img src="first_image.gif"></figure>
<script src="script.js" /></body>
"""
output = process_render(StringIO(html), "prefix", {})
self.assertEqual(
output["body"],
"""
<img src="prefix/fig.gif"/>
<img src="data:foo"/>
<a href="http://example.com" target="_blank">Hello</a>
<div class="ltx_abstract"><p>Science was done</p></div>
<figure class="ltx_figure"><img src="prefix/first_image.gif"/></figure>
<script src="prefix/script.js"></script>
""",
)
self.assertEqual(output["links"], '<link href="prefix/style.css">\n')
self.assertEqual(output["styles"], "<style>body { }</style>\n")
self.assertEqual(output["scripts"], "<script>blah</script>\n")
self.assertEqual(output["abstract"], "Science was done")
self.assertEqual(output["first_image"], "prefix/first_image.gif")
def test_arxiv_urls_are_converted_to_vanity_urls(self):
html = '<head></head><a href="https://arxiv.org/abs/1710.06542">Something</a>'
output = process_render(StringIO(html), "", {})
self.assertEqual(
output["body"],
'<a href="/papers/1710.06542/" target="_blank">Something</a>',
)
def test_emails_are_removed(self):
html = '<head></head><a href="mailto:foo@bar.com">some email link</a> another@email.com'
output = process_render(StringIO(html), "", {})
self.assertEqual(
output["body"], "some email link ",
)
|
apache-2.0
| 4,360,503,527,853,255,000 | 32.12069 | 96 | 0.609058 | false |
RedHatInsights/insights-core
|
insights/tests/test_context.py
|
1
|
2547
|
from insights.core.context import (ExecutionContextMeta, HostArchiveContext,
SerializedArchiveContext, SosArchiveContext)
def test_host_archive_context():
files = ["/foo/junk", "/insights_commands"]
actual = HostArchiveContext.handles(files)
assert actual == ("/", HostArchiveContext), actual
files = ["/foo/junk", "/insights_commands/things"]
actual = HostArchiveContext.handles(files)
assert actual == ("/", HostArchiveContext), actual
files = ["/foo/junk", "/foo/junk/insights_commands/foobar.txt"]
actual = HostArchiveContext.handles(files)
assert actual == ("/foo/junk", HostArchiveContext), actual
def test_host_archive_context_unsupported():
files = ["/foo/junk", "/not_insights_commands"]
actual = HostArchiveContext.handles(files)
assert actual == (None, None), actual
files = ["/foo/junk", "/insights_commands_not"]
actual = HostArchiveContext.handles(files)
assert actual == (None, None), actual
def test_sos_archive_context_supported():
files = ["/foo/junk", "/sos_commands"]
actual = SosArchiveContext.handles(files)
assert actual == ("/", SosArchiveContext), actual
files = ["/foo/junk", "/sos_commands/things"]
actual = SosArchiveContext.handles(files)
assert actual == ("/", SosArchiveContext), actual
files = ["/foo/junk", "/foo/junk/sos_commands/foobar.txt"]
actual = SosArchiveContext.handles(files)
assert actual == ("/foo/junk", SosArchiveContext), actual
def test_sos_archive_context_unsupported():
files = ["/foo/junk", "/sos_commands_not"]
actual = SosArchiveContext.handles(files)
assert actual == (None, None), actual
files = ["/foo/junk", "/not_sos_commands"]
actual = SosArchiveContext.handles(files)
assert actual == (None, None), actual
def test_serialize_archive_context_supported():
files = ["/foo/junk", "/insights_archive.txt"]
actual = SerializedArchiveContext.handles(files)
assert actual == ("/", SerializedArchiveContext), actual
def test_serialized_archive_context_unsupported():
files = ["/foo/junk", "/sos_commands_not"]
actual = SerializedArchiveContext.handles(files)
assert actual == (None, None), actual
files = ["/foo/junk", "/insights_archive"]
actual = SerializedArchiveContext.handles(files)
assert actual == (None, None), actual
def test_unrecognized():
files = ["/foo/junk", "/bar/junk"]
actual = ExecutionContextMeta.identify(files)
assert actual == (None, None), actual
|
apache-2.0
| -3,199,718,697,504,564,700 | 34.375 | 79 | 0.675697 | false |
kd5bjo/icing
|
icing.py
|
1
|
5109
|
#!/usr/bin/env python
# icing.py
#
# Created by Eric Sumner on 2010-10-29.
# Copyright 2010 Haleret Productions. All rights reserved.
#
# See the file named "COPYING" in this directory for licensing information.
import sys
# Syntactic sugar for common object types:
#
# NSNumber: @42
# NSMutableArray: @[a, b, c]
# NSMutableDictionary: @{ k1 => v1, k2 => v2, k3 => v3 }
#
# You must #import "NSArray.h" to use the array or dictionary syntax
# Implementation notes:
# - Files that don't use the new syntax should come out completely unchanged
# - Expansions should always span the same number of lines as their source,
# so that line numbers stay in sync
# - Should output as much as we can on unexpected end-of-file
# - Translations should be as direct as possible, and easy to understand
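# Illustrative expansions produced by the generators below (whitespace may differ
# slightly from the actual output):
#   @42          ->  [NSNumber numberWithDouble: 42 ]
#   @[ @1, @2 ]  ->  [NSMutableArray arrayWithObjects: [NSNull maybe:...], ..., nil]
#   @{ k => v }  ->  [NSMutableDictionary dictionaryWithObjectsAndKeys: [NSNull maybe:v], k, ..., nil]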
out = sys.stdout
source = sys.stdin
matched_tokens = {'[':']', '{':'}', '(':')'}
# Represents the input file; probably shouldn't slurp the whole file,
# but it was easy
class charstream:
def __init__(self, source):
self.source = source
def next(self):
if self.source:
rtn = self.source[0]
self.source = self.source[1:]
return rtn
else: raise StopIteration
def push(self, c):
self.source = c+self.source
def __iter__(self):
return self
source = charstream(source.read())
# Pass over a single- or double-quoted string without doing macro expansion
def read_string(delim, source):
out = delim
escaped = False
for c in source:
out += c
if escaped:
escaped = False
else:
if c == '\\': escaped = True
if c == delim: break
return out
# Pass over comments without doing macro expansion
# Does not support nested /* style comments
def read_comment(delim, source):
out = delim
comment_type = source.next()
out += comment_type
if comment_type == '/':
for c in source:
out += c
if c in '\r\n': break
return out
elif comment_type == '*':
for c in source:
out += c
if out.endswith('*/'): break
return out
else:
source.push(comment_type)
return delim
# Dispatch to the proper generator after seeing an @
def read_directive(delim, source):
out = delim + source.next()
directive_type = out[-1]
if directive_type == '[':
return parse_array_constant(out, source)
elif directive_type == '{':
return parse_dict_constant(out, source)
elif directive_type in '-0123456789':
return parse_number_constant(out, source)
else:
source.push(directive_type)
return delim
# Read (and macroexpand) input until one of the end tokens is reached at the
# current nesting level (of quotes, parens, braces, comments, etc)
def read_argument(source, *end):
out = ''
for c in source:
if c in '"\'': out += read_string(c, source)
elif c in '/': out += read_comment(c, source)
elif c in '@': out += read_directive(c, source)
elif c in matched_tokens:
text, delim = read_argument(source, matched_tokens[c])
out += text
if delim: out += delim
else:
out += c
for e in end:
if out.endswith(e): return out[:-len(e)], e
return out, None
# Generate NSMutableArray constructor for @[a, b, c]
def parse_array_constant(out, source):
out = '[NSMutableArray arrayWithObjects: '
args = []
while True:
arg, delim = read_argument(source, ',', ']')
args.append('[NSNull maybe:%s]' % arg)
if delim == ']': break
if not delim: return out + ','.join(args)
if len(args) and args[-1].strip():
args += [' ']
return out + ','.join(args) + 'nil]'
# Generate NSMutableDictionary constructor for @{ k1 => v1, k2 => v2 }
def parse_dict_constant(out, source):
out = '[NSMutableDictionary dictionaryWithObjectsAndKeys: '
args = []
last_delim = ','
while True:
key, delim = read_argument(source, '=>', '}')
if delim == '}':
if key.strip():
args += ['', key, ' ']
else:
args += [key]
break
val, delim = read_argument(source, ',', '}')
args += ['[NSNull maybe:%s]' % val, key]
if delim == '}':
args += [' ']
break
if not delim: return out + ','.join(args)
return out + ','.join(args) + 'nil]'
# Generate NSNumber constructor for @4.2e+12
def parse_number_constant(out, source):
out = out[1:]
for c in source:
if c.isalnum() or c == '.' or (out[-1] in 'eE' and c in '+-'):
out += c
else:
source.push(c)
break
return '[NSNumber numberWithDouble: %s ]' % out
# Process the input file
for c in source:
if c in '"\'': out.write(read_string(c, source))
elif c in '/': out.write(read_comment(c, source))
elif c in '@': out.write(read_directive(c, source))
else: out.write(c)
|
bsd-2-clause
| -6,088,098,910,970,394,000 | 29.592814 | 76 | 0.574868 | false |
isfon/DjangoSales
|
DjangoSales/apps/users/migrations/0001_initial.py
|
1
|
2405
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-06-10 19:56
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0007_alter_validators_add_error_messages'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(max_length=20, unique=True, validators=[django.core.validators.RegexValidator('^[0-9a-zA-Z]*$', message='Only alphanumeric characters are allowed.')])),
('email', models.EmailField(max_length=255, verbose_name='email address')),
('first_name', models.CharField(blank=True, max_length=30, null=True)),
('last_name', models.CharField(blank=True, max_length=50, null=True)),
('date_joined', models.DateTimeField(auto_now_add=True)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('tipo', models.CharField(blank=True, choices=[('Administrador', 'Administrador'), ('Empleado', 'Empleado')], max_length=50, null=True)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'abstract': False,
},
),
]
|
mit
| -8,246,163,853,700,214,000 | 58.125 | 266 | 0.619958 | false |
yeming233/rally
|
rally/common/objects/__init__.py
|
1
|
1072
|
# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Contains the Rally objects."""
from rally.common.objects.credential import Credential # noqa
from rally.common.objects.deploy import Deployment # noqa
from rally.common.objects.task import Subtask # noqa
from rally.common.objects.task import Task # noqa
from rally.common.objects.task import Workload # noqa
from rally.common.objects.verification import Verification # noqa
from rally.common.objects.verifier import Verifier # noqa
|
apache-2.0
| 6,664,427,053,616,751,000 | 45.608696 | 78 | 0.75653 | false |
iric-soft/km
|
km/tools/find_report.py
|
1
|
12904
|
import sys
import re
from .. utils import common as uc
def print_line(sample, region, location, type_var, removed,
added, abnormal, normal, ratio, min_cov, min_exclu,
variant, target, info, var_seq, ref_seq):
line = "\t".join([sample, region, location, type_var, removed,
added, abnormal, normal, ratio, min_cov, min_exclu,
variant, target, info, var_seq, ref_seq])
sys.stdout.write(line + "\n")
def print_vcf_header():
header = '##fileformat=VCFv4.1\n'
header += '##INFO=<ID=TYPE,Number=A,Type=String,Description='
header += '"The type of variant, either Insertion, ITD, I&I, Deletion, Substitution or Indel.">\n'
header += '##INFO=<ID=TARGET,Number=A,Type=String,Description='
header += '"Name of the sequencing that contains the mutation.">\n'
header += '##INFO=<ID=RATIO,Number=A,Type=String,Description="Ratio of mutation to reference.">\n'
header += '##INFO=<ID=MINCOV,Number=A,Type=String,Description='
header += '"Minimum k-mer coverage of alternative allele.">\n'
header += '##INFO=<ID=REMOVED,Number=A,Type=String,Description="Number of removed bases.">\n'
header += '##INFO=<ID=ADDED,Number=A,Type=String,Description="Number of added bases.">\n'
header += '#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n'
sys.stdout.write(header)
def print_vcf_line(chro, loc, ref_var, alt_var, type_var, target, ratio, min_cov, rem, ad):
line = "\t".join([chro, str(loc), ".", ref_var, alt_var, ".", ".",
"TYPE="+type_var+";TARGET="+target+";RATIO="+ratio+";MINCOV="+min_cov +
";REMOVED="+str(rem)+";ADDED="+str(ad)])
sys.stdout.write(line + "\n")
def init_ref_seq(arg_ref):
if not arg_ref:
sys.exit("ERROR: Target file is empty\n")
# BE 1-based
nts = []
chro = None
strand = None
# CODE for multiple >LOC lines:
for line in open(arg_ref, "r"):
line = line.strip()
nt = []
# Parse attributes
if line[0] == '>':
# sanity check
loc = line.split(" ")[0]
if ":" not in loc or "-" not in loc:
sys.exit('ERROR: Fasta entries do not contain a correctly ' +
'formatted location: {}\n'.format(loc))
# look up attributes in fasta file
line = line.replace(">", "location=", 1)
attr = {x.split("=")[0].strip(): x.split("=")[1].strip() for x in line.split("|")}
exon = attr["location"]
chro, pos = exon.split(":")
refstart, refstop = pos.split("-")
# get nt coordinates on the genome
if 'strand' not in list(attr.keys()):
attr['strand'] = '+'
sys.stderr.write("WARNING: Strand is assumed to be '+' \n")
strand = attr["strand"]
for i in range(int(refstart), int(refstop) + 1):
nt += [i]
nt = nt[::-1] if strand == "-" else nt
nts.extend(nt)
return nts, chro, strand
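# Illustrative target FASTA header accepted by init_ref_seq(); attributes are
# "|"-separated key=value pairs and only the leading location is required
# (coordinates and gene name below are placeholders):
#   >chr13:28608000-28608500 |strand=+ |gene=FLT3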
def create_report(args):
# Find correct extremities of a mutation
sys.setrecursionlimit(10000)
def get_extremities(va, p, rs):
if p - 1 > 0 and rs[p - 1] == va[-1]:
return get_extremities(rs[p - 1] + va[:-1], p - 1, rs)
return p - 1
if args.format == "vcf" and args.info == "cluster":
# Note: could salvage that option if we get the fill ref from vs_ref entries
sys.exit("ERROR: -f vcf and -i cluster options are incompatible")
variants = {}
samples = {}
data = {}
vcf = True if args.format == 'vcf' else False
table = True if args.format == 'table' else False
(nts, chro, strand) = init_ref_seq(args.target)
if vcf:
print_vcf_header()
elif not table:
print_line("Sample", "Region", "Location", "Type", "Removed",
"Added", "Abnormal", "Normal", "rVAF", "Min_coverage",
"Exclu_min_cov", "Variant", "Target", "Info", "Variant_sequence",
"Reference_sequence")
for line in args.infile:
# filter header
if line[0] == "#":
# sys.stderr.write("Filtred: " + line)
continue
tok = line.strip("\n").split("\t")
# filter on info column
if not re.search(args.info, line) or tok[0] == "Database" or len(tok) <= 1:
# sys.stderr.write("Filtered: " + line)
continue
samp = tok[0]
query = tok[1]
ratio = tok[4]
alt_exp = tok[5]
ref_exp = tok[9]
min_cov = tok[6]
start_off = tok[7]
alt_seq = tok[8]
refSeq = tok[10]
info = tok[11]
min_exclu = ""
variant = (tok[2], tok[3])
ref_seq = refSeq.upper()
if args.exclu != "" and alt_seq != "":
res = uc.get_cov(args.exclu, alt_seq)
min_exclu = str(res[2])
if int(min_cov) < args.min_cov:
continue
# case: entries with no mutations
if variant[0] == 'Reference':
mod = ""
if strand == "-":
region = "{}:{}-{}".format(chro, nts[-1], nts[0])
else:
region = "{}:{}-{}".format(chro, nts[0], nts[-1])
if not vcf and not table:
print_line(samp, region, '-', variant[0], '0', '0',
'0.0', alt_exp, tok[4], min_cov, min_exclu, '-',
query, tok[-1], "", "")
continue
elif vcf:
continue
# case: there is a mutation
else:
start, mod, stop = variant[1].split(":")
delet, insert = mod.split("/")
added = str(len(insert))
removed = str(len(delet))
# start and end positions in 0-based coordinates
pos = int(start) - 1
pos -= int(start_off)
end = int(stop) - 2 # one to go back to last position, the other for 0-base
end -= int(start_off)
if strand == "+":
start_pos = nts[pos]
end_pos = nts[end]
elif strand == "-":
start_pos = nts[end]
end_pos = nts[pos]
region = "{}:{}-{}".format(chro, start_pos, end_pos + 1)
ref_var = delet.upper()
alt_var = insert.upper()
loc_var = start_pos
end_var = end_pos
if len(delet) == 0 and len(insert) != 0:
if strand == "+":
start_pos = nts[pos]
end_pos = nts[end + 1] # insertions end at last position
elif strand == "-":
start_pos = nts[end + 1]
end_pos = nts[pos]
region = "{}:{}-{}".format(chro, start_pos, end_pos + 1)
var = insert.upper()
ibef = get_extremities(var, pos, ref_seq) # include current position
before = ref_seq[ibef:pos]
iaft = get_extremities(var[::-1], len(ref_seq)-pos, ref_seq[::-1])
after = ref_seq[::-1][iaft:len(ref_seq)-pos][::-1]
iaft = len(ref_seq) - iaft - 1
ref_var = before + after
alt_var = before + var + after
loc_var = nts[iaft] if strand == "-" else nts[ibef]
end_var = nts[iaft-len(ref_var)+1] if strand == "-" else nts[ibef+len(ref_var)-1]
if loc_var + len(ref_var) - 1 != end_var and vcf:
sys.stderr.write("NOTE: Mutation overlaps 2 exons or more, VCF output is disabled \n")
continue
# Reinterpret mutations for small ITDs
# careful, going upstream may put us outside the reference.
upstream = alt_seq[pos-len(insert):pos]
match = 0
if pos-len(insert) >= 0:
for i in range(0, len(insert)):
if insert[i] == upstream[i]:
match += 1
match = float(match)/len(insert)
insert_type = "Insertion"
if pos-len(insert) >= 0 and len(insert) >= 3 and insert == upstream:
insert_type = "ITD"
added += " | " + str(end_pos - start_pos + 1)
elif pos-len(insert) >= 0 and len(insert) >= 3 and match > 0.5:
insert_type = "I&I"
added += " | " + str(end_pos - start_pos + 1)
location = chro + ":" + str(end_pos)
elif variant[0] == 'Deletion':
region = "{}:{}-{}".format(chro, start_pos, end_pos + 1)
location = ""
insert_type = variant[0]
var = delet.upper()
ibef = get_extremities(var, pos, ref_seq)
before = ref_seq[ibef:pos]
iaft = get_extremities(var[::-1], len(ref_seq)-pos-1-len(var)+1, ref_seq[::-1])
after = ref_seq[::-1][iaft:len(ref_seq)-pos-1-len(var)+1][::-1]
iaft = len(ref_seq) - iaft - 1
ref_var = before + var + after
alt_var = before + after
loc_var = nts[iaft] if strand == "-" else nts[ibef]
end_var = nts[iaft-len(ref_var)+1] if strand == "-" else nts[ibef+len(ref_var)-1]
if loc_var + len(ref_var) - 1 != end_var and vcf:
continue
elif variant[0] == 'Substitution':
location = chro + ":" + str(start_pos)
insert_type = variant[0]
if loc_var + len(ref_var) - 1 != end_var and vcf:
sys.stderr.write("NOTE: Mutation overlaps 2 exons or more, VCF output is disabled \n")
continue
elif variant[0] == 'Indel':
location = chro + ":" + str(end_pos)
insert_type = variant[0]
ref_var = ref_seq[pos-1] + delet.upper() + ref_seq[end + 1]
alt_var = ref_seq[pos-1] + insert.upper() + ref_seq[end + 1]
loc_var = start_pos - 1
end_var = end_pos + 1
if loc_var + len(ref_var) - 1 != end_var and vcf:
sys.stderr.write("NOTE: Mutation overlaps 2 exons or more, VCF output is disabled \n")
continue
else:
            sys.stderr.write("WARNING: This variant isn't taken into account\n")
sys.stderr.write(" - variant: " + str(variant[0]) + "\n")
sys.stderr.write(" - line: " + line)
sys.exit()
if not vcf and not table:
print_line(samp, region, location, insert_type,
removed, added, alt_exp, ref_exp, ratio,
min_cov, min_exclu, mod, query, info,
alt_seq, refSeq)
elif vcf:
complement = str.maketrans('ATGCU', 'TACGA')
ref_var = ref_var.translate(complement)[::-1] if strand == '-' else ref_var
alt_var = alt_var.translate(complement)[::-1] if strand == '-' else alt_var
print_vcf_line(chro, loc_var, ref_var, alt_var, insert_type,
query, ratio, min_cov, removed, added.replace(" ", ""))
elif table:
var_name = variant[0] + "/" + query if "/" not in variant[0] else variant[0]
region_mod = region + ":" + mod if mod else region
var = (var_name, region_mod)
if var not in variants:
variants[var] = 0
variants[var] += 1
if samp not in samples:
samples[samp] = set()
samples[samp].add(var)
if samp not in data:
data[samp] = {}
data[samp][var] = float(ratio)
if table:
sorted_variants = sorted(variants, key=variants.get, reverse=True)
sys.stdout.write("Sample")
for v in sorted_variants:
if v[0].split("/")[0] == "Reference":
sys.stdout.write("\t" + v[0])
else:
sys.stdout.write("\t" + v[1])
sys.stdout.write("\n")
for s, sv in samples.items():
sys.stdout.write(s)
for v in sorted_variants:
if v in sv:
if 'Reference' not in v[0] and (not data[s][v]):
sys.stdout.write("\t" + ".")
else:
sys.stdout.write("\t" + str(data[s][v]))
else:
sys.stdout.write("\t" + ".")
sys.stdout.write("\n")
def main_find_report(args, argparser):
if args.infile.isatty() or args.target is None:
argparser.print_help()
sys.exit()
create_report(args)
|
mit
| 5,941,889,215,289,551,000 | 37.404762 | 106 | 0.483571 | false |
brendanlong/compression
|
compression/huffman.py
|
1
|
4277
|
import bitstring
import collections
import heapq
class Node(object):
def __init__(self, left, right):
if left.weight is None:
self.weight = None
else:
self.weight = left.weight + right.weight
self.left = left
self.right = right
self.symbol = left.symbol
def __lt__(self, other):
# If weights are equal, sort based on symbol. We do this so that the
# huffman tree will be deterministic, which makes it easier to test.
if self.weight == other.weight:
return self.symbol < other.symbol
return self.weight < other.weight
def add_to_code(self, bit):
for child in self.left, self.right:
child.add_to_code(bit)
def codes(self):
out = self.left.codes()
out.update(self.right.codes())
return out
def read(self, stream):
if stream.read("bool"):
return self.left.read(stream)
else:
return self.right.read(stream)
def binary(self, out=None):
out = bitstring.BitArray("0b0")
out.append(self.left.binary())
out.append(self.right.binary())
return out
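    # Editor note (worked example, not from the original author): for a small
    # tree such as Node(LeafNode(0x61), LeafNode(0x62)), binary() yields
    # 19 bits -- '0' for the internal node, then '1' plus the 8-bit symbol for
    # each leaf: 0 1 01100001 1 01100010.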
@staticmethod
def from_binary(stream):
try:
stream.pos
except AttributeError:
stream = bitstring.BitStream(stream)
code = bitstring.BitArray()
out = Node._from_binary(stream, code)
return out
@staticmethod
def _from_binary(stream, code):
if stream.read("bool"):
symbol = stream.read("uint:8")
return LeafNode(symbol, code=code)
else:
return Node(
Node._from_binary(stream, code + bitstring.Bits("0b1")),
Node._from_binary(stream, code + bitstring.Bits("0b0")))
@staticmethod
def from_data(data, weights=None):
if weights is None:
weights = collections.Counter(data)
heap = []
for symbol, weight in weights.items():
heapq.heappush(heap, LeafNode(symbol, weight))
while len(heap) > 1:
first = heapq.heappop(heap)
second = heapq.heappop(heap)
first.add_to_code(1)
second.add_to_code(0)
heapq.heappush(heap, Node(first, second))
return heap[0]
class LeafNode(Node):
def __init__(self, symbol, weight=None, code=None):
self.symbol = symbol
self.weight = weight
if code is not None:
self.code = code
else:
self.code = bitstring.BitArray()
def add_to_code(self, bit):
self.code.prepend("0b%s" % bit)
def codes(self):
return {self.symbol: self.code}
def binary(self):
out = bitstring.BitArray("0b1")
out.append(bitstring.Bits(uint=self.symbol, length=8))
return out
def read(self, stream):
return self.symbol
def compress(data, weights=None):
"""Performs huffman compression on data.
data - The data to compress (bytes).
weights - The weights for each code point. If None, we will use the
              number of occurrences. Should be a dict of {symbol: weight}.
return - The compressed data, with the huffman tree prepended (bytes).
"""
tree = Node.from_data(data, weights)
codes = tree.codes()
output = tree.binary()
for byte in data:
output.append(codes[byte])
# Pad the front with 0's followed by 1 so we know where the real data
# starts
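    # Editor note (worked example): if the tree plus payload came to 13 bits,
    # pad_bits would be 8 - (13 % 8) = 3, so '001' is prepended and the total
    # becomes a whole 16 bits; decompress() later skips everything up to and
    # including that first 1 bit.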
pad_bits = 8 - (len(output) % 8)
if pad_bits == 0:
pad_bits = 8
padding = bitstring.BitArray()
for i in range(pad_bits - 1):
padding.append("0b0")
padding.append("0b1")
output.prepend(padding)
return output.tobytes()
def decompress(data):
"""Decompresses huffman compressed data.
data - The compressed data, with the huffman tree prepended (bytes).
return - The decompressed data (bytes)
"""
stream = bitstring.BitStream(data)
# Read padding
while not stream.read("bool"):
pass
tree = Node.from_binary(stream)
out = []
try:
while 1:
out.append(tree.read(stream))
except bitstring.ReadError:
pass
return bytes(out)
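# --- Editor addition: minimal usage sketch, not part of the original module.
# It assumes the `bitstring` package is installed (as the imports above
# require) and uses only the functions defined in this file.
if __name__ == "__main__":
    sample = b"abracadabra"
    packed = compress(sample)
    # The round trip should reproduce the input exactly.
    assert decompress(packed) == sample
    print("compressed %d bytes into %d bytes" % (len(sample), len(packed)))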
|
unlicense
| -2,542,109,405,791,390,700 | 26.242038 | 77 | 0.582418 | false |
suvit/speedydeploy
|
speedydeploy/project/django.py
|
1
|
5549
|
# -*- coding: utf-8 -*-
from __future__ import with_statement
import os
from fabric import api as fab
from fabric.contrib.files import uncomment, comment
from ..base import _
from ..utils import render_template, upload_template
from ..deployment import command
class DjangoProject(object):
namespace = 'django'
python_path = '../env/bin/python'
media_path = '../media'
settings_local = './settings_local.py'
settings_local_filename = 'settings_local.py'
version = (1, 3)
HAS_WSGI = property(lambda self: self.version >= (1, 4))
HAS_REQUIREDEBUGFALSE = property(lambda self: self.version >= (1, 4))
USE_LOGGING = True
USE_SENTRY = True
USE_CELERY = property(lambda self: hasattr(fab.env, 'celery'))
USE_SPHINXSEARCH = property(lambda self: hasattr(fab.env, 'sphinxsearch'))
# app depends
USE_SOUTH = True
# TODO get info from settings.py
USE_STATICFILES = False
def __init__(self, project_path,
settings_local=None,
python_path=None):
self.project_path = project_path
fab.env['django_project_path'] = project_path
if settings_local is not None:
self.settings_local = settings_local
if python_path is not None:
self.python_path = python_path
self.settings_local_path = self.project_path + self.settings_local
path = fab.env['os'].path
fab.env['django_project_name'] = path.basename(self.project_path.rstrip('/'))
fab.env['django_python_path'] = project_path
fab.env['django_settings'] = 'settings'
fab.env['reqs_file'] = 'requirements.txt'
def get_version(self):
return '.'.join(str(part) for part in self.version)
def install_requirements(self, update=True):
opts = '-r'
if update:
opts = '-U %s' % opts
with fab.cd(_('%(django_python_path)s')):
fab.run(_("../%%(virtualenv)s/bin/pip install %s"
" %%(reqs_file)s" % opts))
def manage(self, command):
with fab.cd(_('%(django_python_path)s')):
fab.run('%s manage.py %s' % (self.python_path,
command)
)
# legacy
run = manage
def syncdb(self, app=''):
self.manage('syncdb --noinput %s' % app)
def migrate(self, app=''):
self.manage('migrate %s' % app)
def init_debug_server(self):
self.manage('init_debug_server')
def runserver(self, host="0.0.0.0", port=8080):
self.manage('runserver %s:%s' % (host, port))
def createsuperuser(self):
self.manage('createsuperuser')
@command(same_name=True)
def update_settings_local(self):
settings_local_path = fab.env.os.path.join(self.project_path,
self.settings_local_filename)
context = fab.env
names = ['logging']
if self.USE_SENTRY:
names.append('sentry')
if self.USE_SPHINXSEARCH:
names.append('sphinxsearch')
if self.USE_CELERY:
names.append('celery')
for name in names:
if getattr(self, 'USE_' + name.upper(), False):
text = render_template('django/settings_%s.py' % name,
context=context,
use_jinja=True)
context['settings_%s' % name] = text
else:
context['settings_%s' % name] = ''
upload_template(self.settings_local,
settings_local_path,
context, use_jinja=True)
def update_code(self):
with fab.cd(self.project_path):
fab.run('svn up')
def reload(self):
self.update_settings_local()
if self.version >= (1, 7) or self.USE_SOUTH:
self.migrate()
else:
self.syncdb()
if self.USE_STATICFILES:
self.deploy_static()
def set_maintanance_mode(self, on=True):
settings_local_path = self.project_path + 'settings_local.py'
if on:
uncomment(settings_local_path, r'MAINTENANCE_MODE.*')
else:
comment(settings_local_path, r'MAINTENANCE_MODE.*')
@command
def deploy_static(self):
self.manage('collectstatic -v0 --noinput')
class Django13(DjangoProject):
pass
class Django13ChangeProjectDir(Django13):
def __init__(self, *args, **kwargs):
super(Django13ChangeProjectDir, self).__init__(*args, **kwargs)
path = fab.env['os'].path
python_path = path.split(self.project_path.rstrip('/'))[0]
fab.env['django_python_path'] = python_path
fab.env['django_settings'] = '%s.settings' % fab.env['django_project_name']
class Django14(DjangoProject):
version = (1, 4)
def __init__(self, *args, **kwargs):
super(Django14, self).__init__(*args, **kwargs)
path = fab.env['os'].path
python_path = path.split(self.project_path.rstrip('/'))[0]
fab.env['django_python_path'] = python_path
fab.env['django_settings'] = '%s.settings' % fab.env['django_project_name']
class Django15(Django14):
version = (1, 5)
class Django16(Django15):
version = (1, 6)
class Django17(Django16):
version = (1, 7)
USE_SOUTH = False
def migrate(self, app=''):
self.manage('migrate --no-color %s' % app)
class Django18(Django17):
version = (1, 8)
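# Editor addition -- hedged usage sketch, not part of the original module.
# How a fabfile might drive one of these classes, assuming fab.env has already
# been populated elsewhere (fabric host settings plus the keys this module
# reads, notably fab.env['os'] and fab.env['virtualenv']); the paths below are
# made up for illustration only:
#
#   project = Django16('/home/deploy/mysite/www/mysite/',
#                      python_path='../env/bin/python')
#   project.install_requirements(update=False)
#   project.reload()   # uploads settings_local, then migrate()/syncdb()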
|
mit
| -7,802,574,031,891,586,000 | 28.052356 | 85 | 0.565507 | false |
santisiri/popego
|
envs/ALPHA-POPEGO/lib/python2.5/site-packages/numpy-1.0.4-py2.5-linux-x86_64.egg/numpy/lib/arraysetops.py
|
1
|
9143
|
"""
Set operations for 1D numeric arrays based on sorting.
Contains:
ediff1d,
unique1d,
intersect1d,
intersect1d_nu,
setxor1d,
setmember1d,
union1d,
setdiff1d
All functions work best with integer numerical arrays on input (e.g. indices).
For floating point arrays, inaccurate results may appear due to the usual round-off
and floating point comparison issues.
Except unique1d, union1d and intersect1d_nu, all functions expect inputs with
unique elements. Speed could be gained in some operations by an implementation of
sort() that can provide the permutation vectors directly, thus avoiding calls
to argsort().
Run _test_unique1d_speed() to compare performance of numpy.unique1d() and
numpy.unique() - it should be the same.
To do: Optionally return indices analogously to unique1d for all functions.
Author: Robert Cimrman
created: 01.11.2005
last revision: 07.01.2007
"""
__all__ = ['ediff1d', 'unique1d', 'intersect1d', 'intersect1d_nu', 'setxor1d',
'setmember1d', 'union1d', 'setdiff1d']
import time
import numpy as nm
def ediff1d(ary, to_end = None, to_begin = None):
"""The differences between consecutive elements of an array, possibly with
prefixed and/or appended values.
:Parameters:
- `ary` : array
This array will be flattened before the difference is taken.
- `to_end` : number, optional
If provided, this number will be tacked onto the end of the returned
differences.
- `to_begin` : number, optional
        If provided, this number will be tacked onto the beginning of the
returned differences.
:Returns:
- `ed` : array
The differences. Loosely, this will be (ary[1:] - ary[:-1]).
"""
ary = nm.asarray(ary).flat
ed = ary[1:] - ary[:-1]
arrays = [ed]
if to_begin is not None:
arrays.insert(0, to_begin)
if to_end is not None:
arrays.append(to_end)
if len(arrays) != 1:
# We'll save ourselves a copy of a potentially large array in the common
# case where neither to_begin or to_end was given.
ed = nm.hstack(arrays)
return ed
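# Editor note (worked examples, values derived from the code above):
#   ediff1d([1, 4, 9, 16])                        -> array([3, 5, 7])
#   ediff1d([1, 4, 9, 16], to_end=-1, to_begin=99) -> array([99, 3, 5, 7, -1])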
def unique1d(ar1, return_index=False):
"""Find the unique elements of 1D array.
Most of the other array set operations operate on the unique arrays
generated by this function.
:Parameters:
- `ar1` : array
This array will be flattened if it is not already 1D.
- `return_index` : bool, optional
If True, also return the indices against ar1 that result in the unique
array.
:Returns:
- `unique` : array
The unique values.
- `unique_indices` : int array, optional
The indices of the unique values. Only provided if return_index is True.
:See also:
numpy.lib.arraysetops has a number of other functions for performing set
operations on arrays.
"""
ar = nm.asarray(ar1).flatten()
if ar.size == 0:
if return_index: return nm.empty(0, nm.bool), ar
else: return ar
if return_index:
perm = ar.argsort()
aux = ar[perm]
flag = nm.concatenate( ([True], aux[1:] != aux[:-1]) )
return perm[flag], aux[flag]
else:
ar.sort()
flag = nm.concatenate( ([True], ar[1:] != ar[:-1]) )
return ar[flag]
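# Editor note (worked example): unique1d([1, 3, 1, 2]) -> array([1, 2, 3]).
# Note that with return_index=True this early version returns the pair
# (indices, values) in that order, whereas later numpy releases return the
# unique values first.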
def intersect1d( ar1, ar2 ):
"""Intersection of 1D arrays with unique elements.
Use unique1d() to generate arrays with only unique elements to use as inputs
to this function. Alternatively, use intersect1d_nu() which will find the
unique values for you.
:Parameters:
- `ar1` : array
- `ar2` : array
:Returns:
- `intersection` : array
:See also:
numpy.lib.arraysetops has a number of other functions for performing set
operations on arrays.
"""
aux = nm.concatenate((ar1,ar2))
aux.sort()
return aux[aux[1:] == aux[:-1]]
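# Editor note (hedged example): intersect1d() trusts its inputs to be unique;
# a value duplicated within a single input, e.g. intersect1d([1, 1, 2], [3, 4]),
# is mistaken for a value present in both arrays. Run unique1d() on the inputs
# first, or use intersect1d_nu() below, when duplicates are possible.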
def intersect1d_nu( ar1, ar2 ):
"""Intersection of 1D arrays with any elements.
The input arrays do not have unique elements like intersect1d() requires.
:Parameters:
- `ar1` : array
- `ar2` : array
:Returns:
- `intersection` : array
:See also:
numpy.lib.arraysetops has a number of other functions for performing set
operations on arrays.
"""
# Might be faster than unique1d( intersect1d( ar1, ar2 ) )?
aux = nm.concatenate((unique1d(ar1), unique1d(ar2)))
aux.sort()
return aux[aux[1:] == aux[:-1]]
def setxor1d( ar1, ar2 ):
"""Set exclusive-or of 1D arrays with unique elements.
Use unique1d() to generate arrays with only unique elements to use as inputs
to this function.
:Parameters:
- `ar1` : array
- `ar2` : array
:Returns:
- `xor` : array
The values that are only in one, but not both, of the input arrays.
:See also:
numpy.lib.arraysetops has a number of other functions for performing set
operations on arrays.
"""
aux = nm.concatenate((ar1, ar2))
if aux.size == 0:
return aux
aux.sort()
# flag = ediff1d( aux, to_end = 1, to_begin = 1 ) == 0
flag = nm.concatenate( ([True], aux[1:] != aux[:-1], [True] ) )
# flag2 = ediff1d( flag ) == 0
flag2 = flag[1:] == flag[:-1]
return aux[flag2]
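# Editor note (worked example): setxor1d([1, 2, 3], [2, 3, 4]) -> array([1, 4])
# -- the values that appear in exactly one of the two (unique-element) inputs.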
def setmember1d( ar1, ar2 ):
"""Return a boolean array of shape of ar1 containing True where the elements
of ar1 are in ar2 and False otherwise.
Use unique1d() to generate arrays with only unique elements to use as inputs
to this function.
:Parameters:
- `ar1` : array
- `ar2` : array
:Returns:
- `mask` : bool array
The values ar1[mask] are in ar2.
:See also:
numpy.lib.arraysetops has a number of other functions for performing set
operations on arrays.
"""
zlike = nm.zeros_like
ar = nm.concatenate( (ar1, ar2 ) )
tt = nm.concatenate( (zlike( ar1 ), zlike( ar2 ) + 1) )
# We need this to be a stable sort, so always use 'mergesort' here. The
# values from the first array should always come before the values from the
# second array.
perm = ar.argsort(kind='mergesort')
aux = ar[perm]
aux2 = tt[perm]
# flag = ediff1d( aux, 1 ) == 0
flag = nm.concatenate( (aux[1:] == aux[:-1], [False] ) )
ii = nm.where( flag * aux2 )[0]
aux = perm[ii+1]
perm[ii+1] = perm[ii]
perm[ii] = aux
indx = perm.argsort(kind='mergesort')[:len( ar1 )]
return flag[indx]
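# Editor note (worked example): setmember1d([2, 5, 7], [1, 2, 3, 7]) returns
# array([True, False, True]) -- 2 and 7 occur in ar2, 5 does not.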
def union1d( ar1, ar2 ):
"""Union of 1D arrays with unique elements.
Use unique1d() to generate arrays with only unique elements to use as inputs
to this function.
:Parameters:
- `ar1` : array
- `ar2` : array
:Returns:
- `union` : array
:See also:
numpy.lib.arraysetops has a number of other functions for performing set
operations on arrays.
"""
return unique1d( nm.concatenate( (ar1, ar2) ) )
def setdiff1d( ar1, ar2 ):
"""Set difference of 1D arrays with unique elements.
Use unique1d() to generate arrays with only unique elements to use as inputs
to this function.
:Parameters:
- `ar1` : array
- `ar2` : array
:Returns:
- `difference` : array
The values in ar1 that are not in ar2.
:See also:
numpy.lib.arraysetops has a number of other functions for performing set
operations on arrays.
"""
aux = setmember1d(ar1,ar2)
if aux.size == 0:
return aux
else:
return nm.asarray(ar1)[aux == 0]
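# Editor note (worked example): setdiff1d([1, 2, 3, 4], [2, 4]) -> array([1, 3])
# -- the members of the first (unique-element) array that are absent from the
# second.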
def _test_unique1d_speed( plot_results = False ):
# exponents = nm.linspace( 2, 7, 9 )
exponents = nm.linspace( 2, 7, 9 )
ratios = []
nItems = []
dt1s = []
dt2s = []
for ii in exponents:
nItem = 10 ** ii
print 'using %d items:' % nItem
a = nm.fix( nItem / 10 * nm.random.random( nItem ) )
print 'unique:'
tt = time.clock()
b = nm.unique( a )
dt1 = time.clock() - tt
print dt1
print 'unique1d:'
tt = time.clock()
c = unique1d( a )
dt2 = time.clock() - tt
print dt2
if dt1 < 1e-8:
ratio = 'ND'
else:
ratio = dt2 / dt1
print 'ratio:', ratio
print 'nUnique: %d == %d\n' % (len( b ), len( c ))
nItems.append( nItem )
ratios.append( ratio )
dt1s.append( dt1 )
dt2s.append( dt2 )
assert nm.alltrue( b == c )
print nItems
print dt1s
print dt2s
print ratios
if plot_results:
import pylab
def plotMe( fig, fun, nItems, dt1s, dt2s ):
pylab.figure( fig )
fun( nItems, dt1s, 'g-o', linewidth = 2, markersize = 8 )
fun( nItems, dt2s, 'b-x', linewidth = 2, markersize = 8 )
pylab.legend( ('unique', 'unique1d' ) )
pylab.xlabel( 'nItem' )
pylab.ylabel( 'time [s]' )
plotMe( 1, pylab.loglog, nItems, dt1s, dt2s )
plotMe( 2, pylab.plot, nItems, dt1s, dt2s )
pylab.show()
if (__name__ == '__main__'):
_test_unique1d_speed( plot_results = True )
|
bsd-3-clause
| 7,430,667,529,558,364,000 | 26.960245 | 80 | 0.603959 | false |
mganeva/mantid
|
scripts/Interface/reduction_gui/reduction/diffraction/diffraction_reduction_script.py
|
1
|
15359
|
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from __future__ import (absolute_import, division, print_function)
#pylint: disable=invalid-name,R0912
"""
Classes for each reduction step. Those are kept separately
from the interface class so that the DgsReduction class could
be used independently of the interface implementation
"""
import os
from mantid.kernel import Logger
from mantid.api import FileFinder
from reduction_gui.reduction.scripter import BaseReductionScripter
class DiffractionReductionScripter(BaseReductionScripter):
""" Organizes the set of reduction parameters that will be used to
create a reduction script. Parameters are organized by groups that
will each have their own UI representation.
Items in dictionary:
1. facility_name;
2. instrument_name
3. _output_directory
4. _observers
"""
TOPLEVEL_WORKFLOWALG = "SNSPowderReductionPlus"
WIDTH_END = "".join([" " for i in range(len(TOPLEVEL_WORKFLOWALG))])
WIDTH = WIDTH_END + " "
AUTOSCRIPTNAME = 'SNSPowderReductionScript_AutoSave.py'
def __init__(self, name, facility='SNS'):
""" Initialization
"""
# Call base class
super(DiffractionReductionScripter, self).__init__(name=name, facility=facility)
# Find whether there is stored setup XMLs
homedir = os.path.expanduser("~")
mantidconfigdir = os.path.join(homedir, ".mantid")
self.configDir = mantidconfigdir
# create configuration dir if it has not been
if os.path.exists(self.configDir) is False:
os.makedirs(self.configDir)
# Information output
if self.facility_name is False:
self.facility_name = 'SNS'
dbmsg = '[SNS Powder Reduction] Facility = %s, Instrument = %s\n' \
'Auto-save Directory %s' % (self.facility_name, self.instrument_name,
mantidconfigdir)
Logger("DiffractionReductionScripter").debug(str(dbmsg))
return
def to_script(self, file_name=None):
""" Generate reduction script via observers and
(1) save the script to disk and (2) save the reduction setup to disk.
Arguments:
- file_name: name of the file to write the script to
"""
# Collect partial scripters from observers
paramdict = {}
for observer in self._observers:
obstate = observer.state()
self.parseTabSetupScript(observer._subject.__class__.__name__, obstate, paramdict)
# ENDFOR
# Construct python commands
script = self.constructPythonScript(paramdict)
# Save script to disk
if file_name is None:
file_name = os.path.join(self.configDir, DiffractionReductionScripter.AUTOSCRIPTNAME)
try:
f = open(file_name, 'w')
f.write(script)
f.close()
except IOError as e:
print ("Unable to save script to file. Reason: %s." % (str(e)))
# Export XML file
autosavexmlfname = os.path.join(self.configDir, "snspowderreduction.xml")
self.to_xml(autosavexmlfname)
# Information output
wbuf = "Reduction script: (script is saved to %s; setup is saved to %s. \n" % (
file_name, autosavexmlfname)
wbuf += script
wbuf += "\n========== End of Script ==========="
print (wbuf)
return script
def to_xml(self, file_name=None):
""" Extending base class to_xml
"""
BaseReductionScripter.to_xml(self, file_name)
return
def parseTabSetupScript(self, tabsetuptype, setupscript, paramdict):
""" Parse script returned from tab setup
@param setupscript : object of SetupScript for this tab/observer
"""
# print "ClassName: %s. Type %s" % (tabsetuptype, type(setupscript))
if setupscript is None:
return
else:
paramdict[tabsetuptype] = {}
terms = str(setupscript).split("\n")
for item in terms:
item = item.strip()
if item == "":
continue
item = item.rstrip(",")
subterms = item.split("=", 1)
key = subterms[0].strip()
value = subterms[1].strip().strip("\"").strip("'")
paramdict[tabsetuptype][key] = value
# ENDFOR
# ENDIF
return
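    # Editor note (worked example, derived from the parsing loop above): with
    # tabsetuptype == 'RunSetupWidget' and a setupscript whose text is
    #     RunNumber = 12345,
    #     Binning = "-0.0008",
    # the method leaves paramdict['RunSetupWidget'] equal to
    # {'RunNumber': '12345', 'Binning': '-0.0008'}.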
def constructPythonScript(self, paramdict):
""" Construct python script
"""
# 1. Obtain all information
runsetupdict = paramdict["RunSetupWidget"]
advsetupdict = paramdict["AdvancedSetupWidget"]
filterdict = paramdict["FilterSetupWidget"]
# 2. Obtain some information
datafilenames = self.getDataFileNames(runsetupdict, advsetupdict)
if len(datafilenames) == 0:
raise NotImplementedError("RunNumber cannot be neglected. ")
dofilter = self.doFiltering(filterdict)
# 3. Header
script = "from mantid.simpleapi import *\n"
script += "config['default.facility']=\"%s\"\n" % self.facility_name
script += "\n"
if dofilter:
# a) Construct python script with generating filters
for runtuple in datafilenames:
runnumber = runtuple[0]
datafilename = runtuple[1]
# print "Working on run ", str(runnumber), " in file ", datafilename
# i. Load meta data only
metadatawsname = str(datafilename.split(".")[0]+"_meta")
splitwsname = str(datafilename.split(".")[0] + "_splitters")
splitinfowsname = str(datafilename.split(".")[0] + "_splitinfo")
script += "# Load data's log only\n"
script += "Load(\n"
script += "{}Filename = '{}',\n".format(DiffractionReductionScripter.WIDTH, datafilename)
script += "{}OutputWorkspace = '{}',\n".format(DiffractionReductionScripter.WIDTH, metadatawsname)
script += "{}MetaDataOnly = True)\n".format(DiffractionReductionScripter.WIDTH)
script += "\n"
# ii. Generate event filters
script += "# Construct the event filters\n"
script += "GenerateEventsFilter(\n"
script += "{}InputWorkspace = '{}',\n".format(DiffractionReductionScripter.WIDTH, metadatawsname)
script += "{}OutputWorkspace = '{}',\n".format(DiffractionReductionScripter.WIDTH, splitwsname)
script += "{}InformationWorkspace = '{}',\n".format(DiffractionReductionScripter.WIDTH, splitinfowsname)
if filterdict["FilterByTimeMin"] != "":
script += "{}StartTime = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["FilterByTimeMin"])
if filterdict["FilterByTimeMax"] != "":
script += "{}StopTime = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["FilterByTimeMax"])
if filterdict["FilterType"] == "ByTime":
# Filter by time
script += "{}TimeInterval = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["LengthOfTimeInterval"])
script += "{}UnitOfTime = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["UnitOfTime"])
script += "{}LogName = '',\n".format(DiffractionReductionScripter.WIDTH) # intentionally empty
elif filterdict["FilterType"] == "ByLogValue":
# Filter by log value
script += "{}LogName = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["LogName"])
if filterdict["MinimumLogValue"] != "":
script += "{}MinimumLogValue = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["MinimumLogValue"])
if filterdict["MaximumLogValue"] != "":
script += "{}MaximumLogValue = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["MaximumLogValue"])
script += "{}FilterLogValueByChangingDirection = '{}',\n".format(DiffractionReductionScripter.WIDTH,
filterdict["FilterLogValueByChangingDirection"])
if filterdict["LogValueInterval"] != "":
# Filter by log value interval
script += "{}LogValueInterval = '{}',\n".format(DiffractionReductionScripter.WIDTH,
filterdict["LogValueInterval"])
script += "{}LogBoundary = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["LogBoundary"])
if filterdict["TimeTolerance"] != "":
script += "{}TimeTolerance = '{}',\n".format(DiffractionReductionScripter.WIDTH, filterdict["TimeTolerance"])
if filterdict["LogValueTolerance"] != "":
script += "{}LogValueTolerance = '{}',\n".format(DiffractionReductionScripter.WIDTH,
filterdict["LogValueTolerance"])
# ENDIF
script += ")\n"
# iii. Data reduction
script += self.buildPowderDataReductionScript(runsetupdict, advsetupdict, runnumber, splitwsname, splitinfowsname)
# ENDFOR data file names
else:
            # b) Construct python script without generating filters
script += self.buildPowderDataReductionScript(runsetupdict, advsetupdict)
# ENDIF : do filter
print ("Script and Save XML to default.")
return script
def doFiltering(self, filterdict):
""" Check filter dictionary to determine whether filtering is required.
"""
dofilter = False
if filterdict["FilterByTimeMin"] != "":
dofilter = True
# print "Yes! Min Generate Filter will be called!"
if filterdict["FilterByTimeMax"] != "":
dofilter = True
# print "Yes! Max Generate Filter will be called!"
if filterdict["FilterType"] != "NoFilter":
dofilter = True
# print "Yes! FilterType Generate Filter will be called!"
return dofilter
def getDataFileNames(self, runsetupdict, advsetupdict):
""" Obtain the data file names (run names + SUFFIX)
Return: list of files
"""
runnumbers_str = str(runsetupdict["RunNumber"])
if runnumbers_str.count(':') > 0:
runnumbers_str = runnumbers_str.replace(':', '-')
runnumbers_str = FileFinder.findRuns('{}{}'.format(self.instrument_name, runnumbers_str))
runnumbers_str = [os.path.split(filename)[-1] for filename in runnumbers_str]
# create an integer version
runnumbers = []
for filename in runnumbers_str:
for extension in ['_event.nxs', '.nxs.h5']:
filename = filename.replace(extension, '')
runnumber = filename.split('_')[-1]
runnumbers.append(int(runnumber))
# put together the output
datafilenames = []
for (filename, runnumber) in zip(runnumbers_str, runnumbers):
datafilenames.append((runnumber, filename))
return datafilenames
def buildPowderDataReductionScript(self, runsetupdict, advsetupdict,
runnumber=None, splitwsname=None,
splitinfowsname=None):
""" Build the script to call SNSPowderReduction()
"""
script = 'SNSPowderReduction(\n'
# 1. Run setup
# a) determine whether to turn on/off corrections
if int(runsetupdict["DisableBackgroundCorrection"]) == 1:
runsetupdict["BackgroundNumber"] = -1
if int(runsetupdict["DisableVanadiumCorrection"]) == 1:
runsetupdict["VanadiumNumber"] = -1
if int(runsetupdict["DisableVanadiumBackgroundCorrection"]) == 1:
runsetupdict["VanadiumBackgroundNumber"] = -1
# b) do resample X or binning
if int(runsetupdict["DoReSampleX"]) == 0:
# turn off the option of SampleX
runsetupdict["ResampleX"] = ''
else:
# turn off the binning
runsetupdict["Binning"] = ''
# only NOMAD uses 'ExpIniFile'
if not self.instrument_name.lower().startswith('nom'):
runsetupdict.pop('ExpIniFile', None)
# c) all properties
        for propname, propvalue in runsetupdict.items():
# skip these pseudo-properties
if propname in ['DisableBackgroundCorrection', 'DisableVanadiumCorrection',
'DisableVanadiumBackgroundCorrection', 'DoReSampleX']:
continue
if propvalue == '' or propvalue is None:
# Skip not-defined value
continue
if propname == "RunNumber":
propname = 'Filename' # change to what SNSPowderReduction uses
# option to take user input run number
if runnumber is not None:
propvalue = runnumber
# add the instrument name to the file hint
propvalue = '{}_{}'.format(self.instrument_name, str(propvalue))
# Add value
script += "{}{} = '{}',\n".format(DiffractionReductionScripter.WIDTH, propname, propvalue)
# ENDFOR
# 2. Advanced setup
        for propname, propvalue in advsetupdict.items():
if propvalue == '' or propvalue is None:
# Skip not-defined value
continue
# Add to script
script += "{}{} = '{}',\n".format(DiffractionReductionScripter.WIDTH, propname, propvalue)
# ENDFOR
# 3. Optional spliter workspace
if splitwsname is not None and splitwsname != "":
script += "{}SplittersWorkspace = '{}',\n".format(DiffractionReductionScripter.WIDTH, str(splitwsname))
if splitinfowsname is not None and splitinfowsname != "":
script += "{}SplitInformationWorkspace='{}',\n".format(DiffractionReductionScripter.WIDTH,
str(splitinfowsname))
script += "{})\n".format(DiffractionReductionScripter.WIDTH)
return script
def _synInstrument(self):
""" Syn instrument from observer-widget
"""
# Facility instrument
for observer in self._observers:
observertype = observer._subject.__class__.__name__
print ("[ToScript] Observer Type = ", observertype)
if observertype.count("AdvancedWidget") == 1:
self.instrument_name = observer._subject._instrument_name
return
|
gpl-3.0
| -2,344,812,219,454,872,600 | 41.545706 | 140 | 0.578488 | false |