| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6–947k | stringlengths 5–100 | stringlengths 4–231 | stringclasses 1 value | stringclasses 15 values | int64 6–947k | float64 0–0.34 |
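Each row pairs a source file (text) with its repository, path, language, license, size in bytes, and a quality score. A minimal sketch of filtering such a dump, assuming it has been exported as a local JSON Lines file (the filename and threshold below are hypothetical):

```python
import json

DUMP_PATH = "code_dataset.jsonl"  # hypothetical export; one JSON object per row

def iter_low_score_rows(path, max_score=0.01):
    """Yield (repo_name, path, size) for rows whose score is at most max_score."""
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            row = json.loads(line)
            if row["score"] <= max_score:
                yield row["repo_name"], row["path"], row["size"]

if __name__ == "__main__":
    for repo, file_path, size in iter_low_score_rows(DUMP_PATH):
        print(f"{repo}:{file_path} ({size} bytes)")
```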
import datetime
import csv
with open('SYSTEMS.csv') as f:
reader = csv.reader(f)
ALLOWED_SYSTEMS = [l[0] for l in reader]
class IntelEntry:
KEYS = ["timer_name", "alliance", "system", "time", "date", "location"]
def __init__(self, timer_name="", alliance="", system="", time="", date="", location=""):
if timer_name != "":
self.timer_name = timer_name
else:
raise ValueError("Provided timer not valid.")
if alliance != "":
self.alliance = alliance.strip()
else:
raise ValueError("Provided alliance not valid.")
system = system.upper()
if system in ALLOWED_SYSTEMS:
self.system = system
else:
raise ValueError("Provided solar system not valid.")
self.location = location
if time != "":
self.time = datetime.datetime.strptime(' '.join([date, time]), '%m/%d/%y %H:%M')
if self.time < datetime.datetime.now():
raise ValueError("Provided date/time not valid. Time must be in the future.")
else:
raise ValueError("Provided date/time not valid. Time must be in format '%m/%d/%y %H:%M'.")
def to_dict(self):
return { "timer_name": self.timer_name,
"alliance": self.alliance,
"system": self.system,
"location": self.location,
"time": self.time }
| cameronobrien/BroadsideBot | app/intel_entry.py | Python | mit | 1,447 | 0.004838 |
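A brief usage sketch for the IntelEntry class above; it assumes SYSTEMS.csv lists "JITA" in its first column, and the timestamp is a made-up future date. The constructor raises ValueError for an empty timer name or alliance, an unknown system, or a time that is not in the future:

```python
from app.intel_entry import IntelEntry  # module shown above

try:
    entry = IntelEntry(timer_name="Armor timer",
                       alliance="Test Alliance",
                       system="jita",        # upper-cased, then checked against SYSTEMS.csv
                       time="18:30",
                       date="12/31/29",      # parsed as '%m/%d/%y %H:%M'
                       location="Planet IV")
    print(entry.to_dict())
except ValueError as err:
    print("rejected:", err)
```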
import sys
from rpython.memory.gc.semispace import SemiSpaceGC
from rpython.memory.gc.semispace import GCFLAG_EXTERNAL, GCFLAG_FORWARDED
from rpython.memory.gc.semispace import GC_HASH_TAKEN_ADDR
from rpython.memory.gc import env
from rpython.rtyper.lltypesystem.llmemory import NULL, raw_malloc_usage
from rpython.rtyper.lltypesystem import lltype, llmemory, llarena
from rpython.rlib.objectmodel import free_non_gc_object
from rpython.rlib.debug import ll_assert
from rpython.rlib.debug import debug_print, debug_start, debug_stop
from rpython.rlib.rarithmetic import intmask, LONG_BIT
from rpython.rtyper.lltypesystem.lloperation import llop
WORD = LONG_BIT // 8
# The following flag is never set on young objects, i.e. the ones living
# in the nursery. It is initially set on all prebuilt and old objects,
# and gets cleared by the write_barrier() when we write in them a
# pointer to a young object.
GCFLAG_NO_YOUNG_PTRS = SemiSpaceGC.first_unused_gcflag << 0
# The following flag is set on some last-generation objects (== prebuilt
# objects for GenerationGC, but see also HybridGC). The flag is set
# unless the object is already listed in 'last_generation_root_objects'.
# When a pointer is written inside an object with GCFLAG_NO_HEAP_PTRS
# set, the write_barrier clears the flag and adds the object to
# 'last_generation_root_objects'.
GCFLAG_NO_HEAP_PTRS = SemiSpaceGC.first_unused_gcflag << 1
class GenerationGC(SemiSpaceGC):
"""A basic generational GC: it's a SemiSpaceGC with an additional
nursery for young objects. A write barrier is used to ensure that
old objects that contain pointers to young objects are recorded in
a list.
"""
inline_simple_malloc = True
inline_simple_malloc_varsize = True
needs_write_barrier = True
prebuilt_gc_objects_are_static_roots = False
first_unused_gcflag = SemiSpaceGC.first_unused_gcflag << 2
# the following values override the default arguments of __init__ when
# translating to a real backend.
TRANSLATION_PARAMS = {'space_size': 8*1024*1024, # 8 MB
'nursery_size': 3*1024*1024, # 3 MB
'min_nursery_size': 48*1024,
'auto_nursery_size': True}
nursery_hash_base = -1
def __init__(self, config,
nursery_size=32*WORD,
min_nursery_size=32*WORD,
auto_nursery_size=False,
space_size=1024*WORD,
max_space_size=sys.maxint//2+1,
**kwds):
SemiSpaceGC.__init__(self, config,
space_size = space_size,
max_space_size = max_space_size,
**kwds)
assert min_nursery_size <= nursery_size <= space_size // 2
self.initial_nursery_size = nursery_size
self.auto_nursery_size = auto_nursery_size
self.min_nursery_size = min_nursery_size
# define nursery fields
self.reset_nursery()
self._setup_wb()
# compute the constant lower bounds for the attributes
# largest_young_fixedsize and largest_young_var_basesize.
# It is expected that most (or all) objects have a fixedsize
# that is much lower anyway.
sz = self.get_young_fixedsize(self.min_nursery_size)
self.lb_young_fixedsize = sz
sz = self.get_young_var_basesize(self.min_nursery_size)
self.lb_young_var_basesize = sz
def setup(self):
self.old_objects_pointing_to_young = self.AddressStack()
# ^^^ a list of addresses inside the old objects space; it
# may contain static prebuilt objects as well. More precisely,
# it lists exactly the old and static objects whose
# GCFLAG_NO_YOUNG_PTRS bit is not set.
self.young_objects_with_weakrefs = self.AddressStack()
self.last_generation_root_objects = self.AddressStack()
self.young_objects_with_id = self.AddressDict()
SemiSpaceGC.setup(self)
self.set_nursery_size(self.initial_nursery_size)
# the GC is fully setup now. The rest can make use of it.
if self.auto_nursery_size:
newsize = nursery_size_from_env()
#if newsize <= 0:
# ---disabled--- just use the default value.
# newsize = env.estimate_best_nursery_size()
if newsize > 0:
self.set_nursery_size(newsize)
self.reset_nursery()
def _teardown(self):
self.collect() # should restore last gen objects flags
SemiSpaceGC._teardown(self)
def reset_nursery(self):
self.nursery = NULL
self.nursery_top = NULL
self.nursery_free = NULL
def set_nursery_size(self, newsize):
debug_start("gc-set-nursery-size")
if newsize < self.min_nursery_size:
newsize = self.min_nursery_size
if newsize > self.space_size // 2:
newsize = self.space_size // 2
# Compute the new bounds for how large young objects can be
# (larger objects are allocated directly old). XXX adjust
self.nursery_size = newsize
self.largest_young_fixedsize = self.get_young_fixedsize(newsize)
self.largest_young_var_basesize = self.get_young_var_basesize(newsize)
scale = 0
while (self.min_nursery_size << (scale+1)) <= newsize:
scale += 1
self.nursery_scale = scale
debug_print("nursery_size =", newsize)
debug_print("largest_young_fixedsize =",
self.largest_young_fixedsize)
debug_print("largest_young_var_basesize =",
self.largest_young_var_basesize)
debug_print("nursery_scale =", scale)
# we get the following invariant:
assert self.nursery_size >= (self.min_nursery_size << scale)
# Force a full collect to remove the current nursery whose size
# no longer matches the bounds that we just computed. This must
# be done after changing the bounds, because it might re-create
# a new nursery (e.g. if it invokes finalizers).
self.semispace_collect()
debug_stop("gc-set-nursery-size")
@staticmethod
def get_young_fixedsize(nursery_size):
return nursery_size // 2 - 1
@staticmethod
def get_young_var_basesize(nursery_size):
return nursery_size // 4 - 1
@classmethod
def JIT_max_size_of_young_obj(cls):
min_nurs_size = cls.TRANSLATION_PARAMS['min_nursery_size']
return cls.get_young_fixedsize(min_nurs_size)
def is_in_nursery(self, addr):
ll_assert(llmemory.cast_adr_to_int(addr) & 1 == 0,
"odd-valued (i.e. tagged) pointer unexpected here")
return self.nursery <= addr < self.nursery_top
def appears_to_be_in_nursery(self, addr):
# same as is_in_nursery(), but may return True accidentally if
# 'addr' is a tagged pointer with just the wrong value.
if not self.translated_to_c:
if not self.is_valid_gc_object(addr):
return False
return self.nursery <= addr < self.nursery_top
def malloc_fixedsize_clear(self, typeid, size,
has_finalizer=False,
is_finalizer_light=False,
contains_weakptr=False):
if (has_finalizer or
(raw_malloc_usage(size) > self.lb_young_fixedsize and
raw_malloc_usage(size) > self.largest_young_fixedsize)):
# ^^^ we do two size comparisons; the first one appears redundant,
# but it can be constant-folded if 'size' is a constant; then
# it almost always folds down to False, which kills the
# second comparison as well.
ll_assert(not contains_weakptr, "wrong case for mallocing weakref")
# "non-simple" case or object too big: don't use the nursery
return SemiSpaceGC.malloc_fixedsize_clear(self, typeid, size,
has_finalizer,
is_finalizer_light,
contains_weakptr)
size_gc_header = self.gcheaderbuilder.size_gc_header
totalsize = size_gc_header + size
result = self.nursery_free
if raw_malloc_usage(totalsize) > self.nursery_top - result:
result = self.collect_nursery()
llarena.arena_reserve(result, totalsize)
# GCFLAG_NO_YOUNG_PTRS is never set on young objs
self.init_gc_object(result, typeid, flags=0)
self.nursery_free = result + totalsize
if contains_weakptr:
self.young_objects_with_weakrefs.append(result + size_gc_header)
return llmemory.cast_adr_to_ptr(result+size_gc_header, llmemory.GCREF)
def malloc_varsize_clear(self, typeid, length, size, itemsize,
offset_to_length):
# Only use the nursery if there are not too many items.
if not raw_malloc_usage(itemsize):
too_many_items = False
else:
# The following line is usually constant-folded because both
# min_nursery_size and itemsize are constants (the latter
# due to inlining).
maxlength_for_minimal_nursery = (self.min_nursery_size // 4 //
raw_malloc_usage(itemsize))
# The actual maximum length for our nursery depends on how
# many times our nursery is bigger than the minimal size.
# The computation is done in this roundabout way so that
# the only remaining computation is the following
# shift.
maxlength = maxlength_for_minimal_nursery << self.nursery_scale
too_many_items = length > maxlength
if (too_many_items or
(raw_malloc_usage(size) > self.lb_young_var_basesize and
raw_malloc_usage(size) > self.largest_young_var_basesize)):
# ^^^ we do two size comparisons; the first one appears redundant,
# but it can be constant-folded if 'size' is a constant; then
# it almost always folds down to False, which kills the
# second comparison as well.
return SemiSpaceGC.malloc_varsize_clear(self, typeid, length, size,
itemsize, offset_to_length)
# with the above checks we know now that totalsize cannot be more
# than about half of the nursery size; in particular, the + and *
# cannot overflow
size_gc_header = self.gcheaderbuilder.size_gc_header
totalsize = size_gc_header + size + itemsize * length
result = self.nursery_free
if raw_malloc_usage(totalsize) > self.nursery_top - result:
result = self.collect_nursery()
llarena.arena_reserve(result, totalsize)
# GCFLAG_NO_YOUNG_PTRS is never set on young objs
self.init_gc_object(result, typeid, flags=0)
(result + size_gc_header + offset_to_length).signed[0] = length
self.nursery_free = result + llarena.round_up_for_allocation(totalsize)
return llmemory.cast_adr_to_ptr(result+size_gc_header, llmemory.GCREF)
# override the init_gc_object methods to change the default value of 'flags',
# used by objects that are directly created outside the nursery by the SemiSpaceGC.
# These objects must have the GCFLAG_NO_YOUNG_PTRS flag set immediately.
def init_gc_object(self, addr, typeid, flags=GCFLAG_NO_YOUNG_PTRS):
SemiSpaceGC.init_gc_object(self, addr, typeid, flags)
def init_gc_object_immortal(self, addr, typeid,
flags=GCFLAG_NO_YOUNG_PTRS|GCFLAG_NO_HEAP_PTRS):
SemiSpaceGC.init_gc_object_immortal(self, addr, typeid, flags)
# flags exposed for the HybridGC subclass
GCFLAGS_FOR_NEW_YOUNG_OBJECTS = 0 # NO_YOUNG_PTRS never set on young objs
GCFLAGS_FOR_NEW_EXTERNAL_OBJECTS = (GCFLAG_EXTERNAL | GCFLAG_FORWARDED |
GCFLAG_NO_YOUNG_PTRS |
GC_HASH_TAKEN_ADDR)
# ____________________________________________________________
# Support code for full collections
def collect(self, gen=1):
if gen == 0:
self.collect_nursery()
else:
SemiSpaceGC.collect(self)
def semispace_collect(self, size_changing=False):
self.reset_young_gcflags() # we are doing a full collection anyway
self.weakrefs_grow_older()
self.ids_grow_older()
self.reset_nursery()
SemiSpaceGC.semispace_collect(self, size_changing)
def make_a_copy(self, obj, objsize):
tid = self.header(obj).tid
# During a full collect, all copied objects might implicitly come
# from the nursery. In case they do, we must add this flag:
tid |= GCFLAG_NO_YOUNG_PTRS
return self._make_a_copy_with_tid(obj, objsize, tid)
# history: this was missing and caused an object to become old but without the
# flag set. Such an object is bogus in the sense that the write_barrier doesn't
# work on it. So it can eventually contain a ptr to a young object but we didn't
# know about it. That ptr was not updated in the next minor collect... boom at
# the next usage.
def reset_young_gcflags(self):
# This empties self.old_objects_pointing_to_young, and puts the
# GCFLAG_NO_YOUNG_PTRS back on all these objects. We could put
# the flag back more lazily but we expect this list to be short
# anyway, and it's much saner to stick to the invariant:
# non-young objects all have GCFLAG_NO_YOUNG_PTRS set unless
# they are listed in old_objects_pointing_to_young.
oldlist = self.old_objects_pointing_to_young
while oldlist.non_empty():
obj = oldlist.pop()
hdr = self.header(obj)
hdr.tid |= GCFLAG_NO_YOUNG_PTRS
def weakrefs_grow_older(self):
while self.young_objects_with_weakrefs.non_empty():
obj = self.young_objects_with_weakrefs.pop()
self.objects_with_weakrefs.append(obj)
def collect_roots(self):
"""GenerationGC: collects all roots.
HybridGC: collects all roots, excluding the generation 3 ones.
"""
# Warning! References from static (and possibly gen3) objects
# are found by collect_last_generation_roots(), which must be
# called *first*! If it is called after walk_roots(), then the
# HybridGC explodes if one of the _collect_root causes an object
# to be added to self.last_generation_root_objects. Indeed, in
# this case, the newly added object is traced twice: once by
# collect_last_generation_roots() and once because it was added
# in self.rawmalloced_objects_to_trace.
self.collect_last_generation_roots()
self.root_walker.walk_roots(
SemiSpaceGC._collect_root, # stack roots
SemiSpaceGC._collect_root, # static in prebuilt non-gc structures
None) # we don't need the static in prebuilt gc objects
def collect_last_generation_roots(self):
stack = self.last_generation_root_objects
self.last_generation_root_objects = self.AddressStack()
while stack.non_empty():
obj = stack.pop()
self.header(obj).tid |= GCFLAG_NO_HEAP_PTRS
# ^^^ the flag we just added will be removed immediately if
# the object still contains pointers to younger objects
self.trace(obj, self._trace_external_obj, obj)
stack.delete()
def _trace_external_obj(self, pointer, obj):
addr = pointer.address[0]
newaddr = self.copy(addr)
pointer.address[0] = newaddr
self.write_into_last_generation_obj(obj)
# ____________________________________________________________
# Implementation of nursery-only collections
def collect_nursery(self):
if self.nursery_size > self.top_of_space - self.free:
# the semispace is running out, do a full collect
self.obtain_free_space(self.nursery_size)
ll_assert(self.nursery_size <= self.top_of_space - self.free,
"obtain_free_space failed to do its job")
if self.nursery:
debug_start("gc-minor")
debug_print("--- minor collect ---")
debug_print("nursery:", self.nursery, "to", self.nursery_top)
# a nursery-only collection
scan = beginning = self.free
self.collect_oldrefs_to_nursery()
self.collect_roots_in_nursery()
self.collect_young_objects_with_finalizers()
scan = self.scan_objects_just_copied_out_of_nursery(scan)
# at this point, all static and old objects have got their
# GCFLAG_NO_YOUNG_PTRS set again by trace_and_drag_out_of_nursery
if self.young_objects_with_weakrefs.non_empty():
self.invalidate_young_weakrefs()
if self.young_objects_with_id.length() > 0:
self.update_young_objects_with_id()
# mark the nursery as free and fill it with zeroes again
llarena.arena_reset(self.nursery, self.nursery_size, 2)
debug_print("survived (fraction of the size):",
float(scan - beginning) / self.nursery_size)
debug_stop("gc-minor")
#self.debug_check_consistency() # -- quite expensive
else:
# no nursery - this occurs after a full collect, triggered either
# just above or by some previous non-nursery-based allocation.
# Grab a piece of the current space for the nursery.
self.nursery = self.free
self.nursery_top = self.nursery + self.nursery_size
self.free = self.nursery_top
self.nursery_free = self.nursery
# at this point we know that the nursery is empty
self.change_nursery_hash_base()
return self.nursery_free
def change_nursery_hash_base(self):
# The following should be enough to ensure that young objects
# tend to always get a different hash. It also makes sure that
# nursery_hash_base is not a multiple of 4, to avoid collisions
# with the hash of non-young objects.
hash_base = self.nursery_hash_base
hash_base += self.nursery_size - 1
if (hash_base & 3) == 0:
hash_base -= 1
self.nursery_hash_base = intmask(hash_base)
# NB. we can use self.copy() to move objects out of the nursery,
# but only if the object was really in the nursery.
def collect_oldrefs_to_nursery(self):
# Follow the old_objects_pointing_to_young list and move the
# young objects they point to out of the nursery.
count = 0
oldlist = self.old_objects_pointing_to_young
while oldlist.non_empty():
count += 1
obj = oldlist.pop()
hdr = self.header(obj)
hdr.tid |= GCFLAG_NO_YOUNG_PTRS
self.trace_and_drag_out_of_nursery(obj)
debug_print("collect_oldrefs_to_nursery", count)
def collect_roots_in_nursery(self):
# we don't need to trace prebuilt GcStructs during a minor collect:
# if a prebuilt GcStruct contains a pointer to a young object,
# then the write_barrier must have ensured that the prebuilt
# GcStruct is in the list self.old_objects_pointing_to_young.
self.root_walker.walk_roots(
GenerationGC._collect_root_in_nursery, # stack roots
GenerationGC._collect_root_in_nursery, # static in prebuilt non-gc
None) # static in prebuilt gc
def _collect_root_in_nursery(self, root):
obj = root.address[0]
if self.is_in_nursery(obj):
root.address[0] = self.copy(obj)
def collect_young_objects_with_finalizers(self):
# XXX always walk the whole 'objects_with_finalizers' list here
new = self.AddressDeque()
while self.objects_with_finalizers.non_empty():
obj = self.objects_with_finalizers.popleft()
fq_nr = self.objects_with_finalizers.popleft()
if self.is_in_nursery(obj):
obj = self.copy(obj)
new.append(obj)
new.append(fq_nr)
self.objects_with_finalizers.delete()
self.objects_with_finalizers = new
def scan_objects_just_copied_out_of_nursery(self, scan):
while scan < self.free:
curr = scan + self.size_gc_header()
self.trace_and_drag_out_of_nursery(curr)
scan += self.size_gc_header() + self.get_size_incl_hash(curr)
return scan
def trace_and_drag_out_of_nursery(self, obj):
"""obj must not be in the nursery. This copies all the
young objects it references out of the nursery.
"""
self.trace(obj, self._trace_drag_out, None)
def _trace_drag_out(self, pointer, ignored):
if self.is_in_nursery(pointer.address[0]):
pointer.address[0] = self.copy(pointer.address[0])
# The code relies on the fact that no weakref can be an old object
# weakly pointing to a young object. Indeed, weakrefs are immutable
# so they cannot point to an object that was created after them.
def invalidate_young_weakrefs(self):
# walk over the list of objects that contain weakrefs and are in the
# nursery. if the object it references survives then update the
# weakref; otherwise invalidate the weakref
while self.young_objects_with_weakrefs.non_empty():
obj = self.young_objects_with_weakrefs.pop()
if not self.surviving(obj):
continue # weakref itself dies
obj = self.get_forwarding_address(obj)
offset = self.weakpointer_offset(self.get_type_id(obj))
pointing_to = (obj + offset).address[0]
if self.is_in_nursery(pointing_to):
if self.surviving(pointing_to):
(obj + offset).address[0] = self.get_forwarding_address(
pointing_to)
else:
(obj + offset).address[0] = NULL
continue # no need to remember this weakref any longer
self.objects_with_weakrefs.append(obj)
# for the JIT: a minimal description of the write_barrier() method
# (the JIT assumes it is of the shape
# "if addr_struct.int0 & JIT_WB_IF_FLAG: remember_young_pointer()")
JIT_WB_IF_FLAG = GCFLAG_NO_YOUNG_PTRS
def write_barrier(self, addr_struct):
if self.header(addr_struct).tid & GCFLAG_NO_YOUNG_PTRS:
self.remember_young_pointer(addr_struct)
def _setup_wb(self):
DEBUG = self.DEBUG
# The purpose of attaching remember_young_pointer to the instance
# instead of keeping it as a regular method is to help the JIT call it.
# Additionally, it makes the code in write_barrier() marginally smaller
# (which is important because it is inlined *everywhere*).
# For x86, there is also an extra requirement: when the JIT calls
# remember_young_pointer(), it assumes that it will not touch the SSE
# registers, so it does not save and restore them (that's a *hack*!).
def remember_young_pointer(addr_struct):
#llop.debug_print(lltype.Void, "\tremember_young_pointer",
# addr_struct)
if DEBUG:
ll_assert(not self.is_in_nursery(addr_struct),
"nursery object with GCFLAG_NO_YOUNG_PTRS")
#
# What is important in this function is that it *must*
# clear the flag GCFLAG_NO_YOUNG_PTRS from 'addr_struct'
# if the newly written value is in the nursery. It is ok
# if it also clears the flag in some more cases --- it is
# a win to not actually pass the 'newvalue' pointer here.
self.old_objects_pointing_to_young.append(addr_struct)
self.header(addr_struct).tid &= ~GCFLAG_NO_YOUNG_PTRS
self.write_into_last_generation_obj(addr_struct)
remember_young_pointer._dont_inline_ = True
self.remember_young_pointer = remember_young_pointer
def write_into_last_generation_obj(self, addr_struct):
objhdr = self.header(addr_struct)
if objhdr.tid & GCFLAG_NO_HEAP_PTRS:
objhdr.tid &= ~GCFLAG_NO_HEAP_PTRS
self.last_generation_root_objects.append(addr_struct)
write_into_last_generation_obj._always_inline_ = True
def writebarrier_before_copy(self, source_addr, dest_addr,
source_start, dest_start, length):
""" This has the same effect as calling writebarrier over
each element in dest copied from source, except it might reset
one of the following flags a bit too eagerly, which means we'll have
a few more objects to track, but we stay on the safe side.
"""
source_hdr = self.header(source_addr)
dest_hdr = self.header(dest_addr)
if dest_hdr.tid & GCFLAG_NO_YOUNG_PTRS == 0:
return True
# ^^^ a fast path of write-barrier
if source_hdr.tid & GCFLAG_NO_YOUNG_PTRS == 0:
# there might be an object in source that is in nursery
self.old_objects_pointing_to_young.append(dest_addr)
dest_hdr.tid &= ~GCFLAG_NO_YOUNG_PTRS
if dest_hdr.tid & GCFLAG_NO_HEAP_PTRS:
if source_hdr.tid & GCFLAG_NO_HEAP_PTRS == 0:
# ^^^ equivalent of addr from source not being in last
# gen
dest_hdr.tid &= ~GCFLAG_NO_HEAP_PTRS
self.last_generation_root_objects.append(dest_addr)
return True
def is_last_generation(self, obj):
# overridden by HybridGC
return (self.header(obj).tid & GCFLAG_EXTERNAL) != 0
def _compute_id(self, obj):
if self.is_in_nursery(obj):
result = self.young_objects_with_id.get(obj)
if not result:
result = self._next_id()
self.young_objects_with_id.setitem(obj, result)
return result
else:
return SemiSpaceGC._compute_id(self, obj)
def update_young_objects_with_id(self):
self.young_objects_with_id.foreach(self._update_object_id,
self.objects_with_id)
self.young_objects_with_id.clear()
# NB. the clear() also makes the dictionary shrink back to its
# minimal size, which is actually a good idea: a large, mostly-empty
# table is bad for the next call to 'foreach'.
def ids_grow_older(self):
self.young_objects_with_id.foreach(self._id_grow_older, None)
self.young_objects_with_id.clear()
def _id_grow_older(self, obj, id, ignored):
self.objects_with_id.setitem(obj, id)
def _compute_current_nursery_hash(self, obj):
return intmask(llmemory.cast_adr_to_int(obj) + self.nursery_hash_base)
def enumerate_all_roots(self, callback, arg):
self.last_generation_root_objects.foreach(callback, arg)
SemiSpaceGC.enumerate_all_roots(self, callback, arg)
enumerate_all_roots._annspecialcase_ = 'specialize:arg(1)'
def debug_check_object(self, obj):
"""Check the invariants about 'obj' that should be true
between collections."""
SemiSpaceGC.debug_check_object(self, obj)
tid = self.header(obj).tid
if tid & GCFLAG_NO_YOUNG_PTRS:
ll_assert(not self.is_in_nursery(obj),
"nursery object with GCFLAG_NO_YOUNG_PTRS")
self.trace(obj, self._debug_no_nursery_pointer, None)
elif not self.is_in_nursery(obj):
ll_assert(self._d_oopty.contains(obj),
"missing from old_objects_pointing_to_young")
if tid & GCFLAG_NO_HEAP_PTRS:
ll_assert(self.is_last_generation(obj),
"GCFLAG_NO_HEAP_PTRS on non-3rd-generation object")
self.trace(obj, self._debug_no_gen1or2_pointer, None)
elif self.is_last_generation(obj):
ll_assert(self._d_lgro.contains(obj),
"missing from last_generation_root_objects")
def _debug_no_nursery_pointer(self, root, ignored):
ll_assert(not self.is_in_nursery(root.address[0]),
"GCFLAG_NO_YOUNG_PTRS but found a young pointer")
def _debug_no_gen1or2_pointer(self, root, ignored):
target = root.address[0]
ll_assert(not target or self.is_last_generation(target),
"GCFLAG_NO_HEAP_PTRS but found a pointer to gen1or2")
def debug_check_consistency(self):
if self.DEBUG:
self._d_oopty = self.old_objects_pointing_to_young.stack2dict()
self._d_lgro = self.last_generation_root_objects.stack2dict()
SemiSpaceGC.debug_check_consistency(self)
self._d_oopty.delete()
self._d_lgro.delete()
self.old_objects_pointing_to_young.foreach(
self._debug_check_flag_1, None)
self.last_generation_root_objects.foreach(
self._debug_check_flag_2, None)
def _debug_check_flag_1(self, obj, ignored):
ll_assert(not (self.header(obj).tid & GCFLAG_NO_YOUNG_PTRS),
"unexpected GCFLAG_NO_YOUNG_PTRS")
def _debug_check_flag_2(self, obj, ignored):
ll_assert(not (self.header(obj).tid & GCFLAG_NO_HEAP_PTRS),
"unexpected GCFLAG_NO_HEAP_PTRS")
def debug_check_can_copy(self, obj):
if self.is_in_nursery(obj):
pass # it's ok to copy an object out of the nursery
else:
SemiSpaceGC.debug_check_can_copy(self, obj)
# ____________________________________________________________
def nursery_size_from_env():
return env.read_from_env('PYPY_GENERATIONGC_NURSERY')
| oblique-labs/pyVM | rpython/memory/gc/generation.py | Python | mit | 30,342 | 0.000857 |
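The write barrier above is easier to see in isolation: an old object carrying the GCFLAG_NO_YOUNG_PTRS analogue loses the flag and is appended to old_objects_pointing_to_young the first time a young pointer is written into it, so only those objects need tracing at the next minor collection. The toy sketch below illustrates that remembered-set idea only; it is not the RPython implementation:

```python
NO_YOUNG_PTRS = 1  # stand-in for GCFLAG_NO_YOUNG_PTRS

class ToyObject:
    def __init__(self, young):
        self.young = young
        self.flags = 0 if young else NO_YOUNG_PTRS  # flag never set on young objects
        self.fields = {}

old_objects_pointing_to_young = []

def write_barrier(obj, field, value):
    # Record 'obj' (and clear its flag) the first time a young pointer is stored into it.
    if isinstance(value, ToyObject) and value.young and (obj.flags & NO_YOUNG_PTRS):
        obj.flags &= ~NO_YOUNG_PTRS
        old_objects_pointing_to_young.append(obj)
    obj.fields[field] = value

old, young = ToyObject(young=False), ToyObject(young=True)
write_barrier(old, "child", young)
assert old in old_objects_pointing_to_young  # traced at the next minor collect
```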
"""Support for the Roku remote."""
import requests.exceptions
from homeassistant.components import remote
from homeassistant.const import CONF_HOST
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Roku remote platform."""
if not discovery_info:
return
host = discovery_info[CONF_HOST]
async_add_entities([RokuRemote(host)], True)
class RokuRemote(remote.RemoteDevice):
"""Device that sends commands to an Roku."""
def __init__(self, host):
"""Initialize the Roku device."""
from roku import Roku
self.roku = Roku(host)
self._device_info = {}
def update(self):
"""Retrieve latest state."""
try:
self._device_info = self.roku.device_info
except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout):
pass
@property
def name(self):
"""Return the name of the device."""
if self._device_info.user_device_name:
return self._device_info.user_device_name
return f"Roku {self._device_info.serial_num}"
@property
def unique_id(self):
"""Return a unique ID."""
return self._device_info.serial_num
@property
def is_on(self):
"""Return true if device is on."""
return True
@property
def should_poll(self):
"""No polling needed for Roku."""
return False
def send_command(self, command, **kwargs):
"""Send a command to one device."""
for single_command in command:
if not hasattr(self.roku, single_command):
continue
getattr(self.roku, single_command)()
| Cinntax/home-assistant | homeassistant/components/roku/remote.py | Python | apache-2.0 | 1,715 | 0.001166 |
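send_command() above dispatches each command name to a method of the same name on the roku object and silently skips names it does not recognise. A standalone sketch of that getattr dispatch pattern (the FakeRoku class is hypothetical):

```python
class FakeRoku:
    """Hypothetical stand-in exposing a couple of key-press methods."""
    def home(self):
        print("pressed HOME")
    def play(self):
        print("pressed PLAY")

def send_command(device, commands):
    for name in commands:
        if not hasattr(device, name):  # unknown command names are ignored
            continue
        getattr(device, name)()

send_command(FakeRoku(), ["home", "play", "not_a_key"])  # last entry is skipped
```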
# # # WARNING # # #
# This list must also be updated in doc/_templates/autosummary/class.rst if it
# is changed here!
_doc_special_members = ('__contains__', '__getitem__', '__iter__', '__len__',
'__add__', '__sub__', '__mul__', '__div__',
'__neg__', '__hash__')
from ._bunch import Bunch, BunchConst, BunchConstNamed
from .check import (check_fname, check_version, check_random_state,
_check_fname, _check_subject, _check_pandas_installed,
_check_pandas_index_arguments,
_check_event_id, _check_ch_locs, _check_compensation_grade,
_check_if_nan, _is_numeric, _ensure_int, _check_preload,
_validate_type, _check_info_inv,
_check_channels_spatial_filter, _check_one_ch_type,
_check_rank, _check_option, _check_depth, _check_combine,
_path_like, _check_src_normal, _check_stc_units,
_check_pyqt5_version, _check_sphere, _check_time_format,
_check_freesurfer_home, _suggest, _require_version,
_on_missing, _check_on_missing, int_like, _safe_input,
_check_all_same_channel_names, path_like, _ensure_events,
_check_eeglabio_installed, _check_dict_keys,
_check_edflib_installed, _to_rgb, _soft_import,
_import_h5py, _import_h5io_funcs,
_import_pymatreader_funcs)
from .config import (set_config, get_config, get_config_path, set_cache_dir,
set_memmap_min_size, get_subjects_dir, _get_stim_channel,
sys_info, _get_extra_data_path, _get_root_dir,
_get_numpy_libs)
from .docs import (copy_function_doc_to_method_doc, copy_doc, linkcode_resolve,
open_docs, deprecated, fill_doc, deprecated_alias,
copy_base_doc_to_subclass_doc, docdict as _docdict)
from .fetching import _url_to_local_path
from ._logging import (verbose, logger, set_log_level, set_log_file,
use_log_level, catch_logging, warn, filter_out_warnings,
wrapped_stdout, _get_call_line, _record_warnings,
ClosingStringIO, _VerboseDep)
from .misc import (run_subprocess, _pl, _clean_names, pformat, _file_like,
_explain_exception, _get_argvalues, sizeof_fmt,
running_subprocess, _DefaultEventParser,
_assert_no_instances, _resource_path)
from .progressbar import ProgressBar
from ._testing import (run_command_if_main, requires_sklearn,
requires_version, requires_nibabel, requires_mne,
requires_good_network, requires_pandas, requires_h5py,
ArgvSetter, SilenceStdout, has_freesurfer, has_mne_c,
_TempDir, has_nibabel, buggy_mkl_svd,
requires_numpydoc, requires_vtk, requires_freesurfer,
requires_nitime, requires_dipy,
requires_neuromag2ft, requires_pylsl,
assert_object_equal, assert_and_remove_boundary_annot,
_raw_annot, assert_dig_allclose, assert_meg_snr,
assert_snr, assert_stcs_equal, modified_env,
_click_ch_name)
from .numerics import (hashfunc, _compute_row_norms,
_reg_pinv, random_permutation, _reject_data_segments,
compute_corr, _get_inst_data, array_split_idx,
sum_squared, split_list, _gen_events, create_slices,
_time_mask, _freq_mask, grand_average, object_diff,
object_hash, object_size, _apply_scaling_cov,
_undo_scaling_cov, _apply_scaling_array,
_undo_scaling_array, _scaled_array, _replace_md5, _PCA,
_mask_to_onsets_offsets, _array_equal_nan,
_julian_to_cal, _cal_to_julian, _dt_to_julian,
_julian_to_dt, _dt_to_stamp, _stamp_to_dt,
_check_dt, _ReuseCycle, _arange_div, _hashable_ndarray,
_custom_lru_cache)
from .mixin import (SizeMixin, GetEpochsMixin, _prepare_read_metadata,
_prepare_write_metadata, _FakeNoPandas, ShiftTimeMixin)
from .linalg import (_svd_lwork, _repeated_svd, _sym_mat_pow, sqrtm_sym, eigh,
_get_blas_funcs)
from .dataframe import (_set_pandas_dtype, _scale_dataframe_data,
_convert_times, _build_data_frame)
| wmvanvliet/mne-python | mne/utils/__init__.py | Python | bsd-3-clause | 4,697 | 0.002768 |
#!/usr/bin/env python
# coding: utf-8
import re
import bleach
import passlib
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm.exc import NoResultFound
# from flask import redirect, url_for, make_response
from flask.ext.restplus import Resource
from flask.ext.mail import Message
from auths import get_auth_url, get_username
from models import User
from extensions import db, sv
from utils import decode_validate_token
from cutils import ExtraApi
api = ExtraApi(version='1.0',
title='Vira-lata',
description='An authentication microservice.')
api.update_parser_arguments({
'username': {
'location': 'json',
'help': 'The username.',
},
'password': {
'location': 'json',
'help': 'The password.',
},
'new_password': {
'location': 'json',
'help': 'A new password, when changing the current one.',
},
'code': {
'location': 'json',
'help': 'A temporary code used to reset the password.',
},
'email': {
'location': 'json',
'help': 'The email.',
},
'description': {
'location': 'json',
'help': 'The user description.',
},
})
@api.route('/login/external/manual/<string:backend>')
class LoginExtManAPI(Resource):
def get(self, backend):
'''Asks the URL that should be used to login with a specific backend
(like Facebook).'''
return {'redirect': get_auth_url(backend, 'loginextmanapi')}
@api.route('/complete/manual/<string:backend>')
class CompleteLoginExtManAPI(Resource):
def post(self, backend):
'''Completes the login with a specific backend.'''
username = get_username(backend, redirect_uri='/')
return create_tokens(username)
# @api.route('/login/external/automatic/<string:backend>')
# class StartLoginExtAutoAPI(Resource):
# def get(self, backend):
# '''Asks the URL that should be used to login with a specific backend
# (like Facebook).'''
# print('AUTH-GET')
# print(get_auth_url(backend, 'completeloginautoapi'))
# return {'redirect': get_auth_url(backend, 'completeloginautoapi')}
# # return redirect(get_auth_url(backend, 'completeloginautoapi'))
# @api.route('/complete/automatic/<string:backend>')
# class CompleteLoginAutoAPI(Resource):
# def get(self, backend):
# '''Completes the login with a specific backend.'''
# print('COMPLETE-GET')
# username = get_username(backend,
# url_for('completeloginautoapi',
# backend='facebook'))
# tokens = create_tokens(username)
# response = redirect("http://localhost:5001/")
# # import IPython; IPython.embed()
# return response
# # return create_tokens(username)
@api.route('/login/local')
class LoginLocalAPI(Resource):
@api.doc(parser=api.create_parser('username', 'password'))
def post(self):
'''Login using local DB, not a third-party service.'''
args = api.general_parse()
username = args['username']
password = args['password']
try:
if User.verify_user_password(username, password):
return create_tokens(username)
else:
api.abort_with_msg(400, 'Wrong password...', ['password'])
except NoResultFound:
api.abort_with_msg(400,
'Username seems not registered...',
['username'])
@api.route('/renew_micro_token')
class RenewMicroToken(Resource):
@api.doc(parser=api.create_parser('token'))
def post(self):
'''Get a new micro token to be used with the other microservices.'''
args = api.general_parse()
decoded = decode_token(args['token'])
if decoded['type'] != 'main':
# This seems not to be a main token. It must be main for security
# reasons, for only main ones can be invalidated at logout.
# Allowing micro tokens would allow infinite renew by a
# compromised token
api.abort_with_msg(400, 'Must use a main token', ['token'])
token = create_token(decoded['username'])
return {
'microToken': token,
'microTokenValidPeriod': api.app.config[
'MICRO_TOKEN_VALID_PERIOD'],
}
@api.route('/reset_password')
class ResetPassword(Resource):
@api.doc(parser=api.create_parser('username', 'email'))
def post(self):
'''Sends an email to the user with a code to reset password.'''
args = api.general_parse()
user = get_user(args['username'])
check_user_email(user, args['email'])
msg = Message(
api.app.config['MAIL_SUBJECT'],
sender=api.app.config['SENDER_NAME'],
recipients=[user.email])
code = passlib.utils.generate_password(15)
exp = api.app.config['TIME_RESET_PASSWORD']
user.set_temp_password(code, exp)
db.session.commit()
msg.body = (api.app.config['EMAIL_TEMPLATE']
.format(code=code, exp_min=exp/60))
api.mail.send(msg)
return {
'message': 'Check email!',
'exp': exp,
}
@api.doc(parser=api.create_parser('username', 'email', 'code', 'password'))
def put(self):
'''Change the password of a user using a temporary code.'''
args = api.general_parse()
password = args['password']
validate_password(password)
username = args['username']
user = get_user(username)
check_user_email(user, args['email'])
if not user.check_temp_password(args['code']):
api.abort_with_msg(400, 'Invalid code', ['code'])
user.hash_password(password)
# Commit is done by create_tokens
return create_tokens(username)
@api.route('/logout')
class Logout(Resource):
@api.doc(parser=api.create_parser('token'))
def post(self):
'''Invalidates the main token.'''
args = api.general_parse()
decoded = decode_token(args['token'])
# Invalidates all main tokens
get_user(decoded['username']).last_token_exp = 0
db.session.commit()
return {}
@api.route('/users/<string:username>')
class UserAPI(Resource):
@api.doc(parser=api.create_parser('token'))
def get(self, username):
'''Get information about an user.'''
args = api.general_parse()
try:
user = User.get_user(username)
except NoResultFound:
api.abort_with_msg(404, 'User not found', ['username'])
resp = {
'username': user.username,
'description': user.description,
}
# Add email if this is the owner of the account
token = args['token']
if token:
decoded = decode_token(token)
if decoded['username'] == username:
resp['email'] = user.email
return resp
@api.doc(parser=api.create_parser('token', 'description',
'email', 'password', 'new_password'))
def put(self, username):
'''Edit information about an user.'''
args = api.general_parse()
decoded = decode_token(args['token'])
if username == decoded['username']:
user = get_user(decoded['username'])
changed = False
password = args.get('password')
# If is changing password
if password:
new_password = args['new_password']
if user.verify_password(password):
validate_password(new_password, 'new_password')
user.hash_password(new_password)
changed = True
else:
api.abort_with_msg(400, 'Wrong password...', ['password'])
# If is changing description
if args['description']:
user.description = bleach.clean(args['description'],
strip=True)
changed = True
email = args.get('email')
# If is changing email
if email:
validate_email(email)
user.email = email
changed = True
# If some data seems to have changed, commit
if changed:
db.session.commit()
return {
'username': user.username,
'description': user.description,
'email': user.email,
}
else:
api.abort_with_msg(550, 'Editing other user profile...',
['username', 'token'])
@api.route('/users')
class ListUsers(Resource):
def get(self):
'''List registered users.'''
users = db.session.query(User.username).all()
return {
'users': [u[0] for u in users]
}
@api.doc(parser=api.create_parser('username', 'password', 'email'))
def post(self):
'''Register a new user.'''
args = api.general_parse()
username = args['username']
# TODO: case insensitive? Also check this at login time.
# username = username.lower()
if len(username) < 5 or len(username) > 20:
api.abort_with_msg(
400,
'Invalid username size. Must be between 5 and 20 characters.',
['username'])
if not re.match(r'[A-Za-z0-9]{5,}', username):
api.abort_with_msg(400, 'Invalid characters in username...',
['username'])
password = args['password']
validate_password(password)
email = args.get('email')
validate_email(email)
user = User(username=username, email=email)
user.hash_password(password)
db.session.add(user)
try:
db.session.commit()
except IntegrityError:
api.abort_with_msg(
400,
'Error to create user.'
' Maybe username is already registered...',
['username'])
return create_tokens(username)
# def create_token(username, exp_minutes=5):
# '''Returns a token.'''
# return sv.encode({
# 'username': username,
# }, exp_minutes)
def create_tokens(username):
'''Returns new main and micro tokens for the user.'''
main_token = create_token(username, True)
user = get_user(username)
# TODO: Maybe use something faster to decode the token,
# such as skipping the verification checks?
user.last_token_exp = sv.decode(main_token)['exp']
db.session.commit()
return {
'mainToken': main_token,
'microToken': create_token(username),
'microTokenValidPeriod': api.app.config['MICRO_TOKEN_VALID_PERIOD'],
'mainTokenValidPeriod': api.app.config['MAIN_TOKEN_VALID_PERIOD'],
}
def create_token(username, main=False):
'''Returns a token for the passed username.
"main" controls the type of the token.'''
if main:
exp_minutes = api.app.config['MAIN_TOKEN_VALID_PERIOD']
token_type = 'main'
else:
exp_minutes = api.app.config['MICRO_TOKEN_VALID_PERIOD']
token_type = 'micro'
return sv.encode({
'username': username,
'type': token_type,
}, exp_minutes)
def decode_token(token):
decoded = decode_validate_token(token, sv, api)
# Verify if main token is not invalid
if decoded['type'] == 'main':
user = get_user(decoded['username'])
if decoded['exp'] != user.last_token_exp:
api.abort_with_msg(400, 'Invalid main token!', ['token'])
return decoded
def get_user(username):
try:
return User.get_user(username)
except NoResultFound:
# Returning 400 because 404 adds another msg that corrupts the json
api.abort_with_msg(400, 'User not found', ['username'])
def validate_password(password, fieldname='password'):
'''Check if is a valid password. The fieldname parameter is used to
specify the fieldname in the error message.'''
if len(password) < 5:
api.abort_with_msg(
400,
'Invalid password. Needs at least 5 characters.',
[fieldname])
if not re.match(r'[A-Za-z0-9@#$%^&+=]{5,}', password):
api.abort_with_msg(
400,
'Invalid characters in password...',
[fieldname])
def validate_email(email):
'''Check if is a valid email.'''
if not re.match(r'[^@]+@[^@]+\.[^@]+', email):
api.abort_with_msg(400, 'Invalid email', ['email'])
def check_user_email(user, email):
if user.email != email:
api.abort_with_msg(400, 'Wrong email.', ['email'])
| okfn-brasil/viralata | viralata/views.py | Python | agpl-3.0 | 12,901 | 0 |
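The views above distinguish a long-lived "main" token, whose exp is stored on the user so a logout can invalidate every main token at once, from short-lived "micro" tokens issued to other microservices. A minimal sketch of that two-token idea using PyJWT; the secret, lifetimes, and in-memory user record are assumptions, and this is not the project's actual sv signer:

```python
import time
import jwt  # PyJWT, standing in for the project's `sv` signer

SECRET = "change-me"                      # hypothetical signing key
MAIN_MINUTES, MICRO_MINUTES = 60 * 24, 5  # hypothetical lifetimes

def create_token(username, main=False):
    exp = int(time.time()) + 60 * (MAIN_MINUTES if main else MICRO_MINUTES)
    token = jwt.encode({"username": username,
                        "type": "main" if main else "micro",
                        "exp": exp}, SECRET, algorithm="HS256")
    return exp, token

user_record = {"last_token_exp": 0}       # stands in for User.last_token_exp

exp, main_token = create_token("alice", main=True)
user_record["last_token_exp"] = exp       # remembered so logout can zero it out

decoded = jwt.decode(main_token, SECRET, algorithms=["HS256"])
assert decoded["type"] == "main"
assert decoded["exp"] == user_record["last_token_exp"]  # still valid until logout
```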
"""
Author: Junhong Chen
"""
from Bio import SeqIO
import gzip
import sys
import os
pe1 = []
pe2 = []
pname = []
for dirName, subdirList, fileList in os.walk(sys.argv[1]):
for fname in fileList:
tmp = fname.split(".")[0]
tmp = tmp[:len(tmp)-1]
if tmp not in pname:
pname.append(tmp)
pe1.append(dirName+"/"+tmp+"1.fq.gz")
pe2.append(dirName+"/"+tmp+"2.fq.gz")
def concat(name,file_list):
with open(name, 'w') as w_file:
for filen in file_list:
print('working with', filen)
with gzip.open(filen, 'rt') as o_file:
seq_records = SeqIO.parse(o_file, 'fastq')
SeqIO.write(seq_records, w_file, 'fastq')
#print pe1
#print pe2
concat(sys.argv[2]+"-pe1.fq", pe1)
concat(sys.argv[2]+"-pe2.fq", pe2)
| macmanes-lab/MCBS913 | code/Junhong Chen/concatReads.py | Python | mit | 925 | 0.016216 |
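The pairing rule in the directory walk above is easy to miss: the trailing mate number is stripped from the file stem, so sampleA1.fq.gz and sampleA2.fq.gz collapse to the same stem and yield one pe1/pe2 pair. A self-contained check of that rule (file names are hypothetical):

```python
def stem(fname):
    base = fname.split(".")[0]    # 'sampleA1.fq.gz' -> 'sampleA1'
    return base[:len(base) - 1]   # drop the trailing mate number -> 'sampleA'

assert stem("sampleA1.fq.gz") == stem("sampleA2.fq.gz") == "sampleA"
```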
"""
The different agnocomplete classes to be discovered
"""
from copy import copy
from six import with_metaclass
from abc import abstractmethod, ABCMeta
import logging
from django.db.models import Q
from django.core.exceptions import ImproperlyConfigured
from django.utils.encoding import force_text as text
from django.conf import settings
import requests
from .constants import AGNOCOMPLETE_DEFAULT_PAGESIZE
from .constants import AGNOCOMPLETE_MIN_PAGESIZE
from .constants import AGNOCOMPLETE_MAX_PAGESIZE
from .constants import AGNOCOMPLETE_DEFAULT_QUERYSIZE
from .constants import AGNOCOMPLETE_MIN_QUERYSIZE
from .exceptions import AuthenticationRequiredAgnocompleteException
from .exceptions import SkipItem
from .exceptions import ItemNotFound
logger = logging.getLogger(__name__)
class ClassPropertyDescriptor(object):
"""
Toolkit class used to instantiate a class property.
"""
def __init__(self, fget, fset=None):
self.fget = fget
self.fset = fset
def __get__(self, obj, klass=None):
if klass is None:
klass = type(obj)
return self.fget.__get__(obj, klass)()
def __set__(self, obj, value):
if not self.fset:
raise AttributeError("can't set attribute")
type_ = type(obj)
return self.fset.__get__(obj, type_)(value)
def setter(self, func):
"""
Setter: the decorated method will become a class property.
"""
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
self.fset = func
return self
def classproperty(func):
"""
Decorator: the given function will become a class property.
e.g::
class SafeClass(object):
@classproperty
def safe(cls):
return True
class UnsafeClass(object):
@classproperty
def safe(cls):
return False
"""
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
return ClassPropertyDescriptor(func)
def load_settings_sizes():
"""
Load sizes from settings or fallback to the module constants
"""
page_size = AGNOCOMPLETE_DEFAULT_PAGESIZE
settings_page_size = getattr(
settings, 'AGNOCOMPLETE_DEFAULT_PAGESIZE', None)
page_size = settings_page_size or page_size
page_size_min = AGNOCOMPLETE_MIN_PAGESIZE
settings_page_size_min = getattr(
settings, 'AGNOCOMPLETE_MIN_PAGESIZE', None)
page_size_min = settings_page_size_min or page_size_min
page_size_max = AGNOCOMPLETE_MAX_PAGESIZE
settings_page_size_max = getattr(
settings, 'AGNOCOMPLETE_MAX_PAGESIZE', None)
page_size_max = settings_page_size_max or page_size_max
# Query sizes
query_size = AGNOCOMPLETE_DEFAULT_QUERYSIZE
settings_query_size = getattr(
settings, 'AGNOCOMPLETE_DEFAULT_QUERYSIZE', None)
query_size = settings_query_size or query_size
query_size_min = AGNOCOMPLETE_MIN_QUERYSIZE
settings_query_size_min = getattr(
settings, 'AGNOCOMPLETE_MIN_QUERYSIZE', None)
query_size_min = settings_query_size_min or query_size_min
return (
page_size, page_size_min, page_size_max,
query_size, query_size_min,
)
class AgnocompleteBase(with_metaclass(ABCMeta, object)):
"""
Base class for Agnocomplete tools.
"""
# To be overridden by settings, or constructor arguments
page_size = None
page_size_max = None
page_size_min = None
query_size = None
query_size_min = None
url = None
def __init__(self, user=None, page_size=None, url=None):
# Loading the user context
self.user = user
# Load from settings or fallback to constants
settings_page_size, settings_page_size_min, settings_page_size_max, \
query_size, query_size_min = load_settings_sizes()
# Use the class attributes or fallback to settings
self._conf_page_size = self.page_size or settings_page_size
self._conf_page_size_min = self.page_size_min or settings_page_size_min
self._conf_page_size_max = self.page_size_max or settings_page_size_max
# Use instance constructor parameters to eventually override defaults
page_size = page_size or self._conf_page_size
if page_size > self._conf_page_size_max \
or page_size < self._conf_page_size_min:
page_size = self._conf_page_size
# Finally set this as the wanted page_size
self._page_size = page_size
# set query sizes
self._query_size = self.query_size or query_size
self._query_size_min = self.query_size_min or query_size_min
# Eventual custom URL
self._url = url
def set_agnocomplete_field(self, field):
self.agnocomplete_field = field
@classproperty
def slug(cls):
"""
Return the key used in the register, used as a slug for the URL.
You can override this by adding a class property.
"""
return cls.__name__
def get_url(self):
return self._url or self.url
def get_page_size(self):
"""
Return the computed page_size
It takes into account:
* class variables
* constructor arguments,
* settings
* fallback to the module constants if needed.
"""
return self._page_size
def get_query_size(self):
"""
Return the computed default query size
It takes into account:
* class variables
* settings,
* fallback to the module constants
"""
return self._query_size
def get_query_size_min(self):
"""
Return the computed minimum query size
It takes into account:
* class variables
* settings,
* fallback to the module constants
"""
return self._query_size_min
@abstractmethod
def get_choices(self):
pass
@abstractmethod
def items(self, query=None, **kwargs):
pass
@abstractmethod
def selected(self, ids):
"""
Return the values (as a tuple of pairs) for the ids provided
"""
pass
def is_valid_query(self, query):
"""
Return True if the search query is valid.
e.g.:
* not empty,
* not too short,
"""
# No query, no item
if not query:
return False
# Query is too short, no item
if len(query) < self.get_query_size_min():
return False
return True
class AgnocompleteChoices(AgnocompleteBase):
"""
Usage Example::
class AgnocompleteColor(AgnocompleteChoices):
choices = (
('red', 'Red'),
('green', 'Green'),
('blue', 'Blue'),
)
"""
choices = ()
def get_choices(self):
return self.choices
def item(self, current_item):
value, label = current_item
return dict(value=value, label=label)
def items(self, query=None, **kwargs):
if not self.is_valid_query(query):
return []
result = copy(self.choices)
if query:
result = filter(lambda x: x[1].lower().startswith(query), result)
result = tuple(result)
# Slicing before rendering
result = result[:self.get_page_size()]
return [self.item(item) for item in result]
def selected(self, ids):
"""
Return the selected options as a list of tuples
"""
result = copy(self.choices)
result = filter(lambda x: x[0] in ids, result)
# result = ((item, item) for item in result)
return list(result)
class AgnocompleteModelBase(with_metaclass(ABCMeta, AgnocompleteBase)):
model = None
requires_authentication = False
@abstractmethod
def get_queryset(self):
pass
@property
def fields(self):
raise NotImplementedError(
"Integrator: You must have a `fields` property")
def get_model(self):
"""
Return the class Model used by this Agnocomplete
"""
if hasattr(self, 'model') and self.model:
return self.model
# Give me a "none" queryset
try:
none = self.get_queryset().none()
return none.model
except Exception:
raise ImproperlyConfigured(
"Integrator: Unable to determine the model with this queryset."
" Please add a `model` property")
def get_model_queryset(self):
"""
Return an unfiltered complete model queryset.
To be used for the select Input initialization
"""
return self.get_model().objects.all()
get_choices = get_model_queryset
def get_field_name(self):
"""
Return the model field name to be used as a value, or 'pk' if unset
"""
if hasattr(self, 'agnocomplete_field') and \
hasattr(self.agnocomplete_field, 'to_field_name'):
return self.agnocomplete_field.to_field_name or 'pk'
return 'pk'
class AgnocompleteModel(AgnocompleteModelBase):
"""
Example::
class AgnocompletePeople(AgnocompleteModel):
model = People
fields = ['first_name', 'last_name']
class AgnocompletePersonQueryset(AgnocompleteModel):
fields = ['first_name', 'last_name']
def get_queryset(self):
return People.objects.filter(email__contains='example.com')
"""
def __init__(self, *args, **kwargs):
super(AgnocompleteModel, self).__init__(*args, **kwargs)
self.__final_queryset = None
def _construct_qs_filter(self, field_name):
"""
Using a field name optionally prefixed by `^`, `=`, `@`, return a
case-insensitive filter condition name usable as a queryset `filter()`
keyword argument.
"""
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
else:
return "%s__icontains" % field_name
def get_queryset(self):
if not hasattr(self, 'model') or not self.model:
raise NotImplementedError(
"Integrator: You must either have a `model` property "
"or a `get_queryset()` method"
)
return self.model.objects.all()
def get_queryset_filters(self, query):
"""
Return the filtered queryset
"""
conditions = Q()
for field_name in self.fields:
conditions |= Q(**{
self._construct_qs_filter(field_name): query
})
return conditions
def paginate(self, qs):
"""
Paginate a given Queryset
"""
return qs[:self.get_page_size()]
@property
def _final_queryset(self):
"""
Paginated final queryset
"""
if self.__final_queryset is None:
return None
return self.paginate(self.__final_queryset)
# final_queryset alias
final_queryset = _final_queryset
@property
def final_raw_queryset(self):
return self.__final_queryset
def serialize(self, queryset):
result = []
for item in self.paginate(queryset):
result.append(self.item(item))
return result
def item(self, current_item):
"""
Return the current item.
@param current_item: Current item
@type param: django.models
@return: Value and label of the current item
@rtype : dict
"""
return {
'value': text(getattr(current_item, self.get_field_name())),
'label': self.label(current_item)
}
def label(self, current_item):
"""
Return a label for the current item.
@param current_item: Current item
@type param: django.models
@return: Label of the current item
@rtype : text
"""
return text(current_item)
def build_extra_filtered_queryset(self, queryset, **kwargs):
"""
Apply eventual queryset filters, based on the optional extra arguments
passed to the query.
By default, this method returns the queryset "verbatim". You can
override or overwrite this to perform custom filter on this QS.
* `queryset`: it's the final queryset build using the search terms.
* `kwargs`: this dictionary contains the extra arguments passed to the
agnocomplete class.
"""
# By default, we're ignoring these arguments and return verbatim QS
return queryset
def build_filtered_queryset(self, query, **kwargs):
"""
Build and return the fully-filtered queryset
"""
# Take the basic queryset
qs = self.get_queryset()
# filter it via the query conditions
qs = qs.filter(self.get_queryset_filters(query))
return self.build_extra_filtered_queryset(qs, **kwargs)
def items(self, query=None, **kwargs):
"""
Return the items to be sent to the client
"""
# Cut this, we don't need no empty query
if not query:
self.__final_queryset = self.get_model().objects.none()
return self.serialize(self.__final_queryset)
# Query is too short, no item
if len(query) < self.get_query_size_min():
self.__final_queryset = self.get_model().objects.none()
return self.serialize(self.__final_queryset)
if self.requires_authentication:
if not self.user:
raise AuthenticationRequiredAgnocompleteException(
"Authentication is required to use this autocomplete"
)
if not self.user.is_authenticated:
raise AuthenticationRequiredAgnocompleteException(
"Authentication is required to use this autocomplete"
)
qs = self.build_filtered_queryset(query, **kwargs)
# The final queryset is the paginated queryset
self.__final_queryset = qs
return self.serialize(qs)
def selected(self, ids):
"""
Return the selected options as a list of tuples
"""
# Cleanup the ID list
if self.get_field_name() == 'pk':
ids = filter(lambda x: "{}".format(x).isdigit(), copy(ids))
else:
ids = filter(lambda x: len("{}".format(x)) > 0, copy(ids))
# Prepare the QS
# TODO: not contextually filtered, check if it's possible at some point
qs = self.get_model_queryset().filter(
**{'{}__in'.format(self.get_field_name()): ids})
result = []
for item in qs:
item_repr = self.item(item)
result.append(
(item_repr['value'], item_repr['label'])
)
return result
class AgnocompleteUrlProxy(with_metaclass(ABCMeta, AgnocompleteBase)):
"""
This class serves as a proxy between your application and a 3rd party
URL (typically a REST HTTP API).
"""
value_key = 'value'
label_key = 'label'
method = 'get'
data_key = 'data'
def get_search_url(self):
raise NotImplementedError(
"Integrator: You must implement a `get_search_url` method"
" or have a `search_url` property in this class.")
@property
def search_url(self):
return self.get_search_url()
def get_item_url(self, pk):
raise NotImplementedError(
"Integrator: You must implement a `get_item_url` method")
def get_choices(self):
return []
def get_http_method_arg_name(self):
"""
Return the HTTP function to call and the params/data argument name
"""
if self.method == 'get':
arg_name = 'params'
else:
arg_name = 'data'
return getattr(requests, self.method), arg_name
def http_call(self, url=None, **kwargs):
"""
Call the target URL via HTTP and return the JSON result
"""
if not url:
url = self.search_url
http_func, arg_name = self.get_http_method_arg_name()
# Build the argument dictionary to pass in the http function
_kwargs = {
arg_name: kwargs,
}
# The actual HTTP call
response = http_func(
url=url.format(**kwargs),
headers=self.get_http_headers(),
**_kwargs
)
# Error handling
if response.status_code != 200:
logger.warning('Invalid Request for `%s`', response.url)
# Raising a "requests" exception
response.raise_for_status()
return response.json()
def item(self, current_item):
return dict(
value=text(current_item[self.value_key]),
label=text(current_item[self.label_key]),
)
def get_http_headers(self):
"""
Return a dictionary that will be added to the HTTP request to the API
You can overwrite this method, that return an empty dict by default.
"""
return {}
def get_http_result(self, http_result):
"""
Return an iterable with all the result items in.
You can override/overwrite this method to adapt it to the payload
returned by the 3rd party API.
"""
return http_result.get(self.data_key, [])
def get_http_call_kwargs(self, query, **kwargs):
"""
Return the HTTP query arguments.
You can override this method to pass further arguments corresponding
to your search_url.
"""
return {'q': query}
def items(self, query=None, **kwargs):
if not self.is_valid_query(query):
return []
# Call to search URL
http_result = self.http_call(
**self.get_http_call_kwargs(query, **kwargs)
)
# In case of error, on the API side, the error is raised and handled
# in the view.
http_result = self.get_http_result(http_result)
result = []
for item in http_result:
# Eventual result reshaping.
try:
result.append(self.item(item))
except SkipItem:
continue
return result
def selected(self, ids):
data = []
# Filter out "falsy IDs" (empty string, None, 0...)
ids = filter(lambda x: x, ids)
for _id in ids:
if _id:
# Call to the item URL
result = self.http_call(url=self.get_item_url(pk=_id))
if self.data_key in result and len(result[self.data_key]):
for item in result[self.data_key]:
data.append(
(
text(item[self.value_key]),
text(item[self.label_key])
)
)
return data
def validate(self, value):
"""
From a value available on the remote server, the method returns the
complete item matching the value.
        In case the value is not available on the server side or filtered
        through :meth:`item`, :class:`agnocomplete.exceptions.ItemNotFound`
is raised.
"""
url = self.get_item_url(value)
try:
data = self.http_call(url=url)
except requests.HTTPError:
raise ItemNotFound()
data = self.get_http_result(data)
try:
self.item(data)
except SkipItem:
raise ItemNotFound()
return value
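# --- Illustrative sketch (added for clarity; not part of the upstream module).
# A concrete proxy only has to point at the remote endpoints; the HTTP call,
# items(), selected() and validate() above are inherited as-is. The class
# name, the example.org URLs and the "term" parameter are made up.
class ExampleRemoteAutocomplete(AgnocompleteUrlProxy):
    value_key = 'id'
    label_key = 'login'
    def get_search_url(self):
        return 'https://example.org/api/users/search'
    def get_item_url(self, pk):
        return 'https://example.org/api/users/{}'.format(pk)
    def get_http_call_kwargs(self, query, **kwargs):
        # This hypothetical API expects "term" instead of the default "q".
        return {'term': query}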
|
novafloss/django-agnocomplete
|
agnocomplete/core.py
|
Python
|
mit
| 20,047 | 0 |
def revertdigits( item ):
return (item%10)*100 + (int(item/10)%10)*10 + int(item/100)
numlist = [314, 315, 642, 246, 129, 999]
numlist.sort( key=revertdigits )
print( numlist )
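# Worked example (added): revertdigits(314) == 413 and revertdigits(642) == 246,
# so the sort key is each number read right-to-left; the print above therefore
# shows [642, 314, 315, 246, 129, 999].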
|
paulmcquad/Python
|
11 - Lists/sort revertdigits.py
|
Python
|
gpl-3.0
| 181 | 0.055249 |
#!/usr/bin/python3
#
# Copyright (c) 2014-2022 The Voxie Authors
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import numpy as np
import voxie
args = voxie.parser.parse_args()
context = voxie.VoxieContext(args)
instance = context.createInstance()
if args.voxie_action != 'RunFilter':
raise Exception('Invalid operation: ' + args.voxie_action)
with context.makeObject(context.bus, context.busName, args.voxie_operation, ['de.uni_stuttgart.Voxie.ExternalOperationRunFilter']).ClaimOperationAndCatch() as op:
filterPath = op.FilterObject
pars = op.Parameters
# print (pars)
properties = pars[filterPath._objectPath]['Properties'].getValue('a{sv}')
# print (properties)
inputPath = properties['de.uni_stuttgart.Voxie.Input'].getValue('o')
inputDataPath = pars[inputPath]['Data'].getValue('o')
inputData = context.makeObject(context.bus, context.busName, inputDataPath, [
'de.uni_stuttgart.Voxie.VolumeDataVoxel'])
outputPath = properties['de.uni_stuttgart.Voxie.Output'].getValue('o')
factor = properties['de.uni_stuttgart.Voxie.Filter.Downsample.Factor'].getValue(
'x')
origin = inputData.VolumeOrigin
sizeOrig = inputData.ArrayShape
spacingOrig = np.array(inputData.GridSpacing)
# print (origin, sizeOrig, spacingOrig)
# TODO: Don't cut away data at the end
# size = ((int(sizeOrig[0]) + factor - 1) // factor,
# (int(sizeOrig[1]) + factor - 1) // factor,
# (int(sizeOrig[2]) + factor - 1) // factor)
size = (int(sizeOrig[0]) // factor,
int(sizeOrig[1]) // factor,
int(sizeOrig[2]) // factor)
spacing = spacingOrig * factor
with inputData.GetBufferReadonly() as bufferOld:
arrayOld = bufferOld.array
arrayOld2 = arrayOld[:size[0] * factor,
:size[1] * factor, :size[2] * factor]
arrayOld3 = arrayOld2.view()
arrayOld3.shape = size[0], factor, size[1], factor, size[2], factor
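        # (Added note) The view above arranges the cropped volume as
        # (sx, factor, sy, factor, sz, factor), so averaging over the three
        # `factor` axes (done slice-by-slice in the loop below) yields one
        # output voxel per factor**3 input voxels.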
dataType = ('float', 32, 'native') # TODO?
with instance.CreateVolumeDataVoxel(size, dataType, origin, spacing) as data:
with data.CreateUpdate() as update, data.GetBufferWritable(update) as buffer:
buffer[:] = 0
zCount = arrayOld3.shape[4]
for z in range(zCount):
buffer[:, :, z] = np.mean(
arrayOld3[:, :, :, :, z, :], axis=(1, 3, 4))
op.SetProgress((z + 1) / zCount)
version = update.Finish()
result = {}
result[outputPath] = {
'Data': voxie.Variant('o', data._objectPath),
'DataVersion': voxie.Variant('o', version._objectPath),
}
op.Finish(result)
|
voxie-viewer/voxie
|
filters/downsample.py
|
Python
|
mit
| 3,821 | 0.001309 |
'''
Created on Jun 6, 2014
@author: rtermondt
'''
from django.conf import settings
def global_settings(request):
invitation_system_setting = getattr(settings, 'INVITATION_SYSTEM', None)
if invitation_system_setting == True:
invite_system = True
else:
invite_system = False
return {
'INVITATION_SYSTEM': invite_system
}
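# Illustrative wiring sketch (added; not part of the upstream file): a context
# processor only takes effect once its dotted path is listed in the project
# settings, e.g. for Django versions of that era something like
#
#   TEMPLATE_CONTEXT_PROCESSORS += (
#       'inithub.context_processors.global_settings',
#   )
#
# after which templates can read {{ INVITATION_SYSTEM }} directly.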
|
richtermondt/inithub-web
|
inithub/inithub/context_processors.py
|
Python
|
mit
| 376 | 0.013298 |
'''Michael Lange <klappnase (at) freakmail (dot) de>
The ToolTip class provides a flexible tooltip widget for Tkinter; it is based on IDLE's ToolTip
module which unfortunately seems to be broken (at least the version I saw).
INITIALIZATION OPTIONS:
anchor : where the text should be positioned inside the widget, must be on of "n", "s", "e", "w", "nw" and so on;
default is "center"
bd : borderwidth of the widget; default is 1 (NOTE: don't use "borderwidth" here)
bg : background color to use for the widget; default is "lightyellow" (NOTE: don't use "background")
delay : time in ms that it takes for the widget to appear on the screen when the mouse pointer has
entered the parent widget; default is 800
fg : foreground (i.e. text) color to use; default is "black" (NOTE: don't use "foreground")
follow_mouse : if set to 1 the tooltip will follow the mouse pointer instead of being displayed
outside of the parent widget; this may be useful if you want to use tooltips for
large widgets like listboxes or canvases; default is 0
font : font to use for the widget; default is system specific
justify : how multiple lines of text will be aligned, must be "left", "right" or "center"; default is "left"
padx : extra space added to the left and right within the widget; default is 4
pady : extra space above and below the text; default is 2
relief : one of "flat", "ridge", "groove", "raised", "sunken" or "solid"; default is "solid"
state : must be "normal" or "disabled"; if set to "disabled" the tooltip will not appear; default is "normal"
text : the text that is displayed inside the widget
textvariable : if set to an instance of Tkinter.StringVar() the variable's value will be used as text for the widget
width : width of the widget; the default is 0, which means that "wraplength" will be used to limit the widgets width
wraplength : limits the number of characters in each line; default is 150
WIDGET METHODS:
configure(**opts) : change one or more of the widget's options as described above; the changes will take effect the
next time the tooltip shows up; NOTE: follow_mouse cannot be changed after widget initialization
Other widget methods that might be useful if you want to subclass ToolTip:
enter() : callback when the mouse pointer enters the parent widget
leave() : called when the mouse pointer leaves the parent widget
motion() : is called when the mouse pointer moves inside the parent widget if follow_mouse is set to 1 and the
tooltip has shown up to continually update the coordinates of the tooltip window
coords() : calculates the screen coordinates of the tooltip window
create_contents() : creates the contents of the tooltip window (by default a Tkinter.Label)
'''
# Ideas gleaned from PySol
import Tkinter
class ToolTip:
def __init__(self, master, text='Your text here', delay=800, **opts):
self.master = master
self._opts = {'anchor':'center', 'bd':1, 'bg':'lightyellow', 'delay':delay, 'fg':'black',\
'follow_mouse':0, 'font':None, 'justify':'left', 'padx':4, 'pady':2,\
'relief':'solid', 'state':'normal', 'text':text, 'textvariable':None,\
'width':0, 'wraplength':150}
self.configure(**opts)
self._tipwindow = None
self._id = None
self._id1 = self.master.bind("<Enter>", self.enter, '+')
self._id2 = self.master.bind("<Leave>", self.leave, '+')
self._id3 = self.master.bind("<ButtonPress>", self.leave, '+')
self._follow_mouse = 0
if self._opts['follow_mouse']:
self._id4 = self.master.bind("<Motion>", self.motion, '+')
self._follow_mouse = 1
def configure(self, **opts):
for key in opts:
if self._opts.has_key(key):
self._opts[key] = opts[key]
else:
                raise KeyError('Unknown option: "%s"' % key)
##----these methods handle the callbacks on "<Enter>", "<Leave>" and "<Motion>"---------------##
##----events on the parent widget; override them if you want to change the widget's behavior--##
def enter(self, event=None):
self._schedule()
def leave(self, event=None):
self._unschedule()
self._hide()
def motion(self, event=None):
if self._tipwindow and self._follow_mouse:
x, y = self.coords()
self._tipwindow.wm_geometry("+%d+%d" % (x, y))
##------the methods that do the work:---------------------------------------------------------##
def _schedule(self):
self._unschedule()
if self._opts['state'] == 'disabled':
return
self._id = self.master.after(self._opts['delay'], self._show)
def _unschedule(self):
id = self._id
self._id = None
if id:
self.master.after_cancel(id)
def _show(self):
if self._opts['state'] == 'disabled':
self._unschedule()
return
if not self._tipwindow:
self._tipwindow = tw = Tkinter.Toplevel(self.master)
# hide the window until we know the geometry
tw.withdraw()
tw.wm_overrideredirect(1)
if tw.tk.call("tk", "windowingsystem") == 'aqua':
tw.tk.call("::tk::unsupported::MacWindowStyle", "style", tw._w, "help", "none")
self.create_contents()
tw.update_idletasks()
x, y = self.coords()
tw.wm_geometry("+%d+%d" % (x, y))
tw.deiconify()
def _hide(self):
tw = self._tipwindow
self._tipwindow = None
if tw:
tw.destroy()
##----these methods might be overridden in derived classes:----------------------------------##
def coords(self):
# The tip window must be completely outside the master widget;
# otherwise when the mouse enters the tip window we get
# a leave event and it disappears, and then we get an enter
# event and it reappears, and so on forever :-(
# or we take care that the mouse pointer is always outside the tipwindow :-)
tw = self._tipwindow
twx, twy = tw.winfo_reqwidth(), tw.winfo_reqheight()
w, h = tw.winfo_screenwidth(), tw.winfo_screenheight()
# calculate the y coordinate:
if self._follow_mouse:
y = tw.winfo_pointery() + 20
# make sure the tipwindow is never outside the screen:
if y + twy > h:
y = y - twy - 30
else:
y = self.master.winfo_rooty() + self.master.winfo_height() + 3
if y + twy > h:
y = self.master.winfo_rooty() - twy - 3
# we can use the same x coord in both cases:
x = tw.winfo_pointerx() - twx / 2
if x < 0:
x = 0
elif x + twx > w:
x = w - twx
return x, y
def create_contents(self):
opts = self._opts.copy()
for opt in ('delay', 'follow_mouse', 'state'):
del opts[opt]
label = Tkinter.Label(self._tipwindow, **opts)
label.pack()
##---------demo code-----------------------------------##
def demo():
root = Tkinter.Tk(className='ToolTip-demo')
l = Tkinter.Listbox(root)
l.insert('end', "I'm a listbox")
l.pack(side='top')
t1 = ToolTip(l, follow_mouse=1, text="I'm a tooltip with follow_mouse set to 1, so I won't be placed outside my parent")
b = Tkinter.Button(root, text='Quit', command=root.quit)
b.pack(side='bottom')
t2 = ToolTip(b, text='Enough of this')
root.mainloop()
if __name__ == '__main__':
demo()
|
regionbibliotekhalland/digitalasagor
|
tooltip.py
|
Python
|
gpl-3.0
| 8,114 | 0.008134 |
#!/usr/bin/env python
# angelus.py - John Burnett & Will Johnson (c)2015
#
# Angelus does the following:
# -FFT analysis
# -Partial tracking
# -Modal analysis
# -Resynthesis
#
# Angelus will eventually do the following:
# -FFT Analysis -> Notation
# -Modal Analysis -> 3D mesh (and reverse?)
from FFT_Analyzer import FFT_Analyzer
from writeRObU import writeRObU
from Synthesizer import Synthesizer
import sys
def main():
fname = sys.argv[1]
title = parse_fname(fname)
infile = "../audio/" + fname
outfile = "../build/" + title + ".ro"
analysis = FFT_Analyzer(infile)
analysis.perform_analysis()
analysis.stft(20)
analysis.get_modal_data(30)
out = writeRObU(outfile, analysis.modal_model)
out.write()
synth = Synthesizer(analysis, title)
synth.write_wav()
#synth.write_residual()
def parse_fname(fname):
s = ""
for l in fname:
if l != '.': s += l
else: return s
main()
|
johncburnett/Angelus
|
src/angelus.py
|
Python
|
gpl-2.0
| 953 | 0.008395 |
# -*- coding: utf-8 -*-
import common_sale_contract
import test_sale_contract
|
tvtsoft/odoo8
|
addons/sale_contract/tests/__init__.py
|
Python
|
agpl-3.0
| 78 | 0 |
import unittest
from streamlink.stream import StreamIOIterWrapper
class TestPluginStream(unittest.TestCase):
def test_iter(self):
def generator():
yield b"1" * 8192
yield b"2" * 4096
yield b"3" * 2048
fd = StreamIOIterWrapper(generator())
self.assertEqual(fd.read(4096), b"1" * 4096)
self.assertEqual(fd.read(2048), b"1" * 2048)
self.assertEqual(fd.read(2048), b"1" * 2048)
self.assertEqual(fd.read(1), b"2")
self.assertEqual(fd.read(4095), b"2" * 4095)
self.assertEqual(fd.read(1536), b"3" * 1536)
self.assertEqual(fd.read(), b"3" * 512)
|
beardypig/streamlink
|
tests/streams/test_stream_wrappers.py
|
Python
|
bsd-2-clause
| 655 | 0 |
from django.conf import settings
from django.conf.urls.defaults import *
from models import *
from django.views.generic import date_based, list_detail
from django.contrib.auth.decorators import login_required
# Number of random images from the gallery to display.
SAMPLE_SIZE = ":%s" % getattr(settings, 'GALLERY_SAMPLE_SIZE', 8)
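# (Added note) The leading ":" makes this a slice spec string (e.g. ":8"),
# presumably consumed by Django's |slice template filter in the gallery
# templates; that is an inference from the format, not from the templates.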
# galleries
gallery_args = {'date_field': 'date_added', 'allow_empty': True, 'queryset': Gallery.objects.filter(is_public=True), 'extra_context':{'sample_size':SAMPLE_SIZE}}
urlpatterns = patterns('django.views.generic.date_based',
url(r'^gallery/(?P<year>\d{4})/(?P<month>[a-z]{3})/(?P<day>\w{1,2})/(?P<slug>[\-\d\w]+)/$', login_required(date_based.object_detail), {'date_field': 'date_added', 'slug_field': 'title_slug', 'queryset': Gallery.objects.filter(is_public=True), 'extra_context':{'sample_size':SAMPLE_SIZE}}, name='pl-gallery-detail'),
url(r'^gallery/(?P<year>\d{4})/(?P<month>[a-z]{3})/(?P<day>\w{1,2})/$', login_required(date_based.archive_day), gallery_args, name='pl-gallery-archive-day'),
url(r'^gallery/(?P<year>\d{4})/(?P<month>[a-z]{3})/$', login_required(date_based.archive_month), gallery_args, name='pl-gallery-archive-month'),
url(r'^gallery/(?P<year>\d{4})/$', login_required(date_based.archive_year), gallery_args, name='pl-gallery-archive-year'),
url(r'^gallery/?$', login_required(date_based.archive_index), gallery_args, name='pl-gallery-archive'),
)
urlpatterns += patterns('django.views.generic.list_detail',
url(r'^gallery/(?P<slug>[\-\d\w]+)/$', login_required(list_detail.object_detail), {'slug_field': 'title_slug', 'queryset': Gallery.objects.filter(is_public=True), 'extra_context':{'sample_size':SAMPLE_SIZE}}, name='pl-gallery'),
url(r'^gallery/page/(?P<page>[0-9]+)/$', login_required(list_detail.object_list), {'queryset': Gallery.objects.filter(is_public=True), 'allow_empty': True, 'paginate_by': 8, 'extra_context':{'sample_size':SAMPLE_SIZE}}, name='pl-gallery-list'),
)
# photographs
photo_args = {'date_field': 'date_added', 'allow_empty': True, 'queryset': Photo.objects.filter(is_public=True)}
urlpatterns += patterns('django.views.generic.date_based',
url(r'^photo/(?P<year>\d{4})/(?P<month>[a-z]{3})/(?P<day>\w{1,2})/(?P<slug>[\-\d\w]+)/$', login_required(date_based.object_detail), {'date_field': 'date_added', 'slug_field': 'title_slug', 'queryset': Photo.objects.filter(is_public=True)}, name='pl-photo-detail'),
url(r'^photo/(?P<year>\d{4})/(?P<month>[a-z]{3})/(?P<day>\w{1,2})/$', login_required(date_based.archive_day), photo_args, name='pl-photo-archive-day'),
url(r'^photo/(?P<year>\d{4})/(?P<month>[a-z]{3})/$', login_required(date_based.archive_month), photo_args, name='pl-photo-archive-month'),
url(r'^photo/(?P<year>\d{4})/$', login_required(date_based.archive_year), photo_args, name='pl-photo-archive-year'),
url(r'^photo/$', login_required(date_based.archive_index), photo_args, name='pl-photo-archive'),
)
urlpatterns += patterns('django.views.generic.list_detail',
url(r'^photo/(?P<slug>[\-\d\w]+)/$', login_required(list_detail.object_detail), {'slug_field': 'title_slug', 'queryset': Photo.objects.filter(is_public=True)}, name='pl-photo'),
url(r'^photo/page/(?P<page>[0-9]+)/$', login_required(list_detail.object_list), {'queryset': Photo.objects.filter(is_public=True), 'allow_empty': True, 'paginate_by': 20}, name='pl-photo-list'),
)
|
garyp/djwed
|
photologue/urls.py
|
Python
|
mit
| 3,400 | 0.008529 |
#!/usr/bin/python -*- coding:utf-8 -*-
__Author__ = "Riyaz Ahmad Bhat"
__Email__ = "riyaz.ah.bhat@gmail.com"
import re
from collections import namedtuple
from sanity_checker import SanityChecker
class DefaultList(list):
"""Equivalent of Default dictionaries for Indexing Errors."""
def __init__(self, default=None):
self.default = default
list.__init__(self)
def __getitem__(self, index):
try: return list.__getitem__(self, index)
except IndexError: return self.default
class SSFReader (SanityChecker):
def __init__ (self, sentence):
super(SSFReader, self).__init__()
self.id_ = int()
self.nodeList = list()
self.chunk_word = dict()
self.sentence = sentence
self.modifierModified = dict()
self.node = namedtuple('node',
('id', 'head', 'children', 'pos', 'poslcat', 'af', 'vpos', 'name','drel','parent',
'chunkId', 'chunkType', 'mtype', 'troot', 'coref', 'stype','voicetype', 'posn'))
self.features = namedtuple('features',
('lemma','cat','gen','num','per','case','vib','tam'))
def getAnnotations (self):
children_ = list()
for line in self.sentence.split("\n"):
nodeInfo = line.decode("utf-8").split("\t")
if nodeInfo[0].isdigit():
assert len(nodeInfo) == 4 # no need to process trash! FIXME
attributeValue_pairs = self.FSPairs(nodeInfo[3][4:-1])
attributes = self.updateFSValues(attributeValue_pairs)
h = attributes.get #NOTE h -> head node attributes
elif nodeInfo[0].replace(".",'',1).isdigit():
assert (len(nodeInfo) == 4) and (nodeInfo[1] and nodeInfo[2] != '') # FIXME
self.id_ += 1
pos_ = nodeInfo[2].encode("utf-8").decode("ascii",'ignore').encode("ascii")
wordForm_ = nodeInfo[1]
attributeValue_pairs = self.FSPairs(nodeInfo[3][4:-1])
if attributeValue_pairs['name'] == h('head_'):# NOTE head word of the chunk
self.nodeList.append(self.node(str(self.id_),wordForm_,children_,pos_,h('poslcat_'),
self.features(h('lemma_') if h('lemma_') else wordForm_ ,h('cat_'),h('gen_'), h('num_'),
h('per_'),h('case_'),h('vib_'),h('tam_')),h('vpos_'),h('head_'),h('drel_'),
h('parent_'),h('chunkId_'),":".join(('head',h('chunkId_'))),h('mtype_'),h('troot_'),
h('coref_'),h('stype_'),h('voicetype_'),h('posn_')))
self.modifierModified[h('chunkId_')] = h('parent_')
self.chunk_word[h('chunkId_')] = h('head_')
else:
attributes = self.updateFSValues(attributeValue_pairs)
c = attributes.get #NOTE c -> child node attributes
children_.append(self.node(str(self.id_),wordForm_,[],pos_,c('poslcat_'),self.features(c('lemma_') \
if c('lemma_') else wordForm_ ,c('cat_'),c('gen_'),c('num_'),c('per_'),c('case_'),c('vib_'),
c('tam_')),c('vpos_'),c('name_'),"_","_",None,":".join(('child',h('chunkId_'))),c('mtype_'),
c('troot_'),c('coref_'),None, None, c('posn_')))
else: children_ = list()
return self
def FSPairs (self, FS) :
feats = dict()
for feat in FS.split():
if "=" not in feat:continue
feat = re.sub("af='+","af='",feat.replace("dmrel=",'drel='))
assert len(feat.split("=")) == 2
attribute,value = feat.split("=")
feats[attribute] = value
return feats
def morphFeatures (self, AF):
"LEMMA,CAT,GEN,NUM,PER,CASE,VIB,TAM"
assert len(AF[:-1].split(",")) == 8 # no need to process trash! FIXME
lemma_,cat_,gen_,num_,per_,case_,vib_,tam_ = AF.split(",")
if len(lemma_) > 1: lemma_ = lemma_.strip("'")
return lemma_.strip("'"),cat_,gen_,num_,per_,case_,vib_,tam_.strip("'")
def updateFSValues (self, attributeValue_pairs):
attributes = dict(zip(['head_','poslcat_','af_','vpos_','name_','drel_','parent_','mtype_','troot_','chunkId_',\
'coref_','stype_','voicetype_','posn_'], [None] * 14))
attributes.update(dict(zip(['lemma_','cat_','gen_','num_','per_','case_','vib_','tam_'], [''] * 8)))
for key,value in attributeValue_pairs.items():
if key == "af":
attributes['lemma_'],attributes['cat_'],attributes['gen_'],attributes['num_'],\
attributes['per_'],attributes['case_'],attributes['vib_'],attributes['tam_'] = \
self.morphFeatures (value)
elif key == "drel":
assert len(value.split(":")) == 2 # no need to process trash! FIXME
attributes['drel_'], attributes['parent_'] = re.sub("'|\"",'',value).split(":")
assert attributes['drel_'] and attributes['parent_'] != "" # no need to process trash! FIXME
else:
variable = str(key) + "_"
if variable == "name_": attributes['chunkId_'] = re.sub("'|\"",'',value)
attributes[variable] = re.sub("'|\"",'',value)
return attributes
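if __name__ == "__main__":
    # Illustrative usage sketch (added; not part of the upstream reader):
    # parse one SSF-formatted sentence block and print the dependency triples.
    # "sample.ssf" is a hypothetical file holding a single sentence block.
    sentence = open("sample.ssf").read()
    reader = SSFReader(sentence).getAnnotations()
    for node in reader.nodeList:
        print node.id, node.head, node.drel, node.parent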
|
darshan95/Shift-Reduce-Chunk-Expander
|
src/ssf_reader.py
|
Python
|
mit
| 4,608 | 0.052951 |
import nspkg1.foo
|
ahb0327/intellij-community
|
python/testData/completion/importQualifiedNamespacePackage/a.after.py
|
Python
|
apache-2.0
| 18 | 0 |
'''
Tree from:
http://www.quesucede.com/page/show/id/python-3-tree-implementation
'''
from urllib.parse import urlparse
import os
(_ROOT, _DEPTH, _BREADTH) = range(3)
class Node:
def __init__(self, identifier):
self.__identifier = identifier
self.__children = []
@property
def identifier(self):
return self.__identifier
@property
def children(self):
return self.__children
def add_child(self, identifier):
self.__children.append(identifier)
class Tree:
def __init__(self):
self.__nodes = {}
@property
def nodes(self):
return self.__nodes
def add_node(self, identifier, parent=None):
print("identifier: " + identifier + " parent= " + str(parent))
node = Node(identifier)
self[identifier] = node
if parent is not None:
self[parent].add_child(identifier)
return node
def display(self, identifier, depth=_ROOT):
children = self[identifier].children
if depth == _ROOT:
print("{0}".format(identifier))
else:
print("\t"*depth, "{0}".format(identifier))
depth += 1
for child in children:
self.display(child, depth) # recursive call
def traverse(self, identifier, mode=_DEPTH):
yield identifier
queue = self[identifier].children
while queue:
yield queue[0]
expansion = self[queue[0]].children
if mode == _DEPTH:
queue = expansion + queue[1:] # depth-first
elif mode == _BREADTH:
queue = queue[1:] + expansion # width-first
def __getitem__(self, key):
return self.__nodes[key]
def __setitem__(self, key, item):
self.__nodes[key] = item
'''
tree = Tree()
print("{0}".format("palestras"))
tree.add_node("Harry") # root node
tree.add_node("Jane", "Harry")
tree.add_node("Bill", "Harry")
tree.add_node("Joe", "Jane")
tree.add_node("Diane", "Jane")
tree.add_node("George", "Diane")
tree.add_node("Mary", "Diane")
tree.add_node("Jill", "George")
tree.add_node("Carol", "Jill")
tree.add_node("Grace", "Bill")
tree.add_node("Mark", "Jane")
tree.display("Harry")
print("***** DEPTH-FIRST ITERATION *****")
for node in tree.traverse("Harry"):
print(node)
print("***** BREADTH-FIRST ITERATION *****")
for node in tree.traverse("Harry", mode=_BREADTH):
print(node)
'''
|
glaudsonml/kurgan-ai
|
libs/Tree.py
|
Python
|
apache-2.0
| 2,507 | 0.001197 |
#!/usr/bin/python
import sys, commands, struct, operator, subprocess, os
if len(sys.argv) != 3:
print 'usage:',sys.argv[0],'<program> <core>'
sys.exit(1)
prog, core = sys.argv[1:]
# finds out the size of void*/size_t. could be hardcoded for speed...
try:
cell = int(commands.getoutput('gdb '+prog+r''' -ex 'printf "cell %d\n", sizeof(void*)' -ex q | grep cell''').split()[1])
except:
print 'gdb failed to open',prog,core,'- assuming a 32b pointer'
cell = 4
fmt = {4:'I',8:'Q'}[cell]
def gdb_sym_info(addrs,exe):
gdb = subprocess.Popen(['gdb',prog,core], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
info = {}
found = 0
for addr in addrs:
if addr:
gdb.stdin.write('info symbol 0x%x\n'%addr)
gdb.stdin.write('list *0x%x\n'%addr)
gdb.stdin.write('printf "\\ndone\\n"\n')
gdb.stdin.flush()
line = ''
lineinfo = None
syminfo = 'UNKNOWN'
while line != 'done':
line = gdb.stdout.readline().strip()
if 'is in' in line: lineinfo = line.split('is in ')[1]
if 'in section' in line: syminfo = line.split('(gdb) ')[1]
if lineinfo:
info[addr] = lineinfo
else:
info[addr] = syminfo
found += int(info[addr] != 'UNKNOWN')
return info, found
def addr2line_sym_info(addrs,exe):
addr2line = subprocess.Popen('addr2line -f -e'.split()+[exe], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
info = {}
for addr in addrs:
if addr:
addr2line.stdin.write('0x%x\n'%addr)
addr2line.stdin.flush()
info[addr] = addr2line.stdout.readline().strip()+' '+addr2line.stdout.readline().strip()
return info
def sym_info(addrs,exe):
if 'HEAPPROF_ADDR2LINE' in os.environ:
gdb_found = 0
else:
syminfo, gdb_found = gdb_sym_info(addrs, prog)
if gdb_found < 1: # gdb didn't manage to find anything - perhaps the core dump is in a custom format
syminfo = addr2line_sym_info(addrs, prog)
return syminfo
# a silly guard against "non-blocks" - occurrences of HeaP and ProF
# in code instead of data
def is_block(s,e): return (e-s)%cell == 0 and (e-s)/cell < 100
class Block:
def __init__(self, metadata):
self.size = struct.unpack(fmt, metadata[0:cell])[0]
self.stack = struct.unpack('%d'%(len(metadata)/cell - 1)+fmt, metadata[cell:])
def find_blocks(bytes):
blocks = []
end_index = 0
while True:
start_index = bytes.find('HeaP',end_index)
end_index = bytes.find('ProF',start_index)
if not is_block(start_index, end_index):
end_index = start_index + cell # search again
else:
if min(start_index, end_index) < 0:
break
blocks.append(Block(bytes[start_index+cell:end_index])) # this assumes little endian...
return blocks
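# Metadata layout assumed above (reconstructed from Block/find_blocks, not from
# separate documentation): each recorded heap block carries
#   'HeaP' magic (padded to one cell) | block size (one cell) |
#   call-stack return addresses (one cell each) | 'ProF' magic
# where a "cell" is sizeof(void*) in the profiled program.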
def code_addrs(blocks):
return list(reduce(operator.or_, [set(block.stack) for block in blocks]))
def report(blocks, syminfo):
stack2sizes = {}
for block in blocks:
stack2sizes.setdefault(block.stack,list()).append(block.size)
total = sorted([(sum(sizes), stack) for stack, sizes in stack2sizes.iteritems()])
heapsize = sum([size for size, stack in total])
for size, stack in reversed(total):
print '%d%% %d %s'%(int(100.*size/heapsize), size, stack2sizes[stack])
for addr in stack:
if addr:
print ' 0x%x'%addr, syminfo[addr]
blocks = find_blocks(open(core,'rb').read())
if not blocks:
print 'no heap blocks found in the core dump (searched for metadata enclosed in the magic string HeaP...ProF)'
sys.exit(1)
syminfo = sym_info(code_addrs(blocks), prog)
report(blocks, syminfo)
|
yosefk/heapprof
|
heapprof.py
|
Python
|
bsd-2-clause
| 3,588 | 0.028428 |
import sys
from PyQt5 import QtWidgets, QtGui, QtCore
from PyQt5 import uic
from . import guiStart
from . import guiCompileSuccess
# sys.path.insert(1, 'C:/Users/GuSan/Desktop/powerOverWhelming/project/src/comp_exec')
from ..comp_exec import validation
from . import guiErrorCode
class GuiSelectCode(QtWidgets.QMainWindow) :
def setupUi(self, SelectCode):
SelectCode.setObjectName("SelectCode")
SelectCode.resize(1300, 1100)
self.centralwidget = QtWidgets.QWidget(SelectCode)
self.centralwidget.setObjectName("centralwidget")
self.opt_select_code_3 = QtWidgets.QRadioButton(self.centralwidget)
self.opt_select_code_3.setGeometry(QtCore.QRect(970, 100, 21, 22))
self.opt_select_code_3.setText("")
self.opt_select_code_3.setObjectName("opt_select_code_3")
self.txt_select_code_1 = QtWidgets.QPlainTextEdit(self.centralwidget)
self.txt_select_code_1.setGeometry(QtCore.QRect(150, 140, 320, 721))
self.txt_select_code_1.setObjectName("txt_select_code_1")
self.opt_select_code_1 = QtWidgets.QRadioButton(self.centralwidget)
self.opt_select_code_1.setGeometry(QtCore.QRect(310, 100, 21, 22))
self.opt_select_code_1.setText("")
self.opt_select_code_1.setObjectName("opt_select_code_1")
self.txt_select_code_3 = QtWidgets.QPlainTextEdit(self.centralwidget)
self.txt_select_code_3.setGeometry(QtCore.QRect(810, 140, 320, 721))
self.txt_select_code_3.setObjectName("txt_select_code_3")
self.label = QtWidgets.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(560, 40, 201, 41))
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(True)
font.setWeight(75)
self.label.setFont(font)
self.label.setAlignment(QtCore.Qt.AlignCenter)
self.label.setObjectName("label")
self.btn_compile_start = QtWidgets.QPushButton(self.centralwidget)
self.btn_compile_start.setGeometry(QtCore.QRect(980, 890, 151, 51))
self.btn_compile_start.setObjectName("btn_compile_start")
self.btn_return_search = QtWidgets.QPushButton(self.centralwidget)
self.btn_return_search.setGeometry(QtCore.QRect(980, 970, 151, 51))
self.btn_return_search.setObjectName("btn_return_search")
self.opt_select_code_2 = QtWidgets.QRadioButton(self.centralwidget)
self.opt_select_code_2.setGeometry(QtCore.QRect(640, 100, 21, 22))
self.opt_select_code_2.setText("")
self.opt_select_code_2.setObjectName("opt_select_code_2")
self.txt_select_code_2 = QtWidgets.QPlainTextEdit(self.centralwidget)
self.txt_select_code_2.setGeometry(QtCore.QRect(480, 140, 320, 721))
self.txt_select_code_2.setObjectName("txt_select_code_2")
self.progress = QtWidgets.QProgressBar(self.centralwidget)
self.progress.setGeometry(QtCore.QRect(150, 910, 791, 31))
self.progress.setProperty("value", 0)
self.progress.setObjectName("progress")
SelectCode.setCentralWidget(self.centralwidget)
self.statusbar = QtWidgets.QStatusBar(SelectCode)
self.statusbar.setObjectName("statusbar")
SelectCode.setStatusBar(self.statusbar)
self.retranslateUi(SelectCode)
QtCore.QMetaObject.connectSlotsByName(SelectCode)
def retranslateUi(self, SelectCode):
_translate = QtCore.QCoreApplication.translate
SelectCode.setWindowTitle(_translate("SelectCode", "Select Code"))
self.label.setText(_translate("SelectCode", "Select Code"))
self.btn_compile_start.setText(_translate("SelectCode", "Compile!"))
self.btn_return_search.setText(_translate("SelectCode", "Return to Search"))
def __init__(self):
QtWidgets.QMainWindow.__init__(self)
self.setupUi(self)
self.initUi()
def initUi(self) :
self.btn_compile_start.clicked.connect(self.compile_click)
self.opt_select_code_1.setChecked(True)
self.btn_return_search.clicked.connect(self.return_search)
#window_start = guiStart.GuiStart(self)
#self.txt_select_code_1.setPlainText(window_start.inputOutput(window_start.edit_keyword.toPlainText())[0])
#self.txt_select_code_2.setPlainText(window_start.inputOutput(window_start.edit_keyword.toPlainText())[1])
#self.txt_select_code_3.setPlainText(window_start.inputOutput(window_start.edit_keyword.toPlainText())[2])
def return_search(self) :
global window_search_code
self.close()
window_search_code = guiStart.GuiStart()
window_search_code.show()
def compile_click(self) :
global window_compile_success
global window_compile_fail
window_compile_success = guiCompileSuccess.GuiCompileSuccess()
window_compile_fail = guiErrorCode.GuiErrorCode()
self.completed = 0
while self.completed<100 :
self.completed+=0.001
self.progress.setValue(self.completed)
QtWidgets.QApplication.processEvents()
tupleCompile = validation.validation(self.loadText(), 'cpp')
print(tupleCompile[0])
if(tupleCompile[1]==1) :
msg = QtWidgets.QMessageBox()
            msg.setText("Compile error")
            msg.setWindowTitle("Compile error")
msg.show()
msg.exec_()
window_compile_fail.txt_error_code.setPlainText(self.loadText())
window_compile_fail.txt_error_context.setPlainText(tupleCompile[0])
window_compile_fail.show()
return window_compile_fail
else :
window_compile_success.txt_code_complete.setPlainText(self.loadText())
window_compile_success.txt_output_test.setPlainText(tupleCompile[0])
window_compile_success.show()
return window_compile_success
def loadText(self) :
if(self.opt_select_code_1.isChecked()) :
print("radioButton 1 is toggled")
return self.txt_select_code_1.toPlainText()
elif(self.opt_select_code_2.isChecked()) :
print("radioButton 2 is toggled")
return self.txt_select_code_2.toPlainText()
else :
print("radioButton 3 is toggled")
return self.txt_select_code_3.toPlainText()
|
lunapocket/powerOverWhelming
|
project/src/gui/guiSelectCode.py
|
Python
|
gpl-3.0
| 6,372 | 0.010233 |
# -*- coding: iso-8859-1 -*-
from enigma import eConsoleAppContainer
from Components.Console import Console
from Components.About import about
from Components.PackageInfo import PackageInfoHandler
from Components.Language import language
from Components.Sources.List import List
from Components.Ipkg import IpkgComponent
from Components.Network import iNetwork
from Tools.Directories import pathExists, fileExists, resolveFilename, SCOPE_METADIR
from Tools.HardwareInfo import HardwareInfo
from time import time
from boxbranding import getImageVersion
class SoftwareTools(PackageInfoHandler):
lastDownloadDate = None
NetworkConnectionAvailable = None
list_updating = False
available_updates = 0
available_updatelist = []
available_packetlist = []
installed_packetlist = {}
def __init__(self):
aboutInfo = getImageVersion()
if aboutInfo.startswith("dev-"):
self.ImageVersion = 'Experimental'
else:
self.ImageVersion = 'Stable'
self.language = language.getLanguage()[:2] # getLanguage returns e.g. "fi_FI" for "language_country"
PackageInfoHandler.__init__(self, self.statusCallback, blocking = False, neededTag = 'ALL_TAGS', neededFlag = self.ImageVersion)
self.directory = resolveFilename(SCOPE_METADIR)
self.hardware_info = HardwareInfo()
self.list = List([])
self.NotifierCallback = None
self.Console = Console()
self.UpdateConsole = Console()
self.cmdList = []
self.unwanted_extensions = ('-dbg', '-dev', '-doc', '-staticdev', '-src')
self.ipkg = IpkgComponent()
self.ipkg.addCallback(self.ipkgCallback)
def statusCallback(self, status, progress):
pass
def startSoftwareTools(self, callback = None):
if callback is not None:
self.NotifierCallback = callback
iNetwork.checkNetworkState(self.checkNetworkCB)
def checkNetworkCB(self, data):
if data is not None:
if data <= 2:
self.NetworkConnectionAvailable = True
self.getUpdates()
else:
self.NetworkConnectionAvailable = False
self.getUpdates()
def getUpdates(self, callback = None):
if self.lastDownloadDate is None:
if self.NetworkConnectionAvailable:
self.lastDownloadDate = time()
if self.list_updating is False and callback is None:
self.list_updating = True
self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
elif self.list_updating is False and callback is not None:
self.list_updating = True
self.NotifierCallback = callback
self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
elif self.list_updating is True and callback is not None:
self.NotifierCallback = callback
else:
self.list_updating = False
if callback is not None:
callback(False)
elif self.NotifierCallback is not None:
self.NotifierCallback(False)
else:
if self.NetworkConnectionAvailable:
self.lastDownloadDate = time()
if self.list_updating is False and callback is None:
self.list_updating = True
self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
elif self.list_updating is False and callback is not None:
self.list_updating = True
self.NotifierCallback = callback
self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)
elif self.list_updating is True and callback is not None:
self.NotifierCallback = callback
else:
if self.list_updating and callback is not None:
self.NotifierCallback = callback
self.startIpkgListAvailable()
else:
self.list_updating = False
if callback is not None:
callback(False)
elif self.NotifierCallback is not None:
self.NotifierCallback(False)
def ipkgCallback(self, event, param):
if event == IpkgComponent.EVENT_ERROR:
self.list_updating = False
if self.NotifierCallback is not None:
self.NotifierCallback(False)
elif event == IpkgComponent.EVENT_DONE:
if self.list_updating:
self.startIpkgListAvailable()
pass
def startIpkgListAvailable(self, callback = None):
if callback is not None:
self.list_updating = True
if self.list_updating:
if not self.UpdateConsole:
self.UpdateConsole = Console()
cmd = self.ipkg.ipkg + " list"
self.UpdateConsole.ePopen(cmd, self.IpkgListAvailableCB, callback)
def IpkgListAvailableCB(self, result, retval, extra_args = None):
(callback) = extra_args or None
if result:
if self.list_updating:
self.available_packetlist = []
for x in result.splitlines():
tokens = x.split(' - ')
name = tokens[0].strip()
if not any(name.endswith(x) for x in self.unwanted_extensions):
l = len(tokens)
version = l > 1 and tokens[1].strip() or ""
descr = l > 2 and tokens[2].strip() or ""
self.available_packetlist.append([name, version, descr])
if callback is None:
self.startInstallMetaPackage()
else:
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
callback(True)
else:
self.list_updating = False
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
if callback is not None:
callback(False)
def startInstallMetaPackage(self, callback = None):
if callback is not None:
self.list_updating = True
if self.list_updating:
if self.NetworkConnectionAvailable:
if not self.UpdateConsole:
self.UpdateConsole = Console()
cmd = self.ipkg.ipkg + " install enigma2-meta enigma2-plugins-meta enigma2-skins-meta"
self.UpdateConsole.ePopen(cmd, self.InstallMetaPackageCB, callback)
else:
self.InstallMetaPackageCB(True)
def InstallMetaPackageCB(self, result, retval = None, extra_args = None):
(callback) = extra_args or None
if result:
self.fillPackagesIndexList()
if callback is None:
self.startIpkgListInstalled()
else:
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
callback(True)
else:
self.list_updating = False
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
if callback is not None:
callback(False)
def startIpkgListInstalled(self, callback = None):
if callback is not None:
self.list_updating = True
if self.list_updating:
if not self.UpdateConsole:
self.UpdateConsole = Console()
cmd = self.ipkg.ipkg + " list_installed"
self.UpdateConsole.ePopen(cmd, self.IpkgListInstalledCB, callback)
def IpkgListInstalledCB(self, result, retval, extra_args = None):
(callback) = extra_args or None
if result:
self.installed_packetlist = {}
for x in result.splitlines():
tokens = x.split(' - ')
name = tokens[0].strip()
if not any(name.endswith(x) for x in self.unwanted_extensions):
l = len(tokens)
version = l > 1 and tokens[1].strip() or ""
self.installed_packetlist[name] = version
for package in self.packagesIndexlist[:]:
if not self.verifyPrerequisites(package[0]["prerequisites"]):
self.packagesIndexlist.remove(package)
for package in self.packagesIndexlist[:]:
attributes = package[0]["attributes"]
if "packagetype" in attributes:
if attributes["packagetype"] == "internal":
self.packagesIndexlist.remove(package)
if callback is None:
self.countUpdates()
else:
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
callback(True)
else:
self.list_updating = False
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
if callback is not None:
callback(False)
def countUpdates(self, callback = None):
self.available_updates = 0
self.available_updatelist = []
for package in self.packagesIndexlist[:]:
attributes = package[0]["attributes"]
packagename = attributes["packagename"]
for x in self.available_packetlist:
if x[0] == packagename:
if packagename in self.installed_packetlist:
if self.installed_packetlist[packagename] != x[1]:
self.available_updates +=1
self.available_updatelist.append([packagename])
self.list_updating = False
if self.UpdateConsole:
if len(self.UpdateConsole.appContainers) == 0:
if callback is not None:
callback(True)
callback = None
elif self.NotifierCallback is not None:
self.NotifierCallback(True)
self.NotifierCallback = None
def startIpkgUpdate(self, callback = None):
if not self.Console:
self.Console = Console()
cmd = self.ipkg.ipkg + " update"
self.Console.ePopen(cmd, self.IpkgUpdateCB, callback)
def IpkgUpdateCB(self, result, retval, extra_args = None):
(callback) = extra_args or None
if result:
if self.Console:
if len(self.Console.appContainers) == 0:
if callback is not None:
callback(True)
callback = None
def cleanupSoftwareTools(self):
self.list_updating = False
if self.NotifierCallback is not None:
self.NotifierCallback = None
self.ipkg.stop()
if self.Console is not None:
if len(self.Console.appContainers):
for name in self.Console.appContainers.keys():
self.Console.kill(name)
if self.UpdateConsole is not None:
if len(self.UpdateConsole.appContainers):
for name in self.UpdateConsole.appContainers.keys():
self.UpdateConsole.kill(name)
def verifyPrerequisites(self, prerequisites):
if "hardware" in prerequisites:
hardware_found = False
for hardware in prerequisites["hardware"]:
if hardware == self.hardware_info.device_name:
hardware_found = True
if not hardware_found:
return False
return True
iSoftwareTools = SoftwareTools()
|
atvcaptain/enigma2
|
lib/python/Plugins/SystemPlugins/SoftwareManager/SoftwareTools.py
|
Python
|
gpl-2.0
| 9,405 | 0.031154 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import with_statement
import os
import sys
from optparse import OptionParser, BadOptionError
from celery import __version__
from celery.platforms import EX_FAILURE, detached
from celery.utils.log import get_logger
from celery.bin.base import daemon_options, Option
logger = get_logger(__name__)
OPTION_LIST = daemon_options(default_pidfile="celeryd.pid") + (
Option("--fake",
default=False, action="store_true", dest="fake",
help="Don't fork (for debugging purposes)"), )
def detach(path, argv, logfile=None, pidfile=None, uid=None,
gid=None, umask=0, working_directory=None, fake=False, ):
with detached(logfile, pidfile, uid, gid, umask, working_directory, fake):
try:
os.execv(path, [path] + argv)
except Exception:
from celery import current_app
current_app.log.setup_logging_subsystem("ERROR", logfile)
logger.critical("Can't exec %r", " ".join([path] + argv),
exc_info=True)
return EX_FAILURE
class PartialOptionParser(OptionParser):
def __init__(self, *args, **kwargs):
self.leftovers = []
OptionParser.__init__(self, *args, **kwargs)
def _process_long_opt(self, rargs, values):
arg = rargs.pop(0)
if "=" in arg:
opt, next_arg = arg.split("=", 1)
rargs.insert(0, next_arg)
had_explicit_value = True
else:
opt = arg
had_explicit_value = False
try:
opt = self._match_long_opt(opt)
option = self._long_opt.get(opt)
except BadOptionError:
option = None
if option:
if option.takes_value():
nargs = option.nargs
if len(rargs) < nargs:
if nargs == 1:
self.error("%s option requires an argument" % opt)
else:
self.error("%s option requires %d arguments" % (
opt, nargs))
elif nargs == 1:
value = rargs.pop(0)
else:
value = tuple(rargs[0:nargs])
del rargs[0:nargs]
elif had_explicit_value:
self.error("%s option does not take a value" % opt)
else:
value = None
option.process(opt, value, values, self)
else:
self.leftovers.append(arg)
def _process_short_opts(self, rargs, values):
arg = rargs[0]
try:
OptionParser._process_short_opts(self, rargs, values)
except BadOptionError:
self.leftovers.append(arg)
if rargs and not rargs[0][0] == "-":
self.leftovers.append(rargs.pop(0))
class detached_celeryd(object):
option_list = OPTION_LIST
usage = "%prog [options] [celeryd options]"
version = __version__
description = ("Detaches Celery worker nodes. See `celeryd --help` "
"for the list of supported worker arguments.")
command = sys.executable
execv_path = sys.executable
execv_argv = ["-m", "celery.bin.celeryd"]
def Parser(self, prog_name):
return PartialOptionParser(prog=prog_name,
option_list=self.option_list,
usage=self.usage,
description=self.description,
version=self.version)
def parse_options(self, prog_name, argv):
parser = self.Parser(prog_name)
options, values = parser.parse_args(argv)
if options.logfile:
parser.leftovers.append("--logfile=%s" % (options.logfile, ))
if options.pidfile:
parser.leftovers.append("--pidfile=%s" % (options.pidfile, ))
return options, values, parser.leftovers
def execute_from_commandline(self, argv=None):
if argv is None:
argv = sys.argv
config = []
seen_cargs = 0
for arg in argv:
if seen_cargs:
config.append(arg)
else:
if arg == "--":
seen_cargs = 1
config.append(arg)
prog_name = os.path.basename(argv[0])
options, values, leftovers = self.parse_options(prog_name, argv[1:])
sys.exit(detach(path=self.execv_path,
argv=self.execv_argv + leftovers + config,
**vars(options)))
def main():
detached_celeryd().execute_from_commandline()
if __name__ == "__main__": # pragma: no cover
main()
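# Illustrative sketch (added; not part of upstream Celery): PartialOptionParser
# keeps every option it does not recognise in `parser.leftovers`, which is how
# worker-only flags survive the detach step and get forwarded to celeryd.
# "--some-worker-flag" is a made-up option name; roughly:
#
#   parser = PartialOptionParser(option_list=OPTION_LIST)
#   opts, args = parser.parse_args(["--pidfile=/var/run/w.pid",
#                                   "--some-worker-flag"])
#   assert parser.leftovers == ["--some-worker-flag"]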
|
couchbaselabs/celery
|
celery/bin/celeryd_detach.py
|
Python
|
bsd-3-clause
| 4,792 | 0.000417 |
# Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import sqlalchemy as sa
metadata = sa.MetaData()
Queues = sa.Table('Queues', metadata,
sa.Column('id', sa.INTEGER, primary_key=True),
sa.Column('project', sa.String(64)),
sa.Column('name', sa.String(64)),
sa.Column('metadata', sa.LargeBinary),
sa.UniqueConstraint('project', 'name'),
)
Pools = sa.Table('Pools', metadata,
sa.Column('name', sa.String(64), primary_key=True),
sa.Column('uri', sa.String(255),
unique=True, nullable=False),
sa.Column('weight', sa.INTEGER, nullable=False),
sa.Column('options', sa.Text()),
sa.Column('flavor', sa.String(64), nullable=True))
# NOTE(gengchc2): Modify pool_group define: turn NOT NULL into DEFAULT NULL:
# [alter table Flavors change column pool_group pool_group varchar(64)
# default null;]
Flavors = sa.Table('Flavors', metadata,
sa.Column('name', sa.String(64), primary_key=True),
sa.Column('project', sa.String(64)),
sa.Column('capabilities', sa.Text()))
Catalogue = sa.Table('Catalogue', metadata,
sa.Column('pool', sa.String(64),
sa.ForeignKey('Pools.name',
ondelete='CASCADE')),
sa.Column('project', sa.String(64)),
sa.Column('queue', sa.String(64), nullable=False),
sa.UniqueConstraint('project', 'queue'))
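if __name__ == '__main__':
    # Illustrative sketch (added; not part of the upstream file): the table
    # definitions above hang off the module-level `metadata`, so they can be
    # materialised on any engine in one call. The in-memory SQLite engine is
    # only for demonstration; Zaqar configures its real engine elsewhere.
    engine = sa.create_engine('sqlite:///:memory:')
    metadata.create_all(engine)
    print(sorted(metadata.tables))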
|
openstack/zaqar
|
zaqar/storage/sqlalchemy/tables.py
|
Python
|
apache-2.0
| 2,182 | 0 |
import os
import calendar
import datetime
import logging
from primitives import *
from constants import *
from helpers import OrderedAttrDict, utc
"""
The AS types and their FLV representations.
"""
log = logging.getLogger('flvlib.astypes')
class MalformedFLV(Exception):
pass
# Number
def get_number(f, max_offset=None):
return get_double(f)
def make_number(num):
return make_double(num)
# Boolean
def get_boolean(f, max_offset=None):
value = get_ui8(f)
return bool(value)
def make_boolean(value):
return make_ui8((value and 1) or 0)
# String
def get_string(f, max_offset=None):
# First 16 bits are the string's length
length = get_ui16(f)
# Then comes the string itself
ret = f.read(length)
return ret
def make_string(string):
if isinstance(string, unicode):
# We need a blob, not unicode. Arbitrarily choose UTF-8
string = string.encode('UTF-8')
length = make_ui16(len(string))
return length + string
# Longstring
def get_longstring(f, max_offset=None):
# First 32 bits are the string's length
length = get_ui32(f)
# Then comes the string itself
ret = f.read(length)
return ret
def make_longstring(string):
if isinstance(string, unicode):
# We need a blob, not unicode. Arbitrarily choose UTF-8
string = string.encode('UTF-8')
length = make_ui32(len(string))
return length + string
# ECMA Array
class ECMAArray(OrderedAttrDict):
pass
def get_ecma_array(f, max_offset=None):
length = get_ui32(f)
log.debug("The ECMA array has approximately %d elements", length)
array = ECMAArray()
while True:
if max_offset and (f.tell() == max_offset):
log.debug("Prematurely terminating reading an ECMA array")
break
marker = get_ui24(f)
if marker == 9:
log.debug("Marker!")
break
else:
f.seek(-3, os.SEEK_CUR)
name, value = get_script_data_variable(f, max_offset=max_offset)
array[name] = value
return array
def make_ecma_array(d):
length = make_ui32(len(d))
rest = ''.join([make_script_data_variable(name, value)
for name, value in d.iteritems()])
marker = make_ui24(9)
return length + rest + marker
# Strict Array
def get_strict_array(f, max_offset=None):
length = get_ui32(f)
log.debug("The length is %d", length)
elements = [get_script_data_value(f, max_offset=max_offset)
for _ in xrange(length)]
return elements
def make_strict_array(l):
ret = make_ui32(len(l))
rest = ''.join([make_script_data_value(value) for value in l])
return ret + rest
# Date
def get_date(f, max_offset=None):
timestamp = get_number(f) / 1000.0
# From the following document:
# http://opensource.adobe.com/wiki/download/
# attachments/1114283/amf0_spec_121207.pdf
#
# Section 2.13 Date Type
#
# (...) While the design of this type reserves room for time zone offset
# information, it should not be filled in, nor used (...)
_ignored = get_si16(f)
return datetime.datetime.fromtimestamp(timestamp, utc)
def make_date(date):
if date.tzinfo:
utc_date = date.astimezone(utc)
else:
# assume it's UTC
utc_date = date.replace(tzinfo=utc)
ret = make_number(calendar.timegm(utc_date.timetuple()) * 1000)
offset = 0
return ret + make_si16(offset)
# Null
def get_null(f, max_offset=None):
return None
def make_null(none):
return ''
# Object
class FLVObject(OrderedAttrDict):
pass
def get_object(f, max_offset=None):
ret = FLVObject()
while True:
if max_offset and (f.tell() == max_offset):
log.debug("Prematurely terminating reading an object")
break
marker = get_ui24(f)
if marker == 9:
log.debug("Marker!")
break
else:
f.seek(-3, os.SEEK_CUR)
name, value = get_script_data_variable(f)
setattr(ret, name, value)
return ret
def make_object(obj):
# If the object is iterable, serialize keys/values. If not, fall
# back on iterating over __dict__.
# This makes sure that make_object(get_object(StringIO(blob))) == blob
try:
iterator = obj.iteritems()
except AttributeError:
iterator = obj.__dict__.iteritems()
ret = ''.join([make_script_data_variable(name, value)
for name, value in iterator])
marker = make_ui24(9)
return ret + marker
# MovieClip
class MovieClip(object):
def __init__(self, path):
self.path = path
def __eq__(self, other):
return isinstance(other, MovieClip) and self.path == other.path
def __repr__(self):
return "<MovieClip at %s>" % self.path
def get_movieclip(f, max_offset=None):
ret = get_string(f)
return MovieClip(ret)
def make_movieclip(clip):
return make_string(clip.path)
# Undefined
class Undefined(object):
def __eq__(self, other):
return isinstance(other, Undefined)
def __repr__(self):
return '<Undefined>'
def get_undefined(f, max_offset=None):
return Undefined()
def make_undefined(undefined):
return ''
# Reference
class Reference(object):
def __init__(self, ref):
self.ref = ref
def __eq__(self, other):
return isinstance(other, Reference) and self.ref == other.ref
def __repr__(self):
return "<Reference to %d>" % self.ref
def get_reference(f, max_offset=None):
ret = get_ui16(f)
return Reference(ret)
def make_reference(reference):
return make_ui16(reference.ref)
as_type_to_getter_and_maker = {
VALUE_TYPE_NUMBER: (get_number, make_number),
VALUE_TYPE_BOOLEAN: (get_boolean, make_boolean),
VALUE_TYPE_STRING: (get_string, make_string),
VALUE_TYPE_OBJECT: (get_object, make_object),
VALUE_TYPE_MOVIECLIP: (get_movieclip, make_movieclip),
VALUE_TYPE_NULL: (get_null, make_null),
VALUE_TYPE_UNDEFINED: (get_undefined, make_undefined),
VALUE_TYPE_REFERENCE: (get_reference, make_reference),
VALUE_TYPE_ECMA_ARRAY: (get_ecma_array, make_ecma_array),
VALUE_TYPE_STRICT_ARRAY: (get_strict_array, make_strict_array),
VALUE_TYPE_DATE: (get_date, make_date),
VALUE_TYPE_LONGSTRING: (get_longstring, make_longstring)
}
type_to_as_type = {
bool: VALUE_TYPE_BOOLEAN,
int: VALUE_TYPE_NUMBER,
long: VALUE_TYPE_NUMBER,
float: VALUE_TYPE_NUMBER,
# WARNING: not supporting Longstrings here.
    # With a max length of 65535 chars, no one will notice.
str: VALUE_TYPE_STRING,
unicode: VALUE_TYPE_STRING,
list: VALUE_TYPE_STRICT_ARRAY,
dict: VALUE_TYPE_ECMA_ARRAY,
ECMAArray: VALUE_TYPE_ECMA_ARRAY,
datetime.datetime: VALUE_TYPE_DATE,
Undefined: VALUE_TYPE_UNDEFINED,
MovieClip: VALUE_TYPE_MOVIECLIP,
Reference: VALUE_TYPE_REFERENCE,
type(None): VALUE_TYPE_NULL
}
# SCRIPTDATAVARIABLE
def get_script_data_variable(f, max_offset=None):
name = get_string(f)
log.debug("The name is %s", name)
value = get_script_data_value(f, max_offset=max_offset)
log.debug("The value is %r", value)
return (name, value)
def make_script_data_variable(name, value):
log.debug("The name is %s", name)
log.debug("The value is %r", value)
ret = make_string(name) + make_script_data_value(value)
return ret
# SCRIPTDATAVALUE
def get_script_data_value(f, max_offset=None):
value_type = get_ui8(f)
log.debug("The value type is %r", value_type)
try:
get_value = as_type_to_getter_and_maker[value_type][0]
except KeyError:
raise MalformedFLV("Invalid script data value type: %d", value_type)
log.debug("The getter function is %r", get_value)
value = get_value(f, max_offset=max_offset)
return value
def make_script_data_value(value):
value_type = type_to_as_type.get(value.__class__, VALUE_TYPE_OBJECT)
log.debug("The value type is %r", value_type)
# KeyError can't happen here, because we always fall back on
# VALUE_TYPE_OBJECT when determining value_type
make_value = as_type_to_getter_and_maker[value_type][1]
log.debug("The maker function is %r", make_value)
type_tag = make_ui8(value_type)
ret = make_value(value)
return type_tag + ret
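if __name__ == '__main__':
    # Illustrative round-trip sketch (added; not part of upstream flvlib):
    # serialize a dict as a script data value (an ECMA array) and read it back
    # through a file-like object. Needs the sibling `primitives`/`constants`
    # modules, like the rest of this file.
    from StringIO import StringIO
    blob = make_script_data_value({'width': 640, 'duration': 12.5})
    print get_script_data_value(StringIO(blob))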
|
mrquim/mrquimrepo
|
script.video.F4mProxy/lib/flvlib/astypes.py
|
Python
|
gpl-2.0
| 8,332 | 0.00228 |
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2019 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
from typing import Union, List
try:
from dataclasses import dataclass
except ImportError:
from pydantic.dataclasses import dataclass
import numpy as np
from psi4 import core
from psi4.driver import constants
from psi4.driver.p4util import solvers
from psi4.driver.p4util.exceptions import *
from psi4.driver.procrouting.response.scf_products import (TDRSCFEngine, TDUSCFEngine)
dipole = {
'name': 'Dipole polarizabilities',
'printout_labels': ['X', 'Y', 'Z'],
'mints_function': core.MintsHelper.ao_dipole,
'vector names': ['AO Mux', 'AO Muy', 'AO Muz']
}
quadrupole = {
'name': 'Quadrupole polarizabilities',
'printout_labels': ['XX', 'XY', 'XZ', 'YY', 'YZ', 'ZZ'],
'mints_function': core.MintsHelper.ao_quadrupole,
}
quadrupole['vector names'] = ["AO Quadrupole " + x for x in quadrupole["printout_labels"]]
traceless_quadrupole = {
'name': 'Traceless quadrupole polarizabilities',
'printout_labels': ['XX', 'XY', 'XZ', 'YY', 'YZ', 'ZZ'],
'mints_function': core.MintsHelper.ao_traceless_quadrupole,
}
traceless_quadrupole['vector names'] = [
"AO Traceless Quadrupole " + x for x in traceless_quadrupole["printout_labels"]
]
property_dicts = {
'DIPOLE_POLARIZABILITIES': dipole,
'QUADRUPOLE_POLARIZABILITIES': quadrupole,
'TRACELESS_QUADRUPOLE_POLARIZABILITIES': traceless_quadrupole
}
def cpscf_linear_response(wfn, *args, **kwargs):
"""
Compute the static properties from a reference wavefunction. The currently implemented properties are
- dipole polarizability
- quadrupole polarizability
Parameters
----------
wfn : psi4 wavefunction
The reference wavefunction.
args : list
        The list of arguments. For each argument, such as ``dipole polarizability``, the function returns the corresponding
response. The user may also choose to pass a list or tuple of custom vectors.
kwargs : dict
Options that control how the response is computed. The following options are supported (with default values):
- ``conv_tol``: 1e-5
- ``max_iter``: 10
- ``print_lvl``: 2
Returns
-------
responses : list
The list of responses.
"""
mints = core.MintsHelper(wfn.basisset())
# list of dictionaries to control response calculations, count how many user-supplied vectors we have
complete_dict = []
n_user = 0
for arg in args:
# for each string keyword, append the appropriate dictionary (vide supra) to our list
if isinstance(arg, str):
ret = property_dicts.get(arg)
if ret:
complete_dict.append(ret)
else:
raise ValidationError('Do not understand {}. Abort.'.format(arg))
# the user passed a list of vectors. absorb them into a dictionary
elif isinstance(arg, tuple) or isinstance(arg, list):
complete_dict.append({
'name': 'User Vectors',
'length': len(arg),
'vectors': arg,
'vector names': ['User Vector {}_{}'.format(n_user, i) for i in range(len(arg))]
})
n_user += len(arg)
# single vector passed. stored in a dictionary as a list of length 1 (can be handled as the case above that way)
# note: the length is set to '0' to designate that it was not really passed as a list
else:
complete_dict.append({
'name': 'User Vector',
'length': 0,
'vectors': [arg],
'vector names': ['User Vector {}'.format(n_user)]
})
n_user += 1
# vectors will be passed to the cphf solver, vector_names stores the corresponding names
vectors = []
vector_names = []
# construct the list of vectors. for the keywords, fetch the appropriate tensors from MintsHelper
for prop in complete_dict:
if 'User' in prop['name']:
for name, vec in zip(prop['vector names'], prop['vectors']):
vectors.append(vec)
vector_names.append(name)
else:
tmp_vectors = prop['mints_function'](mints)
for tmp in tmp_vectors:
tmp.scale(-2.0) # RHF only
vectors.append(tmp)
vector_names.append(tmp.name)
# do we have any vectors to work with?
if len(vectors) == 0:
raise ValidationError('I have no vectors to work with. Aborting.')
# print information on module, vectors that will be used
_print_header(complete_dict, n_user)
# fetch wavefunction information
nmo = wfn.nmo()
ndocc = wfn.nalpha()
nvirt = nmo - ndocc
c_occ = wfn.Ca_subset("AO", "OCC")
c_vir = wfn.Ca_subset("AO", "VIR")
nbf = c_occ.shape[0]
# the vectors need to be in the MO basis. if they have the shape nbf x nbf, transform.
for i in range(len(vectors)):
shape = vectors[i].shape
if shape == (nbf, nbf):
vectors[i] = core.triplet(c_occ, vectors[i], c_vir, True, False, False)
# verify that this vector already has the correct shape
elif shape != (ndocc, nvirt):
raise ValidationError('ERROR: "{}" has an unrecognized shape ({}, {}). Must be either ({}, {}) or ({}, {})'.format(
vector_names[i], shape[0], shape[1], nbf, nbf, ndocc, nvirt))
# compute response vectors for each input vector
params = [kwargs.pop("conv_tol", 1.e-5), kwargs.pop("max_iter", 10), kwargs.pop("print_lvl", 2)]
responses = wfn.cphf_solve(vectors, *params)
# zip vectors, responses for easy access
vectors = {k: v for k, v in zip(vector_names, vectors)}
responses = {k: v for k, v in zip(vector_names, responses)}
# compute response values, format output
output = []
for prop in complete_dict:
# try to replicate the data structure of the input
if 'User' in prop['name']:
if prop['length'] == 0:
output.append(responses[prop['vector names'][0]])
else:
buf = []
for name in prop['vector names']:
buf.append(responses[name])
output.append(buf)
else:
names = prop['vector names']
dim = len(names)
buf = np.zeros((dim, dim))
for i, i_name in enumerate(names):
for j, j_name in enumerate(names):
buf[i, j] = -1.0 * vectors[i_name].vector_dot(responses[j_name])
output.append(buf)
_print_output(complete_dict, output)
return output
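# A minimal usage sketch (hedged; not part of the original module): assuming a
# converged SCF wavefunction ``scf_wfn`` obtained elsewhere, e.g. via
# ``psi4.energy("scf/cc-pvdz", return_wfn=True)``, the static dipole
# polarizability tensor could be requested as
#     alpha, = cpscf_linear_response(scf_wfn, "DIPOLE_POLARIZABILITIES",
#                                    conv_tol=1e-6, max_iter=20)
# which yields a 3x3 numpy array indexed by the Cartesian directions.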
def _print_header(complete_dict, n_user):
core.print_out('\n\n ---------------------------------------------------------\n'
' {:^57}\n'.format('CPSCF Linear Response Solver') +
' {:^57}\n'.format('by Marvin Lechner and Daniel G. A. Smith') +
' ---------------------------------------------------------\n')
core.print_out('\n ==> Requested Responses <==\n\n')
for prop in complete_dict:
if 'User' not in prop['name']:
core.print_out(' {}\n'.format(prop['name']))
if n_user != 0:
core.print_out(' {} user-supplied vector(s)\n'.format(n_user))
def _print_matrix(descriptors, content, title):
length = len(descriptors)
matrix_header = ' ' + ' {:^10}' * length + '\n'
core.print_out(matrix_header.format(*descriptors))
core.print_out(' -----' + ' ----------' * length + '\n')
for i, desc in enumerate(descriptors):
core.print_out(' {:^5}'.format(desc))
for j in range(length):
core.print_out(' {:>10.5f}'.format(content[i, j]))
# Set the name
var_name = title + " " + descriptors[i] + descriptors[j]
core.set_variable(var_name, content[i, j])
core.print_out('\n')
def _print_output(complete_dict, output):
core.print_out('\n ==> Response Properties <==\n')
for i, prop in enumerate(complete_dict):
if not 'User' in prop['name']:
core.print_out('\n => {} <=\n\n'.format(prop['name']))
directions = prop['printout_labels']
var_name = prop['name'].upper().replace("IES", "Y")
_print_matrix(directions, output[i], var_name)
def _print_tdscf_header(*, r_convergence: float, guess_type: str, restricted: bool, ptype: str):
core.print_out("\n\n ---------------------------------------------------------\n"
f" {'TDSCF excitation energies':^57}\n" +
f" {'by Andrew M. James and Daniel G. A. Smith':^57}\n" +
" ---------------------------------------------------------\n")
core.print_out("\n ==> Options <==\n\n")
core.print_out(f" {'Residual threshold':<20s}: {r_convergence:.4e}\n")
core.print_out(f" {'Initial guess':20s}: {guess_type.lower()}\n")
reference = 'RHF' if restricted else 'UHF'
core.print_out(f" {'Reference':20s}: {reference}\n")
solver_type = 'Hamiltonian' if ptype == "RPA" else "Davidson"
core.print_out(f" {'Solver type':20s}: {ptype} ({solver_type})\n")
core.print_out("\n")
@dataclass
class _TDSCFResults:
E_ex_au: float
irrep_GS: str
irrep_ES: str
irrep_trans: str
edtm_length: np.ndarray
f_length: float
edtm_velocity: np.ndarray
f_velocity: float
mdtm: np.ndarray
R_length: float
R_velocity: float
spin_mult: str
R_eigvec: Union[core.Matrix, List[core.Matrix]]
L_eigvec: Union[core.Matrix, List[core.Matrix]]
def _solve_loop(wfn,
ptype,
solve_function,
states_per_irrep: List[int],
maxiter: int,
restricted: bool = True,
spin_mult: str = "singlet") -> List[_TDSCFResults]:
"""
References
----------
For the expression of the transition moments in length and velocity gauges:
- T. B. Pedersen, A. E. Hansen, "Ab Initio Calculation and Display of the
Rotary Strength Tensor in the Random Phase Approximation. Method and Model
Studies." Chem. Phys. Lett., 246, 1 (1995)
- P. J. Lestrange, F. Egidi, X. Li, "The Consequences of Improperly
Describing Oscillator Strengths beyond the Electric Dipole Approximation."
J. Chem. Phys., 143, 234103 (2015)
"""
core.print_out("\n ==> Requested Excitations <==\n\n")
for nstate, state_sym in zip(states_per_irrep, wfn.molecule().irrep_labels()):
core.print_out(f" {nstate} {spin_mult} states with {state_sym} symmetry\n")
# construct the engine
if restricted:
if spin_mult == "triplet":
engine = TDRSCFEngine(wfn, ptype=ptype.lower(), triplet=True)
else:
engine = TDRSCFEngine(wfn, ptype=ptype.lower(), triplet=False)
else:
engine = TDUSCFEngine(wfn, ptype=ptype.lower())
# collect results and compute some spectroscopic observables
mints = core.MintsHelper(wfn.basisset())
results = []
irrep_GS = wfn.molecule().irrep_labels()[engine.G_gs]
for state_sym, nstates in enumerate(states_per_irrep):
if nstates == 0:
continue
irrep_ES = wfn.molecule().irrep_labels()[state_sym]
core.print_out(f"\n\n ==> Seeking the lowest {nstates} {spin_mult} states with {irrep_ES} symmetry")
engine.reset_for_state_symm(state_sym)
guess_ = engine.generate_guess(nstates * 4)
# ret = {"eigvals": ee, "eigvecs": (rvecs, rvecs), "stats": stats} (TDA)
# ret = {"eigvals": ee, "eigvecs": (rvecs, lvecs), "stats": stats} (RPA)
ret = solve_function(engine, nstates, guess_, maxiter)
# check whether all roots converged
if not ret["stats"][-1]["done"]:
# raise error
            raise TDSCFConvergenceError(maxiter, wfn, f"{spin_mult} excitations in irrep {irrep_ES}", ret["stats"][-1])
# flatten dictionary: helps with sorting by energy
# also append state symmetry to return value
for e, (R, L) in zip(ret["eigvals"], ret["eigvecs"]):
irrep_trans = wfn.molecule().irrep_labels()[engine.G_gs ^ state_sym]
# length-gauge electric dipole transition moment
edtm_length = engine.residue(R, mints.so_dipole())
# length-gauge oscillator strength
f_length = ((2 * e) / 3) * np.sum(edtm_length**2)
# velocity-gauge electric dipole transition moment
edtm_velocity = engine.residue(L, mints.so_nabla())
            # velocity-gauge oscillator strength
f_velocity = (2 / (3 * e)) * np.sum(edtm_velocity**2)
# length gauge magnetic dipole transition moment
# 1/2 is the Bohr magneton in atomic units
mdtm = 0.5 * engine.residue(L, mints.so_angular_momentum())
# NOTE The signs for rotatory strengths are opposite WRT the cited paper.
            # This is because Psi4 defines the length-gauge dipole integral to include the electron charge (-1.0)
# length gauge rotatory strength
R_length = np.einsum("i,i", edtm_length, mdtm)
# velocity gauge rotatory strength
R_velocity = -np.einsum("i,i", edtm_velocity, mdtm) / e
results.append(
_TDSCFResults(e, irrep_GS, irrep_ES, irrep_trans, edtm_length, f_length, edtm_velocity, f_velocity,
mdtm, R_length, R_velocity, spin_mult, R, L))
return results
def _states_per_irrep(states, nirrep):
"""Distributes states into nirrep"""
spi = [states // nirrep] * nirrep
for i in range(states % nirrep):
spi[i] += 1
return spi
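# Hedged illustration: _states_per_irrep(10, 8) -> [2, 2, 1, 1, 1, 1, 1, 1];
# the remainder after integer division is handed to the first irreps, matching
# the D2h example quoted in the tdscf_excitations docstring below.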
def _validate_tdscf(*, wfn, states, triplets, guess) -> None:
# validate states
if not isinstance(states, (int, list)):
raise ValidationError("TDSCF: Number of states must be either an integer or a list of integers")
# list of states per irrep given, validate it
if isinstance(states, list):
if len(states) != wfn.nirrep():
raise ValidationError(f"TDSCF: States requested ({states}) do not match number of irreps ({wfn.nirrep()})")
# do triplets?
if triplets not in ["NONE", "ALSO", "ONLY"]:
raise ValidationError(
f"TDSCF: Triplet option ({triplets}) unrecognized. Must be one of 'NONE', 'ALSO' or 'ONLY'")
restricted = wfn.same_a_b_orbs()
do_triplets = False if triplets == "NONE" else True
if (not restricted) and do_triplets:
raise ValidationError("TDSCF: Cannot compute triplets with an unrestricted reference")
    # validate calculation
if restricted and wfn.functional().needs_xc() and do_triplets:
raise ValidationError("TDSCF: Restricted Vx kernel only spin-adapted for singlets")
not_lda = wfn.functional().is_gga() or wfn.functional().is_meta()
if (not restricted) and not_lda:
raise ValidationError("TDSCF: Unrestricted Kohn-Sham Vx kernel currently limited to SVWN functional")
if guess != "DENOMINATORS":
raise ValidationError(f"TDSCF: Guess type {guess} is not valid")
def tdscf_excitations(wfn,
*,
states: Union[int, List[int]],
triplets: str = "NONE",
tda: bool = False,
r_convergence: float = 1.0e-4,
maxiter: int = 60,
guess: str = "DENOMINATORS",
verbose: int = 1):
"""Compute excitations from a SCF(HF/KS) wavefunction
Parameters
-----------
wfn : :py:class:`psi4.core.Wavefunction`
The reference wavefunction
states : Union[int, List[int]]
How many roots (excited states) should the solver seek to converge?
This function accepts either an integer or a list of integers:
- The list has :math:`n_{\mathrm{irrep}}` elements and is only
acceptable if the system has symmetry. It tells the solver how many
states per irrep to calculate.
- If an integer is given _and_ the system has symmetry, the states
will be distributed among irreps.
For example, ``states = 10`` for a D2h system will compute 10 states
distributed as ``[2, 2, 1, 1, 1, 1, 1, 1]`` among irreps.
triplets : {"NONE", "ONLY", "ALSO"}
Should the solver seek to converge states of triplet symmetry?
Default is `none`: do not seek to converge triplets.
Valid options are:
- `NONE`. Do not seek to converge triplets.
- `ONLY`. Only seek to converge triplets.
- `ALSO`. Seek to converge both triplets and singlets. This choice is
only valid for restricted reference wavefunction.
The number of states given will be apportioned roughly 50-50 between
singlet and triplet states, preferring the former. For example:
given ``state = 5, triplets = "ALSO"``, the solver will seek to
converge 3 states of singlet spin symmetry and 2 of triplet spin
symmetry. When asking for ``states = [3, 3, 3, 3], triplets =
"ALSO"`` states (C2v symmetry), ``[2, 2, 2, 2]`` will be of singlet
spin symmetry and ``[1, 1, 1, 1]``` will be of triplet spin
symmetry.
tda : bool, optional.
Should the solver use the Tamm-Dancoff approximation (TDA) or the
random-phase approximation (RPA)?
Default is ``False``: use RPA.
Note that TDA is equivalent to CIS for HF references.
r_convergence : float, optional.
The convergence threshold for the norm of the residual vector.
Default: 1.0e-4
        Using a tighter convergence threshold here requires a tighter SCF
        ground-state convergence threshold. As a rule of thumb, with the SCF
        ground-state density converged to :math:`10^{-N}` (``D_CONVERGENCE = 1.0e-N``),
        you can afford to converge a corresponding TDSCF calculation to
        :math:`10^{-(N-2)}`.
The default value is consistent with the default value for
``D_CONVERGENCE``.
maxiter : int, optional
Maximum number of iterations.
Default: 60
guess : str, optional.
How should the starting trial vectors be generated?
Default: `DENOMINATORS`, i.e. use orbital energy differences to generate
guess vectors.
verbose : int, optional.
How verbose should the solver be?
Default: 1
Notes
-----
The algorithm employed to solve the non-Hermitian eigenvalue problem (``tda = False``)
will fail when the SCF wavefunction has a triplet instability.
This function can be used for:
- restricted singlets: RPA or TDA, any functional
- restricted triplets: RPA or TDA, Hartree-Fock only
    - unrestricted: RPA or TDA, Hartree-Fock and LDA only
Tighter convergence thresholds will require a larger iterative subspace.
The maximum size of the iterative subspace is calculated based on `r_convergence`:
max_vecs_per_root = -np.log10(r_convergence) * 50
    For the default convergence threshold this gives 200 trial vectors per root and a maximum subspace size
of:
max_ss_size = max_vecs_per_root * n
    where `n` is the number of roots to seek in the given irrep.
For each irrep, the algorithm will store up to `max_ss_size` trial vectors
before collapsing (restarting) the iterations from the `n` best
approximations.
"""
# validate input parameters
triplets = triplets.upper()
guess = guess.upper()
_validate_tdscf(wfn=wfn, states=states, triplets=triplets, guess=guess)
restricted = wfn.same_a_b_orbs()
# determine how many states per irrep to seek and apportion them between singlets/triplets and irreps.
singlets_per_irrep = []
triplets_per_irrep = []
if isinstance(states, list):
if triplets == "ONLY":
triplets_per_irrep = states
elif triplets == "ALSO":
singlets_per_irrep = [(s // 2) + (s % 2) for s in states]
triplets_per_irrep = [(s // 2) for s in states]
else:
singlets_per_irrep = states
else:
# total number of states given
# first distribute them among singlets and triplets, preferring the
# former then distribute them among irreps
if triplets == "ONLY":
triplets_per_irrep = _states_per_irrep(states, wfn.nirrep())
elif triplets == "ALSO":
spi = (states // 2) + (states % 2)
singlets_per_irrep = _states_per_irrep(spi, wfn.nirrep())
tpi = states - spi
triplets_per_irrep = _states_per_irrep(tpi, wfn.nirrep())
else:
singlets_per_irrep = _states_per_irrep(states, wfn.nirrep())
# tie maximum number of vectors per root to requested residual tolerance
# This gives 200 vectors per root with default tolerance
max_vecs_per_root = int(-np.log10(r_convergence) * 50)
def rpa_solver(e, n, g, m):
return solvers.hamiltonian_solver(engine=e,
nroot=n,
guess=g,
r_convergence=r_convergence,
max_ss_size=max_vecs_per_root * n,
verbose=verbose)
def tda_solver(e, n, g, m):
return solvers.davidson_solver(engine=e,
nroot=n,
guess=g,
r_convergence=r_convergence,
max_ss_size=max_vecs_per_root * n,
verbose=verbose)
# determine which solver function to use: Davidson for TDA or Hamiltonian for RPA?
if tda:
ptype = "TDA"
solve_function = tda_solver
else:
ptype = "RPA"
solve_function = rpa_solver
_print_tdscf_header(r_convergence=r_convergence, guess_type=guess, restricted=restricted, ptype=ptype)
# collect solver results into a list
_results = []
# singlets solve loop
if triplets == "NONE" or triplets == "ALSO":
res_1 = _solve_loop(wfn, ptype, solve_function, singlets_per_irrep, maxiter, restricted, "singlet")
_results.extend(res_1)
# triplets solve loop
if triplets == "ALSO" or triplets == "ONLY":
res_3 = _solve_loop(wfn, ptype, solve_function, triplets_per_irrep, maxiter, restricted, "triplet")
_results.extend(res_3)
# sort by energy
_results = sorted(_results, key=lambda x: x.E_ex_au)
core.print_out("\n{}\n".format("*"*90) +
"{}{:^70}{}\n".format("*"*10, "WARNING", "*"*10) +
"{}{:^70}{}\n".format("*"*10, "Length-gauge rotatory strengths are **NOT** gauge-origin invariant", "*"*10) +
"{}\n\n".format("*"*90)) #yapf: disable
# print results
core.print_out(" " + (" " * 20) + " " + "Excitation Energy".center(31) + f" {'Total Energy':^15}" +
"Oscillator Strength".center(31) + "Rotatory Strength".center(31) + "\n")
core.print_out(
f" {'#':^4} {'Sym: GS->ES (Trans)':^20} {'au':^15} {'eV':^15} {'au':^15} {'au (length)':^15} {'au (velocity)':^15} {'au (length)':^15} {'au (velocity)':^15}\n"
)
core.print_out(
f" {'-':->4} {'-':->20} {'-':->15} {'-':->15} {'-':->15} {'-':->15} {'-':->15} {'-':->15} {'-':->15}\n")
# collect results
solver_results = []
for i, x in enumerate(_results):
        sym_descr = f"{x.irrep_GS}->{x.irrep_ES} ({1 if x.spin_mult == 'singlet' else 3} {x.irrep_trans})"
E_ex_ev = constants.conversion_factor('hartree', 'eV') * x.E_ex_au
E_tot_au = wfn.energy() + x.E_ex_au
# prepare return dictionary for this root
solver_results.append({
"EXCITATION ENERGY": x.E_ex_au,
"ELECTRIC DIPOLE TRANSITION MOMENT (LEN)": x.edtm_length,
"OSCILLATOR STRENGTH (LEN)": x.f_length,
"ELECTRIC DIPOLE TRANSITION MOMENT (VEL)": x.edtm_velocity,
"OSCILLATOR STRENGTH (VEL)": x.f_velocity,
"MAGNETIC DIPOLE TRANSITION MOMENT": x.mdtm,
"ROTATORY STRENGTH (LEN)": x.R_length,
"ROTATORY STRENGTH (VEL)": x.R_velocity,
"SYMMETRY": x.irrep_trans,
"SPIN": x.spin_mult,
"RIGHT EIGENVECTOR ALPHA": x.R_eigvec if restricted else x.R_eigvec[0],
"LEFT EIGENVECTOR ALPHA": x.L_eigvec if restricted else x.L_eigvec[0],
"RIGHT EIGENVECTOR BETA": x.R_eigvec if restricted else x.R_eigvec[1],
"LEFT EIGENVECTOR BETA": x.L_eigvec if restricted else x.L_eigvec[1],
})
# stash in psivars/wfnvars
ssuper_name = wfn.functional().name()
wfn.set_variable(f"TD-{ssuper_name} ROOT {i+1} TOTAL ENERGY - {x.irrep_ES} SYMMETRY", E_tot_au)
wfn.set_variable(f"TD-{ssuper_name} ROOT 0 -> ROOT {i+1} EXCITATION ENERGY - {x.irrep_ES} SYMMETRY", x.E_ex_au)
wfn.set_variable(f"TD-{ssuper_name} ROOT 0 -> ROOT {i+1} OSCILLATOR STRENGTH (LEN) - {x.irrep_ES} SYMMETRY",
x.f_length)
wfn.set_variable(f"TD-{ssuper_name} ROOT 0 -> ROOT {i+1} OSCILLATOR STRENGTH (VEL) - {x.irrep_ES} SYMMETRY",
x.f_velocity)
wfn.set_variable(f"TD-{ssuper_name} ROOT 0 -> ROOT {i+1} ROTATORY STRENGTH (LEN) - {x.irrep_ES} SYMMETRY",
x.R_length)
wfn.set_variable(f"TD-{ssuper_name} ROOT 0 -> ROOT {i+1} ROTATORY STRENGTH (VEL) - {x.irrep_ES} SYMMETRY",
x.R_velocity)
wfn.set_array_variable(
f"TD-{ssuper_name} ROOT 0 -> ROOT {i+1} ELECTRIC TRANSITION DIPOLE MOMENT (LEN) - {x.irrep_ES} SYMMETRY",
core.Matrix.from_array(x.edtm_length.reshape((1, 3))))
wfn.set_array_variable(
f"TD-{ssuper_name} ROOT 0 -> ROOT {i+1} ELECTRIC TRANSITION DIPOLE MOMENT (VEL) - {x.irrep_ES} SYMMETRY",
core.Matrix.from_array(x.edtm_velocity.reshape((1, 3))))
wfn.set_array_variable(
f"TD-{ssuper_name} ROOT 0 -> ROOT {i+1} MAGNETIC TRANSITION DIPOLE MOMENT - {x.irrep_ES} SYMMETRY",
core.Matrix.from_array(x.mdtm.reshape((1, 3))))
wfn.set_array_variable(
f"TD-{ssuper_name} ROOT 0 -> ROOT {i+1} RIGHT EIGENVECTOR ALPHA - {x.irrep_ES} SYMMETRY",
x.R_eigvec if restricted else x.R_eigvec[0])
wfn.set_array_variable(f"TD-{ssuper_name} ROOT 0 -> ROOT {i+1} LEFT EIGENVECTOR ALPHA - {x.irrep_ES} SYMMETRY",
x.L_eigvec if restricted else x.L_eigvec[0])
wfn.set_array_variable(f"TD-{ssuper_name} ROOT 0 -> ROOT {i+1} RIGHT EIGENVECTOR BETA - {x.irrep_ES} SYMMETRY",
x.R_eigvec if restricted else x.R_eigvec[1])
        wfn.set_array_variable(f"TD-{ssuper_name} ROOT 0 -> ROOT {i+1} LEFT EIGENVECTOR BETA - {x.irrep_ES} SYMMETRY",
x.L_eigvec if restricted else x.L_eigvec[1])
core.print_out(
f" {i+1:^4} {sym_descr:^20} {x.E_ex_au:< 15.5f} {E_ex_ev:< 15.5f} {E_tot_au:< 15.5f} {x.f_length:< 15.4f} {x.f_velocity:< 15.4f} {x.R_length:< 15.4f} {x.R_velocity:< 15.4f}\n"
)
core.print_out("\n")
return solver_results
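# --- Hedged usage sketch (not part of the original module) ---
# The guarded block below only illustrates the call signature of
# tdscf_excitations; it assumes a working Psi4 installation and a small test
# molecule, and it does nothing when this module is merely imported.
if __name__ == "__main__":
    import psi4
    psi4.geometry("""
    O
    H 1 0.96
    H 1 0.96 2 104.5
    """)
    psi4.set_options({"basis": "cc-pvdz"})
    _, scf_wfn = psi4.energy("scf", return_wfn=True)
    # four lowest singlet roots within the Tamm-Dancoff approximation (CIS for a HF reference)
    for root in tdscf_excitations(scf_wfn, states=4, tda=True):
        print(root["EXCITATION ENERGY"], root["OSCILLATOR STRENGTH (LEN)"])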
|
dgasmith/psi4
|
psi4/driver/procrouting/response/scf_response.py
|
Python
|
lgpl-3.0
| 28,511 | 0.002876 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This is a doctest example with Numpy arrays.
For more information about doctest, see
https://docs.python.org/3/library/doctest.html (reference)
and
www.fil.univ-lille1.fr/~L1S2API/CoursTP/tp_doctest.html (nice examples in
French).
To run doctest, execute this script (thanks to the
`if __name__ == "__main__": import doctest ; doctest.testmod()` directives)
or execute the following command in a terminal::
python3 -m doctest datapipe/io/images.py
"""
import numpy as np
def example1():
"""A very basic doctest example.
Notes
-----
The numpy module is imported at the end of this file, in the test::
if __name__ == "__main__":
import doctest
import numpy
doctest.testmod()
Examples
--------
>>> numpy.array([1, 2, 3])
array([1, 2, 3])
"""
pass
def example2():
"""A very basic doctest example to test values returned by this function.
Examples
--------
>>> example2()
array([1, 2, 3])
"""
return numpy.array([1, 2, 3])
def example3(a):
"""A very basic example.
Examples
--------
>>> a = numpy.array([3, 1, 2])
>>> example3(a)
>>> a
array([1, 2, 3])
"""
a.sort()
def example4(a):
"""Replace *in-place* `NaN` values in `a` by zeros.
Replace `NaN` ("Not a Number") values in `a` by zeros.
Parameters
----------
    a : array_like
The image to process. `NaN` values are replaced **in-place** thus this
function changes the provided object.
Returns
-------
array_like
Returns a boolean mask array indicating whether values in `a`
        initially contained `NaN` values (`True`) or not (`False`). This array
is defined by the instruction `np.isnan(a)`.
Notes
-----
`NaN` values are replaced **in-place** in the provided `a`
parameter.
Examples
--------
>>> a = numpy.array([1., 2., numpy.nan])
>>> a
array([ 1., 2., nan])
>>> example4(a)
array([False, False, True], dtype=bool)
Be careful with white space! The following will work...
>>> a
array([ 1., 2., 0.])
    but this one wouldn't
# >>> a
# array([ 1., 2., 0.])
As an alternative, the `doctest: +NORMALIZE_WHITESPACE` can be used (see
https://docs.python.org/3/library/doctest.html#doctest.NORMALIZE_WHITESPACE
and http://www.fil.univ-lille1.fr/~L1S2API/CoursTP/tp_doctest.html)
>>> a
... # doctest: +NORMALIZE_WHITESPACE
array([ 1., 2., 0.])
but the space before the '1' is still required...
"""
nan_mask = np.isnan(a)
a[nan_mask] = 0
return nan_mask
if __name__ == "__main__":
import doctest
import numpy
doctest.testmod()
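# Hedged note (the file name below is taken from this example's own path):
# doctest can also report verbosely from the command line, e.g.
#   python3 -m doctest numpy_example.py -v
# which prints every example as it is tried instead of staying silent on success.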
|
jeremiedecock/snippets
|
python/doctest/numpy_example.py
|
Python
|
mit
| 2,806 | 0.000713 |
import unittest
import time
from datetime import datetime
from app import create_app, db
from app.models import User, AnonymousUser, Role, Permission
class UserModelTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
Role.insert_roles()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_password_setter(self):
u = User(password='cat')
self.assertTrue(u.password_hash is not None)
def test_no_password_getter(self):
u = User(password='cat')
with self.assertRaises(AttributeError):
u.password
def test_password_verification(self):
u = User(password='cat')
self.assertTrue(u.verify_password('cat'))
self.assertFalse(u.verify_password('dog'))
def test_password_salts_are_random(self):
u = User(password='cat')
u2 = User(password='cat')
self.assertTrue(u.password_hash != u2.password_hash)
def test_valid_confirmation_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_confirmation_token()
self.assertTrue(u.confirm(token))
def test_invalid_confirmation_token(self):
u1 = User(password='cat')
u2 = User(password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_confirmation_token()
self.assertFalse(u2.confirm(token))
def test_expired_confirmation_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_confirmation_token(1)
time.sleep(2)
self.assertFalse(u.confirm(token))
def test_valid_reset_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_reset_token()
self.assertTrue(u.reset_password(token, 'dog'))
self.assertTrue(u.verify_password('dog'))
def test_invalid_reset_token(self):
u1 = User(password='cat')
u2 = User(password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_reset_token()
self.assertFalse(u2.reset_password(token, 'horse'))
self.assertTrue(u2.verify_password('dog'))
def test_valid_email_change_token(self):
u = User(email='john@example.com', password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_email_change_token('susan@example.org')
self.assertTrue(u.change_email(token))
self.assertTrue(u.email == 'susan@example.org')
def test_invalid_email_change_token(self):
u1 = User(email='john@example.com', password='cat')
u2 = User(email='susan@example.org', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_email_change_token('david@example.net')
self.assertFalse(u2.change_email(token))
self.assertTrue(u2.email == 'susan@example.org')
def test_duplicate_email_change_token(self):
u1 = User(email='john@example.com', password='cat')
u2 = User(email='susan@example.org', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u2.generate_email_change_token('john@example.com')
self.assertFalse(u2.change_email(token))
self.assertTrue(u2.email == 'susan@example.org')
def test_roles_and_permissions(self):
u = User(email='john@example.com', password='cat')
self.assertTrue(u.can(Permission.WRITE_ARTICLES))
self.assertFalse(u.can(Permission.MODERATE_COMMENTS))
def test_anonymous_user(self):
u = AnonymousUser()
self.assertFalse(u.can(Permission.FOLLOW))
def test_timestamps(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
self.assertTrue(
(datetime.utcnow() - u.member_since).total_seconds() < 3)
self.assertTrue(
(datetime.utcnow() - u.last_seen).total_seconds() < 3)
def test_ping(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
time.sleep(2)
last_seen_before = u.last_seen
u.ping()
self.assertTrue(u.last_seen > last_seen_before)
def test_gravatar(self):
u = User(email='john@example.com', password='cat')
with self.app.test_request_context('/'):
gravatar = u.gravatar()
gravatar_256 = u.gravatar(size=256)
gravatar_pg = u.gravatar(rating='pg')
gravatar_retro = u.gravatar(default='retro')
with self.app.test_request_context('/', base_url='https://example.com'):
gravatar_ssl = u.gravatar()
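        # The hex digest checked below is the MD5 hash of 'john@example.com',
        # which is how Gravatar derives avatar URLs from email addresses.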
self.assertTrue('http://www.gravatar.com/avatar/' +
                        'd4c74594d841139328695756648b6bd6' in gravatar)
self.assertTrue('s=256' in gravatar_256)
self.assertTrue('r=pg' in gravatar_pg)
self.assertTrue('d=retro' in gravatar_retro)
self.assertTrue('https://secure.gravatar.com/avatar/' +
'd4c74594d841139328695756648b6bd6' in gravatar_ssl)
|
Ginray/my-flask-blog
|
tests/test_user_model.py
|
Python
|
mit
| 5,437 | 0.000368 |
import numpy as np
import torch
from PIL import Image
from argparse import ArgumentParser
from torch.optim import SGD, Adam
from torch.autograd import Variable
from torch.utils.data import DataLoader
from torchvision.transforms import Compose, CenterCrop, Normalize
from torchvision.transforms import ToTensor, ToPILImage
from piwise.dataset import VOC12
from piwise.network import FCN8, FCN16, FCN32, UNet, PSPNet, SegNet
from piwise.criterion import CrossEntropyLoss2d
from piwise.transform import Relabel, ToLabel, Colorize
from piwise.visualize import Dashboard
NUM_CHANNELS = 3
NUM_CLASSES = 22
color_transform = Colorize()
image_transform = ToPILImage()
input_transform = Compose([
CenterCrop(256),
ToTensor(),
Normalize([.485, .456, .406], [.229, .224, .225]),
])
target_transform = Compose([
CenterCrop(256),
ToLabel(),
Relabel(255, 21),
])
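# Hedged note: in the VOC segmentation ground truth, boundary pixels carry the
# "void" label 255; Relabel(255, 21) folds them onto class index 21 so the
# targets match the 22-class network output (NUM_CLASSES above).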
def train(args, model):
model.train()
weight = torch.ones(22)
weight[0] = 0
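    # class 0 gets zero weight and therefore does not contribute to the
    # cross-entropy loss (hedged: class 0 is the VOC background label)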
loader = DataLoader(VOC12(args.datadir, input_transform, target_transform),
num_workers=args.num_workers, batch_size=args.batch_size, shuffle=True)
if args.cuda:
criterion = CrossEntropyLoss2d(weight.cuda())
else:
criterion = CrossEntropyLoss2d(weight)
optimizer = Adam(model.parameters())
if args.model.startswith('FCN'):
optimizer = SGD(model.parameters(), 1e-4, .9, 2e-5)
if args.model.startswith('PSP'):
optimizer = SGD(model.parameters(), 1e-2, .9, 1e-4)
if args.model.startswith('Seg'):
optimizer = SGD(model.parameters(), 1e-3, .9)
if args.steps_plot > 0:
board = Dashboard(args.port)
for epoch in range(1, args.num_epochs+1):
epoch_loss = []
for step, (images, labels) in enumerate(loader):
if args.cuda:
images = images.cuda()
labels = labels.cuda()
inputs = Variable(images)
targets = Variable(labels)
outputs = model(inputs)
optimizer.zero_grad()
loss = criterion(outputs, targets[:, 0])
loss.backward()
optimizer.step()
epoch_loss.append(loss.data[0])
if args.steps_plot > 0 and step % args.steps_plot == 0:
image = inputs[0].cpu().data
image[0] = image[0] * .229 + .485
image[1] = image[1] * .224 + .456
image[2] = image[2] * .225 + .406
board.image(image,
f'input (epoch: {epoch}, step: {step})')
board.image(color_transform(outputs[0].cpu().max(0)[1].data),
f'output (epoch: {epoch}, step: {step})')
board.image(color_transform(targets[0].cpu().data),
f'target (epoch: {epoch}, step: {step})')
if args.steps_loss > 0 and step % args.steps_loss == 0:
average = sum(epoch_loss) / len(epoch_loss)
print(f'loss: {average} (epoch: {epoch}, step: {step})')
if args.steps_save > 0 and step % args.steps_save == 0:
filename = f'{args.model}-{epoch:03}-{step:04}.pth'
torch.save(model.state_dict(), filename)
print(f'save: {filename} (epoch: {epoch}, step: {step})')
def evaluate(args, model):
model.eval()
image = input_transform(Image.open(args.image))
label = model(Variable(image, volatile=True).unsqueeze(0))
label = color_transform(label[0].data.max(0)[1])
image_transform(label).save(args.label)
def main(args):
Net = None
if args.model == 'fcn8':
Net = FCN8
if args.model == 'fcn16':
Net = FCN16
    if args.model == 'fcn32':
        Net = FCN32
if args.model == 'unet':
Net = UNet
if args.model == 'pspnet':
Net = PSPNet
if args.model == 'segnet':
Net = SegNet
assert Net is not None, f'model {args.model} not available'
model = Net(NUM_CLASSES)
if args.cuda:
model = model.cuda()
if args.state:
try:
model.load_state_dict(torch.load(args.state))
except AssertionError:
model.load_state_dict(torch.load(args.state,
map_location=lambda storage, loc: storage))
if args.mode == 'eval':
evaluate(args, model)
if args.mode == 'train':
train(args, model)
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('--cuda', action='store_true')
parser.add_argument('--model', required=True)
parser.add_argument('--state')
subparsers = parser.add_subparsers(dest='mode')
subparsers.required = True
parser_eval = subparsers.add_parser('eval')
parser_eval.add_argument('image')
parser_eval.add_argument('label')
parser_train = subparsers.add_parser('train')
parser_train.add_argument('--port', type=int, default=80)
parser_train.add_argument('--datadir', required=True)
parser_train.add_argument('--num-epochs', type=int, default=32)
parser_train.add_argument('--num-workers', type=int, default=4)
parser_train.add_argument('--batch-size', type=int, default=1)
parser_train.add_argument('--steps-loss', type=int, default=50)
parser_train.add_argument('--steps-plot', type=int, default=0)
parser_train.add_argument('--steps-save', type=int, default=500)
main(parser.parse_args())
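# Hedged usage sketch (the dataset path and checkpoint name are assumptions;
# the flags mirror the ArgumentParser definitions above):
#   training:  python main.py --cuda --model unet train --datadir data/VOC2012 --batch-size 4
#   inference: python main.py --cuda --model unet --state unet-010-0500.pth eval input.jpg output.png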
|
bodokaiser/piwise
|
main.py
|
Python
|
bsd-3-clause
| 5,437 | 0.001839 |
# read_hadamard_file.py
# Reads data from a text file to create a 3D
# version of a given Hadamard Matrix.
# Created by Rick Henderson
# Created on June 4, 2015
# Completed June 5, 2015
# Note: A "Hadamard File" is a text file containing
# rows of + and - where the + indicates a 1 or a cube
# and the - represents a 0 or a space.
import bpy
# Set the order (size) of the matrix
nOrder = 12
# You can also change these values if you want to alter the offset between the cubes
xOffset = 1.0
yOffset = 1.0
zOffset = 0 # You would have to alter the code more if you want a 3D array of cubes
xpos = 0
ypos = 0
char_number = 0
# Open the file to read from
# Modified technique from DiveIntoPython3.net/files.html
line_number = 0
with open('c:/had12.txt', encoding='utf-8') as a_file:
for each_row in a_file:
line_number += 1
# Just print the current row to the console as a test
print(each_row.rstrip())
for a_char in each_row:
char_number += 1
# If the current character is +, generate a cube then position it
if a_char == '+':
bpy.ops.mesh.primitive_cube_add(radius=0.5)
bpy.context.object.location[0] = line_number * xOffset
bpy.context.object.location[1] = char_number * yOffset
# Now an entire row has been read, so reset char_number to 0
char_number = 0
# Program Ends
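# Hedged illustration of the expected input: each line of the text file is one
# matrix row written with '+' and '-'; for example (rows are made up, not the
# actual contents of had12.txt):
#   ++++++++++++
#   +-+-+-+-+-+-
# Only the '+' positions produce cubes in the generated scene.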
|
rickhenderson/code-samples
|
python-blender/read_hadamard_file.py
|
Python
|
gpl-3.0
| 1,503 | 0.007984 |
from numpy.distutils.core import setup, Extension
#from setuptools import setup, Extension
setup(
name = "Infer", version = "1.0",
description='Python version of MCMC, plus other inference codes under development',
author='Neale Gibson',
author_email='ngibson@eso.org',
packages=['Infer'],
package_dir={'Infer':'src'},
#and extension package for solving toeplitz matrices...
ext_modules = [
Extension("Infer.LevinsonTrenchZoharSolve",sources=["src/LevinsonTrenchZoharSolve.c"],),
]
)
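# Hedged usage note: the C extension above is compiled during a normal install,
# e.g. `pip install .` or `python setup.py build_ext --inplace` run from the
# project root.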
|
nealegibson/Infer
|
setup.py
|
Python
|
gpl-3.0
| 518 | 0.027027 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from . import communication_config
from . import communication_job
from . import communication_attachment
from . import res_partner
from . import email
from . import crm_phonecall
from . import ir_attachment
from . import mail_template
from . import communication_dashboard
from . import report_with_omr
|
eicher31/compassion-modules
|
partner_communication/models/__init__.py
|
Python
|
agpl-3.0
| 706 | 0.001416 |
# -*- coding: utf-8 -*-
###############################################################################
#
# ListPlaylistsByID
# Returns a collection of playlists that match the provided IDs.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class ListPlaylistsByID(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the ListPlaylistsByID Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(ListPlaylistsByID, self).__init__(temboo_session, '/Library/YouTube/Playlists/ListPlaylistsByID')
def new_input_set(self):
return ListPlaylistsByIDInputSet()
def _make_result_set(self, result, path):
return ListPlaylistsByIDResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return ListPlaylistsByIDChoreographyExecution(session, exec_id, path)
class ListPlaylistsByIDInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the ListPlaylistsByID
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_APIKey(self, value):
"""
Set the value of the APIKey input for this Choreo. ((optional, string) The API Key provided by Google for simple API access when you do not need to access user data.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('APIKey', value)
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((optional, string) A valid access token retrieved during the OAuth process. This is required for OAuth authentication unless you provide the ClientID, ClientSecret, and RefreshToken to generate a new access token.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('AccessToken', value)
def set_ClientID(self, value):
"""
Set the value of the ClientID input for this Choreo. ((conditional, string) The Client ID provided by Google. Required for OAuth authentication unless providing a valid AccessToken.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('ClientID', value)
def set_ClientSecret(self, value):
"""
Set the value of the ClientSecret input for this Choreo. ((conditional, string) The Client Secret provided by Google. Required for OAuth authentication unless providing a valid AccessToken.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('ClientSecret', value)
def set_Fields(self, value):
"""
Set the value of the Fields input for this Choreo. ((optional, string) Allows you to specify a subset of fields to include in the response using an xpath-like syntax (i.e. items/snippet/title).)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('Fields', value)
def set_MaxResults(self, value):
"""
Set the value of the MaxResults input for this Choreo. ((optional, integer) The maximum number of results to return.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('MaxResults', value)
def set_PageToken(self, value):
"""
Set the value of the PageToken input for this Choreo. ((optional, string) The "nextPageToken" found in the response which is used to page through results.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('PageToken', value)
def set_Part(self, value):
"""
Set the value of the Part input for this Choreo. ((optional, string) Specifies a comma-separated list of playlist resource properties that the API response will include. Part names that you can pass are: id, snippet, and status.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('Part', value)
def set_PlaylistID(self, value):
"""
Set the value of the PlaylistID input for this Choreo. ((required, string) A comma-separated list of the YouTube playlist ID(s) for the resource(s) that are being retrieved.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('PlaylistID', value)
def set_RefreshToken(self, value):
"""
Set the value of the RefreshToken input for this Choreo. ((conditional, string) An OAuth refresh token used to generate a new access token when the original token is expired. Required for OAuth authentication unless providing a valid AccessToken.)
"""
super(ListPlaylistsByIDInputSet, self)._set_input('RefreshToken', value)
class ListPlaylistsByIDResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the ListPlaylistsByID Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from YouTube.)
"""
return self._output.get('Response', None)
def get_NewAccessToken(self):
"""
Retrieve the value for the "NewAccessToken" output from this Choreo execution. ((string) Contains a new AccessToken when the RefreshToken is provided.)
"""
return self._output.get('NewAccessToken', None)
class ListPlaylistsByIDChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return ListPlaylistsByIDResultSet(response, path)
|
jordanemedlock/psychtruths
|
temboo/core/Library/YouTube/Playlists/ListPlaylistsByID.py
|
Python
|
apache-2.0
| 6,403 | 0.00531 |
#!/usr/bin/env python
# "manhole" entry point, friendlier ipython startup to remote container
__author__ = 'Dave Foster <dfoster@asascience.com>'
def main():
import sys, os, re, errno, json, socket
from pkg_resources import load_entry_point
    r = re.compile(r'manhole-(\d+)\.json')
if len(sys.argv) == 2:
mh_file = sys.argv[1]
else:
# find manhole file in local dir
mh_files = [f for f in os.listdir(os.getcwd()) if r.search(f) is not None]
if len(mh_files) == 0:
print >>sys.stderr, "No manhole files detected, specify it manually"
sys.exit(1)
elif len(mh_files) > 1:
def legal_manhole_file(f):
"""
Helper method to check if a process exists and is likely a manhole-able container.
                @return True/False if it is a likely container.
"""
mh_pid = int(r.search(f).group(1))
try:
os.getpgid(mh_pid)
except OSError as e:
if e.errno == errno.ESRCH:
return False
raise # unexpected, just re-raise
# the pid seems legal, now check status of sockets - the pid may be reused
with open(f) as ff:
mh_doc = json.load(ff)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind((mh_doc['ip'], mh_doc['shell_port']))
except socket.error as e:
if e.errno == errno.EADDRINUSE:
return True
raise # unexpected, re-raise
finally:
s.close()
return False
# try to see if these are active processes
legal_mh_files = filter(legal_manhole_file, mh_files)
if len(legal_mh_files) > 1:
print >>sys.stderr, "Multiple legal manhole files detected, specify it manually:", legal_mh_files
sys.exit(1)
# we found a single legal file, use it
mh_file = legal_mh_files[0]
# perform cleanup of stale files
dead_mh_files = [x for x in mh_files if x not in legal_mh_files]
for df in dead_mh_files:
print >>sys.stderr, "Cleaning up stale manhole file", df
os.unlink(df)
else:
mh_file = mh_files[0]
if not os.access(mh_file, os.R_OK):
print >>sys.stderr, "Manhole file (%s) does not exist" % mh_file
sys.exit(1)
mhpid = r.search(mh_file).group(1)
# configure branding
manhole_logo = """
__ __ _______ __ _ __ __ _______ ___ _______
| |_| || _ || | | || | | || || | | |
| || |_| || |_| || |_| || _ || | | ___|
| || || || || | | || | | |___
| || || _ || || |_| || |___ | ___|
| ||_|| || _ || | | || _ || || || |___
|_| |_||__| |__||_| |__||__| |__||_______||_______||_______|
"""
# manipulate argv!
sys.argv = [sys.argv[0], "console", "--existing", mh_file,
"--PromptManager.in_template=>o> ",
"--PromptManager.in2_template=... ",
"--PromptManager.out_template=--> ",
"--TerminalInteractiveShell.banner1=%s" % manhole_logo,
"--TerminalInteractiveShell.banner2=SciON Container Manhole, connected to %s\n(press Ctrl-D to detach, quit() to exit container)\n" % mhpid]
# HACK: Mock out client shutdown to avoid default shutdown on Ctrl-D
from mock import patch
with patch("IPython.kernel.client.KernelClient.shutdown"):
ipy_entry = load_entry_point('ipython', 'console_scripts', 'ipython')()
sys.exit(ipy_entry)
if __name__ == '__main__':
main()
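# Hedged usage note: run with no arguments from a directory that contains a
# manhole-<pid>.json file (the script then auto-detects and validates it), or
# name the file explicitly, e.g.
#   python manhole.py manhole-12345.json
# The PID in the example is illustrative only.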
|
scionrep/scioncc
|
src/scripts/manhole.py
|
Python
|
bsd-2-clause
| 3,942 | 0.003298 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the output module field formatting helper."""
import unittest
from dfdatetime import semantic_time as dfdatetime_semantic_time
from dfvfs.path import fake_path_spec
from plaso.containers import events
from plaso.lib import definitions
from plaso.output import formatting_helper
from tests.containers import test_lib as containers_test_lib
from tests.output import test_lib
class TestFieldFormattingHelper(formatting_helper.FieldFormattingHelper):
"""Field formatter helper for testing purposes."""
_FIELD_FORMAT_CALLBACKS = {'zone': '_FormatTimeZone'}
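  # Maps the output field name 'zone' to the _FormatTimeZone callback; this is
  # the dispatch that testGetFormattedField exercises via GetFormattedField.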
class FieldFormattingHelperTest(test_lib.OutputModuleTestCase):
"""Test the output module field formatting helper."""
# pylint: disable=protected-access
_TEST_EVENTS = [
{'data_type': 'test:event',
'filename': 'log/syslog.1',
'hostname': 'ubuntu',
'path_spec': fake_path_spec.FakePathSpec(
location='log/syslog.1'),
'text': (
'Reporter <CRON> PID: 8442 (pam_unix(cron:session): session\n '
'closed for user root)'),
'timestamp': '2012-06-27 18:17:01',
'timestamp_desc': definitions.TIME_DESCRIPTION_CHANGE}]
def testFormatDateTime(self):
"""Tests the _FormatDateTime function with dynamic time."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, '2012-06-27T18:17:01.000000+00:00')
output_mediator.SetTimezone('Europe/Amsterdam')
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, '2012-06-27T20:17:01.000000+02:00')
output_mediator.SetTimezone('UTC')
event.date_time = dfdatetime_semantic_time.InvalidTime()
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, 'Invalid')
def testFormatDateTimeWithoutDynamicTime(self):
"""Tests the _FormatDateTime function without dynamic time."""
output_mediator = self._CreateOutputMediator(dynamic_time=False)
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
# Test with event.date_time
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, '2012-06-27T18:17:01.000000+00:00')
output_mediator.SetTimezone('Europe/Amsterdam')
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, '2012-06-27T20:17:01.000000+02:00')
output_mediator.SetTimezone('UTC')
event.date_time = dfdatetime_semantic_time.InvalidTime()
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, '0000-00-00T00:00:00.000000+00:00')
# Test with event.timestamp
event.date_time = None
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, '2012-06-27T18:17:01.000000+00:00')
event.timestamp = 0
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, '0000-00-00T00:00:00.000000+00:00')
event.timestamp = -9223372036854775808
date_time_string = test_helper._FormatDateTime(
event, event_data, event_data_stream)
self.assertEqual(date_time_string, '0000-00-00T00:00:00.000000+00:00')
def testFormatDisplayName(self):
"""Tests the _FormatDisplayName function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
display_name_string = test_helper._FormatDisplayName(
event, event_data, event_data_stream)
self.assertEqual(display_name_string, 'FAKE:log/syslog.1')
def testFormatFilename(self):
"""Tests the _FormatFilename function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
filename_string = test_helper._FormatFilename(
event, event_data, event_data_stream)
self.assertEqual(filename_string, 'log/syslog.1')
def testFormatHostname(self):
"""Tests the _FormatHostname function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
hostname_string = test_helper._FormatHostname(
event, event_data, event_data_stream)
self.assertEqual(hostname_string, 'ubuntu')
def testFormatInode(self):
"""Tests the _FormatInode function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
inode_string = test_helper._FormatInode(
event, event_data, event_data_stream)
self.assertEqual(inode_string, '-')
def testFormatMACB(self):
"""Tests the _FormatMACB function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
macb_string = test_helper._FormatMACB(event, event_data, event_data_stream)
self.assertEqual(macb_string, '..C.')
def testFormatMessage(self):
"""Tests the _FormatMessage function."""
output_mediator = self._CreateOutputMediator()
formatters_directory_path = self._GetTestFilePath(['formatters'])
output_mediator.ReadMessageFormattersFromDirectory(
formatters_directory_path)
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
message_string = test_helper._FormatMessage(
event, event_data, event_data_stream)
expected_message_string = (
'Reporter <CRON> PID: 8442 (pam_unix(cron:session): session closed '
'for user root)')
self.assertEqual(message_string, expected_message_string)
def testFormatMessageShort(self):
"""Tests the _FormatMessageShort function."""
output_mediator = self._CreateOutputMediator()
formatters_directory_path = self._GetTestFilePath(['formatters'])
output_mediator.ReadMessageFormattersFromDirectory(
formatters_directory_path)
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
message_short_string = test_helper._FormatMessageShort(
event, event_data, event_data_stream)
expected_message_short_string = (
'Reporter <CRON> PID: 8442 (pam_unix(cron:session): session closed '
'for user root)')
self.assertEqual(message_short_string, expected_message_short_string)
def testFormatSource(self):
"""Tests the _FormatSource function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
source_string = test_helper._FormatSource(
event, event_data, event_data_stream)
self.assertEqual(source_string, 'Test log file')
def testFormatSourceShort(self):
"""Tests the _FormatSourceShort function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
source_short_string = test_helper._FormatSourceShort(
event, event_data, event_data_stream)
self.assertEqual(source_short_string, 'FILE')
def testFormatTag(self):
"""Tests the _FormatTag function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
tag_string = test_helper._FormatTag(None)
self.assertEqual(tag_string, '-')
event_tag = events.EventTag()
event_tag.AddLabel('one')
event_tag.AddLabel('two')
tag_string = test_helper._FormatTag(event_tag)
self.assertEqual(tag_string, 'one two')
def testFormatTime(self):
"""Tests the _FormatTime function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
# Test with event.date_time
time_string = test_helper._FormatTime(
event, event_data, event_data_stream)
self.assertEqual(time_string, '18:17:01')
output_mediator.SetTimezone('Europe/Amsterdam')
time_string = test_helper._FormatTime(
event, event_data, event_data_stream)
self.assertEqual(time_string, '20:17:01')
output_mediator.SetTimezone('UTC')
# Test with event.timestamp
event.date_time = None
time_string = test_helper._FormatTime(
event, event_data, event_data_stream)
self.assertEqual(time_string, '18:17:01')
event.timestamp = 0
time_string = test_helper._FormatTime(
event, event_data, event_data_stream)
self.assertEqual(time_string, '--:--:--')
event.timestamp = -9223372036854775808
time_string = test_helper._FormatTime(
event, event_data, event_data_stream)
self.assertEqual(time_string, '--:--:--')
def testFormatTimeZone(self):
"""Tests the _FormatTimeZone function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
zone_string = test_helper._FormatTimeZone(
event, event_data, event_data_stream)
self.assertEqual(zone_string, 'UTC')
def testFormatUsername(self):
"""Tests the _FormatUsername function."""
output_mediator = self._CreateOutputMediator()
test_helper = formatting_helper.FieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
username_string = test_helper._FormatUsername(
event, event_data, event_data_stream)
self.assertEqual(username_string, '-')
# TODO: add coverage for _ReportEventError
def testGetFormattedField(self):
"""Tests the GetFormattedField function."""
output_mediator = self._CreateOutputMediator()
test_helper = TestFieldFormattingHelper(output_mediator)
event, event_data, event_data_stream = (
containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0]))
zone_string = test_helper.GetFormattedField(
'zone', event, event_data, event_data_stream, None)
self.assertEqual(zone_string, 'UTC')
if __name__ == '__main__':
unittest.main()
|
kiddinn/plaso
|
tests/output/formatting_helper.py
|
Python
|
apache-2.0
| 12,029 | 0.001912 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import url
from openstack_dashboard.dashboards.admin.hypervisors.compute import views
urlpatterns = [
url(r'^(?P<compute_host>[^/]+)/evacuate_host$',
views.EvacuateHostView.as_view(),
name='evacuate_host'),
url(r'^(?P<compute_host>[^/]+)/disable_service$',
views.DisableServiceView.as_view(),
name='disable_service'),
url(r'^(?P<compute_host>[^/]+)/migrate_host$',
views.MigrateHostView.as_view(),
name='migrate_host'),
]
|
openstack/horizon
|
openstack_dashboard/dashboards/admin/hypervisors/compute/urls.py
|
Python
|
apache-2.0
| 1,052 | 0 |
# -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
import res_partner
import oehealth_annotation
import oehealth_professional_category
import oehealth_professional
import oehealth_tag
import oehealth_event_participant
import oehealth_specialty
|
CLVsol/oehealth
|
oehealth_professional/__init__.py
|
Python
|
agpl-3.0
| 1,597 | 0.010645 |
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Platform library - ml cell magic."""
from __future__ import absolute_import
from __future__ import unicode_literals
try:
import IPython
import IPython.core.display
import IPython.core.magic
except ImportError:
raise Exception('This module can only be loaded in ipython.')
import argparse
import json
import os
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import shutil
import six
from skimage.segmentation import mark_boundaries
import subprocess
import tempfile
import textwrap
from tensorflow.python.lib.io import file_io
import urllib
import google.datalab
import google.datalab.bigquery as bq
from google.datalab import Context
import google.datalab.ml as datalab_ml
import google.datalab.utils.commands
import google.datalab.contrib.mlworkbench._local_predict as _local_predict
import google.datalab.contrib.mlworkbench._shell_process as _shell_process
import google.datalab.contrib.mlworkbench._archive as _archive
import google.datalab.contrib.mlworkbench._prediction_explainer as _prediction_explainer
MLTOOLBOX_CODE_PATH = '/datalab/lib/pydatalab/solutionbox/code_free_ml/mltoolbox/code_free_ml/'
@IPython.core.magic.register_line_cell_magic
def ml(line, cell=None):
"""Implements the datalab cell magic for MLWorkbench operations.
Args:
line: the contents of the ml command line.
Returns:
The results of executing the cell.
"""
parser = google.datalab.utils.commands.CommandParser(
prog='%ml',
description=textwrap.dedent("""\
Execute MLWorkbench operations
Use "%ml <command> -h" for help on a specific command.
"""))
analyze_parser = parser.subcommand(
'analyze',
formatter_class=argparse.RawTextHelpFormatter,
help='Analyze training data and generate stats, such as min/max/mean '
'for numeric values, vocabulary for text columns.',
epilog=textwrap.dedent("""\
Example usage:
%%ml analyze [--cloud]
output: path/to/dir
training_data:
csv: path/to/csv
schema:
- name: serialId
type: STRING
- name: num1
type: FLOAT
- name: num2
type: INTEGER
- name: text1
type: STRING
features:
serialId:
transform: key
num1:
transform: scale
value: 1
num2:
transform: identity
text1:
transform: bag_of_words
Also supports in-notebook variables, such as:
%%ml analyze --output path/to/dir
training_data: $my_csv_dataset
features: $features_def"""))
analyze_parser.add_argument('--output', required=True,
help='path of output directory.')
analyze_parser.add_argument('--cloud', action='store_true', default=False,
help='whether to run analysis in cloud or local.')
analyze_parser.add_argument('--package', required=False,
help='A local or GCS tarball path to use as the source. '
'If not set, the default source package will be used.')
analyze_parser.add_cell_argument(
'training_data',
required=True,
help=textwrap.dedent("""\
training data. It is one of the following:
csv (example "csv: file.csv"), or
bigquery_table (example: "bigquery_table: project.dataset.table"), or
bigquery_sql (example: "bigquery_sql: select * from table where num1 > 1.0"), or
a variable defined as google.datalab.ml.CsvDataSet or
google.datalab.ml.BigQueryDataSet."""))
analyze_parser.add_cell_argument(
'features',
required=True,
help=textwrap.dedent("""\
features config indicating how to transform data into features. The
list of supported transforms:
"transform: identity"
does nothing (for numerical columns).
"transform: scale
value: x"
scale a numerical column to [-a, a]. If value is missing, x
defaults to 1.
"transform: one_hot"
treats the string column as categorical and makes one-hot
encoding of it.
"transform: embedding
embedding_dim: d"
treats the string column as categorical and makes embeddings of
it with specified dimension size.
"transform: bag_of_words"
treats the string column as text and make bag of words
transform of it.
"transform: tfidf"
treats the string column as text and make TFIDF transform of it.
"transform: image_to_vec
checkpoint: gs://b/o"
              converts an image GCS URL to embeddings. "checkpoint" is an Inception v3
checkpoint. If absent, a default checkpoint is used.
"transform: target"
denotes the column is the target. If the schema type of this
column is string, a one_hot encoding is automatically applied.
If numerical, an identity transform is automatically applied.
"transform: key"
column contains metadata-like information and will be output
as-is in prediction."""))
analyze_parser.set_defaults(func=_analyze)
transform_parser = parser.subcommand(
'transform',
formatter_class=argparse.RawTextHelpFormatter,
help='Transform the data into tf.example which is more efficient in training.',
epilog=textwrap.dedent("""\
Example usage:
%%ml transform --cloud [--shuffle]
analysis: path/to/analysis_output_folder
output: path/to/dir
prefix: my_filename
batch_size: 100
training_data:
csv: path/to/csv
cloud:
num_workers: 3
worker_machine_type: n1-standard-1
project_id: my_project_id"""))
transform_parser.add_argument('--analysis', required=True,
help='path of analysis output directory.')
transform_parser.add_argument('--output', required=True,
help='path of output directory.')
transform_parser.add_argument(
'--prefix', required=True, metavar='NAME',
help='The prefix of the output file name. The output files will be like '
'NAME_00000_of_00005.tar.gz')
transform_parser.add_argument('--cloud', action='store_true', default=False,
help='whether to run transform in cloud or local.')
transform_parser.add_argument('--shuffle', action='store_true', default=False,
help='whether to shuffle the training data in output.')
transform_parser.add_argument('--batch_size', type=int, default=100,
                                  help='number of instances in a batch to process at once. '
                                       'A larger batch is more efficient but may consume more memory.')
transform_parser.add_argument('--package', required=False,
help='A local or GCS tarball path to use as the source. '
'If not set, the default source package will be used.')
transform_parser.add_cell_argument(
'training_data',
required=True,
help=textwrap.dedent("""\
Training data. A dict containing one of the following:
csv (example: "csv: file.csv"), or
bigquery_table (example: "bigquery_table: project.dataset.table"), or
bigquery_sql (example: "bigquery_sql: select * from table where num1 > 1.0")"""))
transform_parser.add_cell_argument(
'cloud_config',
help=textwrap.dedent("""\
A dictionary of cloud config. All of them are optional.
num_workers: Dataflow number of workers. If not set, DataFlow
service will determine the number.
worker_machine_type: a machine name from
https://cloud.google.com/compute/docs/machine-types
If not given, the service uses the default machine type.
project_id: id of the project to use for DataFlow service. If not set,
Datalab's default project (set by %%datalab project set) is used.
job_name: Unique name for a Dataflow job to use. If not set, a
random name will be used."""))
transform_parser.set_defaults(func=_transform)
train_parser = parser.subcommand(
'train',
formatter_class=argparse.RawTextHelpFormatter,
help='Train a model.',
epilog=textwrap.dedent("""\
Example usage:
%%ml train --cloud
analysis: path/to/analysis_output
output: path/to/dir
training_data:
transformed: path/to/transformed/train
evaluation_data:
            transformed: path/to/transformed/eval
model_args:
model: linear_regression
cloud_config:
region: us-central1"""))
train_parser.add_argument('--analysis', required=True,
help='path of analysis output directory.')
train_parser.add_argument('--output', required=True,
help='path of trained model directory.')
train_parser.add_argument('--cloud', action='store_true', default=False,
help='whether to run training in cloud or local.')
train_parser.add_argument('--package', required=False,
help='A local or GCS tarball path to use as the source. '
'If not set, the default source package will be used.')
train_parser.add_cell_argument(
'training_data',
required=True,
help=textwrap.dedent("""\
Training data. It is either raw csv file pattern, or transformed file pattern.
For example:
"training_data:
csv: /path/to/csv/mycsv*.csv"
or
"training_data:
transformed: /path/to/transformed-*" """))
train_parser.add_cell_argument('evaluation_data', required=True,
help='same as training_data.')
package_model_help = subprocess.Popen(
['python', '-m', 'trainer.task', '--datalab-help'],
cwd=MLTOOLBOX_CODE_PATH,
stdout=subprocess.PIPE).communicate()[0]
package_model_help = ('model_args: a dictionary of model specific args, including:\n\n' +
package_model_help.decode())
train_parser.add_cell_argument('model_args', help=package_model_help)
train_parser.add_cell_argument(
'cloud_config',
help=textwrap.dedent("""\
A dictionary of cloud training config, including:
job_id: the name of the job. If not provided, a default job name is created.
region: see {url}
runtime_version: see "region". Must be a string like '1.2'.
scale_tier: see "region".""".format(
url='https://cloud.google.com/sdk/gcloud/reference/ml-engine/jobs/submit/training')))
train_parser.set_defaults(func=_train)
predict_parser = parser.subcommand(
'predict',
formatter_class=argparse.RawTextHelpFormatter,
help='Predict with local or deployed models. (Good for small datasets).',
epilog=textwrap.dedent("""\
Example usage:
%%ml predict
headers: key,num
model: path/to/model
prediction_data:
- key1,value1
- key2,value2
Or, in another cell, define a list of dict:
my_data = [{'key': 1, 'num': 1.2}, {'key': 2, 'num': 2.8}]
Then:
%%ml predict
headers: key,num
model: path/to/model
prediction_data: $my_data"""))
predict_parser.add_argument('--model', required=True,
help='The model path if not --cloud, or the id in '
'the form of model.version if --cloud.')
predict_parser.add_argument('--headers',
help='Online models only. ' +
                              'The comma-separated headers of the prediction data. ' +
'Order must match the training order.')
predict_parser.add_argument('--image_columns',
help='Online models only. ' +
                              'Comma-separated headers of image URL columns. ' +
'Required if prediction data contains image URL columns.')
predict_parser.add_argument('--no_show_image', action='store_true', default=False,
help='If not set, add a column of images in output.')
predict_parser.add_argument('--cloud', action='store_true', default=False,
help='whether to run prediction in cloud or local.')
predict_parser.add_cell_argument(
'prediction_data',
required=True,
help=textwrap.dedent("""\
Prediction data can be
1) CSV lines in the input cell in yaml format or
2) a local variable which is one of
a) list of dict
b) list of strings of csv lines
c) a Pandas DataFrame"""))
predict_parser.set_defaults(func=_predict)
batch_predict_parser = parser.subcommand(
'batch_predict',
formatter_class=argparse.RawTextHelpFormatter,
help='Batch prediction with local or deployed models. (Good for large datasets)',
epilog=textwrap.dedent("""\
Example usage:
%%ml batch_predict [--cloud]
model: path/to/model
output: path/to/output
format: csv
prediction_data:
csv: path/to/file_pattern"""))
batch_predict_parser.add_argument('--model', required=True,
help='The model path if not --cloud, or the id in '
'the form of model.version if --cloud.')
batch_predict_parser.add_argument('--output', required=True,
help='The path of output directory with prediction results. '
'If --cloud, it has to be GCS path.')
batch_predict_parser.add_argument('--format',
help='csv or json. For cloud run, '
'the only supported format is json.')
batch_predict_parser.add_argument('--batch_size', type=int, default=100,
                                      help='number of instances in a batch to process at once. '
                                           'A larger batch is more efficient but may consume '
'more memory. Only used in local run.')
batch_predict_parser.add_argument('--cloud', action='store_true', default=False,
help='whether to run prediction in cloud or local.')
batch_predict_parser.add_cell_argument(
'prediction_data',
required=True,
help='Data to predict with. Only csv is supported.')
batch_predict_parser.add_cell_argument(
'cloud_config',
help=textwrap.dedent("""\
A dictionary of cloud batch prediction config.
job_id: the name of the job. If not provided, a default job name is created.
region: see {url}
max_worker_count: see reference in "region".""".format(
url='https://cloud.google.com/sdk/gcloud/reference/ml-engine/jobs/submit/prediction'))) # noqa
batch_predict_parser.set_defaults(func=_batch_predict)
explain_parser = parser.subcommand(
'explain',
formatter_class=argparse.RawTextHelpFormatter,
help='Explain a prediction.')
explain_parser.add_argument('--type', required=True, choices=['text', 'image'],
help='the type of column to explain.')
explain_parser.add_argument('--model', required=True,
help='path of the model directory used for prediction.')
explain_parser.add_argument('--labels', required=True,
help='comma separated labels to explain.')
explain_parser.add_argument('--column_name',
help='the name of the column to explain. Optional if text type ' +
'and there is only one text column, or image type and ' +
'there is only one image column.')
explain_parser.add_argument('--num_features', type=int,
help='number of features to analyze. In text, it is number of ' +
'words. In image, it is number of areas.')
explain_parser.add_argument('--num_samples', type=int,
help='size of the neighborhood to learn the linear model. ')
explain_parser.add_argument('--overview_only', action='store_true', default=False,
help='whether to show only the overview. For text only.')
explain_parser.add_argument('--detailview_only', action='store_true', default=False,
help='whether to show only the detail views for each label. ' +
'For text only.')
explain_parser.add_argument('--include_negative', action='store_true', default=False,
                              help='whether to also include negative areas in the explanation. For image only.')
explain_parser.add_argument('--hide_color', type=int, default=0,
                              help='the color to use for the perturbed area. If -1, the ' +
                                   'per-channel average is used instead. For image only.')
explain_parser.add_argument('--batch_size', type=int, default=100,
help='size of batches passed to prediction. For image only.')
explain_parser.add_cell_argument('data', required=True,
help='Prediction Data. Can be a csv line, or a dict.')
explain_parser.set_defaults(func=_explain)
return google.datalab.utils.commands.handle_magic_line(line, cell, parser)
def _abs_path(path):
"""Convert a non-GCS path to its absolute path.
path can contain special filepath characters like '..', '*' and '.'.
Example: If the current folder is /content/datalab/folder1 and path is
'../folder2/files*', then this function returns the string
'/content/datalab/folder2/files*'.
This function is needed if using _shell_process.run_and_monitor() as that
function runs a command in a different folder.
Args:
path: string.
"""
if path.startswith('gs://'):
return path
return os.path.abspath(path)
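# Helper: serialize a Python object as JSON into the given temp directory and return the file path.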
def _create_json_file(tmpdir, data, filename):
json_file = os.path.join(tmpdir, filename)
with file_io.FileIO(json_file, 'w') as f:
json.dump(data, f)
return json_file
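# Helper: display an HTML notice in the notebook linking to the submitted job's cloud logs.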
def _show_job_link(job):
log_url_query_strings = {
'project': Context.default().project_id,
'resource': 'ml.googleapis.com/job_id/' + job.info['jobId']
}
log_url = 'https://console.developers.google.com/logs/viewer?' + \
urllib.urlencode(log_url_query_strings)
html = 'Job "%s" submitted.' % job.info['jobId']
html += '<p>Click <a href="%s" target="_blank">here</a> to view cloud log. <br/>' % log_url
IPython.display.display_html(html, raw=True)
def _analyze(args, cell):
# For now, always run python2. If needed we can run python3 when the current kernel
# is py3. Since now our transform cannot work on py3 anyway, I would rather run
# everything with python2.
cmd_args = ['python', 'analyze.py', '--output', _abs_path(args['output'])]
if args['cloud']:
cmd_args.append('--cloud')
training_data = args['training_data']
if args['cloud']:
tmpdir = os.path.join(args['output'], 'tmp')
else:
tmpdir = tempfile.mkdtemp()
try:
if isinstance(training_data, dict):
if 'csv' in training_data and 'schema' in training_data:
schema = training_data['schema']
schema_file = _create_json_file(tmpdir, schema, 'schema.json')
cmd_args.append('--csv=' + _abs_path(training_data['csv']))
cmd_args.extend(['--schema', schema_file])
elif 'bigquery_table' in training_data:
cmd_args.extend(['--bigquery', training_data['bigquery_table']])
elif 'bigquery_sql' in training_data:
# see https://cloud.google.com/bigquery/querying-data#temporary_and_permanent_tables
print('Creating temporary table that will be deleted in 24 hours')
r = bq.Query(training_data['bigquery_sql']).execute().result()
cmd_args.extend(['--bigquery', r.full_name])
else:
raise ValueError('Invalid training_data dict. '
'Requires either "csv_file_pattern" and "csv_schema", or "bigquery".')
elif isinstance(training_data, google.datalab.ml.CsvDataSet):
schema_file = _create_json_file(tmpdir, training_data.schema, 'schema.json')
for file_name in training_data.input_files:
cmd_args.append('--csv=' + _abs_path(file_name))
cmd_args.extend(['--schema', schema_file])
elif isinstance(training_data, google.datalab.ml.BigQueryDataSet):
# TODO: Support query too once command line supports query.
cmd_args.extend(['--bigquery', training_data.table])
else:
raise ValueError('Invalid training data. Requires either a dict, '
'a google.datalab.ml.CsvDataSet, or a google.datalab.ml.BigQueryDataSet.')
features = args['features']
features_file = _create_json_file(tmpdir, features, 'features.json')
cmd_args.extend(['--features', features_file])
if args['package']:
code_path = os.path.join(tmpdir, 'package')
_archive.extract_archive(args['package'], code_path)
else:
code_path = MLTOOLBOX_CODE_PATH
_shell_process.run_and_monitor(cmd_args, os.getpid(), cwd=code_path)
finally:
file_io.delete_recursively(tmpdir)
def _transform(args, cell):
if args['cloud_config'] and not args['cloud']:
raise ValueError('"cloud_config" is provided but no "--cloud". '
'Do you want local run or cloud run?')
cmd_args = ['python', 'transform.py',
'--output', _abs_path(args['output']),
'--analysis', _abs_path(args['analysis']),
'--prefix', args['prefix']]
if args['cloud']:
cmd_args.append('--cloud')
cmd_args.append('--async')
if args['shuffle']:
cmd_args.append('--shuffle')
if args['batch_size']:
cmd_args.extend(['--batch-size', str(args['batch_size'])])
training_data = args['training_data']
if isinstance(training_data, dict):
if 'csv' in training_data:
cmd_args.append('--csv=' + _abs_path(training_data['csv']))
elif 'bigquery_table' in training_data:
cmd_args.extend(['--bigquery', training_data['bigquery_table']])
elif 'bigquery_sql' in training_data:
# see https://cloud.google.com/bigquery/querying-data#temporary_and_permanent_tables
print('Creating temporary table that will be deleted in 24 hours')
r = bq.Query(training_data['bigquery_sql']).execute().result()
cmd_args.extend(['--bigquery', r.full_name])
else:
raise ValueError('Invalid training_data dict. '
'Requires either "csv", or "bigquery_talbe", or '
'"bigquery_sql".')
elif isinstance(training_data, google.datalab.ml.CsvDataSet):
for file_name in training_data.input_files:
cmd_args.append('--csv=' + _abs_path(file_name))
elif isinstance(training_data, google.datalab.ml.BigQueryDataSet):
cmd_args.extend(['--bigquery', training_data.table])
else:
raise ValueError('Invalid training data. Requires either a dict, '
'a google.datalab.ml.CsvDataSet, or a google.datalab.ml.BigQueryDataSet.')
cloud_config = args['cloud_config']
if cloud_config:
google.datalab.utils.commands.validate_config(
cloud_config,
required_keys=[],
optional_keys=['num_workers', 'worker_machine_type', 'project_id', 'job_name'])
if 'num_workers' in cloud_config:
cmd_args.extend(['--num-workers', str(cloud_config['num_workers'])])
if 'worker_machine_type' in cloud_config:
cmd_args.extend(['--worker-machine-type', cloud_config['worker_machine_type']])
if 'project_id' in cloud_config:
cmd_args.extend(['--project-id', cloud_config['project_id']])
if 'job_name' in cloud_config:
cmd_args.extend(['--job-name', cloud_config['job_name']])
if args['cloud'] and (not cloud_config or 'project_id' not in cloud_config):
cmd_args.extend(['--project-id', google.datalab.Context.default().project_id])
try:
tmpdir = None
if args['package']:
tmpdir = tempfile.mkdtemp()
code_path = os.path.join(tmpdir, 'package')
_archive.extract_archive(args['package'], code_path)
else:
code_path = MLTOOLBOX_CODE_PATH
_shell_process.run_and_monitor(cmd_args, os.getpid(), cwd=code_path)
finally:
if tmpdir:
shutil.rmtree(tmpdir)
def _train(args, cell):
if args['cloud_config'] and not args['cloud']:
raise ValueError('"cloud_config" is provided but no "--cloud". '
'Do you want local run or cloud run?')
job_args = ['--job-dir', _abs_path(args['output']),
'--analysis', _abs_path(args['analysis'])]
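  # Accepts either raw csv (which also enables the in-trainer transform step) or already-transformed data.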
def _process_train_eval_data(data, arg_name, job_args):
if isinstance(data, dict):
if 'csv' in data:
job_args.append(arg_name + '=' + _abs_path(data['csv']))
if '--transform' not in job_args:
job_args.append('--transform')
elif 'transformed' in data:
job_args.append(arg_name + '=' + _abs_path(data['transformed']))
else:
raise ValueError('Invalid training_data dict. '
'Requires either "csv" or "transformed".')
elif isinstance(data, google.datalab.ml.CsvDataSet):
for file_name in data.input_files:
job_args.append(arg_name + '=' + _abs_path(file_name))
else:
raise ValueError('Invalid training data. Requires either a dict, or '
'a google.datalab.ml.CsvDataSet')
_process_train_eval_data(args['training_data'], '--train', job_args)
_process_train_eval_data(args['evaluation_data'], '--eval', job_args)
# TODO(brandondutra) document that any model_args that are file paths must
# be given as an absolute path
if args['model_args']:
for k, v in six.iteritems(args['model_args']):
job_args.extend(['--' + k, str(v)])
try:
tmpdir = None
if args['package']:
tmpdir = tempfile.mkdtemp()
code_path = os.path.join(tmpdir, 'package')
_archive.extract_archive(args['package'], code_path)
else:
code_path = MLTOOLBOX_CODE_PATH
if args['cloud']:
cloud_config = args['cloud_config']
if not args['output'].startswith('gs://'):
raise ValueError('Cloud training requires a GCS (starting with "gs://") output.')
staging_tarball = os.path.join(args['output'], 'staging', 'trainer.tar.gz')
datalab_ml.package_and_copy(code_path,
os.path.join(code_path, 'setup.py'),
staging_tarball)
job_request = {
'package_uris': [staging_tarball],
'python_module': 'trainer.task',
'job_dir': args['output'],
'args': job_args,
}
job_request.update(cloud_config)
job_id = cloud_config.get('job_id', None)
job = datalab_ml.Job.submit_training(job_request, job_id)
_show_job_link(job)
else:
cmd_args = ['python', '-m', 'trainer.task'] + job_args
_shell_process.run_and_monitor(cmd_args, os.getpid(), cwd=code_path)
finally:
if tmpdir:
shutil.rmtree(tmpdir)
def _predict(args, cell):
if args['cloud']:
headers, img_cols = None, None
if args['headers']:
headers = args['headers'].split(',')
if args['image_columns']:
img_cols = args['image_columns'].split(',')
else:
schema, features = _local_predict.get_model_schema_and_features(args['model'])
headers = [x['name'] for x in schema]
img_cols = []
for k, v in six.iteritems(features):
if v['transform'] in ['image_to_vec']:
img_cols.append(v['source_column'])
data = args['prediction_data']
df = _local_predict.get_prediction_results(
args['model'], data, headers, img_cols=img_cols, cloud=args['cloud'],
show_image=not args['no_show_image'])
def _show_img(img_bytes):
return '<img src="data:image/png;base64,' + img_bytes + '" />'
def _truncate_text(text):
return (text[:37] + '...') if isinstance(text, six.string_types) and len(text) > 40 else text
# Truncate text explicitly here because we will set display.max_colwidth to -1.
  # This applies to images too, but images will be overridden with "_show_img()" later.
formatters = {x: _truncate_text for x in df.columns if df[x].dtype == np.object}
if not args['no_show_image'] and img_cols:
formatters.update({x + '_image': _show_img for x in img_cols})
# Set display.max_colwidth to -1 so we can display images.
old_width = pd.get_option('display.max_colwidth')
pd.set_option('display.max_colwidth', -1)
try:
IPython.display.display(IPython.display.HTML(
df.to_html(formatters=formatters, escape=False, index=False)))
finally:
pd.set_option('display.max_colwidth', old_width)
def _batch_predict(args, cell):
if args['cloud_config'] and not args['cloud']:
raise ValueError('"cloud_config" is provided but no "--cloud". '
'Do you want local run or cloud run?')
if args['cloud']:
job_request = {
'data_format': 'TEXT',
'input_paths': file_io.get_matching_files(args['prediction_data']['csv']),
'output_path': args['output'],
}
if args['model'].startswith('gs://'):
job_request['uri'] = args['model']
else:
parts = args['model'].split('.')
if len(parts) != 2:
raise ValueError('Invalid model name for cloud prediction. Use "model.version".')
version_name = ('projects/%s/models/%s/versions/%s' %
(Context.default().project_id, parts[0], parts[1]))
job_request['version_name'] = version_name
cloud_config = args['cloud_config'] or {}
job_id = cloud_config.pop('job_id', None)
job_request.update(cloud_config)
job = datalab_ml.Job.submit_batch_prediction(job_request, job_id)
_show_job_link(job)
else:
print('local prediction...')
_local_predict.local_batch_predict(args['model'],
args['prediction_data']['csv'],
args['output'],
args['format'],
args['batch_size'])
print('done.')
def _explain(args, cell):
explainer = _prediction_explainer.PredictionExplainer(args['model'])
labels = args['labels'].split(',')
if args['type'] == 'text':
num_features = args['num_features'] if args['num_features'] else 10
num_samples = args['num_samples'] if args['num_samples'] else 5000
exp = explainer.explain_text(labels, args['data'], column_name=args['column_name'],
num_features=num_features, num_samples=num_samples)
if not args['detailview_only']:
exp.show_in_notebook()
if not args['overview_only']:
for i in range(len(labels)):
exp.as_pyplot_figure(label=i)
elif args['type'] == 'image':
num_features = args['num_features'] if args['num_features'] else 3
num_samples = args['num_samples'] if args['num_samples'] else 300
hide_color = None if args['hide_color'] == -1 else args['hide_color']
exp = explainer.explain_image(labels, args['data'], column_name=args['column_name'],
num_samples=num_samples, batch_size=args['batch_size'],
hide_color=hide_color)
for i in range(len(labels)):
image, mask = exp.get_image_and_mask(i, positive_only=not args['include_negative'],
num_features=num_features, hide_rest=False)
fig = plt.figure()
fig.suptitle(labels[i], fontsize=16)
plt.imshow(mark_boundaries(image, mask))
|
craigcitro/pydatalab
|
google/datalab/contrib/mlworkbench/commands/_ml.py
|
Python
|
apache-2.0
| 33,262 | 0.008689 |
from flask_admin import expose
from listenbrainz.webserver.admin import AdminIndexView
class HomeView(AdminIndexView):
@expose('/')
def index(self):
return self.render('admin/home.html')
|
Freso/listenbrainz-server
|
listenbrainz/webserver/admin/views.py
|
Python
|
gpl-2.0
| 206 | 0 |
"""
A pythonic interface to the Zabbix API.
"""
from .api import Api, ApiException
from .objects.host import Host
from .objects.hostgroup import HostGroup
from .objects.item import Item
from .objects.trigger import Trigger
from .objects.itservice import ItService
|
erik-stephens/zabbix
|
zabbix/__init__.py
|
Python
|
mit
| 266 | 0 |
from decimal import Decimal
class Integer:
def __init__(self, val=None):
self.val = int(val)
def __repr__(self):
        return str(self.val)
class Text:
def __init__(self, val=None):
self.val = str(val)
def __repr__(self):
return self.val
class Bool:
def __init__(self, val=None):
self.val = bool(val)
def __repr__(self):
        return str(self.val)
class Real:
def __init__(self, val=None):
self.val = Decimal(val)
def __repr__(self):
        return str(self.val)
class Date:
pass
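# Usage sketch: Integer("42").val == 42, Bool(1).val is True, Real("3.14").val == Decimal("3.14").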
|
shabinesh/Tabject
|
tabject/types.py
|
Python
|
bsd-3-clause
| 550 | 0.016364 |
import codecs
import pathlib
import re
import sys
from distutils.command.build_ext import build_ext
from distutils.errors import (CCompilerError, DistutilsExecError,
DistutilsPlatformError)
from setuptools import Extension, setup
if sys.version_info < (3, 5, 3):
raise RuntimeError("aiohttp 3.x requires Python 3.5.3+")
try:
from Cython.Build import cythonize
USE_CYTHON = True
except ImportError:
USE_CYTHON = False
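# Build from the Cython sources when Cython is available; otherwise fall back to the pre-generated .c files.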
ext = '.pyx' if USE_CYTHON else '.c'
extensions = [Extension('aiohttp._websocket', ['aiohttp/_websocket' + ext]),
Extension('aiohttp._http_parser',
['aiohttp/_http_parser' + ext,
'vendor/http-parser/http_parser.c',
'aiohttp/_find_header.c'],
define_macros=[('HTTP_PARSER_STRICT', 0)],
),
Extension('aiohttp._frozenlist',
['aiohttp/_frozenlist' + ext]),
Extension('aiohttp._helpers',
['aiohttp/_helpers' + ext]),
Extension('aiohttp._http_writer',
['aiohttp/_http_writer' + ext])]
if USE_CYTHON:
extensions = cythonize(extensions)
class BuildFailed(Exception):
pass
class ve_build_ext(build_ext):
# This class allows C extension building to fail.
def run(self):
try:
build_ext.run(self)
except (DistutilsPlatformError, FileNotFoundError):
raise BuildFailed()
def build_extension(self, ext):
try:
build_ext.build_extension(self, ext)
except (DistutilsExecError,
DistutilsPlatformError, ValueError):
raise BuildFailed()
here = pathlib.Path(__file__).parent
txt = (here / 'aiohttp' / '__init__.py').read_text('utf-8')
try:
version = re.findall(r"^__version__ = '([^']+)'\r?$",
txt, re.M)[0]
except IndexError:
raise RuntimeError('Unable to determine version.')
install_requires = [
'attrs>=17.3.0',
'chardet>=2.0,<4.0',
'multidict>=4.0,<5.0',
'async_timeout>=3.0,<4.0',
'yarl>=1.0,<2.0',
'idna-ssl>=1.0; python_version<"3.7"',
]
def read(f):
return (here / f).read_text('utf-8').strip()
NEEDS_PYTEST = {'pytest', 'test'}.intersection(sys.argv)
pytest_runner = ['pytest-runner'] if NEEDS_PYTEST else []
tests_require = ['pytest', 'gunicorn',
'pytest-timeout', 'async-generator']
args = dict(
name='aiohttp',
version=version,
description='Async http client/server framework (asyncio)',
long_description='\n\n'.join((read('README.rst'), read('CHANGES.rst'))),
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Development Status :: 5 - Production/Stable',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Topic :: Internet :: WWW/HTTP',
'Framework :: AsyncIO',
],
author='Nikolay Kim',
author_email='fafhrd91@gmail.com',
maintainer=', '.join(('Nikolay Kim <fafhrd91@gmail.com>',
'Andrew Svetlov <andrew.svetlov@gmail.com>')),
maintainer_email='aio-libs@googlegroups.com',
url='https://github.com/aio-libs/aiohttp',
project_urls={
'Chat: Gitter': 'https://gitter.im/aio-libs/Lobby',
'CI: AppVeyor': 'https://ci.appveyor.com/project/aio-libs/aiohttp',
'CI: Circle': 'https://circleci.com/gh/aio-libs/aiohttp',
'CI: Shippable': 'https://app.shippable.com/github/aio-libs/aiohttp',
'CI: Travis': 'https://travis-ci.com/aio-libs/aiohttp',
'Coverage: codecov': 'https://codecov.io/github/aio-libs/aiohttp',
'Docs: RTD': 'https://docs.aiohttp.org',
'GitHub: issues': 'https://github.com/aio-libs/aiohttp/issues',
'GitHub: repo': 'https://github.com/aio-libs/aiohttp',
},
license='Apache 2',
packages=['aiohttp'],
python_requires='>=3.5.3',
install_requires=install_requires,
tests_require=tests_require,
setup_requires=pytest_runner,
include_package_data=True,
ext_modules=extensions,
cmdclass=dict(build_ext=ve_build_ext),
)
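# First try to build with the C accelerator modules; if compilation fails,
# retry the setup as a pure-Python distribution so installation still succeeds.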
try:
setup(**args)
except BuildFailed:
print("************************************************************")
print("Cannot compile C accelerator module, use pure python version")
print("************************************************************")
del args['ext_modules']
del args['cmdclass']
setup(**args)
|
rutsky/aiohttp
|
setup.py
|
Python
|
apache-2.0
| 4,887 | 0 |
from django.db import models
from django.core.validators import MinValueValidator, MaxValueValidator
from edc_registration.models import RegisteredSubject
from .base_maternal_clinical_measurements import BaseMaternalClinicalMeasurements
class MaternalClinicalMeasurementsOne(BaseMaternalClinicalMeasurements):
height = models.DecimalField(
max_digits=5,
decimal_places=2,
verbose_name="Mother's height? ",
validators=[MinValueValidator(134), MaxValueValidator(195), ],
help_text="Measured in Centimeters (cm)")
class Meta:
app_label = 'td_maternal'
verbose_name = 'Maternal Clinical Measurements One'
verbose_name_plural = 'Maternal Clinical Measurements One'
|
TshepangRas/tshilo-dikotla
|
td_maternal/models/maternal_clinical_measurements_one.py
|
Python
|
gpl-2.0
| 738 | 0.004065 |
#!/usr/bin/env python3
import os
import sys
import traceback
thispath = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(os.path.dirname(thispath),"helper"))
from MiscFxns import *
from StandardModules import *
import pulsar_psi4
def ApplyBasis(syst,bsname,bslabel="primary"):
return psr.system.apply_single_basis(bslabel,bsname,syst)
def CompareEgy(EgyIn):
return abs(EgyIn+224.89287653924677)<0.00001
def CompareGrad(GradIn):
CorrectGrad=[
-0.000988976949000001, 0.0004443157829999993, 0.05238342271999999,
0.018237358511, -0.002547005771, -0.030731839919000005,
-0.02344281975, -0.0062568701740000005, -0.025360880303,
-0.015409293889000001, -0.047382578540999996, -0.012807191666999996,
0.016869055227000003, 0.024963490952999996, -0.017442968207000004,
0.007207092293000001, 0.025306999363999997, 0.023850402741000004,
0.019786523729999998, 0.04038960502300001, -0.028509120090000006,
-0.026869925129, -0.022975320699000004, 0.005627050168,
0.004610985953999999, -0.011942635934, 0.032991124551000006]
AllGood=True
for i in range(0,len(CorrectGrad)):
        AllGood = AllGood and abs(CorrectGrad[i] - GradIn[i]) < 0.00001
return AllGood
def Run(mm):
try:
tester = psr.testing.Tester("Testing Boys and Bernardi CP")
tester.print_header()
pulsar_psi4.pulsar_psi4_setup(mm)
LoadDefaultModules(mm)
mm.change_option("PSI4_SCF","BASIS_SET","sto-3g")
mm.change_option("PSR_CP","METHOD","PSI4_SCF")
mm.change_option("PSR_MBE","METHOD","PSI4_SCF")
mm.change_option("PSI4_SCF","PRINT",0)
mol=psr.system.make_system("""
0 1
O 1.2361419 1.0137761 -0.0612424
H 0.5104418 0.8944555 0.5514190
H 1.9926927 1.1973129 0.4956931
O -0.9957202 0.0160415 1.2422556
H -1.4542703 -0.5669741 1.8472817
H -0.9377950 -0.4817912 0.4267562
O -0.2432343 -1.0198566 -1.1953808
H 0.4367536 -0.3759433 -0.9973297
H -0.5031835 -0.8251492 -2.0957959
""")
mol = ApplyBasis(mol,"sto-3g","sto-3g")
wfn=psr.datastore.Wavefunction()
wfn.system=mol
MyMod=mm.get_module("PSR_CP",0)
NewWfn,Egy=MyMod.deriv(0,wfn)
tester.test("Testing CP Energy via Deriv(0)", True, CompareEgy, Egy[0])
NewWfn,Egy=MyMod.energy(wfn)
tester.test("Testing CP Energy via Energy()", True, CompareEgy, Egy)
NewWfn,Egy=MyMod.deriv(1,wfn)
tester.test("Testing CP Gradient via Deriv(1)", True, CompareGrad, Egy)
NewWfn,Egy=MyMod.gradient(wfn)
tester.test("Testing CP Gradient via Gradient()", True, CompareGrad, Egy)
tester.print_results()
except Exception as e:
psr.output.Output("Caught exception in main handler\n")
traceback.print_exc()
with psr.ModuleAdministrator() as mm:
Run(mm)
psr.finalize()
|
pulsar-chem/Pulsar-Core
|
test/old/Old2/modules/CP.py
|
Python
|
bsd-3-clause
| 2,969 | 0.022903 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import Axon
from Kamaelia.Chassis.ConnectedServer import ServerCore
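# Minimal request/response protocol: echo every inbox message to the outbox
# until a control message arrives, then pass it on as the shutdown signal.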
class RequestResponseComponent(Axon.Component.component):
def main(self):
while not self.dataReady("control"):
for msg in self.Inbox("inbox"):
self.send(msg, "outbox")
self.pause()
yield 1
self.send(self.recv("control"), "signal")
ServerCore(protocol=RequestResponseComponent,
port=1599).run()
|
sparkslabs/kamaelia
|
Code/Python/Apps/Europython09/App/BB1.py
|
Python
|
apache-2.0
| 1,272 | 0.002358 |
from models.sampler import DynamicBlockGibbsSampler
from models.distribution import DynamicBernoulli
from models.optimizer import DynamicSGD
from utils.utils import prepare_frames
from scipy import io as matio
from data.gwtaylor.path import *
import ipdb
import numpy as np
SIZE_BATCH = 10
EPOCHS = 100
SIZE_HIDDEN = 50
SIZE_VISIBLE = 150
# CRBM Constants
M_LAG_VISIBLE = 2
N_LAG_HIDDEN = 2
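# Each training window holds the current frame plus enough history to cover the longest lag.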
SIZE_LAG = max(M_LAG_VISIBLE, N_LAG_HIDDEN)+1
# load and prepare dataset from .mat
mat = matio.loadmat(MOCAP_SAMPLE)
dataset = mat['batchdatabinary']
# generate batches
batch_idx_list = prepare_frames(len(dataset), SIZE_LAG, SIZE_BATCH)
# load distribution
bernoulli = DynamicBernoulli(SIZE_VISIBLE, SIZE_HIDDEN, m_lag_visible=M_LAG_VISIBLE, n_lag_hidden=N_LAG_HIDDEN)
gibbs_sampler = DynamicBlockGibbsSampler(bernoulli, sampling_steps=1)
sgd = DynamicSGD(bernoulli)
for epoch in range(EPOCHS):
error = 0.0
for chunk_idx_list in batch_idx_list:
# get batch data set
data = np.zeros(shape=(SIZE_BATCH, SIZE_VISIBLE, SIZE_LAG))
for idx, (start, end) in enumerate(chunk_idx_list):
data[idx, :, :] = dataset[start:end, :].T
hidden_0_probs, hidden_0_states, \
hidden_k_probs, hidden_k_states, \
visible_k_probs, visible_k_states = gibbs_sampler.sample(data[:, :, 0], data[:, :, 1:])
# compute deltas
d_weight_update, d_bias_hidden_update, \
d_bias_visible_update, d_vis_vis, d_vis_hid = sgd.optimize(data[:, :, 0], hidden_0_states, hidden_0_probs, hidden_k_probs,
hidden_k_states, visible_k_probs, visible_k_states, data[:, :, 1:])
# update model values
bernoulli.weights += d_weight_update
bernoulli.bias_hidden += d_bias_hidden_update
bernoulli.bias_visible += d_bias_visible_update
bernoulli.vis_vis_weights += d_vis_vis
bernoulli.vis_hid_weights += d_vis_hid
# compute reconstruction error
_, _, \
_, _, \
_, visible_k_states = gibbs_sampler.sample(data[:, :, 0], data[:, :, 1:])
error += np.mean(np.abs(visible_k_states - data[:, :, 0]))
    error = 1. / len(batch_idx_list) * error
    print(error)
|
deprofundis/deprofundis
|
models/scripts/example_crbm.py
|
Python
|
mit
| 2,313 | 0.006053 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/deed/event_perk/shared_lambda_shuttle_static_deed.iff"
result.attribute_template_id = 2
result.stfName("event_perk","lambda_shuttle_static_deed_name")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/deed/event_perk/shared_lambda_shuttle_static_deed.py
|
Python
|
mit
| 487 | 0.045175 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from unittest import mock
from configman.dotdict import DotDict
from socorro.lib.task_manager import TaskManager, default_task_func
class TestTaskManager:
def test_constuctor1(self):
config = DotDict()
config.quit_on_empty_queue = False
tm = TaskManager(config)
assert tm.config == config
assert tm.task_func == default_task_func
assert tm.quit is False
def test_get_iterator(self):
config = DotDict()
config.quit_on_empty_queue = False
tm = TaskManager(config, job_source_iterator=range(1))
assert list(tm._get_iterator()) == [0]
def an_iter(self):
yield from range(5)
tm = TaskManager(config, job_source_iterator=an_iter)
assert list(tm._get_iterator()) == [0, 1, 2, 3, 4]
class X:
def __init__(self, config):
self.config = config
def __iter__(self):
yield from self.config
tm = TaskManager(config, job_source_iterator=X(config))
assert list(tm._get_iterator()) == list(config.keys())
def test_blocking_start(self):
config = DotDict()
config.idle_delay = 1
config.quit_on_empty_queue = False
class MyTaskManager(TaskManager):
def _responsive_sleep(self, seconds, wait_log_interval=0, wait_reason=""):
try:
if self.count >= 2:
raise KeyboardInterrupt
self.count += 1
except AttributeError:
self.count = 0
tm = MyTaskManager(config, task_func=mock.Mock())
waiting_func = mock.Mock()
tm.blocking_start(waiting_func=waiting_func)
assert tm.task_func.call_count == 10
assert waiting_func.call_count == 0
def test_blocking_start_with_quit_on_empty(self):
config = DotDict()
config.idle_delay = 1
config.quit_on_empty_queue = True
tm = TaskManager(config, task_func=mock.Mock())
waiting_func = mock.Mock()
tm.blocking_start(waiting_func=waiting_func)
assert tm.task_func.call_count == 10
assert waiting_func.call_count == 0
|
lonnen/socorro
|
socorro/unittest/lib/test_task_manager.py
|
Python
|
mpl-2.0
| 2,418 | 0.000414 |
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2015-2016 Gramps Development Team
# Copyright (C) 2016 Nick Hall
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#------------------------------------------------------------------------
#
# Python Modules
#
#------------------------------------------------------------------------
import random
import pickle
import time
import re
import os
import logging
import bisect
import ast
import sys
import datetime
import glob
#------------------------------------------------------------------------
#
# Gramps Modules
#
#------------------------------------------------------------------------
from . import (DbReadBase, DbWriteBase, DbUndo, DBLOGNAME, DBUNDOFN,
REFERENCE_KEY, PERSON_KEY, FAMILY_KEY,
CITATION_KEY, SOURCE_KEY, EVENT_KEY, MEDIA_KEY, PLACE_KEY,
REPOSITORY_KEY, NOTE_KEY, TAG_KEY, TXNADD, TXNUPD, TXNDEL,
KEY_TO_NAME_MAP, DBMODE_R, DBMODE_W)
from .utils import write_lock_file, clear_lock_file
from ..errors import HandleError
from ..utils.callback import Callback
from ..updatecallback import UpdateCallback
from .bookmarks import DbBookmarks
from ..utils.id import create_id
from ..lib.researcher import Researcher
from ..lib import (Tag, Media, Person, Family, Source, Citation, Event,
Place, Repository, Note, NameOriginType)
from ..lib.genderstats import GenderStats
from ..config import config
from ..const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
LOG = logging.getLogger(DBLOGNAME)
SIGBASE = ('person', 'family', 'source', 'event', 'media',
'place', 'repository', 'reference', 'note', 'tag', 'citation')
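# Create ``fname`` if it does not exist and update its timestamps, mimicking the POSIX ``touch`` command.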
def touch(fname, mode=0o666, dir_fd=None, **kwargs):
## After http://stackoverflow.com/questions/1158076/implement-touch-using-python
if sys.version_info < (3, 3, 0):
with open(fname, 'a'):
os.utime(fname, None) # set to now
else:
flags = os.O_CREAT | os.O_APPEND
with os.fdopen(os.open(fname, flags=flags, mode=mode, dir_fd=dir_fd)) as f:
os.utime(f.fileno() if os.utime in os.supports_fd else fname,
dir_fd=None if os.supports_fd else dir_fd, **kwargs)
class DbGenericUndo(DbUndo):
def __init__(self, grampsdb, path):
super(DbGenericUndo, self).__init__(grampsdb)
self.undodb = []
def open(self, value=None):
"""
Open the backing storage. Needs to be overridden in the derived
class.
"""
pass
def close(self):
"""
Close the backing storage. Needs to be overridden in the derived
class.
"""
pass
def append(self, value):
"""
Add a new entry on the end. Needs to be overridden in the derived
class.
"""
self.undodb.append(value)
def __getitem__(self, index):
"""
Returns an entry by index number. Needs to be overridden in the
derived class.
"""
return self.undodb[index]
def __setitem__(self, index, value):
"""
Set an entry to a value. Needs to be overridden in the derived class.
"""
self.undodb[index] = value
def __len__(self):
"""
Returns the number of entries. Needs to be overridden in the derived
class.
"""
return len(self.undodb)
def _redo(self, update_history):
"""
Access the last undone transaction, and revert the data to the state
before the transaction was undone.
"""
txn = self.redoq.pop()
self.undoq.append(txn)
transaction = txn
db = self.db
subitems = transaction.get_recnos()
# sigs[obj_type][trans_type]
sigs = [[[] for trans_type in range(3)] for key in range(11)]
# Process all records in the transaction
try:
self.db._txn_begin()
for record_id in subitems:
(key, trans_type, handle, old_data, new_data) = \
pickle.loads(self.undodb[record_id])
if key == REFERENCE_KEY:
self.db.undo_reference(new_data, handle)
else:
self.db.undo_data(new_data, handle, key)
sigs[key][trans_type].append(handle)
# now emit the signals
self.undo_sigs(sigs, False)
self.db._txn_commit()
except:
self.db._txn_abort()
raise
# Notify listeners
if db.undo_callback:
db.undo_callback(_("_Undo %s") % transaction.get_description())
if db.redo_callback:
if self.redo_count > 1:
new_transaction = self.redoq[-2]
db.redo_callback(_("_Redo %s")
% new_transaction.get_description())
else:
db.redo_callback(None)
if update_history and db.undo_history_callback:
db.undo_history_callback()
return True
def _undo(self, update_history):
"""
Access the last committed transaction, and revert the data to the
state before the transaction was committed.
"""
txn = self.undoq.pop()
self.redoq.append(txn)
transaction = txn
db = self.db
subitems = transaction.get_recnos(reverse=True)
# sigs[obj_type][trans_type]
sigs = [[[] for trans_type in range(3)] for key in range(11)]
# Process all records in the transaction
try:
self.db._txn_begin()
for record_id in subitems:
(key, trans_type, handle, old_data, new_data) = \
pickle.loads(self.undodb[record_id])
if key == REFERENCE_KEY:
self.db.undo_reference(old_data, handle)
else:
self.db.undo_data(old_data, handle, key)
sigs[key][trans_type].append(handle)
# now emit the signals
self.undo_sigs(sigs, True)
self.db._txn_commit()
except:
self.db._txn_abort()
raise
# Notify listeners
if db.undo_callback:
if self.undo_count > 0:
db.undo_callback(_("_Undo %s")
% self.undoq[-1].get_description())
else:
db.undo_callback(None)
if db.redo_callback:
db.redo_callback(_("_Redo %s")
% transaction.get_description())
if update_history and db.undo_history_callback:
db.undo_history_callback()
return True
def undo_sigs(self, sigs, undo):
"""
        Helper method to undo/redo the signals for the changes made.
        We want to do deletes and adds first.
        Note that if 'undo' we swap emits.
"""
for trans_type in [TXNDEL, TXNADD, TXNUPD]:
for obj_type in range(11):
handles = sigs[obj_type][trans_type]
if handles:
if not undo and trans_type == TXNDEL \
or undo and trans_type == TXNADD:
typ = '-delete'
else:
# don't update a handle if its been deleted, and note
# that 'deleted' handles are in the 'add' list if we
# are undoing
handles = [handle for handle in handles
if handle not in
sigs[obj_type][TXNADD if undo else TXNDEL]]
if ((not undo) and trans_type == TXNADD) \
or (undo and trans_type == TXNDEL):
typ = '-add'
else: # TXNUPD
typ = '-update'
if handles:
self.db.emit(KEY_TO_NAME_MAP[obj_type] + typ,
(handles,))
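# Thin wrapper exposing a (handle, data) iterator through a cursor-style interface:
# context manager, plain iteration, and first()/next() stepping.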
class Cursor:
def __init__(self, iterator):
self.iterator = iterator
self._iter = self.__iter__()
def __enter__(self):
return self
def __iter__(self):
for handle, data in self.iterator():
yield (handle, data)
def __next__(self):
try:
return self._iter.__next__()
except StopIteration:
return None
def __exit__(self, *args, **kwargs):
pass
def iter(self):
for handle, data in self.iterator():
yield (handle, data)
def first(self):
self._iter = self.__iter__()
try:
return next(self._iter)
except:
return
def next(self):
try:
return next(self._iter)
except:
return
def close(self):
pass
class DbGeneric(DbWriteBase, DbReadBase, UpdateCallback, Callback):
"""
A Gramps Database Backend. This replicates the grampsdb functions.
"""
__signals__ = dict((obj+'-'+op, signal)
for obj in
['person', 'family', 'event', 'place', 'repository',
'source', 'citation', 'media', 'note', 'tag']
for op, signal in zip(
['add', 'update', 'delete', 'rebuild'],
[(list,), (list,), (list,), None]
)
)
# 2. Signals for long operations
__signals__.update(('long-op-'+op, signal) for op, signal in zip(
['start', 'heartbeat', 'end'],
[(object,), None, None]
))
# 3. Special signal for change in home person
__signals__['home-person-changed'] = None
# 4. Signal for change in person group name, parameters are
__signals__['person-groupname-rebuild'] = (str, str)
__callback_map = {}
VERSION = (18, 0, 0)
def __init__(self, directory=None):
DbReadBase.__init__(self)
DbWriteBase.__init__(self)
Callback.__init__(self)
self.__tables = {
'Person':
{
"handle_func": self.get_person_from_handle,
"gramps_id_func": self.get_person_from_gramps_id,
"class_func": Person,
"cursor_func": self.get_person_cursor,
"handles_func": self.get_person_handles,
"add_func": self.add_person,
"commit_func": self.commit_person,
"iter_func": self.iter_people,
"ids_func": self.get_person_gramps_ids,
"has_handle_func": self.has_person_handle,
"has_gramps_id_func": self.has_person_gramps_id,
"count_func": self.get_number_of_people,
"raw_func": self.get_raw_person_data,
"raw_id_func": self._get_raw_person_from_id_data,
"del_func": self.remove_person,
},
'Family':
{
"handle_func": self.get_family_from_handle,
"gramps_id_func": self.get_family_from_gramps_id,
"class_func": Family,
"cursor_func": self.get_family_cursor,
"handles_func": self.get_family_handles,
"add_func": self.add_family,
"commit_func": self.commit_family,
"iter_func": self.iter_families,
"ids_func": self.get_family_gramps_ids,
"has_handle_func": self.has_family_handle,
"has_gramps_id_func": self.has_family_gramps_id,
"count_func": self.get_number_of_families,
"raw_func": self.get_raw_family_data,
"raw_id_func": self._get_raw_family_from_id_data,
"del_func": self.remove_family,
},
'Source':
{
"handle_func": self.get_source_from_handle,
"gramps_id_func": self.get_source_from_gramps_id,
"class_func": Source,
"cursor_func": self.get_source_cursor,
"handles_func": self.get_source_handles,
"add_func": self.add_source,
"commit_func": self.commit_source,
"iter_func": self.iter_sources,
"ids_func": self.get_source_gramps_ids,
"has_handle_func": self.has_source_handle,
"has_gramps_id_func": self.has_source_gramps_id,
"count_func": self.get_number_of_sources,
"raw_func": self.get_raw_source_data,
"raw_id_func": self._get_raw_source_from_id_data,
"del_func": self.remove_source,
},
'Citation':
{
"handle_func": self.get_citation_from_handle,
"gramps_id_func": self.get_citation_from_gramps_id,
"class_func": Citation,
"cursor_func": self.get_citation_cursor,
"handles_func": self.get_citation_handles,
"add_func": self.add_citation,
"commit_func": self.commit_citation,
"iter_func": self.iter_citations,
"ids_func": self.get_citation_gramps_ids,
"has_handle_func": self.has_citation_handle,
"has_gramps_id_func": self.has_citation_gramps_id,
"count_func": self.get_number_of_citations,
"raw_func": self.get_raw_citation_data,
"raw_id_func": self._get_raw_citation_from_id_data,
"del_func": self.remove_citation,
},
'Event':
{
"handle_func": self.get_event_from_handle,
"gramps_id_func": self.get_event_from_gramps_id,
"class_func": Event,
"cursor_func": self.get_event_cursor,
"handles_func": self.get_event_handles,
"add_func": self.add_event,
"commit_func": self.commit_event,
"iter_func": self.iter_events,
"ids_func": self.get_event_gramps_ids,
"has_handle_func": self.has_event_handle,
"has_gramps_id_func": self.has_event_gramps_id,
"count_func": self.get_number_of_events,
"raw_func": self.get_raw_event_data,
"raw_id_func": self._get_raw_event_from_id_data,
"del_func": self.remove_event,
},
'Media':
{
"handle_func": self.get_media_from_handle,
"gramps_id_func": self.get_media_from_gramps_id,
"class_func": Media,
"cursor_func": self.get_media_cursor,
"handles_func": self.get_media_handles,
"add_func": self.add_media,
"commit_func": self.commit_media,
"iter_func": self.iter_media,
"ids_func": self.get_media_gramps_ids,
"has_handle_func": self.has_media_handle,
"has_gramps_id_func": self.has_media_gramps_id,
"count_func": self.get_number_of_media,
"raw_func": self.get_raw_media_data,
"raw_id_func": self._get_raw_media_from_id_data,
"del_func": self.remove_media,
},
'Place':
{
"handle_func": self.get_place_from_handle,
"gramps_id_func": self.get_place_from_gramps_id,
"class_func": Place,
"cursor_func": self.get_place_cursor,
"handles_func": self.get_place_handles,
"add_func": self.add_place,
"commit_func": self.commit_place,
"iter_func": self.iter_places,
"ids_func": self.get_place_gramps_ids,
"has_handle_func": self.has_place_handle,
"has_gramps_id_func": self.has_place_gramps_id,
"count_func": self.get_number_of_places,
"raw_func": self.get_raw_place_data,
"raw_id_func": self._get_raw_place_from_id_data,
"del_func": self.remove_place,
},
'Repository':
{
"handle_func": self.get_repository_from_handle,
"gramps_id_func": self.get_repository_from_gramps_id,
"class_func": Repository,
"cursor_func": self.get_repository_cursor,
"handles_func": self.get_repository_handles,
"add_func": self.add_repository,
"commit_func": self.commit_repository,
"iter_func": self.iter_repositories,
"ids_func": self.get_repository_gramps_ids,
"has_handle_func": self.has_repository_handle,
"has_gramps_id_func": self.has_repository_gramps_id,
"count_func": self.get_number_of_repositories,
"raw_func": self.get_raw_repository_data,
"raw_id_func": self._get_raw_repository_from_id_data,
"del_func": self.remove_repository,
},
'Note':
{
"handle_func": self.get_note_from_handle,
"gramps_id_func": self.get_note_from_gramps_id,
"class_func": Note,
"cursor_func": self.get_note_cursor,
"handles_func": self.get_note_handles,
"add_func": self.add_note,
"commit_func": self.commit_note,
"iter_func": self.iter_notes,
"ids_func": self.get_note_gramps_ids,
"has_handle_func": self.has_note_handle,
"has_gramps_id_func": self.has_note_gramps_id,
"count_func": self.get_number_of_notes,
"raw_func": self.get_raw_note_data,
"raw_id_func": self._get_raw_note_from_id_data,
"del_func": self.remove_note,
},
'Tag':
{
"handle_func": self.get_tag_from_handle,
"gramps_id_func": None,
"class_func": Tag,
"cursor_func": self.get_tag_cursor,
"handles_func": self.get_tag_handles,
"add_func": self.add_tag,
"commit_func": self.commit_tag,
"has_handle_func": self.has_tag_handle,
"iter_func": self.iter_tags,
"count_func": self.get_number_of_tags,
"raw_func": self.get_raw_tag_data,
"del_func": self.remove_tag,
}
}
self.readonly = False
self.db_is_open = False
self.name_formats = []
# Bookmarks:
self.bookmarks = DbBookmarks()
self.family_bookmarks = DbBookmarks()
self.event_bookmarks = DbBookmarks()
self.place_bookmarks = DbBookmarks()
self.citation_bookmarks = DbBookmarks()
self.source_bookmarks = DbBookmarks()
self.repo_bookmarks = DbBookmarks()
self.media_bookmarks = DbBookmarks()
self.note_bookmarks = DbBookmarks()
self.set_person_id_prefix('I%04d')
self.set_media_id_prefix('O%04d')
self.set_family_id_prefix('F%04d')
self.set_citation_id_prefix('C%04d')
self.set_source_id_prefix('S%04d')
self.set_place_id_prefix('P%04d')
self.set_event_id_prefix('E%04d')
self.set_repository_id_prefix('R%04d')
self.set_note_id_prefix('N%04d')
# ----------------------------------
self.undodb = None
self.cmap_index = 0
self.smap_index = 0
self.emap_index = 0
self.pmap_index = 0
self.fmap_index = 0
self.lmap_index = 0
self.omap_index = 0
self.rmap_index = 0
self.nmap_index = 0
self.undo_callback = None
self.redo_callback = None
self.undo_history_callback = None
self.modified = 0
self.transaction = None
self.abort_possible = False
self._bm_changes = 0
self.has_changed = False
self.surname_list = []
self.genderStats = GenderStats() # can pass in loaded stats as dict
self.owner = Researcher()
if directory:
self.load(directory)
def _initialize(self, directory, username, password):
"""
Initialize database backend.
"""
raise NotImplementedError
def __check_readonly(self, name):
"""
Return True if we don't have read/write access to the database,
otherwise return False (that is, we DO have read/write access)
"""
# In-memory databases always allow write access.
if name == ':memory:':
return False
        # See if we can write to the target directory at all
if not os.access(name, os.W_OK):
return True
# See if we lack write access to the database file
path = os.path.join(name, 'sqlite.db')
if os.path.isfile(path) and not os.access(path, os.W_OK):
return True
# All tests passed. Inform caller that we are NOT read only
return False
def load(self, directory, callback=None, mode=DBMODE_W,
force_schema_upgrade=False,
force_bsddb_upgrade=False,
force_bsddb_downgrade=False,
force_python_upgrade=False,
update=True,
username=None,
password=None):
"""
        If update is False, then don't update any files
"""
if self.__check_readonly(directory):
mode = DBMODE_R
self.readonly = mode == DBMODE_R
if not self.readonly and directory != ':memory:':
write_lock_file(directory)
# run backend-specific code:
self._initialize(directory, username, password)
if not self._schema_exists():
self._create_schema()
self._set_metadata('version', str(self.VERSION[0]))
# Load metadata
self.name_formats = self._get_metadata('name_formats')
self.owner = self._get_metadata('researcher', default=Researcher())
# Load bookmarks
self.bookmarks.set(self._get_metadata('bookmarks'))
self.family_bookmarks.set(self._get_metadata('family_bookmarks'))
self.event_bookmarks.set(self._get_metadata('event_bookmarks'))
self.source_bookmarks.set(self._get_metadata('source_bookmarks'))
self.citation_bookmarks.set(self._get_metadata('citation_bookmarks'))
self.repo_bookmarks.set(self._get_metadata('repo_bookmarks'))
self.media_bookmarks.set(self._get_metadata('media_bookmarks'))
self.place_bookmarks.set(self._get_metadata('place_bookmarks'))
self.note_bookmarks.set(self._get_metadata('note_bookmarks'))
# Custom type values
self.event_names = self._get_metadata('event_names', set())
self.family_attributes = self._get_metadata('fattr_names', set())
self.individual_attributes = self._get_metadata('pattr_names', set())
self.source_attributes = self._get_metadata('sattr_names', set())
self.marker_names = self._get_metadata('marker_names', set())
self.child_ref_types = self._get_metadata('child_refs', set())
self.family_rel_types = self._get_metadata('family_rels', set())
self.event_role_names = self._get_metadata('event_roles', set())
self.name_types = self._get_metadata('name_types', set())
self.origin_types = self._get_metadata('origin_types', set())
self.repository_types = self._get_metadata('repo_types', set())
self.note_types = self._get_metadata('note_types', set())
self.source_media_types = self._get_metadata('sm_types', set())
self.url_types = self._get_metadata('url_types', set())
self.media_attributes = self._get_metadata('mattr_names', set())
self.event_attributes = self._get_metadata('eattr_names', set())
self.place_types = self._get_metadata('place_types', set())
# surname list
self.surname_list = self.get_surname_list()
self._set_save_path(directory)
if self._directory:
self.undolog = os.path.join(self._directory, DBUNDOFN)
else:
self.undolog = None
self.undodb = DbGenericUndo(self, self.undolog)
self.undodb.open()
# Other items to load
gstats = self.get_gender_stats()
self.genderStats = GenderStats(gstats)
# Indexes:
self.cmap_index = self._get_metadata('cmap_index', 0)
self.smap_index = self._get_metadata('smap_index', 0)
self.emap_index = self._get_metadata('emap_index', 0)
self.pmap_index = self._get_metadata('pmap_index', 0)
self.fmap_index = self._get_metadata('fmap_index', 0)
self.lmap_index = self._get_metadata('lmap_index', 0)
self.omap_index = self._get_metadata('omap_index', 0)
self.rmap_index = self._get_metadata('rmap_index', 0)
self.nmap_index = self._get_metadata('nmap_index', 0)
self.db_is_open = True
def _close(self):
"""
Close database backend.
"""
raise NotImplementedError
def close(self, update=True, user=None):
"""
Close the database.
if update is False, don't change access times, etc.
"""
if self._directory != ":memory:":
if update and not self.readonly:
# This is just a dummy file to indicate last modified time of
# the database for gramps.cli.clidbman:
filename = os.path.join(self._directory, "meta_data.db")
touch(filename)
# Save metadata
self._set_metadata('name_formats', self.name_formats)
self._set_metadata('researcher', self.owner)
# Bookmarks
self._set_metadata('bookmarks', self.bookmarks.get())
self._set_metadata('family_bookmarks',
self.family_bookmarks.get())
self._set_metadata('event_bookmarks', self.event_bookmarks.get())
self._set_metadata('place_bookmarks', self.place_bookmarks.get())
self._set_metadata('repo_bookmarks', self.repo_bookmarks.get())
self._set_metadata('source_bookmarks',
self.source_bookmarks.get())
self._set_metadata('citation_bookmarks',
self.citation_bookmarks.get())
self._set_metadata('media_bookmarks', self.media_bookmarks.get())
self._set_metadata('note_bookmarks', self.note_bookmarks.get())
# Custom type values, sets
self._set_metadata('event_names', self.event_names)
self._set_metadata('fattr_names', self.family_attributes)
self._set_metadata('pattr_names', self.individual_attributes)
self._set_metadata('sattr_names', self.source_attributes)
self._set_metadata('marker_names', self.marker_names)
self._set_metadata('child_refs', self.child_ref_types)
self._set_metadata('family_rels', self.family_rel_types)
self._set_metadata('event_roles', self.event_role_names)
self._set_metadata('name_types', self.name_types)
self._set_metadata('origin_types', self.origin_types)
self._set_metadata('repo_types', self.repository_types)
self._set_metadata('note_types', self.note_types)
self._set_metadata('sm_types', self.source_media_types)
self._set_metadata('url_types', self.url_types)
self._set_metadata('mattr_names', self.media_attributes)
self._set_metadata('eattr_names', self.event_attributes)
self._set_metadata('place_types', self.place_types)
# Save misc items:
if self.has_changed:
self.save_gender_stats(self.genderStats)
# Indexes:
self._set_metadata('cmap_index', self.cmap_index)
self._set_metadata('smap_index', self.smap_index)
self._set_metadata('emap_index', self.emap_index)
self._set_metadata('pmap_index', self.pmap_index)
self._set_metadata('fmap_index', self.fmap_index)
self._set_metadata('lmap_index', self.lmap_index)
self._set_metadata('omap_index', self.omap_index)
self._set_metadata('rmap_index', self.rmap_index)
self._set_metadata('nmap_index', self.nmap_index)
self._close()
try:
clear_lock_file(self.get_save_path())
except IOError:
pass
self.db_is_open = False
self._directory = None
def is_open(self):
return self.db_is_open
def get_dbid(self):
"""
We use the file directory name as the unique ID for
this database on this computer.
"""
return self.brief_name
def get_dbname(self):
"""
        In DbGeneric, the database name is stored in a text file (name.txt) under the database directory path
"""
name = None
if self._directory:
filepath = os.path.join(self._directory, "name.txt")
try:
with open(filepath, "r") as name_file:
name = name_file.readline().strip()
except (OSError, IOError) as msg:
LOG.error(str(msg))
return name
def version_supported(self):
"""Return True when the file has a supported version."""
return True
def _get_table_func(self, table=None, func=None):
"""
Private implementation of get_table_func.
"""
if table is None:
return list(self.__tables.keys())
elif func is None:
return self.__tables[table] # dict of functions
elif func in self.__tables[table].keys():
return self.__tables[table][func]
else:
return None
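    # Illustrative sketch (hypothetical caller, not part of this file): the
    # dispatch table above lets generic code look up per-class operations by
    # name.  Assuming an open database instance `db` and a valid handle:
    #
    #     handle_func = db._get_table_func("Person", "handle_func")
    #     person = handle_func(handle)   # same as db.get_person_from_handle(handle)
    #     count = db._get_table_func("Person", "count_func")()
    #
    # Called with no arguments it returns the list of table names; called with
    # only a table name it returns that table's whole dict of functions.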
def _txn_begin(self):
"""
Lowlevel interface to the backend transaction.
Executes a db BEGIN;
"""
pass
def _txn_commit(self):
"""
Lowlevel interface to the backend transaction.
Executes a db END;
"""
pass
def _txn_abort(self):
"""
Lowlevel interface to the backend transaction.
Executes a db ROLLBACK;
"""
pass
def transaction_begin(self, transaction):
"""
Transactions are handled automatically by the db layer.
"""
self.transaction = transaction
return transaction
def _get_metadata(self, key, default=[]):
"""
Get an item from the database.
Default is an empty list, which is a mutable and
thus a bad default (pylint will complain).
However, it is just used as a value, and not altered, so
its use here is ok.
"""
raise NotImplementedError
def _set_metadata(self, key, value):
"""
key: string
value: item, will be serialized here
"""
raise NotImplementedError
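    # Hedged sketch of how a backend might satisfy the two metadata hooks
    # above.  The table name, the pickling choice, and the _fetch_one/_execute
    # helpers are assumptions for illustration, not the actual backend code:
    #
    #     def _get_metadata(self, key, default=[]):
    #         row = self._fetch_one("SELECT value FROM metadata WHERE setting = ?", [key])
    #         return pickle.loads(row[0]) if row else default
    #
    #     def _set_metadata(self, key, value):
    #         self._execute("REPLACE INTO metadata (setting, value) VALUES (?, ?)",
    #                       [key, pickle.dumps(value)])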
################################################################
#
# set_*_id_prefix methods
#
################################################################
@staticmethod
def _validated_id_prefix(val, default):
if isinstance(val, str) and val:
try:
str_ = val % 1
except TypeError: # missing conversion specifier
prefix_var = val + "%d"
except ValueError: # incomplete format
prefix_var = default+"%04d"
else:
prefix_var = val # OK as given
else:
prefix_var = default+"%04d" # not a string or empty string
return prefix_var
@staticmethod
def __id2user_format(id_pattern):
"""
Return a method that accepts a Gramps ID and adjusts it to the users
format.
"""
pattern_match = re.match(r"(.*)%[0 ](\d+)[diu]$", id_pattern)
if pattern_match:
str_prefix = pattern_match.group(1)
#nr_width = int(pattern_match.group(2))
def closure_func(gramps_id):
if gramps_id and gramps_id.startswith(str_prefix):
id_number = gramps_id[len(str_prefix):]
if id_number.isdigit():
id_value = int(id_number, 10)
#if len(str(id_value)) > nr_width:
# # The ID to be imported is too large to fit in the
# # users format. For now just create a new ID,
# # because that is also what happens with IDs that
# # are identical to IDs already in the database. If
# # the problem of colliding import and already
# # present IDs is solved the code here also needs
# # some solution.
# gramps_id = id_pattern % 1
#else:
gramps_id = id_pattern % id_value
return gramps_id
else:
def closure_func(gramps_id):
return gramps_id
return closure_func
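    # Worked example of the closure built above (values are illustrative).
    # With pattern "I%04d" the prefix is "I", so an imported ID with a numeric
    # tail is re-rendered in the user's width and everything else passes
    # through unchanged:
    #
    #     db.set_person_id_prefix("I%04d")
    #     db.id2user_format("I42")   # -> "I0042"
    #     db.id2user_format("X42")   # -> "X42"  (different prefix: unchanged)
    #     db.id2user_format(None)    # -> None   (falsy IDs pass through)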
def set_person_id_prefix(self, val):
"""
Set the naming template for Gramps Person ID values.
The string is expected to be in the form of a simple text string, or
in a format that contains a C/Python style format string using %d,
such as I%d or I%04d.
"""
self.person_prefix = self._validated_id_prefix(val, "I")
self.id2user_format = self.__id2user_format(self.person_prefix)
def set_citation_id_prefix(self, val):
"""
Set the naming template for Gramps Citation ID values.
The string is expected to be in the form of a simple text string, or
in a format that contains a C/Python style format string using %d,
such as C%d or C%04d.
"""
self.citation_prefix = self._validated_id_prefix(val, "C")
self.cid2user_format = self.__id2user_format(self.citation_prefix)
def set_source_id_prefix(self, val):
"""
Set the naming template for Gramps Source ID values.
The string is expected to be in the form of a simple text string, or
in a format that contains a C/Python style format string using %d,
such as S%d or S%04d.
"""
self.source_prefix = self._validated_id_prefix(val, "S")
self.sid2user_format = self.__id2user_format(self.source_prefix)
def set_media_id_prefix(self, val):
"""
Set the naming template for Gramps Media ID values.
The string is expected to be in the form of a simple text string, or
in a format that contains a C/Python style format string using %d,
such as O%d or O%04d.
"""
self.media_prefix = self._validated_id_prefix(val, "O")
self.oid2user_format = self.__id2user_format(self.media_prefix)
def set_place_id_prefix(self, val):
"""
Set the naming template for Gramps Place ID values.
The string is expected to be in the form of a simple text string, or
in a format that contains a C/Python style format string using %d,
such as P%d or P%04d.
"""
self.place_prefix = self._validated_id_prefix(val, "P")
self.pid2user_format = self.__id2user_format(self.place_prefix)
def set_family_id_prefix(self, val):
"""
Set the naming template for Gramps Family ID values. The string is
expected to be in the form of a simple text string, or in a format
that contains a C/Python style format string using %d, such as F%d
or F%04d.
"""
self.family_prefix = self._validated_id_prefix(val, "F")
self.fid2user_format = self.__id2user_format(self.family_prefix)
def set_event_id_prefix(self, val):
"""
Set the naming template for Gramps Event ID values.
The string is expected to be in the form of a simple text string, or
in a format that contains a C/Python style format string using %d,
such as E%d or E%04d.
"""
self.event_prefix = self._validated_id_prefix(val, "E")
self.eid2user_format = self.__id2user_format(self.event_prefix)
def set_repository_id_prefix(self, val):
"""
Set the naming template for Gramps Repository ID values.
The string is expected to be in the form of a simple text string, or
in a format that contains a C/Python style format string using %d,
such as R%d or R%04d.
"""
self.repository_prefix = self._validated_id_prefix(val, "R")
self.rid2user_format = self.__id2user_format(self.repository_prefix)
def set_note_id_prefix(self, val):
"""
Set the naming template for Gramps Note ID values.
The string is expected to be in the form of a simple text string, or
in a format that contains a C/Python style format string using %d,
such as N%d or N%04d.
"""
self.note_prefix = self._validated_id_prefix(val, "N")
self.nid2user_format = self.__id2user_format(self.note_prefix)
def set_prefixes(self, person, media, family, source, citation,
place, event, repository, note):
self.set_person_id_prefix(person)
self.set_media_id_prefix(media)
self.set_family_id_prefix(family)
self.set_source_id_prefix(source)
self.set_citation_id_prefix(citation)
self.set_place_id_prefix(place)
self.set_event_id_prefix(event)
self.set_repository_id_prefix(repository)
self.set_note_id_prefix(note)
################################################################
#
# find_next_*_gramps_id methods
#
################################################################
def _find_next_gramps_id(self, prefix, map_index, obj_key):
"""
Helper function for find_next_<object>_gramps_id methods
"""
index = prefix % map_index
while self._has_gramps_id(obj_key, index):
map_index += 1
index = prefix % map_index
map_index += 1
return (map_index, index)
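    # Worked example (values assumed): with prefix "I%04d" and map_index 3 the
    # loop probes I0003, I0004, ... until _has_gramps_id() reports a free slot.
    # If I0003 and I0004 are taken but I0005 is free, the call returns
    # (6, "I0005"), so the next allocation starts probing at I0006.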
def find_next_person_gramps_id(self):
"""
Return the next available GRAMPS' ID for a Person object based off the
person ID prefix.
"""
self.pmap_index, gid = self._find_next_gramps_id(self.person_prefix,
self.pmap_index,
PERSON_KEY)
return gid
def find_next_place_gramps_id(self):
"""
Return the next available GRAMPS' ID for a Place object based off the
place ID prefix.
"""
self.lmap_index, gid = self._find_next_gramps_id(self.place_prefix,
self.lmap_index,
PLACE_KEY)
return gid
def find_next_event_gramps_id(self):
"""
Return the next available GRAMPS' ID for a Event object based off the
event ID prefix.
"""
self.emap_index, gid = self._find_next_gramps_id(self.event_prefix,
self.emap_index,
EVENT_KEY)
return gid
def find_next_media_gramps_id(self):
"""
Return the next available GRAMPS' ID for a Media object based
off the media object ID prefix.
"""
self.omap_index, gid = self._find_next_gramps_id(self.media_prefix,
self.omap_index,
MEDIA_KEY)
return gid
def find_next_citation_gramps_id(self):
"""
Return the next available GRAMPS' ID for a Citation object based off the
citation ID prefix.
"""
self.cmap_index, gid = self._find_next_gramps_id(self.citation_prefix,
self.cmap_index,
CITATION_KEY)
return gid
def find_next_source_gramps_id(self):
"""
Return the next available GRAMPS' ID for a Source object based off the
source ID prefix.
"""
self.smap_index, gid = self._find_next_gramps_id(self.source_prefix,
self.smap_index,
SOURCE_KEY)
return gid
def find_next_family_gramps_id(self):
"""
Return the next available GRAMPS' ID for a Family object based off the
family ID prefix.
"""
self.fmap_index, gid = self._find_next_gramps_id(self.family_prefix,
self.fmap_index,
FAMILY_KEY)
return gid
def find_next_repository_gramps_id(self):
"""
        Return the next available GRAMPS' ID for a Repository object based
off the repository ID prefix.
"""
self.rmap_index, gid = self._find_next_gramps_id(self.repository_prefix,
self.rmap_index,
REPOSITORY_KEY)
return gid
def find_next_note_gramps_id(self):
"""
Return the next available GRAMPS' ID for a Note object based off the
note ID prefix.
"""
self.nmap_index, gid = self._find_next_gramps_id(self.note_prefix,
self.nmap_index,
NOTE_KEY)
return gid
################################################################
#
# get_number_of_* methods
#
################################################################
def _get_number_of(self, obj_key):
"""
Return the number of objects currently in the database.
"""
raise NotImplementedError
def get_number_of_people(self):
"""
Return the number of people currently in the database.
"""
return self._get_number_of(PERSON_KEY)
def get_number_of_events(self):
"""
Return the number of events currently in the database.
"""
return self._get_number_of(EVENT_KEY)
def get_number_of_places(self):
"""
Return the number of places currently in the database.
"""
return self._get_number_of(PLACE_KEY)
def get_number_of_tags(self):
"""
Return the number of tags currently in the database.
"""
return self._get_number_of(TAG_KEY)
def get_number_of_families(self):
"""
Return the number of families currently in the database.
"""
return self._get_number_of(FAMILY_KEY)
def get_number_of_notes(self):
"""
Return the number of notes currently in the database.
"""
return self._get_number_of(NOTE_KEY)
def get_number_of_citations(self):
"""
Return the number of citations currently in the database.
"""
return self._get_number_of(CITATION_KEY)
def get_number_of_sources(self):
"""
Return the number of sources currently in the database.
"""
return self._get_number_of(SOURCE_KEY)
def get_number_of_media(self):
"""
Return the number of media objects currently in the database.
"""
return self._get_number_of(MEDIA_KEY)
def get_number_of_repositories(self):
"""
Return the number of source repositories currently in the database.
"""
return self._get_number_of(REPOSITORY_KEY)
################################################################
#
# get_*_gramps_ids methods
#
################################################################
def _get_gramps_ids(self, obj_key):
"""
Return a list of Gramps IDs, one ID for each object in the
database.
"""
raise NotImplementedError
def get_person_gramps_ids(self):
"""
Return a list of Gramps IDs, one ID for each Person in the
database.
"""
return self._get_gramps_ids(PERSON_KEY)
def get_family_gramps_ids(self):
"""
Return a list of Gramps IDs, one ID for each Family in the
database.
"""
return self._get_gramps_ids(FAMILY_KEY)
def get_source_gramps_ids(self):
"""
Return a list of Gramps IDs, one ID for each Source in the
database.
"""
return self._get_gramps_ids(SOURCE_KEY)
def get_citation_gramps_ids(self):
"""
Return a list of Gramps IDs, one ID for each Citation in the
database.
"""
return self._get_gramps_ids(CITATION_KEY)
def get_event_gramps_ids(self):
"""
Return a list of Gramps IDs, one ID for each Event in the
database.
"""
return self._get_gramps_ids(EVENT_KEY)
def get_media_gramps_ids(self):
"""
Return a list of Gramps IDs, one ID for each Media in the
database.
"""
return self._get_gramps_ids(MEDIA_KEY)
def get_place_gramps_ids(self):
"""
Return a list of Gramps IDs, one ID for each Place in the
database.
"""
return self._get_gramps_ids(PLACE_KEY)
def get_repository_gramps_ids(self):
"""
Return a list of Gramps IDs, one ID for each Repository in the
database.
"""
return self._get_gramps_ids(REPOSITORY_KEY)
def get_note_gramps_ids(self):
"""
Return a list of Gramps IDs, one ID for each Note in the
database.
"""
return self._get_gramps_ids(NOTE_KEY)
################################################################
#
# get_*_from_handle methods
#
################################################################
def _get_from_handle(self, obj_key, obj_class, handle):
if handle is None:
raise HandleError('Handle is None')
if not handle:
raise HandleError('Handle is empty')
data = self._get_raw_data(obj_key, handle)
if data:
return obj_class.create(data)
else:
raise HandleError('Handle %s not found' % handle)
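    # Usage sketch (hypothetical caller): lookups by handle raise HandleError
    # instead of returning None, so code that merely probes should catch it:
    #
    #     try:
    #         person = db.get_person_from_handle(handle)
    #     except HandleError:
    #         person = None   # handle was None, empty, or not in the database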
def get_event_from_handle(self, handle):
return self._get_from_handle(EVENT_KEY, Event, handle)
def get_family_from_handle(self, handle):
return self._get_from_handle(FAMILY_KEY, Family, handle)
def get_repository_from_handle(self, handle):
return self._get_from_handle(REPOSITORY_KEY, Repository, handle)
def get_person_from_handle(self, handle):
return self._get_from_handle(PERSON_KEY, Person, handle)
def get_place_from_handle(self, handle):
return self._get_from_handle(PLACE_KEY, Place, handle)
def get_citation_from_handle(self, handle):
return self._get_from_handle(CITATION_KEY, Citation, handle)
def get_source_from_handle(self, handle):
return self._get_from_handle(SOURCE_KEY, Source, handle)
def get_note_from_handle(self, handle):
return self._get_from_handle(NOTE_KEY, Note, handle)
def get_media_from_handle(self, handle):
return self._get_from_handle(MEDIA_KEY, Media, handle)
def get_tag_from_handle(self, handle):
return self._get_from_handle(TAG_KEY, Tag, handle)
################################################################
#
# get_*_from_gramps_id methods
#
################################################################
def get_person_from_gramps_id(self, gramps_id):
data = self._get_raw_person_from_id_data(gramps_id)
return Person.create(data)
def get_family_from_gramps_id(self, gramps_id):
data = self._get_raw_family_from_id_data(gramps_id)
return Family.create(data)
def get_citation_from_gramps_id(self, gramps_id):
data = self._get_raw_citation_from_id_data(gramps_id)
return Citation.create(data)
def get_source_from_gramps_id(self, gramps_id):
data = self._get_raw_source_from_id_data(gramps_id)
return Source.create(data)
def get_event_from_gramps_id(self, gramps_id):
data = self._get_raw_event_from_id_data(gramps_id)
return Event.create(data)
def get_media_from_gramps_id(self, gramps_id):
data = self._get_raw_media_from_id_data(gramps_id)
return Media.create(data)
def get_place_from_gramps_id(self, gramps_id):
data = self._get_raw_place_from_id_data(gramps_id)
return Place.create(data)
def get_repository_from_gramps_id(self, gramps_id):
data = self._get_raw_repository_from_id_data(gramps_id)
return Repository.create(data)
def get_note_from_gramps_id(self, gramps_id):
data = self._get_raw_note_from_id_data(gramps_id)
return Note.create(data)
################################################################
#
# has_*_handle methods
#
################################################################
def _has_handle(self, obj_key, handle):
"""
Return True if the handle exists in the database.
"""
raise NotImplementedError
def has_person_handle(self, handle):
return self._has_handle(PERSON_KEY, handle)
def has_family_handle(self, handle):
return self._has_handle(FAMILY_KEY, handle)
def has_source_handle(self, handle):
return self._has_handle(SOURCE_KEY, handle)
def has_citation_handle(self, handle):
return self._has_handle(CITATION_KEY, handle)
def has_event_handle(self, handle):
return self._has_handle(EVENT_KEY, handle)
def has_media_handle(self, handle):
return self._has_handle(MEDIA_KEY, handle)
def has_place_handle(self, handle):
return self._has_handle(PLACE_KEY, handle)
def has_repository_handle(self, handle):
return self._has_handle(REPOSITORY_KEY, handle)
def has_note_handle(self, handle):
return self._has_handle(NOTE_KEY, handle)
def has_tag_handle(self, handle):
return self._has_handle(TAG_KEY, handle)
################################################################
#
# has_*_gramps_id methods
#
################################################################
def _has_gramps_id(self, obj_key, gramps_id):
raise NotImplementedError
def has_person_gramps_id(self, gramps_id):
return self._has_gramps_id(PERSON_KEY, gramps_id)
def has_family_gramps_id(self, gramps_id):
return self._has_gramps_id(FAMILY_KEY, gramps_id)
def has_source_gramps_id(self, gramps_id):
return self._has_gramps_id(SOURCE_KEY, gramps_id)
def has_citation_gramps_id(self, gramps_id):
return self._has_gramps_id(CITATION_KEY, gramps_id)
def has_event_gramps_id(self, gramps_id):
return self._has_gramps_id(EVENT_KEY, gramps_id)
def has_media_gramps_id(self, gramps_id):
return self._has_gramps_id(MEDIA_KEY, gramps_id)
def has_place_gramps_id(self, gramps_id):
return self._has_gramps_id(PLACE_KEY, gramps_id)
def has_repository_gramps_id(self, gramps_id):
return self._has_gramps_id(REPOSITORY_KEY, gramps_id)
def has_note_gramps_id(self, gramps_id):
return self._has_gramps_id(NOTE_KEY, gramps_id)
################################################################
#
# get_*_cursor methods
#
################################################################
def get_place_cursor(self):
return Cursor(self._iter_raw_place_data)
def get_place_tree_cursor(self):
return Cursor(self._iter_raw_place_tree_data)
def get_person_cursor(self):
return Cursor(self._iter_raw_person_data)
def get_family_cursor(self):
return Cursor(self._iter_raw_family_data)
def get_event_cursor(self):
return Cursor(self._iter_raw_event_data)
def get_note_cursor(self):
return Cursor(self._iter_raw_note_data)
def get_tag_cursor(self):
return Cursor(self._iter_raw_tag_data)
def get_repository_cursor(self):
return Cursor(self._iter_raw_repository_data)
def get_media_cursor(self):
return Cursor(self._iter_raw_media_data)
def get_citation_cursor(self):
return Cursor(self._iter_raw_citation_data)
def get_source_cursor(self):
return Cursor(self._iter_raw_source_data)
################################################################
#
# iter_*_handles methods
#
################################################################
def _iter_handles(self, obj_key):
raise NotImplementedError
def iter_person_handles(self):
"""
Return an iterator over handles for Persons in the database
"""
return self._iter_handles(PERSON_KEY)
def iter_family_handles(self):
"""
Return an iterator over handles for Families in the database
"""
return self._iter_handles(FAMILY_KEY)
def iter_citation_handles(self):
"""
Return an iterator over database handles, one handle for each Citation
in the database.
"""
return self._iter_handles(CITATION_KEY)
def iter_event_handles(self):
"""
Return an iterator over handles for Events in the database
"""
return self._iter_handles(EVENT_KEY)
def iter_media_handles(self):
"""
Return an iterator over handles for Media in the database
"""
return self._iter_handles(MEDIA_KEY)
def iter_note_handles(self):
"""
Return an iterator over handles for Notes in the database
"""
return self._iter_handles(NOTE_KEY)
def iter_place_handles(self):
"""
Return an iterator over handles for Places in the database
"""
return self._iter_handles(PLACE_KEY)
def iter_repository_handles(self):
"""
Return an iterator over handles for Repositories in the database
"""
return self._iter_handles(REPOSITORY_KEY)
def iter_source_handles(self):
"""
Return an iterator over handles for Sources in the database
"""
return self._iter_handles(SOURCE_KEY)
def iter_tag_handles(self):
"""
Return an iterator over handles for Tags in the database
"""
return self._iter_handles(TAG_KEY)
################################################################
#
# iter_* methods
#
################################################################
def _iter_objects(self, class_):
"""
Iterate over items in a class.
"""
cursor = self._get_table_func(class_.__name__, "cursor_func")
for data in cursor():
yield class_.create(data[1])
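    # The cursor yields (handle, raw_data) pairs, so data[1] above is the raw
    # serialized object.  Illustrative use of the public iterators built on
    # this helper (assuming an open database `db`):
    #
    #     surnames = {db._get_person_data(p)[1] for p in db.iter_people()}
    #     custom_notes = [n for n in db.iter_notes() if n.type.is_custom()]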
def iter_people(self):
return self._iter_objects(Person)
def iter_families(self):
return self._iter_objects(Family)
def iter_citations(self):
return self._iter_objects(Citation)
def iter_events(self):
return self._iter_objects(Event)
def iter_media(self):
return self._iter_objects(Media)
def iter_notes(self):
return self._iter_objects(Note)
def iter_places(self):
return self._iter_objects(Place)
def iter_repositories(self):
return self._iter_objects(Repository)
def iter_sources(self):
return self._iter_objects(Source)
def iter_tags(self):
return self._iter_objects(Tag)
################################################################
#
# _iter_raw_*_data methods
#
################################################################
def _iter_raw_data(self, obj_key):
raise NotImplementedError
def _iter_raw_person_data(self):
"""
Return an iterator over raw Person data.
"""
return self._iter_raw_data(PERSON_KEY)
def _iter_raw_family_data(self):
"""
Return an iterator over raw Family data.
"""
return self._iter_raw_data(FAMILY_KEY)
def _iter_raw_event_data(self):
"""
Return an iterator over raw Event data.
"""
return self._iter_raw_data(EVENT_KEY)
def _iter_raw_place_data(self):
"""
Return an iterator over raw Place data.
"""
return self._iter_raw_data(PLACE_KEY)
def _iter_raw_repository_data(self):
"""
Return an iterator over raw Repository data.
"""
return self._iter_raw_data(REPOSITORY_KEY)
def _iter_raw_source_data(self):
"""
Return an iterator over raw Source data.
"""
return self._iter_raw_data(SOURCE_KEY)
def _iter_raw_citation_data(self):
"""
Return an iterator over raw Citation data.
"""
return self._iter_raw_data(CITATION_KEY)
def _iter_raw_media_data(self):
"""
Return an iterator over raw Media data.
"""
return self._iter_raw_data(MEDIA_KEY)
def _iter_raw_note_data(self):
"""
Return an iterator over raw Note data.
"""
return self._iter_raw_data(NOTE_KEY)
def _iter_raw_tag_data(self):
"""
Return an iterator over raw Tag data.
"""
return self._iter_raw_data(TAG_KEY)
def _iter_raw_place_tree_data(self):
"""
Return an iterator over raw data in the place hierarchy.
"""
raise NotImplementedError
################################################################
#
# get_raw_*_data methods
#
################################################################
def _get_raw_data(self, obj_key, handle):
"""
Return raw (serialized and pickled) object from handle.
"""
raise NotImplementedError
def get_raw_person_data(self, handle):
return self._get_raw_data(PERSON_KEY, handle)
def get_raw_family_data(self, handle):
return self._get_raw_data(FAMILY_KEY, handle)
def get_raw_source_data(self, handle):
return self._get_raw_data(SOURCE_KEY, handle)
def get_raw_citation_data(self, handle):
return self._get_raw_data(CITATION_KEY, handle)
def get_raw_event_data(self, handle):
return self._get_raw_data(EVENT_KEY, handle)
def get_raw_media_data(self, handle):
return self._get_raw_data(MEDIA_KEY, handle)
def get_raw_place_data(self, handle):
return self._get_raw_data(PLACE_KEY, handle)
def get_raw_repository_data(self, handle):
return self._get_raw_data(REPOSITORY_KEY, handle)
def get_raw_note_data(self, handle):
return self._get_raw_data(NOTE_KEY, handle)
def get_raw_tag_data(self, handle):
return self._get_raw_data(TAG_KEY, handle)
################################################################
#
# get_raw_*_from_id_data methods
#
################################################################
def _get_raw_from_id_data(self, obj_key, gramps_id):
raise NotImplementedError
def _get_raw_person_from_id_data(self, gramps_id):
return self._get_raw_from_id_data(PERSON_KEY, gramps_id)
def _get_raw_family_from_id_data(self, gramps_id):
return self._get_raw_from_id_data(FAMILY_KEY, gramps_id)
def _get_raw_source_from_id_data(self, gramps_id):
return self._get_raw_from_id_data(SOURCE_KEY, gramps_id)
def _get_raw_citation_from_id_data(self, gramps_id):
return self._get_raw_from_id_data(CITATION_KEY, gramps_id)
def _get_raw_event_from_id_data(self, gramps_id):
return self._get_raw_from_id_data(EVENT_KEY, gramps_id)
def _get_raw_media_from_id_data(self, gramps_id):
return self._get_raw_from_id_data(MEDIA_KEY, gramps_id)
def _get_raw_place_from_id_data(self, gramps_id):
return self._get_raw_from_id_data(PLACE_KEY, gramps_id)
def _get_raw_repository_from_id_data(self, gramps_id):
return self._get_raw_from_id_data(REPOSITORY_KEY, gramps_id)
def _get_raw_note_from_id_data(self, gramps_id):
return self._get_raw_from_id_data(NOTE_KEY, gramps_id)
################################################################
#
# add_* methods
#
################################################################
def _add_base(self, obj, trans, set_gid, find_func, commit_func):
if not obj.handle:
obj.handle = create_id()
if (not obj.gramps_id) and set_gid:
obj.gramps_id = find_func()
if (not obj.gramps_id):
# give it a random value for the moment:
obj.gramps_id = str(random.random())
commit_func(obj, trans)
return obj.handle
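    # Flow sketch for the add_* wrappers below (assuming an open database and
    # an active transaction object `trans`, e.g. a DbTxn): a brand-new object
    # gets both identifiers filled in before being committed.
    #
    #     person = Person()
    #     handle = db.add_person(person, trans)
    #     # person.handle == handle; person.gramps_id is the next free "I%04d" value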
def add_person(self, person, trans, set_gid=True):
return self._add_base(person, trans, set_gid,
self.find_next_person_gramps_id,
self.commit_person)
def add_family(self, family, trans, set_gid=True):
return self._add_base(family, trans, set_gid,
self.find_next_family_gramps_id,
self.commit_family)
def add_event(self, event, trans, set_gid=True):
return self._add_base(event, trans, set_gid,
self.find_next_event_gramps_id,
self.commit_event)
def add_place(self, place, trans, set_gid=True):
return self._add_base(place, trans, set_gid,
self.find_next_place_gramps_id,
self.commit_place)
def add_repository(self, repository, trans, set_gid=True):
return self._add_base(repository, trans, set_gid,
self.find_next_repository_gramps_id,
self.commit_repository)
def add_source(self, source, trans, set_gid=True):
return self._add_base(source, trans, set_gid,
self.find_next_source_gramps_id,
self.commit_source)
def add_citation(self, citation, trans, set_gid=True):
return self._add_base(citation, trans, set_gid,
self.find_next_citation_gramps_id,
self.commit_citation)
def add_media(self, media, trans, set_gid=True):
return self._add_base(media, trans, set_gid,
self.find_next_media_gramps_id,
self.commit_media)
def add_note(self, note, trans, set_gid=True):
return self._add_base(note, trans, set_gid,
self.find_next_note_gramps_id,
self.commit_note)
def add_tag(self, tag, trans):
if not tag.handle:
tag.handle = create_id()
self.commit_tag(tag, trans)
return tag.handle
################################################################
#
# commit_* methods
#
################################################################
def _commit_base(self, obj, obj_key, trans, change_time):
"""
Commit the specified object to the database, storing the changes as
part of the transaction.
"""
raise NotImplementedError
def commit_person(self, person, trans, change_time=None):
"""
Commit the specified Person to the database, storing the changes as
part of the transaction.
"""
old_data = self._commit_base(person, PERSON_KEY, trans, change_time)
if old_data:
old_person = Person(old_data)
# Update gender statistics if necessary
if (old_person.gender != person.gender
or (old_person.primary_name.first_name !=
person.primary_name.first_name)):
self.genderStats.uncount_person(old_person)
self.genderStats.count_person(person)
# Update surname list if necessary
if (self._order_by_person_key(person) !=
self._order_by_person_key(old_person)):
self.remove_from_surname_list(old_person)
self.add_to_surname_list(person, trans.batch)
else:
self.genderStats.count_person(person)
self.add_to_surname_list(person, trans.batch)
# Other misc update tasks:
self.individual_attributes.update(
[str(attr.type) for attr in person.attribute_list
if attr.type.is_custom() and str(attr.type)])
self.event_role_names.update([str(eref.role)
for eref in person.event_ref_list
if eref.role.is_custom()])
self.name_types.update([str(name.type)
for name in ([person.primary_name]
+ person.alternate_names)
if name.type.is_custom()])
all_surn = [] # new list we will use for storage
all_surn += person.primary_name.get_surname_list()
for asurname in person.alternate_names:
all_surn += asurname.get_surname_list()
self.origin_types.update([str(surn.origintype) for surn in all_surn
if surn.origintype.is_custom()])
all_surn = None
self.url_types.update([str(url.type) for url in person.urls
if url.type.is_custom()])
attr_list = []
for mref in person.media_list:
attr_list += [str(attr.type) for attr in mref.attribute_list
if attr.type.is_custom() and str(attr.type)]
self.media_attributes.update(attr_list)
def commit_family(self, family, trans, change_time=None):
"""
Commit the specified Family to the database, storing the changes as
part of the transaction.
"""
self._commit_base(family, FAMILY_KEY, trans, change_time)
# Misc updates:
self.family_attributes.update(
[str(attr.type) for attr in family.attribute_list
if attr.type.is_custom() and str(attr.type)])
rel_list = []
for ref in family.child_ref_list:
if ref.frel.is_custom():
rel_list.append(str(ref.frel))
if ref.mrel.is_custom():
rel_list.append(str(ref.mrel))
self.child_ref_types.update(rel_list)
self.event_role_names.update(
[str(eref.role) for eref in family.event_ref_list
if eref.role.is_custom()])
if family.type.is_custom():
self.family_rel_types.add(str(family.type))
attr_list = []
for mref in family.media_list:
attr_list += [str(attr.type) for attr in mref.attribute_list
if attr.type.is_custom() and str(attr.type)]
self.media_attributes.update(attr_list)
def commit_citation(self, citation, trans, change_time=None):
"""
Commit the specified Citation to the database, storing the changes as
part of the transaction.
"""
self._commit_base(citation, CITATION_KEY, trans, change_time)
# Misc updates:
attr_list = []
for mref in citation.media_list:
attr_list += [str(attr.type) for attr in mref.attribute_list
if attr.type.is_custom() and str(attr.type)]
self.media_attributes.update(attr_list)
self.source_attributes.update(
[str(attr.type) for attr in citation.attribute_list
if attr.type.is_custom() and str(attr.type)])
def commit_source(self, source, trans, change_time=None):
"""
Commit the specified Source to the database, storing the changes as
part of the transaction.
"""
self._commit_base(source, SOURCE_KEY, trans, change_time)
# Misc updates:
self.source_media_types.update(
[str(ref.media_type) for ref in source.reporef_list
if ref.media_type.is_custom()])
attr_list = []
for mref in source.media_list:
attr_list += [str(attr.type) for attr in mref.attribute_list
if attr.type.is_custom() and str(attr.type)]
self.media_attributes.update(attr_list)
self.source_attributes.update(
[str(attr.type) for attr in source.attribute_list
if attr.type.is_custom() and str(attr.type)])
def commit_repository(self, repository, trans, change_time=None):
"""
Commit the specified Repository to the database, storing the changes
as part of the transaction.
"""
self._commit_base(repository, REPOSITORY_KEY, trans, change_time)
# Misc updates:
if repository.type.is_custom():
self.repository_types.add(str(repository.type))
self.url_types.update([str(url.type) for url in repository.urls
if url.type.is_custom()])
def commit_note(self, note, trans, change_time=None):
"""
Commit the specified Note to the database, storing the changes as part
of the transaction.
"""
self._commit_base(note, NOTE_KEY, trans, change_time)
# Misc updates:
if note.type.is_custom():
self.note_types.add(str(note.type))
def commit_place(self, place, trans, change_time=None):
"""
Commit the specified Place to the database, storing the changes as
part of the transaction.
"""
self._commit_base(place, PLACE_KEY, trans, change_time)
# Misc updates:
if place.get_type().is_custom():
self.place_types.add(str(place.get_type()))
self.url_types.update([str(url.type) for url in place.urls
if url.type.is_custom()])
attr_list = []
for mref in place.media_list:
attr_list += [str(attr.type) for attr in mref.attribute_list
if attr.type.is_custom() and str(attr.type)]
self.media_attributes.update(attr_list)
def commit_event(self, event, trans, change_time=None):
"""
Commit the specified Event to the database, storing the changes as
part of the transaction.
"""
self._commit_base(event, EVENT_KEY, trans, change_time)
# Misc updates:
self.event_attributes.update(
[str(attr.type) for attr in event.attribute_list
if attr.type.is_custom() and str(attr.type)])
if event.type.is_custom():
self.event_names.add(str(event.type))
attr_list = []
for mref in event.media_list:
attr_list += [str(attr.type) for attr in mref.attribute_list
if attr.type.is_custom() and str(attr.type)]
self.media_attributes.update(attr_list)
def commit_tag(self, tag, trans, change_time=None):
"""
Commit the specified Tag to the database, storing the changes as
part of the transaction.
"""
self._commit_base(tag, TAG_KEY, trans, change_time)
def commit_media(self, media, trans, change_time=None):
"""
Commit the specified Media to the database, storing the changes
as part of the transaction.
"""
self._commit_base(media, MEDIA_KEY, trans, change_time)
# Misc updates:
self.media_attributes.update(
[str(attr.type) for attr in media.attribute_list
if attr.type.is_custom() and str(attr.type)])
def _after_commit(self, transaction):
"""
Post-transaction commit processing
"""
# Reset callbacks if necessary
if transaction.batch or not len(transaction):
return
if self.undo_callback:
self.undo_callback(_("_Undo %s") % transaction.get_description())
if self.redo_callback:
self.redo_callback(None)
if self.undo_history_callback:
self.undo_history_callback()
################################################################
#
# remove_* methods
#
################################################################
def _do_remove(self, handle, transaction, obj_key):
raise NotImplementedError
def remove_person(self, handle, transaction):
"""
Remove the Person specified by the database handle from the
database, preserving the change in the passed transaction.
"""
self._do_remove(handle, transaction, PERSON_KEY)
def remove_source(self, handle, transaction):
"""
Remove the Source specified by the database handle from the
database, preserving the change in the passed transaction.
"""
self._do_remove(handle, transaction, SOURCE_KEY)
def remove_citation(self, handle, transaction):
"""
Remove the Citation specified by the database handle from the
database, preserving the change in the passed transaction.
"""
self._do_remove(handle, transaction, CITATION_KEY)
def remove_event(self, handle, transaction):
"""
Remove the Event specified by the database handle from the
database, preserving the change in the passed transaction.
"""
self._do_remove(handle, transaction, EVENT_KEY)
def remove_media(self, handle, transaction):
"""
        Remove the Media object specified by the database handle from the
database, preserving the change in the passed transaction.
"""
self._do_remove(handle, transaction, MEDIA_KEY)
def remove_place(self, handle, transaction):
"""
Remove the Place specified by the database handle from the
database, preserving the change in the passed transaction.
"""
self._do_remove(handle, transaction, PLACE_KEY)
def remove_family(self, handle, transaction):
"""
Remove the Family specified by the database handle from the
database, preserving the change in the passed transaction.
"""
self._do_remove(handle, transaction, FAMILY_KEY)
def remove_repository(self, handle, transaction):
"""
Remove the Repository specified by the database handle from the
database, preserving the change in the passed transaction.
"""
self._do_remove(handle, transaction, REPOSITORY_KEY)
def remove_note(self, handle, transaction):
"""
Remove the Note specified by the database handle from the
database, preserving the change in the passed transaction.
"""
self._do_remove(handle, transaction, NOTE_KEY)
def remove_tag(self, handle, transaction):
"""
Remove the Tag specified by the database handle from the
database, preserving the change in the passed transaction.
"""
self._do_remove(handle, transaction, TAG_KEY)
################################################################
#
# get_*_types methods
#
################################################################
def get_event_attribute_types(self):
"""
        Return a list of all Attribute types associated with Event instances
in the database.
"""
return list(self.event_attributes)
def get_event_types(self):
"""
Return a list of all event types in the database.
"""
return list(self.event_names)
def get_person_event_types(self):
"""
Deprecated: Use get_event_types
"""
return list(self.event_names)
def get_person_attribute_types(self):
"""
        Return a list of all Attribute types associated with Person instances
in the database.
"""
return list(self.individual_attributes)
def get_family_attribute_types(self):
"""
        Return a list of all Attribute types associated with Family instances
in the database.
"""
return list(self.family_attributes)
def get_family_event_types(self):
"""
Deprecated: Use get_event_types
"""
return list(self.event_names)
def get_media_attribute_types(self):
"""
        Return a list of all Attribute types associated with Media and MediaRef
instances in the database.
"""
return list(self.media_attributes)
def get_family_relation_types(self):
"""
        Return a list of all relationship types associated with Family
instances in the database.
"""
return list(self.family_rel_types)
def get_child_reference_types(self):
"""
        Return a list of all child reference types associated with Family
instances in the database.
"""
return list(self.child_ref_types)
def get_event_roles(self):
"""
        Return a list of all custom event role names associated with Event
instances in the database.
"""
return list(self.event_role_names)
def get_name_types(self):
"""
        Return a list of all custom name types associated with Person
instances in the database.
"""
return list(self.name_types)
def get_origin_types(self):
"""
        Return a list of all custom origin types associated with Person/Surname
instances in the database.
"""
return list(self.origin_types)
def get_repository_types(self):
"""
        Return a list of all custom repository types associated with Repository
instances in the database.
"""
return list(self.repository_types)
def get_note_types(self):
"""
        Return a list of all custom note types associated with Note instances
in the database.
"""
return list(self.note_types)
def get_source_attribute_types(self):
"""
        Return a list of all Attribute types associated with Source/Citation
instances in the database.
"""
return list(self.source_attributes)
def get_source_media_types(self):
"""
        Return a list of all custom source media types associated with Source
instances in the database.
"""
return list(self.source_media_types)
def get_url_types(self):
"""
        Return a list of all custom name types associated with Url instances
in the database.
"""
return list(self.url_types)
def get_place_types(self):
"""
        Return a list of all custom place types associated with Place instances
in the database.
"""
return list(self.place_types)
################################################################
#
# get_*_bookmarks methods
#
################################################################
def get_bookmarks(self):
return self.bookmarks
def get_citation_bookmarks(self):
return self.citation_bookmarks
def get_event_bookmarks(self):
return self.event_bookmarks
def get_family_bookmarks(self):
return self.family_bookmarks
def get_media_bookmarks(self):
return self.media_bookmarks
def get_note_bookmarks(self):
return self.note_bookmarks
def get_place_bookmarks(self):
return self.place_bookmarks
def get_repo_bookmarks(self):
return self.repo_bookmarks
def get_source_bookmarks(self):
return self.source_bookmarks
################################################################
#
# Other methods
#
################################################################
def get_default_handle(self):
return self._get_metadata("default-person-handle", None)
def get_default_person(self):
handle = self.get_default_handle()
if handle:
return self.get_person_from_handle(handle)
else:
return None
def set_default_person_handle(self, handle):
self._set_metadata("default-person-handle", handle)
self.emit('home-person-changed')
def get_mediapath(self):
return self._get_metadata("media-path", None)
def set_mediapath(self, mediapath):
return self._set_metadata("media-path", mediapath)
def get_surname_list(self):
"""
Return the list of locale-sorted surnames contained in the database.
"""
return self.surname_list
def add_to_surname_list(self, person, batch_transaction):
"""
Add surname to surname list
"""
if batch_transaction:
return
name = None
primary_name = person.get_primary_name()
if primary_name:
surname_list = primary_name.get_surname_list()
if len(surname_list) > 0:
name = surname_list[0].surname
if name is None:
return
i = bisect.bisect(self.surname_list, name)
if 0 < i <= len(self.surname_list):
if self.surname_list[i-1] != name:
self.surname_list.insert(i, name)
else:
self.surname_list.insert(i, name)
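    # The bisect logic above keeps self.surname_list sorted and free of
    # duplicates without a full re-sort.  Trace with assumed values: given
    # surname_list == ["Adams", "Baker"], adding a person whose primary
    # surname is "Baker" finds i == 2, sees surname_list[1] == "Baker" and
    # inserts nothing; adding "Curie" also finds i == 2 and appends it.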
def remove_from_surname_list(self, person):
"""
Check whether there are persons with the same surname left in
the database.
If not then we need to remove the name from the list.
The function must be overridden in the derived class.
"""
name = None
primary_name = person.get_primary_name()
if primary_name:
surname_list = primary_name.get_surname_list()
if len(surname_list) > 0:
name = surname_list[0].surname
if name is None:
return
if name in self.surname_list:
self.surname_list.remove(name)
def get_gender_stats(self):
"""
Returns a dictionary of
{given_name: (male_count, female_count, unknown_count)}
"""
raise NotImplementedError
def save_gender_stats(self, gstats):
raise NotImplementedError
def get_researcher(self):
return self.owner
def set_researcher(self, owner):
self.owner.set_from(owner)
def request_rebuild(self):
self.emit('person-rebuild')
self.emit('family-rebuild')
self.emit('place-rebuild')
self.emit('source-rebuild')
self.emit('citation-rebuild')
self.emit('media-rebuild')
self.emit('event-rebuild')
self.emit('repository-rebuild')
self.emit('note-rebuild')
self.emit('tag-rebuild')
def get_save_path(self):
return self._directory
def _set_save_path(self, directory):
self._directory = directory
if directory:
self.full_name = os.path.abspath(self._directory)
self.path = self.full_name
self.brief_name = os.path.basename(self._directory)
else:
self.full_name = None
self.path = None
self.brief_name = None
def report_bm_change(self):
"""
Add 1 to the number of bookmark changes during this session.
"""
self._bm_changes += 1
def db_has_bm_changes(self):
"""
        Return whether there were bookmark changes during the session.
"""
return self._bm_changes > 0
def get_undodb(self):
return self.undodb
def undo(self, update_history=True):
return self.undodb.undo(update_history)
def redo(self, update_history=True):
return self.undodb.redo(update_history)
def get_summary(self):
"""
        Return a dictionary of summary items.
Should include, if possible:
_("Number of people")
_("Version")
_("Data version")
"""
return {
_("Number of people"): self.get_number_of_people(),
_("Number of families"): self.get_number_of_families(),
_("Number of sources"): self.get_number_of_sources(),
_("Number of citations"): self.get_number_of_citations(),
_("Number of events"): self.get_number_of_events(),
_("Number of media"): self.get_number_of_media(),
_("Number of places"): self.get_number_of_places(),
_("Number of repositories"): self.get_number_of_repositories(),
_("Number of notes"): self.get_number_of_notes(),
_("Number of tags"): self.get_number_of_tags(),
_("Schema version"): ".".join([str(v) for v in self.VERSION]),
}
def _order_by_person_key(self, person):
"""
        All non-patronymic/matronymic surnames are used in indexing;
        patronymic/matronymic surnames are not, as they change with every
        generation.  Returns a byte string suitable for sorting.
"""
order_by = ""
if person.primary_name:
order_by_list = [surname.surname + " " +
person.primary_name.first_name
for surname in person.primary_name.surname_list
if (int(surname.origintype) not in
[NameOriginType.PATRONYMIC,
NameOriginType.MATRONYMIC])]
order_by = " ".join(order_by_list)
return glocale.sort_key(order_by)
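    # Hedged example of the key above (names assumed): for a primary name with
    # first name "Anna" and surnames [("Svensson", ordinary origin),
    # ("Larsdotter", PATRONYMIC)], only "Svensson Anna" contributes to
    # order_by; the patronymic surname is skipped, so sorting stays stable
    # across generations.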
def _get_person_data(self, person):
"""
Given a Person, return primary_name.first_name and surname.
"""
given_name = ""
surname = ""
if person:
primary_name = person.get_primary_name()
if primary_name:
given_name = primary_name.get_first_name()
surname_list = primary_name.get_surname_list()
if len(surname_list) > 0:
surname_obj = surname_list[0]
if surname_obj:
surname = surname_obj.surname
return (given_name, surname)
def _get_place_data(self, place):
"""
Given a Place, return the first PlaceRef handle.
"""
enclosed_by = ""
for placeref in place.get_placeref_list():
enclosed_by = placeref.ref
break
return enclosed_by
|
dermoth/gramps
|
gramps/gen/db/generic.py
|
Python
|
gpl-2.0
| 88,300 | 0.000453 |
from djblets.cache.backend import cache_memoize
class BugTracker(object):
"""An interface to a bug tracker.
BugTracker subclasses are used to enable interaction with different
bug trackers.
"""
def get_bug_info(self, repository, bug_id):
"""Get the information for the specified bug.
This should return a dictionary with 'summary', 'description', and
'status' keys.
This is cached for 60 seconds to reduce the number of queries to the
bug trackers and make things seem fast after the first infobox load,
but is still a short enough time to give relatively fresh data.
"""
return cache_memoize(self.make_bug_cache_key(repository, bug_id),
lambda: self.get_bug_info_uncached(repository,
bug_id),
expiration=60)
def get_bug_info_uncached(self, repository, bug_id):
"""Get the information for the specified bug (implementation).
This should be implemented by subclasses, and should return a
dictionary with 'summary', 'description', and 'status' keys.
If any of those are unsupported by the given bug tracker, the unknown
values should be given as an empty string.
"""
return {
'summary': '',
'description': '',
'status': '',
}
def make_bug_cache_key(self, repository, bug_id):
"""Returns a key to use when caching fetched bug information."""
return 'repository-%s-bug-%s' % (repository.pk, bug_id)
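# Hedged sketch of a concrete tracker (the class name, URL and field names
# below are assumptions, and fetch_json stands in for whatever HTTP helper the
# service would use): a subclass only needs to override
# get_bug_info_uncached(); caching and cache-key construction come from the
# base class.
#
#     class ExampleTracker(BugTracker):
#         def get_bug_info_uncached(self, repository, bug_id):
#             data = fetch_json('https://bugs.example.com/api/%s' % bug_id)
#             return {
#                 'summary': data.get('title', ''),
#                 'description': data.get('body', ''),
#                 'status': data.get('state', ''),
#             }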
|
reviewboard/reviewboard
|
reviewboard/hostingsvcs/bugtracker.py
|
Python
|
mit
| 1,633 | 0 |
'''
Task Coach - Your friendly task manager
Copyright (C) 2004-2013 Task Coach developers <developers@taskcoach.org>
Task Coach is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Task Coach is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from notebook import Notebook, BookPage
from frame import AuiManagedFrameWithDynamicCenterPane
from dialog import Dialog, NotebookDialog, HTMLDialog, AttachmentSelector
from itemctrl import Column
from listctrl import VirtualListCtrl
from checklistbox import CheckListBox
from treectrl import CheckTreeCtrl, TreeListCtrl
from squaremap import SquareMap
from timeline import Timeline
from datectrl import DateTimeCtrl, TimeEntry
from textctrl import SingleLineTextCtrl, MultiLineTextCtrl, StaticTextWithToolTip
from panel import PanelWithBoxSizer, BoxWithFlexGridSizer, BoxWithBoxSizer
from searchctrl import SearchCtrl
from spinctrl import SpinCtrl
from tooltip import ToolTipMixin, SimpleToolTip
from dirchooser import DirectoryChooser
from fontpicker import FontPickerCtrl
from syncmlwarning import SyncMLWarningDialog
from calendarwidget import Calendar
from calendarconfig import CalendarConfigDialog
from password import GetPassword
import masked
from wx.lib import sized_controls
|
TaskEvolution/Task-Coach-Evolution
|
taskcoach/taskcoachlib/widgets/__init__.py
|
Python
|
gpl-3.0
| 1,717 | 0.000582 |
#!/usr/bin/env python
"""simple thread pool
@author: dn13(dn13@gmail.com)
@author: Fibrizof(dfang84@gmail.com)
"""
import threading
import Queue
import new
# Exception raised when the pool is misused (e.g. new work submitted after join()).
class WorkerPoolError( Exception ):
pass
class Task(threading.Thread):
def __init__(self, queue, result_queue):
threading.Thread.__init__(self)
self.queue = queue
self.result_queue = result_queue
self.running = True
def cancel(self):
self.running = False
self.queue.put(None)
def run(self):
while self.running:
call = self.queue.get()
if call:
try:
                    result = call()
                    self.result_queue.put(result)
except:
pass
self.queue.task_done()
class WorkerPool( object ):
def __init__( self, threadnum ):
self.threadnum = threadnum
self.q = Queue.Queue()
self.result_q = Queue.Queue()
self.ts = [ Task(self.q, self.result_q) for i in range(threadnum) ]
self._registfunctions = {}
self.is_in_join = False
for t in self.ts :
t.setDaemon(True)
t.start()
def __del__(self):
try:
            # Calling cancel() twice is deliberate: the first pass flips every
            # thread's running flag to False, the second pushes another queue
            # item so blocked threads wake up and notice it.
            # (Lazily kept as one method instead of two separate interfaces.)
for t in self.ts:
t.cancel()
for t in self.ts:
t.cancel()
except:
pass
def __call__( self, work ):
if not self.is_in_join:
self.q.put( work )
else:
raise WorkerPoolError, 'Pool has been joined'
def join( self ):
self.is_in_join = True
self.q.join()
self.is_in_join = False
return
def runwithpool( self, _old ):
def _new( *args, **kwargs ):
self.q.put( lambda : _old( *args, **kwargs ) )
return _new
def registtopool( self, _old ):
if _old.__name__ in self._registfunctions :
raise WorkerPoolError, 'function name exists'
self._registfunctions[_old.__name__] = _old
return _old
def get_all_result(self):
result_list = []
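        # Drain the result queue without blocking; Queue.Empty (or any other
        # error) is swallowed, and the loop stops once the queue reports empty.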
while True:
try:
result_list.append(self.result_q.get_nowait())
except Exception as e:
if 0 == self.result_q.qsize():
break
else:
continue
return result_list
def __getattr__( self, name ):
if name in self._registfunctions :
return self._registfunctions[name]
raise AttributeError, '%s not found' % name
if __name__ == '__main__' :
import thread
p = WorkerPool(5)
@p.runwithpool
def foo( a ):
print 'foo>', thread.get_ident(), '>', a
return
@p.registtopool
def bar( b ):
print 'bar>', thread.get_ident(), '>', b
for i in range(10):
foo(i)
p.bar(i+100)
p( lambda : bar(200) )
p.join()
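    # Added for illustration (not in the original script): after join() the
    # values queued by the worker threads can be drained in one call. Here
    # foo() and bar() return nothing, so the list is just Nones.
    print 'results>', p.get_all_result()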
|
hackshel/py-aluminium
|
src/__furture__/simplepool.py
|
Python
|
bsd-3-clause
| 3,289 | 0.023995 |
from ..utils.command import BaseCommand
from ..utils.tabulate import tabulate
from ..utils.info import get_packages, Sources
class Colors:
PURPLE = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
UNDERLINE = '\033[4m'
ENDC = '\033[0m'
class Command(BaseCommand):
name = "outdated"
@classmethod
def run(cls, args):
cls._run()
@classmethod
def _run(cls):
packages = get_packages((Sources.required, Sources.installed))
packages = list(filter(lambda p: p.wanted_rule, packages)) # filter out ones with no wanted version (not in package.json)
packages_to_display = []
for package in packages:
package.get_wanted_version()
if not package.version or (
package.version != package.latest_version or
package.version != package.wanted_version):
packages_to_display.append(package)
cls.display_outdated(packages_to_display)
@staticmethod
def display_outdated(packages):
if len(packages) == 0:
return
headings = ["package", "current", "wanted", "latest"]
headings = list(map(lambda heading: Colors.UNDERLINE+heading+Colors.ENDC, headings))
table = []
packages = sorted(packages, key=lambda package: package.name)
for package in packages:
table.append([
Colors.OKGREEN+package.name+Colors.ENDC,
package.version or "n/a",
Colors.OKGREEN+(package.wanted_version or "n/a")+Colors.ENDC,
Colors.PURPLE+(package.latest_version or "n/a")+Colors.ENDC
])
print(tabulate(table, headings, tablefmt="plain"))
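# A minimal usage sketch (added for illustration, not part of the original
# module). The command is normally dispatched by piton's CLI, but because the
# parsed `args` object is ignored by run(), it can also be driven directly:
#
#     from piton.commands.outdated import Command
#     Command.run(args=None)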
|
piton-package-manager/piton
|
piton/commands/outdated.py
|
Python
|
mit
| 1,511 | 0.031105 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import codecs
import importlib
import os
import shutil
from django.apps import apps
from django.core.management import CommandError, call_command
from django.db import DatabaseError, connection, models
from django.db.migrations import questioner
from django.test import ignore_warnings, mock, override_settings
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_text
from .models import UnicodeModel, UnserializableModel
from .test_base import MigrationTestBase
class MigrateTests(MigrationTestBase):
"""
Tests running the migrate command.
"""
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate(self):
"""
Tests basic usage of the migrate command.
"""
# Make sure no tables are created
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
# Run the migrations to 0001 only
call_command("migrate", "migrations", "0001", verbosity=0)
# Make sure the right tables exist
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
# Run migrations all the way
call_command("migrate", verbosity=0)
# Make sure the right tables exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
# Unmigrate everything
call_command("migrate", "migrations", "zero", verbosity=0)
# Make sure it's all gone
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate_fake_initial(self):
"""
#24184 - Tests that --fake-initial only works if all tables created in
        the initial migration of an app exist
"""
# Make sure no tables are created
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Run the migrations to 0001 only
call_command("migrate", "migrations", "0001", verbosity=0)
# Make sure the right tables exist
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
# Fake a roll-back
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
# Make sure the tables still exist
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
# Try to run initial migration
with self.assertRaises(DatabaseError):
call_command("migrate", "migrations", "0001", verbosity=0)
# Run initial migration with an explicit --fake-initial
out = six.StringIO()
with mock.patch('django.core.management.color.supports_color', lambda *args: False):
call_command("migrate", "migrations", "0001", fake_initial=True, stdout=out, verbosity=1)
self.assertIn(
"migrations.0001_initial... faked",
out.getvalue().lower()
)
# Run migrations all the way
call_command("migrate", verbosity=0)
# Make sure the right tables exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
# Fake a roll-back
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
# Make sure the tables still exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
# Try to run initial migration
with self.assertRaises(DatabaseError):
call_command("migrate", "migrations", verbosity=0)
# Run initial migration with an explicit --fake-initial
with self.assertRaises(DatabaseError):
# Fails because "migrations_tribble" does not exist but needs to in
# order to make --fake-initial work.
call_command("migrate", "migrations", fake_initial=True, verbosity=0)
        # Fake an apply
call_command("migrate", "migrations", fake=True, verbosity=0)
# Unmigrate everything
call_command("migrate", "migrations", "zero", verbosity=0)
# Make sure it's all gone
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"})
def test_migrate_conflict_exit(self):
"""
Makes sure that migrate exits if it detects a conflict.
"""
with self.assertRaisesMessage(CommandError, "Conflicting migrations detected"):
call_command("migrate", "migrations")
@ignore_warnings(category=RemovedInDjango20Warning)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate_list(self):
"""
Tests --list output of migrate command
"""
out = six.StringIO()
with mock.patch('django.core.management.color.supports_color', lambda *args: True):
call_command("migrate", list=True, stdout=out, verbosity=0, no_color=False)
self.assertEqual(
'\x1b[1mmigrations\n\x1b[0m'
' [ ] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower()
)
call_command("migrate", "migrations", "0001", verbosity=0)
out = six.StringIO()
# Giving the explicit app_label tests for selective `show_migration_list` in the command
call_command("migrate", "migrations", list=True, stdout=out, verbosity=0, no_color=True)
self.assertEqual(
'migrations\n'
' [x] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower()
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_showmigrations_list(self):
"""
Tests --list output of showmigrations command
"""
out = six.StringIO()
with mock.patch('django.core.management.color.supports_color', lambda *args: True):
call_command("showmigrations", format='list', stdout=out, verbosity=0, no_color=False)
self.assertEqual(
'\x1b[1mmigrations\n\x1b[0m'
' [ ] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower()
)
call_command("migrate", "migrations", "0001", verbosity=0)
out = six.StringIO()
# Giving the explicit app_label tests for selective `show_list` in the command
call_command("showmigrations", "migrations", format='list', stdout=out, verbosity=0, no_color=True)
self.assertEqual(
'migrations\n'
' [x] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower()
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"})
def test_showmigrations_plan(self):
"""
Tests --plan output of showmigrations command
"""
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out)
self.assertIn(
"[ ] migrations.0001_initial\n"
"[ ] migrations.0003_third\n"
"[ ] migrations.0002_second",
out.getvalue().lower()
)
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out, verbosity=2)
self.assertIn(
"[ ] migrations.0001_initial\n"
"[ ] migrations.0003_third ... (migrations.0001_initial)\n"
"[ ] migrations.0002_second ... (migrations.0001_initial)",
out.getvalue().lower()
)
call_command("migrate", "migrations", "0003", verbosity=0)
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out)
self.assertIn(
"[x] migrations.0001_initial\n"
"[x] migrations.0003_third\n"
"[ ] migrations.0002_second",
out.getvalue().lower()
)
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out, verbosity=2)
self.assertIn(
"[x] migrations.0001_initial\n"
"[x] migrations.0003_third ... (migrations.0001_initial)\n"
"[ ] migrations.0002_second ... (migrations.0001_initial)",
out.getvalue().lower()
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"})
def test_showmigrations_plan_no_migrations(self):
"""
Tests --plan output of showmigrations command without migrations
"""
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out)
self.assertEqual("", out.getvalue().lower())
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out, verbosity=2)
self.assertEqual("", out.getvalue().lower())
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"})
def test_showmigrations_plan_squashed(self):
"""
Tests --plan output of showmigrations command with squashed migrations.
"""
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out)
self.assertEqual(
"[ ] migrations.1_auto\n"
"[ ] migrations.2_auto\n"
"[ ] migrations.3_squashed_5\n"
"[ ] migrations.6_auto\n"
"[ ] migrations.7_auto\n",
out.getvalue().lower()
)
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out, verbosity=2)
self.assertEqual(
"[ ] migrations.1_auto\n"
"[ ] migrations.2_auto ... (migrations.1_auto)\n"
"[ ] migrations.3_squashed_5 ... (migrations.2_auto)\n"
"[ ] migrations.6_auto ... (migrations.3_squashed_5)\n"
"[ ] migrations.7_auto ... (migrations.6_auto)\n",
out.getvalue().lower()
)
call_command("migrate", "migrations", "3_squashed_5", verbosity=0)
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out)
self.assertEqual(
"[x] migrations.1_auto\n"
"[x] migrations.2_auto\n"
"[x] migrations.3_squashed_5\n"
"[ ] migrations.6_auto\n"
"[ ] migrations.7_auto\n",
out.getvalue().lower()
)
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out, verbosity=2)
self.assertEqual(
"[x] migrations.1_auto\n"
"[x] migrations.2_auto ... (migrations.1_auto)\n"
"[x] migrations.3_squashed_5 ... (migrations.2_auto)\n"
"[ ] migrations.6_auto ... (migrations.3_squashed_5)\n"
"[ ] migrations.7_auto ... (migrations.6_auto)\n",
out.getvalue().lower()
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_sqlmigrate(self):
"""
Makes sure that sqlmigrate does something.
"""
# Make sure the output is wrapped in a transaction
out = six.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out)
output = out.getvalue()
self.assertIn(connection.ops.start_transaction_sql(), output)
self.assertIn(connection.ops.end_transaction_sql(), output)
# Test forwards. All the databases agree on CREATE TABLE, at least.
out = six.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out)
self.assertIn("create table", out.getvalue().lower())
# Cannot generate the reverse SQL unless we've applied the migration.
call_command("migrate", "migrations", verbosity=0)
# And backwards is a DROP TABLE
out = six.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out, backwards=True)
self.assertIn("drop table", out.getvalue().lower())
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.migrated_unapplied_app",
"migrations.migrations_test_apps.unmigrated_app"])
def test_regression_22823_unmigrated_fk_to_migrated_model(self):
"""
https://code.djangoproject.com/ticket/22823
Assuming you have 3 apps, `A`, `B`, and `C`, such that:
* `A` has migrations
* `B` has a migration we want to apply
* `C` has no migrations, but has an FK to `A`
        When we try to migrate "B", an exception occurs because
        "B" was not included in the ProjectState that is used to detect
soft-applied migrations.
"""
call_command("migrate", "migrated_unapplied_app", stdout=six.StringIO())
class MakeMigrationsTests(MigrationTestBase):
"""
Tests running the makemigrations command.
"""
# Because the `import_module` performed in `MigrationLoader` will cache
# the migrations package, we can't reuse the same migration package
# between tests. This is only a problem for testing, since `makemigrations`
# is normally called in its own process.
creation_counter = 0
def setUp(self):
MakeMigrationsTests.creation_counter += 1
self.migration_dir = os.path.join(self.test_dir, 'migrations_%d' % self.creation_counter)
self.migration_pkg = "migrations.migrations_%d" % self.creation_counter
self._old_models = apps.app_configs['migrations'].models.copy()
def tearDown(self):
apps.app_configs['migrations'].models = self._old_models
apps.all_models['migrations'] = self._old_models
apps.clear_cache()
_cwd = os.getcwd()
os.chdir(self.test_dir)
try:
try:
self._rmrf(self.migration_dir)
except OSError:
pass
try:
self._rmrf(os.path.join(self.test_dir,
"test_migrations_path_doesnt_exist"))
except OSError:
pass
finally:
os.chdir(_cwd)
def _rmrf(self, dname):
if os.path.commonprefix([self.test_dir, os.path.abspath(dname)]) != self.test_dir:
return
shutil.rmtree(dname)
def test_files_content(self):
self.assertTableNotExists("migrations_unicodemodel")
apps.register_model('migrations', UnicodeModel)
with override_settings(MIGRATION_MODULES={"migrations": self.migration_pkg}):
call_command("makemigrations", "migrations", verbosity=0)
init_file = os.path.join(self.migration_dir, "__init__.py")
# Check for existing __init__.py file in migrations folder
self.assertTrue(os.path.exists(init_file))
with open(init_file, 'r') as fp:
content = force_text(fp.read())
self.assertEqual(content, '')
initial_file = os.path.join(self.migration_dir, "0001_initial.py")
# Check for existing 0001_initial.py file in migration folder
self.assertTrue(os.path.exists(initial_file))
with codecs.open(initial_file, 'r', encoding='utf-8') as fp:
content = fp.read()
self.assertIn('# -*- coding: utf-8 -*-', content)
self.assertIn('migrations.CreateModel', content)
if six.PY3:
self.assertIn('úñí©óðé µóðéø', content) # Meta.verbose_name
self.assertIn('úñí©óðé µóðéøß', content) # Meta.verbose_name_plural
self.assertIn('ÚÑÍ¢ÓÐÉ', content) # title.verbose_name
self.assertIn('“Ðjáñgó”', content) # title.default
else:
self.assertIn('\\xfa\\xf1\\xed\\xa9\\xf3\\xf0\\xe9 \\xb5\\xf3\\xf0\\xe9\\xf8', content) # Meta.verbose_name
self.assertIn('\\xfa\\xf1\\xed\\xa9\\xf3\\xf0\\xe9 \\xb5\\xf3\\xf0\\xe9\\xf8\\xdf', content) # Meta.verbose_name_plural
self.assertIn('\\xda\\xd1\\xcd\\xa2\\xd3\\xd0\\xc9', content) # title.verbose_name
self.assertIn('\\u201c\\xd0j\\xe1\\xf1g\\xf3\\u201d', content) # title.default
def test_failing_migration(self):
# If a migration fails to serialize, it shouldn't generate an empty file. #21280
apps.register_model('migrations', UnserializableModel)
with six.assertRaisesRegex(self, ValueError, r'Cannot serialize'):
with override_settings(MIGRATION_MODULES={"migrations": self.migration_pkg}):
call_command("makemigrations", "migrations", verbosity=0)
initial_file = os.path.join(self.migration_dir, "0001_initial.py")
self.assertFalse(os.path.exists(initial_file))
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"})
def test_makemigrations_conflict_exit(self):
"""
Makes sure that makemigrations exits if it detects a conflict.
"""
with self.assertRaises(CommandError):
call_command("makemigrations")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_makemigrations_merge_no_conflict(self):
"""
Makes sure that makemigrations exits if in merge mode with no conflicts.
"""
out = six.StringIO()
try:
call_command("makemigrations", merge=True, stdout=out)
except CommandError:
self.fail("Makemigrations errored in merge mode with no conflicts")
self.assertIn("No conflicts detected to merge.", out.getvalue())
def test_makemigrations_no_app_sys_exit(self):
"""
Makes sure that makemigrations exits if a non-existent app is specified.
"""
err = six.StringIO()
with self.assertRaises(SystemExit):
call_command("makemigrations", "this_app_does_not_exist", stderr=err)
self.assertIn("'this_app_does_not_exist' could not be found.", err.getvalue())
def test_makemigrations_empty_no_app_specified(self):
"""
Makes sure that makemigrations exits if no app is specified with 'empty' mode.
"""
with override_settings(MIGRATION_MODULES={"migrations": self.migration_pkg}):
self.assertRaises(CommandError, call_command, "makemigrations", empty=True)
def test_makemigrations_empty_migration(self):
"""
Makes sure that makemigrations properly constructs an empty migration.
"""
with override_settings(MIGRATION_MODULES={"migrations": self.migration_pkg}):
try:
call_command("makemigrations", "migrations", empty=True, verbosity=0)
except CommandError:
self.fail("Makemigrations errored in creating empty migration for a proper app.")
initial_file = os.path.join(self.migration_dir, "0001_initial.py")
# Check for existing 0001_initial.py file in migration folder
self.assertTrue(os.path.exists(initial_file))
with codecs.open(initial_file, 'r', encoding='utf-8') as fp:
content = fp.read()
self.assertIn('# -*- coding: utf-8 -*-', content)
# Remove all whitespace to check for empty dependencies and operations
content = content.replace(' ', '')
self.assertIn('dependencies=[\n]', content)
self.assertIn('operations=[\n]', content)
def test_makemigrations_no_changes_no_apps(self):
"""
Makes sure that makemigrations exits when there are no changes and no apps are specified.
"""
out = six.StringIO()
call_command("makemigrations", stdout=out)
self.assertIn("No changes detected", out.getvalue())
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_no_changes"})
def test_makemigrations_no_changes(self):
"""
Makes sure that makemigrations exits when there are no changes to an app.
"""
out = six.StringIO()
call_command("makemigrations", "migrations", stdout=out)
self.assertIn("No changes detected in app 'migrations'", out.getvalue())
def test_makemigrations_migrations_announce(self):
"""
Makes sure that makemigrations announces the migration at the default verbosity level.
"""
out = six.StringIO()
with override_settings(MIGRATION_MODULES={"migrations": self.migration_pkg}):
call_command("makemigrations", "migrations", stdout=out)
self.assertIn("Migrations for 'migrations'", out.getvalue())
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_no_ancestor"})
def test_makemigrations_no_common_ancestor(self):
"""
Makes sure that makemigrations fails to merge migrations with no common ancestor.
"""
with self.assertRaises(ValueError) as context:
call_command("makemigrations", "migrations", merge=True)
exception_message = str(context.exception)
self.assertIn("Could not find common ancestor of", exception_message)
self.assertIn("0002_second", exception_message)
self.assertIn("0002_conflicting_second", exception_message)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"})
def test_makemigrations_interactive_reject(self):
"""
Makes sure that makemigrations enters and exits interactive mode properly.
"""
# Monkeypatch interactive questioner to auto reject
old_input = questioner.input
questioner.input = lambda _: "N"
try:
call_command("makemigrations", "migrations", merge=True, interactive=True, verbosity=0)
merge_file = os.path.join(self.test_dir, 'test_migrations_conflict', '0003_merge.py')
self.assertFalse(os.path.exists(merge_file))
except CommandError:
self.fail("Makemigrations failed while running interactive questioner")
finally:
questioner.input = old_input
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"})
def test_makemigrations_interactive_accept(self):
"""
Makes sure that makemigrations enters interactive mode and merges properly.
"""
# Monkeypatch interactive questioner to auto accept
old_input = questioner.input
questioner.input = lambda _: "y"
out = six.StringIO()
try:
call_command("makemigrations", "migrations", merge=True, interactive=True, stdout=out)
merge_file = os.path.join(self.test_dir, 'test_migrations_conflict', '0003_merge.py')
self.assertTrue(os.path.exists(merge_file))
os.remove(merge_file)
self.assertFalse(os.path.exists(merge_file))
except CommandError:
self.fail("Makemigrations failed while running interactive questioner")
finally:
questioner.input = old_input
self.assertIn("Created new merge migration", force_text(out.getvalue()))
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"})
def test_makemigrations_handle_merge(self):
"""
Makes sure that makemigrations properly merges the conflicting migrations with --noinput.
"""
out = six.StringIO()
call_command("makemigrations", "migrations", merge=True, interactive=False, stdout=out)
output = force_text(out.getvalue())
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
merge_file = os.path.join(self.test_dir, 'test_migrations_conflict', '0003_merge.py')
self.assertTrue(os.path.exists(merge_file))
os.remove(merge_file)
self.assertFalse(os.path.exists(merge_file))
self.assertIn("Created new merge migration", output)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"})
def test_makemigration_merge_dry_run(self):
"""
Makes sure that makemigrations respects --dry-run option when fixing
migration conflicts (#24427).
"""
out = six.StringIO()
call_command("makemigrations", "migrations", dry_run=True, merge=True, interactive=False, stdout=out)
merge_file = os.path.join(self.test_dir, '0003_merge.py')
self.assertFalse(os.path.exists(merge_file))
output = force_text(out.getvalue())
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
self.assertNotIn("Created new merge migration", output)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"})
def test_makemigration_merge_dry_run_verbosity_3(self):
"""
Makes sure that `makemigrations --merge --dry-run` writes the merge
migration file to stdout with `verbosity == 3` (#24427).
"""
out = six.StringIO()
call_command("makemigrations", "migrations", dry_run=True, merge=True, interactive=False,
stdout=out, verbosity=3)
merge_file = os.path.join(self.test_dir, '0003_merge.py')
self.assertFalse(os.path.exists(merge_file))
output = force_text(out.getvalue())
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
self.assertNotIn("Created new merge migration", output)
# Additional output caused by verbosity 3
# The complete merge migration file that would be written
self.assertIn("# -*- coding: utf-8 -*-", output)
self.assertIn("class Migration(migrations.Migration):", output)
self.assertIn("dependencies = [", output)
self.assertIn("('migrations', '0002_second')", output)
self.assertIn("('migrations', '0002_conflicting_second')", output)
self.assertIn("operations = [", output)
self.assertIn("]", output)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_no_default"})
def test_makemigrations_dry_run(self):
"""
Ticket #22676 -- `makemigrations --dry-run` should not ask for defaults.
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
silly_date = models.DateField() # Added field without a default
class Meta:
app_label = "migrations"
out = six.StringIO()
call_command("makemigrations", "migrations", dry_run=True, stdout=out)
# Output the expected changes directly, without asking for defaults
self.assertIn("Add field silly_date to sillymodel", out.getvalue())
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_no_default"})
def test_makemigrations_dry_run_verbosity_3(self):
"""
Ticket #22675 -- Allow `makemigrations --dry-run` to output the
migrations file to stdout (with verbosity == 3).
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
silly_char = models.CharField(default="")
class Meta:
app_label = "migrations"
out = six.StringIO()
call_command("makemigrations", "migrations", dry_run=True, stdout=out, verbosity=3)
# Normal --dry-run output
self.assertIn("- Add field silly_char to sillymodel", out.getvalue())
# Additional output caused by verbosity 3
# The complete migrations file that would be written
self.assertIn("# -*- coding: utf-8 -*-", out.getvalue())
self.assertIn("class Migration(migrations.Migration):", out.getvalue())
self.assertIn("dependencies = [", out.getvalue())
self.assertIn("('migrations', '0001_initial'),", out.getvalue())
self.assertIn("migrations.AddField(", out.getvalue())
self.assertIn("model_name='sillymodel',", out.getvalue())
self.assertIn("name='silly_char',", out.getvalue())
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_path_doesnt_exist.foo.bar"})
def test_makemigrations_migrations_modules_path_not_exist(self):
"""
Ticket #22682 -- Makemigrations fails when specifying custom location
for migration files (using MIGRATION_MODULES) if the custom path
doesn't already exist.
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
out = six.StringIO()
call_command("makemigrations", "migrations", stdout=out)
# Command output indicates the migration is created.
self.assertIn(" - Create model SillyModel", out.getvalue())
# Migrations file is actually created in the expected path.
self.assertTrue(os.path.isfile(os.path.join(self.test_dir,
"test_migrations_path_doesnt_exist", "foo", "bar",
"0001_initial.py")))
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"})
def test_makemigrations_interactive_by_default(self):
"""
Makes sure that the user is prompted to merge by default if there are
        conflicts and merge is True. Answer negatively to differentiate it
        from the behavior when --noinput is specified.
"""
# Monkeypatch interactive questioner to auto reject
old_input = questioner.input
questioner.input = lambda _: "N"
out = six.StringIO()
merge_file = os.path.join(self.test_dir, 'test_migrations_conflict', '0003_merge.py')
try:
call_command("makemigrations", "migrations", merge=True, stdout=out)
# This will fail if interactive is False by default
self.assertFalse(os.path.exists(merge_file))
except CommandError:
self.fail("Makemigrations failed while running interactive questioner")
finally:
questioner.input = old_input
if os.path.exists(merge_file):
os.remove(merge_file)
self.assertNotIn("Created new merge migration", out.getvalue())
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_no_changes"},
INSTALLED_APPS=[
"migrations",
"migrations.migrations_test_apps.unspecified_app_with_conflict"])
def test_makemigrations_unspecified_app_with_conflict_no_merge(self):
"""
Makes sure that makemigrations does not raise a CommandError when an
unspecified app has conflicting migrations.
"""
try:
call_command("makemigrations", "migrations", merge=False, verbosity=0)
except CommandError:
self.fail("Makemigrations fails resolving conflicts in an unspecified app")
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.unspecified_app_with_conflict"])
def test_makemigrations_unspecified_app_with_conflict_merge(self):
"""
Makes sure that makemigrations does not create a merge for an
unspecified app even if it has conflicting migrations.
"""
# Monkeypatch interactive questioner to auto accept
old_input = questioner.input
questioner.input = lambda _: "y"
out = six.StringIO()
merge_file = os.path.join(self.test_dir,
'migrations_test_apps',
'unspecified_app_with_conflict',
'migrations',
'0003_merge.py')
try:
call_command("makemigrations", "migrated_app", merge=True, interactive=True, stdout=out)
self.assertFalse(os.path.exists(merge_file))
self.assertIn("No conflicts detected to merge.", out.getvalue())
except CommandError:
self.fail("Makemigrations fails resolving conflicts in an unspecified app")
finally:
questioner.input = old_input
if os.path.exists(merge_file):
os.remove(merge_file)
def test_makemigrations_with_custom_name(self):
"""
        Makes sure that makemigrations generates a custom migration.
"""
def cmd(migration_count, migration_name, *args):
with override_settings(MIGRATION_MODULES={"migrations": self.migration_pkg}):
try:
call_command("makemigrations", "migrations", "--verbosity", "0", "--name", migration_name, *args)
except CommandError:
self.fail("Makemigrations errored in creating empty migration with custom name for a proper app.")
migration_file = os.path.join(self.migration_dir, "%s_%s.py" % (migration_count, migration_name))
# Check for existing migration file in migration folder
self.assertTrue(os.path.exists(migration_file))
with codecs.open(migration_file, "r", encoding="utf-8") as fp:
content = fp.read()
self.assertIn("# -*- coding: utf-8 -*-", content)
content = content.replace(" ", "")
return content
# generate an initial migration
migration_name_0001 = "my_initial_migration"
content = cmd("0001", migration_name_0001)
self.assertIn("dependencies=[\n]", content)
# Python 3.3+ importlib caches os.listdir() on some platforms like
# Mac OS X (#23850).
if hasattr(importlib, 'invalidate_caches'):
importlib.invalidate_caches()
# generate an empty migration
migration_name_0002 = "my_custom_migration"
content = cmd("0002", migration_name_0002, "--empty")
self.assertIn("dependencies=[\n('migrations','0001_%s'),\n]" % migration_name_0001, content)
self.assertIn("operations=[\n]", content)
def test_makemigrations_exit(self):
"""
makemigrations --exit should exit with sys.exit(1) when there are no
changes to an app.
"""
with self.settings(MIGRATION_MODULES={"migrations": self.migration_pkg}):
call_command("makemigrations", "--exit", "migrations", verbosity=0)
with self.settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_no_changes"}):
with self.assertRaises(SystemExit):
call_command("makemigrations", "--exit", "migrations", verbosity=0)
class SquashMigrationsTest(MigrationTestBase):
"""
Tests running the squashmigrations command.
"""
path = "test_migrations/0001_squashed_0002_second.py"
path = os.path.join(MigrationTestBase.test_dir, path)
def tearDown(self):
if os.path.exists(self.path):
os.remove(self.path)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_squashmigrations_squashes(self):
"""
Tests that squashmigrations squashes migrations.
"""
call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=0)
self.assertTrue(os.path.exists(self.path))
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_squashmigrations_optimizes(self):
"""
Tests that squashmigrations optimizes operations.
"""
out = six.StringIO()
call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, stdout=out)
self.assertIn("Optimized from 7 operations to 5 operations.", force_text(out.getvalue()))
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_ticket_23799_squashmigrations_no_optimize(self):
"""
Makes sure that squashmigrations --no-optimize really doesn't optimize operations.
"""
out = six.StringIO()
call_command("squashmigrations", "migrations", "0002",
interactive=False, verbosity=1, no_optimize=True, stdout=out)
self.assertIn("Skipping optimization", force_text(out.getvalue()))
|
abhattad4/Digi-Menu
|
tests/migrations/test_commands.py
|
Python
|
bsd-3-clause
| 37,861 | 0.002803 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from opus_core.variables.variable import Variable
from variable_functions import my_attribute_label
class home_grid_id(Variable):
'''The grid_id of a person's residence.'''
def dependencies(self):
return [my_attribute_label('household_id'),
'psrc.household.grid_id']
def compute(self, dataset_pool):
households = dataset_pool.get_dataset('household')
return self.get_dataset().get_join_data(households, name='grid_id')
from opus_core.tests import opus_unittest
from urbansim.variable_test_toolbox import VariableTestToolbox
from numpy import array
from numpy import ma
from psrc.datasets.person_dataset import PersonDataset
from opus_core.storage_factory import StorageFactory
class Tests(opus_unittest.OpusTestCase):
variable_name = 'psrc.person.home_grid_id'
def test_my_inputs(self):
storage = StorageFactory().get_storage('dict_storage')
persons_table_name = 'persons'
storage.write_table(
table_name=persons_table_name,
table_data={
'person_id':array([1, 2, 3, 4, 5]),
'household_id':array([1, 1, 3, 3, 3]),
'member_id':array([1,2,1,2,3])
},
)
persons = PersonDataset(in_storage=storage, in_table_name=persons_table_name)
values = VariableTestToolbox().compute_variable(self.variable_name,
data_dictionary = {
'household':{
'household_id':array([1,2,3]),
'grid_id':array([9, 9, 7])
},
'person':persons
},
dataset = 'person'
)
should_be = array([9, 9, 7, 7, 7])
self.assert_(ma.allclose(values, should_be, rtol=1e-7),
'Error in ' + self.variable_name)
if __name__=='__main__':
opus_unittest.main()
|
christianurich/VIBe2UrbanSim
|
3rdparty/opus/src/psrc/person/home_grid_id.py
|
Python
|
gpl-2.0
| 2,165 | 0.0194 |
# -*- encoding: utf-8 -*-
"""Unit tests for the Docker feature."""
from fauxfactory import gen_choice, gen_string, gen_url
from nailgun import entities
from random import randint, shuffle
from requests.exceptions import HTTPError
from robottelo.api.utils import promote
from robottelo.constants import DOCKER_REGISTRY_HUB
from robottelo.decorators import run_only_on, skip_if_bug_open, stubbed
from robottelo.helpers import (
get_external_docker_url,
get_internal_docker_url,
valid_data_list,
)
from robottelo.test import APITestCase
DOCKER_PROVIDER = 'Docker'
EXTERNAL_DOCKER_URL = get_external_docker_url()
INTERNAL_DOCKER_URL = get_internal_docker_url()
STRING_TYPES = ['alpha', 'alphanumeric', 'cjk', 'utf8', 'latin1']
def _invalid_names():
"""Return a generator yielding various kinds of invalid strings for
Docker repositories.
"""
return (
# boundaries
gen_string('alphanumeric', 2),
gen_string('alphanumeric', 31),
u'{0}/{1}'.format(
gen_string('alphanumeric', 3),
gen_string('alphanumeric', 3)
),
u'{0}/{1}'.format(
gen_string('alphanumeric', 4),
gen_string('alphanumeric', 2)
),
u'{0}/{1}'.format(
gen_string('alphanumeric', 31),
gen_string('alphanumeric', 30)
),
u'{0}/{1}'.format(
gen_string('alphanumeric', 30),
gen_string('alphanumeric', 31)
),
# not allowed non alphanumeric character
u'{0}+{1}_{2}/{2}-{1}_{0}.{3}'.format(
gen_string('alphanumeric', randint(3, 6)),
gen_string('alphanumeric', randint(3, 6)),
gen_string('alphanumeric', randint(3, 6)),
gen_string('alphanumeric', randint(3, 6)),
),
u'{0}-{1}_{2}/{2}+{1}_{0}.{3}'.format(
gen_string('alphanumeric', randint(3, 6)),
gen_string('alphanumeric', randint(3, 6)),
gen_string('alphanumeric', randint(3, 6)),
gen_string('alphanumeric', randint(3, 6)),
),
)
def _valid_names():
"""Return a generator yielding various kinds of valid strings for
Docker repositories.
"""
return (
# boundaries
gen_string('alphanumeric', 3).lower(),
gen_string('alphanumeric', 30).lower(),
u'{0}/{1}'.format(
gen_string('alphanumeric', 4).lower(),
gen_string('alphanumeric', 3).lower(),
),
u'{0}/{1}'.format(
gen_string('alphanumeric', 30).lower(),
gen_string('alphanumeric', 30).lower(),
),
# allowed non alphanumeric character
u'{0}-{1}_{2}/{2}-{1}_{0}.{3}'.format(
gen_string('alphanumeric', randint(3, 6)).lower(),
gen_string('alphanumeric', randint(3, 6)).lower(),
gen_string('alphanumeric', randint(3, 6)).lower(),
gen_string('alphanumeric', randint(3, 6)).lower(),
),
u'-_-_/-_.',
)
def _create_repository(product, name=None, upstream_name=None):
"""Creates a Docker-based repository.
:param product: A ``Product`` object.
:param str name: Name for the repository. If ``None`` then a random
value will be generated.
:param str upstream_name: A valid name for an existing Docker image.
If ``None`` then defaults to ``busybox``.
:return: A ``Repository`` object.
"""
if name is None:
name = gen_string(gen_choice(STRING_TYPES), 15)
if upstream_name is None:
upstream_name = u'busybox'
return entities.Repository(
content_type=u'docker',
docker_upstream_name=upstream_name,
name=name,
product=product,
url=DOCKER_REGISTRY_HUB,
).create()
@run_only_on('sat')
class DockerRepositoryTestCase(APITestCase):
"""Tests specific to performing CRUD methods against ``Docker``
repositories.
"""
@classmethod
def setUpClass(cls):
"""Create an organization and product which can be re-used in tests."""
super(DockerRepositoryTestCase, cls).setUpClass()
cls.org = entities.Organization().create()
def test_create_one_docker_repo(self):
"""@Test: Create one Docker-type repository
@Assert: A repository is created with a Docker image.
@Feature: Docker
"""
for name in valid_data_list():
with self.subTest(name):
repo = _create_repository(
entities.Product(organization=self.org).create(),
name,
)
self.assertEqual(repo.name, name)
self.assertEqual(repo.docker_upstream_name, 'busybox')
self.assertEqual(repo.content_type, 'docker')
def test_create_docker_repo_valid_upstream_name(self):
"""@Test: Create a Docker-type repository with a valid docker upstream
name
@Assert: A repository is created with the specified upstream name.
@Feature: Docker
"""
for upstream_name in _valid_names():
with self.subTest(upstream_name):
repo = _create_repository(
entities.Product(organization=self.org).create(),
upstream_name=upstream_name,
)
self.assertEqual(repo.docker_upstream_name, upstream_name)
self.assertEqual(repo.content_type, u'docker')
def test_create_docker_repo_invalid_upstream_name(self):
"""@Test: Create a Docker-type repository with a invalid docker
upstream name.
@Assert: A repository is not created and a proper error is raised.
@Feature: Docker
"""
product = entities.Product(organization=self.org).create()
for upstream_name in _invalid_names():
with self.subTest(upstream_name):
with self.assertRaises(HTTPError):
_create_repository(product, upstream_name=upstream_name)
def test_create_multiple_docker_repo(self):
"""@Test: Create multiple Docker-type repositories
@Assert: Multiple docker repositories are created with a Docker image
and they all belong to the same product.
@Feature: Docker
"""
product = entities.Product(organization=self.org).create()
for _ in range(randint(2, 5)):
repo = _create_repository(product)
product = product.read()
self.assertIn(repo.id, [repo_.id for repo_ in product.repository])
def test_create_multiple_docker_repo_multiple_products(self):
"""@Test: Create multiple Docker-type repositories on multiple products.
@Assert: Multiple docker repositories are created with a Docker image
and they all belong to their respective products.
@Feature: Docker
"""
for _ in range(randint(2, 5)):
product = entities.Product(organization=self.org).create()
for _ in range(randint(2, 3)):
repo = _create_repository(product)
product = product.read()
self.assertIn(
repo.id,
[repo_.id for repo_ in product.repository],
)
def test_sync_docker_repo(self):
"""@Test: Create and sync a Docker-type repository
@Assert: A repository is created with a Docker repository
and it is synchronized.
@Feature: Docker
"""
repo = _create_repository(
entities.Product(organization=self.org).create()
)
repo.sync()
repo = repo.read()
self.assertGreaterEqual(repo.content_counts['docker_image'], 1)
def test_update_docker_repo_name(self):
"""@Test: Create a Docker-type repository and update its name.
@Assert: A repository is created with a Docker image and that its
name can be updated.
@Feature: Docker
"""
repo = _create_repository(
entities.Product(organization=self.org).create())
# Update the repository name to random value
for new_name in valid_data_list():
with self.subTest(new_name):
repo.name = new_name
repo = repo.update()
self.assertEqual(repo.name, new_name)
def test_update_docker_repo_upstream_name(self):
"""@Test: Create a Docker-type repository and update its upstream name.
@Assert: A repository is created with a Docker image and that its
upstream name can be updated.
@Feature: Docker
"""
new_upstream_name = u'fedora/ssh'
repo = _create_repository(
entities.Product(organization=self.org).create())
self.assertNotEqual(repo.docker_upstream_name, new_upstream_name)
# Update the repository upstream name
repo.docker_upstream_name = new_upstream_name
repo = repo.update()
self.assertEqual(repo.docker_upstream_name, new_upstream_name)
def test_update_docker_repo_url(self):
"""@Test: Create a Docker-type repository and update its URL.
@Assert: A repository is created with a Docker image and that its
URL can be updated.
@Feature: Docker
"""
new_url = gen_url()
repo = _create_repository(
entities.Product(organization=self.org).create())
self.assertEqual(repo.url, DOCKER_REGISTRY_HUB)
# Update the repository URL
repo.url = new_url
repo = repo.update()
self.assertEqual(repo.url, new_url)
self.assertNotEqual(repo.url, DOCKER_REGISTRY_HUB)
def test_delete_docker_repo(self):
"""@Test: Create and delete a Docker-type repository
@Assert: A repository is created with a Docker image and then deleted.
@Feature: Docker
"""
repo = _create_repository(
entities.Product(organization=self.org).create())
# Delete it
repo.delete()
with self.assertRaises(HTTPError):
repo.read()
def test_delete_random_docker_repo(self):
"""@Test: Create Docker-type repositories on multiple products and
delete a random repository from a random product.
@Assert: Random repository can be deleted from random product without
altering the other products.
@Feature: Docker
"""
repos = []
products = [
entities.Product(organization=self.org).create()
for _
in range(randint(2, 5))
]
for product in products:
repo = _create_repository(product)
self.assertEqual(repo.content_type, u'docker')
repos.append(repo)
# Delete a random repository
shuffle(repos)
repo = repos.pop()
repo.delete()
with self.assertRaises(HTTPError):
repo.read()
# Check if others repositories are not touched
for repo in repos:
repo = repo.read()
self.assertIn(repo.product.id, [prod.id for prod in products])
@run_only_on('sat')
class DockerContentViewTestCase(APITestCase):
"""Tests specific to using ``Docker`` repositories with Content Views."""
@classmethod
def setUpClass(cls):
"""Create an organization which can be re-used in tests."""
super(DockerContentViewTestCase, cls).setUpClass()
cls.org = entities.Organization().create()
def test_add_docker_repo_to_content_view(self):
"""@Test: Add one Docker-type repository to a non-composite content view
@Assert: A repository is created with a Docker repository and the
product is added to a non-composite content view
@Feature: Docker
"""
repo = _create_repository(
entities.Product(organization=self.org).create())
# Create content view and associate docker repo
content_view = entities.ContentView(
composite=False,
organization=self.org,
).create()
content_view.repository = [repo]
content_view = content_view.update(['repository'])
self.assertIn(repo.id, [repo_.id for repo_ in content_view.repository])
def test_add_multiple_docker_repos_to_content_view(self):
"""@Test: Add multiple Docker-type repositories to a
non-composite content view.
@Assert: Repositories are created with Docker images and the
product is added to a non-composite content view.
@Feature: Docker
"""
product = entities.Product(organization=self.org).create()
repos = [
_create_repository(product, name=gen_string('alpha'))
for _
in range(randint(2, 5))
]
self.assertEqual(len(product.read().repository), len(repos))
content_view = entities.ContentView(
composite=False,
organization=self.org,
).create()
content_view.repository = repos
content_view = content_view.update(['repository'])
self.assertEqual(len(content_view.repository), len(repos))
content_view.repository = [
repo.read() for repo in content_view.repository
]
self.assertEqual(
{repo.id for repo in repos},
{repo.id for repo in content_view.repository}
)
for repo in repos + content_view.repository:
self.assertEqual(repo.content_type, u'docker')
self.assertEqual(repo.docker_upstream_name, u'busybox')
def test_add_synced_docker_repo_to_content_view(self):
"""@Test: Create and sync a Docker-type repository
@Assert: A repository is created with a Docker repository
and it is synchronized.
@Feature: Docker
"""
repo = _create_repository(
entities.Product(organization=self.org).create())
repo.sync()
repo = repo.read()
self.assertGreaterEqual(repo.content_counts['docker_image'], 1)
# Create content view and associate docker repo
content_view = entities.ContentView(
composite=False,
organization=self.org,
).create()
content_view.repository = [repo]
content_view = content_view.update(['repository'])
self.assertIn(repo.id, [repo_.id for repo_ in content_view.repository])
def test_add_docker_repo_to_composite_content_view(self):
"""@Test: Add one Docker-type repository to a composite content view
@Assert: A repository is created with a Docker repository and the
product is added to a content view which is then added to a composite
content view.
@Feature: Docker
"""
repo = _create_repository(
entities.Product(organization=self.org).create())
# Create content view and associate docker repo
content_view = entities.ContentView(
composite=False,
organization=self.org,
).create()
content_view.repository = [repo]
content_view = content_view.update(['repository'])
self.assertIn(repo.id, [repo_.id for repo_ in content_view.repository])
# Publish it and grab its version ID (there should only be one version)
content_view.publish()
content_view = content_view.read()
self.assertEqual(len(content_view.version), 1)
# Create composite content view and associate content view to it
comp_content_view = entities.ContentView(
composite=True,
organization=self.org,
).create()
comp_content_view.component = content_view.version
comp_content_view = comp_content_view.update(['component'])
self.assertIn(
content_view.version[0].id,
[component.id for component in comp_content_view.component]
)
def test_add_multiple_docker_repos_to_composite_content_view(self):
"""@Test: Add multiple Docker-type repositories to a composite
content view.
@Assert: One repository is created with a Docker image and the
product is added to a random number of content views which are then
added to a composite content view.
@Feature: Docker
"""
cv_versions = []
product = entities.Product(organization=self.org).create()
for _ in range(randint(2, 5)):
# Create content view and associate docker repo
content_view = entities.ContentView(
composite=False,
organization=self.org,
).create()
repo = _create_repository(product)
content_view.repository = [repo]
content_view = content_view.update(['repository'])
self.assertIn(
repo.id,
[repo_.id for repo_ in content_view.repository]
)
# Publish it and grab its version ID (there should be one version)
content_view.publish()
content_view = content_view.read()
cv_versions.append(content_view.version[0])
# Create composite content view and associate content view to it
comp_content_view = entities.ContentView(
composite=True,
organization=self.org,
).create()
for cv_version in cv_versions:
comp_content_view.component.append(cv_version)
comp_content_view = comp_content_view.update(['component'])
self.assertIn(
cv_version.id,
[component.id for component in comp_content_view.component]
)
def test_publish_once_docker_repo_content_view(self):
"""@Test: Add Docker-type repository to content view and publish
it once.
@Assert: One repository is created with a Docker image and the product
is added to a content view which is then published only once.
@Feature: Docker
"""
repo = _create_repository(
entities.Product(organization=self.org).create())
content_view = entities.ContentView(
composite=False,
organization=self.org,
).create()
content_view.repository = [repo]
content_view = content_view.update(['repository'])
self.assertIn(repo.id, [repo_.id for repo_ in content_view.repository])
# Not published yet?
content_view = content_view.read()
self.assertIsNone(content_view.last_published)
self.assertEqual(content_view.next_version, 1)
# Publish it and check that it was indeed published.
content_view.publish()
content_view = content_view.read()
self.assertIsNotNone(content_view.last_published)
self.assertGreater(content_view.next_version, 1)
@skip_if_bug_open('bugzilla', 1217635)
def test_publish_once_docker_repo_composite_content_view(self):
"""@Test: Add Docker-type repository to composite
content view and publish it once.
@Assert: One repository is created with a Docker image and the product
is added to a content view which is then published only once and then
added to a composite content view which is also published only once.
@Feature: Docker
"""
repo = _create_repository(
entities.Product(organization=self.org).create())
content_view = entities.ContentView(
composite=False,
organization=self.org,
).create()
content_view.repository = [repo]
content_view = content_view.update(['repository'])
self.assertIn(repo.id, [repo_.id for repo_ in content_view.repository])
# Not published yet?
content_view = content_view.read()
self.assertIsNone(content_view.last_published)
self.assertEqual(content_view.next_version, 1)
# Publish it and check that it was indeed published.
content_view.publish()
content_view = content_view.read()
self.assertIsNotNone(content_view.last_published)
self.assertGreater(content_view.next_version, 1)
# Create composite content view…
comp_content_view = entities.ContentView(
composite=True,
organization=self.org,
).create()
comp_content_view.component = [content_view.version[0]]
comp_content_view = comp_content_view.update(['component'])
self.assertIn(
content_view.version[0].id, # pylint:disable=no-member
[component.id for component in comp_content_view.component]
)
# … publish it…
comp_content_view.publish()
# … and check that it was indeed published
comp_content_view = comp_content_view.read()
self.assertIsNotNone(comp_content_view.last_published)
self.assertGreater(comp_content_view.next_version, 1)
def test_publish_multiple_docker_repo_content_view(self):
"""@Test: Add Docker-type repository to content view and publish it
multiple times.
@Assert: One repository is created with a Docker image and the product
is added to a content view which is then published multiple times.
@Feature: Docker
"""
repo = _create_repository(
entities.Product(organization=self.org).create())
content_view = entities.ContentView(
composite=False,
organization=self.org,
).create()
content_view.repository = [repo]
content_view = content_view.update(['repository'])
self.assertEqual(
[repo.id], [repo_.id for repo_ in content_view.repository])
self.assertIsNone(content_view.read().last_published)
publish_amount = randint(2, 5)
for _ in range(publish_amount):
content_view.publish()
content_view = content_view.read()
self.assertIsNotNone(content_view.last_published)
self.assertEqual(len(content_view.version), publish_amount)
def test_publish_multiple_docker_repo_composite_content_view(self):
"""@Test: Add Docker-type repository to content view and publish it
multiple times.
@Assert: One repository is created with a Docker image and the product
is added to a content view which is then added to a composite content
view which is then published multiple times.
@Feature: Docker
"""
repo = _create_repository(
entities.Product(organization=self.org).create())
content_view = entities.ContentView(
composite=False,
organization=self.org,
).create()
content_view.repository = [repo]
content_view = content_view.update(['repository'])
self.assertEqual(
[repo.id], [repo_.id for repo_ in content_view.repository])
self.assertIsNone(content_view.read().last_published)
content_view.publish()
content_view = content_view.read()
self.assertIsNotNone(content_view.last_published)
comp_content_view = entities.ContentView(
composite=True,
organization=self.org,
).create()
comp_content_view.component = [content_view.version[0]]
comp_content_view = comp_content_view.update(['component'])
self.assertEqual(
[content_view.version[0].id],
[comp.id for comp in comp_content_view.component],
)
self.assertIsNone(comp_content_view.last_published)
publish_amount = randint(2, 5)
for _ in range(publish_amount):
comp_content_view.publish()
comp_content_view = comp_content_view.read()
self.assertIsNotNone(comp_content_view.last_published)
self.assertEqual(len(comp_content_view.version), publish_amount)
def test_promote_docker_repo_content_view(self):
"""@Test: Add Docker-type repository to content view and publish it.
Then promote it to the next available lifecycle-environment.
@Assert: Docker-type repository is promoted to content view found in
the specific lifecycle-environment.
@Feature: Docker
"""
lce = entities.LifecycleEnvironment(organization=self.org).create()
repo = _create_repository(
entities.Product(organization=self.org).create())
content_view = entities.ContentView(
composite=False,
organization=self.org,
).create()
content_view.repository = [repo]
content_view = content_view.update(['repository'])
self.assertEqual(
[repo.id], [repo_.id for repo_ in content_view.repository])
content_view.publish()
content_view = content_view.read()
cvv = content_view.version[0].read()
self.assertEqual(len(cvv.environment), 1)
promote(cvv, lce.id)
self.assertEqual(len(cvv.read().environment), 2)
def test_promote_multiple_docker_repo_content_view(self):
"""@Test: Add Docker-type repository to content view and publish it.
Then promote it to multiple available lifecycle-environments.
@Assert: Docker-type repository is promoted to content view found in
the specific lifecycle-environments.
@Feature: Docker
"""
repo = _create_repository(
entities.Product(organization=self.org).create())
content_view = entities.ContentView(
composite=False,
organization=self.org,
).create()
content_view.repository = [repo]
content_view = content_view.update(['repository'])
self.assertEqual(
[repo.id], [repo_.id for repo_ in content_view.repository])
content_view.publish()
cvv = content_view.read().version[0]
self.assertEqual(len(cvv.read().environment), 1)
for i in range(1, randint(3, 6)):
lce = entities.LifecycleEnvironment(organization=self.org).create()
promote(cvv, lce.id)
self.assertEqual(len(cvv.read().environment), i+1)
def test_promote_docker_repo_composite_content_view(self):
"""@Test: Add Docker-type repository to content view and publish it.
        Then add that content view to a composite one. Publish and promote that
composite content view to the next available lifecycle-environment.
@Assert: Docker-type repository is promoted to content view found in
the specific lifecycle-environment.
@Feature: Docker
"""
lce = entities.LifecycleEnvironment(organization=self.org).create()
repo = _create_repository(
entities.Product(organization=self.org).create())
content_view = entities.ContentView(
composite=False,
organization=self.org,
).create()
content_view.repository = [repo]
content_view = content_view.update(['repository'])
self.assertEqual(
[repo.id], [repo_.id for repo_ in content_view.repository])
content_view.publish()
cvv = content_view.read().version[0].read()
comp_content_view = entities.ContentView(
composite=True,
organization=self.org,
).create()
comp_content_view.component = [cvv]
comp_content_view = comp_content_view.update(['component'])
self.assertEqual(cvv.id, comp_content_view.component[0].id)
comp_content_view.publish()
comp_cvv = comp_content_view.read().version[0]
self.assertEqual(len(comp_cvv.read().environment), 1)
promote(comp_cvv, lce.id)
self.assertEqual(len(comp_cvv.read().environment), 2)
def test_promote_multiple_docker_repo_composite_content_view(self):
"""@Test: Add Docker-type repository to content view and publish it.
        Then add that content view to a composite one. Publish and promote that
        composite content view to multiple available lifecycle-environments.
@Assert: Docker-type repository is promoted to content view found in
the specific lifecycle-environments.
@Feature: Docker
"""
repo = _create_repository(
entities.Product(organization=self.org).create())
content_view = entities.ContentView(
composite=False,
organization=self.org,
).create()
content_view.repository = [repo]
content_view = content_view.update(['repository'])
self.assertEqual(
[repo.id], [repo_.id for repo_ in content_view.repository])
content_view.publish()
cvv = content_view.read().version[0].read()
comp_content_view = entities.ContentView(
composite=True,
organization=self.org,
).create()
comp_content_view.component = [cvv]
comp_content_view = comp_content_view.update(['component'])
self.assertEqual(cvv.id, comp_content_view.component[0].id)
comp_content_view.publish()
comp_cvv = comp_content_view.read().version[0]
self.assertEqual(len(comp_cvv.read().environment), 1)
for i in range(1, randint(3, 6)):
lce = entities.LifecycleEnvironment(organization=self.org).create()
promote(comp_cvv, lce.id)
self.assertEqual(len(comp_cvv.read().environment), i+1)
@run_only_on('sat')
class DockerActivationKeyTestCase(APITestCase):
"""Tests specific to adding ``Docker`` repositories to Activation Keys."""
@classmethod
def setUpClass(cls):
"""Create necessary objects which can be re-used in tests."""
super(DockerActivationKeyTestCase, cls).setUpClass()
cls.org = entities.Organization().create()
cls.lce = entities.LifecycleEnvironment(organization=cls.org).create()
cls.repo = _create_repository(
entities.Product(organization=cls.org).create())
content_view = entities.ContentView(
composite=False,
organization=cls.org,
).create()
content_view.repository = [cls.repo]
cls.content_view = content_view.update(['repository'])
cls.content_view.publish()
cls.cvv = content_view.read().version[0].read()
promote(cls.cvv, cls.lce.id)
def test_add_docker_repo_to_activation_key(self):
"""@Test: Add Docker-type repository to a non-composite content view
and publish it. Then create an activation key and associate it with the
Docker content view.
@Assert: Docker-based content view can be added to activation key
@Feature: Docker
"""
ak = entities.ActivationKey(
content_view=self.content_view,
environment=self.lce,
organization=self.org,
).create()
self.assertEqual(ak.content_view.id, self.content_view.id)
self.assertEqual(ak.content_view.read().repository[0].id, self.repo.id)
def test_remove_docker_repo_to_activation_key(self):
"""@Test: Add Docker-type repository to a non-composite content view
and publish it. Create an activation key and associate it with the
Docker content view. Then remove this content view from the activation
key.
@Assert: Docker-based content view can be added and then removed from
the activation key.
@Feature: Docker
"""
ak = entities.ActivationKey(
content_view=self.content_view,
environment=self.lce,
organization=self.org,
).create()
self.assertEqual(ak.content_view.id, self.content_view.id)
ak.content_view = None
self.assertIsNone(ak.update(['content_view']).content_view)
def test_add_docker_repo_composite_view_to_activation_key(self):
"""@Test:Add Docker-type repository to a non-composite content view and
publish it. Then add this content view to a composite content view and
publish it. Create an activation key and associate it with the
composite Docker content view.
@Assert: Docker-based content view can be added to activation key
@Feature: Docker
"""
comp_content_view = entities.ContentView(
composite=True,
organization=self.org,
).create()
comp_content_view.component = [self.cvv]
comp_content_view = comp_content_view.update(['component'])
self.assertEqual(self.cvv.id, comp_content_view.component[0].id)
comp_content_view.publish()
comp_cvv = comp_content_view.read().version[0].read()
promote(comp_cvv, self.lce.id)
ak = entities.ActivationKey(
content_view=comp_content_view,
environment=self.lce,
organization=self.org,
).create()
self.assertEqual(ak.content_view.id, comp_content_view.id)
def test_remove_docker_repo_composite_view_to_activation_key(self):
"""@Test: Add Docker-type repository to a non-composite content view
and publish it. Then add this content view to a composite content view
and publish it. Create an activation key and associate it with the
composite Docker content view. Then, remove the composite content view
from the activation key.
@Assert: Docker-based composite content view can be added and then
removed from the activation key.
@Feature: Docker
"""
comp_content_view = entities.ContentView(
composite=True,
organization=self.org,
).create()
comp_content_view.component = [self.cvv]
comp_content_view = comp_content_view.update(['component'])
self.assertEqual(self.cvv.id, comp_content_view.component[0].id)
comp_content_view.publish()
comp_cvv = comp_content_view.read().version[0].read()
promote(comp_cvv, self.lce.id)
ak = entities.ActivationKey(
content_view=comp_content_view,
environment=self.lce,
organization=self.org,
).create()
self.assertEqual(ak.content_view.id, comp_content_view.id)
ak.content_view = None
self.assertIsNone(ak.update(['content_view']).content_view)
@run_only_on('sat')
class DockerComputeResourceTestCase(APITestCase):
"""Tests specific to managing Docker-based Compute Resources."""
@classmethod
def setUpClass(cls):
"""Create an organization and product which can be re-used in tests."""
super(DockerComputeResourceTestCase, cls).setUpClass()
cls.org = entities.Organization().create()
def test_create_internal_docker_compute_resource(self):
"""@Test: Create a Docker-based Compute Resource in the Satellite 6
instance.
@Assert: Compute Resource can be created and listed.
@Feature: Docker
"""
for name in valid_data_list():
with self.subTest(name):
compute_resource = entities.DockerComputeResource(
name=name,
url=INTERNAL_DOCKER_URL,
).create()
self.assertEqual(compute_resource.name, name)
self.assertEqual(compute_resource.provider, DOCKER_PROVIDER)
self.assertEqual(compute_resource.url, INTERNAL_DOCKER_URL)
def test_update_docker_compute_resource(self):
"""@Test: Create a Docker-based Compute Resource in the Satellite 6
instance then edit its attributes.
@Assert: Compute Resource can be created, listed and its attributes can
be updated.
@Feature: Docker
"""
for url in (EXTERNAL_DOCKER_URL, INTERNAL_DOCKER_URL):
with self.subTest(url):
compute_resource = entities.DockerComputeResource(
organization=[self.org],
url=url,
).create()
self.assertEqual(compute_resource.url, url)
compute_resource.url = gen_url()
self.assertEqual(
compute_resource.url,
compute_resource.update(['url']).url,
)
def test_list_containers_internal_docker_compute_resource(self):
"""@Test: Create a Docker-based Compute Resource in the Satellite 6
instance then list its running containers.
@Assert: Compute Resource can be created and existing instances can be
listed.
@Feature: Docker
"""
for url in (EXTERNAL_DOCKER_URL, INTERNAL_DOCKER_URL):
with self.subTest(url):
compute_resource = entities.DockerComputeResource(
organization=[self.org],
url=url,
).create()
self.assertEqual(compute_resource.url, url)
self.assertEqual(len(entities.AbstractDockerContainer(
compute_resource=compute_resource).search()), 0)
container = entities.DockerHubContainer(
command='top',
compute_resource=compute_resource,
organization=[self.org],
).create()
result = entities.AbstractDockerContainer(
compute_resource=compute_resource).search()
self.assertEqual(len(result), 1)
self.assertEqual(result[0].name, container.name)
def test_create_external_docker_compute_resource(self):
"""@Test: Create a Docker-based Compute Resource using an external
Docker-enabled system.
@Assert: Compute Resource can be created and listed.
@Feature: Docker
"""
for name in valid_data_list():
with self.subTest(name):
compute_resource = entities.DockerComputeResource(
name=name,
url=EXTERNAL_DOCKER_URL,
).create()
self.assertEqual(compute_resource.name, name)
self.assertEqual(compute_resource.provider, DOCKER_PROVIDER)
self.assertEqual(compute_resource.url, EXTERNAL_DOCKER_URL)
def test_delete_docker_compute_resource(self):
"""@Test: Create a Docker-based Compute Resource then delete it.
@Assert: Compute Resource can be created, listed and deleted.
@Feature: Docker
"""
for url in (EXTERNAL_DOCKER_URL, INTERNAL_DOCKER_URL):
with self.subTest(url):
resource = entities.DockerComputeResource(url=url).create()
self.assertEqual(resource.url, url)
self.assertEqual(resource.provider, DOCKER_PROVIDER)
resource.delete()
with self.assertRaises(HTTPError):
resource.read()
@run_only_on('sat')
class DockerContainersTestCase(APITestCase):
"""Tests specific to using ``Containers`` in local and external Docker
Compute Resources
"""
@classmethod
def setUpClass(cls):
"""Create an organization and product which can be re-used in tests."""
super(DockerContainersTestCase, cls).setUpClass()
cls.org = entities.Organization().create()
cls.cr_internal = entities.DockerComputeResource(
name=gen_string('alpha'),
organization=[cls.org],
url=INTERNAL_DOCKER_URL,
).create()
cls.cr_external = entities.DockerComputeResource(
name=gen_string('alpha'),
organization=[cls.org],
url=EXTERNAL_DOCKER_URL,
).create()
def test_create_container_compute_resource(self):
"""@Test: Create containers for local and external compute resources
@Feature: Docker
@Assert: The docker container is created for each compute resource
"""
for compute_resource in (self.cr_internal, self.cr_external):
with self.subTest(compute_resource.url):
container = entities.DockerHubContainer(
command='top',
compute_resource=compute_resource,
organization=[self.org],
).create()
self.assertEqual(
container.compute_resource.read().name,
compute_resource.name,
)
def test_create_container_compute_resource_power(self):
"""@Test: Create containers for local and external compute resource,
then power them on and finally power them off
@Feature: Docker
@Assert: The docker container is created for each compute resource
and the power status is showing properly
"""
for compute_resource in (self.cr_internal, self.cr_external):
with self.subTest(compute_resource.url):
container = entities.DockerHubContainer(
command='top',
compute_resource=compute_resource,
organization=[self.org],
).create()
self.assertEqual(
container.compute_resource.read().url,
compute_resource.url,
)
self.assertEqual(
container.power(
data={u'power_action': 'status'})['running'],
True
)
container.power(data={u'power_action': 'stop'})
self.assertEqual(
container.power(
data={u'power_action': 'status'})['running'],
False
)
def test_create_container_compute_resource_read_log(self):
"""@Test: Create containers for local and external compute resource and
read their logs
@Feature: Docker
@Assert: The docker container is created for each compute resource and
its log can be read
"""
for compute_resource in (self.cr_internal, self.cr_external):
with self.subTest(compute_resource.url):
container = entities.DockerHubContainer(
command='date',
compute_resource=compute_resource,
organization=[self.org],
).create()
self.assertTrue(container.logs()['logs'])
@stubbed()
# Return to that case once BZ 1230710 is fixed (with adding
# DockerRegistryContainer class to Nailgun)
def test_create_container_external_registry(self):
"""@Test: Create a container pulling an image from a custom external
registry
@Feature: Docker
@Assert: The docker container is created and the image is pulled from
the external registry
@Status: Manual
"""
def test_delete_container_compute_resource(self):
"""@Test: Delete containers in local and external compute resources
@Feature: Docker
@Assert: The docker containers are deleted in local and external
compute resources
"""
for compute_resource in (self.cr_internal, self.cr_external):
with self.subTest(compute_resource.url):
container = entities.DockerHubContainer(
command='top',
compute_resource=compute_resource,
organization=[self.org],
).create()
container.delete()
with self.assertRaises(HTTPError):
container.read()
@run_only_on('sat')
class DockerRegistriesTestCase(APITestCase):
"""Tests specific to performing CRUD methods against ``Registries``
repositories.
"""
def test_create_registry(self):
"""@Test: Create an external docker registry
@Feature: Docker
@Assert: External registry is created successfully
"""
for name in valid_data_list():
with self.subTest(name):
url = gen_url(subdomain=gen_string('alpha'))
description = gen_string('alphanumeric')
registry = entities.Registry(
description=description,
name=name,
url=url,
).create()
self.assertEqual(registry.name, name)
self.assertEqual(registry.url, url)
self.assertEqual(registry.description, description)
def test_update_registry_name(self):
"""@Test: Create an external docker registry and update its name
@Feature: Docker
@Assert: the external registry is updated with the new name
"""
registry = entities.Registry(name=gen_string('alpha')).create()
for new_name in valid_data_list():
with self.subTest(new_name):
registry.name = new_name
registry = registry.update()
self.assertEqual(registry.name, new_name)
def test_update_registry_url(self):
"""@Test: Create an external docker registry and update its URL
@Feature: Docker
@Assert: the external registry is updated with the new URL
"""
url = gen_url(subdomain=gen_string('alpha'))
new_url = gen_url(subdomain=gen_string('alpha'))
registry = entities.Registry(url=url).create()
self.assertEqual(registry.url, url)
registry.url = new_url
registry = registry.update()
self.assertEqual(registry.url, new_url)
def test_update_registry_description(self):
"""@Test: Create an external docker registry and update its description
@Feature: Docker
@Assert: the external registry is updated with the new description
"""
registry = entities.Registry().create()
for new_desc in valid_data_list():
with self.subTest(new_desc):
registry.description = new_desc
registry = registry.update()
self.assertEqual(registry.description, new_desc)
def test_update_registry_username(self):
"""@Test: Create an external docker registry and update its username
@Feature: Docker
@Assert: the external registry is updated with the new username
"""
username = gen_string('alpha')
new_username = gen_string('alpha')
registry = entities.Registry(
username=username,
password=gen_string('alpha'),
).create()
self.assertEqual(registry.username, username)
registry.username = new_username
registry = registry.update()
self.assertEqual(registry.username, new_username)
def test_delete_registry(self):
"""@Test: Create an external docker registry and then delete it
@Feature: Docker
@Assert: The external registry is deleted successfully
"""
for name in valid_data_list():
with self.subTest(name):
registry = entities.Registry(name=name).create()
registry.delete()
with self.assertRaises(HTTPError):
registry.read()
|
abalakh/robottelo
|
tests/foreman/api/test_docker.py
|
Python
|
gpl-3.0
| 47,259 | 0.000042 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from yali.storage import StorageError
class LibraryError(StorageError):
pass
|
akuster/yali
|
yali/storage/library/__init__.py
|
Python
|
gpl-2.0
| 125 | 0.008 |
"""
Interrogate stick for supported capabilities.
"""
import sys
from codinghyde.ant import driver
from codinghyde.ant import node
from config import *
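# config.py is expected to define at least SERIAL (the ANT stick's serial
# device, e.g. '/dev/ttyUSB0') and DEBUG, both of which are used below.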
# Initialize
stick = driver.USB1Driver(SERIAL, debug=DEBUG)
antnode = node.Node(stick)
antnode.start()
# Interrogate stick
# Note: This method will return immediately, as the stick's capabilities are
# interrogated on node initialization (node.start()) in order to set proper
# internal Node instance state.
capabilities = antnode.getCapabilities()
print 'Maximum channels:', capabilities['max_channels']
print 'Maximum network keys:', capabilities['max_net_keys']
print 'Standard options: %X' % capabilities['std_options']
print 'Advanced options: %X' % capabilities['adv_options']
# Shutdown
antnode.stop()
|
mvillalba/codinghyde.ant
|
demos/ant/02-capabilities.py
|
Python
|
mit
| 769 | 0 |
import re
import unicodedata
from collections import defaultdict
from typing import Any, Dict, List, Optional, Sequence, Union
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db.models.query import QuerySet
from django.forms.models import model_to_dict
from django.utils.translation import gettext as _
from typing_extensions import TypedDict
from zulip_bots.custom_exceptions import ConfigValidationError
from zerver.lib.avatar import avatar_url, get_avatar_field
from zerver.lib.cache import (
bulk_cached_fetch,
realm_user_dict_fields,
user_profile_by_id_cache_key,
user_profile_cache_key_id,
)
from zerver.lib.exceptions import OrganizationAdministratorRequired
from zerver.lib.request import JsonableError
from zerver.lib.timezone import canonicalize_timezone
from zerver.models import (
CustomProfileField,
CustomProfileFieldValue,
Realm,
Service,
UserProfile,
get_realm_user_dicts,
get_user_profile_by_id_in_realm,
)
def check_full_name(full_name_raw: str) -> str:
full_name = full_name_raw.strip()
if len(full_name) > UserProfile.MAX_NAME_LENGTH:
raise JsonableError(_("Name too long!"))
if len(full_name) < UserProfile.MIN_NAME_LENGTH:
raise JsonableError(_("Name too short!"))
for character in full_name:
if unicodedata.category(character)[0] == "C" or character in UserProfile.NAME_INVALID_CHARS:
raise JsonableError(_("Invalid characters in name!"))
# Names ending with e.g. `|15` could be ambiguous for
# sloppily-written parsers of our Markdown syntax for mentioning
# users with ambiguous names, and likely have no real use, so we
# ban them.
if re.search(r"\|\d+$", full_name_raw):
raise JsonableError(_("Invalid format!"))
return full_name
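# For illustration: check_full_name("Cordelia Lear") returns the stripped name
# unchanged, while a name containing control characters or ending in a marker
# like "|42" raises JsonableError (example values are illustrative).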
# NOTE: We don't try to absolutely prevent 2 bots from having the same
# name (e.g. you can get there by reactivating a deactivated bot after
# making a new bot with the same name). This is just a check designed
# to make it unlikely to happen by accident.
def check_bot_name_available(realm_id: int, full_name: str) -> None:
dup_exists = UserProfile.objects.filter(
realm_id=realm_id,
full_name=full_name.strip(),
is_active=True,
).exists()
if dup_exists:
raise JsonableError(_("Name is already in use!"))
def check_short_name(short_name_raw: str) -> str:
short_name = short_name_raw.strip()
if len(short_name) == 0:
raise JsonableError(_("Bad name or username"))
return short_name
def check_valid_bot_config(bot_type: int, service_name: str, config_data: Dict[str, str]) -> None:
if bot_type == UserProfile.INCOMING_WEBHOOK_BOT:
from zerver.lib.integrations import WEBHOOK_INTEGRATIONS
config_options = None
for integration in WEBHOOK_INTEGRATIONS:
if integration.name == service_name:
# key: validator
config_options = {c[1]: c[2] for c in integration.config_options}
break
if not config_options:
raise JsonableError(_("Invalid integration '{}'.").format(service_name))
missing_keys = set(config_options.keys()) - set(config_data.keys())
if missing_keys:
raise JsonableError(
_("Missing configuration parameters: {}").format(
missing_keys,
)
)
for key, validator in config_options.items():
value = config_data[key]
error = validator(key, value)
if error:
raise JsonableError(_("Invalid {} value {} ({})").format(key, value, error))
elif bot_type == UserProfile.EMBEDDED_BOT:
try:
from zerver.lib.bot_lib import get_bot_handler
bot_handler = get_bot_handler(service_name)
if hasattr(bot_handler, "validate_config"):
bot_handler.validate_config(config_data)
except ConfigValidationError:
# The exception provides a specific error message, but that
# message is not tagged translatable, because it is
# triggered in the external zulip_bots package.
# TODO: Think of some clever way to provide a more specific
# error message.
raise JsonableError(_("Invalid configuration data!"))
# Adds an outgoing webhook or embedded bot service.
def add_service(
name: str,
user_profile: UserProfile,
base_url: Optional[str] = None,
interface: Optional[int] = None,
token: Optional[str] = None,
) -> None:
Service.objects.create(
name=name, user_profile=user_profile, base_url=base_url, interface=interface, token=token
)
def check_bot_creation_policy(user_profile: UserProfile, bot_type: int) -> None:
# Realm administrators can always add bot
if user_profile.is_realm_admin:
return
if user_profile.realm.bot_creation_policy == Realm.BOT_CREATION_EVERYONE:
return
if user_profile.realm.bot_creation_policy == Realm.BOT_CREATION_ADMINS_ONLY:
raise OrganizationAdministratorRequired()
if (
user_profile.realm.bot_creation_policy == Realm.BOT_CREATION_LIMIT_GENERIC_BOTS
and bot_type == UserProfile.DEFAULT_BOT
):
raise OrganizationAdministratorRequired()
def check_valid_bot_type(user_profile: UserProfile, bot_type: int) -> None:
if bot_type not in user_profile.allowed_bot_types:
raise JsonableError(_("Invalid bot type"))
def check_valid_interface_type(interface_type: Optional[int]) -> None:
if interface_type not in Service.ALLOWED_INTERFACE_TYPES:
raise JsonableError(_("Invalid interface type"))
def is_administrator_role(role: int) -> bool:
return role in {UserProfile.ROLE_REALM_ADMINISTRATOR, UserProfile.ROLE_REALM_OWNER}
def bulk_get_users(
emails: List[str], realm: Optional[Realm], base_query: "QuerySet[UserProfile]" = None
) -> Dict[str, UserProfile]:
if base_query is None:
assert realm is not None
query = UserProfile.objects.filter(realm=realm, is_active=True)
realm_id = realm.id
else:
# WARNING: Currently, this code path only really supports one
# version of `base_query` being used (because otherwise,
# they'll share the cache, which can screw up the filtering).
# If you're using this flow, you'll need to re-do any filters
# in base_query in the code itself; base_query is just a perf
# optimization.
query = base_query
realm_id = 0
def fetch_users_by_email(emails: List[str]) -> List[UserProfile]:
# This should be just
#
# UserProfile.objects.select_related("realm").filter(email__iexact__in=emails,
# realm=realm)
#
# But chaining __in and __iexact doesn't work with Django's
# ORM, so we have the following hack to construct the relevant where clause
where_clause = "upper(zerver_userprofile.email::text) IN (SELECT upper(email) FROM unnest(%s) AS email)"
return query.select_related("realm").extra(where=[where_clause], params=(emails,))
def user_to_email(user_profile: UserProfile) -> str:
return user_profile.email.lower()
return bulk_cached_fetch(
# Use a separate cache key to protect us from conflicts with
# the get_user cache.
lambda email: "bulk_get_users:" + user_profile_cache_key_id(email, realm_id),
fetch_users_by_email,
[email.lower() for email in emails],
id_fetcher=user_to_email,
)
def get_user_id(user: UserProfile) -> int:
return user.id
def user_ids_to_users(user_ids: Sequence[int], realm: Realm) -> List[UserProfile]:
# TODO: Consider adding a flag to control whether deactivated
# users should be included.
def fetch_users_by_id(user_ids: List[int]) -> List[UserProfile]:
return list(UserProfile.objects.filter(id__in=user_ids).select_related())
user_profiles_by_id: Dict[int, UserProfile] = bulk_cached_fetch(
cache_key_function=user_profile_by_id_cache_key,
query_function=fetch_users_by_id,
object_ids=user_ids,
id_fetcher=get_user_id,
)
found_user_ids = user_profiles_by_id.keys()
missed_user_ids = [user_id for user_id in user_ids if user_id not in found_user_ids]
if missed_user_ids:
raise JsonableError(_("Invalid user ID: {}").format(missed_user_ids[0]))
user_profiles = list(user_profiles_by_id.values())
for user_profile in user_profiles:
if user_profile.realm != realm:
raise JsonableError(_("Invalid user ID: {}").format(user_profile.id))
return user_profiles
def access_bot_by_id(user_profile: UserProfile, user_id: int) -> UserProfile:
try:
target = get_user_profile_by_id_in_realm(user_id, user_profile.realm)
except UserProfile.DoesNotExist:
raise JsonableError(_("No such bot"))
if not target.is_bot:
raise JsonableError(_("No such bot"))
if not user_profile.can_admin_user(target):
raise JsonableError(_("Insufficient permission"))
return target
def access_user_by_id(
user_profile: UserProfile,
target_user_id: int,
*,
allow_deactivated: bool = False,
allow_bots: bool = False,
for_admin: bool,
) -> UserProfile:
"""Master function for accessing another user by ID in API code;
verifies the user ID is in the same realm, and if requested checks
for administrative privileges, with flags for various special
cases.
"""
try:
target = get_user_profile_by_id_in_realm(target_user_id, user_profile.realm)
except UserProfile.DoesNotExist:
raise JsonableError(_("No such user"))
if target.is_bot and not allow_bots:
raise JsonableError(_("No such user"))
if not target.is_active and not allow_deactivated:
raise JsonableError(_("User is deactivated"))
if not for_admin:
# Administrative access is not required just to read a user.
return target
if not user_profile.can_admin_user(target):
raise JsonableError(_("Insufficient permission"))
return target
class Accounts(TypedDict):
realm_name: str
realm_id: int
full_name: str
avatar: Optional[str]
def get_accounts_for_email(email: str) -> List[Accounts]:
profiles = (
UserProfile.objects.select_related("realm")
.filter(
delivery_email__iexact=email.strip(),
is_active=True,
realm__deactivated=False,
is_bot=False,
)
.order_by("date_joined")
)
accounts: List[Accounts] = []
for profile in profiles:
accounts.append(
dict(
realm_name=profile.realm.name,
realm_id=profile.realm.id,
full_name=profile.full_name,
avatar=avatar_url(profile),
)
)
return accounts
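# Each entry in the returned list follows the Accounts TypedDict above, e.g.
# (hypothetical values):
#   {"realm_name": "Example Realm", "realm_id": 1,
#    "full_name": "Example User", "avatar": None}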
def get_api_key(user_profile: UserProfile) -> str:
return user_profile.api_key
def get_all_api_keys(user_profile: UserProfile) -> List[str]:
# Users can only have one API key for now
return [user_profile.api_key]
def validate_user_custom_profile_field(
realm_id: int, field: CustomProfileField, value: Union[int, str, List[int]]
) -> Union[int, str, List[int]]:
validators = CustomProfileField.FIELD_VALIDATORS
field_type = field.field_type
var_name = f"{field.name}"
if field_type in validators:
validator = validators[field_type]
return validator(var_name, value)
elif field_type == CustomProfileField.SELECT:
choice_field_validator = CustomProfileField.SELECT_FIELD_VALIDATORS[field_type]
field_data = field.field_data
# Put an assertion so that mypy doesn't complain.
assert field_data is not None
return choice_field_validator(var_name, field_data, value)
elif field_type == CustomProfileField.USER:
user_field_validator = CustomProfileField.USER_FIELD_VALIDATORS[field_type]
return user_field_validator(realm_id, value, False)
else:
raise AssertionError("Invalid field type")
def validate_user_custom_profile_data(
realm_id: int, profile_data: List[Dict[str, Union[int, str, List[int]]]]
) -> None:
    # This function validates all custom field values according to their field type.
for item in profile_data:
field_id = item["id"]
try:
field = CustomProfileField.objects.get(id=field_id)
except CustomProfileField.DoesNotExist:
raise JsonableError(_("Field id {id} not found.").format(id=field_id))
try:
validate_user_custom_profile_field(realm_id, field, item["value"])
except ValidationError as error:
raise JsonableError(error.message)
def can_access_delivery_email(user_profile: UserProfile) -> bool:
realm = user_profile.realm
if realm.email_address_visibility == Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS:
return user_profile.is_realm_admin
if realm.email_address_visibility == Realm.EMAIL_ADDRESS_VISIBILITY_MODERATORS:
return user_profile.is_realm_admin or user_profile.is_moderator
return False
def format_user_row(
realm: Realm,
acting_user: Optional[UserProfile],
row: Dict[str, Any],
client_gravatar: bool,
user_avatar_url_field_optional: bool,
custom_profile_field_data: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
"""Formats a user row returned by a database fetch using
.values(*realm_user_dict_fields) into a dictionary representation
of that user for API delivery to clients. The acting_user
argument is used for permissions checks.
"""
is_admin = is_administrator_role(row["role"])
is_owner = row["role"] == UserProfile.ROLE_REALM_OWNER
is_guest = row["role"] == UserProfile.ROLE_GUEST
is_bot = row["is_bot"]
result = dict(
email=row["email"],
user_id=row["id"],
avatar_version=row["avatar_version"],
is_admin=is_admin,
is_owner=is_owner,
is_guest=is_guest,
is_billing_admin=row["is_billing_admin"],
role=row["role"],
is_bot=is_bot,
full_name=row["full_name"],
timezone=canonicalize_timezone(row["timezone"]),
is_active=row["is_active"],
date_joined=row["date_joined"].isoformat(),
)
# Zulip clients that support using `GET /avatar/{user_id}` as a
# fallback if we didn't send an avatar URL in the user object pass
# user_avatar_url_field_optional in client_capabilities.
#
# This is a major network performance optimization for
# organizations with 10,000s of users where we would otherwise
# send avatar URLs in the payload (either because most users have
# uploaded avatars or because EMAIL_ADDRESS_VISIBILITY_ADMINS
# prevents the older client_gravatar optimization from helping).
    # The performance impact is large, largely because the hashes in
# avatar URLs structurally cannot compress well.
#
# The user_avatar_url_field_optional gives the server sole
# discretion in deciding for which users we want to send the
# avatar URL (Which saves clients an RTT at the cost of some
# bandwidth). At present, the server looks at `long_term_idle` to
# decide which users to include avatars for, piggy-backing on a
# different optimization for organizations with 10,000s of users.
include_avatar_url = not user_avatar_url_field_optional or not row["long_term_idle"]
if include_avatar_url:
result["avatar_url"] = get_avatar_field(
user_id=row["id"],
realm_id=realm.id,
email=row["delivery_email"],
avatar_source=row["avatar_source"],
avatar_version=row["avatar_version"],
medium=False,
client_gravatar=client_gravatar,
)
if acting_user is not None and can_access_delivery_email(acting_user):
result["delivery_email"] = row["delivery_email"]
if is_bot:
result["bot_type"] = row["bot_type"]
if row["email"] in settings.CROSS_REALM_BOT_EMAILS:
result["is_cross_realm_bot"] = True
# Note that bot_owner_id can be None with legacy data.
result["bot_owner_id"] = row["bot_owner_id"]
elif custom_profile_field_data is not None:
result["profile_data"] = custom_profile_field_data
return result
def user_profile_to_user_row(user_profile: UserProfile) -> Dict[str, Any]:
# What we're trying to do is simulate the user_profile having been
# fetched from a QuerySet using `.values(*realm_user_dict_fields)`
# even though we fetched UserProfile objects. This is messier
# than it seems.
#
# What we'd like to do is just call model_to_dict(user,
# fields=realm_user_dict_fields). The problem with this is
# that model_to_dict has a different convention than
# `.values()` in its handling of foreign keys, naming them as
# e.g. `bot_owner`, not `bot_owner_id`; we work around that
# here.
#
# This could be potentially simplified in the future by
# changing realm_user_dict_fields to name the bot owner with
# the less readable `bot_owner` (instead of `bot_owner_id`).
user_row = model_to_dict(user_profile, fields=[*realm_user_dict_fields, "bot_owner"])
user_row["bot_owner_id"] = user_row["bot_owner"]
del user_row["bot_owner"]
return user_row
def get_cross_realm_dicts() -> List[Dict[str, Any]]:
users = bulk_get_users(
list(settings.CROSS_REALM_BOT_EMAILS),
None,
base_query=UserProfile.objects.filter(realm__string_id=settings.SYSTEM_BOT_REALM),
).values()
result = []
for user in users:
        # Important: We filter here, in addition to in
# `base_query`, because of how bulk_get_users shares its
# cache with other UserProfile caches.
if user.realm.string_id != settings.SYSTEM_BOT_REALM: # nocoverage
continue
user_row = user_profile_to_user_row(user)
        # Because we want to avoid clients being exposed to the
# implementation detail that these bots are self-owned, we
# just set bot_owner_id=None.
user_row["bot_owner_id"] = None
result.append(
format_user_row(
user.realm,
acting_user=user,
row=user_row,
client_gravatar=False,
user_avatar_url_field_optional=False,
custom_profile_field_data=None,
)
)
return result
def get_custom_profile_field_values(
custom_profile_field_values: List[CustomProfileFieldValue],
) -> Dict[int, Dict[str, Any]]:
profiles_by_user_id: Dict[int, Dict[str, Any]] = defaultdict(dict)
for profile_field in custom_profile_field_values:
user_id = profile_field.user_profile_id
if profile_field.field.is_renderable():
profiles_by_user_id[user_id][str(profile_field.field_id)] = {
"value": profile_field.value,
"rendered_value": profile_field.rendered_value,
}
else:
profiles_by_user_id[user_id][str(profile_field.field_id)] = {
"value": profile_field.value,
}
return profiles_by_user_id
def get_raw_user_data(
realm: Realm,
acting_user: Optional[UserProfile],
*,
target_user: Optional[UserProfile] = None,
client_gravatar: bool,
user_avatar_url_field_optional: bool,
include_custom_profile_fields: bool = True,
) -> Dict[int, Dict[str, str]]:
"""Fetches data about the target user(s) appropriate for sending to
acting_user via the standard format for the Zulip API. If
target_user is None, we fetch all users in the realm.
"""
profiles_by_user_id = None
custom_profile_field_data = None
# target_user is an optional parameter which is passed when user data of a specific user
# is required. It is 'None' otherwise.
if target_user is not None:
user_dicts = [user_profile_to_user_row(target_user)]
else:
user_dicts = get_realm_user_dicts(realm.id)
if include_custom_profile_fields:
base_query = CustomProfileFieldValue.objects.select_related("field")
# TODO: Consider optimizing this query away with caching.
if target_user is not None:
custom_profile_field_values = base_query.filter(user_profile=target_user)
else:
custom_profile_field_values = base_query.filter(field__realm_id=realm.id)
profiles_by_user_id = get_custom_profile_field_values(custom_profile_field_values)
result = {}
for row in user_dicts:
if profiles_by_user_id is not None:
custom_profile_field_data = profiles_by_user_id.get(row["id"], {})
result[row["id"]] = format_user_row(
realm,
acting_user=acting_user,
row=row,
client_gravatar=client_gravatar,
user_avatar_url_field_optional=user_avatar_url_field_optional,
custom_profile_field_data=custom_profile_field_data,
)
return result
|
punchagan/zulip
|
zerver/lib/users.py
|
Python
|
apache-2.0
| 21,312 | 0.001548 |
# -*- coding: utf-8 -*-
"""Installer script for Pywikibot 2.0 framework."""
#
# (C) Pywikibot team, 2009-2015
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
import itertools
import os
import sys
PYTHON_VERSION = sys.version_info[:3]
PY2 = (PYTHON_VERSION[0] == 2)
PY26 = (PYTHON_VERSION < (2, 7))
versions_required_message = """
Pywikibot not available on:
%s
Pywikibot is only supported under Python 2.6.5+, 2.7.2+ or 3.3+
"""
def python_is_supported():
"""Check that Python is supported."""
# Any change to this must be copied to pwb.py
return (PYTHON_VERSION >= (3, 3, 0) or
(PY2 and PYTHON_VERSION >= (2, 7, 2)) or
(PY26 and PYTHON_VERSION >= (2, 6, 5)))
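# For instance, Python 2.7.10 and 3.4.3 satisfy this check, while 2.6.4,
# 2.7.1 and 3.2.5 do not.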
if not python_is_supported():
raise RuntimeError(versions_required_message % sys.version)
test_deps = []
dependencies = ['requests']
# the irc module has no Python 2.6 support since 10.0
irc_dep = 'irc==8.9' if sys.version_info < (2, 7) else 'irc'
extra_deps = {
# Core library dependencies
'isbn': ['python-stdnum'],
'Graphviz': ['pydot>=1.0.28'],
'Google': ['google>=1.7'],
'IRC': [irc_dep],
'mwparserfromhell': ['mwparserfromhell>=0.3.3'],
'Tkinter': ['Pillow'],
# 0.6.1 supports socket.io 1.0, but WMF is using 0.9 (T91393 and T85716)
'rcstream': ['socketIO-client<0.6.1'],
'security': ['requests[security]'],
'mwoauth': ['mwoauth>=0.2.4'],
'html': ['BeautifulSoup4'],
}
if PY2:
# Additional core library dependencies which are only available on Python 2
extra_deps.update({
'csv': ['unicodecsv'],
'MySQL': ['oursql'],
'unicode7': ['unicodedata2>=7.0.0-2'],
})
script_deps = {
'flickrripper.py': ['Pillow'],
'states_redirect.py': ['pycountry'],
'weblinkchecker.py': ['memento_client>=0.5.1'],
}
# flickrapi 1.4.4 installs a root logger in verbose mode; 1.4.5 fixes this.
# The problem doesn't exist in flickrapi 2.x.
# pywikibot accepts flickrapi 1.4.5+ on Python 2, as it has been stable for a
# long time, and only depends on python-requests 1.x, whereas flickrapi 2.x
# depends on python-requests 2.x, which is first packaged in Ubuntu 14.04
# and will be first packaged for Fedora Core 21.
# flickrapi 1.4.x does not run on Python 3, and setuptools can only
# select flickrapi 2.x for Python 3 installs.
script_deps['flickrripper.py'].append(
'flickrapi>=1.4.5,<2' if PY26 else 'flickrapi')
# lunatic-python is only available for Linux
if sys.platform.startswith('linux'):
script_deps['script_wui.py'] = [irc_dep, 'lunatic-python', 'crontab']
# The main pywin32 repository contains a Python 2 only setup.py with a small
# wrapper setup3.py for Python 3.
# http://pywin32.hg.sourceforge.net:8000/hgroot/pywin32/pywin32
# The main pywinauto repository doesn't support Python 3.
# The repositories used below have a Python 3 compliant setup.py
dependency_links = [
'git+https://github.com/AlereDevices/lunatic-python.git#egg=lunatic-python',
'hg+https://bitbucket.org/TJG/pywin32#egg=pywin32',
'git+https://github.com/vasily-v-ryabov/pywinauto-64#egg=pywinauto',
'git+https://github.com/nlhepler/pydot#egg=pydot-1.0.29',
]
if PYTHON_VERSION < (2, 7, 3):
# work around distutils hardcoded unittest dependency
# work around T106512
import unittest # noqa
if 'test' in sys.argv:
import unittest2
sys.modules['unittest'] = unittest2
if sys.version_info[0] == 2:
if PY26:
# requests security extra includes pyOpenSSL. cryptography is the
        # dependency of pyOpenSSL. 0.8.2 is the newest compatible version
# for Python 2.6, which won't raise unexpected DeprecationWarning.
extra_deps['security'].append('cryptography<=0.8.2')
script_deps['replicate_wiki.py'] = ['argparse']
dependencies.append('future>=0.15.0') # provides collections backports
dependencies += extra_deps['unicode7'] # T102461 workaround
# tools.ip does not have a hard dependency on an IP address module,
# as it falls back to using regexes if one is not available.
# The functional backport of py3 ipaddress is acceptable:
# https://pypi.python.org/pypi/ipaddress
# However the Debian package python-ipaddr is also supported:
# https://pypi.python.org/pypi/ipaddr
# Other backports are likely broken.
# ipaddr 2.1.10+ is distributed with Debian and Fedora. See T105443.
dependencies.append('ipaddr>=2.1.10')
if sys.version_info < (2, 7, 9):
# Python versions before 2.7.9 will cause urllib3 to trigger
# InsecurePlatformWarning warnings for all HTTPS requests. By
# installing with security extras, requests will automatically set
# them up and the warnings will stop. See
# <https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning>
# for more details.
dependencies += extra_deps['security']
script_deps['data_ingestion.py'] = extra_deps['csv']
# mwlib is not available for py3
script_deps['patrol'] = ['mwlib']
# Some of the ui_tests depend on accessing the console window's menu
# to set the console font and copy and paste, achieved using pywinauto
# which depends on pywin32.
# These tests may be disabled because pywin32 depends on VC++, is time
# consuming to build, and the console window can't be accessed during appveyor
# builds.
# Microsoft makes available a compiler for Python 2.7
# http://www.microsoft.com/en-au/download/details.aspx?id=44266
# If you set up your own compiler for Python 3, on 3.3 two demo files
# packaged with pywin32 may fail. Remove com/win32com/demos/ie*.py
if os.name == 'nt' and os.environ.get('PYSETUP_TEST_NO_UI', '0') != '1':
# FIXME: tests/ui_tests.py suggests pywinauto 0.4.2
    # which isn't provided on pypi.
test_deps += ['pywin32', 'pywinauto>=0.4.0']
extra_deps.update(script_deps)
# Add all dependencies as test dependencies,
# so all scripts can be compiled for script_tests, etc.
if 'PYSETUP_TEST_EXTRAS' in os.environ:
test_deps += list(itertools.chain(*(extra_deps.values())))
# mwlib requires 'pyparsing>=1.4.11,<1.6', which conflicts with
# pydot's requirement for pyparsing>=2.0.1.
if 'mwlib' in test_deps:
test_deps.remove('mwlib')
if 'oursql' in test_deps and os.name == 'nt':
test_deps.remove('oursql') # depends on Cython
if 'requests[security]' in test_deps:
# Bug T105767 on Python 2.7 release 9+
if sys.version_info[:2] == (2, 7) and sys.version_info[2] >= 9:
test_deps.remove('requests[security]')
# These extra dependencies are needed, otherwise unittest fails to load tests.
if sys.version_info[0] == 2:
test_deps += extra_deps['csv']
else:
test_deps += ['six']
from setuptools import setup, find_packages
name = 'pywikibot'
version = '2.0rc1.post2'
github_url = 'https://github.com/wikimedia/pywikibot-core'
setup(
name=name,
version=version,
description='Python MediaWiki Bot Framework',
long_description=open('README.rst').read(),
maintainer='The Pywikibot team',
maintainer_email='pywikibot@lists.wikimedia.org',
license='MIT License',
packages=['pywikibot'] + [package
for package in find_packages()
if package.startswith('pywikibot.')],
install_requires=dependencies,
dependency_links=dependency_links,
extras_require=extra_deps,
url='https://www.mediawiki.org/wiki/Pywikibot',
test_suite="tests.collector",
tests_require=test_deps,
classifiers=[
'License :: OSI Approved :: MIT License',
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'Environment :: Console',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
],
use_2to3=False
)
|
icyflame/batman
|
setup.py
|
Python
|
mit
| 7,976 | 0.000251 |
# (c) 2014, Brian Coca, Josh Drake, et al
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import time
import errno
try:
import simplejson as json
except ImportError:
import json
from ansible import constants as C
from ansible import utils
from ansible.cache.base import BaseCacheModule
class CacheModule(BaseCacheModule):
"""
A caching module backed by json files.
"""
def __init__(self, *args, **kwargs):
self._timeout = float(C.CACHE_PLUGIN_TIMEOUT)
self._cache = {}
self._cache_dir = C.CACHE_PLUGIN_CONNECTION # expects a dir path
if not self._cache_dir:
utils.exit("error, fact_caching_connection is not set, cannot use fact cache")
if not os.path.exists(self._cache_dir):
try:
os.makedirs(self._cache_dir)
except (OSError,IOError), e:
utils.warning("error while trying to create cache dir %s : %s" % (self._cache_dir, str(e)))
return None
def get(self, key):
if key in self._cache:
return self._cache.get(key)
if self.has_expired(key):
raise KeyError
cachefile = "%s/%s" % (self._cache_dir, key)
try:
f = open( cachefile, 'r')
except (OSError,IOError), e:
utils.warning("error while trying to write to %s : %s" % (cachefile, str(e)))
else:
value = json.load(f)
self._cache[key] = value
return value
finally:
f.close()
def set(self, key, value):
self._cache[key] = value
cachefile = "%s/%s" % (self._cache_dir, key)
try:
f = open(cachefile, 'w')
except (OSError,IOError), e:
utils.warning("error while trying to read %s : %s" % (cachefile, str(e)))
else:
f.write(utils.jsonify(value))
finally:
f.close()
def has_expired(self, key):
cachefile = "%s/%s" % (self._cache_dir, key)
try:
st = os.stat(cachefile)
except (OSError,IOError), e:
if e.errno == errno.ENOENT:
return False
else:
utils.warning("error while trying to stat %s : %s" % (cachefile, str(e)))
if time.time() - st.st_mtime <= self._timeout:
return False
if key in self._cache:
del self._cache[key]
return True
def keys(self):
keys = []
for k in os.listdir(self._cache_dir):
if not (k.startswith('.') or self.has_expired(k)):
keys.append(k)
return keys
def contains(self, key):
if key in self._cache:
return True
if self.has_expired(key):
return False
        cachefile = "%s/%s" % (self._cache_dir, key)
        try:
            os.stat(cachefile)
            return True
        except (OSError,IOError), e:
            if e.errno == errno.ENOENT:
                return False
            else:
                utils.warning("error while trying to stat %s : %s" % (cachefile, str(e)))
def delete(self, key):
del self._cache[key]
try:
os.remove("%s/%s" % (self._cache_dir, key))
except (OSError,IOError), e:
pass #TODO: only pass on non existing?
def flush(self):
self._cache = {}
for key in self.keys():
self.delete(key)
def copy(self):
ret = dict()
for key in self.keys():
ret[key] = self.get(key)
return ret
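# A minimal usage sketch (assuming fact_caching_connection points at a
# writable directory):
#
#   cache = CacheModule()
#   cache.set('host1', {'ansible_os_family': 'Debian'})
#   facts = cache.get('host1')
#
# Each key maps to a JSON file under the cache directory, so values must be
# JSON-serializable; entries older than the configured cache timeout are
# treated as expired.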
|
devopservices/ansible
|
lib/ansible/cache/jsonfile.py
|
Python
|
gpl-3.0
| 4,163 | 0.004324 |
#!/usr/bin/env python
"""Upserts Domains from Salesforce Domain__c.
"""
import logging
import os
from django.core.management.base import BaseCommand
import iss.models
import iss.salesforce
logger = logging.getLogger(os.path.basename(__file__))
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-m', '--modified-within',
type=int,
metavar='n-days',
default=7,
help='upsert Domains modified within n-days')
def handle(self, *args, **options):
upsert_domains(options['modified_within'])
def upsert_domains(modified_since=7):
"""Upsert Domains for SF Domain__c modified in last `modified_since` days.
"""
logger.info('upserting domains modified in last {since} days'.
format(since=modified_since))
modified_domains = (iss.salesforce.Domain.get_domains_modified_since(
days_ago=modified_since))
for domain in modified_domains:
iss.models.Domain.upsert(domain)
|
AASHE/iss
|
iss/management/commands/upsert_iss_domains.py
|
Python
|
mit
| 1,085 | 0 |
from sslyze import ServerNetworkLocation
from sslyze.plugins.elliptic_curves_plugin import (
SupportedEllipticCurvesScanResult,
SupportedEllipticCurvesImplementation,
)
from tests.connectivity_utils import check_connectivity_to_server_and_return_info
from tests.markers import can_only_run_on_linux_64
from tests.openssl_server import ModernOpenSslServer
class TestEllipticCurvesPluginWithOnlineServer:
def test_supported_curves(self):
# Given a server to scan that supports ECDH cipher suites
server_location = ServerNetworkLocation("www.cloudflare.com", 443)
server_info = check_connectivity_to_server_and_return_info(server_location)
# When scanning for supported elliptic curves, it succeeds
result: SupportedEllipticCurvesScanResult = SupportedEllipticCurvesImplementation.scan_server(server_info)
# And the result confirms that some curves are supported and some are not
assert result.supports_ecdh_key_exchange
assert result.supported_curves
assert result.rejected_curves
# And a CLI output can be generated
assert SupportedEllipticCurvesImplementation.cli_connector_cls.result_to_console_output(result)
@can_only_run_on_linux_64
class TestEllipticCurvesPluginWithLocalServer:
def test_supported_curves(self):
# Given a server to scan that supports ECDH cipher suites with specific curves
server_curves = ["X25519", "X448", "prime256v1", "secp384r1", "secp521r1"]
with ModernOpenSslServer(groups=":".join(server_curves)) as server:
server_location = ServerNetworkLocation(
hostname=server.hostname, ip_address=server.ip_address, port=server.port
)
server_info = check_connectivity_to_server_and_return_info(server_location)
# When scanning the server for supported curves, it succeeds
result: SupportedEllipticCurvesScanResult = SupportedEllipticCurvesImplementation.scan_server(server_info)
# And the supported curves were detected
assert set(server_curves) == {curve.name for curve in result.supported_curves}
|
nabla-c0d3/sslyze
|
tests/plugins_tests/test_elliptic_curves_plugin.py
|
Python
|
agpl-3.0
| 2,148 | 0.005121 |
# Copyright 2012-2013 Ravello Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, print_function
import argparse
import textwrap
from testmill import (console, manifest, keypair, login, error,
application, tasks, util, inflect)
from testmill.state import env
usage = textwrap.dedent("""\
usage: ravtest [OPTION]... run [-i] [-c] [--new] [--vms <vmlist>]
[--dry-run] <application> [<command>]
ravtest run --help
""")
description = textwrap.dedent("""\
Run automated tasks in a Ravello application.
The application defined by <application> is loaded from the manifest
(.ravello.yml). It is then created if it doesn't exist yet, and the
runbook defined in the manifest is run.
If --new is specified, a new application instance is always created,
even if one exists already.
The available options are:
-i, --interactive
Run in interactive mode. All tasks are run directly
connected to the console. In case of multiple virtual
machines, output will be interleaved and may be hard
to understand.
-c, --continue
Continue running even after an error.
--new
Never re-use existing applications.
--vms <vmlist>
Execute tasks only on these virtual machines, instead of on
all virtual machines in the application. <vmlist> is a
comma-separated list of VMs.
--dry-run
Do not execute any tasks. Useful for starting up an
application without doing anything yet.
""")
def add_args(parser):
parser.usage = usage
parser.description = description
parser.add_argument('-i', '--interactive', action='store_true')
parser.add_argument('-c', '--continue', action='store_true',
dest='continue_')
parser.add_argument('--new', action='store_true')
parser.add_argument('--vms')
parser.add_argument('--dry-run', action='store_true')
parser.add_argument('application')
parser.add_argument('command', nargs='?')
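# A typical invocation matching these arguments might look like:
#
#   ravtest run --new --vms web,db myapp 'py.test -x'
#
# (application, VM and command names above are illustrative).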
def do_run(args, env):
"""The "ravello run" command."""
login.default_login()
keypair.default_keypair()
manif = manifest.default_manifest()
appname = args.application
for appdef in manif.get('applications', []):
if appdef['name'] == appname:
break
else:
error.raise_error("Unknown application `{0}`.", appname)
vms = set((vm['name'] for vm in appdef.get('vms', [])))
if args.vms:
only = set((name for name in args.vms.split(',')))
if not only <= vms:
unknown = [name for name in only if name not in vms]
what = inflect.plural_noun('virtual machine', len(unknown))
error.raise_error("Unknown {0}: {1}", ', '.join(unknown), what)
vms = [name for name in vms if name in only]
if not vms:
error.raise_error('No virtual machines in application.')
app = application.create_or_reuse_application(appdef, args.new)
app = application.wait_for_application(app, vms)
if args.command:
for vm in appdef['vms']:
for task in vm['tasks']:
if task['name'] == 'execute':
task['commands'] = [args.command]
elif args.dry_run:
for vm in appdef['vms']:
vm['tasks'] = []
ret = tasks.run_all_tasks(app, vms)
console.info('\n== The following services will be available for {0} '
'minutes:\n', appdef['keepalive'])
for vm in app['vms']:
if vm['name'] not in vms:
continue
svcs = vm.get('suppliedServices')
if not svcs:
continue
console.info('On virtual machine `{0}`:', vm['name'])
for svc in svcs:
svc = svc['baseService']
addr = util.format_service(vm, svc)
console.info(' * {0}: {1}', svc['name'], addr)
console.info('')
return error.EX_OK if ret == 0 else error.EX_SOFTWARE
|
ravello/testmill
|
lib/testmill/command_run.py
|
Python
|
apache-2.0
| 4,718 | 0.000848 |
# Copyright (c) 2012, Tycho Andersen. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
class QtileState(object):
"""
Represents the state of the qtile object. Primarily used for restoring
state across restarts; any additional state which doesn't fit nicely
into X atoms can go here.
"""
def __init__(self, qtile):
# Note: window state is saved and restored via _NET_WM_STATE, so
# the only thing we need to restore here is the layout and screen
# configurations.
self.groups = {}
self.screens = {}
for group in qtile.groups:
self.groups[group.name] = group.layout.name
for index, screen in enumerate(qtile.screens):
self.screens[index] = screen.group.name
def apply(self, qtile):
"""
Rearrange the windows in the specified Qtile object according to
this QtileState.
"""
for (group, layout) in self.groups.items():
try:
qtile.groupMap[group].layout = layout
except KeyError:
pass # group missing
for (screen, group) in self.screens.items():
try:
group = qtile.groupMap[group]
qtile.screens[screen].setGroup(group)
except (KeyError, IndexError):
pass # group or screen missing
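# Rough sketch of the intended round trip (restart handling elided):
#
#   state = QtileState(qtile)    # snapshot per-group layouts and screen groups
#   ...                          # qtile restarts
#   state.apply(qtile)           # restore layouts and screen/group assignments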
|
kiniou/qtile
|
libqtile/state.py
|
Python
|
mit
| 2,415 | 0 |
__author__ = 'thatcher'
from django.contrib import admin
# from django.contrib.auth.models import User
# from django.contrib.auth.admin import UserAdmin
# from django.contrib.sessions.
from django.contrib.sessions.models import Session
from .models import *
from base.forms import *
def images_thubmnail(self):
return '<img style="max-height: 80px; width: auto;" src="{}" alt="{}" >'.format(self.uri(), self.alt)
# return self.uri()
images_thubmnail.short_description = 'Thumbnail'
images_thubmnail.allow_tags = True
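# short_description sets the column header in the admin change list, and
# allow_tags=True keeps the returned <img> markup from being escaped so the
# thumbnail renders inline (allow_tags is honoured by older Django versions).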
class TeamMemberAdmin(admin.ModelAdmin):
model = TeamMember
list_display = ['full_name', 'sort_weight', 'show_as_team']
admin.site.register(TeamMember, TeamMemberAdmin)
class NewsItemAdmin(admin.ModelAdmin):
model = NewsItem
list_display = ['id', 'title', 'publication_date', 'show', 'author']
admin.site.register(NewsItem, NewsItemAdmin)
class EventAdmin(admin.ModelAdmin):
model = Event
list_display = ['title', 'location', 'date_and_time']
admin.site.register(Event, EventAdmin)
class PostAdmin(admin.ModelAdmin):
model = GenericPost
list_display = ['title', 'category', 'publication_date']
admin.site.register(GenericPost, PostAdmin)
class CategoryAdmin(admin.ModelAdmin):
model = PostCategory
list_display = ['name', 'added_date']
admin.site.register(PostCategory, CategoryAdmin)
class ImageAdmin(admin.ModelAdmin):
model = Image
    list_display = [images_thumbnail, 'alt', 'image_caption', 'image', ]
admin.site.register(Image, ImageAdmin)
class TagAdmin(admin.ModelAdmin):
model = Tag
list_display = ['name', 'added_date']
admin.site.register(Tag, TagAdmin)
|
ZmG/trywsk
|
base/admin.py
|
Python
|
apache-2.0
| 1,654 | 0.009069 |
"""
:mod:`imports` is here to resolve import statements and return the
modules/classes/functions/whatever, which they stand for. However there's not
any actual importing done. This module is about finding modules in the
filesystem. This can be quite tricky sometimes, because Python imports are not
always that simple.
This module uses imp for python up to 3.2 and importlib for python 3.3 on; the
correct implementation is delegated to _compatibility.
This module also supports import autocompletion, which means to complete
statements like ``from datetim`` (cursor at the end would return ``datetime``).
"""
from __future__ import with_statement
import os
import pkgutil
import sys
import itertools
from jedi._compatibility import find_module
from jedi import modules
from jedi import common
from jedi import debug
from jedi.parser import representation as pr
from jedi import cache
import builtin
import evaluate
# for debugging purposes only
imports_processed = 0
class ModuleNotFound(Exception):
pass
class ImportPath(pr.Base):
"""
An ImportPath is the path of a `pr.Import` object.
"""
class GlobalNamespace(object):
def __init__(self):
self.line_offset = 0
GlobalNamespace = GlobalNamespace()
def __init__(self, import_stmt, is_like_search=False, kill_count=0,
direct_resolve=False, is_just_from=False):
self.import_stmt = import_stmt
self.is_like_search = is_like_search
self.direct_resolve = direct_resolve
self.is_just_from = is_just_from
self.is_partial_import = bool(max(0, kill_count))
path = import_stmt.get_parent_until().path
self.file_path = os.path.dirname(path) if path is not None else None
# rest is import_path resolution
self.import_path = []
if import_stmt.from_ns:
self.import_path += import_stmt.from_ns.names
if import_stmt.namespace:
if self._is_nested_import() and not direct_resolve:
self.import_path.append(import_stmt.namespace.names[0])
else:
self.import_path += import_stmt.namespace.names
for i in range(kill_count + int(is_like_search)):
self.import_path.pop()
def __repr__(self):
return '<%s: %s>' % (type(self).__name__, self.import_stmt)
def _is_nested_import(self):
"""
This checks for the special case of nested imports, without aliases and
from statement::
import foo.bar
"""
return not self.import_stmt.alias and not self.import_stmt.from_ns \
and len(self.import_stmt.namespace.names) > 1 \
and not self.direct_resolve
def _get_nested_import(self, parent):
"""
See documentation of `self._is_nested_import`.
Generates an Import statement, that can be used to fake nested imports.
"""
i = self.import_stmt
# This is not an existing Import statement. Therefore, set position to
# 0 (0 is not a valid line number).
zero = (0, 0)
names = i.namespace.names[1:]
n = pr.Name(i._sub_module, names, zero, zero, self.import_stmt)
new = pr.Import(i._sub_module, zero, zero, n)
new.parent = parent
debug.dbg('Generated a nested import: %s' % new)
return new
def get_defined_names(self, on_import_stmt=False):
names = []
for scope in self.follow():
if scope is ImportPath.GlobalNamespace:
if self._is_relative_import() == 0:
names += self._get_module_names()
if self.file_path is not None:
path = os.path.abspath(self.file_path)
for i in range(self.import_stmt.relative_count - 1):
path = os.path.dirname(path)
names += self._get_module_names([path])
if self._is_relative_import():
rel_path = self._get_relative_path() + '/__init__.py'
with common.ignored(IOError):
m = modules.Module(rel_path)
names += m.parser.module.get_defined_names()
else:
if on_import_stmt and isinstance(scope, pr.Module) \
and scope.path.endswith('__init__.py'):
pkg_path = os.path.dirname(scope.path)
paths = self._namespace_packages(pkg_path, self.import_path)
names += self._get_module_names([pkg_path] + paths)
if self.is_just_from:
# In the case of an import like `from x.` we don't need to
# add all the variables.
if ['os'] == self.import_path and not self._is_relative_import():
# os.path is a hardcoded exception, because it's a
# ``sys.modules`` modification.
p = (0, 0)
names.append(pr.Name(self.GlobalNamespace, [('path', p)],
p, p, self.import_stmt))
continue
for s, scope_names in evaluate.get_names_of_scope(scope,
include_builtin=False):
for n in scope_names:
if self.import_stmt.from_ns is None \
or self.is_partial_import:
# from_ns must be defined to access module
# values plus a partial import means that there
# is something after the import, which
# automatically implies that there must not be
# any non-module scope.
continue
names.append(n)
return names
def _get_module_names(self, search_path=None):
"""
Get the names of all modules in the search_path. This means file names
and not names defined in the files.
"""
def generate_name(name):
return pr.Name(self.GlobalNamespace, [(name, inf_pos)],
inf_pos, inf_pos, self.import_stmt)
names = []
inf_pos = float('inf'), float('inf')
# add builtin module names
if search_path is None:
names += [generate_name(name) for name in sys.builtin_module_names]
if search_path is None:
search_path = self._sys_path_with_modifications()
for module_loader, name, is_pkg in pkgutil.iter_modules(search_path):
names.append(generate_name(name))
return names
def _sys_path_with_modifications(self):
# If you edit e.g. gunicorn, there will be imports like this:
# `from gunicorn import something`. But gunicorn is not in the
# sys.path. Therefore look if gunicorn is a parent directory, #56.
in_path = []
if self.import_path:
parts = self.file_path.split(os.path.sep)
for i, p in enumerate(parts):
if p == self.import_path[0]:
new = os.path.sep.join(parts[:i])
in_path.append(new)
module = self.import_stmt.get_parent_until()
return in_path + modules.sys_path_with_modifications(module)
def follow(self, is_goto=False):
"""
Returns the imported modules.
"""
if evaluate.follow_statement.push_stmt(self.import_stmt):
# check recursion
return []
if self.import_path:
try:
scope, rest = self._follow_file_system()
except ModuleNotFound:
debug.warning('Module not found: ' + str(self.import_stmt))
evaluate.follow_statement.pop_stmt()
return []
scopes = [scope]
scopes += remove_star_imports(scope)
# follow the rest of the import (not FS -> classes, functions)
if len(rest) > 1 or rest and self.is_like_search:
scopes = []
if ['os', 'path'] == self.import_path[:2] \
and not self._is_relative_import():
# This is a huge exception, we follow a nested import
# ``os.path``, because it's a very important one in Python
# that is being achieved by messing with ``sys.modules`` in
# ``os``.
scopes = evaluate.follow_path(iter(rest), scope, scope)
elif rest:
if is_goto:
scopes = itertools.chain.from_iterable(
evaluate.find_name(s, rest[0], is_goto=True)
for s in scopes)
else:
scopes = itertools.chain.from_iterable(
evaluate.follow_path(iter(rest), s, s)
for s in scopes)
scopes = list(scopes)
if self._is_nested_import():
scopes.append(self._get_nested_import(scope))
else:
scopes = [ImportPath.GlobalNamespace]
debug.dbg('after import', scopes)
evaluate.follow_statement.pop_stmt()
return scopes
def _is_relative_import(self):
return bool(self.import_stmt.relative_count)
def _get_relative_path(self):
path = self.file_path
for i in range(self.import_stmt.relative_count - 1):
path = os.path.dirname(path)
return path
def _namespace_packages(self, found_path, import_path):
"""
Returns a list of paths of possible ``pkgutil``/``pkg_resources``
namespaces. If the package is no "namespace package", an empty list is
returned.
"""
def follow_path(directories, paths):
try:
directory = next(directories)
except StopIteration:
return paths
else:
deeper_paths = []
for p in paths:
new = os.path.join(p, directory)
if os.path.isdir(new) and new != found_path:
deeper_paths.append(new)
return follow_path(directories, deeper_paths)
with open(os.path.join(found_path, '__init__.py')) as f:
content = f.read()
# these are strings that need to be used for namespace packages,
# the first one is ``pkgutil``, the second ``pkg_resources``.
options = 'declare_namespace(__name__)', 'extend_path(__path__'
if options[0] in content or options[1] in content:
# It is a namespace, now try to find the rest of the modules.
return follow_path(iter(import_path), sys.path)
return []
def _follow_file_system(self):
if self.file_path:
sys_path_mod = list(self._sys_path_with_modifications())
module = self.import_stmt.get_parent_until()
if not module.has_explicit_absolute_import:
# If the module explicitly asks for absolute imports,
# there's probably a bogus local one.
sys_path_mod.insert(0, self.file_path)
# First the sys path is searched normally and if that doesn't
# succeed, try to search the parent directories, because sometimes
# Jedi doesn't recognize sys.path modifications (like py.test
# stuff).
old_path, temp_path = self.file_path, os.path.dirname(self.file_path)
while old_path != temp_path:
sys_path_mod.append(temp_path)
old_path, temp_path = temp_path, os.path.dirname(temp_path)
else:
sys_path_mod = list(modules.get_sys_path())
return self._follow_sys_path(sys_path_mod)
def _follow_sys_path(self, sys_path):
"""
Find a module with a path (of the module, like usb.backend.libusb10).
"""
def follow_str(ns_path, string):
debug.dbg('follow_module', ns_path, string)
path = None
if ns_path:
path = ns_path
elif self._is_relative_import():
path = self._get_relative_path()
global imports_processed
imports_processed += 1
if path is not None:
importing = find_module(string, [path])
else:
debug.dbg('search_module', string, self.file_path)
                # Override the sys.path. It only works well that way.
# Injecting the path directly into `find_module` did not work.
sys.path, temp = sys_path, sys.path
try:
importing = find_module(string)
finally:
sys.path = temp
return importing
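        # follow_str()/find_module() results are treated below as a 3-tuple,
        # roughly (module file object, module path, is-package flag); indices
        # [1] and [2] drive the path and package handling further down.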
current_namespace = (None, None, None)
# now execute those paths
rest = []
for i, s in enumerate(self.import_path):
try:
current_namespace = follow_str(current_namespace[1], s)
except ImportError:
_continue = False
if self._is_relative_import() and len(self.import_path) == 1:
# follow `from . import some_variable`
rel_path = self._get_relative_path()
with common.ignored(ImportError):
current_namespace = follow_str(rel_path, '__init__')
elif current_namespace[2]: # is a package
for n in self._namespace_packages(current_namespace[1],
self.import_path[:i]):
try:
current_namespace = follow_str(n, s)
if current_namespace[1]:
_continue = True
break
except ImportError:
pass
if not _continue:
if current_namespace[1]:
rest = self.import_path[i:]
break
else:
raise ModuleNotFound('The module you searched has not been found')
path = current_namespace[1]
is_package_directory = current_namespace[2]
f = None
if is_package_directory or current_namespace[0]:
# is a directory module
if is_package_directory:
path += '/__init__.py'
with open(path) as f:
source = f.read()
else:
source = current_namespace[0].read()
current_namespace[0].close()
if path.endswith('.py'):
f = modules.Module(path, source)
else:
f = builtin.BuiltinModule(path=path)
else:
f = builtin.BuiltinModule(name=path)
return f.parser.module, rest
def strip_imports(scopes):
"""
Here we strip the imports - they don't get resolved necessarily.
Really used anymore? Merge with remove_star_imports?
"""
result = []
for s in scopes:
if isinstance(s, pr.Import):
result += ImportPath(s).follow()
else:
result.append(s)
return result
@cache.cache_star_import
def remove_star_imports(scope, ignored_modules=()):
"""
Check a module for star imports:
>>> from module import *
and follow these modules.
"""
modules = strip_imports(i for i in scope.get_imports() if i.star)
new = []
for m in modules:
if m not in ignored_modules:
new += remove_star_imports(m, modules)
modules += new
# Filter duplicate modules.
return set(modules)
|
SamuelDSR/YouCompleteMe-Win7-GVIM
|
third_party/jedi/jedi/imports.py
|
Python
|
gpl-3.0
| 15,994 | 0.000813 |
from __future__ import division
from collections import deque
class MovingAverage(object):
def __init__(self, size):
"""
Initialize your data structure here.
:type size: int
"""
self.queue = deque(maxlen=size)
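        # deque(maxlen=size) discards the oldest element automatically once
        # the window is full, so next() never has to evict values by hand.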
def next(self, val):
"""
:type val: int
:rtype: float
"""
self.queue.append(val)
return sum(self.queue) / len(self.queue)
# Given a stream of integers and a window size,
# calculate the moving average of all integers in the sliding window.
if __name__ == '__main__':
m = MovingAverage(3)
assert m.next(1) == 1
assert m.next(10) == (1 + 10) / 2
assert m.next(3) == (1 + 10 + 3) / 3
assert m.next(5) == (10 + 3 + 5) / 3
|
keon/algorithms
|
algorithms/queues/moving_average.py
|
Python
|
mit
| 749 | 0 |
# -*- coding: utf-8 -*-
"""Racndicmd setup.py."""
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import os
import sys
class Tox(TestCommand):
"""Tox."""
user_options = [('tox-args=', 'a', "Arguments to pass to tox")]
def initialize_options(self):
"""Init."""
TestCommand.initialize_options(self)
self.tox_args = None
def finalize_options(self):
"""Finalize."""
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
"""Run."""
import tox
import shlex
if self.tox_args:
errno = tox.cmdline(args=shlex.split(self.tox_args))
else:
errno = tox.cmdline(self.tox_args)
sys.exit(errno)
classifiers = [
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: System :: Networking",
"Topic :: System :: Networking :: Monitoring",
"Topic :: Utilities",
]
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.rst')) as _file:
README = _file.read()
requires = []
with open('requirements.txt', 'w') as _file:
_file.write('\n'.join(requires))
EXCLUDE_FROM_PACKAGES = []
setup(
name="rancidcmd",
version="0.1.12",
description='RANCID Command Wrapper.',
long_description=README,
author='Toshikatsu Murakoshi',
author_email='mtoshi.g@gmail.com',
url='https://github.com/mtoshi/rancidcmd',
license='MIT',
classifiers=classifiers,
packages=find_packages(exclude=EXCLUDE_FROM_PACKAGES),
py_modules=['rancidcmd'],
install_requires=requires,
include_package_data=True,
tests_require=['tox'],
cmdclass={'test': Tox},
)
|
mtoshi/rancidcmd
|
setup.py
|
Python
|
mit
| 2,154 | 0 |
from .company import Company
from .contact import Contact
from .deal import Deal
from .note import Note
from .requester import Requester
class AgileCRM:
def __init__(self, domain, email, api_key):
requester = Requester(domain, email, api_key)
self.contact = Contact(requester=requester)
self.company = Company(requester=requester)
self.deal = Deal(requester=requester)
self.note = Note(requester=requester)
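# A minimal usage sketch (the domain, e-mail and API key below are hypothetical
# placeholders; the concrete methods exposed by the Contact/Company/Deal/Note
# wrappers live in their own modules and are not shown here):
#
#   crm = AgileCRM(domain='example', email='user@example.com', api_key='secret')
#   crm.contact   # Contact wrapper bound to the shared Requester
#   crm.deal      # Deal wrapper sharing the same authenticated session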
|
rahmonov/agile-crm-python
|
agilecrm/client.py
|
Python
|
mit
| 454 | 0 |
#!/usr/bin/env python3
#!/usr/bin/python
# https://en.wikipedia.org/wiki/Matplotlib
import numpy
import matplotlib.pyplot as plt
from numpy.random import rand
a = rand(100)
b = rand(100)
plt.scatter(a, b)
plt.show()
|
jtraver/dev
|
python3/matplotlib/plot1.py
|
Python
|
mit
| 220 | 0.004545 |
'''
An MLP algorithm example using the TensorFlow library.
This example uses a randomly generated data distribution
(http://cs231n.github.io/neural-networks-case-study/)
Code references:
https://github.com/shouvikmani/Tensorflow-Deep-Learning-Tutorial/blob/master/tutorial.ipynb
https://github.com/aymericdamien/TensorFlow-Examples/
http://cs231n.github.io/neural-networks-case-study/
The source code was modified by S.W. Oh.
'''
from __future__ import print_function
import tensorflow as tf
import numpy as np
from matplotlib import pyplot as plt
# import Dense (fully-connected) layer
from util.layer import Dense
###### Generate 2D spiral random data and Plot ###################################
N = 200 # number of points per class
D = 2 # dimensionality
K = 4 # number of classes
X_train = np.zeros((N*K,D)) # data matrix (each row = single example)
y_train = np.zeros((N*K,K)) # class labels
yc = np.zeros(N*K, dtype='uint8')
for j in range(K):
ix = range(N*j,N*(j+1))
r = np.linspace(0.0,1,N) # radius
t = np.linspace(j*4.8,(j+1)*4.8,N) + np.random.randn(N)*0.2 # theta
X_train[ix] = np.c_[r*np.sin(t), r*np.cos(t)]
y_train[ix,j] = 1
yc[ix] = j
# lets visualize the data:
plt.scatter(X_train[:, 0], X_train[:, 1], c=yc, s=40, cmap=plt.cm.Spectral)
plt.show()
# Random shuffle
perm = np.random.permutation(len(y_train))
X_train = X_train[perm,:]
y_train = y_train[perm,:]
yc = yc[perm]
# Parameters
learning_rate = 0.01
training_epochs = 500
batch_size = 10
display_step = 1
###### Build graph ######################################################
# Place holders
x = tf.placeholder(tf.float32, [None, 2]) # 2 dimensional input
y = tf.placeholder(tf.float32, [None, 4]) # 4 classes
# Construct MLP with two hidden layer
h = Dense(x, [2,64], 'ih')
h = tf.nn.relu(h)
h = Dense(h, [64,64], 'hh')
h = tf.nn.relu(h)
logit = Dense(h, [64,4], 'hl')
pred = tf.nn.softmax(logit) # Softmax
# Directly compute loss from logit (to ensure stability and avoid overflow)
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=logit, labels=y))
# Define optimizer and train_op
train_op = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)
###### Start Training ###################################################
# Open a Session
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
# Training cycle
for epoch in range(training_epochs):
avg_cost = 0.
total_batch = int(len(y_train)/batch_size)
# Loop over all batches
for i in range(total_batch):
batch_xs = X_train[i:i+batch_size,:]
batch_ys = y_train[i:i+batch_size,:]
# Run optimization op (backprop) and cost op (to get loss value)
_, c = sess.run([train_op, cost], feed_dict={x: batch_xs, y: batch_ys})
# Compute average loss
avg_cost += c / total_batch
# Display logs per epoch step
if (epoch+1) % display_step == 0:
print("Epoch:", '%04d' % (epoch+1), "cost=", "{:.9f}".format(avg_cost))
print("Optimization Finished!")
# Visualize Dicision boundary
h = 0.02
x_min, x_max = X_train[:, 0].min() - 1, X_train[:, 0].max() + 1
y_min, y_max = X_train[:, 1].min() - 1, X_train[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
np.arange(y_min, y_max, h))
Z = sess.run(pred, feed_dict={x: np.c_[xx.ravel(), yy.ravel()]})
Z = np.argmax(Z, axis=1)
Z = Z.reshape(xx.shape)
fig = plt.figure()
plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral, alpha=0.8)
plt.scatter(X_train[:, 0], X_train[:, 1], c=yc, s=40, cmap=plt.cm.Spectral)
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())
plt.show()
|
woozzu/tf_tutorials
|
03_MLP_spiral2D.py
|
Python
|
mit
| 3,767 | 0.009557 |
# Copyright 2016 Palo Alto Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module implements mock classes for minemeld.traced tests
"""
import gevent
import gevent.event
import logging
from minemeld.traced.storage import TableNotFound
LOG = logging.getLogger(__name__)
CLOCK = -1
def _get_clock():
global CLOCK
CLOCK += 1
return CLOCK
MOCK_TABLES = []
class MockTable(object):
def __init__(self, name, create_if_missing=True):
self.name = name
self.create_if_missing = create_if_missing
self.last_used = None
self.refs = []
self.db_open = True
self.db = {}
self.max_counter = -1
def add_reference(self, refid):
self.refs.append(refid)
def remove_reference(self, refid):
try:
self.refs.remove(refid)
except ValueError:
pass
def ref_count(self):
return len(self.refs)
def put(self, key, value):
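        # Keys are stored as the caller-supplied key suffixed with a
        # zero-padded hex counter, so plain lexicographic ordering matches
        # insertion order and backwards_iterator() can filter and
        # reverse-sort them directly.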
self.last_used = _get_clock()
self.max_counter += 1
new_max_counter = '%016x' % self.max_counter
self.db[key+new_max_counter] = value
def backwards_iterator(self, timestamp, counter):
starting_key = '%016x%016x' % (timestamp, counter)
items = [[k, v] for k, v in self.db.iteritems() if k <= starting_key]
items = sorted(items, cmp=lambda x, y: cmp(x[0], y[0]), reverse=True)
return items
def close(self):
self.db_open = False
@staticmethod
def oldest_table():
tables = [t.name for t in MOCK_TABLES]
LOG.debug(tables)
if len(tables) == 0:
return None
return sorted(tables)[0]
def table_factory(name, create_if_missing=True):
table = next((t for t in MOCK_TABLES if t.name == name), None)
if table is not None:
return table
if not create_if_missing:
raise TableNotFound()
mt = MockTable(name, create_if_missing=create_if_missing)
MOCK_TABLES.append(mt)
return mt
def table_cleanup():
global MOCK_TABLES
MOCK_TABLES = []
class MockStore(object):
def __init__(self, config=None):
if config is None:
config = {}
self.config = config
self.writes = []
self.db = {}
self.counter = 0
self.release_alls = []
def write(self, timestamp, log):
self.writes.append({
'timestamp': timestamp,
'log': log
})
self.db['%016x%016x' % (timestamp, self.counter)] = log
self.counter += 1
def iterate_backwards(self, ref, timestamp, counter):
starting_key = '%016x%016x' % (timestamp, counter)
items = [[k, v] for k, v in self.db.iteritems() if k <= starting_key]
items = sorted(items, cmp=lambda x, y: cmp(x[0], y[0]), reverse=True)
for c, i in enumerate(items):
if c % 1 == 0:
yield {'msg': 'test message'}
yield {'timestamp': i[0], 'log': i[1]}
def release_all(self, ref):
self.release_alls.append(ref)
def store_factory(config=None):
return MockStore(config=config)
MOCK_QUERIES = []
class MockQuery(gevent.Greenlet):
def __init__(self, store, query, timestamp, counter,
num_lines, uuid, redis_config):
self.store = store
self.query = query
self.timestamp = timestamp
self.counter = counter
self.num_lines = num_lines
self.uuid = uuid
self.redis_config = redis_config
self.finish_event = gevent.event.Event()
super(MockQuery, self).__init__()
def kill(self):
LOG.debug("%s killed", self.uuid)
super(MockQuery, self).kill()
def _run(self):
LOG.debug("%s started", self.uuid)
self.finish_event.wait()
LOG.debug("%s finished", self.uuid)
class MockEQuery(gevent.Greenlet):
def __init__(self, store, query, timestamp, counter,
num_lines, uuid, redis_config):
self.store = store
self.query = query
self.timestamp = timestamp
self.counter = counter
self.num_lines = num_lines
self.uuid = uuid
self.redis_config = redis_config
self.finish_event = gevent.event.Event()
super(MockEQuery, self).__init__()
def kill(self):
LOG.debug("%s killed", self.uuid)
super(MockEQuery, self).kill()
def _run(self):
LOG.debug("%s started", self.uuid)
self.finish_event.wait()
raise RuntimeError("BAD BAD QUERY!")
def query_factory(store, query, timestamp, counter,
num_lines, uuid, redis_config):
if query == "bad":
mqf = MockEQuery
else:
mqf = MockQuery
mq = mqf(store, query, timestamp, counter,
num_lines, uuid, redis_config)
MOCK_QUERIES.append(mq)
return mq
def query_cleanup():
global MOCK_QUERIES
MOCK_QUERIES = []
|
PaloAltoNetworks/minemeld-core
|
tests/traced_mock.py
|
Python
|
apache-2.0
| 5,455 | 0.0033 |
from unittest import TestCase
from compile import add_jmp_opcodes, break_to_atoms
from compile.jmp_add import travel, shuffle
from opcode_ import PRT
class TestJMPAdd(TestCase):
def test_added_init_jmp(self):
node_chain = PRT.build_from_string('u', None)
atoms = break_to_atoms(node_chain)
atoms = add_jmp_opcodes(atoms)
self.assertEqual(len(atoms), 2)
self.assertEqual(len(atoms[0]), 2)
self.assertEqual(len(atoms[1]), 2)
def test_nothing_happend_on_one_and_no_jmp_init(self):
atom = PRT.build_from_string('i', None)
atoms = break_to_atoms(atom)
atoms = add_jmp_opcodes(
atoms,
first_step_is_jmp=False)
self.assertEqual(atoms[0][0], atom[0])
self.assertEqual(atoms[0][1], atom[1])
self.assertEqual(len(atoms), 1)
self.assertEqual(len(atoms[0]), 2)
def test_first_jmp_points_to_first_node(self):
atom = PRT.build_from_string('o', None)
first_node = atom[0]
atoms = break_to_atoms(atom)
atoms = add_jmp_opcodes(atoms)
self.assertEqual(atoms[0][0].target_uuid,
first_node.uuid)
def test_reach_to_end(self):
node_chain = PRT.build_from_string('T', None) + \
PRT.build_from_string('h', None) + \
PRT.build_from_string('e', None) + \
PRT.build_from_string('A', None) + \
PRT.build_from_string('n', None) + \
PRT.build_from_string('o', None) + \
PRT.build_from_string('s', None) + \
PRT.build_from_string('m', None) + \
PRT.build_from_string('i', None) + \
PRT.build_from_string('c', None)
last = node_chain[-1]
atoms = break_to_atoms(node_chain)
atoms = add_jmp_opcodes(atoms)
atom = atoms[0]
for _ in range(len(node_chain) - 1):
next_atom = travel(atoms, atom)
if next_atom:
atom = next_atom
else:
self.fail("Chain ended too soon")
self.assertIn(last, atom)
def test_reach_to_end_with_shuffle(self):
# TODO why some are NC of NC and some NC of NODs?
node_chain = PRT.build_from_string('T', None) + \
PRT.build_from_string('h', None) + \
PRT.build_from_string('e', None) + \
PRT.build_from_string('A', None) + \
PRT.build_from_string('n', None) + \
PRT.build_from_string('o', None) + \
PRT.build_from_string('s', None) + \
PRT.build_from_string('m', None) + \
PRT.build_from_string('i', None) + \
PRT.build_from_string('c', None)
last = node_chain[-1]
atoms = break_to_atoms(node_chain)
atoms = add_jmp_opcodes(atoms)
atoms = shuffle(atoms)
atom = atoms[0]
for _ in range(len(node_chain) - 1):
next_atom = travel(atoms, atom)
if next_atom:
atom = next_atom
else:
self.fail("Chain ended too soon")
self.assertIn(last, atom)
|
TheAnosmic/cheetahs_byte
|
tests/test_compile/test_jmp_add.py
|
Python
|
gpl-2.0
| 3,283 | 0 |
from django.contrib import admin
from .models import Album, Song
# Register your models here.
admin.site.register(Album)
admin.site.register(Song)
|
bunnydev26/django_newboston
|
music/admin.py
|
Python
|
gpl-3.0
| 149 | 0 |
#!/usr/bin/env python
from traits.api import HasStrictTraits, Float
from mock import Mock
class MyClass(HasStrictTraits):
number = Float(2.0)
def add_to_number(self, value):
""" Add the value to `number`. """
self.number += value
my_class = MyClass()
# Using my_class.add_to_number = Mock() will fail.
# But setting the mock on the instance `__dict__` works.
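# HasStrictTraits refuses assignment to attributes that were not declared as
# traits, so rebinding the bound method directly raises a TraitError; stashing
# the Mock in the instance __dict__ bypasses that check while still shadowing
# the class-level method on attribute lookup.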
my_class.__dict__['add_to_number'] = Mock()
# We can now use the mock in our tests.
my_class.add_to_number(42)
print my_class.add_to_number.call_args_list
|
marshallmcdonnell/interactive_plotting
|
Traits/manual/testing_hasstricttraits.py
|
Python
|
mit
| 547 | 0.005484 |
#
# Copyright (C) 2014 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
class ModelBase(object):
pass
class ModelUser(ModelBase):
def __init__(self, name, email=None, username=None):
self.name = name
self.email = email
self.username = username
def is_in_list(self, users):
if self.name is not None and self.name in users:
return True
if self.username is not None and self.username in users:
return True
return False
@staticmethod
def from_json(data):
return ModelUser(data.get("name", None),
data.get("email", None),
data.get("username", None))
class ModelFile(ModelBase):
ACTION_MODIFIED = "MODIFIED"
ACTION_ADDED = "ADDED"
ACTION_DELETED = "DELETED"
ACTION_RENAMED = "RENAMED"
def __init__(self, path, action):
self.path = path
self.action = action
@staticmethod
def from_json(data):
return ModelFile(data.get("file", None),
data.get("type", None))
class ModelApproval(ModelBase):
ACTION_VERIFIED = "Verified"
ACTION_REVIEWED = "Code-Review"
ACTION_WORKFLOW = "Workflow"
def __init__(self, action, value, description, grantedOn=None, user=None):
self.action = action
self.value = value
self.description = description
if grantedOn is not None:
self.grantedOn = int(grantedOn)
else:
self.grantedOn = None
self.user = user
def is_user_in_list(self, users):
if self.user is None:
return False
return self.user.is_in_list(users)
def is_newer_than(self, then):
if self.grantedOn is None:
return False
if self.grantedOn > then:
return True
return False
def is_nack(self):
if self.value < 0:
return True
return False
def is_reviewer_nack(self):
if self.action != ModelApproval.ACTION_REVIEWED:
return False
if self.value < 0:
return True
return False
def get_age(self, now=None):
if now is None:
now = time.time()
return now - self.grantedOn
@staticmethod
def from_json(data):
user = None
if data.get("by", None):
user = ModelUser.from_json(data["by"])
return ModelApproval(data.get("type", None),
int(data.get("value", 0)),
data.get("description", None),
data.get("grantedOn", None),
user)
class ModelComment(ModelBase):
def __init__(self, message, file, line, reviewer):
self.message = message
self.file = file
self.line = line
self.reviewer = reviewer
def is_reviewer_in_list(self, users):
if self.reviewer is None:
return False
return self.reviewer.is_in_list(users)
@staticmethod
def from_json(data):
user = None
if data.get("reviewer", None):
user = ModelUser.from_json(data["reviewer"])
return ModelComment(data.get("message", ""),
data.get("file", None),
data.get("line", 0),
user)
class ModelPatch(ModelBase):
def __init__(self, number, revision, ref, uploader, createdOn, approvals=[], files=[], comments=[]):
self.number = number
self.revision = revision
self.ref = ref
self.uploader = uploader
self.createdOn = createdOn
self.approvals = approvals
self.files = files
self.comments = comments
@staticmethod
def is_user_in_list(users, user):
if user.username is not None and user.username in users:
return True
if user.email is not None and user.email in users:
return True
return False
def is_reviewer_nacked(self):
for approval in self.approvals:
if approval.is_reviewer_nack():
return True
return False
def is_nacked(self):
for approval in self.approvals:
if approval.is_nack():
return True
return False
def get_age(self, now):
if len(self.approvals) == 0:
return now - self.createdOn
age = 0
for approval in self.approvals:
thisage = now - approval.grantedOn
if thisage > age:
age = thisage
return age
def has_other_reviewers(self, excludeusers):
'''Determine if the patch has been reviewed by any
        users that are not in 'excludeusers'.'''
hasReviewers = False
for approval in self.approvals:
if not approval.is_user_in_list(excludeusers):
hasReviewers = True
return hasReviewers
def has_reviewers(self, includeusers):
'''Determine if the patch has been reviewed by any
        users that are in 'includeusers'.'''
hasReviewers = False
for approval in self.approvals:
if approval.user is None:
continue
if approval.is_user_in_list(includeusers):
hasReviewers = True
return hasReviewers
@staticmethod
def from_json(data):
files = []
for f in data.get("files", []):
files.append(ModelFile.from_json(f))
approvals = []
for f in data.get("approvals", []):
approvals.append(ModelApproval.from_json(f))
user = None
if "uploader" in data:
user = ModelUser.from_json(data["uploader"])
comments = []
for c in data.get("comments", []):
comments.append(ModelComment.from_json(c))
return ModelPatch(int(data.get("number", 0)),
data.get("revision"),
data.get("ref"),
user,
data.get("createdOn"),
approvals,
files,
comments)
class ModelChange(ModelBase):
def __init__(self, project, branch, topic, id, number, subject, owner, url, createdOn, lastUpdated, status, patches = [], comments = []):
self.project = project
self.branch = branch
self.topic = topic
self.id = id
self.number = number
self.subject = subject
self.owner = owner
self.url = url
if createdOn is not None:
self.createdOn = int(createdOn)
else:
self.createdOn = None
if lastUpdated is not None:
self.lastUpdated = int(lastUpdated)
else:
self.lastUpdated = None
self.status = status
self.patches = patches
self.comments = comments
def get_current_patch(self):
if len(self.patches) == 0:
return None
return self.patches[len(self.patches) - 1]
def get_first_patch(self):
if len(self.patches) == 0:
return None
return self.patches[0]
def get_reviewer_not_nacked_patch(self):
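        # Walk the patches newest-to-oldest and return the oldest patch
        # uploaded after the most recent reviewer NACK (the very first patch
        # when nothing is NACKed, None when the newest patch itself is NACKed).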
prev = None
for patch in reversed(self.patches):
if patch.is_reviewer_nacked():
break
prev = patch
return prev
def get_current_age(self):
patch = self.get_current_patch()
return patch.get_age(time.time())
def get_first_age(self):
patch = self.get_first_patch()
return patch.get_age(time.time())
def get_reviewer_not_nacked_age(self):
patch = self.get_reviewer_not_nacked_patch()
if patch is None:
return 0
return patch.get_age(time.time())
@staticmethod
def is_user_in_list(users, user):
if user.username is not None and user.username in users:
return True
if user.email is not None and user.email in users:
return True
return False
def has_any_other_reviewers(self, excludeusers):
'''Determine if any patch in the change has been
        reviewed by any user not in the list of 'excludeusers'.'''
hasReviewers = False
for patch in self.patches:
if patch.has_other_reviewers(excludeusers):
hasReviewers = True
return hasReviewers
def has_any_reviewers(self, includeusers):
'''Determine if any patch in the change has been
        reviewed by any user in the list of 'includeusers'.'''
hasReviewers = False
for patch in self.patches:
if patch.has_reviewers(includeusers):
hasReviewers = True
return hasReviewers
def has_current_reviewers(self, includeusers):
'''Determine if the current patch version has
been reviewed by any of the users in 'includeusers'. '''
patch = self.get_current_patch()
if patch is None:
return False
return patch.has_reviewers(includeusers)
def has_current_other_reviewers(self, excludeusers):
'''Determine if the current patch version has
been reviewed by any of the users not in 'excludeusers'. '''
patch = self.get_current_patch()
if patch is None:
return False
return patch.has_other_reviewers(excludeusers)
def has_owner(self, includeusers):
'''Determine if the change is owned by anyone
        in 'includeusers' list.'''
return self.is_user_in_list(includeusers, self.owner)
@staticmethod
def from_json(data):
patches = []
for p in data.get("patchSets", []):
patches.append(ModelPatch.from_json(p))
user = None
if "owner" in data:
user = ModelUser.from_json(data["owner"])
number = None
if "number" in data:
number = int(data.get("number"))
comments = []
for c in data.get("comments", []):
comments.append(ModelComment.from_json(c))
return ModelChange(data.get("project", None),
data.get("branch", None),
data.get("topic", None),
data.get("id", None),
number,
data.get("subject", None),
user,
data.get("url", None),
data.get("createdOn", None),
data.get("lastUpdated", None),
data.get("status", None),
patches,
comments)
class ModelEvent(ModelBase):
def __init__(self, change, patch, user):
self.change = change
self.patch = patch
self.user = user
def is_user_in_list(self, users):
if self.user is None:
return False
return self.user.is_in_list(users)
@staticmethod
def from_json(data):
if data["type"] == "comment-added":
return ModelEventCommentAdd.from_json(data)
elif data["type"] == "patchset-created":
return ModelEventPatchCreate.from_json(data)
elif data["type"] == "change-merged":
return ModelEventChangeMerge.from_json(data)
elif data["type"] == "change-abandoned":
return ModelEventChangeAbandon.from_json(data)
elif data["type"] == "change-restored":
return ModelEventChangeRestore.from_json(data)
elif data["type"] == "ref-updated":
return ModelEventRefUpdated.from_json(data)
elif data["type"] == "reviewer-added":
return ModelEventReviewerAdded.from_json(data)
elif data["type"] == "topic-changed":
return ModelEventTopicChanged.from_json(data)
else:
raise Exception("Unknown event '%s'" % data["type"])
class ModelEventCommentAdd(ModelEvent):
def __init__(self, change, patch, user, comment, approvals):
ModelEvent.__init__(self, change, patch, user)
self.comment = comment
self.approvals = approvals
@staticmethod
def from_json(data):
change = ModelChange.from_json(data["change"])
patch = ModelPatch.from_json(data["patchSet"])
user = ModelUser.from_json(data["author"])
comment = data["comment"]
approvals = []
for f in data.get("approvals", []):
approvals.append(ModelApproval.from_json(f))
return ModelEventCommentAdd(change, patch, user, comment, approvals)
class ModelEventPatchCreate(ModelEvent):
def __init__(self, change, patch, user):
ModelEvent.__init__(self, change, patch, user)
@staticmethod
def from_json(data):
change = ModelChange.from_json(data["change"])
patch = ModelPatch.from_json(data["patchSet"])
user = ModelUser.from_json(data["uploader"])
return ModelEventPatchCreate(change, patch, user)
class ModelEventChangeMerge(ModelEvent):
def __init__(self, change, patch, user):
ModelEvent.__init__(self, change, patch, user)
@staticmethod
def from_json(data):
change = ModelChange.from_json(data["change"])
patch = ModelPatch.from_json(data["patchSet"])
user = ModelUser.from_json(data["submitter"])
return ModelEventChangeMerge(change, patch, user)
class ModelEventChangeAbandon(ModelEvent):
def __init__(self, change, patch, user):
ModelEvent.__init__(self, change, patch, user)
@staticmethod
def from_json(data):
change = ModelChange.from_json(data["change"])
user = ModelUser.from_json(data["abandoner"])
return ModelEventChangeAbandon(change, None, user)
class ModelEventChangeRestore(ModelEvent):
def __init__(self, change, patch, user):
ModelEvent.__init__(self, change, patch, user)
@staticmethod
def from_json(data):
change = ModelChange.from_json(data["change"])
user = ModelUser.from_json(data["restorer"])
return ModelEventChangeRestore(change, None, user)
class ModelEventReviewerAdded(ModelEvent):
def __init__(self, change, patch, user):
ModelEvent.__init__(self, change, patch, user)
@staticmethod
def from_json(data):
change = ModelChange.from_json(data["change"])
user = ModelUser.from_json(data["reviewer"])
return ModelEventReviewerAdded(change, None, user)
class ModelEventTopicChanged(ModelEvent):
def __init__(self, change, patch, user):
ModelEvent.__init__(self, change, patch, user)
@staticmethod
def from_json(data):
change = ModelChange.from_json(data["change"])
user = ModelUser.from_json(data["changer"])
return ModelEventTopicChanged(change, None, user)
class ModelEventRefUpdated(ModelEvent):
def __init__(self, change, patch, user):
ModelEvent.__init__(self, change, patch, user)
@staticmethod
def from_json(data):
submitter = data.get("submitter", None)
user = None
if submitter is not None:
user = ModelUser.from_json(submitter)
return ModelEventRefUpdated(None, None, user)
|
russellb/gerrymander
|
gerrymander/model.py
|
Python
|
apache-2.0
| 15,825 | 0.000569 |
#!/usr/bin/python
import Adafruit_GPIO as GPIO
import time, os
#print "GETTING GPIO OBJECT"
gpio = GPIO.get_platform_gpio()
#print "SETUP CSID1"
#gpio.setup("CSID1", GPIO.OUT)
#print os.path.exists('/sys/class/gpio/gpio133')
#print "SETUP XIO-P1"
#gpio.setup("XIO-P1", GPIO.IN)
#GPIO.setup("U14_13", GPIO.IN)
#print "READING XIO-P1"
#print "HIGH", gpio.input("XIO-P1")
#gpio.output("CSID1", GPIO.LOW)
#time.sleep(1)
#print "LOW", gpio.input("XIO-P1")
#gpio.output("CSID1", GPIO.HIGH)
#print "HIGH", gpio.input("XIO-P1")
#gpio.output("CSID1", GPIO.LOW)
#print "LOW", gpio.input("XIO-P1")
#this example will test out CHIP XIO-P0 in to XIO-P1
#jumper the pins to test
#
#my test required sudo to work, gpio access requires sudo before changing permissions
#gpio.setup("XIO-P0", GPIO.OUT)
#gpio.setup("XIO-P1", GPIO.IN)
#print "LOW", gpio.input("XIO-P0")
#print "LOW", gpio.input("XIO-P1")
#gpio.output("XIO-P0", GPIO.HIGH)
#print "LOW", gpio.input("XIO-P0")
#print "LOW", gpio.input("XIO-P1")
#time.sleep(4)
#gpio.output("XIO-P0", GPIO.LOW)
#print "LOW", gpio.input("XIO-P0")
#print "LOW", gpio.input("XIO-P1")
#print "CLEANUP"
#gpio.cleanup()
gpio.setup("XIO-P0", GPIO.OUT)
gpio.output("XIO-P0", GPIO.HIGH)
|
fantoms/psychic-octo-spork
|
chipdisable.py
|
Python
|
gpl-3.0
| 1,217 | 0.028759 |
################################################################################
#
# Copyright 2015-2020 Félix Brezo and Yaiza Rubio
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
__author__ = "Felix Brezo, Yaiza Rubio <contacto@i3visio.com>"
__version__ = "2.0"
from osrframework.utils.platforms import Platform
class Teamtreehouse(Platform):
""" A <Platform> object for Teamtreehouse"""
def __init__(self):
self.platformName = "Teamtreehouse"
self.tags = ["social", "news"]
########################
# Defining valid modes #
########################
self.isValidMode = {}
self.isValidMode["phonefy"] = False
self.isValidMode["usufy"] = True
self.isValidMode["searchfy"] = False
######################################
# Search URL for the different modes #
######################################
# Strings with the URL for each and every mode
self.url = {}
#self.url["phonefy"] = "http://anyurl.com//phone/" + "<phonefy>"
self.url["usufy"] = "http://teamtreehouse.com/" + "<usufy>"
#self.url["searchfy"] = "http://anyurl.com/search/" + "<searchfy>"
######################################
# Whether the user needs credentials #
######################################
self.needsCredentials = {}
#self.needsCredentials["phonefy"] = False
self.needsCredentials["usufy"] = False
#self.needsCredentials["searchfy"] = False
#################
# Valid queries #
#################
# Strings that will imply that the query number is not appearing
self.validQuery = {}
# The regular expression '.+' will match any query.
#self.validQuery["phonefy"] = ".*"
self.validQuery["usufy"] = ".+"
#self.validQuery["searchfy"] = ".*"
###################
# Not_found clues #
###################
# Strings that will imply that the query number is not appearing
self.notFoundText = {}
#self.notFoundText["phonefy"] = []
self.notFoundText["usufy"] = ["<title>Sorry, we can't find the page you are looking for</title>"]
#self.notFoundText["searchfy"] = []
#########################
# Fields to be searched #
#########################
self.fieldsRegExp = {}
# Definition of regular expressions to be searched in phonefy mode
#self.fieldsRegExp["phonefy"] = {}
# Example of fields:
#self.fieldsRegExp["phonefy"]["i3visio.location"] = ""
# Definition of regular expressions to be searched in usufy mode
self.fieldsRegExp["usufy"] = {}
# Example of fields:
#self.fieldsRegExp["usufy"]["i3visio.location"] = ""
# Definition of regular expressions to be searched in searchfy mode
#self.fieldsRegExp["searchfy"] = {}
# Example of fields:
#self.fieldsRegExp["searchfy"]["i3visio.location"] = ""
################
# Fields found #
################
# This attribute will be feeded when running the program.
self.foundFields = {}
|
i3visio/osrframework
|
osrframework/wrappers/teamtreehouse.py
|
Python
|
agpl-3.0
| 3,948 | 0.00456 |
import datetime
import json
import logging
from django.contrib import messages
from django.contrib.auth import REDIRECT_FIELD_NAME, get_user_model
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import logout
from django.contrib.sites.shortcuts import get_current_site
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.db import connections
from django.db.models import DateField, F, Q
from django.db.models.functions import Trunc
from django.utils import timezone
from django.utils.translation import ugettext as _
from django.http import Http404, HttpResponse, HttpResponseServerError
from django.shortcuts import get_object_or_404, redirect
from django.views.decorators.cache import cache_page
from djangohelpers.lib import rendered_with, allow_http
from registration.backends.simple.views import RegistrationView
from .forms import OpenDebatesRegistrationForm, VoterForm, QuestionForm, MergeFlagForm
from .models import (Candidate, Category, Debate, Flag, Submission, Vote, Voter,
TopSubmissionCategory, ZipCode, RECENT_EVENTS_CACHE_ENTRY)
from .router import readonly_db
from .utils import (get_ip_address_from_request, get_headers_from_request, choose_sort, sort_list,
vote_needs_captcha, registration_needs_captcha, get_voter)
from opendebates_emails.models import send_email
def health_check(request):
"""
Health check for the load balancer.
"""
logger = logging.getLogger('opendebates.views.health_check')
db_errors = []
for conn_name in connections:
conn = connections[conn_name]
try:
cursor = conn.cursor()
cursor.execute('SELECT 1')
row = cursor.fetchone()
assert row[0] == 1
except Exception as e:
# note that there doesn't seem to be a way to pass a timeout to
# psycopg2 through Django, so this will likely not raise a timeout
# exception
logger.warning('Caught error checking database connection "{0}"'
''.format(conn_name), exc_info=True)
db_errors.append(e)
if not db_errors:
return HttpResponse('OK')
else:
return HttpResponseServerError('Configuration Error')
def state_from_zip(zip):
try:
return ZipCode.objects.get(zip=zip).state
except ZipCode.DoesNotExist:
return ''
def root_redirect(request):
site = get_current_site(request)
# Look for the *next* debate
debate = Debate.objects.annotate(
debate_day=Trunc('debate_time', 'day', output_field=DateField())
).filter(
site=site,
debate_day__gte=datetime.date.today(),
).order_by('debate_time').first()
if debate is None:
# No next debate? Look for the most recently ended debate.
debate = Debate.objects.filter(
site=site,
).order_by('-debate_time').first()
if debate:
return redirect('/%s/' % debate.prefix)
else:
# If no debates at all, redirect to opendebatecoalition.com
return redirect('https://opendebatecoalition.com')
@cache_page(5) # Cache for 5 seconds after rendering
@allow_http("GET")
@rendered_with("opendebates/snippets/recent_activity.html")
def recent_activity(request):
entries = cache.get(RECENT_EVENTS_CACHE_ENTRY.format(request.debate.id), default=[])
return {
"recent_activity": entries
}
@rendered_with("opendebates/list_ideas.html")
def list_ideas(request):
ideas = Submission.objects.filter(category__debate=request.debate)
citations_only = request.GET.get("citations_only")
sort = choose_sort(request, request.GET.get('sort'))
ideas = sort_list(citations_only, sort, ideas)
return {
'ideas': ideas,
'sort': sort,
'url_name': reverse('list_ideas'),
'stashed_submission': request.session.pop(
"opendebates.stashed_submission", None) if request.user.is_authenticated else None,
}
@rendered_with("opendebates/list_ideas.html")
def list_category(request, cat_id):
category = get_object_or_404(Category, id=cat_id, debate=request.debate)
ideas = Submission.objects.filter(category__debate=request.debate, category=cat_id)
citations_only = request.GET.get("citations_only")
sort = choose_sort(request, request.GET.get('sort'))
ideas = sort_list(citations_only, sort, ideas)
return {
'ideas': ideas,
'sort': sort,
'url_name': reverse("list_category", kwargs={'cat_id': cat_id}),
'category': category
}
@rendered_with("opendebates/list_ideas.html")
@allow_http("GET")
def search_ideas(request):
try:
search_term = [q for q in request.GET.getlist("q") if q][0]
except IndexError:
return redirect(reverse('list_ideas'))
ideas = Submission.objects.filter(category__debate=request.debate)
citations_only = request.GET.get("citations_only")
sort = choose_sort(request, request.GET.get('sort'))
ideas = sort_list(citations_only, sort, ideas)
ideas = ideas.search(search_term.replace("%", ""))
return {
'ideas': ideas,
'search_term': search_term,
'sort': sort,
'url_name': reverse('search_ideas'),
}
@rendered_with("opendebates/list_ideas.html")
def category_search(request, cat_id):
ideas = Submission.objects.filter(category__debate=request.debate, category=cat_id)
citations_only = request.GET.get("citations_only")
search_term = request.GET['q']
sort = choose_sort(request, request.GET.get('sort'))
ideas = sort_list(citations_only, sort, ideas)
ideas = ideas.search(search_term.replace("%", ""))
return {
'ideas': ideas,
'search_term': search_term,
'sort': sort,
'url_name': reverse("list_category", kwargs={'cat_id': cat_id})
}
@rendered_with("opendebates/vote.html")
@allow_http("GET", "POST")
def vote(request, id):
"""Despite the name, this is both the page for voting AND the detail page for submissions"""
try:
with readonly_db():
idea = Submission.objects.get(
id=id, category__debate=request.debate,
)
except Submission.DoesNotExist:
raise Http404
if request.method == "POST" and not idea.approved:
# Don't allow voting on removed submissions, but do allow viewing them
raise Http404
if idea.duplicate_of_id:
if not idea.approved:
# Submissions which have been "unmoderated as duplicates"
# should remain completely inaccessible, and should not redirect
raise Http404
url = reverse("show_idea", kwargs={'id': idea.duplicate_of_id})
url = url + "#i"+str(idea.id)
return redirect(url)
if request.method == "GET":
two_other_approved_ideas = list(Submission.objects.filter(
category=idea.category,
duplicate_of=None,
approved=True).exclude(id=idea.id)[:2]) + [None, None]
related1 = two_other_approved_ideas[0]
related2 = two_other_approved_ideas[1]
return {
'idea': idea,
'show_duplicates': True,
'related1': related1,
'related2': related2,
'duplicates': (Submission.objects.filter(
category__debate=request.debate,
approved=True, duplicate_of=idea)
if idea.has_duplicates else []),
}
if not request.debate.allow_voting_and_submitting_questions:
raise Http404
form = VoterForm(request.POST)
if not vote_needs_captcha(request):
form.ignore_captcha()
if not form.is_valid():
if request.is_ajax():
return HttpResponse(
json.dumps({"status": "400", "errors": form.errors}),
content_type="application/json")
messages.error(request, _('You have some errors in your form'))
return {
'form': form,
'idea': idea,
}
state = state_from_zip(form.cleaned_data['zipcode'])
is_fraudulent = False
session_key = request.session.session_key or ''
if session_key and Vote.objects.filter(submission=idea,
sessionid=session_key).exists():
# Django creates a session for both signed-in users and anonymous, so
# we should be able to rely on this. If it is duplicated on a given
# question, it's because they are scripting votes. Behave the same
# way as if it was a normal email duplicate, i.e. don't increment but
# return without error.
is_fraudulent = True
session_voter = get_voter(request)
if session_voter and session_voter['email'] != form.cleaned_data['email']:
# This can only happen with an manually-created POST request.
is_fraudulent = True
if is_fraudulent:
# Pretend like everything is fine, but don't increment the tally or
# create a Vote. Deny attackers any information about how they are failing.
if request.is_ajax():
result = {"status": "200",
"tally": idea.votes if request.debate.show_question_votes else '',
"id": idea.id}
return HttpResponse(
json.dumps(result),
content_type="application/json")
url = reverse("vote", kwargs={'id': id})
return redirect(url)
voter, created = Voter.objects.get_or_create(
email=form.cleaned_data['email'],
defaults=dict(
source=request.COOKIES.get('opendebates.source'),
state=state,
zip=form.cleaned_data['zipcode'],
user=request.user if request.user.is_authenticated else None,
)
)
if not created and voter.zip != form.cleaned_data['zipcode']:
voter.zip = form.cleaned_data['zipcode']
voter.state = state
voter.save()
vote, created = Vote.objects.get_or_create(
submission=idea,
voter=voter,
defaults=dict(
created_at=timezone.now(),
source=request.COOKIES.get('opendebates.source'),
ip_address=get_ip_address_from_request(request),
sessionid=session_key,
request_headers=get_headers_from_request(request),
is_suspicious=False,
is_invalid=False,
)
)
previous_debate_time = request.debate.previous_debate_time
if created:
# update the DB with the real tally
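        # F() expressions push the increments into the UPDATE statement
        # itself, so concurrent votes can't clobber each other with stale
        # read-modify-write tallies.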
Submission.objects.filter(category__debate=request.debate, id=id).update(
votes=F('votes')+1,
current_votes=F('current_votes')+(
1 if previous_debate_time is None or vote.created_at > previous_debate_time
else 0
),
local_votes=F('local_votes')+(
1 if voter.state and voter.state == request.debate.debate_state
else 0)
)
# also calculate a simple increment tally for the client
idea.votes += 1
if 'voter' not in request.session:
request.session['voter'] = {"email": voter.email, "zip": voter.zip}
if request.is_ajax():
result = {"status": "200",
"tally": idea.votes if request.debate.show_question_votes else '',
"id": idea.id}
return HttpResponse(
json.dumps(result),
content_type="application/json")
url = reverse("vote", kwargs={'id': id})
return redirect(url)
@rendered_with("opendebates/list_ideas.html")
@allow_http("GET", "POST")
def questions(request):
# If the user is GETting the list of questions, then redirect to the list_ideas
# page for this Debate.
if request.method == 'GET':
return redirect(reverse("list_ideas"))
if not request.debate.allow_voting_and_submitting_questions:
raise Http404
form = QuestionForm(request.POST, request=request)
if not form.is_valid():
# form = QuestionForm(request=request)
messages.error(request, _('You have some errors in the form'))
return {
'form': form,
'categories': Category.objects.filter(debate=request.debate),
'ideas': [],
}
if not request.user.is_authenticated:
request.session['opendebates.stashed_submission'] = {
"category": request.POST['category'],
"headline": request.POST['headline'],
"question": request.POST['question'],
"citation": request.POST.get("citation"),
}
return redirect('registration_register')
category = request.POST.get('category')
form_data = form.cleaned_data
voter, created = Voter.objects.get_or_create(
email=request.user.email,
defaults=dict(
source=request.COOKIES.get('opendebates.source')
)
)
previous_debate_time = request.debate.previous_debate_time
created_at = timezone.now()
idea = Submission.objects.create(
voter=voter,
category_id=category,
headline=form_data['headline'],
followup=form_data['question'],
idea=(u'%s %s' % (form_data['headline'], form_data['question'])).strip(),
citation=form_data['citation'],
created_at=created_at,
ip_address=get_ip_address_from_request(request),
approved=True,
votes=1,
local_votes=1 if voter.state and voter.state == request.debate.debate_state else 0,
current_votes=(1 if previous_debate_time is None or created_at > previous_debate_time
else 0),
source=request.COOKIES.get('opendebates.source'),
)
Vote.objects.create(
submission=idea,
voter=voter,
source=idea.source,
ip_address=get_ip_address_from_request(request),
sessionid=request.session.session_key or '',
request_headers=get_headers_from_request(request),
created_at=created_at,
is_suspicious=False,
is_invalid=False,
)
send_email("submitted_new_idea", {"idea": idea})
send_email("notify_moderators_submitted_new_idea", {"idea": idea})
url = reverse("vote", kwargs={'id': idea.id})
return redirect(url + "#created=%s" % idea.id)
@rendered_with("opendebates/changelog.html")
def changelog(request):
moderated = Submission.objects.filter(
Q(approved=False) | Q(duplicate_of__isnull=False)
).select_related('duplicate_of').order_by('-moderated_at', '-id')
return {
'moderated': moderated
}
class OpenDebatesRegistrationView(RegistrationView):
form_class = OpenDebatesRegistrationForm
next = None
prefix = None
def get(self, request, *args, **kwargs):
self.next = request.GET.get('next', None)
return super(OpenDebatesRegistrationView, self).get(request)
def get_context_data(self, **kwargs):
data = super(OpenDebatesRegistrationView, self).get_context_data(**kwargs)
if self.next:
data['next'] = self.next
return data
def form_valid(self, form):
User = get_user_model()
if User.objects.filter(email__iexact=form.cleaned_data['email']).exists():
return redirect(reverse('registration_duplicate'))
return super(OpenDebatesRegistrationView, self).form_valid(form)
def register(self, form):
new_user = super(OpenDebatesRegistrationView, self).register(form)
voter, created = Voter.objects.update_or_create(
email=form.cleaned_data['email'],
defaults=dict(
source=self.request.COOKIES.get('opendebates.source'),
state=state_from_zip(form.cleaned_data['zip']),
zip=form.cleaned_data['zip'],
display_name=form.cleaned_data.get('display_name'),
twitter_handle=form.cleaned_data.get('twitter_handle'),
phone_number=form.cleaned_data.get('phone_number'),
user=new_user,
)
)
return new_user
def get_form_kwargs(self):
kwargs = super(OpenDebatesRegistrationView, self).get_form_kwargs()
kwargs.update({
'request': self.request,
})
return kwargs
def get_form(self, form_class=None):
form = super(OpenDebatesRegistrationView, self).get_form(form_class)
if not registration_needs_captcha(self.request):
form.ignore_captcha()
return form
def get_success_url(self, user=None):
if self.request.GET.get('next'):
return self.request.GET.get('next')
else:
return reverse('registration_complete')
def registration_complete(request):
request.session['events.account_created'] = True
return redirect(reverse('list_ideas'))
@rendered_with("registration/registration_duplicate.html")
def registration_duplicate(request):
return {}
@rendered_with("opendebates/list_candidates.html")
@allow_http("GET")
def list_candidates(request):
candidates = Candidate.objects.filter(
debate=request.debate,
).order_by('last_name', 'first_name')
return {
'candidates': candidates,
}
@rendered_with("opendebates/flag_report.html")
@allow_http("GET", "POST")
@login_required
def report(request, id):
if not request.debate.allow_voting_and_submitting_questions and not request.user.is_staff:
raise Http404
idea = get_object_or_404(Submission, pk=id, category__debate=request.debate)
voter = Voter.objects.get(user=request.user)
if request.method == 'POST':
flag, created = Flag.objects.get_or_create(
to_remove=idea,
voter=voter,
duplicate_of=None,
defaults=dict(note=request.POST.get("report_why"))
)
messages.info(request, _(u'This question has been flagged for removal.'))
return redirect(idea)
return {
'idea': idea,
}
@rendered_with("opendebates/flag_merge.html")
@allow_http("GET", "POST")
@login_required
def merge(request, id):
if not request.debate.allow_voting_and_submitting_questions and not request.user.is_staff:
raise Http404
idea = get_object_or_404(Submission, pk=id, category__debate=request.debate)
voter = Voter.objects.get(user=request.user)
if Flag.objects.filter(to_remove=idea, voter=voter).exists():
messages.info(request, _(u'You have already flagged this question.'))
return redirect(idea)
form = MergeFlagForm(idea=idea, voter=voter, data=request.POST or None)
if request.method == 'POST' and form.is_valid():
form.save()
messages.info(request, _(u'This question has been flagged for merging.'))
return redirect(idea)
return {
'idea': idea,
'form': form,
}
@rendered_with("opendebates/top_archive.html")
@allow_http("GET")
def top_archive(request, slug):
category = get_object_or_404(TopSubmissionCategory,
debate=request.debate, slug=slug)
submissions = category.submissions.select_related(
"submission", "submission__voter", "submission__voter__user",
"submission__category").order_by("rank", "created_at").all()
return {
'category': category,
'submissions': submissions,
}
def od_logout(request, next_page=None,
template_name='registration/logged_out.html',
redirect_field_name=REDIRECT_FIELD_NAME,
current_app=None, extra_context=None):
if next_page is not None:
next_page = reverse(next_page)
return logout(request, next_page, template_name, redirect_field_name, extra_context)
|
caktus/django-opendebates
|
opendebates/views.py
|
Python
|
apache-2.0
| 19,833 | 0.001311 |
# Copyright (C) 2017 Equinor ASA, Norway.
#
# The file 'site_config.py' is part of ERT - Ensemble based Reservoir Tool.
#
# ERT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ERT is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
# for more details.
from cwrap import BaseCClass
from ecl.util.util import StringList, Hash
from res import ResPrototype
from res.enkf import ConfigKeys
from res.job_queue import JobQueue, ExtJoblist, Driver
class QueueConfig(BaseCClass):
TYPE_NAME = "queue_config"
_free = ResPrototype("void queue_config_free( queue_config )")
_alloc = ResPrototype("void* queue_config_alloc_load(char*)", bind=False)
_alloc_full = ResPrototype(
"void* queue_config_alloc_full(char*, bool, int, int, queue_driver_enum)",
bind=False,
)
_alloc_content = ResPrototype(
"void* queue_config_alloc(config_content)", bind=False
)
_alloc_local_copy = ResPrototype(
"queue_config_obj queue_config_alloc_local_copy( queue_config )"
)
_has_job_script = ResPrototype("bool queue_config_has_job_script( queue_config )")
_get_job_script = ResPrototype("char* queue_config_get_job_script(queue_config)")
_max_submit = ResPrototype("int queue_config_get_max_submit(queue_config)")
_queue_system = ResPrototype("char* queue_config_get_queue_system(queue_config)")
_queue_driver = ResPrototype(
"driver_ref queue_config_get_queue_driver(queue_config, char*)"
)
_get_num_cpu = ResPrototype("int queue_config_get_num_cpu(queue_config)")
_lsf_queue_opt = ResPrototype("char* queue_config_lsf_queue_name()", bind=False)
_lsf_server_opt = ResPrototype("char* queue_config_lsf_server()", bind=False)
_lsf_resource_opt = ResPrototype("char* queue_config_lsf_resource()", bind=False)
_lsf_driver_opt = ResPrototype("char* queue_config_lsf_driver_name()", bind=False)
def __init__(self, user_config_file=None, config_content=None, config_dict=None):
configs = sum(
[
1
for x in [user_config_file, config_content, config_dict]
if x is not None
]
)
if configs > 1:
raise ValueError(
"Attempting to create QueueConfig object with multiple config objects"
)
if configs == 0:
raise ValueError(
"Attempting to create QueueConfig object with no config objects"
)
c_ptr = None
if user_config_file is not None:
c_ptr = self._alloc(user_config_file)
if config_content is not None:
c_ptr = self._alloc_content(config_content)
if config_dict is not None:
c_ptr = self._alloc_full(
config_dict[ConfigKeys.JOB_SCRIPT],
config_dict[ConfigKeys.USER_MODE],
config_dict[ConfigKeys.MAX_SUBMIT],
config_dict[ConfigKeys.NUM_CPU],
config_dict[ConfigKeys.QUEUE_SYSTEM],
)
if not c_ptr:
raise ValueError("Unable to create QueueConfig instance")
super(QueueConfig, self).__init__(c_ptr)
        # When built from a config dict, queue options must be applied to the driver explicitly.
if config_dict is not None:
queue_options = config_dict.get(ConfigKeys.QUEUE_OPTION)
for option in queue_options:
self.driver.set_option(
option[ConfigKeys.NAME], option[ConfigKeys.VALUE]
)
def create_job_queue(self):
queue = JobQueue(self.driver, max_submit=self.max_submit)
return queue
def create_local_copy(self):
return self._alloc_local_copy()
def has_job_script(self):
return self._has_job_script()
def free(self):
self._free()
@property
def max_submit(self):
return self._max_submit()
@property
def queue_name(self):
return self.driver.get_option(QueueConfig.LSF_QUEUE_NAME_KEY)
@property
def queue_system(self):
"""The queue system in use, e.g. LSF or LOCAL"""
return self._queue_system()
@property
def job_script(self):
return self._get_job_script()
@property
def driver(self):
return self._queue_driver(self.queue_system).setParent(self)
def _assert_lsf(self, key="driver"):
sys = self.queue_system
if sys != QueueConfig.LSF_KEY:
fmt = "Cannot fetch LSF {key}, current queue is {system}"
raise ValueError(fmt.format(key=key, system=self.queue_system))
@property
def _lsf_driver(self):
self._assert_lsf()
driver = self._queue_driver(self.LSF_KEY)
return driver.setParent(self)
@property
def lsf_resource(self):
self._assert_lsf(key=QueueConfig.LSF_RESOURCE_KEY)
return self._lsf_driver.get_option(self.LSF_RESOURCE_KEY)
@property
def lsf_server(self):
self._assert_lsf(key=QueueConfig.LSF_SERVER_KEY)
return self._lsf_driver.get_option(self.LSF_SERVER_KEY)
@property
def num_cpu(self):
return self._get_num_cpu()
def __eq__(self, other):
if self.max_submit != other.max_submit:
return False
if self.queue_system != other.queue_system:
return False
if self.num_cpu != other.num_cpu:
return False
if self.job_script != other.job_script:
return False
if self.queue_system != "LOCAL":
if self.queue_name != other.queue_name:
return False
if self.lsf_resource != other.lsf_resource:
return False
if self.lsf_server != other.lsf_server:
return False
return True
LSF_KEY = _lsf_driver_opt()
LSF_QUEUE_NAME_KEY = _lsf_queue_opt()
LSF_RESOURCE_KEY = _lsf_resource_opt()
LSF_SERVER_KEY = _lsf_server_opt()
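# Hedged illustration (not part of ERT): the config_dict constructor path above
# expects roughly this shape. The queue-system enum value and the option name
# are hypothetical placeholders.
#
# config_dict = {
#     ConfigKeys.JOB_SCRIPT: "/path/to/job_dispatch.py",
#     ConfigKeys.USER_MODE: True,
#     ConfigKeys.MAX_SUBMIT: 2,
#     ConfigKeys.NUM_CPU: 1,
#     ConfigKeys.QUEUE_SYSTEM: local_driver_enum,  # hypothetical enum value
#     ConfigKeys.QUEUE_OPTION: [
#         {ConfigKeys.NAME: "MAX_RUNNING", ConfigKeys.VALUE: "4"},
#     ],
# }
# queue_config = QueueConfig(config_dict=config_dict)
# job_queue = queue_config.create_job_queue()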
|
joakim-hove/ert
|
res/enkf/queue_config.py
|
Python
|
gpl-3.0
| 6,283 | 0.001751 |
from setuptools import setup, find_packages
import os
import allensdk
# http://bugs.python.org/issue8876#msg208792
if hasattr(os, 'link'):
del os.link
def prepend_find_packages(*roots):
''' Recursively traverse nested packages under the root directories
'''
packages = []
for root in roots:
packages += [root]
packages += [root + '.' + s for s in find_packages(root)]
return packages
setup(
version = allensdk.__version__,
name = 'allensdk',
author = 'David Feng',
author_email = 'davidf@alleninstitute.org',
packages = prepend_find_packages('allensdk'),
package_data={'': ['*.conf', '*.cfg', '*.md', '*.json', '*.dat', '*.env', '*.sh', 'bps', 'Makefile', 'COPYING'] },
description = 'core libraries for the allensdk.',
install_requires = ['h5py>=2.2.1',
'matplotlib>=1.4.2',
'pandas>=0.16.2',
'numpy>=1.8.2',
'six>=1.8.0',
'pynrrd <= 0.2.0.dev'],
dependency_links = [
'git+https://github.com/mhe/pynrrd.git@9e09b24ff1#egg=pynrrd-0.1.999.dev'
],
tests_require=['nose>=1.2.1',
'coverage>=3.7.1',
'mock'],
setup_requires=['setuptools', 'sphinx', 'numpydoc'],
url='http://alleninstitute.github.io/AllenSDK/',
scripts=['allensdk/model/biophys_sim/scripts/bps'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Bio-Informatics'
])
|
wvangeit/AllenSDK
|
setup.py
|
Python
|
gpl-3.0
| 1,828 | 0.012582 |
import shutil
import tempfile
from nose.tools import *
from holland.lib.lvm import LogicalVolume
from holland.lib.lvm.snapshot import *
from tests.constants import *
class TestSnapshot(object):
def setup(self):
self.tmpdir = tempfile.mkdtemp()
def teardown(self):
shutil.rmtree(self.tmpdir)
def test_snapshot_fsm(self):
lv = LogicalVolume.lookup('%s/%s' % (TEST_VG, TEST_LV))
name = lv.lv_name + '_snapshot'
size = 1 # extent
snapshot = Snapshot(name, size, self.tmpdir)
snapshot.start(lv)
def test_snapshot_fsm_with_callbacks(self):
lv = LogicalVolume.lookup('%s/%s' % (TEST_VG, TEST_LV))
name = lv.lv_name + '_snapshot'
size = 1 # extent
snapshot = Snapshot(name, size, self.tmpdir)
def handle_event(event, *args, **kwargs):
pass
snapshot.register('pre-mount', handle_event)
snapshot.register('post-mount', handle_event)
snapshot.start(lv)
def test_snapshot_fsm_with_failures(self):
lv = LogicalVolume.lookup('%s/%s' % (TEST_VG, TEST_LV))
name = lv.lv_name + '_snapshot'
size = 1 # extent
snapshot = Snapshot(name, size, self.tmpdir)
def bad_callback(event, *args, **kwargs):
raise Exception("Oooh nooo!")
for evt in ('initialize', 'pre-snapshot', 'post-snapshot',
'pre-mount', 'post-mount', 'pre-unmount', 'post-unmount',
'pre-remove', 'post-remove', 'finish'):
snapshot.register(evt, bad_callback)
assert_raises(CallbackFailuresError, snapshot.start, lv)
snapshot.unregister(evt, bad_callback)
if snapshot.sigmgr._handlers:
raise Exception("WTF. sigmgr handlers still exist when checking event => %r", evt)
|
m00dawg/holland
|
plugins/holland.lib.lvm/tests/xfs/test_snapshot.py
|
Python
|
bsd-3-clause
| 1,824 | 0.004386 |
#!/usr/bin/env python3
import json
import os
import subprocess
def connection_lost(network_id, timeout_seconds):
p = subprocess.Popen(["hamachi", "go-online", network_id])
try:
p.wait(timeout_seconds)
except subprocess.TimeoutExpired:
p.kill()
return True
return False
if __name__ == "__main__":
with open("/etc/hamachi-watchdog/hamachi-watchdog.conf", "r") as f:
config = json.load(f)
network_id = config['network_id']
timeout_seconds = config['timeout_seconds']
if connection_lost(network_id, timeout_seconds):
print("Hamachi looks down. Restarting it...")
os.system("systemctl restart logmein-hamachi.service")
print("Hamachi was restarted")
|
frenzykryger/hamachi-watchdog
|
hamachi-watchdog.py
|
Python
|
bsd-2-clause
| 745 | 0 |
#!/usr/bin/env python
## \file merge_solution.py
# \brief Python script for merging of the solution files.
# \author F. Palacios
# \version 6.1.0 "Falcon"
#
# The current SU2 release has been coordinated by the
# SU2 International Developers Society <www.su2devsociety.org>
# with selected contributions from the open-source community.
#
# The main research teams contributing to the current release are:
# - Prof. Juan J. Alonso's group at Stanford University.
# - Prof. Piero Colonna's group at Delft University of Technology.
# - Prof. Nicolas R. Gauger's group at Kaiserslautern University of Technology.
# - Prof. Alberto Guardone's group at Polytechnic University of Milan.
# - Prof. Rafael Palacios' group at Imperial College London.
# - Prof. Vincent Terrapon's group at the University of Liege.
# - Prof. Edwin van der Weide's group at the University of Twente.
# - Lab. of New Concepts in Aeronautics at Tech. Institute of Aeronautics.
#
# Copyright 2012-2018, Francisco D. Palacios, Thomas D. Economon,
# Tim Albring, and the SU2 contributors.
#
# SU2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# SU2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SU2. If not, see <http://www.gnu.org/licenses/>.
from optparse import OptionParser
import SU2
# -------------------------------------------------------------------
# Main
# -------------------------------------------------------------------
def main():
parser = OptionParser()
parser.add_option("-f", "--file", dest="filename",
help="read config from FILE", metavar="FILE")
parser.add_option("-n", "--partitions", dest="partitions", default=-1,
help="number of PARTITIONS", metavar="PARTITIONS")
(options, args)=parser.parse_args()
options.partitions = int(options.partitions)
merge_solution( options.filename ,
options.partitions )
# -------------------------------------------------------------------
# MERGE SOLUTION
# -------------------------------------------------------------------
def merge_solution( filename ,
partitions = -1 ):
config = SU2.io.Config(filename)
if partitions > -1 :
config.NUMBER_PART = partitions
SU2.run.merge(config)
#: def merge_solution()
if __name__ == '__main__':
main()
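# Hedged example invocation (the config file name is illustrative):
#   python merge_solution.py -f config.cfg -n 4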
|
drewkett/SU2
|
SU2_PY/merge_solution.py
|
Python
|
lgpl-2.1
| 2,861 | 0.00769 |
# -*- coding: utf-8 -*-
# Copyright 2014-17 Eficent Business and IT Consulting Services S.L.
# <contact@eficent.com>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from . import progress_measurements_entry
|
sysadminmatmoz/pmis
|
project_progress_measurement/wizard/__init__.py
|
Python
|
agpl-3.0
| 237 | 0 |
"""
Texture Replacement
+++++++++++++++++++
Example of how to replace a texture in game with an external image.
``createTexture()`` and ``removeTexture()`` are to be called from a
module Python Controller.
"""
from bge import logic
from bge import texture
def createTexture(cont):
"""Create a new Dynamic Texture"""
obj = cont.owner
# get the reference pointer (ID) of the internal texture
ID = texture.materialID(obj, 'IMoriginal.png')
# create a texture object
object_texture = texture.Texture(obj, ID)
# create a new source with an external image
url = logic.expandPath("//newtexture.jpg")
new_source = texture.ImageFFmpeg(url)
# the texture has to be stored in a permanent Python object
logic.texture = object_texture
# update/replace the texture
logic.texture.source = new_source
logic.texture.refresh(False)
def removeTexture(cont):
"""Delete the Dynamic Texture, reversing back the final to its original state."""
try:
del logic.texture
except:
pass
|
pawkoz/dyplom
|
blender/doc/python_api/examples/bge.texture.1.py
|
Python
|
gpl-2.0
| 1,051 | 0.001903 |
import os
import shutil
import tempfile
import zipfile
class ZipVolume:
def __init__( self, path ):
self.zf = zipfile.ZipFile( path, 'r' )
self.ls = {}
self.__load_ls()
def __load_ls( self ):
ils = self.zf.infolist()
for i in ils:
try:
ids, e = i.filename.split( '.' )
id = int( ids, 16 )
self.ls[id] = i
except:
print 'WARNING: %s not loaded from zip' % ( i.filename, )
pass
def verify( self ):
return self.zf.testzip() is None
def read( self, id, extension ):
try:
info = self.ls[id]
return self.zf.open( info, 'r' )
except KeyError:
return None
def _debug_write( self, id, extension ):
assert False
def get_state( self ):
return 'clean'
def reset_state( self ):
pass
class FileVolume:
def __init__( self, data_config, vol_id ):
self.data_config = data_config
self.vol_id = vol_id
self.to_commit = []
self.state = 'clean'
self.rm_dir = None
def __get_path( self, id, priority, extension ):
path = self.data_config.get_file_vol_path( self.vol_id, priority )
return os.path.join( path, '%016x.%s' % ( id, extension ) )
def verify( self ):
return True
def read( self, id, priority, extension ):
p = self.__get_path( id, priority, extension )
if( not os.path.isfile( p ) ):
return None
else:
try:
return open( p, 'rb' )
except IndexError:
return None
def _debug_write( self, id, priority, extension ):
p = self.__get_path( id, priority, extension )
try:
return open( p, 'wb' )
except IndexError:
return None
def get_state( self ):
return self.state
def reset_state( self ):
self.to_commit = []
self.state = 'clean'
rm_dir = self.rm_dir
self.rm_dir = None
self.to_commit = []
if( rm_dir is not None ):
shutil.rmtree( rm_dir )
def commit( self ):
completion = 0
try:
for t in self.to_commit:
shutil.move( t[0], t[1] )
completion += 1
except:
# Something went wrong, rollback
for t in self.to_commit[:completion]:
shutil.move( t[1], t[0] )
# Sometimes move() seems to leave files behind
for t in self.to_commit:
try:
if( os.path.isfile( t[1] ) ):
os.remove( t[1] )
except:
pass
raise
# Comitted
self.state = 'committed'
def rollback( self ):
if( self.state == 'dirty' ):
self.to_commit = []
self.state = 'clean'
elif( self.state == 'committed' ):
for t in self.to_commit:
shutil.move( t[1], t[0] )
# Sometimes move() seems to leave files behind
for t in self.to_commit:
try:
if( os.path.isfile( t[1] ) ):
os.remove( t[1] )
except:
pass
self.state = 'dirty'
def load_data( self, path, id, priority, extension ):
if( self.state == 'committed' ):
self.reset_state()
self.state = 'dirty'
new_path = self.data_config.get_file_vol_path( self.vol_id, priority )
if( not os.path.isdir( new_path ) ):
os.makedirs( new_path )
tgt = os.path.join( new_path, '%016x.%s' % ( id, extension ) )
self.to_commit.append( ( path, tgt, ) )
def delete( self, id, priority, extension ):
if( self.state == 'committed' ):
self.reset_state()
self.state = 'dirty'
if( self.rm_dir is None ):
self.rm_dir = tempfile.mkdtemp()
src = self.__get_path( id, priority, extension )
if( not os.path.isfile( src ) ):
return
name = os.path.split( src )[-1]
tgt = os.path.join( self.rm_dir, name )
self.to_commit.append( ( src, tgt, ) )
class StreamDatabase:
def __init__( self, data_config ):
self.volumes = {}
self.data_config = data_config
self.state = 'clean'
def __get_volume( self, vol_id ):
if( self.volumes.has_key( vol_id ) ):
return self.volumes[vol_id]
vol = FileVolume( self.data_config, vol_id )
self.volumes[vol_id] = vol
return vol
def __get_vol_for_id( self, id ):
return self.__get_volume( id >> 12 )
def get_state( self ):
return self.state
def reset_state( self ):
for vol in self.volumes.values():
vol.reset_state()
self.state = 'clean'
def prepare_commit( self ):
if( self.state == 'clean' ):
return
assert self.state != 'prepared'
vols = self.volumes.values()
# Clean things up before we begin. We need to do this so that
# We can determine the volumes that changes as part of this
# commit
for vol in vols:
assert vol.get_state() != 'committed'
try:
# Try to commit all the dirty volumes
for vol in vols:
if( vol.get_state() == 'dirty' ):
vol.commit()
except:
# Something went wrong, rollback
for vol in vols:
if( vol.get_state() == 'committed' ):
vol.rollback()
raise
# Comitted
self.state = 'prepared'
def unprepare_commit( self ):
if( self.state == 'clean' ):
return
assert self.state == 'prepared'
vols = self.volumes.values()
for vol in vols:
assert vol.get_state() != 'dirty'
if( vol.get_state() == 'committed' ):
vol.rollback()
for vol in vols:
assert vol.get_state() != 'committed'
self.state = 'dirty'
def complete_commit( self ):
if( self.state == 'clean' ):
return
assert self.state == 'prepared'
vols = self.volumes.values()
for vol in vols:
if( vol.get_state() == 'committed' ):
vol.reset_state()
self.state = 'clean'
def commit( self ):
self.prepare_commit()
self.complete_commit()
def rollback( self ):
vols = self.volumes.values()
if( self.state == 'clean' ):
for vol in vols:
assert vol.get_state() == 'clean'
return
if( self.state == 'prepared' ):
self.unprepare_commit()
if( self.state == 'dirty' ):
for vol in vols:
assert vol.get_state() != 'committed'
if( vol.get_state() == 'dirty' ):
vol.rollback()
for vol in vols:
assert vol.get_state() == 'clean'
self.state = 'clean'
def load_data( self, path, id, priority, extension ):
if( self.state == 'committed' ):
# Clean things up before we begin. We need to do this so that
# We can determine the volumes that changes as part of this
# commit
self.reset_state()
self.state = 'dirty'
v = self.__get_vol_for_id( id )
v.load_data( path, id, priority, extension )
def delete( self, id, priority, extension ):
if( self.state == 'committed' ):
# Clean things up before we begin. We need to do this so that
# We can determine the volumes that changes as part of this
# commit
self.reset_state()
self.state = 'dirty'
v = self.__get_vol_for_id( id )
v.delete( id, priority, extension )
def read( self, id, priority, extension ):
v = self.__get_vol_for_id( id )
return v.read( id, priority, extension )
def _debug_write( self, id, priority, extension ):
v = self.__get_vol_for_id( id )
return v._debug_write( id, priority, extension )
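# Hedged usage sketch (not part of higu): a throwaway data_config stub that
# exercises the two-phase commit flow of StreamDatabase. The root directory,
# stream id and priority are illustrative only.
if( __name__ == '__main__' ):
    class _StubDataConfig( object ):
        def __init__( self, root ):
            self.root = root
        def get_file_vol_path( self, vol_id, priority ):
            return os.path.join( self.root, '%03x' % vol_id, '%d' % priority )
    work_dir = tempfile.mkdtemp()
    src_file = os.path.join( work_dir, 'incoming.jpg' )
    open( src_file, 'wb' ).close()
    db = StreamDatabase( _StubDataConfig( work_dir ) )
    db.load_data( src_file, 0x1001, 0, 'jpg' )
    db.prepare_commit()
    db.complete_commit()
    # After the commit completes, the stream is readable through the database.
    fh = db.read( 0x1001, 0, 'jpg' )
    assert fh is not None
    fh.close()
    shutil.rmtree( work_dir )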
|
hakuya/higu
|
lib/hdbfs/ark.py
|
Python
|
bsd-2-clause
| 8,385 | 0.025164 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008 John Paulett (john -at- paulett.org)
# Copyright (C) 2009, 2011, 2013 David Aguilar (davvid -at- gmail.com)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
"""Python library for serializing any arbitrary object graph into JSON.
jsonpickle can take almost any Python object and turn the object into JSON.
Additionally, it can reconstitute the object back into Python.
The object must be accessible globally via a module and must
inherit from object (AKA new-style classes).
Create an object::
class Thing(object):
def __init__(self, name):
self.name = name
obj = Thing('Awesome')
Use jsonpickle to transform the object into a JSON string::
import jsonpickle
frozen = jsonpickle.encode(obj)
Use jsonpickle to recreate a Python object from a JSON string::
thawed = jsonpickle.decode(frozen)
.. warning::
Loading a JSON string from an untrusted source represents a potential
security vulnerability. jsonpickle makes no attempt to sanitize the input.
The new object has the same type and data, but essentially is now a copy of
the original.
.. code-block:: python
assert obj.name == thawed.name
If you will never need to load (regenerate the Python class from JSON), you can
pass in the keyword unpicklable=False to prevent extra information from being
added to JSON::
oneway = jsonpickle.encode(obj, unpicklable=False)
result = jsonpickle.decode(oneway)
assert obj.name == result['name'] == 'Awesome'
"""
import sys, os
from music21 import common
sys.path.append(common.getSourceFilePath() + os.path.sep + 'ext')
from jsonpickle import pickler
from jsonpickle import unpickler
from jsonpickle.backend import JSONBackend
from jsonpickle.version import VERSION
# ensure built-in handlers are loaded
__import__('jsonpickle.handlers')
__all__ = ('encode', 'decode')
__version__ = VERSION
json = JSONBackend()
# Export specific JSONPluginMgr methods into the jsonpickle namespace
set_preferred_backend = json.set_preferred_backend
set_encoder_options = json.set_encoder_options
load_backend = json.load_backend
remove_backend = json.remove_backend
enable_fallthrough = json.enable_fallthrough
def encode(value,
unpicklable=True,
make_refs=True,
keys=False,
max_depth=None,
backend=None,
warn=False,
max_iter=None):
"""Return a JSON formatted representation of value, a Python object.
:param unpicklable: If set to False then the output will not contain the
information necessary to turn the JSON data back into Python objects,
but a simpler JSON stream is produced.
:param max_depth: If set to a non-negative integer then jsonpickle will
not recurse deeper than 'max_depth' steps into the object. Anything
deeper than 'max_depth' is represented using a Python repr() of the
object.
:param make_refs: If set to False jsonpickle's referencing support is
disabled. Objects that are id()-identical won't be preserved across
encode()/decode(), but the resulting JSON stream will be conceptually
simpler. jsonpickle detects cyclical objects and will break the cycle
by calling repr() instead of recursing when make_refs is set False.
:param keys: If set to True then jsonpickle will encode non-string
dictionary keys instead of coercing them into strings via `repr()`.
:param warn: If set to True then jsonpickle will warn when it
returns None for an object which it cannot pickle
(e.g. file descriptors).
:param max_iter: If set to a non-negative integer then jsonpickle will
consume at most `max_iter` items when pickling iterators.
>>> encode('my string')
'"my string"'
>>> encode(36)
'36'
>>> encode({'foo': True})
'{"foo": true}'
>>> encode({'foo': True}, max_depth=0)
'"{\\'foo\\': True}"'
>>> encode({'foo': True}, max_depth=1)
'{"foo": "True"}'
"""
if backend is None:
backend = json
return pickler.encode(value,
backend=backend,
unpicklable=unpicklable,
make_refs=make_refs,
keys=keys,
max_depth=max_depth,
warn=warn)
def decode(string, backend=None, keys=False):
"""Convert a JSON string into a Python object.
The keyword argument 'keys' defaults to False.
If set to True then jsonpickle will decode non-string dictionary keys
into python objects via the jsonpickle protocol.
>>> str(decode('"my string"'))
'my string'
>>> decode('36')
36
"""
if backend is None:
backend = json
return unpickler.decode(string, backend=backend, keys=keys)
# json.load(),loads(), dump(), dumps() compatibility
dumps = encode
loads = decode
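# Hedged usage sketch (not part of the vendored module): non-string dictionary
# keys survive a round trip only when keys=True is passed to both calls.
if __name__ == '__main__':
    frozen = encode({1: 'one'}, keys=True)
    assert decode(frozen, keys=True) == {1: 'one'}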
|
arnavd96/Cinemiezer
|
myvenv/lib/python3.4/site-packages/music21/ext/jsonpickle/__init__.py
|
Python
|
mit
| 5,049 | 0.00099 |
import random
class intDict(object):
"""A dictionary with integer keys"""
def __init__(self, numBuckets):
"""Create an empty dictionary"""
self.buckets = []
self.numBuckets = numBuckets
for i in range(numBuckets):
self.buckets.append([])
def addEntry(self, dictKey, dictVal):
"""Assumes dictKey an int. Adds an entry."""
hashBucket = self.buckets[dictKey%self.numBuckets]
for i in range(len(hashBucket)):
if hashBucket[i][0] == dictKey:
hashBucket[i] = (dictKey, dictVal)
return
hashBucket.append((dictKey, dictVal))
def getValue(self, dictKey):
"""Assumes dictKey an int. Returns entry associated
with the key dictKey"""
hashBucket = self.buckets[dictKey%self.numBuckets]
for e in hashBucket:
if e[0] == dictKey:
return e[1]
return None
def __str__(self):
res = ''
for b in self.buckets:
for t in b:
res = res + str(t[0]) + ':' + str(t[1]) + ','
return '{' + res[:-1] + '}' #res[:-1] removes the last comma
D = intDict(29)
for i in range(29):
#choose a random int in range(10**5)
key = random.choice(range(10**5))
D.addEntry(key, i)
print '\n', 'The buckets are:'
for hashBucket in D.buckets: #violates abstraction barrier
print ' ', hashBucket
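# Hedged collision check (not in the original lecture code): with 29 buckets,
# keys 1 and 30 land in the same bucket because 30 % 29 == 1, yet both entries
# stay retrievable.
collisionDemo = intDict(29)
collisionDemo.addEntry(1, 'a')
collisionDemo.addEntry(30, 'b')
assert collisionDemo.getValue(1) == 'a'
assert collisionDemo.getValue(30) == 'b'
assert len(collisionDemo.buckets[1]) == 2 #both entries share bucket 1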
|
parmarmanojkumar/MITx_Python
|
6002x/week2/lectureCode_intDict.py
|
Python
|
mit
| 1,482 | 0.008097 |
#!/usr/bin/env python
from sys import argv
def calcRabbits(n, k):
    pairs = [1, 1]
    for i in range(2, n):
        f1 = pairs[i-1]
        f2 = pairs[i-2] * k  # each pair older than one month produces k new pairs
        pairs.append(f1 + f2)
    return pairs
if __name__ == "__main__":
try:
n = int(argv[1])
k = int(argv[2])
print(calcRabbits(n,k))
except (IndexError, ValueError):
print("Usage: python fib.py <intN> <intK>")
|
Jorisvansteenbrugge/advbioinf
|
rosalind/python/fib.py
|
Python
|
gpl-3.0
| 489 | 0.022495 |
from cms.models.pluginmodel import CMSPlugin
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import gettext_lazy as _
from django.utils.translation import get_language
from partners.models import Partner
class PartnersPlugin(CMSPluginBase):
name = _("Partners")
model = CMSPlugin
render_template = "partners/partners_plugin.html"
text_enabled = False
allow_children = False
def render(self, context, instance, placeholder):
language = get_language()
if language is None:
language = 'en'
partners = Partner.objects.filter(active=True).translated(language).order_by('translations__name').all()
context.update({
'partners': partners,
})
return context
plugin_pool.register_plugin(PartnersPlugin)
|
gitsimon/tq_website
|
partners/cms_plugins/partners_plugin.py
|
Python
|
gpl-2.0
| 859 | 0.001164 |
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils.translation import ugettext
from django.contrib import messages
from django.contrib.admin.views.decorators import staff_member_required
from pinax.apps.account.utils import get_default_redirect, user_display
from pinax.apps.signup_codes.models import SignupCode
from pinax.apps.signup_codes.forms import SignupForm, InviteUserForm
def group_and_bridge(request):
"""
Given the request we can depend on the GroupMiddleware to provide the
group and bridge.
"""
# be group aware
group = getattr(request, "group", None)
if group:
bridge = request.bridge
else:
bridge = None
return group, bridge
def group_context(group, bridge):
# @@@ use bridge
ctx = {
"group": group,
}
if group:
ctx["group_base"] = bridge.group_base_template()
return ctx
def signup(request, **kwargs):
form_class = kwargs.pop("form_class", SignupForm)
template_name = kwargs.pop("template_name", "account/signup.html")
template_name_failure = kwargs.pop("template_name_failure", "signup_codes/failure.html")
success_url = kwargs.pop("success_url", None)
group, bridge = group_and_bridge(request)
ctx = group_context(group, bridge)
if success_url is None:
if hasattr(settings, "SIGNUP_REDIRECT_URLNAME"):
fallback_url = reverse(settings.SIGNUP_REDIRECT_URLNAME)
else:
if hasattr(settings, "LOGIN_REDIRECT_URLNAME"):
fallback_url = reverse(settings.LOGIN_REDIRECT_URLNAME)
else:
fallback_url = settings.LOGIN_REDIRECT_URL
success_url = get_default_redirect(request, fallback_url)
code = request.GET.get("code")
if request.method == "POST":
form = form_class(request.POST, group=group)
if form.is_valid():
user = form.save(request=request)
signup_code = form.cleaned_data["signup_code"]
if signup_code:
signup_code.use(user)
form.login(request, user)
messages.add_message(request, messages.SUCCESS,
ugettext("Successfully logged in as %(username)s.") % {
"username": user_display(user),
}
)
return HttpResponseRedirect(success_url)
else:
signup_code = SignupCode.check(code)
if signup_code:
initial = {
"signup_code": code,
"email": signup_code.email,
}
form = form_class(initial=initial, group=group)
else:
if not settings.ACCOUNT_OPEN_SIGNUP:
ctx.update({
"code": code,
})
ctx = RequestContext(request, ctx)
# if account signup is not open we want to fail when there is
# no sign up code or what was provided failed.
return render_to_response(template_name_failure, ctx)
else:
form = form_class(group=group)
ctx.update({
"code": code,
"form": form,
})
return render_to_response(template_name, RequestContext(request, ctx))
@staff_member_required
def admin_invite_user(request, **kwargs):
"""
This view, by default, works inside the Django admin.
"""
form_class = kwargs.pop("form_class", InviteUserForm)
template_name = kwargs.pop("template_name", "signup_codes/admin_invite_user.html")
group, bridge = group_and_bridge(request)
if request.method == "POST":
form = form_class(request.POST, group=group)
if form.is_valid():
email = form.cleaned_data["email"]
form.send_signup_code()
messages.add_message(request, messages.INFO,
ugettext("An email has been sent to %(email)s.") % {
"email": email
}
)
form = form_class() # reset
else:
form = form_class(group=group)
ctx = group_context(group, bridge)
ctx.update({
"title": ugettext("Invite user"),
"form": form,
})
return render_to_response(template_name, RequestContext(request, ctx))
|
espenak/pinax-oldversion-backup
|
pinax/apps/signup_codes/views.py
|
Python
|
mit
| 4,496 | 0.004448 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sleeptomusicweb.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
beddit/sleep-musicalization-web
|
manage.py
|
Python
|
bsd-2-clause
| 258 | 0 |
import mimetypes
import unittest
from os import path
from django.conf.urls.static import static
from django.http import FileResponse, HttpResponseNotModified
from django.test import SimpleTestCase, override_settings
from django.utils.http import http_date
from django.views.static import was_modified_since
from .. import urls
from ..urls import media_dir
@override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls')
class StaticTests(SimpleTestCase):
"""Tests django views in django/views/static.py"""
prefix = 'site_media'
def test_serve(self):
"The static view can serve static media"
media_files = ['file.txt', 'file.txt.gz']
for filename in media_files:
response = self.client.get('/%s/%s' % (self.prefix, filename))
response_content = b''.join(response)
file_path = path.join(media_dir, filename)
with open(file_path, 'rb') as fp:
self.assertEqual(fp.read(), response_content)
self.assertEqual(len(response_content), int(response['Content-Length']))
self.assertEqual(mimetypes.guess_type(file_path)[1], response.get('Content-Encoding', None))
def test_chunked(self):
"The static view should stream files in chunks to avoid large memory usage"
response = self.client.get('/%s/%s' % (self.prefix, 'long-line.txt'))
first_chunk = next(response.streaming_content)
self.assertEqual(len(first_chunk), FileResponse.block_size)
second_chunk = next(response.streaming_content)
response.close()
# strip() to prevent OS line endings from causing differences
self.assertEqual(len(second_chunk.strip()), 1449)
def test_unknown_mime_type(self):
response = self.client.get('/%s/file.unknown' % self.prefix)
self.assertEqual('application/octet-stream', response['Content-Type'])
response.close()
def test_copes_with_empty_path_component(self):
file_name = 'file.txt'
response = self.client.get('/%s//%s' % (self.prefix, file_name))
response_content = b''.join(response)
with open(path.join(media_dir, file_name), 'rb') as fp:
self.assertEqual(fp.read(), response_content)
def test_is_modified_since(self):
file_name = 'file.txt'
response = self.client.get(
'/%s/%s' % (self.prefix, file_name),
HTTP_IF_MODIFIED_SINCE='Thu, 1 Jan 1970 00:00:00 GMT'
)
response_content = b''.join(response)
with open(path.join(media_dir, file_name), 'rb') as fp:
self.assertEqual(fp.read(), response_content)
def test_not_modified_since(self):
file_name = 'file.txt'
response = self.client.get(
'/%s/%s' % (self.prefix, file_name),
HTTP_IF_MODIFIED_SINCE='Mon, 18 Jan 2038 05:14:07 GMT'
# This is 24h before max Unix time. Remember to fix Django and
# update this test well before 2038 :)
)
self.assertIsInstance(response, HttpResponseNotModified)
def test_invalid_if_modified_since(self):
"""Handle bogus If-Modified-Since values gracefully
Assume that a file is modified since an invalid timestamp as per RFC
2616, section 14.25.
"""
file_name = 'file.txt'
invalid_date = 'Mon, 28 May 999999999999 28:25:26 GMT'
response = self.client.get('/%s/%s' % (self.prefix, file_name),
HTTP_IF_MODIFIED_SINCE=invalid_date)
response_content = b''.join(response)
with open(path.join(media_dir, file_name), 'rb') as fp:
self.assertEqual(fp.read(), response_content)
self.assertEqual(len(response_content), int(response['Content-Length']))
def test_invalid_if_modified_since2(self):
"""Handle even more bogus If-Modified-Since values gracefully
Assume that a file is modified since an invalid timestamp as per RFC
2616, section 14.25.
"""
file_name = 'file.txt'
invalid_date = ': 1291108438, Wed, 20 Oct 2010 14:05:00 GMT'
response = self.client.get('/%s/%s' % (self.prefix, file_name),
HTTP_IF_MODIFIED_SINCE=invalid_date)
response_content = b''.join(response)
with open(path.join(media_dir, file_name), 'rb') as fp:
self.assertEqual(fp.read(), response_content)
self.assertEqual(len(response_content), int(response['Content-Length']))
def test_404(self):
response = self.client.get('/%s/non_existing_resource' % self.prefix)
self.assertEqual(404, response.status_code)
def test_index(self):
response = self.client.get('/%s/' % self.prefix)
self.assertContains(response, 'Index of /')
class StaticHelperTest(StaticTests):
"""
Test case to make sure the static URL pattern helper works as expected
"""
def setUp(self):
super(StaticHelperTest, self).setUp()
self._old_views_urlpatterns = urls.urlpatterns[:]
urls.urlpatterns += static('/media/', document_root=media_dir)
def tearDown(self):
super(StaticHelperTest, self).tearDown()
urls.urlpatterns = self._old_views_urlpatterns
class StaticUtilsTests(unittest.TestCase):
def test_was_modified_since_fp(self):
"""
A floating point mtime does not disturb was_modified_since (#18675).
"""
mtime = 1343416141.107817
header = http_date(mtime)
self.assertFalse(was_modified_since(header, mtime))
|
mattseymour/django
|
tests/view_tests/tests/test_static.py
|
Python
|
bsd-3-clause
| 5,569 | 0.000898 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Triangle Project Code.
# Triangle analyzes the lengths of the sides of a triangle
# (represented by a, b and c) and returns the type of triangle.
#
# It returns:
# 'equilateral' if all sides are equal
# 'isosceles' if exactly 2 sides are equal
# 'scalene' if no sides are equal
#
# The tests for this method can be found in
# about_triangle_project.py
# and
# about_triangle_project_2.py
#
def triangle(a, b, c):
if a <=0 or b <= 0 or c <= 0:
raise TriangleError(f"Non-positive value passed for sides:{a},{b},{c}")
sum1 = a + b
sum2 = a + c
sum3 = b + c
if sum1 <= c or sum2 <= b or sum3 <= a:
raise TriangleError("Sum of any two sides must be greater than third one.")
if a == b == c:
return 'equilateral'
if a == b or b == c or a == c:
return 'isosceles'
return 'scalene'
# Error class used in part 2. No need to change this code.
class TriangleError(Exception):
pass
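# Hedged sanity checks (not part of the original koan file):
assert triangle(2, 2, 2) == 'equilateral'
assert triangle(3, 4, 4) == 'isosceles'
assert triangle(3, 4, 5) == 'scalene'
try:
    triangle(1, 1, 3)  # violates the triangle inequality
except TriangleError:
    pass
else:
    raise AssertionError("expected a TriangleError")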
|
MichaelSEA/python_koans
|
python3/koans/triangle.py
|
Python
|
mit
| 1,017 | 0.003933 |
#!/usr/bin/python
#! -*- coding:utf-8 -*-
from sqlalchemy import Column, Integer, String
from database import Base
class Message(Base):
__tablename__ = 'message'
MessageId = Column(Integer, primary_key=True)
DeviceId = Column(String(50))
MessageBody = Column(String(1000))
MessageType = Column(Integer)
CreatedTime = Column(String(50))
def __init__(self, json):
self.DeviceId = json["DeviceId"]
self.CreatedTime = json["CreatedTime"]
self.MessageType = json["MessageType"]
self.MessageBody = json["MessageBody"]
def get_json(self):
return {
"MessageId":self.MessageId,
"DeviceId":self.DeviceId,
"CreatedTime":self.CreatedTime,
"MessageType":self.MessageType,
"MessageBody":self.MessageBody
}
def __repr__(self):
return repr(self.get_json())
class UserInfo(Base):
__tablename__ = 'userinfo'
DeviceId = Column(String(50), primary_key=True)
UseTimes = Column(Integer)
LastUseTime = Column(String(50))
def __init__(self, json):
self.DeviceId = json["DeviceId"]
self.UseTimes = json["UseTimes"]
self.LastUseTime = json["LastUseTime"]
def get_json(self):
return {
"DeviceId":self.DeviceId,
"UseTimes":self.UseTimes,
"LastUseTime":self.LastUseTime
}
def __repr__(self):
return repr(self.get_json())
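# Hedged usage sketch (values are illustrative, not from the project):
# msg = Message({
#     "DeviceId": "dev-001",
#     "CreatedTime": "2017-01-01 00:00:00",
#     "MessageType": 1,
#     "MessageBody": "hello",
# })
# session.add(msg) # assumes a SQLAlchemy session bound to `database.Base`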
|
HalfLike/qc-web-server
|
app/models.py
|
Python
|
apache-2.0
| 1,467 | 0.00818 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Class OneDeviceStrategy implementing DistributionStrategy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.contrib.distribute.python import values
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.training import distribute as distribute_lib
# TODO(josh11b): Replace asserts in this file with if ...: raise ...
class OneDeviceStrategy(distribute_lib.DistributionStrategy):
"""A distribution strategy for running on a single device."""
# TODO(josh11b): Do we wrap values in types to generate errors if you are
# doing something that won't work with other DistributionStrategy
# implementations?
def __init__(self, device, prefetch_on_device=None):
super(OneDeviceStrategy, self).__init__()
self._device = device
self._prefetch_on_device = prefetch_on_device
self._default_device = device
def _create_variable(self, next_creator, *args, **kwargs):
# No need to distinguish tower-local variables when not mirroring,
# we just enforce that they are not trainable.
if kwargs.pop("tower_local_reduce_method", None) is not None:
kwargs["trainable"] = False
colocate_with = kwargs.pop("colocate_with", None)
if colocate_with is None:
with ops.device(self._device):
return next_creator(*args, **kwargs)
if isinstance(colocate_with, six.string_types):
with ops.device(colocate_with):
return next_creator(*args, **kwargs)
if (isinstance(colocate_with, list) and len(colocate_with) == 1 and
isinstance(colocate_with[0], six.string_types)):
with ops.device(colocate_with[0]):
return next_creator(*args, **kwargs)
with ops.colocate_with(colocate_with):
return next_creator(*args, **kwargs)
def distribute_dataset(self, dataset_fn):
return values.PerDeviceDataset(
self._call_dataset_fn(dataset_fn), [self._device],
self._prefetch_on_device)
def _broadcast(self, tensor, destinations):
return tensor
def _call_for_each_tower(self, fn, *args, **kwargs):
# We don't run `fn` in multiple threads in OneDeviceStrategy.
kwargs.pop("run_concurrently", None)
with ops.device(self._device), _OneDeviceTowerContext(self):
return fn(*args, **kwargs)
def map(self, map_over, fn, *args, **kwargs):
with ops.device(self._device):
return values.MapOutput([fn(m, *args, **kwargs) for m in map_over])
def _reduce(self, method_string, value, destinations):
if not isinstance(value, values.MapOutput):
return value
l = value.get()
assert l
with ops.device(self._device):
if method_string == "sum":
return math_ops.add_n(l)
elif method_string == "mean":
return math_ops.add_n(l) / len(l)
else:
assert False
def _update(self, var, fn, *args, **kwargs):
with ops.device(self._device), distribute_lib.UpdateContext(self._device):
return fn(var, *args, **kwargs)
def _update_non_slot(self, colocate_with, fn, *args, **kwargs):
del colocate_with
with ops.device(self._device), distribute_lib.UpdateContext(self._device):
return fn(*args, **kwargs)
def _fetch(self, val, destination, fn):
"""Return a copy of `val` or `fn(val)` on `destination`."""
with ops.device(self._device):
v = fn(val)
with ops.device(destination):
return array_ops.identity(v)
def _unwrap(self, value):
return [value]
@property
def is_single_tower(self):
return True
@property
def num_towers(self):
return 1
@property
def worker_devices(self):
return [self._device]
@property
def parameter_devices(self):
return [self._device]
def non_slot_devices(self, var_list):
del var_list
return [self._device]
def _worker_device_index(self):
return 0
class _OneDeviceTowerContext(distribute_lib.TowerContext):
def __init__(self, distribution_strategy):
distribute_lib.TowerContext.__init__(
self, distribution_strategy, tower_id=0)
@property
def device(self):
return self._distribution_strategy.worker_devices[0]
|
nburn42/tensorflow
|
tensorflow/contrib/distribute/python/one_device_strategy.py
|
Python
|
apache-2.0
| 4,931 | 0.009126 |
"""
A HTML5 target.
"""
from targets import _
from html import TYPE
import html
NAME = _('HTML5 page')
EXTENSION = 'html'
HEADER = """\
<!DOCTYPE html>
<html>
<head>
<meta charset="%(ENCODING)s">
<title>%(HEADER1)s</title>
<meta name="generator" content="http://txt2tags.org">
<link rel="stylesheet" href="%(STYLE)s">
<style>
body{background-color:#fff;color:#000;}
hr{background-color:#000;border:0;color:#000;}
hr.heavy{height:5px;}
hr.light{height:1px;}
img{border:0;display:block;}
img.right{margin:0 0 0 auto;}
img.center{border:0;margin:0 auto;}
table th,table td{padding:4px;}
.center,header{text-align:center;}
table.center {margin-left:auto; margin-right:auto;}
.right{text-align:right;}
.left{text-align:left;}
.tableborder,.tableborder td,.tableborder th{border:1px solid #000;}
.underline{text-decoration:underline;}
</style>
</head>
<body>
<header>
<hgroup>
<h1>%(HEADER1)s</h1>
<h2>%(HEADER2)s</h2>
<h3>%(HEADER3)s</h3>
</hgroup>
</header>
<article>
"""
HEADERCSS = """\
<!DOCTYPE html>
<html>
<head>
<meta charset="%(ENCODING)s">
<title>%(HEADER1)s</title>
<meta name="generator" content="http://txt2tags.org">
<link rel="stylesheet" href="%(STYLE)s">
</head>
<body>
<header>
<hgroup>
<h1>%(HEADER1)s</h1>
<h2>%(HEADER2)s</h2>
<h3>%(HEADER3)s</h3>
</hgroup>
</header>
<article>
"""
TAGS = html.TAGS.copy()
for tag in TAGS:
TAGS[tag] = TAGS[tag].lower()
HTML5TAGS = {
'title1Open' : '<section~A~>\n<h1>\a</h1>' ,
'title1Close' : '</section>' ,
'title2Open' : '<section~A~>\n<h2>\a</h2>' ,
'title2Close' : '</section>' ,
'title3Open' : '<section~A~>\n<h3>\a</h3>' ,
'title3Close' : '</section>' ,
'title4Open' : '<section~A~>\n<h4>\a</h4>' ,
'title4Close' : '</section>' ,
'title5Open' : '<section~A~>\n<h5>\a</h5>' ,
'title5Close' : '</section>' ,
'fontBoldOpen' : '<strong>' ,
'fontBoldClose' : '</strong>' ,
'fontItalicOpen' : '<em>' ,
'fontItalicClose' : '</em>' ,
'fontUnderlineOpen' : '<span class="underline">',
'fontUnderlineClose' : '</span>' ,
'fontStrikeOpen' : '<del>' ,
'fontStrikeClose' : '</del>' ,
'listItemClose' : '</li>' ,
'numlistItemClose' : '</li>' ,
'deflistItem2Close' : '</dd>' ,
'bar1' : '<hr class="light">' ,
'bar2' : '<hr class="heavy">' ,
'img' : '<img~a~ src="\a" alt="">' ,
'imgEmbed' : '<img~a~ src="\a" alt="">' ,
'_imgAlignLeft' : ' class="left"' ,
'_imgAlignCenter' : ' class="center"',
'_imgAlignRight' : ' class="right"' ,
'tableOpen' : '<table~a~~b~>' ,
'_tableBorder' : ' class="tableborder"' ,
'_tableAlignCenter' : ' style="margin-left: auto; margin-right: auto;"',
'_tableCellAlignRight' : ' class="right"' ,
'_tableCellAlignCenter': ' class="center"',
'cssOpen' : '<style>' ,
'tocOpen' : '<nav>' ,
'tocClose' : '</nav>' ,
'EOD' : '</article></body></html>'
}
TAGS.update(HTML5TAGS)
RULES = html.RULES.copy()
#Update the rules to use explicit <section> </section> tags
HTML5RULES = {
'titleblocks' : 1,
}
RULES.update(HTML5RULES)
|
farvardin/txt2tags-test
|
targets/html5.py
|
Python
|
gpl-2.0
| 3,582 | 0.019542 |
class Solution(object):
def distributeCandies(self, candies):
"""
:type candies: List[int]
:rtype: int
"""
result = 0
kind = list(set(candies))
if len(kind) > len(candies)/2:
result = len(candies)/2
else:
result = len(kind)
return result
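# Hedged equivalent (not part of the submitted solution): the sister gets at
# most one candy of each kind and at most half of all the candies.
def distribute_candies_concise(candies):
    return min(len(set(candies)), len(candies) // 2)
assert distribute_candies_concise([1, 1, 2, 2, 3, 3]) == 3
assert distribute_candies_concise([1, 1, 2, 3]) == 2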
|
sadad111/leetcodebox
|
Distribute Candies.py
|
Python
|
gpl-3.0
| 338 | 0 |
import numpy
from chainer import cuda, Function
def _cu_conv_sum(y, x, n):
# Convolutional sum
# TODO(beam2d): Use scan computation
rdim = x.size / (x.shape[0] * x.shape[1])
cuda.elementwise(
'float* y, const float* x, int rdim, int N, int n_',
'''
int half_n = n_ / 2;
int offset = i / rdim * N * rdim + i % rdim;
float* xi = x + offset;
float* yi = y + offset;
float sum_part = 0;
for (int j = 0; j < N + half_n; ++j) {
if (j < N) {
sum_part += xi[j * rdim];
}
if (j >= n_) {
sum_part -= xi[(j - n_) * rdim];
}
if (j >= half_n) {
yi[(j - half_n) * rdim] = sum_part;
}
}
''', 'lrn_conv_sum')(y, x, rdim, x.shape[1], n,
range=slice(0, x.shape[0] * rdim, 1))
class LocalResponseNormalization(Function):
"""Cross-channel normalization function used in AlexNet."""
def __init__(self, n=5, k=2, alpha=1e-4, beta=.75):
self.n = n
self.k = k
self.alpha = alpha
self.beta = beta
def forward_cpu(self, x):
half_n = self.n / 2
x2 = x[0] * x[0]
sum_part = x2.copy()
for i in xrange(1, half_n + 1):
sum_part[:, i: ] += x2[:, :-i]
sum_part[:, :-i] += x2[:, i: ]
self.unit_scale = self.k + self.alpha * sum_part
self.scale = self.unit_scale ** -self.beta
self.y = x[0] * self.scale
return self.y,
def backward_cpu(self, x, gy):
half_n = self.n / 2
summand = self.y * gy[0] / self.unit_scale
sum_part = summand.copy()
for i in xrange(1, half_n + 1):
sum_part[:, i: ] += summand[:, :-i]
sum_part[:, :-i] += summand[:, i: ]
gx = gy[0] * self.scale - 2 * self.alpha * self.beta * x[0] * sum_part
return gx,
def forward_gpu(self, x):
self.y = x[0] * x[0] # temporary
self.scale = cuda.empty_like(self.y)
_cu_conv_sum(self.scale, self.y, self.n)
cuda.elementwise(
'''float* y, float* scale, const float* x,
float k, float alpha, float beta''',
'''scale[i] = k + alpha * scale[i];
y[i] = x[i] * __powf(scale[i], -beta);''',
'lrn_fwd')(self.y, self.scale, x[0], self.k, self.alpha, self.beta)
return self.y,
def backward_gpu(self, x, gy):
summand = cuda.empty_like(x[0])
cuda.elementwise(
'''float* summand, const float* scale, const float* y,
const float* gy''',
'summand[i] = y[i] * gy[i] / scale[i]',
'lrn_bwd_summand')(summand, self.scale, self.y, gy[0])
gx = cuda.empty_like(x[0])
_cu_conv_sum(gx, summand, self.n)
cuda.elementwise(
'''float* gx, const float* x, const float* gy, const float* scale,
float beta, float coeff''',
'gx[i] = __powf(scale[i], -beta) * gy[i] - coeff * x[i] * gx[i]',
'lrn_bwd')(gx, x[0], gy[0], self.scale, self.beta,
2 * self.alpha * self.beta)
return gx,
def local_response_normalization(x, n=5, k=2, alpha=1e-4, beta=.75):
"""Local response normalization across neighboring channels.
This function implements normalization across channels. Let :math:`x` an
input image with :math:`N` channels. Then, this function computes an output
image :math:`y` by following formula:
.. math::
y_i = {x_i \\over \\left( k + \\
\\alpha \\sum_{j=\\max{1, i - n/2}}^{\\min{N, i + n/2}} \\
x_j^2 \\right)^\\beta}.
Args:
x (Variable): Input variable.
n (int): Normalization window width.
k (float): Smoothing parameter.
alpha (float): Normalizer scaling parameter.
beta (float): Normalizer power parameter.
Returns:
Variable: Output variable.
See: SSec. 3.3 of `ImageNet Classification with Deep Convolutional Neural \\
Networks <http://www.cs.toronto.edu/~fritz/absps/imagenet.pdf>`_
"""
return LocalResponseNormalization(n, k, alpha, beta)(x)
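# Hedged NumPy-only sketch (not part of chainer): evaluates the docstring
# formula the same way forward_cpu does, for an array whose channel axis is
# axis 1.
def _lrn_reference(x, n=5, k=2, alpha=1e-4, beta=.75):
    half_n = n // 2
    x2 = x * x
    sum_part = x2.copy()
    for i in range(1, half_n + 1):
        sum_part[:, i:] += x2[:, :-i]
        sum_part[:, :-i] += x2[:, i:]
    return x * (k + alpha * sum_part) ** -beta
_x = numpy.random.randn(2, 7).astype(numpy.float32)
assert _lrn_reference(_x).shape == _x.shape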
|
nushio3/chainer
|
chainer/functions/local_response_normalization.py
|
Python
|
mit
| 4,265 | 0.003751 |